From 6c769f9a535f3941b90c695ddbdfcb238cbbee06 Mon Sep 17 00:00:00 2001 From: misos1 <30872003+misos1@users.noreply.github.com> Date: Wed, 7 Aug 2024 21:25:41 +0200 Subject: [PATCH 01/11] enable more than 30 fps --- .../camera/camera_avfoundation/CHANGELOG.md | 6 +- .../ios/RunnerTests/CameraSettingsTests.m | 20 +++- .../example/ios/RunnerTests/CameraTestUtils.m | 32 +++++- .../Sources/camera_avfoundation/FLTCam.m | 80 ++++++++++--- .../camera/camera_avfoundation/pubspec.yaml | 2 +- .../video_player_avfoundation/CHANGELOG.md | 5 + .../darwin/RunnerTests/VideoPlayerTests.m | 105 ++++++++++++++++-- .../FVPVideoPlayerPlugin.m | 54 +++++---- .../video_player_avfoundation/pubspec.yaml | 2 +- 9 files changed, 253 insertions(+), 53 deletions(-) diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md index d519ed410ec6..b04d89989ecc 100644 --- a/packages/camera/camera_avfoundation/CHANGELOG.md +++ b/packages/camera/camera_avfoundation/CHANGELOG.md @@ -1,6 +1,10 @@ +## 0.9.17+2 + +* Adds possibility to use any supported FPS and fixes crash when using unsupported FPS. + ## 0.9.17+1 -* Fixes a crash due to appending sample buffers when readyForMoreMediaData is NO +* Fixes a crash due to appending sample buffers when readyForMoreMediaData is NO. ## 0.9.17 diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m index 039ce122174e..418b316bea62 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m @@ -148,9 +148,9 @@ - (void)testSettings_shouldPassConfigurationToCameraDeviceAndWriter { // Expect FPS configuration is passed to camera device. 
[self waitForExpectations:@[ - injectedWrapper.lockExpectation, injectedWrapper.beginConfigurationExpectation, + injectedWrapper.beginConfigurationExpectation, injectedWrapper.lockExpectation, injectedWrapper.minFrameDurationExpectation, injectedWrapper.maxFrameDurationExpectation, - injectedWrapper.commitConfigurationExpectation, injectedWrapper.unlockExpectation + injectedWrapper.unlockExpectation, injectedWrapper.commitConfigurationExpectation ] timeout:1 enforceOrder:YES]; @@ -202,4 +202,20 @@ - (void)testSettings_ShouldBeSupportedByMethodCall { XCTAssertNotNil(resultValue); } +- (void)testSettings_ShouldSelectFormatWhichSupports60FPS { + FCPPlatformMediaSettings *settings = + [FCPPlatformMediaSettings makeWithResolutionPreset:gTestResolutionPreset + framesPerSecond:@(60) + videoBitrate:@(gTestVideoBitrate) + audioBitrate:@(gTestAudioBitrate) + enableAudio:gTestEnableAudio]; + + FLTCam *camera = FLTCreateCamWithCaptureSessionQueueAndMediaSettings( + dispatch_queue_create("test", NULL), settings, nil, nil); + + AVFrameRateRange *range = camera.captureDevice.activeFormat.videoSupportedFrameRateRanges[0]; + XCTAssertLessThanOrEqual(range.minFrameRate, 60); + XCTAssertGreaterThanOrEqual(range.maxFrameRate, 60); +} + @end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m index e1a3aaec702e..503a5c255c59 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m @@ -52,6 +52,36 @@ OCMStub([audioSessionMock addInputWithNoConnections:[OCMArg any]]); OCMStub([audioSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); + id frameRateRangeMock1 = OCMClassMock([AVFrameRateRange class]); + OCMStub([frameRateRangeMock1 minFrameRate]).andReturn(3); + OCMStub([frameRateRangeMock1 maxFrameRate]).andReturn(30); + id captureDeviceFormatMock1 = OCMClassMock([AVCaptureDeviceFormat class]); + OCMStub([captureDeviceFormatMock1 videoSupportedFrameRateRanges]).andReturn(@[ + frameRateRangeMock1 + ]); + + id frameRateRangeMock2 = OCMClassMock([AVFrameRateRange class]); + OCMStub([frameRateRangeMock2 minFrameRate]).andReturn(3); + OCMStub([frameRateRangeMock2 maxFrameRate]).andReturn(60); + id captureDeviceFormatMock2 = OCMClassMock([AVCaptureDeviceFormat class]); + OCMStub([captureDeviceFormatMock2 videoSupportedFrameRateRanges]).andReturn(@[ + frameRateRangeMock2 + ]); + + id captureDeviceMock = OCMClassMock([AVCaptureDevice class]); + OCMStub([captureDeviceMock lockForConfiguration:[OCMArg setTo:nil]]).andReturn(YES); + OCMStub([captureDeviceMock formats]).andReturn((@[ + captureDeviceFormatMock1, captureDeviceFormatMock2 + ])); + __block AVCaptureDeviceFormat *format = captureDeviceFormatMock1; + OCMStub([captureDeviceMock setActiveFormat:[OCMArg any]]).andDo(^(NSInvocation *invocation) { + [invocation retainArguments]; + [invocation getArgument:&format atIndex:2]; + }); + OCMStub([captureDeviceMock activeFormat]).andDo(^(NSInvocation *invocation) { + [invocation setReturnValue:&format]; + }); + id fltCam = [[FLTCam alloc] initWithMediaSettings:mediaSettings mediaSettingsAVWrapper:mediaSettingsAVWrapper orientation:UIDeviceOrientationPortrait @@ -59,7 +89,7 @@ audioCaptureSession:audioSessionMock captureSessionQueue:captureSessionQueue captureDeviceFactory:captureDeviceFactory ?: ^AVCaptureDevice *(void) { - return [AVCaptureDevice deviceWithUniqueID:@"camera"]; + 
return captureDeviceMock; } videoDimensionsForFormat:^CMVideoDimensions(AVCaptureDeviceFormat *format) { return CMVideoFormatDescriptionGetDimensions(format.formatDescription); diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m index 45a993e9d443..bfb4d690737d 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m @@ -210,20 +210,50 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings [_motionManager startAccelerometerUpdates]; if (_mediaSettings.framesPerSecond) { + [_mediaSettingsAVWrapper beginConfigurationForSession:_videoCaptureSession]; + + // Possible values for presets are hard-coded in FLT interface having + // corresponding AVCaptureSessionPreset counterparts. + // If _resolutionPreset is not supported by camera there is + // fallback to lower resolution presets. + // If none can be selected there is error condition. + if (![self setCaptureSessionPreset:_mediaSettings.resolutionPreset withError:error]) { + [_videoCaptureSession commitConfiguration]; + return nil; + } + // The frame rate can be changed only on a locked for configuration device. if ([mediaSettingsAVWrapper lockDevice:_captureDevice error:error]) { - [_mediaSettingsAVWrapper beginConfigurationForSession:_videoCaptureSession]; - - // Possible values for presets are hard-coded in FLT interface having - // corresponding AVCaptureSessionPreset counterparts. - // If _resolutionPreset is not supported by camera there is - // fallback to lower resolution presets. - // If none can be selected there is error condition. - if (![self setCaptureSessionPreset:_mediaSettings.resolutionPreset withError:error]) { - [_videoCaptureSession commitConfiguration]; - [_captureDevice unlockForConfiguration]; - return nil; + // find the format which frame rate ranges are closest to the wanted frame rate + CMVideoDimensions targetRes = self.videoDimensionsForFormat(_captureDevice.activeFormat); + double targetFrameRate = _mediaSettings.framesPerSecond.doubleValue; + FourCharCode preferredSubType = + CMFormatDescriptionGetMediaSubType(_captureDevice.activeFormat.formatDescription); + AVCaptureDeviceFormat *bestFormat = _captureDevice.activeFormat; + double bestFrameRate = [self frameRateForFormat:bestFormat closestTo:targetFrameRate]; + double minDistance = fabs(bestFrameRate - targetFrameRate); + int bestSubTypeScore = 1; + for (AVCaptureDeviceFormat *format in _captureDevice.formats) { + CMVideoDimensions res = self.videoDimensionsForFormat(format); + if (res.width != targetRes.width || res.height != targetRes.height) { + continue; + } + double frameRate = [self frameRateForFormat:format closestTo:targetFrameRate]; + double distance = fabs(frameRate - targetFrameRate); + FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription); + int subTypeScore = subType == preferredSubType ? 
1 : 0; + if (distance < minDistance || + (distance == minDistance && subTypeScore > bestSubTypeScore)) { + bestFormat = format; + bestFrameRate = frameRate; + minDistance = distance; + bestSubTypeScore = subTypeScore; + } } + if (![bestFormat isEqual:_captureDevice.activeFormat]) { + _captureDevice.activeFormat = bestFormat; + } + _mediaSettings.framesPerSecond = @(bestFrameRate); // Set frame rate with 1/10 precision allowing not integral values. int fpsNominator = floor([_mediaSettings.framesPerSecond doubleValue] * 10.0); @@ -232,9 +262,10 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings [mediaSettingsAVWrapper setMinFrameDuration:duration onDevice:_captureDevice]; [mediaSettingsAVWrapper setMaxFrameDuration:duration onDevice:_captureDevice]; - [_mediaSettingsAVWrapper commitConfigurationForSession:_videoCaptureSession]; [_mediaSettingsAVWrapper unlockDevice:_captureDevice]; + [_mediaSettingsAVWrapper commitConfigurationForSession:_videoCaptureSession]; } else { + [_mediaSettingsAVWrapper commitConfigurationForSession:_videoCaptureSession]; return nil; } } else { @@ -250,6 +281,20 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings return self; } +- (double)frameRateForFormat:(AVCaptureDeviceFormat *)format closestTo:(double)targetFrameRate { + double bestFrameRate = 0; + double minDistance = DBL_MAX; + for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) { + double frameRate = MIN(MAX(targetFrameRate, range.minFrameRate), range.maxFrameRate); + double distance = fabs(frameRate - targetFrameRate); + if (distance < minDistance) { + bestFrameRate = frameRate; + minDistance = distance; + } + } + return bestFrameRate; +} + - (AVCaptureConnection *)createConnection:(NSError **)error { // Setup video capture input. _captureVideoInput = [AVCaptureDeviceInput deviceInputWithDevice:_captureDevice error:error]; @@ -543,16 +588,23 @@ - (BOOL)setCaptureSessionPreset:(FCPPlatformResolutionPreset)resolutionPreset /// Finds the highest available resolution in terms of pixel count for the given device. - (AVCaptureDeviceFormat *)highestResolutionFormatForCaptureDevice: (AVCaptureDevice *)captureDevice { + FourCharCode preferredSubType = + CMFormatDescriptionGetMediaSubType(_captureDevice.activeFormat.formatDescription); AVCaptureDeviceFormat *bestFormat = nil; NSUInteger maxPixelCount = 0; + int bestSubTypeScore = 0; for (AVCaptureDeviceFormat *format in _captureDevice.formats) { CMVideoDimensions res = self.videoDimensionsForFormat(format); NSUInteger height = res.height; NSUInteger width = res.width; NSUInteger pixelCount = height * width; - if (pixelCount > maxPixelCount) { - maxPixelCount = pixelCount; + FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription); + int subTypeScore = subType == preferredSubType ? 1 : 0; + if (pixelCount > maxPixelCount || + (pixelCount == maxPixelCount && subTypeScore > bestSubTypeScore)) { bestFormat = format; + maxPixelCount = pixelCount; + bestSubTypeScore = subTypeScore; } } return bestFormat; diff --git a/packages/camera/camera_avfoundation/pubspec.yaml b/packages/camera/camera_avfoundation/pubspec.yaml index fbf5e3b3a76b..4444026a41fe 100644 --- a/packages/camera/camera_avfoundation/pubspec.yaml +++ b/packages/camera/camera_avfoundation/pubspec.yaml @@ -2,7 +2,7 @@ name: camera_avfoundation description: iOS implementation of the camera plugin. 
repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_avfoundation issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22 -version: 0.9.17+1 +version: 0.9.17+2 environment: sdk: ^3.2.3 diff --git a/packages/video_player/video_player_avfoundation/CHANGELOG.md b/packages/video_player/video_player_avfoundation/CHANGELOG.md index 4ea05f7f1a9e..583fa4346bb5 100644 --- a/packages/video_player/video_player_avfoundation/CHANGELOG.md +++ b/packages/video_player/video_player_avfoundation/CHANGELOG.md @@ -1,3 +1,8 @@ +## 2.6.2 + +* Adds possibility to play videos at more than 30 FPS. +* Fixes playing state not updating in some paths. + ## 2.6.1 * Adds files to make include directory permanent. diff --git a/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.m b/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.m index 3ec96e78538a..eafbe99f55e7 100644 --- a/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.m +++ b/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.m @@ -125,6 +125,7 @@ @interface StubFVPDisplayLinkFactory : NSObject /** This display link to return. */ @property(nonatomic, strong) FVPDisplayLink *displayLink; +@property(nonatomic) void (^fireDisplayLink)(void); - (instancetype)initWithDisplayLink:(FVPDisplayLink *)displayLink; @@ -138,6 +139,7 @@ - (instancetype)initWithDisplayLink:(FVPDisplayLink *)displayLink { } - (FVPDisplayLink *)displayLinkWithRegistrar:(id)registrar callback:(void (^)(void))callback { + self.fireDisplayLink = callback; return self.displayLink; } @@ -243,13 +245,14 @@ - (void)testSeekToWhilePausedStartsDisplayLinkTemporarily { OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero]) .ignoringNonObjectArgs() .andReturn(YES); - // Any non-zero value is fine here since it won't actually be used, just NULL-checked. - CVPixelBufferRef fakeBufferRef = (CVPixelBufferRef)1; + CVPixelBufferRef bufferRef; + CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef); OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero itemTimeForDisplay:NULL]) .ignoringNonObjectArgs() - .andReturn(fakeBufferRef); + .andReturn(bufferRef); // Simulate a callback from the engine to request a new frame. - [player copyPixelBuffer]; + stubDisplayLinkFactory.fireDisplayLink(); + CFRelease([player copyPixelBuffer]); // Since a frame was found, and the video is paused, the display link should be paused again. OCMVerify([mockDisplayLink setRunning:NO]); } @@ -294,14 +297,15 @@ - (void)testInitStartsDisplayLinkTemporarily { OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero]) .ignoringNonObjectArgs() .andReturn(YES); - // Any non-zero value is fine here since it won't actually be used, just NULL-checked. - CVPixelBufferRef fakeBufferRef = (CVPixelBufferRef)1; + CVPixelBufferRef bufferRef; + CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef); OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero itemTimeForDisplay:NULL]) .ignoringNonObjectArgs() - .andReturn(fakeBufferRef); + .andReturn(bufferRef); // Simulate a callback from the engine to request a new frame. 
FVPVideoPlayer *player = videoPlayerPlugin.playersByTextureId[textureId]; - [player copyPixelBuffer]; + stubDisplayLinkFactory.fireDisplayLink(); + CFRelease([player copyPixelBuffer]); // Since a frame was found, and the video is paused, the display link should be paused again. OCMVerify([mockDisplayLink setRunning:NO]); } @@ -357,13 +361,14 @@ - (void)testSeekToWhilePlayingDoesNotStopDisplayLink { OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero]) .ignoringNonObjectArgs() .andReturn(YES); - // Any non-zero value is fine here since it won't actually be used, just NULL-checked. - CVPixelBufferRef fakeBufferRef = (CVPixelBufferRef)1; + CVPixelBufferRef bufferRef; + CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef); OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero itemTimeForDisplay:NULL]) .ignoringNonObjectArgs() - .andReturn(fakeBufferRef); + .andReturn(bufferRef); // Simulate a callback from the engine to request a new frame. - [player copyPixelBuffer]; + stubDisplayLinkFactory.fireDisplayLink(); + CFRelease([player copyPixelBuffer]); // Since the video was playing, the display link should not be paused after getting a buffer. OCMVerify(never(), [mockDisplayLink setRunning:NO]); } @@ -790,6 +795,82 @@ - (void)testPublishesInRegistration { XCTAssertTrue([publishedValue isKindOfClass:[FVPVideoPlayerPlugin class]]); } +- (void)testPlayerShouldNotDropEverySecondFrame { + NSObject *registrar = + [GetPluginRegistry() registrarForPlugin:@"testPlayerShouldNotDropEverySecondFrame"]; + NSObject *partialRegistrar = OCMPartialMock(registrar); + NSObject *mockTextureRegistry = + OCMProtocolMock(@protocol(FlutterTextureRegistry)); + OCMStub([partialRegistrar textures]).andReturn(mockTextureRegistry); + + FVPDisplayLink *displayLink = [[FVPDisplayLink alloc] initWithRegistrar:registrar + callback:^(){ + }]; + StubFVPDisplayLinkFactory *stubDisplayLinkFactory = + [[StubFVPDisplayLinkFactory alloc] initWithDisplayLink:displayLink]; + AVPlayerItemVideoOutput *mockVideoOutput = OCMPartialMock([[AVPlayerItemVideoOutput alloc] init]); + FVPVideoPlayerPlugin *videoPlayerPlugin = [[FVPVideoPlayerPlugin alloc] + initWithAVFactory:[[StubFVPAVFactory alloc] initWithPlayer:nil output:mockVideoOutput] + displayLinkFactory:stubDisplayLinkFactory + registrar:partialRegistrar]; + + FlutterError *error; + [videoPlayerPlugin initialize:&error]; + XCTAssertNil(error); + FVPCreationOptions *create = [FVPCreationOptions + makeWithAsset:nil + uri:@"https://flutter.github.io/assets-for-api-docs/assets/videos/bee.mp4" + packageName:nil + formatHint:nil + httpHeaders:@{}]; + NSNumber *textureId = [videoPlayerPlugin createWithOptions:create error:&error]; + FVPVideoPlayer *player = videoPlayerPlugin.playersByTextureId[textureId]; + + __block CMTime currentTime = kCMTimeZero; + OCMStub([mockVideoOutput itemTimeForHostTime:0]) + .ignoringNonObjectArgs() + .andDo(^(NSInvocation *invocation) { + [invocation setReturnValue:¤tTime]; + }); + __block NSMutableSet *pixelBuffers = NSMutableSet.new; + OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero]) + .ignoringNonObjectArgs() + .andDo(^(NSInvocation *invocation) { + CMTime itemTime; + [invocation getArgument:&itemTime atIndex:2]; + BOOL has = [pixelBuffers containsObject:[NSValue valueWithCMTime:itemTime]]; + [invocation setReturnValue:&has]; + }); + OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero + itemTimeForDisplay:[OCMArg anyPointer]]) + .ignoringNonObjectArgs() + .andDo(^(NSInvocation 
*invocation) { + CMTime itemTime; + [invocation getArgument:&itemTime atIndex:2]; + CVPixelBufferRef bufferRef = NULL; + if ([pixelBuffers containsObject:[NSValue valueWithCMTime:itemTime]]) { + CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef); + } + [pixelBuffers removeObject:[NSValue valueWithCMTime:itemTime]]; + [invocation setReturnValue:&bufferRef]; + }); + void (^advanceFrame)(void) = ^{ + currentTime.value++; + [pixelBuffers addObject:[NSValue valueWithCMTime:currentTime]]; + }; + + advanceFrame(); + OCMExpect([mockTextureRegistry textureFrameAvailable:textureId.intValue]); + stubDisplayLinkFactory.fireDisplayLink(); + OCMVerifyAllWithDelay(mockTextureRegistry, 10); + + advanceFrame(); + OCMExpect([mockTextureRegistry textureFrameAvailable:textureId.intValue]); + CFRelease([player copyPixelBuffer]); + stubDisplayLinkFactory.fireDisplayLink(); + OCMVerifyAllWithDelay(mockTextureRegistry, 10); +} + #if TARGET_OS_IOS - (void)validateTransformFixForOrientation:(UIImageOrientation)orientation { AVAssetTrack *track = [[FakeAVAssetTrack alloc] initWithOrientation:orientation]; diff --git a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayerPlugin.m b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayerPlugin.m index 14ee7ccefde2..33fd5bef05a7 100644 --- a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayerPlugin.m +++ b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayerPlugin.m @@ -21,8 +21,8 @@ @interface FVPFrameUpdater : NSObject @property(nonatomic, weak, readonly) NSObject *registry; // The output that this updater is managing. @property(nonatomic, weak) AVPlayerItemVideoOutput *videoOutput; -// The last time that has been validated as avaliable according to hasNewPixelBufferForItemTime:. -@property(nonatomic, assign) CMTime lastKnownAvailableTime; +@property(nonatomic) CVPixelBufferRef latestPixelBuffer; +@property(nonatomic) dispatch_queue_t pixelBufferSynchronizationQueue; @end @implementation FVPFrameUpdater @@ -30,7 +30,6 @@ - (FVPFrameUpdater *)initWithRegistry:(NSObject *)regist NSAssert(self, @"super init cannot be nil"); if (self == nil) return nil; _registry = registry; - _lastKnownAvailableTime = kCMTimeInvalid; return self; } @@ -38,10 +37,22 @@ - (void)displayLinkFired { // Only report a new frame if one is actually available. CMTime outputItemTime = [self.videoOutput itemTimeForHostTime:CACurrentMediaTime()]; if ([self.videoOutput hasNewPixelBufferForItemTime:outputItemTime]) { - _lastKnownAvailableTime = outputItemTime; + dispatch_async(self.pixelBufferSynchronizationQueue, ^{ + if (self.latestPixelBuffer) { + CFRelease(self.latestPixelBuffer); + } + self.latestPixelBuffer = [self.videoOutput copyPixelBufferForItemTime:outputItemTime + itemTimeForDisplay:NULL]; + }); [_registry textureFrameAvailable:_textureId]; } } + +- (void)dealloc { + if (_latestPixelBuffer) { + CFRelease(_latestPixelBuffer); + } +} @end @interface FVPDefaultAVFactory : NSObject @@ -92,6 +103,7 @@ @interface FVPVideoPlayer () // (e.g., after a seek while paused). If YES, the display link should continue to run until the next // frame is successfully provided. 
@property(nonatomic, assign) BOOL waitingForFrame; +@property(nonatomic) dispatch_queue_t pixelBufferSynchronizationQueue; - (instancetype)initWithURL:(NSURL *)url frameUpdater:(FVPFrameUpdater *)frameUpdater @@ -234,9 +246,8 @@ - (AVMutableVideoComposition *)getVideoCompositionWithTransform:(CGAffineTransfo } videoComposition.renderSize = CGSizeMake(width, height); - // TODO(@recastrodiaz): should we use videoTrack.nominalFrameRate ? - // Currently set at a constant 30 FPS - videoComposition.frameDuration = CMTimeMake(1, 30); + videoComposition.sourceTrackIDForFrameTiming = videoTrack.trackID; + videoComposition.frameDuration = videoTrack.minFrameDuration; return videoComposition; } @@ -283,6 +294,10 @@ - (instancetype)initWithPlayerItem:(AVPlayerItem *)item error:nil] == AVKeyValueStatusLoaded) { // Rotate the video by using a videoComposition and the preferredTransform self->_preferredTransform = FVPGetStandardizedTransformForTrack(videoTrack); + // do not use video composition when it is not needed + if (CGAffineTransformIsIdentity(self->_preferredTransform)) { + return; + } // Note: // https://developer.apple.com/documentation/avfoundation/avplayeritem/1388818-videocomposition // Video composition can only be used with file-based media and is not supported for @@ -320,6 +335,10 @@ - (instancetype)initWithPlayerItem:(AVPlayerItem *)item _videoOutput = [avFactory videoOutputWithPixelBufferAttributes:pixBuffAttributes]; frameUpdater.videoOutput = _videoOutput; + _pixelBufferSynchronizationQueue = + dispatch_queue_create("io.flutter.video_player.pixelBufferSynchronizationQueue", NULL); + frameUpdater.pixelBufferSynchronizationQueue = _pixelBufferSynchronizationQueue; + [self addObserversForItem:item player:_player]; [asset loadValuesAsynchronouslyForKeys:@[ @"tracks" ] completionHandler:assetCompletionHandler]; @@ -358,7 +377,6 @@ - (void)observeValueForKeyPath:(NSString *)path case AVPlayerItemStatusReadyToPlay: [item addOutput:_videoOutput]; [self setupEventSinkIfReadyToPlay]; - [self updatePlayingState]; break; } } else if (context == presentationSizeContext || context == durationContext) { @@ -368,7 +386,6 @@ - (void)observeValueForKeyPath:(NSString *)path // its presentation size or duration. When these properties are finally set, re-check if // all required properties and instantiate the event sink if it is not already set up. [self setupEventSinkIfReadyToPlay]; - [self updatePlayingState]; } } else if (context == playbackLikelyToKeepUpContext) { [self updatePlayingState]; @@ -447,6 +464,8 @@ - (void)setupEventSinkIfReadyToPlay { } _isInitialized = YES; + [self updatePlayingState]; + _eventSink(@{ @"event" : @"initialized", @"duration" : @(duration), @@ -543,18 +562,11 @@ - (void)setPlaybackSpeed:(double)speed { } - (CVPixelBufferRef)copyPixelBuffer { - CVPixelBufferRef buffer = NULL; - CMTime outputItemTime = [_videoOutput itemTimeForHostTime:CACurrentMediaTime()]; - if ([_videoOutput hasNewPixelBufferForItemTime:outputItemTime]) { - buffer = [_videoOutput copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL]; - } else { - // If the current time isn't available yet, use the time that was checked when informing the - // engine that a frame was available (if any). 
- CMTime lastAvailableTime = self.frameUpdater.lastKnownAvailableTime; - if (CMTIME_IS_VALID(lastAvailableTime)) { - buffer = [_videoOutput copyPixelBufferForItemTime:lastAvailableTime itemTimeForDisplay:NULL]; - } - } + __block CVPixelBufferRef buffer = NULL; + dispatch_sync(self.pixelBufferSynchronizationQueue, ^{ + buffer = self.frameUpdater.latestPixelBuffer; + self.frameUpdater.latestPixelBuffer = NULL; + }); if (self.waitingForFrame && buffer) { self.waitingForFrame = NO; diff --git a/packages/video_player/video_player_avfoundation/pubspec.yaml b/packages/video_player/video_player_avfoundation/pubspec.yaml index 674684ee789d..5fbf5aa50ef1 100644 --- a/packages/video_player/video_player_avfoundation/pubspec.yaml +++ b/packages/video_player/video_player_avfoundation/pubspec.yaml @@ -2,7 +2,7 @@ name: video_player_avfoundation description: iOS and macOS implementation of the video_player plugin. repository: https://github.com/flutter/packages/tree/main/packages/video_player/video_player_avfoundation issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+video_player%22 -version: 2.6.1 +version: 2.6.2 environment: sdk: ^3.2.3 From 4f1483f0b7644f31a59ac65a984d5d17529b3feb Mon Sep 17 00:00:00 2001 From: misos1 <30872003+misos1@users.noreply.github.com> Date: Thu, 15 Aug 2024 17:18:25 +0200 Subject: [PATCH 02/11] correctly set _previewSize --- .../Sources/camera_avfoundation/FLTCam.m | 16 ++-------------- 1 file changed, 2 insertions(+), 14 deletions(-) diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m index a6f3ecb08d60..230aa5e91eb1 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m @@ -519,11 +519,6 @@ - (BOOL)setCaptureSessionPreset:(FCPPlatformResolutionPreset)resolutionPreset // Set the best device format found and finish the device configuration. _captureDevice.activeFormat = bestFormat; [_captureDevice unlockForConfiguration]; - - // Set the preview size based on values from the current capture device. 
- _previewSize = - CGSizeMake(_captureDevice.activeFormat.highResolutionStillImageDimensions.width, - _captureDevice.activeFormat.highResolutionStillImageDimensions.height); break; } } @@ -531,44 +526,35 @@ - (BOOL)setCaptureSessionPreset:(FCPPlatformResolutionPreset)resolutionPreset case FCPPlatformResolutionPresetUltraHigh: if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPreset3840x2160]) { _videoCaptureSession.sessionPreset = AVCaptureSessionPreset3840x2160; - _previewSize = CGSizeMake(3840, 2160); break; } if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPresetHigh]) { _videoCaptureSession.sessionPreset = AVCaptureSessionPresetHigh; - _previewSize = - CGSizeMake(_captureDevice.activeFormat.highResolutionStillImageDimensions.width, - _captureDevice.activeFormat.highResolutionStillImageDimensions.height); break; } case FCPPlatformResolutionPresetVeryHigh: if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPreset1920x1080]) { _videoCaptureSession.sessionPreset = AVCaptureSessionPreset1920x1080; - _previewSize = CGSizeMake(1920, 1080); break; } case FCPPlatformResolutionPresetHigh: if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) { _videoCaptureSession.sessionPreset = AVCaptureSessionPreset1280x720; - _previewSize = CGSizeMake(1280, 720); break; } case FCPPlatformResolutionPresetMedium: if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) { _videoCaptureSession.sessionPreset = AVCaptureSessionPreset640x480; - _previewSize = CGSizeMake(640, 480); break; } case FCPPlatformResolutionPresetLow: if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPreset352x288]) { _videoCaptureSession.sessionPreset = AVCaptureSessionPreset352x288; - _previewSize = CGSizeMake(352, 288); break; } default: if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPresetLow]) { _videoCaptureSession.sessionPreset = AVCaptureSessionPresetLow; - _previewSize = CGSizeMake(352, 288); } else { if (error != nil) { *error = @@ -582,6 +568,8 @@ - (BOOL)setCaptureSessionPreset:(FCPPlatformResolutionPreset)resolutionPreset return NO; } } + CMVideoDimensions size = self.videoDimensionsForFormat(_captureDevice.activeFormat); + _previewSize = CGSizeMake(size.width, size.height); _audioCaptureSession.sessionPreset = _videoCaptureSession.sessionPreset; return YES; } From 30a485683d9ec87a79bdaa4979920c51b39ae9a2 Mon Sep 17 00:00:00 2001 From: misos1 <30872003+misos1@users.noreply.github.com> Date: Wed, 21 Aug 2024 18:14:57 +0200 Subject: [PATCH 03/11] split camera changes from more_fps --- .../video_player_avfoundation/CHANGELOG.md | 4 +- .../darwin/RunnerTests/VideoPlayerTests.m | 105 ++---------------- .../FVPVideoPlayerPlugin.m | 54 ++++----- .../video_player_avfoundation/pubspec.yaml | 2 +- 4 files changed, 35 insertions(+), 130 deletions(-) diff --git a/packages/video_player/video_player_avfoundation/CHANGELOG.md b/packages/video_player/video_player_avfoundation/CHANGELOG.md index 7ad372b36039..e51f69fd6138 100644 --- a/packages/video_player/video_player_avfoundation/CHANGELOG.md +++ b/packages/video_player/video_player_avfoundation/CHANGELOG.md @@ -1,7 +1,5 @@ -## 2.6.2 +## NEXT -* Adds possibility to play videos at more than 30 FPS. -* Fixes playing state not updating in some paths. * Updates minimum supported SDK version to Flutter 3.19/Dart 3.3. 
## 2.6.1 diff --git a/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.m b/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.m index eafbe99f55e7..3ec96e78538a 100644 --- a/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.m +++ b/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.m @@ -125,7 +125,6 @@ @interface StubFVPDisplayLinkFactory : NSObject /** This display link to return. */ @property(nonatomic, strong) FVPDisplayLink *displayLink; -@property(nonatomic) void (^fireDisplayLink)(void); - (instancetype)initWithDisplayLink:(FVPDisplayLink *)displayLink; @@ -139,7 +138,6 @@ - (instancetype)initWithDisplayLink:(FVPDisplayLink *)displayLink { } - (FVPDisplayLink *)displayLinkWithRegistrar:(id)registrar callback:(void (^)(void))callback { - self.fireDisplayLink = callback; return self.displayLink; } @@ -245,14 +243,13 @@ - (void)testSeekToWhilePausedStartsDisplayLinkTemporarily { OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero]) .ignoringNonObjectArgs() .andReturn(YES); - CVPixelBufferRef bufferRef; - CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef); + // Any non-zero value is fine here since it won't actually be used, just NULL-checked. + CVPixelBufferRef fakeBufferRef = (CVPixelBufferRef)1; OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero itemTimeForDisplay:NULL]) .ignoringNonObjectArgs() - .andReturn(bufferRef); + .andReturn(fakeBufferRef); // Simulate a callback from the engine to request a new frame. - stubDisplayLinkFactory.fireDisplayLink(); - CFRelease([player copyPixelBuffer]); + [player copyPixelBuffer]; // Since a frame was found, and the video is paused, the display link should be paused again. OCMVerify([mockDisplayLink setRunning:NO]); } @@ -297,15 +294,14 @@ - (void)testInitStartsDisplayLinkTemporarily { OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero]) .ignoringNonObjectArgs() .andReturn(YES); - CVPixelBufferRef bufferRef; - CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef); + // Any non-zero value is fine here since it won't actually be used, just NULL-checked. + CVPixelBufferRef fakeBufferRef = (CVPixelBufferRef)1; OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero itemTimeForDisplay:NULL]) .ignoringNonObjectArgs() - .andReturn(bufferRef); + .andReturn(fakeBufferRef); // Simulate a callback from the engine to request a new frame. FVPVideoPlayer *player = videoPlayerPlugin.playersByTextureId[textureId]; - stubDisplayLinkFactory.fireDisplayLink(); - CFRelease([player copyPixelBuffer]); + [player copyPixelBuffer]; // Since a frame was found, and the video is paused, the display link should be paused again. OCMVerify([mockDisplayLink setRunning:NO]); } @@ -361,14 +357,13 @@ - (void)testSeekToWhilePlayingDoesNotStopDisplayLink { OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero]) .ignoringNonObjectArgs() .andReturn(YES); - CVPixelBufferRef bufferRef; - CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef); + // Any non-zero value is fine here since it won't actually be used, just NULL-checked. + CVPixelBufferRef fakeBufferRef = (CVPixelBufferRef)1; OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero itemTimeForDisplay:NULL]) .ignoringNonObjectArgs() - .andReturn(bufferRef); + .andReturn(fakeBufferRef); // Simulate a callback from the engine to request a new frame. 
- stubDisplayLinkFactory.fireDisplayLink(); - CFRelease([player copyPixelBuffer]); + [player copyPixelBuffer]; // Since the video was playing, the display link should not be paused after getting a buffer. OCMVerify(never(), [mockDisplayLink setRunning:NO]); } @@ -795,82 +790,6 @@ - (void)testPublishesInRegistration { XCTAssertTrue([publishedValue isKindOfClass:[FVPVideoPlayerPlugin class]]); } -- (void)testPlayerShouldNotDropEverySecondFrame { - NSObject *registrar = - [GetPluginRegistry() registrarForPlugin:@"testPlayerShouldNotDropEverySecondFrame"]; - NSObject *partialRegistrar = OCMPartialMock(registrar); - NSObject *mockTextureRegistry = - OCMProtocolMock(@protocol(FlutterTextureRegistry)); - OCMStub([partialRegistrar textures]).andReturn(mockTextureRegistry); - - FVPDisplayLink *displayLink = [[FVPDisplayLink alloc] initWithRegistrar:registrar - callback:^(){ - }]; - StubFVPDisplayLinkFactory *stubDisplayLinkFactory = - [[StubFVPDisplayLinkFactory alloc] initWithDisplayLink:displayLink]; - AVPlayerItemVideoOutput *mockVideoOutput = OCMPartialMock([[AVPlayerItemVideoOutput alloc] init]); - FVPVideoPlayerPlugin *videoPlayerPlugin = [[FVPVideoPlayerPlugin alloc] - initWithAVFactory:[[StubFVPAVFactory alloc] initWithPlayer:nil output:mockVideoOutput] - displayLinkFactory:stubDisplayLinkFactory - registrar:partialRegistrar]; - - FlutterError *error; - [videoPlayerPlugin initialize:&error]; - XCTAssertNil(error); - FVPCreationOptions *create = [FVPCreationOptions - makeWithAsset:nil - uri:@"https://flutter.github.io/assets-for-api-docs/assets/videos/bee.mp4" - packageName:nil - formatHint:nil - httpHeaders:@{}]; - NSNumber *textureId = [videoPlayerPlugin createWithOptions:create error:&error]; - FVPVideoPlayer *player = videoPlayerPlugin.playersByTextureId[textureId]; - - __block CMTime currentTime = kCMTimeZero; - OCMStub([mockVideoOutput itemTimeForHostTime:0]) - .ignoringNonObjectArgs() - .andDo(^(NSInvocation *invocation) { - [invocation setReturnValue:¤tTime]; - }); - __block NSMutableSet *pixelBuffers = NSMutableSet.new; - OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero]) - .ignoringNonObjectArgs() - .andDo(^(NSInvocation *invocation) { - CMTime itemTime; - [invocation getArgument:&itemTime atIndex:2]; - BOOL has = [pixelBuffers containsObject:[NSValue valueWithCMTime:itemTime]]; - [invocation setReturnValue:&has]; - }); - OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero - itemTimeForDisplay:[OCMArg anyPointer]]) - .ignoringNonObjectArgs() - .andDo(^(NSInvocation *invocation) { - CMTime itemTime; - [invocation getArgument:&itemTime atIndex:2]; - CVPixelBufferRef bufferRef = NULL; - if ([pixelBuffers containsObject:[NSValue valueWithCMTime:itemTime]]) { - CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef); - } - [pixelBuffers removeObject:[NSValue valueWithCMTime:itemTime]]; - [invocation setReturnValue:&bufferRef]; - }); - void (^advanceFrame)(void) = ^{ - currentTime.value++; - [pixelBuffers addObject:[NSValue valueWithCMTime:currentTime]]; - }; - - advanceFrame(); - OCMExpect([mockTextureRegistry textureFrameAvailable:textureId.intValue]); - stubDisplayLinkFactory.fireDisplayLink(); - OCMVerifyAllWithDelay(mockTextureRegistry, 10); - - advanceFrame(); - OCMExpect([mockTextureRegistry textureFrameAvailable:textureId.intValue]); - CFRelease([player copyPixelBuffer]); - stubDisplayLinkFactory.fireDisplayLink(); - OCMVerifyAllWithDelay(mockTextureRegistry, 10); -} - #if TARGET_OS_IOS - 
(void)validateTransformFixForOrientation:(UIImageOrientation)orientation { AVAssetTrack *track = [[FakeAVAssetTrack alloc] initWithOrientation:orientation]; diff --git a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayerPlugin.m b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayerPlugin.m index 33fd5bef05a7..14ee7ccefde2 100644 --- a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayerPlugin.m +++ b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayerPlugin.m @@ -21,8 +21,8 @@ @interface FVPFrameUpdater : NSObject @property(nonatomic, weak, readonly) NSObject *registry; // The output that this updater is managing. @property(nonatomic, weak) AVPlayerItemVideoOutput *videoOutput; -@property(nonatomic) CVPixelBufferRef latestPixelBuffer; -@property(nonatomic) dispatch_queue_t pixelBufferSynchronizationQueue; +// The last time that has been validated as avaliable according to hasNewPixelBufferForItemTime:. +@property(nonatomic, assign) CMTime lastKnownAvailableTime; @end @implementation FVPFrameUpdater @@ -30,6 +30,7 @@ - (FVPFrameUpdater *)initWithRegistry:(NSObject *)regist NSAssert(self, @"super init cannot be nil"); if (self == nil) return nil; _registry = registry; + _lastKnownAvailableTime = kCMTimeInvalid; return self; } @@ -37,22 +38,10 @@ - (void)displayLinkFired { // Only report a new frame if one is actually available. CMTime outputItemTime = [self.videoOutput itemTimeForHostTime:CACurrentMediaTime()]; if ([self.videoOutput hasNewPixelBufferForItemTime:outputItemTime]) { - dispatch_async(self.pixelBufferSynchronizationQueue, ^{ - if (self.latestPixelBuffer) { - CFRelease(self.latestPixelBuffer); - } - self.latestPixelBuffer = [self.videoOutput copyPixelBufferForItemTime:outputItemTime - itemTimeForDisplay:NULL]; - }); + _lastKnownAvailableTime = outputItemTime; [_registry textureFrameAvailable:_textureId]; } } - -- (void)dealloc { - if (_latestPixelBuffer) { - CFRelease(_latestPixelBuffer); - } -} @end @interface FVPDefaultAVFactory : NSObject @@ -103,7 +92,6 @@ @interface FVPVideoPlayer () // (e.g., after a seek while paused). If YES, the display link should continue to run until the next // frame is successfully provided. @property(nonatomic, assign) BOOL waitingForFrame; -@property(nonatomic) dispatch_queue_t pixelBufferSynchronizationQueue; - (instancetype)initWithURL:(NSURL *)url frameUpdater:(FVPFrameUpdater *)frameUpdater @@ -246,8 +234,9 @@ - (AVMutableVideoComposition *)getVideoCompositionWithTransform:(CGAffineTransfo } videoComposition.renderSize = CGSizeMake(width, height); - videoComposition.sourceTrackIDForFrameTiming = videoTrack.trackID; - videoComposition.frameDuration = videoTrack.minFrameDuration; + // TODO(@recastrodiaz): should we use videoTrack.nominalFrameRate ? 
+ // Currently set at a constant 30 FPS + videoComposition.frameDuration = CMTimeMake(1, 30); return videoComposition; } @@ -294,10 +283,6 @@ - (instancetype)initWithPlayerItem:(AVPlayerItem *)item error:nil] == AVKeyValueStatusLoaded) { // Rotate the video by using a videoComposition and the preferredTransform self->_preferredTransform = FVPGetStandardizedTransformForTrack(videoTrack); - // do not use video composition when it is not needed - if (CGAffineTransformIsIdentity(self->_preferredTransform)) { - return; - } // Note: // https://developer.apple.com/documentation/avfoundation/avplayeritem/1388818-videocomposition // Video composition can only be used with file-based media and is not supported for @@ -335,10 +320,6 @@ - (instancetype)initWithPlayerItem:(AVPlayerItem *)item _videoOutput = [avFactory videoOutputWithPixelBufferAttributes:pixBuffAttributes]; frameUpdater.videoOutput = _videoOutput; - _pixelBufferSynchronizationQueue = - dispatch_queue_create("io.flutter.video_player.pixelBufferSynchronizationQueue", NULL); - frameUpdater.pixelBufferSynchronizationQueue = _pixelBufferSynchronizationQueue; - [self addObserversForItem:item player:_player]; [asset loadValuesAsynchronouslyForKeys:@[ @"tracks" ] completionHandler:assetCompletionHandler]; @@ -377,6 +358,7 @@ - (void)observeValueForKeyPath:(NSString *)path case AVPlayerItemStatusReadyToPlay: [item addOutput:_videoOutput]; [self setupEventSinkIfReadyToPlay]; + [self updatePlayingState]; break; } } else if (context == presentationSizeContext || context == durationContext) { @@ -386,6 +368,7 @@ - (void)observeValueForKeyPath:(NSString *)path // its presentation size or duration. When these properties are finally set, re-check if // all required properties and instantiate the event sink if it is not already set up. [self setupEventSinkIfReadyToPlay]; + [self updatePlayingState]; } } else if (context == playbackLikelyToKeepUpContext) { [self updatePlayingState]; @@ -464,8 +447,6 @@ - (void)setupEventSinkIfReadyToPlay { } _isInitialized = YES; - [self updatePlayingState]; - _eventSink(@{ @"event" : @"initialized", @"duration" : @(duration), @@ -562,11 +543,18 @@ - (void)setPlaybackSpeed:(double)speed { } - (CVPixelBufferRef)copyPixelBuffer { - __block CVPixelBufferRef buffer = NULL; - dispatch_sync(self.pixelBufferSynchronizationQueue, ^{ - buffer = self.frameUpdater.latestPixelBuffer; - self.frameUpdater.latestPixelBuffer = NULL; - }); + CVPixelBufferRef buffer = NULL; + CMTime outputItemTime = [_videoOutput itemTimeForHostTime:CACurrentMediaTime()]; + if ([_videoOutput hasNewPixelBufferForItemTime:outputItemTime]) { + buffer = [_videoOutput copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL]; + } else { + // If the current time isn't available yet, use the time that was checked when informing the + // engine that a frame was available (if any). 
+ CMTime lastAvailableTime = self.frameUpdater.lastKnownAvailableTime; + if (CMTIME_IS_VALID(lastAvailableTime)) { + buffer = [_videoOutput copyPixelBufferForItemTime:lastAvailableTime itemTimeForDisplay:NULL]; + } + } if (self.waitingForFrame && buffer) { self.waitingForFrame = NO; diff --git a/packages/video_player/video_player_avfoundation/pubspec.yaml b/packages/video_player/video_player_avfoundation/pubspec.yaml index 63952af7c43e..dad297f40207 100644 --- a/packages/video_player/video_player_avfoundation/pubspec.yaml +++ b/packages/video_player/video_player_avfoundation/pubspec.yaml @@ -2,7 +2,7 @@ name: video_player_avfoundation description: iOS and macOS implementation of the video_player plugin. repository: https://github.com/flutter/packages/tree/main/packages/video_player/video_player_avfoundation issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+video_player%22 -version: 2.6.2 +version: 2.6.1 environment: sdk: ^3.3.0 From 5e1f2495f4225942a304f28293dc97b05706c315 Mon Sep 17 00:00:00 2001 From: misos1 <30872003+misos1@users.noreply.github.com> Date: Thu, 5 Sep 2024 21:05:55 +0200 Subject: [PATCH 04/11] extract format selection into function and more --- .../camera/camera_avfoundation/CHANGELOG.md | 2 +- .../Sources/camera_avfoundation/FLTCam.m | 75 ++++++++++--------- 2 files changed, 41 insertions(+), 36 deletions(-) diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md index cf53a7fc39e4..82040ff1891b 100644 --- a/packages/camera/camera_avfoundation/CHANGELOG.md +++ b/packages/camera/camera_avfoundation/CHANGELOG.md @@ -1,6 +1,6 @@ ## 0.9.17+4 -* Adds possibility to use any supported FPS and fixes crash when using unsupported FPS. +* Adds ability to use any supported FPS and fixes crash when using unsupported FPS. * Updates minimum supported SDK version to Flutter 3.19/Dart 3.3. ## 0.9.17+3 diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m index 230aa5e91eb1..3b1fe466f7c8 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m @@ -225,36 +225,7 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings // The frame rate can be changed only on a locked for configuration device. 
if ([mediaSettingsAVWrapper lockDevice:_captureDevice error:error]) { - // find the format which frame rate ranges are closest to the wanted frame rate - CMVideoDimensions targetRes = self.videoDimensionsForFormat(_captureDevice.activeFormat); - double targetFrameRate = _mediaSettings.framesPerSecond.doubleValue; - FourCharCode preferredSubType = - CMFormatDescriptionGetMediaSubType(_captureDevice.activeFormat.formatDescription); - AVCaptureDeviceFormat *bestFormat = _captureDevice.activeFormat; - double bestFrameRate = [self frameRateForFormat:bestFormat closestTo:targetFrameRate]; - double minDistance = fabs(bestFrameRate - targetFrameRate); - int bestSubTypeScore = 1; - for (AVCaptureDeviceFormat *format in _captureDevice.formats) { - CMVideoDimensions res = self.videoDimensionsForFormat(format); - if (res.width != targetRes.width || res.height != targetRes.height) { - continue; - } - double frameRate = [self frameRateForFormat:format closestTo:targetFrameRate]; - double distance = fabs(frameRate - targetFrameRate); - FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription); - int subTypeScore = subType == preferredSubType ? 1 : 0; - if (distance < minDistance || - (distance == minDistance && subTypeScore > bestSubTypeScore)) { - bestFormat = format; - bestFrameRate = frameRate; - minDistance = distance; - bestSubTypeScore = subTypeScore; - } - } - if (![bestFormat isEqual:_captureDevice.activeFormat]) { - _captureDevice.activeFormat = bestFormat; - } - _mediaSettings.framesPerSecond = @(bestFrameRate); + selectBestFormatForRequestedFrameRate(_captureDevice, _mediaSettings, videoDimensionsForFormat); // Set frame rate with 1/10 precision allowing not integral values. int fpsNominator = floor([_mediaSettings.framesPerSecond doubleValue] * 10.0); @@ -282,7 +253,42 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings return self; } -- (double)frameRateForFormat:(AVCaptureDeviceFormat *)format closestTo:(double)targetFrameRate { +static void selectBestFormatForRequestedFrameRate(AVCaptureDevice *captureDevice, + FCPPlatformMediaSettings *mediaSettings, + VideoDimensionsForFormat videoDimensionsForFormat) +{ + // Find the format which frame rate ranges are closest to the wanted frame rate. 
+ CMVideoDimensions targetResolution = videoDimensionsForFormat(captureDevice.activeFormat); + double targetFrameRate = mediaSettings.framesPerSecond.doubleValue; + FourCharCode preferredSubType = + CMFormatDescriptionGetMediaSubType(captureDevice.activeFormat.formatDescription); + AVCaptureDeviceFormat *bestFormat = captureDevice.activeFormat; + double bestFrameRate = bestFrameRateForFormat(bestFormat, targetFrameRate); + double minDistance = fabs(bestFrameRate - targetFrameRate); + FourCharCode bestSubType = preferredSubType; + for (AVCaptureDeviceFormat *format in captureDevice.formats) { + CMVideoDimensions resolution = videoDimensionsForFormat(format); + if (resolution.width != targetResolution.width || resolution.height != targetResolution.height) { + continue; + } + double frameRate = bestFrameRateForFormat(format, targetFrameRate); + double distance = fabs(frameRate - targetFrameRate); + FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription); + if (distance < minDistance || + (distance == minDistance && subType == preferredSubType && bestSubType != preferredSubType)) { + bestFormat = format; + bestFrameRate = frameRate; + minDistance = distance; + bestSubType = subType; + } + } + if (![bestFormat isEqual:captureDevice.activeFormat]) { + captureDevice.activeFormat = bestFormat; + } + mediaSettings.framesPerSecond = @(bestFrameRate); +} + +static double bestFrameRateForFormat(AVCaptureDeviceFormat *format, double targetFrameRate) { double bestFrameRate = 0; double minDistance = DBL_MAX; for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) { @@ -581,19 +587,18 @@ - (AVCaptureDeviceFormat *)highestResolutionFormatForCaptureDevice: CMFormatDescriptionGetMediaSubType(_captureDevice.activeFormat.formatDescription); AVCaptureDeviceFormat *bestFormat = nil; NSUInteger maxPixelCount = 0; - int bestSubTypeScore = 0; + FourCharCode bestSubType = 0; for (AVCaptureDeviceFormat *format in _captureDevice.formats) { CMVideoDimensions res = self.videoDimensionsForFormat(format); NSUInteger height = res.height; NSUInteger width = res.width; NSUInteger pixelCount = height * width; FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription); - int subTypeScore = subType == preferredSubType ? 
1 : 0; if (pixelCount > maxPixelCount || - (pixelCount == maxPixelCount && subTypeScore > bestSubTypeScore)) { + (pixelCount == maxPixelCount && subType == preferredSubType && bestSubType != preferredSubType)) { bestFormat = format; maxPixelCount = pixelCount; - bestSubTypeScore = subTypeScore; + bestSubType = subType; } } return bestFormat; From 5007358e4acfd5990b4716cccfadb33be678725d Mon Sep 17 00:00:00 2001 From: misos1 <30872003+misos1@users.noreply.github.com> Date: Thu, 5 Sep 2024 21:17:20 +0200 Subject: [PATCH 05/11] revert reordering lockDevice with beginConfigurationForSession --- .../ios/RunnerTests/CameraSettingsTests.m | 4 +-- .../Sources/camera_avfoundation/FLTCam.m | 28 +++++++++---------- 2 files changed, 16 insertions(+), 16 deletions(-) diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m index 418b316bea62..9bed6bea4883 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m @@ -148,9 +148,9 @@ - (void)testSettings_shouldPassConfigurationToCameraDeviceAndWriter { // Expect FPS configuration is passed to camera device. [self waitForExpectations:@[ - injectedWrapper.beginConfigurationExpectation, injectedWrapper.lockExpectation, + injectedWrapper.lockExpectation, injectedWrapper.beginConfigurationExpectation, injectedWrapper.minFrameDurationExpectation, injectedWrapper.maxFrameDurationExpectation, - injectedWrapper.unlockExpectation, injectedWrapper.commitConfigurationExpectation + injectedWrapper.commitConfigurationExpectation, injectedWrapper.unlockExpectation ] timeout:1 enforceOrder:YES]; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m index 3b1fe466f7c8..a5fb2154555e 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m @@ -211,20 +211,21 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings [_motionManager startAccelerometerUpdates]; if (_mediaSettings.framesPerSecond) { - [_mediaSettingsAVWrapper beginConfigurationForSession:_videoCaptureSession]; - - // Possible values for presets are hard-coded in FLT interface having - // corresponding AVCaptureSessionPreset counterparts. - // If _resolutionPreset is not supported by camera there is - // fallback to lower resolution presets. - // If none can be selected there is error condition. - if (![self setCaptureSessionPreset:_mediaSettings.resolutionPreset withError:error]) { - [_videoCaptureSession commitConfiguration]; - return nil; - } - // The frame rate can be changed only on a locked for configuration device. if ([mediaSettingsAVWrapper lockDevice:_captureDevice error:error]) { + [_mediaSettingsAVWrapper beginConfigurationForSession:_videoCaptureSession]; + + // Possible values for presets are hard-coded in FLT interface having + // corresponding AVCaptureSessionPreset counterparts. + // If _resolutionPreset is not supported by camera there is + // fallback to lower resolution presets. + // If none can be selected there is error condition. 
+ if (![self setCaptureSessionPreset:_mediaSettings.resolutionPreset withError:error]) { + [_videoCaptureSession commitConfiguration]; + [_captureDevice unlockForConfiguration]; + return nil; + } + selectBestFormatForRequestedFrameRate(_captureDevice, _mediaSettings, videoDimensionsForFormat); // Set frame rate with 1/10 precision allowing not integral values. @@ -234,10 +235,9 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings [mediaSettingsAVWrapper setMinFrameDuration:duration onDevice:_captureDevice]; [mediaSettingsAVWrapper setMaxFrameDuration:duration onDevice:_captureDevice]; - [_mediaSettingsAVWrapper unlockDevice:_captureDevice]; [_mediaSettingsAVWrapper commitConfigurationForSession:_videoCaptureSession]; + [_mediaSettingsAVWrapper unlockDevice:_captureDevice]; } else { - [_mediaSettingsAVWrapper commitConfigurationForSession:_videoCaptureSession]; return nil; } } else { From 65f8113ada30bd5bd127d8086e43fd9cc3ce0d5a Mon Sep 17 00:00:00 2001 From: misos1 <30872003+misos1@users.noreply.github.com> Date: Thu, 5 Sep 2024 21:31:14 +0200 Subject: [PATCH 06/11] fix format --- .../Sources/camera_avfoundation/FLTCam.m | 20 ++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m index a5fb2154555e..7187f2e510f5 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m @@ -226,7 +226,8 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings return nil; } - selectBestFormatForRequestedFrameRate(_captureDevice, _mediaSettings, videoDimensionsForFormat); + selectBestFormatForRequestedFrameRate(_captureDevice, _mediaSettings, + _videoDimensionsForFormat); // Set frame rate with 1/10 precision allowing not integral values. int fpsNominator = floor([_mediaSettings.framesPerSecond doubleValue] * 10.0); @@ -253,9 +254,9 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings return self; } -static void selectBestFormatForRequestedFrameRate(AVCaptureDevice *captureDevice, - FCPPlatformMediaSettings *mediaSettings, - VideoDimensionsForFormat videoDimensionsForFormat) +static void selectBestFormatForRequestedFrameRate( + AVCaptureDevice *captureDevice, FCPPlatformMediaSettings *mediaSettings, + VideoDimensionsForFormat videoDimensionsForFormat) { // Find the format which frame rate ranges are closest to the wanted frame rate. 
CMVideoDimensions targetResolution = videoDimensionsForFormat(captureDevice.activeFormat); @@ -268,14 +269,15 @@ static void selectBestFormatForRequestedFrameRate(AVCaptureDevice *captureDevice FourCharCode bestSubType = preferredSubType; for (AVCaptureDeviceFormat *format in captureDevice.formats) { CMVideoDimensions resolution = videoDimensionsForFormat(format); - if (resolution.width != targetResolution.width || resolution.height != targetResolution.height) { + if (resolution.width != targetResolution.width || + resolution.height != targetResolution.height) { continue; } double frameRate = bestFrameRateForFormat(format, targetFrameRate); double distance = fabs(frameRate - targetFrameRate); FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription); - if (distance < minDistance || - (distance == minDistance && subType == preferredSubType && bestSubType != preferredSubType)) { + if (distance < minDistance || (distance == minDistance && subType == preferredSubType && + bestSubType != preferredSubType)) { bestFormat = format; bestFrameRate = frameRate; minDistance = distance; @@ -594,8 +596,8 @@ - (AVCaptureDeviceFormat *)highestResolutionFormatForCaptureDevice: NSUInteger width = res.width; NSUInteger pixelCount = height * width; FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription); - if (pixelCount > maxPixelCount || - (pixelCount == maxPixelCount && subType == preferredSubType && bestSubType != preferredSubType)) { + if (pixelCount > maxPixelCount || (pixelCount == maxPixelCount && subType == preferredSubType && + bestSubType != preferredSubType)) { bestFormat = format; maxPixelCount = pixelCount; bestSubType = subType; From 27868dfb15e1814bb97781ba36603c96da5a91a5 Mon Sep 17 00:00:00 2001 From: misos1 <30872003+misos1@users.noreply.github.com> Date: Thu, 5 Sep 2024 21:38:27 +0200 Subject: [PATCH 07/11] fix format --- .../camera_avfoundation/Sources/camera_avfoundation/FLTCam.m | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m index 7187f2e510f5..ffd820810650 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m @@ -256,8 +256,7 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings static void selectBestFormatForRequestedFrameRate( AVCaptureDevice *captureDevice, FCPPlatformMediaSettings *mediaSettings, - VideoDimensionsForFormat videoDimensionsForFormat) -{ + VideoDimensionsForFormat videoDimensionsForFormat) { // Find the format which frame rate ranges are closest to the wanted frame rate. 
CMVideoDimensions targetResolution = videoDimensionsForFormat(captureDevice.activeFormat); double targetFrameRate = mediaSettings.framesPerSecond.doubleValue; From 39832731fe14925808bdf9613baada04256abb4a Mon Sep 17 00:00:00 2001 From: misos1 <30872003+misos1@users.noreply.github.com> Date: Thu, 19 Sep 2024 19:33:13 +0200 Subject: [PATCH 08/11] add comments and small changes --- .../Sources/camera_avfoundation/FLTCam.m | 112 +++++++++--------- 1 file changed, 59 insertions(+), 53 deletions(-) diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m index ffd820810650..646de6ce9570 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m @@ -153,6 +153,60 @@ - (instancetype)initWithCameraName:(NSString *)cameraName error:error]; } +// Returns frame rate supported by format closest to targetFrameRate. +static double bestFrameRateForFormat(AVCaptureDeviceFormat *format, double targetFrameRate) { + double bestFrameRate = 0; + double minDistance = DBL_MAX; + for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) { + double frameRate = MIN(MAX(targetFrameRate, range.minFrameRate), range.maxFrameRate); + double distance = fabs(frameRate - targetFrameRate); + if (distance < minDistance) { + bestFrameRate = frameRate; + minDistance = distance; + } + } + return bestFrameRate; +} + +// Finds format with same resolution as current activeFormat for which bestFrameRateForFormat +// returned frame rate closest to mediaSettings.framesPerSecond. Preferred are formats with the +// same subtype as current activeFormat. Sets this format as activeFormat and also updates +// mediaSettings.framesPerSecond to value which bestFrameRateForFormat returned for that format. 
+static void selectBestFormatForRequestedFrameRate( + AVCaptureDevice *captureDevice, FCPPlatformMediaSettings *mediaSettings, + VideoDimensionsForFormat videoDimensionsForFormat) { + CMVideoDimensions targetResolution = videoDimensionsForFormat(captureDevice.activeFormat); + double targetFrameRate = mediaSettings.framesPerSecond.doubleValue; + FourCharCode preferredSubType = + CMFormatDescriptionGetMediaSubType(captureDevice.activeFormat.formatDescription); + AVCaptureDeviceFormat *bestFormat = captureDevice.activeFormat; + double bestFrameRate = bestFrameRateForFormat(bestFormat, targetFrameRate); + double minDistance = fabs(bestFrameRate - targetFrameRate); + BOOL isBestSubTypePreferred = YES; + for (AVCaptureDeviceFormat *format in captureDevice.formats) { + CMVideoDimensions resolution = videoDimensionsForFormat(format); + if (resolution.width != targetResolution.width || + resolution.height != targetResolution.height) { + continue; + } + double frameRate = bestFrameRateForFormat(format, targetFrameRate); + double distance = fabs(frameRate - targetFrameRate); + FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription); + BOOL isSubTypePreferred = subType == preferredSubType; + if (distance < minDistance || (distance == minDistance && isSubTypePreferred && + !isBestSubTypePreferred)) { + bestFormat = format; + bestFrameRate = frameRate; + minDistance = distance; + isBestSubTypePreferred = isSubTypePreferred; + } + } + if (![bestFormat isEqual:captureDevice.activeFormat]) { + captureDevice.activeFormat = bestFormat; + } + mediaSettings.framesPerSecond = @(bestFrameRate); +} + - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper orientation:(UIDeviceOrientation)orientation @@ -254,55 +308,6 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings return self; } -static void selectBestFormatForRequestedFrameRate( - AVCaptureDevice *captureDevice, FCPPlatformMediaSettings *mediaSettings, - VideoDimensionsForFormat videoDimensionsForFormat) { - // Find the format which frame rate ranges are closest to the wanted frame rate. 
- CMVideoDimensions targetResolution = videoDimensionsForFormat(captureDevice.activeFormat); - double targetFrameRate = mediaSettings.framesPerSecond.doubleValue; - FourCharCode preferredSubType = - CMFormatDescriptionGetMediaSubType(captureDevice.activeFormat.formatDescription); - AVCaptureDeviceFormat *bestFormat = captureDevice.activeFormat; - double bestFrameRate = bestFrameRateForFormat(bestFormat, targetFrameRate); - double minDistance = fabs(bestFrameRate - targetFrameRate); - FourCharCode bestSubType = preferredSubType; - for (AVCaptureDeviceFormat *format in captureDevice.formats) { - CMVideoDimensions resolution = videoDimensionsForFormat(format); - if (resolution.width != targetResolution.width || - resolution.height != targetResolution.height) { - continue; - } - double frameRate = bestFrameRateForFormat(format, targetFrameRate); - double distance = fabs(frameRate - targetFrameRate); - FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription); - if (distance < minDistance || (distance == minDistance && subType == preferredSubType && - bestSubType != preferredSubType)) { - bestFormat = format; - bestFrameRate = frameRate; - minDistance = distance; - bestSubType = subType; - } - } - if (![bestFormat isEqual:captureDevice.activeFormat]) { - captureDevice.activeFormat = bestFormat; - } - mediaSettings.framesPerSecond = @(bestFrameRate); -} - -static double bestFrameRateForFormat(AVCaptureDeviceFormat *format, double targetFrameRate) { - double bestFrameRate = 0; - double minDistance = DBL_MAX; - for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) { - double frameRate = MIN(MAX(targetFrameRate, range.minFrameRate), range.maxFrameRate); - double distance = fabs(frameRate - targetFrameRate); - if (distance < minDistance) { - bestFrameRate = frameRate; - minDistance = distance; - } - } - return bestFrameRate; -} - - (AVCaptureConnection *)createConnection:(NSError **)error { // Setup video capture input. 
_captureVideoInput = [AVCaptureDeviceInput deviceInputWithDevice:_captureDevice error:error]; @@ -588,18 +593,19 @@ - (AVCaptureDeviceFormat *)highestResolutionFormatForCaptureDevice: CMFormatDescriptionGetMediaSubType(_captureDevice.activeFormat.formatDescription); AVCaptureDeviceFormat *bestFormat = nil; NSUInteger maxPixelCount = 0; - FourCharCode bestSubType = 0; + BOOL isBestSubTypePreferred = NO; for (AVCaptureDeviceFormat *format in _captureDevice.formats) { CMVideoDimensions res = self.videoDimensionsForFormat(format); NSUInteger height = res.height; NSUInteger width = res.width; NSUInteger pixelCount = height * width; FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription); - if (pixelCount > maxPixelCount || (pixelCount == maxPixelCount && subType == preferredSubType && - bestSubType != preferredSubType)) { + BOOL isSubTypePreferred = subType == preferredSubType; + if (pixelCount > maxPixelCount || (pixelCount == maxPixelCount && isSubTypePreferred && + !isBestSubTypePreferred)) { bestFormat = format; maxPixelCount = pixelCount; - bestSubType = subType; + isBestSubTypePreferred = isSubTypePreferred; } } return bestFormat; From 1d8f05a45e9fa92beca65d5fbde19d4082066ff8 Mon Sep 17 00:00:00 2001 From: misos1 <30872003+misos1@users.noreply.github.com> Date: Thu, 19 Sep 2024 19:44:18 +0200 Subject: [PATCH 09/11] fix format --- .../Sources/camera_avfoundation/FLTCam.m | 90 +++++++++---------- 1 file changed, 45 insertions(+), 45 deletions(-) diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m index 646de6ce9570..80f49084935e 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m @@ -155,17 +155,17 @@ - (instancetype)initWithCameraName:(NSString *)cameraName // Returns frame rate supported by format closest to targetFrameRate. static double bestFrameRateForFormat(AVCaptureDeviceFormat *format, double targetFrameRate) { - double bestFrameRate = 0; - double minDistance = DBL_MAX; - for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) { - double frameRate = MIN(MAX(targetFrameRate, range.minFrameRate), range.maxFrameRate); - double distance = fabs(frameRate - targetFrameRate); - if (distance < minDistance) { - bestFrameRate = frameRate; - minDistance = distance; - } - } - return bestFrameRate; + double bestFrameRate = 0; + double minDistance = DBL_MAX; + for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) { + double frameRate = MIN(MAX(targetFrameRate, range.minFrameRate), range.maxFrameRate); + double distance = fabs(frameRate - targetFrameRate); + if (distance < minDistance) { + bestFrameRate = frameRate; + minDistance = distance; + } + } + return bestFrameRate; } // Finds format with same resolution as current activeFormat for which bestFrameRateForFormat @@ -173,38 +173,38 @@ static double bestFrameRateForFormat(AVCaptureDeviceFormat *format, double targe // same subtype as current activeFormat. Sets this format as activeFormat and also updates // mediaSettings.framesPerSecond to value which bestFrameRateForFormat returned for that format. 
static void selectBestFormatForRequestedFrameRate( - AVCaptureDevice *captureDevice, FCPPlatformMediaSettings *mediaSettings, - VideoDimensionsForFormat videoDimensionsForFormat) { - CMVideoDimensions targetResolution = videoDimensionsForFormat(captureDevice.activeFormat); - double targetFrameRate = mediaSettings.framesPerSecond.doubleValue; - FourCharCode preferredSubType = - CMFormatDescriptionGetMediaSubType(captureDevice.activeFormat.formatDescription); - AVCaptureDeviceFormat *bestFormat = captureDevice.activeFormat; - double bestFrameRate = bestFrameRateForFormat(bestFormat, targetFrameRate); - double minDistance = fabs(bestFrameRate - targetFrameRate); - BOOL isBestSubTypePreferred = YES; - for (AVCaptureDeviceFormat *format in captureDevice.formats) { - CMVideoDimensions resolution = videoDimensionsForFormat(format); - if (resolution.width != targetResolution.width || - resolution.height != targetResolution.height) { - continue; - } - double frameRate = bestFrameRateForFormat(format, targetFrameRate); - double distance = fabs(frameRate - targetFrameRate); - FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription); - BOOL isSubTypePreferred = subType == preferredSubType; - if (distance < minDistance || (distance == minDistance && isSubTypePreferred && - !isBestSubTypePreferred)) { - bestFormat = format; - bestFrameRate = frameRate; - minDistance = distance; - isBestSubTypePreferred = isSubTypePreferred; - } - } - if (![bestFormat isEqual:captureDevice.activeFormat]) { - captureDevice.activeFormat = bestFormat; - } - mediaSettings.framesPerSecond = @(bestFrameRate); + AVCaptureDevice *captureDevice, FCPPlatformMediaSettings *mediaSettings, + VideoDimensionsForFormat videoDimensionsForFormat) { + CMVideoDimensions targetResolution = videoDimensionsForFormat(captureDevice.activeFormat); + double targetFrameRate = mediaSettings.framesPerSecond.doubleValue; + FourCharCode preferredSubType = + CMFormatDescriptionGetMediaSubType(captureDevice.activeFormat.formatDescription); + AVCaptureDeviceFormat *bestFormat = captureDevice.activeFormat; + double bestFrameRate = bestFrameRateForFormat(bestFormat, targetFrameRate); + double minDistance = fabs(bestFrameRate - targetFrameRate); + BOOL isBestSubTypePreferred = YES; + for (AVCaptureDeviceFormat *format in captureDevice.formats) { + CMVideoDimensions resolution = videoDimensionsForFormat(format); + if (resolution.width != targetResolution.width || + resolution.height != targetResolution.height) { + continue; + } + double frameRate = bestFrameRateForFormat(format, targetFrameRate); + double distance = fabs(frameRate - targetFrameRate); + FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription); + BOOL isSubTypePreferred = subType == preferredSubType; + if (distance < minDistance || + (distance == minDistance && isSubTypePreferred && !isBestSubTypePreferred)) { + bestFormat = format; + bestFrameRate = frameRate; + minDistance = distance; + isBestSubTypePreferred = isSubTypePreferred; + } + } + if (![bestFormat isEqual:captureDevice.activeFormat]) { + captureDevice.activeFormat = bestFormat; + } + mediaSettings.framesPerSecond = @(bestFrameRate); } - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings @@ -601,8 +601,8 @@ - (AVCaptureDeviceFormat *)highestResolutionFormatForCaptureDevice: NSUInteger pixelCount = height * width; FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription); BOOL isSubTypePreferred = subType == preferredSubType; - 
if (pixelCount > maxPixelCount || (pixelCount == maxPixelCount && isSubTypePreferred && - !isBestSubTypePreferred)) { + if (pixelCount > maxPixelCount || + (pixelCount == maxPixelCount && isSubTypePreferred && !isBestSubTypePreferred)) { bestFormat = format; maxPixelCount = pixelCount; isBestSubTypePreferred = isSubTypePreferred; From cc632ec4c6556ed68579780b4f81e24c245700d5 Mon Sep 17 00:00:00 2001 From: misos1 <30872003+misos1@users.noreply.github.com> Date: Sat, 12 Oct 2024 21:08:51 +0200 Subject: [PATCH 10/11] change comments --- packages/camera/camera_avfoundation/CHANGELOG.md | 2 +- .../Sources/camera_avfoundation/FLTCam.m | 10 ++++++---- packages/camera/camera_avfoundation/pubspec.yaml | 2 +- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md index 82040ff1891b..0f2c82c38a25 100644 --- a/packages/camera/camera_avfoundation/CHANGELOG.md +++ b/packages/camera/camera_avfoundation/CHANGELOG.md @@ -1,4 +1,4 @@ -## 0.9.17+4 +## 0.9.17+5 * Adds ability to use any supported FPS and fixes crash when using unsupported FPS. * Updates minimum supported SDK version to Flutter 3.19/Dart 3.3. diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m index 80f49084935e..f98a3bb7777b 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m @@ -168,10 +168,11 @@ static double bestFrameRateForFormat(AVCaptureDeviceFormat *format, double targe return bestFrameRate; } -// Finds format with same resolution as current activeFormat for which bestFrameRateForFormat -// returned frame rate closest to mediaSettings.framesPerSecond. Preferred are formats with the -// same subtype as current activeFormat. Sets this format as activeFormat and also updates -// mediaSettings.framesPerSecond to value which bestFrameRateForFormat returned for that format. +// Finds format with same resolution as current activeFormat in captureDevice for which +// bestFrameRateForFormat returned frame rate closest to mediaSettings.framesPerSecond. +// Preferred are formats with the same subtype as current activeFormat. Sets this format +// as activeFormat and also updates mediaSettings.framesPerSecond to value which +// bestFrameRateForFormat returned for that format. static void selectBestFormatForRequestedFrameRate( AVCaptureDevice *captureDevice, FCPPlatformMediaSettings *mediaSettings, VideoDimensionsForFormat videoDimensionsForFormat) { @@ -587,6 +588,7 @@ - (BOOL)setCaptureSessionPreset:(FCPPlatformResolutionPreset)resolutionPreset } /// Finds the highest available resolution in terms of pixel count for the given device. +/// Preferred are formats with the same subtype as current activeFormat. - (AVCaptureDeviceFormat *)highestResolutionFormatForCaptureDevice: (AVCaptureDevice *)captureDevice { FourCharCode preferredSubType = diff --git a/packages/camera/camera_avfoundation/pubspec.yaml b/packages/camera/camera_avfoundation/pubspec.yaml index 713a43cbb259..ba8e22fc9e4c 100644 --- a/packages/camera/camera_avfoundation/pubspec.yaml +++ b/packages/camera/camera_avfoundation/pubspec.yaml @@ -2,7 +2,7 @@ name: camera_avfoundation description: iOS implementation of the camera plugin. 
repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_avfoundation issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22 -version: 0.9.17+4 +version: 0.9.17+5 environment: sdk: ^3.3.0 From 0125d77b025b84fe5e52a2f158373b76b016841a Mon Sep 17 00:00:00 2001 From: misos1 <30872003+misos1@users.noreply.github.com> Date: Sat, 12 Oct 2024 21:22:13 +0200 Subject: [PATCH 11/11] change activeFormat always if framesPerSecond is set --- .../camera_avfoundation/Sources/camera_avfoundation/FLTCam.m | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m index f98a3bb7777b..0b065026f10e 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m @@ -202,9 +202,7 @@ static void selectBestFormatForRequestedFrameRate( isBestSubTypePreferred = isSubTypePreferred; } } - if (![bestFormat isEqual:captureDevice.activeFormat]) { - captureDevice.activeFormat = bestFormat; - } + captureDevice.activeFormat = bestFormat; mediaSettings.framesPerSecond = @(bestFrameRate); }
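
Illustrative sketch (not part of the patch series): the core of the frame-rate handling added above is the clamp-then-pick-closest logic in bestFrameRateForFormat, which maps a requested FPS onto whatever the chosen format actually supports. The standalone Objective-C program below mirrors that logic so it can be compiled and run without a capture device; FPSRange and ClosestSupportedFrameRate are hypothetical stand-ins for AVFrameRateRange and the static helper in FLTCam.m, and the sample ranges (3-30 and 3-60 fps) match the mocks used in CameraTestUtils.m.

#import <Foundation/Foundation.h>
#include <float.h>
#include <math.h>

// Stand-in for AVFrameRateRange so the sketch needs no AVCaptureDevice.
typedef struct {
  double minFrameRate;
  double maxFrameRate;
} FPSRange;

// Clamps target into each supported range and returns the achievable frame
// rate closest to target (0 if no ranges are given), mirroring the patched
// bestFrameRateForFormat.
static double ClosestSupportedFrameRate(const FPSRange *ranges, size_t count, double target) {
  double best = 0;
  double minDistance = DBL_MAX;
  for (size_t i = 0; i < count; i++) {
    double clamped = MIN(MAX(target, ranges[i].minFrameRate), ranges[i].maxFrameRate);
    double distance = fabs(clamped - target);
    if (distance < minDistance) {
      best = clamped;
      minDistance = distance;
    }
  }
  return best;
}

int main(void) {
  @autoreleasepool {
    FPSRange ranges[] = {{3, 30}, {3, 60}};
    // 60 fps is inside the second range, so it is returned unchanged.
    NSLog(@"60 fps request -> %.1f", ClosestSupportedFrameRate(ranges, 2, 60));
    // 120 fps exceeds every range, so it is clamped to the closest supported rate, 60.
    NSLog(@"120 fps request -> %.1f", ClosestSupportedFrameRate(ranges, 2, 120));
  }
  return 0;
}

On macOS this should build with something like "clang sketch.m -framework Foundation -o sketch". The second case is the behaviour the series relies on: an unsupported FPS no longer crashes but is silently reduced to the nearest supported value, which is then written back into mediaSettings.framesPerSecond.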