Skip to content

Commit

Permalink
enable more than 30 fps
Browse files Browse the repository at this point in the history
  • Loading branch information
misos1 committed Aug 14, 2024
1 parent 19daf6f commit 6c769f9
Show file tree
Hide file tree
Showing 9 changed files with 253 additions and 53 deletions.
6 changes: 5 additions & 1 deletion packages/camera/camera_avfoundation/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
## 0.9.17+2

* Adds possibility to use any supported FPS and fixes crash when using unsupported FPS.

## 0.9.17+1

* Fixes a crash due to appending sample buffers when readyForMoreMediaData is NO
* Fixes a crash due to appending sample buffers when readyForMoreMediaData is NO.

## 0.9.17

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -148,9 +148,9 @@ - (void)testSettings_shouldPassConfigurationToCameraDeviceAndWriter {

// Expect FPS configuration is passed to camera device.
[self waitForExpectations:@[
injectedWrapper.lockExpectation, injectedWrapper.beginConfigurationExpectation,
injectedWrapper.beginConfigurationExpectation, injectedWrapper.lockExpectation,
injectedWrapper.minFrameDurationExpectation, injectedWrapper.maxFrameDurationExpectation,
injectedWrapper.commitConfigurationExpectation, injectedWrapper.unlockExpectation
injectedWrapper.unlockExpectation, injectedWrapper.commitConfigurationExpectation
]
timeout:1
enforceOrder:YES];
Expand Down Expand Up @@ -202,4 +202,20 @@ - (void)testSettings_ShouldBeSupportedByMethodCall {
XCTAssertNotNil(resultValue);
}

// Requesting 60 FPS should result in an active format whose supported
// frame rate range actually covers 60 FPS.
- (void)testSettings_ShouldSelectFormatWhichSupports60FPS {
  FCPPlatformMediaSettings *mediaSettings =
      [FCPPlatformMediaSettings makeWithResolutionPreset:gTestResolutionPreset
                                         framesPerSecond:@(60)
                                            videoBitrate:@(gTestVideoBitrate)
                                            audioBitrate:@(gTestAudioBitrate)
                                             enableAudio:gTestEnableAudio];

  FLTCam *camera = FLTCreateCamWithCaptureSessionQueueAndMediaSettings(
      dispatch_queue_create("test", NULL), mediaSettings, nil, nil);

  // The selected format's first range must bracket the requested rate.
  AVFrameRateRange *supportedRange =
      camera.captureDevice.activeFormat.videoSupportedFrameRateRanges[0];
  XCTAssertLessThanOrEqual(supportedRange.minFrameRate, 60);
  XCTAssertGreaterThanOrEqual(supportedRange.maxFrameRate, 60);
}

@end
Original file line number Diff line number Diff line change
Expand Up @@ -52,14 +52,44 @@
OCMStub([audioSessionMock addInputWithNoConnections:[OCMArg any]]);
OCMStub([audioSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);

id frameRateRangeMock1 = OCMClassMock([AVFrameRateRange class]);
OCMStub([frameRateRangeMock1 minFrameRate]).andReturn(3);
OCMStub([frameRateRangeMock1 maxFrameRate]).andReturn(30);
id captureDeviceFormatMock1 = OCMClassMock([AVCaptureDeviceFormat class]);
OCMStub([captureDeviceFormatMock1 videoSupportedFrameRateRanges]).andReturn(@[
frameRateRangeMock1
]);

id frameRateRangeMock2 = OCMClassMock([AVFrameRateRange class]);
OCMStub([frameRateRangeMock2 minFrameRate]).andReturn(3);
OCMStub([frameRateRangeMock2 maxFrameRate]).andReturn(60);
id captureDeviceFormatMock2 = OCMClassMock([AVCaptureDeviceFormat class]);
OCMStub([captureDeviceFormatMock2 videoSupportedFrameRateRanges]).andReturn(@[
frameRateRangeMock2
]);

id captureDeviceMock = OCMClassMock([AVCaptureDevice class]);
OCMStub([captureDeviceMock lockForConfiguration:[OCMArg setTo:nil]]).andReturn(YES);
OCMStub([captureDeviceMock formats]).andReturn((@[
captureDeviceFormatMock1, captureDeviceFormatMock2
]));
__block AVCaptureDeviceFormat *format = captureDeviceFormatMock1;
OCMStub([captureDeviceMock setActiveFormat:[OCMArg any]]).andDo(^(NSInvocation *invocation) {
[invocation retainArguments];
[invocation getArgument:&format atIndex:2];
});
OCMStub([captureDeviceMock activeFormat]).andDo(^(NSInvocation *invocation) {
[invocation setReturnValue:&format];
});

id fltCam = [[FLTCam alloc] initWithMediaSettings:mediaSettings
mediaSettingsAVWrapper:mediaSettingsAVWrapper
orientation:UIDeviceOrientationPortrait
videoCaptureSession:videoSessionMock
audioCaptureSession:audioSessionMock
captureSessionQueue:captureSessionQueue
captureDeviceFactory:captureDeviceFactory ?: ^AVCaptureDevice *(void) {
return [AVCaptureDevice deviceWithUniqueID:@"camera"];
return captureDeviceMock;
}
videoDimensionsForFormat:^CMVideoDimensions(AVCaptureDeviceFormat *format) {
return CMVideoFormatDescriptionGetDimensions(format.formatDescription);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -210,20 +210,50 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings
[_motionManager startAccelerometerUpdates];

if (_mediaSettings.framesPerSecond) {
[_mediaSettingsAVWrapper beginConfigurationForSession:_videoCaptureSession];

// Possible values for presets are hard-coded in FLT interface having
// corresponding AVCaptureSessionPreset counterparts.
// If _resolutionPreset is not supported by camera there is
// fallback to lower resolution presets.
// If none can be selected there is error condition.
if (![self setCaptureSessionPreset:_mediaSettings.resolutionPreset withError:error]) {
[_videoCaptureSession commitConfiguration];
return nil;
}

// The frame rate can be changed only on a locked for configuration device.
if ([mediaSettingsAVWrapper lockDevice:_captureDevice error:error]) {
[_mediaSettingsAVWrapper beginConfigurationForSession:_videoCaptureSession];

// Possible values for presets are hard-coded in FLT interface having
// corresponding AVCaptureSessionPreset counterparts.
// If _resolutionPreset is not supported by camera there is
// fallback to lower resolution presets.
// If none can be selected there is error condition.
if (![self setCaptureSessionPreset:_mediaSettings.resolutionPreset withError:error]) {
[_videoCaptureSession commitConfiguration];
[_captureDevice unlockForConfiguration];
return nil;
// find the format which frame rate ranges are closest to the wanted frame rate
CMVideoDimensions targetRes = self.videoDimensionsForFormat(_captureDevice.activeFormat);
double targetFrameRate = _mediaSettings.framesPerSecond.doubleValue;
FourCharCode preferredSubType =
CMFormatDescriptionGetMediaSubType(_captureDevice.activeFormat.formatDescription);
AVCaptureDeviceFormat *bestFormat = _captureDevice.activeFormat;
double bestFrameRate = [self frameRateForFormat:bestFormat closestTo:targetFrameRate];
double minDistance = fabs(bestFrameRate - targetFrameRate);
int bestSubTypeScore = 1;
for (AVCaptureDeviceFormat *format in _captureDevice.formats) {
CMVideoDimensions res = self.videoDimensionsForFormat(format);
if (res.width != targetRes.width || res.height != targetRes.height) {
continue;
}
double frameRate = [self frameRateForFormat:format closestTo:targetFrameRate];
double distance = fabs(frameRate - targetFrameRate);
FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
int subTypeScore = subType == preferredSubType ? 1 : 0;
if (distance < minDistance ||
(distance == minDistance && subTypeScore > bestSubTypeScore)) {
bestFormat = format;
bestFrameRate = frameRate;
minDistance = distance;
bestSubTypeScore = subTypeScore;
}
}
if (![bestFormat isEqual:_captureDevice.activeFormat]) {
_captureDevice.activeFormat = bestFormat;
}
_mediaSettings.framesPerSecond = @(bestFrameRate);

// Set frame rate with 1/10 precision allowing not integral values.
int fpsNominator = floor([_mediaSettings.framesPerSecond doubleValue] * 10.0);
Expand All @@ -232,9 +262,10 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings
[mediaSettingsAVWrapper setMinFrameDuration:duration onDevice:_captureDevice];
[mediaSettingsAVWrapper setMaxFrameDuration:duration onDevice:_captureDevice];

[_mediaSettingsAVWrapper commitConfigurationForSession:_videoCaptureSession];
[_mediaSettingsAVWrapper unlockDevice:_captureDevice];
[_mediaSettingsAVWrapper commitConfigurationForSession:_videoCaptureSession];
} else {
[_mediaSettingsAVWrapper commitConfigurationForSession:_videoCaptureSession];
return nil;
}
} else {
Expand All @@ -250,6 +281,20 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings
return self;
}

/// Returns the frame rate supported by |format| that is closest to
/// |targetFrameRate|.
///
/// Each supported range contributes the target clamped into that range
/// (the nearest rate the range can deliver); the contribution with the
/// smallest distance to the target wins. Returns 0 if the format reports
/// no supported frame rate ranges.
- (double)frameRateForFormat:(AVCaptureDeviceFormat *)format closestTo:(double)targetFrameRate {
  double closestFrameRate = 0;
  double closestDistance = DBL_MAX;
  for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
    // Clamp the target into this range; equivalent to
    // MIN(MAX(target, min), max).
    double candidate = MAX(range.minFrameRate, MIN(targetFrameRate, range.maxFrameRate));
    double distance = fabs(candidate - targetFrameRate);
    if (distance < closestDistance) {
      closestFrameRate = candidate;
      closestDistance = distance;
    }
  }
  return closestFrameRate;
}

- (AVCaptureConnection *)createConnection:(NSError **)error {
// Setup video capture input.
_captureVideoInput = [AVCaptureDeviceInput deviceInputWithDevice:_captureDevice error:error];
Expand Down Expand Up @@ -543,16 +588,23 @@ - (BOOL)setCaptureSessionPreset:(FCPPlatformResolutionPreset)resolutionPreset
/// Finds the highest available resolution in terms of pixel count for the given device.
- (AVCaptureDeviceFormat *)highestResolutionFormatForCaptureDevice:
(AVCaptureDevice *)captureDevice {
FourCharCode preferredSubType =
CMFormatDescriptionGetMediaSubType(_captureDevice.activeFormat.formatDescription);
AVCaptureDeviceFormat *bestFormat = nil;
NSUInteger maxPixelCount = 0;
int bestSubTypeScore = 0;
for (AVCaptureDeviceFormat *format in _captureDevice.formats) {
CMVideoDimensions res = self.videoDimensionsForFormat(format);
NSUInteger height = res.height;
NSUInteger width = res.width;
NSUInteger pixelCount = height * width;
if (pixelCount > maxPixelCount) {
maxPixelCount = pixelCount;
FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
int subTypeScore = subType == preferredSubType ? 1 : 0;
if (pixelCount > maxPixelCount ||
(pixelCount == maxPixelCount && subTypeScore > bestSubTypeScore)) {
bestFormat = format;
maxPixelCount = pixelCount;
bestSubTypeScore = subTypeScore;
}
}
return bestFormat;
Expand Down
2 changes: 1 addition & 1 deletion packages/camera/camera_avfoundation/pubspec.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ name: camera_avfoundation
description: iOS implementation of the camera plugin.
repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_avfoundation
issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
version: 0.9.17+1
version: 0.9.17+2

environment:
sdk: ^3.2.3
Expand Down
5 changes: 5 additions & 0 deletions packages/video_player/video_player_avfoundation/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,8 @@
## 2.6.2

* Adds possibility to play videos at more than 30 FPS.
* Fixes playing state not updating in some paths.

## 2.6.1

* Adds files to make include directory permanent.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -125,6 +125,7 @@ @interface StubFVPDisplayLinkFactory : NSObject <FVPDisplayLinkFactory>

/** This display link to return. */
@property(nonatomic, strong) FVPDisplayLink *displayLink;
@property(nonatomic) void (^fireDisplayLink)(void);

- (instancetype)initWithDisplayLink:(FVPDisplayLink *)displayLink;

Expand All @@ -138,6 +139,7 @@ - (instancetype)initWithDisplayLink:(FVPDisplayLink *)displayLink {
}
- (FVPDisplayLink *)displayLinkWithRegistrar:(id<FlutterPluginRegistrar>)registrar
callback:(void (^)(void))callback {
self.fireDisplayLink = callback;
return self.displayLink;
}

Expand Down Expand Up @@ -243,13 +245,14 @@ - (void)testSeekToWhilePausedStartsDisplayLinkTemporarily {
OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero])
.ignoringNonObjectArgs()
.andReturn(YES);
// Any non-zero value is fine here since it won't actually be used, just NULL-checked.
CVPixelBufferRef fakeBufferRef = (CVPixelBufferRef)1;
CVPixelBufferRef bufferRef;
CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef);
OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero itemTimeForDisplay:NULL])
.ignoringNonObjectArgs()
.andReturn(fakeBufferRef);
.andReturn(bufferRef);
// Simulate a callback from the engine to request a new frame.
[player copyPixelBuffer];
stubDisplayLinkFactory.fireDisplayLink();
CFRelease([player copyPixelBuffer]);
// Since a frame was found, and the video is paused, the display link should be paused again.
OCMVerify([mockDisplayLink setRunning:NO]);
}
Expand Down Expand Up @@ -294,14 +297,15 @@ - (void)testInitStartsDisplayLinkTemporarily {
OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero])
.ignoringNonObjectArgs()
.andReturn(YES);
// Any non-zero value is fine here since it won't actually be used, just NULL-checked.
CVPixelBufferRef fakeBufferRef = (CVPixelBufferRef)1;
CVPixelBufferRef bufferRef;
CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef);
OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero itemTimeForDisplay:NULL])
.ignoringNonObjectArgs()
.andReturn(fakeBufferRef);
.andReturn(bufferRef);
// Simulate a callback from the engine to request a new frame.
FVPVideoPlayer *player = videoPlayerPlugin.playersByTextureId[textureId];
[player copyPixelBuffer];
stubDisplayLinkFactory.fireDisplayLink();
CFRelease([player copyPixelBuffer]);
// Since a frame was found, and the video is paused, the display link should be paused again.
OCMVerify([mockDisplayLink setRunning:NO]);
}
Expand Down Expand Up @@ -357,13 +361,14 @@ - (void)testSeekToWhilePlayingDoesNotStopDisplayLink {
OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero])
.ignoringNonObjectArgs()
.andReturn(YES);
// Any non-zero value is fine here since it won't actually be used, just NULL-checked.
CVPixelBufferRef fakeBufferRef = (CVPixelBufferRef)1;
CVPixelBufferRef bufferRef;
CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef);
OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero itemTimeForDisplay:NULL])
.ignoringNonObjectArgs()
.andReturn(fakeBufferRef);
.andReturn(bufferRef);
// Simulate a callback from the engine to request a new frame.
[player copyPixelBuffer];
stubDisplayLinkFactory.fireDisplayLink();
CFRelease([player copyPixelBuffer]);
// Since the video was playing, the display link should not be paused after getting a buffer.
OCMVerify(never(), [mockDisplayLink setRunning:NO]);
}
Expand Down Expand Up @@ -790,6 +795,82 @@ - (void)testPublishesInRegistration {
XCTAssertTrue([publishedValue isKindOfClass:[FVPVideoPlayerPlugin class]]);
}

// Guards against the player dropping every second frame: after the engine
// consumes a buffer via copyPixelBuffer, the next display-link tick with a
// new frame available must notify the texture registry again.
- (void)testPlayerShouldNotDropEverySecondFrame {
  NSObject<FlutterPluginRegistrar> *registrar =
      [GetPluginRegistry() registrarForPlugin:@"testPlayerShouldNotDropEverySecondFrame"];
  // Partial mock so the plugin's texture registrations go to a verifiable
  // mock registry.
  NSObject<FlutterPluginRegistrar> *partialRegistrar = OCMPartialMock(registrar);
  NSObject<FlutterTextureRegistry> *mockTextureRegistry =
      OCMProtocolMock(@protocol(FlutterTextureRegistry));
  OCMStub([partialRegistrar textures]).andReturn(mockTextureRegistry);

  FVPDisplayLink *displayLink = [[FVPDisplayLink alloc] initWithRegistrar:registrar
                                                                 callback:^(){
                                                                 }];
  // The stub factory records the player's display-link callback so the test
  // can fire ticks manually via fireDisplayLink.
  StubFVPDisplayLinkFactory *stubDisplayLinkFactory =
      [[StubFVPDisplayLinkFactory alloc] initWithDisplayLink:displayLink];
  AVPlayerItemVideoOutput *mockVideoOutput = OCMPartialMock([[AVPlayerItemVideoOutput alloc] init]);
  FVPVideoPlayerPlugin *videoPlayerPlugin = [[FVPVideoPlayerPlugin alloc]
      initWithAVFactory:[[StubFVPAVFactory alloc] initWithPlayer:nil output:mockVideoOutput]
      displayLinkFactory:stubDisplayLinkFactory
              registrar:partialRegistrar];

  FlutterError *error;
  [videoPlayerPlugin initialize:&error];
  XCTAssertNil(error);
  FVPCreationOptions *create = [FVPCreationOptions
      makeWithAsset:nil
                uri:@"https://flutter.github.io/assets-for-api-docs/assets/videos/bee.mp4"
        packageName:nil
         formatHint:nil
        httpHeaders:@{}];
  NSNumber *textureId = [videoPlayerPlugin createWithOptions:create error:&error];
  FVPVideoPlayer *player = videoPlayerPlugin.playersByTextureId[textureId];

  // Simulated item clock; advanceFrame (below) moves it forward one tick.
  __block CMTime currentTime = kCMTimeZero;
  OCMStub([mockVideoOutput itemTimeForHostTime:0])
      .ignoringNonObjectArgs()
      .andDo(^(NSInvocation *invocation) {
        [invocation setReturnValue:&currentTime];
      });
  // Item times for which a not-yet-consumed pixel buffer exists.
  __block NSMutableSet *pixelBuffers = NSMutableSet.new;
  OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero])
      .ignoringNonObjectArgs()
      .andDo(^(NSInvocation *invocation) {
        CMTime itemTime;
        [invocation getArgument:&itemTime atIndex:2];
        BOOL has = [pixelBuffers containsObject:[NSValue valueWithCMTime:itemTime]];
        [invocation setReturnValue:&has];
      });
  // Copying removes the time from the set, so each buffer is returned at
  // most once; a second copy for the same time yields NULL.
  OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero
                                   itemTimeForDisplay:[OCMArg anyPointer]])
      .ignoringNonObjectArgs()
      .andDo(^(NSInvocation *invocation) {
        CMTime itemTime;
        [invocation getArgument:&itemTime atIndex:2];
        CVPixelBufferRef bufferRef = NULL;
        if ([pixelBuffers containsObject:[NSValue valueWithCMTime:itemTime]]) {
          CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef);
        }
        [pixelBuffers removeObject:[NSValue valueWithCMTime:itemTime]];
        [invocation setReturnValue:&bufferRef];
      });
  // Advance the clock by one tick and make a fresh buffer available for the
  // new time.
  void (^advanceFrame)(void) = ^{
    currentTime.value++;
    [pixelBuffers addObject:[NSValue valueWithCMTime:currentTime]];
  };

  // First frame: a display-link tick must announce frame availability.
  advanceFrame();
  OCMExpect([mockTextureRegistry textureFrameAvailable:textureId.intValue]);
  stubDisplayLinkFactory.fireDisplayLink();
  OCMVerifyAllWithDelay(mockTextureRegistry, 10);

  // Second frame: after the engine consumed the first buffer, the very next
  // tick must announce the next frame too — no every-other-frame drop.
  advanceFrame();
  OCMExpect([mockTextureRegistry textureFrameAvailable:textureId.intValue]);
  CFRelease([player copyPixelBuffer]);
  stubDisplayLinkFactory.fireDisplayLink();
  OCMVerifyAllWithDelay(mockTextureRegistry, 10);
}

#if TARGET_OS_IOS
- (void)validateTransformFixForOrientation:(UIImageOrientation)orientation {
AVAssetTrack *track = [[FakeAVAssetTrack alloc] initWithOrientation:orientation];
Expand Down
Loading

0 comments on commit 6c769f9

Please sign in to comment.