diff --git a/packages/video_player/video_player_avfoundation/CHANGELOG.md b/packages/video_player/video_player_avfoundation/CHANGELOG.md
index 81e43153e7a..992c015df1b 100644
--- a/packages/video_player/video_player_avfoundation/CHANGELOG.md
+++ b/packages/video_player/video_player_avfoundation/CHANGELOG.md
@@ -1,3 +1,8 @@
+## 2.7.1
+
+* Adds support for playing videos at more than 30 FPS.
+* Fixes the playing state not updating in some code paths.
+
 ## 2.7.0
 
 * Adds support for platform views as an optional way of displaying a video.
diff --git a/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.m b/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.m
index e3c1d058728..4bb06d7d970 100644
--- a/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.m
+++ b/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.m
@@ -131,6 +131,7 @@ @interface StubFVPDisplayLinkFactory : NSObject
 /** This display link to return. */
 @property(nonatomic, strong) FVPDisplayLink *displayLink;
+@property(nonatomic, copy) void (^fireDisplayLink)(void);
 
 - (instancetype)initWithDisplayLink:(FVPDisplayLink *)displayLink;
 
@@ -144,6 +145,7 @@ - (instancetype)initWithDisplayLink:(FVPDisplayLink *)displayLink {
 }
 
 - (FVPDisplayLink *)displayLinkWithRegistrar:(id<FlutterPluginRegistrar>)registrar
                                     callback:(void (^)(void))callback {
+  self.fireDisplayLink = callback;
   return self.displayLink;
 }
 
@@ -321,13 +323,14 @@ - (void)testSeekToWhilePausedStartsDisplayLinkTemporarily {
   OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero])
       .ignoringNonObjectArgs()
       .andReturn(YES);
-  // Any non-zero value is fine here since it won't actually be used, just NULL-checked.
-  CVPixelBufferRef fakeBufferRef = (CVPixelBufferRef)1;
+  CVPixelBufferRef bufferRef;
+  CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef);
   OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero itemTimeForDisplay:NULL])
       .ignoringNonObjectArgs()
-      .andReturn(fakeBufferRef);
+      .andReturn(bufferRef);
   // Simulate a callback from the engine to request a new frame.
-  [player copyPixelBuffer];
+  stubDisplayLinkFactory.fireDisplayLink();
+  CFRelease([player copyPixelBuffer]);
   // Since a frame was found, and the video is paused, the display link should be paused again.
   OCMVerify([mockDisplayLink setRunning:NO]);
 }
@@ -373,15 +376,16 @@ - (void)testInitStartsDisplayLinkTemporarily {
   OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero])
       .ignoringNonObjectArgs()
       .andReturn(YES);
-  // Any non-zero value is fine here since it won't actually be used, just NULL-checked.
-  CVPixelBufferRef fakeBufferRef = (CVPixelBufferRef)1;
+  CVPixelBufferRef bufferRef;
+  CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef);
   OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero itemTimeForDisplay:NULL])
       .ignoringNonObjectArgs()
-      .andReturn(fakeBufferRef);
+      .andReturn(bufferRef);
   // Simulate a callback from the engine to request a new frame.
   FVPTextureBasedVideoPlayer *player =
       (FVPTextureBasedVideoPlayer *)videoPlayerPlugin.playersByIdentifier[playerIdentifier];
-  [player copyPixelBuffer];
+  stubDisplayLinkFactory.fireDisplayLink();
+  CFRelease([player copyPixelBuffer]);
   // Since a frame was found, and the video is paused, the display link should be paused again.
   OCMVerify([mockDisplayLink setRunning:NO]);
 }
 
@@ -433,19 +437,25 @@ - (void)testSeekToWhilePlayingDoesNotStopDisplayLink {
   FVPTextureBasedVideoPlayer *player =
       (FVPTextureBasedVideoPlayer *)videoPlayerPlugin.playersByIdentifier[playerIdentifier];
-  XCTAssertEqual([player position], 1234);
+  // Wait for the player's position to update; it shouldn't take long.
+  XCTestExpectation *positionExpectation =
+      [self expectationForPredicate:[NSPredicate predicateWithFormat:@"position == 1234"]
+                evaluatedWithObject:player
+                            handler:nil];
+  [self waitForExpectations:@[ positionExpectation ] timeout:3.0];
 
   // Simulate a buffer being available.
   OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero])
       .ignoringNonObjectArgs()
       .andReturn(YES);
-  // Any non-zero value is fine here since it won't actually be used, just NULL-checked.
-  CVPixelBufferRef fakeBufferRef = (CVPixelBufferRef)1;
+  CVPixelBufferRef bufferRef;
+  CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef);
   OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero itemTimeForDisplay:NULL])
       .ignoringNonObjectArgs()
-      .andReturn(fakeBufferRef);
+      .andReturn(bufferRef);
   // Simulate a callback from the engine to request a new frame.
-  [player copyPixelBuffer];
+  stubDisplayLinkFactory.fireDisplayLink();
+  CFRelease([player copyPixelBuffer]);
   // Since the video was playing, the display link should not be paused after getting a buffer.
   OCMVerify(never(), [mockDisplayLink setRunning:NO]);
 }
@@ -994,6 +1004,84 @@ - (void)testUpdatePlayingStateShouldNotResetRate {
   XCTAssertEqual(player.player.rate, 2);
 }
 
+- (void)testPlayerShouldNotDropEverySecondFrame {
+  NSObject<FlutterPluginRegistrar> *registrar =
+      [GetPluginRegistry() registrarForPlugin:@"testPlayerShouldNotDropEverySecondFrame"];
+  NSObject<FlutterPluginRegistrar> *partialRegistrar = OCMPartialMock(registrar);
+  NSObject<FlutterTextureRegistry> *mockTextureRegistry =
+      OCMProtocolMock(@protocol(FlutterTextureRegistry));
+  OCMStub([partialRegistrar textures]).andReturn(mockTextureRegistry);
+
+  FVPDisplayLink *displayLink = [[FVPDisplayLink alloc] initWithRegistrar:registrar
+                                                                 callback:^(){
+                                                                 }];
+  StubFVPDisplayLinkFactory *stubDisplayLinkFactory =
+      [[StubFVPDisplayLinkFactory alloc] initWithDisplayLink:displayLink];
+  AVPlayerItemVideoOutput *mockVideoOutput = OCMPartialMock([[AVPlayerItemVideoOutput alloc] init]);
+  FVPVideoPlayerPlugin *videoPlayerPlugin = [[FVPVideoPlayerPlugin alloc]
+      initWithAVFactory:[[StubFVPAVFactory alloc] initWithPlayer:nil output:mockVideoOutput]
+     displayLinkFactory:stubDisplayLinkFactory
+              registrar:partialRegistrar];
+
+  FlutterError *error;
+  [videoPlayerPlugin initialize:&error];
+  XCTAssertNil(error);
+  FVPCreationOptions *create = [FVPCreationOptions
+      makeWithAsset:nil
+                uri:@"https://flutter.github.io/assets-for-api-docs/assets/videos/bee.mp4"
+        packageName:nil
+         formatHint:nil
+        httpHeaders:@{}
+           viewType:FVPPlatformVideoViewTypeTextureView];
+  NSNumber *playerIdentifier = [videoPlayerPlugin createWithOptions:create error:&error];
+  FVPTextureBasedVideoPlayer *player =
+      (FVPTextureBasedVideoPlayer *)videoPlayerPlugin.playersByIdentifier[playerIdentifier];
+
+  __block CMTime currentTime = kCMTimeZero;
+  OCMStub([mockVideoOutput itemTimeForHostTime:0])
+      .ignoringNonObjectArgs()
+      .andDo(^(NSInvocation *invocation) {
+        [invocation setReturnValue:&currentTime];
+      });
+  __block NSMutableSet *pixelBuffers = NSMutableSet.new;
+  OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero])
+      .ignoringNonObjectArgs()
+      .andDo(^(NSInvocation *invocation) {
+        CMTime itemTime;
+        [invocation getArgument:&itemTime atIndex:2];
+        BOOL has = [pixelBuffers containsObject:[NSValue valueWithCMTime:itemTime]];
+        [invocation setReturnValue:&has];
+      });
+  OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero
+                                   itemTimeForDisplay:[OCMArg anyPointer]])
+      .ignoringNonObjectArgs()
+      .andDo(^(NSInvocation *invocation) {
+        CMTime itemTime;
+        [invocation getArgument:&itemTime atIndex:2];
+        CVPixelBufferRef bufferRef = NULL;
+        if ([pixelBuffers containsObject:[NSValue valueWithCMTime:itemTime]]) {
+          CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef);
+        }
+        [pixelBuffers removeObject:[NSValue valueWithCMTime:itemTime]];
+        [invocation setReturnValue:&bufferRef];
+      });
+  void (^advanceFrame)(void) = ^{
+    currentTime.value++;
+    [pixelBuffers addObject:[NSValue valueWithCMTime:currentTime]];
+  };
+
+  advanceFrame();
+  OCMExpect([mockTextureRegistry textureFrameAvailable:playerIdentifier.intValue]);
+  stubDisplayLinkFactory.fireDisplayLink();
+  OCMVerifyAllWithDelay(mockTextureRegistry, 10);
+
+  advanceFrame();
+  OCMExpect([mockTextureRegistry textureFrameAvailable:playerIdentifier.intValue]);
+  CFRelease([player copyPixelBuffer]);
+  stubDisplayLinkFactory.fireDisplayLink();
+  OCMVerifyAllWithDelay(mockTextureRegistry, 10);
+}
+
 #if TARGET_OS_IOS
 - (void)testVideoPlayerShouldNotOverwritePlayAndRecordNorDefaultToSpeaker {
   NSObject<FlutterPluginRegistrar> *registrar = [GetPluginRegistry()
diff --git a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPFrameUpdater.m b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPFrameUpdater.m
index e73d486da17..0c66f6f639b 100644
--- a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPFrameUpdater.m
+++ b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPFrameUpdater.m
@@ -4,28 +4,16 @@
 
 #import "./include/video_player_avfoundation/FVPFrameUpdater.h"
 
-/// FVPFrameUpdater is responsible for notifying the Flutter texture registry
-/// when a new video frame is available.
-@interface FVPFrameUpdater ()
-/// The Flutter texture registry used to notify about new frames.
-@property(nonatomic, weak, readonly) NSObject<FlutterTextureRegistry> *registry;
-@end
-
 @implementation FVPFrameUpdater
 - (FVPFrameUpdater *)initWithRegistry:(NSObject<FlutterTextureRegistry> *)registry {
   NSAssert(self, @"super init cannot be nil");
   if (self == nil) return nil;
   _registry = registry;
-  _lastKnownAvailableTime = kCMTimeInvalid;
   return self;
 }
 
 - (void)displayLinkFired {
-  // Only report a new frame if one is actually available.
-  CMTime outputItemTime = [self.videoOutput itemTimeForHostTime:CACurrentMediaTime()];
-  if ([self.videoOutput hasNewPixelBufferForItemTime:outputItemTime]) {
-    _lastKnownAvailableTime = outputItemTime;
-    [_registry textureFrameAvailable:_textureIdentifier];
-  }
+  self.frameDuration = _displayLink.duration;
+  [_registry textureFrameAvailable:_textureIdentifier];
 }
 
 @end
diff --git a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPTextureBasedVideoPlayer.m b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPTextureBasedVideoPlayer.m
index 509d0b5e55a..68f8a4673c6 100644
--- a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPTextureBasedVideoPlayer.m
+++ b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPTextureBasedVideoPlayer.m
@@ -12,6 +12,20 @@ @interface FVPTextureBasedVideoPlayer ()
 @property(nonatomic) FVPFrameUpdater *frameUpdater;
 // The display link that drives frameUpdater.
 @property(nonatomic) FVPDisplayLink *displayLink;
+// The latest buffer obtained from the video output. This is stored so that it can be returned
+// from copyPixelBuffer again if nothing new is available, since returning NULL to the engine
+// results in undefined behavior.
+@property(nonatomic) CVPixelBufferRef latestPixelBuffer;
+// The time at which the next frame is scheduled to display.
+@property(nonatomic) CFTimeInterval targetTime;
+// Whether to enqueue textureFrameAvailable from copyPixelBuffer.
+@property(nonatomic) BOOL selfRefresh;
+// The time at which the current average-frame-duration measurement started.
+@property(nonatomic) CFTimeInterval startTime;
+// The number of frames counted since the start of the average-frame-duration measurement.
+@property(nonatomic) int framesCount;
+// The most recent frame duration since the last significant change.
+@property(nonatomic) CFTimeInterval latestDuration;
 // Whether a new frame needs to be provided to the engine regardless of the current play/pause state
 // (e.g., after a seek while paused). If YES, the display link should continue to run until the next
 // frame is successfully provided.
@@ -67,7 +81,8 @@ - (instancetype)initWithPlayerItem:(AVPlayerItem *)item
   if (self) {
     _frameUpdater = frameUpdater;
     _displayLink = displayLink;
-    _frameUpdater.videoOutput = self.videoOutput;
+    _frameUpdater.displayLink = _displayLink;
+    _selfRefresh = true;
     _onDisposed = [onDisposed copy];
 
     // This is to fix 2 bugs: 1. blank video for encrypted video streams on iOS 16
@@ -81,6 +96,10 @@ - (instancetype)initWithPlayerItem:(AVPlayerItem *)item
   return self;
 }
 
+- (void)dealloc {
+  CVBufferRelease(_latestPixelBuffer);
+}
+
 - (void)setTextureIdentifier:(int64_t)textureIdentifier {
   self.frameUpdater.textureIdentifier = textureIdentifier;
 }
@@ -161,17 +180,32 @@ - (void)dispose {
 
 #pragma mark - FlutterTexture
 
 - (CVPixelBufferRef)copyPixelBuffer {
+  // If the difference between the target time and the current time is greater than this fraction
+  // of the frame duration, the target time is reset.
+  const float resetThreshold = 0.5;
+
+  // Ensure video sampling at regular intervals. This method is not called at exactly regular
+  // intervals, so CACurrentMediaTime returns irregular timestamps, which causes missed video
+  // frames. The range outside of which targetTime is reset should be narrow enough to keep any
+  // lag as small as possible, and at the same time wide enough to avoid overly frequent resets,
+  // which would lead to irregular sampling.
+  // TODO: Ideally this would use the targetTimestamp of the display link used by the Flutter
+  // engine. https://github.com/flutter/flutter/issues/159087
+  CFTimeInterval currentTime = CACurrentMediaTime();
+  CFTimeInterval duration = self.frameUpdater.frameDuration;
+  if (fabs(self.targetTime - currentTime) > duration * resetThreshold) {
+    self.targetTime = currentTime;
+  }
+  self.targetTime += duration;
+
   CVPixelBufferRef buffer = NULL;
-  CMTime outputItemTime = [self.videoOutput itemTimeForHostTime:CACurrentMediaTime()];
+  CMTime outputItemTime = [self.videoOutput itemTimeForHostTime:self.targetTime];
   if ([self.videoOutput hasNewPixelBufferForItemTime:outputItemTime]) {
     buffer = [self.videoOutput copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL];
-  } else {
-    // If the current time isn't available yet, use the time that was checked when informing the
-    // engine that a frame was available (if any).
-    CMTime lastAvailableTime = self.frameUpdater.lastKnownAvailableTime;
-    if (CMTIME_IS_VALID(lastAvailableTime)) {
-      buffer = [self.videoOutput copyPixelBufferForItemTime:lastAvailableTime
-                                         itemTimeForDisplay:NULL];
+    if (buffer) {
+      // Balance the owned reference from copyPixelBufferForItemTime.
+      CVBufferRelease(self.latestPixelBuffer);
+      self.latestPixelBuffer = buffer;
     }
   }
@@ -184,7 +218,48 @@ - (CVPixelBufferRef)copyPixelBuffer {
     }
   }
 
-  return buffer;
+  // Calling textureFrameAvailable only from within displayLinkFired would require a non-trivial
+  // solution to minimize missed video frames caused by races between displayLinkFired,
+  // copyPixelBuffer, and the point where the Flutter engine resets _textureFrameAvailable to false.
+  // TODO: Ideally FlutterTexture would support a mode where copyPixelBuffer is always called, or
+  // some other alternative, instead of being invoked on demand via textureFrameAvailable.
+  // https://github.com/flutter/flutter/issues/159162
+  if (self.displayLink.running && self.selfRefresh) {
+    // The number of frames over which to measure the average frame duration.
+    const int windowSize = 10;
+    // If the measured average frame duration is shorter than this fraction of the frame duration
+    // reported by the display link, rely solely on refreshes from the display link.
+    const float durationThreshold = 0.5;
+    // If the duration changes by this fraction or more, reset the average frame duration
+    // measurement.
+    const float resetFraction = 0.01;
+
+    if (fabs(duration - self.latestDuration) >= self.latestDuration * resetFraction) {
+      self.startTime = currentTime;
+      self.framesCount = 0;
+      self.latestDuration = duration;
+    }
+    if (self.framesCount == windowSize) {
+      CFTimeInterval averageDuration = (currentTime - self.startTime) / windowSize;
+      if (averageDuration < duration * durationThreshold) {
+        NSLog(@"Warning: measured average duration between frames is unexpectedly short (%f/%f), "
+              @"please report this to "
+              @"https://github.com/flutter/flutter/issues.",
+              averageDuration, duration);
+        self.selfRefresh = false;
+      }
+      self.startTime = currentTime;
+      self.framesCount = 0;
+    }
+    self.framesCount++;
+
+    dispatch_async(dispatch_get_main_queue(), ^{
+      [self.frameUpdater.registry textureFrameAvailable:self.frameUpdater.textureIdentifier];
+    });
+  }
+
+  // Add a retain for the engine, since the reference from copyPixelBufferForItemTime has already
+  // been accounted for and the engine expects an owning reference.
+  return CVBufferRetain(self.latestPixelBuffer);
 }
 
 - (void)onTextureUnregistered:(NSObject<FlutterTexture> *)texture {
diff --git a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayer.m b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayer.m
index d1d096e7b28..04d6d21cb2e 100644
--- a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayer.m
+++ b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayer.m
@@ -60,6 +60,10 @@ - (instancetype)initWithPlayerItem:(AVPlayerItem *)item
                                           error:nil] == AVKeyValueStatusLoaded) {
       // Rotate the video by using a videoComposition and the preferredTransform
       self->_preferredTransform = FVPGetStandardizedTransformForTrack(videoTrack);
+      // Do not use video composition when it is not needed.
+      if (CGAffineTransformIsIdentity(self->_preferredTransform)) {
+        return;
+      }
       // Note:
       // https://developer.apple.com/documentation/avfoundation/avplayeritem/1388818-videocomposition
       // Video composition can only be used with file-based media and is not supported for
@@ -207,9 +211,14 @@ - (AVMutableVideoComposition *)getVideoCompositionWithTransform:(CGAffineTransfo
   }
   videoComposition.renderSize = CGSizeMake(width, height);
 
-  // TODO(@recastrodiaz): should we use videoTrack.nominalFrameRate ?
-  // Currently set at a constant 30 FPS
-  videoComposition.frameDuration = CMTimeMake(1, 30);
+  videoComposition.sourceTrackIDForFrameTiming = videoTrack.trackID;
+  if (CMTIME_IS_VALID(videoTrack.minFrameDuration)) {
+    videoComposition.frameDuration = videoTrack.minFrameDuration;
+  } else {
+    NSLog(@"Warning: videoTrack.minFrameDuration for input video is invalid, please report this to "
+          @"https://github.com/flutter/flutter/issues with the input video attached.");
+    videoComposition.frameDuration = CMTimeMake(1, 30);
+  }
 
   return videoComposition;
 }
@@ -239,7 +248,6 @@ - (void)observeValueForKeyPath:(NSString *)path
       case AVPlayerItemStatusReadyToPlay:
         [item addOutput:_videoOutput];
         [self setupEventSinkIfReadyToPlay];
-        [self updatePlayingState];
        break;
     }
   } else if (context == presentationSizeContext || context == durationContext) {
@@ -249,7 +257,6 @@ - (void)observeValueForKeyPath:(NSString *)path
       // its presentation size or duration. When these properties are finally set, re-check if
       // all required properties and instantiate the event sink if it is not already set up.
       [self setupEventSinkIfReadyToPlay];
-      [self updatePlayingState];
     }
   } else if (context == playbackLikelyToKeepUpContext) {
     [self updatePlayingState];
@@ -387,6 +394,8 @@ - (void)setupEventSinkIfReadyToPlay {
   }
 
   _isInitialized = YES;
+  [self updatePlayingState];
+
   _eventSink(@{
     @"event" : @"initialized",
     @"duration" : @(duration),
diff --git a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/FVPDisplayLink.h b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/FVPDisplayLink.h
index 80d400629b2..54e338993c4 100644
--- a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/FVPDisplayLink.h
+++ b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/FVPDisplayLink.h
@@ -18,6 +18,9 @@
 /// Defaults to NO.
 @property(nonatomic, assign) BOOL running;
 
+/// The time interval between screen refresh updates.
+@property(nonatomic, readonly) CFTimeInterval duration;
+
 /// Initializes a display link that calls the given callback when fired.
 ///
 /// The display link starts paused, so must be started, by setting 'running' to YES, before the
diff --git a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/FVPFrameUpdater.h b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/FVPFrameUpdater.h
index 9d5466d8757..274b666f287 100644
--- a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/FVPFrameUpdater.h
+++ b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/FVPFrameUpdater.h
@@ -2,7 +2,7 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#import <AVFoundation/AVFoundation.h>
+#import "FVPDisplayLink.h"
 
 #if TARGET_OS_OSX
 #import <FlutterMacOS/FlutterMacOS.h>
@@ -17,10 +17,13 @@ NS_ASSUME_NONNULL_BEGIN
 @interface FVPFrameUpdater : NSObject
 /// The texture identifier associated with the video output.
 @property(nonatomic) int64_t textureIdentifier;
-/// The output that this updater is managing.
-@property(nonatomic, weak) AVPlayerItemVideoOutput *videoOutput;
-/// The last time that has been validated as avaliable according to hasNewPixelBufferForItemTime:.
-@property(readonly, nonatomic, assign) CMTime lastKnownAvailableTime;
+/// The Flutter texture registry used to notify about new frames.
+@property(nonatomic, weak, readonly) NSObject<FlutterTextureRegistry> *registry;
+/// The display link that drives frameUpdater.
+@property(nonatomic) FVPDisplayLink *displayLink;
+/// The time interval between screen refresh updates. The display link duration is in an undefined
+/// state until displayLinkFired has been called at least once, so it should not be used directly.
+@property(atomic) CFTimeInterval frameDuration;
 
 /// Initializes a new instance of FVPFrameUpdater with the given Flutter texture registry.
 - (FVPFrameUpdater *)initWithRegistry:(NSObject<FlutterTextureRegistry> *)registry;
diff --git a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation_ios/FVPDisplayLink.m b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation_ios/FVPDisplayLink.m
index 9bdb321ae16..5ea6ea0135d 100644
--- a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation_ios/FVPDisplayLink.m
+++ b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation_ios/FVPDisplayLink.m
@@ -67,4 +67,8 @@ - (void)setRunning:(BOOL)running {
   self.displayLink.paused = !running;
 }
 
+- (CFTimeInterval)duration {
+  return self.displayLink.duration;
+}
+
 @end
diff --git a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation_macos/FVPDisplayLink.m b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation_macos/FVPDisplayLink.m
index cd5670fa5a3..cb9682e6011 100644
--- a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation_macos/FVPDisplayLink.m
+++ b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation_macos/FVPDisplayLink.m
@@ -81,4 +81,12 @@ - (void)setRunning:(BOOL)running {
   }
 }
 
+- (CFTimeInterval)duration {
+  CVTimeStamp timestamp = {.version = 0};
+  if (CVDisplayLinkGetCurrentTime(self.displayLink, &timestamp) != kCVReturnSuccess) {
+    return 0;
+  }
+  return (CFTimeInterval)timestamp.videoRefreshPeriod / timestamp.videoTimeScale;
+}
+
 @end
diff --git a/packages/video_player/video_player_avfoundation/pubspec.yaml b/packages/video_player/video_player_avfoundation/pubspec.yaml
index 3db089c7ccf..20087db42cc 100644
--- a/packages/video_player/video_player_avfoundation/pubspec.yaml
+++ b/packages/video_player/video_player_avfoundation/pubspec.yaml
@@ -2,7 +2,7 @@ name: video_player_avfoundation
 description: iOS and macOS implementation of the video_player plugin.
 repository: https://github.com/flutter/packages/tree/main/packages/video_player/video_player_avfoundation
 issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+video_player%22
-version: 2.7.0
+version: 2.7.1
 
 environment:
   sdk: ^3.4.0