// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#import "CachedVideoPlayerPlugin.h"
#import <AVFoundation/AVFoundation.h>
#import <GLKit/GLKit.h>
#import "messages.h"
#import <KTVHTTPCache/KTVHTTPCache.h>

#if !__has_feature(objc_arc)
#error Code Requires ARC.
#endif

// Converts a CMTime to integer milliseconds. A zero timescale marks an
// invalid time, so return 0 rather than dividing by zero.
int64_t CachedCMTimeToMillis(CMTime time) {
  if (time.timescale == 0) return 0;
  return time.value * 1000 / time.timescale;
}

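// CachedFrameUpdater forwards CADisplayLink vsync ticks to the Flutter
// texture registry, notifying the engine that the texture identified by
// textureId has a new frame available to copy.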
@interface CachedFrameUpdater : NSObject
@property(nonatomic) int64_t textureId;
@property(nonatomic, weak, readonly) NSObject<FlutterTextureRegistry>* registry;
- (void)onDisplayLink:(CADisplayLink*)link;
@end

@implementation CachedFrameUpdater
- (CachedFrameUpdater*)initWithRegistry:(NSObject<FlutterTextureRegistry>*)registry {
  self = [super init];
  NSAssert(self, @"super init cannot be nil");
  if (self == nil) return nil;
  _registry = registry;
  return self;
}

- (void)onDisplayLink:(CADisplayLink*)link {
  [_registry textureFrameAvailable:_textureId];
}
@end

@interface CachedVideoPlayer : NSObject <FlutterTexture, FlutterStreamHandler>
@property(readonly, nonatomic) AVPlayer* player;
@property(readonly, nonatomic) AVPlayerItemVideoOutput* videoOutput;
@property(readonly, nonatomic) CADisplayLink* displayLink;
@property(nonatomic) FlutterEventChannel* eventChannel;
@property(nonatomic) FlutterEventSink eventSink;
@property(nonatomic) CGAffineTransform preferredTransform;
@property(nonatomic, readonly) bool disposed;
@property(nonatomic, readonly) bool isPlaying;
@property(nonatomic) bool isLooping;
@property(nonatomic, readonly) bool isInitialized;
- (instancetype)initWithURL:(NSURL*)url
               frameUpdater:(CachedFrameUpdater*)frameUpdater
                httpHeaders:(NSDictionary<NSString*, NSString*>*)headers;
- (void)play;
- (void)pause;
- (void)setIsLooping:(bool)isLooping;
- (void)updatePlayingState;
@end

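// Each KVO registration below passes one of these unique context pointers so
// that -observeValueForKeyPath:ofObject:change:context: can dispatch on
// pointer identity instead of string-comparing key paths.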
static void* timeRangeContext = &timeRangeContext;
static void* statusContext = &statusContext;
static void* playbackLikelyToKeepUpContext = &playbackLikelyToKeepUpContext;
static void* playbackBufferEmptyContext = &playbackBufferEmptyContext;
static void* playbackBufferFullContext = &playbackBufferFullContext;

@implementation CachedVideoPlayer
- (instancetype)initWithAsset:(NSString*)asset frameUpdater:(CachedFrameUpdater*)frameUpdater {
  NSString* path = [[NSBundle mainBundle] pathForResource:asset ofType:nil];
  return [self initWithURL:[NSURL fileURLWithPath:path] frameUpdater:frameUpdater httpHeaders:nil];
}

- (void)addObservers:(AVPlayerItem*)item {
  [item addObserver:self
         forKeyPath:@"loadedTimeRanges"
            options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
            context:timeRangeContext];
  [item addObserver:self
         forKeyPath:@"status"
            options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
            context:statusContext];
  [item addObserver:self
         forKeyPath:@"playbackLikelyToKeepUp"
            options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
            context:playbackLikelyToKeepUpContext];
  [item addObserver:self
         forKeyPath:@"playbackBufferEmpty"
            options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
            context:playbackBufferEmptyContext];
  [item addObserver:self
         forKeyPath:@"playbackBufferFull"
            options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
            context:playbackBufferFullContext];

  // Add an observer that will respond to itemDidPlayToEndTime.
  [[NSNotificationCenter defaultCenter] addObserver:self
                                           selector:@selector(itemDidPlayToEndTime:)
                                               name:AVPlayerItemDidPlayToEndTimeNotification
                                             object:item];
}

- (void)itemDidPlayToEndTime:(NSNotification*)notification {
  if (_isLooping) {
    AVPlayerItem* p = [notification object];
    [p seekToTime:kCMTimeZero completionHandler:nil];
  } else {
    if (_eventSink) {
      _eventSink(@{@"event" : @"completed"});
    }
  }
}

static inline CGFloat radiansToDegrees(CGFloat radians) {
  // Input is in [-pi, pi], i.e. [-180, 180] degrees.
  CGFloat degrees = GLKMathRadiansToDegrees((float)radians);
  if (degrees < 0) {
    // Map negative angles into the positive range, e.g. -90 -> 270, -180 -> 180.
    return degrees + 360;
  }
  // Output is in [0, 360).
  return degrees;
}

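// Builds a video composition that bakes preferredTransform into rendering so
// rotated recordings play upright, swapping the render width and height for
// 90/270-degree rotations.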
- (AVMutableVideoComposition*)getVideoCompositionWithTransform:(CGAffineTransform)transform
                                                     withAsset:(AVAsset*)asset
                                                withVideoTrack:(AVAssetTrack*)videoTrack {
  AVMutableVideoCompositionInstruction* instruction =
      [AVMutableVideoCompositionInstruction videoCompositionInstruction];
  instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [asset duration]);
  AVMutableVideoCompositionLayerInstruction* layerInstruction =
      [AVMutableVideoCompositionLayerInstruction
          videoCompositionLayerInstructionWithAssetTrack:videoTrack];
  [layerInstruction setTransform:_preferredTransform atTime:kCMTimeZero];

  AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
  instruction.layerInstructions = @[ layerInstruction ];
  videoComposition.instructions = @[ instruction ];

  // If in portrait mode, switch the width and height of the video.
  CGFloat width = videoTrack.naturalSize.width;
  CGFloat height = videoTrack.naturalSize.height;
  NSInteger rotationDegrees =
      (NSInteger)round(radiansToDegrees(atan2(_preferredTransform.b, _preferredTransform.a)));
  if (rotationDegrees == 90 || rotationDegrees == 270) {
    width = videoTrack.naturalSize.height;
    height = videoTrack.naturalSize.width;
  }
  videoComposition.renderSize = CGSizeMake(width, height);

  // TODO(@recastrodiaz): should we use videoTrack.nominalFrameRate?
  // Currently set at a constant 30 FPS.
  videoComposition.frameDuration = CMTimeMake(1, 30);

  return videoComposition;
}

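// Creates the BGRA pixel-buffer output that Flutter's texture pipeline
// consumes, plus the display link that drives frame copies; the link stays
// paused until -updatePlayingState starts playback.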
- (void)createVideoOutputAndDisplayLink:(CachedFrameUpdater*)frameUpdater {
  NSDictionary* pixBuffAttributes = @{
    (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
    (id)kCVPixelBufferIOSurfacePropertiesKey : @{}
  };
  _videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:pixBuffAttributes];

  _displayLink = [CADisplayLink displayLinkWithTarget:frameUpdater
                                             selector:@selector(onDisplayLink:)];
  [_displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSRunLoopCommonModes];
  _displayLink.paused = YES;
}

- (instancetype)initWithURL:(NSURL*)url
               frameUpdater:(CachedFrameUpdater*)frameUpdater
                httpHeaders:(NSDictionary<NSString*, NSString*>*)headers {
  NSDictionary<NSString*, id>* options = nil;
  if (headers != nil && [headers count] != 0) {
    options = @{@"AVURLAssetHTTPHeaderFieldsKey" : headers};
  }
  AVURLAsset* urlAsset = [AVURLAsset URLAssetWithURL:url options:options];
  AVPlayerItem* item = [AVPlayerItem playerItemWithAsset:urlAsset];
  return [self initWithPlayerItem:item frameUpdater:frameUpdater];
}

- (CGAffineTransform)fixTransform:(AVAssetTrack*)videoTrack {
  CGAffineTransform transform = videoTrack.preferredTransform;
  // TODO(@recastrodiaz): why do we need to do this? Why is the preferredTransform incorrect?
  // At least 2 user videos show a black screen when in portrait mode if we directly use the
  // videoTrack.preferredTransform. Setting tx to the height of the video instead of 0 displays
  // the video properly: https://github.com/flutter/flutter/issues/17606#issuecomment-413473181
  if (transform.tx == 0 && transform.ty == 0) {
    NSInteger rotationDegrees = (NSInteger)round(radiansToDegrees(atan2(transform.b, transform.a)));
    NSLog(@"TX and TY are 0. Rotation: %ld. Natural width,height: %f, %f", (long)rotationDegrees,
          videoTrack.naturalSize.width, videoTrack.naturalSize.height);
    if (rotationDegrees == 90) {
      NSLog(@"Setting transform tx");
      transform.tx = videoTrack.naturalSize.height;
      transform.ty = 0;
    } else if (rotationDegrees == 270) {
      NSLog(@"Setting transform ty");
      transform.tx = 0;
      transform.ty = videoTrack.naturalSize.width;
    }
  }
  return transform;
}

- (instancetype)initWithPlayerItem:(AVPlayerItem*)item frameUpdater:(CachedFrameUpdater*)frameUpdater {
  self = [super init];
  NSAssert(self, @"super init cannot be nil");
  _isInitialized = false;
  _isPlaying = false;
  _disposed = false;

  AVAsset* asset = [item asset];
  void (^assetCompletionHandler)(void) = ^{
    if ([asset statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
      NSArray* tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
      if ([tracks count] > 0) {
        AVAssetTrack* videoTrack = tracks[0];
        void (^trackCompletionHandler)(void) = ^{
          if (self->_disposed) return;
          if ([videoTrack statusOfValueForKey:@"preferredTransform"
                                        error:nil] == AVKeyValueStatusLoaded) {
            // Rotate the video by using a videoComposition and the preferredTransform.
            self->_preferredTransform = [self fixTransform:videoTrack];
            // Note:
            // https://developer.apple.com/documentation/avfoundation/avplayeritem/1388818-videocomposition
            // Video composition can only be used with file-based media and is not supported for
            // use with media served using HTTP Live Streaming.
            AVMutableVideoComposition* videoComposition =
                [self getVideoCompositionWithTransform:self->_preferredTransform
                                             withAsset:asset
                                        withVideoTrack:videoTrack];
            item.videoComposition = videoComposition;
          }
        };
        [videoTrack loadValuesAsynchronouslyForKeys:@[ @"preferredTransform" ]
                                  completionHandler:trackCompletionHandler];
      }
    }
  };

  _player = [AVPlayer playerWithPlayerItem:item];
  _player.actionAtItemEnd = AVPlayerActionAtItemEndNone;

  [self createVideoOutputAndDisplayLink:frameUpdater];

  [self addObservers:item];

  [asset loadValuesAsynchronouslyForKeys:@[ @"tracks" ] completionHandler:assetCompletionHandler];

  return self;
}

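// KVO dispatch. Events forwarded to the Dart side through _eventSink include,
// for example:
//   timeRangeContext           -> {'event': 'bufferingUpdate', 'values': [[start, end], ...]}
//   playbackBufferEmptyContext -> {'event': 'bufferingStart'}
//   playbackBufferFullContext  -> {'event': 'bufferingEnd'}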
- (void)observeValueForKeyPath:(NSString*)path
                      ofObject:(id)object
                        change:(NSDictionary*)change
                       context:(void*)context {
  if (context == timeRangeContext) {
    if (_eventSink != nil) {
      NSMutableArray<NSArray<NSNumber*>*>* values = [[NSMutableArray alloc] init];
      for (NSValue* rangeValue in [object loadedTimeRanges]) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        int64_t start = CachedCMTimeToMillis(range.start);
        [values addObject:@[ @(start), @(start + CachedCMTimeToMillis(range.duration)) ]];
      }
      _eventSink(@{@"event" : @"bufferingUpdate", @"values" : values});
    }
  } else if (context == statusContext) {
    AVPlayerItem* item = (AVPlayerItem*)object;
    switch (item.status) {
      case AVPlayerItemStatusFailed:
        if (_eventSink != nil) {
          _eventSink([FlutterError
              errorWithCode:@"VideoError"
                    message:[@"Failed to load video: "
                                stringByAppendingString:[item.error localizedDescription]]
                    details:nil]);
        }
        break;
      case AVPlayerItemStatusUnknown:
        break;
      case AVPlayerItemStatusReadyToPlay:
        [item addOutput:_videoOutput];
        [self sendInitialized];
        [self updatePlayingState];
        break;
    }
  } else if (context == playbackLikelyToKeepUpContext) {
    if ([[_player currentItem] isPlaybackLikelyToKeepUp]) {
      [self updatePlayingState];
      if (_eventSink != nil) {
        _eventSink(@{@"event" : @"bufferingEnd"});
      }
    }
  } else if (context == playbackBufferEmptyContext) {
    if (_eventSink != nil) {
      _eventSink(@{@"event" : @"bufferingStart"});
    }
  } else if (context == playbackBufferFullContext) {
    if (_eventSink != nil) {
      _eventSink(@{@"event" : @"bufferingEnd"});
    }
  }
}

- (void)updatePlayingState {
  if (!_isInitialized) {
    return;
  }
  if (_isPlaying) {
    [_player play];
  } else {
    [_player pause];
  }
  _displayLink.paused = !_isPlaying;
}

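// Emits the one-shot 'initialized' event (duration, width, height) once the
// presentation size and duration are actually known; bails out until then.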
- (void)sendInitialized {
  if (_eventSink && !_isInitialized) {
    CGSize size = [self.player currentItem].presentationSize;
    CGFloat width = size.width;
    CGFloat height = size.height;

    // The player has not yet initialized.
    if (height == CGSizeZero.height && width == CGSizeZero.width) {
      return;
    }
    // The player may be initialized but still needs to determine the duration.
    if ([self duration] == 0) {
      return;
    }

    _isInitialized = true;
    _eventSink(@{
      @"event" : @"initialized",
      @"duration" : @([self duration]),
      @"width" : @(width),
      @"height" : @(height)
    });
  }
}

- (void)play {
  _isPlaying = true;
  [self updatePlayingState];
}

- (void)pause {
  _isPlaying = false;
  [self updatePlayingState];
}

- (int64_t)position {
  return CachedCMTimeToMillis([_player currentTime]);
}

- (int64_t)duration {
  return CachedCMTimeToMillis([[_player currentItem] duration]);
}

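// `location` is in milliseconds, mirroring CachedCMTimeToMillis; a timescale
// of 1000 turns it back into a CMTime, and the zero tolerances request an
// exact rather than approximate seek.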
- (void)seekTo:(int)location {
  [_player seekToTime:CMTimeMake(location, 1000)
      toleranceBefore:kCMTimeZero
       toleranceAfter:kCMTimeZero];
}

- (void)setIsLooping:(bool)isLooping {
  _isLooping = isLooping;
}

- (void)setVolume:(double)volume {
  // Clamp to the valid [0.0, 1.0] range.
  _player.volume = (float)MAX(0.0, MIN(1.0, volume));
}

- (void)setPlaybackSpeed:(double)speed {
  // See https://developer.apple.com/library/archive/qa/qa1772/_index.html for an explanation of
  // these checks.
  if (speed > 2.0 && !_player.currentItem.canPlayFastForward) {
    if (_eventSink != nil) {
      _eventSink([FlutterError errorWithCode:@"VideoError"
                                     message:@"Video cannot be fast-forwarded beyond 2.0x"
                                     details:nil]);
    }
    return;
  }

  if (speed < 1.0 && !_player.currentItem.canPlaySlowForward) {
    if (_eventSink != nil) {
      _eventSink([FlutterError errorWithCode:@"VideoError"
                                     message:@"Video cannot be slow-forwarded"
                                     details:nil]);
    }
    return;
  }

  _player.rate = speed;
}

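// FlutterTexture protocol method, called by the engine in response to
// textureFrameAvailable:. Per the Cocoa "copy" naming convention, ownership
// of the returned pixel buffer transfers to the caller, which releases it.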
- (CVPixelBufferRef)copyPixelBuffer {
  CMTime outputItemTime = [_videoOutput itemTimeForHostTime:CACurrentMediaTime()];
  if ([_videoOutput hasNewPixelBufferForItemTime:outputItemTime]) {
    return [_videoOutput copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL];
  } else {
    return NULL;
  }
}

- (void)onTextureUnregistered:(NSObject<FlutterTexture>*)texture {
  dispatch_async(dispatch_get_main_queue(), ^{
    [self dispose];
  });
}

- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments {
  _eventSink = nil;
  return nil;
}

- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments
                                       eventSink:(nonnull FlutterEventSink)events {
  _eventSink = events;
  // TODO(@recastrodiaz): remove the line below when the race condition is resolved:
  // https://github.com/flutter/flutter/issues/21483
  // This line ensures the 'initialized' event is still sent when
  // 'AVPlayerItemStatusReadyToPlay' fires before _eventSink is set (i.e. before
  // this method, onListenWithArguments, is called).
  [self sendInitialized];
  return nil;
}

/// This method allows you to dispose without touching the event channel. This
/// is useful for the case where the Engine is in the process of deconstruction
/// so the channel is going to die or is already dead.
- (void)disposeSansEventChannel {
  _disposed = true;
  [_displayLink invalidate];
  [[_player currentItem] removeObserver:self forKeyPath:@"status" context:statusContext];
  [[_player currentItem] removeObserver:self
                             forKeyPath:@"loadedTimeRanges"
                                context:timeRangeContext];
  [[_player currentItem] removeObserver:self
                             forKeyPath:@"playbackLikelyToKeepUp"
                                context:playbackLikelyToKeepUpContext];
  [[_player currentItem] removeObserver:self
                             forKeyPath:@"playbackBufferEmpty"
                                context:playbackBufferEmptyContext];
  [[_player currentItem] removeObserver:self
                             forKeyPath:@"playbackBufferFull"
                                context:playbackBufferFullContext];
  [_player replaceCurrentItemWithPlayerItem:nil];
  [[NSNotificationCenter defaultCenter] removeObserver:self];
}

- (void)dispose {
  [self disposeSansEventChannel];
  [_eventChannel setStreamHandler:nil];
}

@end

@interface CachedVideoPlayerPlugin () <CachedVideoPlayerApi>
@property(readonly, weak, nonatomic) NSObject<FlutterTextureRegistry>* registry;
@property(readonly, weak, nonatomic) NSObject<FlutterBinaryMessenger>* messenger;
@property(readonly, strong, nonatomic) NSMutableDictionary* players;
@property(readonly, strong, nonatomic) NSObject<FlutterPluginRegistrar>* registrar;
@end

@implementation CachedVideoPlayerPlugin
+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
  CachedVideoPlayerPlugin* instance = [[CachedVideoPlayerPlugin alloc] initWithRegistrar:registrar];
  [registrar publish:instance];
  CachedVideoPlayerApiSetup(registrar.messenger, instance);
}

- (instancetype)initWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
  self = [super init];
  NSAssert(self, @"super init cannot be nil");
  // Start the local KTVHTTPCache proxy server that backs video caching.
  [KTVHTTPCache proxyStart:nil];
  _registry = [registrar textures];
  _messenger = [registrar messenger];
  _registrar = registrar;
  _players = [NSMutableDictionary dictionaryWithCapacity:1];
  return self;
}

- (void)detachFromEngineForRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
  for (NSNumber* textureId in _players.allKeys) {
    CachedVideoPlayer* player = _players[textureId];
    [player disposeSansEventChannel];
  }
  [_players removeAllObjects];
  // TODO(57151): Uncomment the line below once 57151's fix lands on stable.
  // Clearing the API handler is the correct behavior, but we never did it in
  // the past and the engine doesn't currently support it.
  // CachedVideoPlayerApiSetup(registrar.messenger, nil);
}

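// Registers the player as a Flutter external texture and wires up the
// per-texture event channel ("flutter.io/videoPlayer/videoEvents<textureId>")
// that streams playback events back to the Dart side.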
- (CachedTextureMessage*)onPlayerSetup:(CachedVideoPlayer*)player
                          frameUpdater:(CachedFrameUpdater*)frameUpdater {
  int64_t textureId = [_registry registerTexture:player];
  frameUpdater.textureId = textureId;
  FlutterEventChannel* eventChannel = [FlutterEventChannel
      eventChannelWithName:[NSString stringWithFormat:@"flutter.io/videoPlayer/videoEvents%lld",
                                                      textureId]
           binaryMessenger:_messenger];
  [eventChannel setStreamHandler:player];
  player.eventChannel = eventChannel;
  _players[@(textureId)] = player;
  CachedTextureMessage* result = [[CachedTextureMessage alloc] init];
  result.textureId = @(textureId);
  return result;
}

- (void)initialize:(FlutterError* __autoreleasing*)error {
  // Allow audio playback when the Ring/Silent switch is set to silent.
  [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:nil];

  for (NSNumber* textureId in _players) {
    [_registry unregisterTexture:[textureId unsignedIntegerValue]];
    [_players[textureId] dispose];
  }
  [_players removeAllObjects];
}

- (CachedTextureMessage*)create:(CachedCreateMessage*)input error:(FlutterError**)error {
  CachedFrameUpdater* frameUpdater = [[CachedFrameUpdater alloc] initWithRegistry:_registry];
  CachedVideoPlayer* player;
  if (input.asset) {
    NSString* assetPath;
    if (input.packageName) {
      assetPath = [_registrar lookupKeyForAsset:input.asset fromPackage:input.packageName];
    } else {
      assetPath = [_registrar lookupKeyForAsset:input.asset];
    }
    player = [[CachedVideoPlayer alloc] initWithAsset:assetPath frameUpdater:frameUpdater];
    return [self onPlayerSetup:player frameUpdater:frameUpdater];
  } else if (input.uri) {
    // Route everything except HLS through the KTVHTTPCache proxy so the
    // downloaded bytes are cached locally; HLS streams are played directly.
    NSURL* usedURL = [NSURL URLWithString:input.uri];
    if (![input.formatHint isEqualToString:@"hls"]) {
      usedURL = [KTVHTTPCache proxyURLWithOriginalURL:usedURL];
      [KTVHTTPCache downloadSetAdditionalHeaders:input.httpHeaders];
    }

    player = [[CachedVideoPlayer alloc] initWithURL:usedURL
                                       frameUpdater:frameUpdater
                                        httpHeaders:input.httpHeaders];
    return [self onPlayerSetup:player frameUpdater:frameUpdater];
  } else {
    *error = [FlutterError errorWithCode:@"video_player" message:@"not implemented" details:nil];
    return nil;
  }
}

- (void)dispose:(CachedTextureMessage*)input error:(FlutterError**)error {
  CachedVideoPlayer* player = _players[input.textureId];
  [_registry unregisterTexture:input.textureId.intValue];
  [_players removeObjectForKey:input.textureId];
  // If the Flutter engine contains https://github.com/flutter/engine/pull/12695,
  // the `player` is disposed via `onTextureUnregistered` at the right time.
  // Without https://github.com/flutter/engine/pull/12695, there is no guarantee that the
  // texture has completed unregistration. It may lead to a crash if we dispose the
  // `player` before the texture is unregistered. We add a dispatch_after hack to make sure the
  // texture is unregistered before we dispose the `player`.
  //
  // TODO(cyanglaz): Remove this dispatch block when
  // https://github.com/flutter/flutter/commit/8159a9906095efc9af8b223f5e232cb63542ad0b is in
  // stable, and update the min Flutter version of the plugin to that stable version.
  dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1 * NSEC_PER_SEC)),
                 dispatch_get_main_queue(), ^{
                   if (!player.disposed) {
                     [player dispose];
                   }
                 });
}

- (void)setLooping:(CachedLoopingMessage*)input error:(FlutterError**)error {
  CachedVideoPlayer* player = _players[input.textureId];
  [player setIsLooping:[input.isLooping boolValue]];
}

- (void)setVolume:(CachedVolumeMessage*)input error:(FlutterError**)error {
  CachedVideoPlayer* player = _players[input.textureId];
  [player setVolume:[input.volume doubleValue]];
}

- (void)setPlaybackSpeed:(CachedPlaybackSpeedMessage*)input error:(FlutterError**)error {
  CachedVideoPlayer* player = _players[input.textureId];
  [player setPlaybackSpeed:[input.speed doubleValue]];
}

- (void)play:(CachedTextureMessage*)input error:(FlutterError**)error {
  CachedVideoPlayer* player = _players[input.textureId];
  [player play];
}

- (CachedPositionMessage*)position:(CachedTextureMessage*)input error:(FlutterError**)error {
  CachedVideoPlayer* player = _players[input.textureId];
  CachedPositionMessage* result = [[CachedPositionMessage alloc] init];
  result.position = @([player position]);
  return result;
}

- (void)seekTo:(CachedPositionMessage*)input error:(FlutterError**)error {
  CachedVideoPlayer* player = _players[input.textureId];
  [player seekTo:[input.position intValue]];
}

- (void)pause:(CachedTextureMessage*)input error:(FlutterError**)error {
  CachedVideoPlayer* player = _players[input.textureId];
  [player pause];
}

- (void)setMixWithOthers:(CachedMixWithOthersMessage*)input
                   error:(FlutterError* _Nullable __autoreleasing*)error {
  if ([input.mixWithOthers boolValue]) {
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback
                                     withOptions:AVAudioSessionCategoryOptionMixWithOthers
                                           error:nil];
  } else {
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:nil];
  }
}

@end