我有一个处理AVPlayer(和AVPlayerItem)的类,它向状态,时间和timedMetadata报告委托.
大部分情况下工作良好,但大约70-80%的时间里,最初的timedMetadata没有被键值观察(KVO)捕获到.不过,在错过第一个timedMetadata实例之后,其余所有的timedMetadata似乎都能被正常观察到.
作为临时解决方案,我已经开始在视频开头嵌入一个虚拟的timedMetadata标签,它除了"试探性地触发一次KVO回调"(原文:kick the tires)之外什么都不做,之后一切正常.然而,这种做法显得非常笨拙.我怀疑要么是我以次优的方式设置了AVPlayerItem和KVO,要么这里确实存在一个bug.
关于为什么会发生这种情况的任何想法都非常感谢!代码如下……
// CL: Unique KVO contexts. Each static's own address serves as its value,
// the idiomatic way to get a context pointer that can never collide with
// one used by a superclass.
static void *ItemStatusContext = &ItemStatusContext;
static void *ItemMetadataContext = &ItemMetadataContext;
static void *ItemPlaybackForcastContext = &ItemPlaybackForcastContext;

/// Designated initializer. Asynchronously loads the asset's tracks and
/// duration, then (on the main queue) builds the AVPlayerItem/AVPlayer,
/// registers KVO + end-of-playback notification, hands the player to the
/// delegate, and installs the per-frame clock observer.
/// @param url The media URL to play. Passed straight to AVURLAsset.
- (id)initWithURL:(NSURL *)url {
    if (self = [super init]) {
        __weak TFPAVController *_self = self;
        AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil];
        NSString *tracksKey = @"tracks";
        // Load @"duration" alongside @"tracks": item.duration is read below,
        // and without an explicit load it may still be kCMTimeIndefinite.
        [asset loadValuesAsynchronouslyForKeys:@[tracksKey, @"duration"] completionHandler:^{
            dispatch_async(dispatch_get_main_queue(), ^{
                NSError *error = nil;
                AVKeyValueStatus status = [asset statusOfValueForKey:tracksKey error:&error];
                if (status != AVKeyValueStatusLoaded) {
                    NSLog(@"The asset's tracks were not loaded: %@", error.localizedDescription);
                    return;
                }

                AVPlayerItem *item = [AVPlayerItem playerItemWithAsset:asset];

                [item addObserver:_self
                       forKeyPath:@"status"
                          options:NSKeyValueObservingOptionNew
                          context:ItemStatusContext];
                // BUG FIX for the missed first timedMetadata: with options:0 a
                // change that lands before/while registration completes is simply
                // lost. NSKeyValueObservingOptionInitial forces one callback
                // immediately upon registration (reporting metadata that is
                // already present), and ...New puts the value in the change dict.
                [item addObserver:_self
                       forKeyPath:@"timedMetadata"
                          options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew)
                          context:ItemMetadataContext];
                [item addObserver:_self
                       forKeyPath:@"playbackLikelyToKeepUp"
                          options:NSKeyValueObservingOptionNew
                          context:ItemPlaybackForcastContext];

                [[NSNotificationCenter defaultCenter] addObserver:_self
                                                         selector:@selector(playerItemDidReachEnd:)
                                                             name:AVPlayerItemDidPlayToEndTimeNotification
                                                           object:item];

                AVPlayer *player = [AVPlayer playerWithPlayerItem:item];
                _self.totalRunTime = CMTimeGetSeconds(item.duration);
                [_self.delegate avPlayerNeedsView:player];
                _self.playerItem = item;
                _self.player = player;

                // BUG FIX: previously this ran even on the failure path, where
                // _self.player was still nil. Install the periodic observer only
                // once the player actually exists.
                _self.playerObserver =
                    [_self.player addPeriodicTimeObserverForInterval:CMTimeMake(1, _FrameRate_)
                                                               queue:NULL
                                                          usingBlock:^(CMTime time) {
                        _self.currentVideoTime = CMTimeGetSeconds([_self.playerItem currentTime]);
                    }];
            });
        }];
    }
    return self;
}

#pragma mark - KVO Response Methods

/// Routes KVO callbacks by context pointer and bounces the work onto the
/// main queue. Unrecognized contexts are forwarded to super, as the KVO
/// contract requires.
- (void)observeValueForKeyPath:(NSString *)keyPath
                      ofObject:(id)object
                        change:(NSDictionary *)change
                       context:(void *)context {
    __weak TFPAVController *_self = self;

    if (context == ItemStatusContext) {
        dispatch_async(dispatch_get_main_queue(), ^{
            if (((AVPlayerItem *)object).status == AVPlayerItemStatusReadyToPlay) {
                [_self.delegate videoIsLoadedInPlayer:_self];
            }
        });
        return;
    }

    if (context == ItemMetadataContext) {
        dispatch_async(dispatch_get_main_queue(), ^{
            [_self checkMetaDataForPlayerItem:(AVPlayerItem *)object];
        });
        return;
    }

    if (context == ItemPlaybackForcastContext) {
        dispatch_async(dispatch_get_main_queue(), ^{
            AVPlayerItem *playerItem = object;
            // Ignore buffering-state churn before playback has begun.
            if (CMTimeGetSeconds([playerItem currentTime]) <= 0) return;
            NSDictionary *info = @{kAVPlayerStateKey : @(playerItem.playbackLikelyToKeepUp)};
            [[NSNotificationCenter defaultCenter] postNotificationName:kAVPlayerNotification
                                                                object:_self
                                                              userInfo:info];
        });
        return;
    }

    [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
}

/// Collates the item's current timedMetadata into a key→value dictionary and
/// hands an immutable copy to the delegate. Does nothing when no metadata is
/// present or no entry has both a key and a value.
/// @param item The player item whose timedMetadata should be reported.
- (void)checkMetaDataForPlayerItem:(AVPlayerItem *)item {
    NSArray *timedMetadata = item.timedMetadata;
    if (timedMetadata.count == 0) return; // nil-safe: messaging nil yields 0

    NSMutableDictionary *metaDict = [NSMutableDictionary dictionaryWithCapacity:timedMetadata.count];
    for (AVMetadataItem *metadataItem in timedMetadata) {
        id<NSCopying> key = metadataItem.key;
        id value = metadataItem.value;
        // BUG FIX: -setObject:forKey: throws on a nil key or value; AVMetadataItem
        // does not guarantee either is non-nil.
        if (key != nil && value != nil) {
            metaDict[key] = value;
        }
    }
    if (metaDict.count > 0) {
        [self.delegate parseNewMetaData:[metaDict copy]];
    }
}