iOS – Why does video recorded with AVCaptureSession and AVAssetWriter and replayed with AVPlayer stutter?

I am trying to build an application that shows a camera preview, then, under certain conditions, starts recording it together with the audio input, and finally replays the recorded movie.

I have written one class each for preview, recording and replay, plus a controller that coordinates them.

Each of these features seems to work fine on its own, but I cannot get them to work together: when the recording is played back, the sound starts right away, while the image takes about five seconds to appear and then stutters.

Here is my code:

Preview:

- (void) createSession
{
    _session = [[AVCaptureSession alloc] init];

    AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:FRONT_CAMERA_ID];
    if (!device) device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    NSError *error = nil;
    _cVideoInput = [[AVCaptureDeviceInput deviceInputWithDevice:device error:&error] retain];
    if (!error) [_session addInput:_cVideoInput];

    device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];

    error = nil;
    _cAudioInput = [[AVCaptureDeviceInput deviceInputWithDevice:device error:&error] retain];
    if (!error) [_session addInput:_cAudioInput];

    _cameraLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_session];
    _cameraLayer.frame = self.bounds;
    [self.layer addSublayer:_cameraLayer];

    _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    [_session setSessionPreset:AVCaptureSessionPreset640x480];
    [_videoOutput setVideoSettings:[NSDictionary dictionaryWithContentsOfFile:VIDEO_SETTINGS]];

    _audioOutput = [[AVCaptureAudioDataOutput alloc] init];

    dispatch_queue_t queue = dispatch_queue_create(OUTPUT_QUEUE_NAME, NULL);
    [_videoOutput setSampleBufferDelegate:self queue:queue];
    [_session addOutput:_videoOutput];

    [_audioOutput setSampleBufferDelegate:self queue:queue];
    [_session addOutput:_audioOutput];

    dispatch_set_context(queue, self);
    dispatch_set_finalizer_f(queue, queue_finalizer);

    dispatch_release(queue);

    [_session startRunning];
}

- (void) deleteSession
{
    [_session stopRunning];

    [(AVCaptureVideoPreviewLayer *)_cameraLayer setSession:nil];
    [_cameraLayer removeFromSuperlayer];
    [_cameraLayer release];
    _cameraLayer = nil;

    [_audioOutput setSampleBufferDelegate:nil queue:NULL];
    [_videoOutput setSampleBufferDelegate:nil queue:NULL];

    [_audioOutput release];
    _audioOutput = nil;

    [_videoOutput release];
    _videoOutput = nil;

    [_cAudioInput release];
    _cAudioInput = nil;

    [_cVideoInput release];
    _cVideoInput = nil;

    NSArray *inputs = [_session inputs];
    for (AVCaptureInput *input in inputs)
         [_session removeInput:input];

    NSArray *outputs = [_session outputs];
    for (AVCaptureOutput *output in outputs)
        [_session removeOutput:output];

    [_session release];
    _session = nil;
}

Recording:

- (void) createWriter
{
    NSString *file = [self file];

    if ([[NSFileManager defaultManager] fileExistsAtPath:file])
        [[NSFileManager defaultManager] removeItemAtPath:file error:NULL];

    NSError *error = nil;
    _writer = [[AVAssetWriter assetWriterWithURL:[NSURL fileURLWithPath:file] fileType:AVFileTypeQuickTimeMovie error:&error] retain];

    if (error)
    {
        [_writer release];
        _writer = nil;

        NSLog(@"%@", error);
        return;
    }

    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

    NSDictionary *settings = [NSDictionary dictionaryWithObjectsAndKeys:
                              [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                              [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                              [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                              [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                              [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                              nil];

    _wAudioInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:settings] retain];

    [_writer addInput:_wAudioInput];

    settings = [NSDictionary dictionaryWithObjectsAndKeys:
                AVVideoCodecH264, AVVideoCodecKey,
                [NSNumber numberWithInt:640], AVVideoWidthKey,
                [NSNumber numberWithInt:480], AVVideoHeightKey,
                nil];

    _wVideoInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings] retain];

    [_writer addInput:_wVideoInput];
}

- (void) deleteWriter
{
    [_wVideoInput release];
    _wVideoInput = nil;

    [_wAudioInput release];
    _wAudioInput = nil;

    [_writer release];
    _writer = nil;
}

- (void) RecordingAudioWithBuffer:(CMSampleBufferRef)sampleBuffer
{
    if (![self canRecordBuffer:sampleBuffer])
        return;

    if ([_wAudioInput isReadyForMoreMediaData])
        [_wAudioInput appendSampleBuffer:sampleBuffer];
}

- (void) RecordingVideoWithBuffer:(CMSampleBufferRef)sampleBuffer
{
    if (![self canRecordBuffer:sampleBuffer])
        return;

    if ([_wVideoInput isReadyForMoreMediaData])
        [_wVideoInput appendSampleBuffer:sampleBuffer];
}
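
The capture-side delegate callback that feeds these two methods is not shown in the question. Since both outputs share one delegate queue (see createSession above), the routing presumably looks like the minimal sketch below. canRecordBuffer: is also not shown, so the version here is a hypothetical reconstruction of the common pattern of starting the writer session at the first buffer's timestamp:

- (void) captureOutput:(AVCaptureOutput *)captureOutput
 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
        fromConnection:(AVCaptureConnection *)connection
{
    // Both outputs deliver on the same queue; route by output object.
    if (captureOutput == _videoOutput)
        [self RecordingVideoWithBuffer:sampleBuffer];
    else if (captureOutput == _audioOutput)
        [self RecordingAudioWithBuffer:sampleBuffer];
}

// Hypothetical sketch of canRecordBuffer: (not shown in the question).
// The usual shape is to start the writer session at the presentation
// timestamp of the first buffer that arrives while recording.
- (BOOL) canRecordBuffer:(CMSampleBufferRef)sampleBuffer
{
    if (_writer.status == AVAssetWriterStatusUnknown)
    {
        [_writer startWriting];
        [_writer startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
    }
    return _writer.status == AVAssetWriterStatusWriting;
}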

Playback:

- (void) observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    AVPlayerItem *item = (AVPlayerItem *)object;
    [item removeObserver:self forKeyPath:@"status"];

    switch (item.status) 
    {
        case AVPlayerItemStatusReadyToPlay:

            [_player seekToTime:kCMTimeZero];
            [_player play];

            [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(finishPlaying:) name:AVPlayerItemDidPlayToEndTimeNotification object:item];
            break;
        case AVPlayerItemStatusUnknown:
        case AVPlayerItemStatusFailed:
            break;
        default:
            break;
    }
}

- (void) finishPlaying:(NSNotification *)notification
{
    [_player pause];
    [_playerLayer removeFromSuperlayer];

    [_playerLayer release];
    _playerLayer = nil;

    [_player release];
    _player = nil;

    [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:nil];
}

- (void) play:(NSString *)path
{
    _player = [[AVPlayer playerWithURL:[NSURL fileURLWithPath:path]] retain];

    _playerLayer = [[AVPlayerLayer playerLayerWithPlayer:_player] retain];
    // Rotate 90 degrees about the z axis and mirror vertically.
    _playerLayer.transform = CATransform3DScale(CATransform3DMakeRotation(M_PI_2, 0.0f, 0.0f, 1.0f), 1.0f, -1.0f, 1.0f);
    _playerLayer.frame = self.bounds;
    [self.layer addSublayer:_playerLayer];

    [_player.currentItem addObserver:self forKeyPath:@"status" options:0 context:NULL];
}

Solution

I had a similar problem, and while this did not completely solve it, it did help a lot:
when the writer input is not ready for more data, instead of simply dropping the sample buffer, let the device wait a moment and then re-check whether the data can be written.

- (void) RecordingVideoWithBuffer:(CMSampleBufferRef)sampleBuffer
{
    if (![self canRecordBuffer:sampleBuffer])
        return;

    if (!_wVideoInput.readyForMoreMediaData && _isRecording)
    {
        // Retry shortly instead of dropping the frame. performSelector:
        // withObject:afterDelay: retains its object argument, so the buffer
        // stays alive until the delayed call fires; note that holding capture
        // buffers for too long can itself cause dropped frames, hence the
        // short delay.
        [self performSelector:@selector(RecordingVideoWithBuffer:) withObject:(__bridge id)(sampleBuffer) afterDelay:0.05];
        return;
    }

    [_wVideoInput appendSampleBuffer:sampleBuffer];
}
If you are not using ARC, plain sampleBuffer is all you need; under ARC you have to add the __bridge cast.
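
A minimal side-by-side of the two casts (same call and delay as above):

// MRC: a plain cast from CMSampleBufferRef to id compiles as-is.
[self performSelector:@selector(RecordingVideoWithBuffer:)
           withObject:(id)sampleBuffer
           afterDelay:0.05];

// ARC: CMSampleBufferRef is a Core Foundation type, so passing it where an
// Objective-C object is expected needs an explicit __bridge cast.
[self performSelector:@selector(RecordingVideoWithBuffer:)
           withObject:(__bridge id)sampleBuffer
           afterDelay:0.05];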

Edit: I use performSelector with an early return, rather than a while loop that sleeps with NSThread, because it is non-blocking. Blocking is bad.
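
For comparison, the blocking variant this avoids would look roughly like the sketch below; sleeping on the shared capture queue holds up every other sample buffer delivered on it:

// Blocking (avoided): spinning here stalls the capture queue, so audio
// buffers and further video frames back up behind this loop.
while (!_wVideoInput.readyForMoreMediaData)
    [NSThread sleepForTimeInterval:0.05];

[_wVideoInput appendSampleBuffer:sampleBuffer];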
