我正在尝试调整iOS广播扩展功能来录制视频而不是直播.
这似乎是可能的,因为你可以在processSampleBuffer:withType:方法中获得像素缓冲区.
这似乎是可能的,因为你可以在processSampleBuffer:withType:方法中获得像素缓冲区.
然后我编码如下,但在附加缓冲区时失败.
我对AVAssetWriter和App Extension编程都不熟悉,所以我无法弄清楚这里有什么问题.
我是否正在做一些我们不应该在扩展中做的事情?或者AVAssetWriter的用法是错误的?
任何想法都有帮助.谢谢!
// // SampleHandler.m // The main class of the App Extension // #import "SampleHandler.h" #import <AVFoundation/AVFoundation.h> @implementation SampleHandler { VideoExporter *exporter; NSDate *startDate; } - (void)broadcastStartedWithSetupInfo:(NSDictionary<NSString *,NSObject *> *)setupInfo { // User has requested to start the broadcast. Setup info from the UI extension can be supplied but optional. [self setupWriter]; } - (void)setupWriter { NSFileManager *fm = [NSFileManager defaultManager]; // Set video path into shared container NSURL *containerURL = [fm containerURLForSecurityApplicationGroupIdentifier:@"group.com.mycompany"]; NSURL *libraryURL = [containerURL URLByAppendingPathComponent:@"Library" isDirectory: true]; BOOL isDir = false; NSURL *cachesURL = [libraryURL URLByAppendingPathComponent:@"Caches" isDirectory: true]; NSURL *outVideoURL = [cachesURL URLByAppendingPathComponent:@"output.mov"]; if([[NSFileManager defaultManager] fileExistsAtPath:[outVideoURL path]]){ [[NSFileManager defaultManager] removeItemAtPath:[outVideoURL path] error:nil]; } exporter = [[VideoExporter alloc] initWithOutputURL:outVideoURL size:CGSizeMake(1280,720) frameRate:30]; exporter.delegate = self; [exporter beginExport]; startDate = [NSDate date]; } - (void)broadcastPaused { // User has requested to pause the broadcast. Samples will stop being delivered. } - (void)broadcastResumed { // User has requested to resume the broadcast. Samples delivery will resume. } - (void)broadcastFinished { // User has requested to finish the broadcast. NSLog(@"User requested finish writing"); [exporter finishWriting]; } - (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer withType:(RPSampleBufferType)sampleBufferType { switch (sampleBufferType) { case RPSampleBufferTypeVideo: // Handle video sample buffer [exporter addCMSampleBuffer:sampleBuffer]; break; case RPSampleBufferTypeAudioApp: // Handle audio sample buffer for app audio break; case RPSampleBufferTypeAudioMic: // Handle audio sample buffer for mic audio break; default: break; } } @end // // VideoExporter.m // Helper class to write the video // #import "VideoExporter.h" @implementation VideoExporter @synthesize width,height; @synthesize framesPerSecond; @synthesize outputURL; @synthesize delegate; - (id)initWithOutputURL:(NSURL *)aURL size:(CGSize)size frameRate:(uint64_t)fps { if ((self = [super init])) { width = (int)round(size.width); height = (int)round(size.height); framesPerSecond = fps; outputURL = aURL; } return self; } - (void)beginExport { NSError *error = nil; writer = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:&error]; NSAssert(writer != nil,@"Writer should not be nil"); NSDictionary * outSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecTypeH264,AVVideoCodecKey,[NSNumber numberWithInt:width],AVVideoWidthKey,[NSNumber numberWithInt:height],AVVideoHeightKey,nil]; writerInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:outSettings]; NSDictionary * pixelAttributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange],kCVPixelBufferPixelFormatTypeKey,nil]; adaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:writerInput sourcePixelBufferAttributes:pixelAttributes]; [writer addInput:writerInput]; BOOL started = [writer startWriting]; NSAssert(started,@"Should start writing!"); [writer startSessionAtSourceTime:kCMTimeZero]; } - (void)addCMSampleBuffer:(CMSampleBufferRef)buf { if(writer.status==0) { NSLog(@"Writer status unknown!!"); } [self appendCMSampleBuffer:buf]; } - (void)finishWriting { [writerInput markAsFinished]; dispatch_semaphore_t semaphore = dispatch_semaphore_create(0); int64_t longDuration = 1000; CMTime cmTime = CMTimeMake(longDuration,1); [writer endSessionAtSourceTime:cmTime]; [writer finishWritingWithCompletionHandler:^{ // Call delegate method here dispatch_semaphore_signal(semaphore); }]; dispatch_semaphore_wait(semaphore,DISPATCH_TIME_FOREVER); } #pragma mark - Private - - (void)appendCMSampleBuffer:(CMSampleBufferRef)bufferRef { if(![writerInput isReadyForMoreMediaData]) { NSLog(@"WriterInput not ready! status = %ld,error=%@",(long)writer.status,writer.error); return; } BOOL success = [adaptor appendPixelBuffer:(CVPixelBufferRef)bufferRef withPresentationTime:CMTimeMake(frameCount++,(int32_t) framesPerSecond)]; if(success == NO) { NSLog(@"Append buffer Failed! status = %ld,writer.error); // Always gets here } } @end
解决方法
你可以分享videoExporter文件吗?
我正在实现相同的文件,并希望实现相同的目标,以将我的广播文件保存到本地存储,但相同的[编写器startWriting]显示错误
我得到了以下错误.
019-01-23 16:42:35.839406+0530 screencaptureuploadnew[11835:165102] *** Assertion failure in -[VideoExporter beginExport],/Users/georama-mac/Desktop/ScreenCapGeorama/screencaptureuploadnew/VideoExporter.m:53 2019-01-23 16:42:37.960818+0530 screencaptureuploadnew[11835:165102] exception : Should start writing! 2019-01-23 16:42:37.961413+0530 screencaptureuploadnew[11835:165102] path Exists 2019-01-23 16:42:53.162284+0530 screencaptureuploadnew[11835:165103] Append buffer Failed! status = 3,error=Error Domain=AVFoundationErrorDomain Code=-11823 "Cannot Save" UserInfo={NSLocalizedRecoverySuggestion=Try saving again.,NSLocalizedDescription=Cannot Save,NSUnderlyingError=0x281a43870 {Error Domain=NSOSStatusErrorDomain Code=-12412 "(null)"}}