swift – CVPixelBufferPool error (kCVReturnInvalidArgument / -6661)

I've implemented the previous suggestion in Swift (How to use CVPixelBufferPool in conjunction with AVAssetWriterInputPixelBufferAdaptor in iPhone?),
but I'm getting "kCVReturnInvalidArgument" (error value: -6661) when using CVPixelBufferPoolCreatePixelBuffer.

I'm basically trying to create a movie from images, but since the buffer pool is never created successfully I can't append any pixel buffers – here is the code I use to do this.

Any suggestions are greatly appreciated!

import Foundation
import Photos
import OpenGLES
import AVFoundation
import CoreMedia

class MovieGenerator {

    var _videoWriter:AVAssetWriter
    var _videoWriterInput: AVAssetWriterInput
    var _adapter: AVAssetWriterInputPixelBufferAdaptor
    var _buffer = UnsafeMutablePointer<Unmanaged<CVPixelBuffer>?>.alloc(1)


    init(frameSize size: CGSize, outputURL url: NSURL) {

    // delete file if exists
    let sharedManager = NSFileManager.defaultManager() as NSFileManager
    if(sharedManager.fileExistsAtPath(url.path!)) {
        sharedManager.removeItemAtPath(url.path!, error: nil)
    }

    // video writer
    _videoWriter = AVAssetWriter(URL: url, fileType: AVFileTypeQuickTimeMovie, error: nil)

    // writer input
    var videoSettings = [AVVideoCodecKey: AVVideoCodecH264, AVVideoWidthKey: size.width, AVVideoHeightKey: size.height]
    _videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
    _videoWriterInput.expectsMediaDataInRealTime = true
    _videoWriter.addInput(_videoWriterInput)

    // pixel buffer adapter
    var adapterAttributes = [kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_32BGRA, kCVPixelBufferWidthKey: size.width, kCVPixelBufferHeightKey: size.height, kCVPixelFormatOpenGLESCompatibility: kCFBooleanTrue]

    _adapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: _videoWriterInput, sourcePixelBufferAttributes: adapterAttributes)
    var poolCreateResult: CVReturn = CVPixelBufferPoolCreatePixelBuffer(nil, _adapter.pixelBufferPool, _buffer)
    println("pool creation: \(poolCreateResult)")

    _videoWriter.startWriting()
    _videoWriter.startSessionAtSourceTime(kCMTimeZero)

}

func addImage(image: UIImage, frameNum: Int, fps: Int) -> Bool {

    self.createPixelBufferFromCGImage(image.CGImage, pixelBufferPtr: _buffer)

    var presentTime: CMTime = CMTimeMake(Int64(frameNum), Int32(fps))
    var result: Bool = _adapter.appendPixelBuffer(_buffer.memory?.takeUnretainedValue(), withPresentationTime: presentTime)

    return result
}

func finalizeMovie(timeStamp: CMTime) {
    _videoWriterInput.markAsFinished()
    _videoWriter.endSessionAtSourceTime(timeStamp)
    _videoWriter.finishWritingWithCompletionHandler({println("video writer finished with status: \(self._videoWriter.status)")})
}

func createPixelBufferFromCGImage(image: CGImage, pixelBufferPtr: UnsafeMutablePointer<Unmanaged<CVPixelBuffer>?>) {

    let width:UInt = CGImageGetWidth(image)
    let height:UInt = CGImageGetHeight(image)

    let imageData:CFData = CGDataProviderCopyData(CGImageGetDataProvider(image))
    let options:CFDictionary = [kCVPixelBufferCGImageCompatibilityKey: NSNumber.numberWithBool(true), kCVPixelBufferCGBitmapContextCompatibilityKey: NSNumber.numberWithBool(true)]

    var status:CVReturn = CVPixelBufferCreate(kCFAllocatorDefault, width, height, OSType(kCVPixelFormatType_32BGRA), options, pixelBufferPtr)
    assert(status != 0, "CVPixelBufferCreate: \(status)")

    var lockStatus:CVReturn = CVPixelBufferLockBaseAddress(pixelBufferPtr.memory?.takeUnretainedValue(), 0)
    println("CVPixelBufferLockBaseAddress: \(lockStatus)")

    var pxData:UnsafeMutablePointer<(Void)> = CVPixelBufferGetBaseAddress(pixelBufferPtr.memory?.takeUnretainedValue())
    let bitmapinfo = CGBitmapInfo.fromRaw(CGImageAlphaInfo.NoneSkipFirst.toRaw())
    let rgbColorSpace:CGColorSpace = CGColorSpaceCreateDeviceRGB()

    var context:CGContextRef = CGBitmapContextCreate(pxData, width, height, 8, 4 * CGImageGetWidth(image), rgbColorSpace, bitmapinfo!)

    CGContextDrawImage(context, CGRectMake(0, 0, CGFloat(width), CGFloat(height)), image)

    CVPixelBufferUnlockBaseAddress(pixelBufferPtr.memory?.takeUnretainedValue(), 0)


}



}
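One detail worth noting (my assumption about the -6661, not a confirmed fix): Apple documents that the pixelBufferPool property of AVAssetWriterInputPixelBufferAdaptor stays nil until startWriting() has been called on the associated AVAssetWriter, and the init above calls CVPixelBufferPoolCreatePixelBuffer before that point. A minimal reordering sketch, reusing the property names from the class above:

_videoWriter.startWriting()
_videoWriter.startSessionAtSourceTime(kCMTimeZero)

// Only ask the adaptor for a buffer once its pool exists (i.e. after startWriting()).
if let pool = _adapter.pixelBufferPool {
    let poolCreateResult: CVReturn = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pool, _buffer)
    println("pool creation: \(poolCreateResult)")
}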
Frustratingly, I can't answer your question exactly, but I'm working on essentially the same code. And my attempt happens to get further than the error you're hitting: it goes all the way to trying to add the images to the movie and then fails, because it never gets a successful result from appendPixelBuffer() – and I don't know how to find out why. I'm posting this in the hope it helps you get further along.

(My code is adapted from AVFoundation + AssetWriter: Generate Movie With Images and Audio, and I used your post to help navigate the pointer-interop shenanigans...)

func writeAnimationToMovie(path: String, size: CGSize, animation: Animation) -> Bool {
    var error: NSError?
    let writer = AVAssetWriter(URL: NSURL(fileURLWithPath: path), error: &error)

    let videoSettings = [AVVideoCodecKey: AVVideoCodecH264, AVVideoWidthKey: size.width, AVVideoHeightKey: size.height]

    let input = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
    let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: input, sourcePixelBufferAttributes: nil)
    input.expectsMediaDataInRealTime = true
    writer.addInput(input)

    writer.startWriting()
    writer.startSessionAtSourceTime(kCMTimeZero)

    var buffer: CVPixelBufferRef

    var frameCount = 0
    for frame in animation.frames {
        let rect = CGRectMake(0, 0, size.width, size.height)
        let rectPtr = UnsafeMutablePointer<CGRect>.alloc(1)
        rectPtr.memory = rect
        buffer = pixelBufferFromCGImage(frame.image.CGImageForProposedRect(rectPtr, context: nil, hints: nil).takeUnretainedValue(), size)
        var appendOk = false
        var j = 0
        while (!appendOk && j < 30) {
            if pixelBufferAdaptor.assetWriterInput.readyForMoreMediaData {
                let frameTime = CMTimeMake(Int64(frameCount), 10)
                appendOk = pixelBufferAdaptor.appendPixelBuffer(buffer, withPresentationTime: frameTime)
                // appendOk will always be false
                NSThread.sleepForTimeInterval(0.05)
            } else {
                NSThread.sleepForTimeInterval(0.1)
            }
            j++
        }
        if (!appendOk) {
            println("Doh,frame \(frame) at offset \(frameCount) Failed to append")
        }
    }

    input.markAsFinished()
    writer.finishWritingWithCompletionHandler({
        if writer.status == AVAssetWriterStatus.Failed {
            println("oh noes,an error: \(writer.error.description)")
        } else {
            println("hrmmm,there should be a movie?")
        }
    })

    return true;
}
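One diagnostic sketch for the always-false appendPixelBuffer() result, assuming the same writer and retry loop as above: once an append fails, the writer has usually moved to AVAssetWriterStatus.Failed, and writer.error then carries the underlying reason, so checking it inside the loop surfaces the cause earlier than the completion handler does.

// Hypothetical addition inside the retry loop above: if an append fails,
// check whether the writer itself has failed and log why.
if !appendOk && writer.status == AVAssetWriterStatus.Failed {
    println("append failed, writer error: \(writer.error)")
}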

pixelBufferFromCGImage is defined like this:

func pixelBufferFromCGImage(image: CGImageRef, size: CGSize) -> CVPixelBufferRef {
    let options = [
        kCVPixelBufferCGImageCompatibilityKey: true,
        kCVPixelBufferCGBitmapContextCompatibilityKey: true]
    var pixBufferPointer = UnsafeMutablePointer<Unmanaged<CVPixelBuffer>?>.alloc(1)

    let status = CVPixelBufferCreate(
        nil, UInt(size.width), UInt(size.height), OSType(kCVPixelFormatType_32ARGB), options, pixBufferPointer)

    CVPixelBufferLockBaseAddress(pixBufferPointer.memory?.takeUnretainedValue(), 0)

    let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
    let bitmapinfo = CGBitmapInfo.fromRaw(CGImageAlphaInfo.NoneSkipFirst.toRaw())

    var pixBufferData:UnsafeMutablePointer<(Void)> = CVPixelBufferGetBaseAddress(pixBufferPointer.memory?.takeUnretainedValue())

    let context = CGBitmapContextCreate(
        pixBufferData, UInt(size.width), UInt(size.height), 8, UInt(4 * size.width), rgbColorSpace, bitmapinfo!)

    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0))
    CGContextDrawImage(
        context, CGRectMake(0, 0, CGFloat(CGImageGetWidth(image)), CGFloat(CGImageGetHeight(image))), image)

    CVPixelBufferUnlockBaseAddress(pixBufferPointer.memory?.takeUnretainedValue(), 0)
    return pixBufferPointer.memory!.takeUnretainedValue()
}
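A side note on the pointer handling above, offered only as a sketch: CVPixelBufferCreate follows Core Foundation's create rule, so the Unmanaged<CVPixelBuffer> written through pixBufferPointer carries a +1 retain. Taking it retained exactly once on return, and freeing the manually alloc'd pointer, would balance that:

// Sketch of an alternative ending for pixelBufferFromCGImage:
// hand the +1 retained buffer to ARC once, then free the raw pointer.
let buffer = pixBufferPointer.memory!.takeRetainedValue()
pixBufferPointer.dealloc(1)
return buffer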
