无法在Swift Playgrounds上捕获视频数据,captureOutput AVCaptureVideoDataOutputSampleBufferDelegate委托方法未调用

前端之家收集整理的这篇文章主要介绍了无法在Swift Playgrounds上捕获视频数据,captureOutput AVCaptureVideoDataOutputSampleBufferDelegate委托方法未调用前端之家小编觉得挺不错的,现在分享给大家,也给大家做个参考。
我想在Swift Playgrounds iPad应用程序上访问iPad的相机。我发现即使我的游乐场运行良好,它也不可能捕获视频数据。

captureOutput(_ captureOutput:AVCaptureOutput!,didOutputSampleBuffer sampleBuffer:CMSampleBuffer!,from connection:AVCaptureConnection!),即AVCaptureVideoDataOutputSampleBufferDelegate协议的委托方法,未被调用(可能是因为没有视频数据进入),而同样的代码在我的iOS应用程序中可以正常工作。

我的Playground中的视图应该显示FaceTime前置摄像头的画面。为什么即使Apple明确表示允许这样做(explicitly says it's allowed to do so),也无法显示相机输出?此外,Playground应用程序会在我打开该Playground时立即询问相机权限,因此它应该是以某种方式被允许的。

import UIKit
import CoreImage
import AVFoundation
import ImageIO
import PlaygroundSupport

/// Wraps an AVCaptureSession that streams front-camera frames to a
/// sample-buffer delegate and exposes a preview view (`visageCameraView`).
class Visage: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {

    /// Host view for the camera preview layer; add it to your hierarchy.
    var visageCameraView: UIView = UIView()
    fileprivate var faceDetector: CIDetector?
    fileprivate var videoDataOutput: AVCaptureVideoDataOutput?
    fileprivate var videoDataOutputQueue: DispatchQueue?
    fileprivate var cameraPreviewLayer: AVCaptureVideoPreviewLayer?
    fileprivate var captureSession: AVCaptureSession = AVCaptureSession()
    fileprivate let notificationCenter: NotificationCenter = NotificationCenter.default
    // Serial queue for session start/stop; startRunning()/stopRunning() are
    // blocking calls and must not run on the main thread.
    fileprivate let sessionQueue = DispatchQueue(label: "SessionQueue")

    override init() {
        super.init()

        self.captureSetup(AVCaptureDevicePosition.front)
        // High accuracy is slower but appropriate for per-frame face detection.
        let faceDetectorOptions: [String : AnyObject] = [CIDetectorAccuracy: CIDetectorAccuracyHigh as AnyObject]
        self.faceDetector = CIDetector(ofType: CIDetectorTypeFace, context: nil, options: faceDetectorOptions)
    }

    /// Starts the capture session off the main thread (startRunning() blocks
    /// until the session is running or fails).
    func beginFaceDetection() {
        sessionQueue.async { [weak self] in
            self?.captureSession.startRunning()
        }
    }

    /// Stops the capture session (also blocking; see beginFaceDetection()).
    func endFaceDetection() {
        sessionQueue.async { [weak self] in
            self?.captureSession.stopRunning()
        }
    }

    /// Configures the session: picks the camera at `position` (falling back to
    /// the default video device), attaches a BGRA video-data output with `self`
    /// as delegate, and installs the preview layer into `visageCameraView`.
    fileprivate func captureSetup(_ position: AVCaptureDevicePosition) {
        var captureDevice: AVCaptureDevice?

        // Prefer the camera matching the requested position (front/back).
        for testedDevice in AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) {
            if ((testedDevice as AnyObject).position == position) {
                captureDevice = testedDevice as? AVCaptureDevice
            }
        }
        if captureDevice == nil {
            captureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
        }

        captureSession.sessionPreset = AVCaptureSessionPresetHigh

        // The original force-unwrapped the device and routed failure through a
        // captured NSError; guard instead so a missing or permission-denied
        // camera cannot crash the playground.
        if let device = captureDevice, let deviceInput = try? AVCaptureDeviceInput(device: device) {
            if captureSession.canAddInput(deviceInput) {
                captureSession.addInput(deviceInput)
            }

            let output = AVCaptureVideoDataOutput()
            output.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable: Int(kCVPixelFormatType_32BGRA)]
            // Drop late frames rather than queueing them behind a slow delegate.
            output.alwaysDiscardsLateVideoFrames = true
            let outputQueue = DispatchQueue(label: "VideoDataOutputQueue", attributes: [])
            output.setSampleBufferDelegate(self, queue: outputQueue)
            if captureSession.canAddOutput(output) {
                captureSession.addOutput(output)
            }
            self.videoDataOutput = output
            self.videoDataOutputQueue = outputQueue
        }

        visageCameraView.frame = UIScreen.main.bounds
        // Keep the layer in cameraPreviewLayer — the original declared that
        // property but never assigned it — and avoid force-unwrapping.
        if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
            previewLayer.frame = UIScreen.main.bounds
            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
            visageCameraView.layer.addSublayer(previewLayer)
            self.cameraPreviewLayer = previewLayer
        }
    }

    /// Per-frame delegate callback, invoked on `videoDataOutputQueue`.
    /// NOTE(review): under Swift 4+ this requirement was renamed to
    /// `captureOutput(_:didOutput:from:)`; with the old selector below the
    /// delegate silently never fires — confirm the toolchain's Swift version,
    /// as this is the classic cause of the "method not called" symptom.
    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        print("delegate method called!")
    }
}

/// Frame-based container view that owns a `Visage` recognizer and shows its
/// camera preview; intended to be used as the playground's live view.
class SmileView: UIView {
    // Placeholder subview retained for interface compatibility (never laid out).
    let smileView = UIView()
    var smileRec: Visage!

    /// Creates the view, starts face detection and installs the camera preview.
    override init(frame: CGRect) {
        super.init(frame: frame)
        self.addSubview(smileView)
        // FIX: the original set translatesAutoresizingMaskIntoConstraints = false
        // on self. For a frame-based live view with no constraints attached this
        // collapses the view to zero size under Auto Layout, so it is removed.
        smileRec = Visage()
        smileRec.beginFaceDetection()
        let cameraView = smileRec.visageCameraView
        self.addSubview(cameraView)
    }

    // Not supported: this view is only constructed programmatically.
    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
}

// Host the smile detector as the playground's live view, sized to the screen.
let screenSize = UIScreen.main.bounds.size
let sView = SmileView(frame: CGRect(origin: .zero, size: screenSize))
PlaygroundPage.current.liveView = sView
编辑:这应该是固定的:)

编辑:这被证实是Apple的一个错误

我已经提交了一份错误报告,当新的官方信息出现时我会更新这个答案。

原文链接:https://www.f2er.com/swift/320292.html

猜你在找的Swift相关文章