Google Face Detection crashes when converting to an image and trying to detect faces

Time: 2017-09-01 05:47:11

Tags: ios swift3 camera face-detection google-ios-vision

I am creating a custom camera with filters. When I add the following lines, it crashes without throwing any exception.

// Set up the video output

func setupBuffer() {
    videoBuffer = AVCaptureVideoDataOutput()
    videoBuffer?.alwaysDiscardsLateVideoFrames = true
    videoBuffer?.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString): NSNumber(value: kCVPixelFormatType_32RGBA)]
    videoBuffer?.setSampleBufferDelegate(self, queue: DispatchQueue.main)
    captureSession?.addOutput(videoBuffer)
}


public func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {

    if connection.videoOrientation != .portrait {
        connection.videoOrientation = .portrait
    }
    guard let image = GMVUtility.sampleBufferTo32RGBA(sampleBuffer) else {
        print("No Image ")
        return
    }

    pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
    ciImage = CIImage(cvImageBuffer: pixelBuffer!, options: CMCopyDictionaryOfAttachments(kCFAllocatorDefault, sampleBuffer, kCMAttachmentMode_ShouldPropagate) as! [String : Any]?)

    CameraView.filter = CIFilter(name: "CIPhotoEffectProcess")
    CameraView.filter?.setValue(ciImage, forKey: kCIInputImageKey)
    let cgimg = CameraView.context.createCGImage(CameraView.filter!.outputImage!, from: ciImage.extent)

    DispatchQueue.main.async  {
        self.preview.image = UIImage(cgImage: cgimg!)
    }
}

But it keeps crashing at this line:

guard let image = GMVUtility.sampleBufferTo32RGBA(sampleBuffer) else {
    print("No Image ")
    return
}

And when I pass an image created from a CIImage instead, it does not recognize any face in the image. The complete code file is at https://www.dropbox.com/s/y1ewd1sh18h3ezj/CameraView.swift.zip?dl=0
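For reference, the detection itself runs on the image returned by GMVUtility, roughly as follows. This is a simplified sketch based on the GoogleMobileVision face-detector sample, not the exact code from the linked file; the faceDetector property, the orientation helper, and the option key are assumptions to verify against the SDK headers.

// Assumed setup elsewhere (e.g. in viewDidLoad):
// faceDetector = GMVDetector(ofType: GMVDetectorTypeFace, options: nil)

// Tell the detector how the 32RGBA image is oriented; without the
// orientation option, faces in rotated frames are frequently missed.
let deviceOrientation = UIDevice.current.orientation
let orientation = GMVUtility.imageOrientation(from: deviceOrientation,
                                              with: .back,
                                              defaultDeviceOrientation: .portrait)
let detectorOptions: [AnyHashable: Any] = [GMVDetectorImageOrientation: orientation.rawValue]

let faces = faceDetector?.features(in: image, options: detectorOptions) as? [GMVFaceFeature]
print("Detected \(faces?.count ?? 0) face(s)")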


1 answer:

Answer 0 (score: 0):

1) Create a separate queue for the video buffer.

 fileprivate var videoDataOutputQueue = DispatchQueue(label: "VideoDataOutputQueue")

2) Set up the buffer with this:
let videoBuffer = AVCaptureVideoDataOutput()
videoBuffer.alwaysDiscardsLateVideoFrames = true
videoBuffer.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString): NSNumber(value: kCVPixelFormatType_32BGRA)]
videoBuffer.setSampleBufferDelegate(self, queue: videoDataOutputQueue) // deliver frames on the background queue
captureSession?.addOutput(videoBuffer)
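3) With the delegate now called on videoDataOutputQueue, keep the heavy work (the GMVUtility conversion and face detection) on that queue and hop back to the main queue only for UI updates. A rough sketch, assuming a faceDetector property created once with GMVDetectorTypeFace and omitting the CIFilter step from the question:

func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
    // This now runs on videoDataOutputQueue, not on the main queue.
    guard let image = GMVUtility.sampleBufferTo32RGBA(sampleBuffer) else {
        print("No Image")
        return
    }

    // Face detection stays on the background queue.
    let faces = faceDetector?.features(in: image, options: nil) as? [GMVFaceFeature]

    // Touch UIKit only on the main queue.
    DispatchQueue.main.async {
        self.preview.image = image
        print("Detected \(faces?.count ?? 0) face(s)")
    }
}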