I'm trying to capture camera frames in real time so I can process them with Firebase ML Kit. I've managed to display the camera preview, but the captureOutput delegate function never seems to get called.

P.S. I'm new to iOS development.
private func startLiveVideo() {
    self.session.sessionPreset = AVCaptureSession.Preset.photo
    let captureDevice = AVCaptureDevice.default(for: AVMediaType.video)
    let deviceInput = try! AVCaptureDeviceInput(device: captureDevice!)
    self.session.addInput(deviceInput)

    let deviceOutput = AVCaptureVideoDataOutput()
    deviceOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
    deviceOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
    self.session.addOutput(AVCaptureVideoDataOutput())

    let imageLayer = AVCaptureVideoPreviewLayer(session: session)
    imageLayer.frame = CGRect(x: 0, y: 0, width: self.imageView.frame.size.width + 100, height: self.imageView.frame.size.height)
    imageLayer.videoGravity = .resizeAspectFill
    imageView.layer.addSublayer(imageLayer)

    self.session.startRunning()
}
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    print("Frame captured")
}
Answer 0 (score: 0)
You set the sample-buffer delegate on this output:
let deviceOutput = AVCaptureVideoDataOutput()
deviceOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
deviceOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
but then you add a different, brand-new instance to the session here:
self.session.addOutput(AVCaptureVideoDataOutput())
so the output whose delegate you configured is never attached to the session, and captureOutput is never called. Replace that line with:
self.session.addOutput(deviceOutput)
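
For reference, here is the whole function with that one-line fix applied. Treat it as a sketch: the guard against a missing camera and the imageView.bounds preview frame are optional tidy-ups, not part of the fix. Your class must also conform to AVCaptureVideoDataOutputSampleBufferDelegate, otherwise setSampleBufferDelegate(self, ...) won't compile.

private func startLiveVideo() {
    self.session.sessionPreset = AVCaptureSession.Preset.photo

    // Avoid the force-unwrap / force-try: the device can be unavailable (e.g. in the simulator).
    guard let captureDevice = AVCaptureDevice.default(for: AVMediaType.video),
          let deviceInput = try? AVCaptureDeviceInput(device: captureDevice),
          self.session.canAddInput(deviceInput) else { return }
    self.session.addInput(deviceInput)

    let deviceOutput = AVCaptureVideoDataOutput()
    deviceOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
    deviceOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
    // Add the SAME output instance the delegate was set on.
    if self.session.canAddOutput(deviceOutput) {
        self.session.addOutput(deviceOutput)
    }

    let imageLayer = AVCaptureVideoPreviewLayer(session: session)
    imageLayer.frame = imageView.bounds
    imageLayer.videoGravity = .resizeAspectFill
    imageView.layer.addSublayer(imageLayer)

    self.session.startRunning()
}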
Answer 1 (score: 0)
After converting the code to Swift 5, it works fine.
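
Once captureOutput starts firing, the frames can be handed straight to ML Kit. Here is a minimal sketch of the delegate method, assuming the legacy FirebaseMLVision pod and an on-device text recognizer; the ViewController name, the orientation value, and the recognizer choice are placeholders, so swap in whichever detector you actually use.

import AVFoundation
import FirebaseMLVision

extension ViewController: AVCaptureVideoDataOutputSampleBufferDelegate {
    func captureOutput(_ output: AVCaptureOutput,
                       didOutput sampleBuffer: CMSampleBuffer,
                       from connection: AVCaptureConnection) {
        // Wrap the raw frame for ML Kit.
        let visionImage = VisionImage(buffer: sampleBuffer)

        // ML Kit needs the frame's orientation; .rightTop matches the
        // back camera in portrait. Adjust for your setup.
        let metadata = VisionImageMetadata()
        metadata.orientation = .rightTop
        visionImage.metadata = metadata

        Vision.vision().onDeviceTextRecognizer().process(visionImage) { result, error in
            guard error == nil, let result = result else { return }
            print(result.text)
        }
    }
}

In a real app you would keep the recognizer in a stored property and drop frames while one is still being processed; running a detector on every frame at full rate will quickly back up the videoQueue.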