This error is driving me crazy. I am trying to write the absolute minimum code needed to get AVDepthData from the iPhone 7 Plus's dual camera.

I have this code:
//
//  RecorderViewController.swift
//  ios-recorder-app

import UIKit
import AVFoundation

class RecorderViewController: UIViewController {

    @IBOutlet weak var previewView: UIView!

    @IBAction func onTapTakePhoto(_ sender: Any) {
        guard let capturePhotoOutput = self.capturePhotoOutput else { return }

        let photoSettings = AVCapturePhotoSettings()
        photoSettings.isDepthDataDeliveryEnabled = true //Error
        capturePhotoOutput.capturePhoto(with: photoSettings, delegate: self)
    }

    var session: AVCaptureSession?
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?
    var capturePhotoOutput: AVCapturePhotoOutput?

    override func viewDidLoad() {
        super.viewDidLoad()

        AVCaptureDevice.requestAccess(for: .video, completionHandler: { _ in })

        let captureDevice = AVCaptureDevice.default(.builtInDualCamera, for: .depthData, position: .back)

        do {
            print(captureDevice!)
            let input = try AVCaptureDeviceInput(device: captureDevice!)

            self.capturePhotoOutput = AVCapturePhotoOutput()
            self.capturePhotoOutput?.isDepthDataDeliveryEnabled = true //Error

            self.session = AVCaptureSession()
            self.session?.addInput(input)

            self.videoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.session!)
            self.videoPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
            self.videoPreviewLayer?.frame = view.layer.bounds
            previewView.layer.addSublayer(self.videoPreviewLayer!)

            self.session?.addOutput(self.capturePhotoOutput!)
            self.session?.startRunning()
        } catch {
            print(error)
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}

extension RecorderViewController: AVCapturePhotoCaptureDelegate {
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        print(photo.depthData)
    }
}
If I comment out the lines marked //Error, the code runs as I expect and prints nil for depthData.

Left as-is, however, I get an exception. The error message states: AVCapturePhotoOutput setDepthDataDeliveryEnabled:] Depth data delivery is not supported in the current configuration.

How do I change the "current configuration" so that depth delivery is supported?
I watched this video: https://developer.apple.com/videos/play/wwdc2017/507/ which was very helpful, and I believe I have followed the exact steps needed to make this work.

Any tips would be greatly appreciated!
Answer (score: 3)
There were two things I needed to fix:

1. Set the sessionPreset to a format that supports depth, such as .photo.
2. Add the capturePhotoOutput to the session before setting .isDepthDataDeliveryEnabled = true.

Here is my minimal code for getting depth data along with the photo:
//
//  RecorderViewController.swift
//  ios-recorder-app
//

import UIKit
import AVFoundation

class RecorderViewController: UIViewController {

    @IBOutlet weak var previewView: UIView!

    @IBAction func onTapTakePhoto(_ sender: Any) {
        guard let capturePhotoOutput = self.capturePhotoOutput else { return }

        let photoSettings = AVCapturePhotoSettings()
        photoSettings.isDepthDataDeliveryEnabled = true
        capturePhotoOutput.capturePhoto(with: photoSettings, delegate: self)
    }

    var session: AVCaptureSession?
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?
    var capturePhotoOutput: AVCapturePhotoOutput?

    override func viewDidLoad() {
        super.viewDidLoad()

        AVCaptureDevice.requestAccess(for: .video, completionHandler: { _ in })

        let captureDevice = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back)
        print(captureDevice!.activeDepthDataFormat)

        do {
            let input = try AVCaptureDeviceInput(device: captureDevice!)
            self.capturePhotoOutput = AVCapturePhotoOutput()

            self.session = AVCaptureSession()
            self.session?.beginConfiguration()
            // 1. Use a preset that supports depth capture.
            self.session?.sessionPreset = .photo
            self.session?.addInput(input)

            self.videoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.session!)
            self.videoPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
            self.videoPreviewLayer?.frame = self.view.layer.bounds
            self.previewView.layer.addSublayer(self.videoPreviewLayer!)

            self.session?.addOutput(self.capturePhotoOutput!)
            self.session?.commitConfiguration()

            // 2. Enable depth delivery only after the output has been added to the session.
            self.capturePhotoOutput?.isDepthDataDeliveryEnabled = true

            self.session?.startRunning()
        } catch {
            print(error)
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}

extension RecorderViewController: AVCapturePhotoCaptureDelegate {
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        print(photo.depthData)
    }
}
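
As a side note (not part of the original answer, just a hedged sketch under the same assumptions as the code above, i.e. the capturePhotoOutput property and iOS 11+): AVCapturePhotoOutput exposes isDepthDataDeliverySupported, so you can guard the call that was throwing instead of letting it raise, and AVDepthData can be converted to a 32-bit disparity map for inspection. The helper names enableDepthIfSupported() and inspect(depthData:) below are hypothetical, not from the original code.

extension RecorderViewController {

    // Hypothetical helper: enable depth delivery only when the current
    // session configuration actually supports it, instead of crashing.
    func enableDepthIfSupported() {
        guard let output = self.capturePhotoOutput else { return }
        if output.isDepthDataDeliverySupported {
            output.isDepthDataDeliveryEnabled = true
        } else {
            print("Depth data delivery is not supported in the current configuration")
        }
    }

    // Hypothetical helper: convert captured AVDepthData to 32-bit
    // disparity and report the pixel buffer's dimensions.
    func inspect(depthData: AVDepthData) {
        let disparity = depthData.converting(toDepthDataType: kCVPixelFormatType_DisparityFloat32)
        let map = disparity.depthDataMap
        print("Disparity map: \(CVPixelBufferGetWidth(map)) x \(CVPixelBufferGetHeight(map))")
    }
}

One way to use this would be to call enableDepthIfSupported() after commitConfiguration() in viewDidLoad, and to pass photo.depthData (when it is non-nil) into inspect(depthData:) from the delegate callback.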