I'm using an iPhone 7+ running iOS 11, and I'm trying to adapt some code that captures regular images so that it also captures depth.
When I call capturePhotoOutput?.isDepthDataDeliverySupported,
it returns false. I was under the impression that the iPhone 7+ can capture depth.
Am I missing a permission in Info.plist? Or have I made a more fundamental mistake?
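
From reading the AVCapturePhotoOutput documentation, my understanding is that depth support is only reported once the output has been added to a session that already contains the dual-camera input, and I believe the session also needs the .photo preset. Here is a minimal sketch of the ordering I mean (session and photoOutput are placeholder names, and the preset requirement is my assumption, not something I've verified):

import AVFoundation

let session = AVCaptureSession()
session.sessionPreset = .photo  // assumption: depth delivery needs a photo-style preset
if let dualCamera = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back),
    let input = try? AVCaptureDeviceInput(device: dualCamera) {
    session.addInput(input)
    let photoOutput = AVCapturePhotoOutput()
    session.addOutput(photoOutput)                 // attach the output first...
    if photoOutput.isDepthDataDeliverySupported {  // ...then query support
        photoOutput.isDepthDataDeliveryEnabled = true
    }
}

For reference, my full view controller is below.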
//
//  RecorderViewController.swift
//

import UIKit
import AVFoundation

class RecorderViewController: UIViewController {

    @IBOutlet weak var previewView: UIView!
    @IBAction func onTapTakePhoto(_ sender: Any) {
        // Make sure capturePhotoOutput is valid
        guard let capturePhotoOutput = self.capturePhotoOutput else { return }
        // Get an instance of AVCapturePhotoSettings class
        let photoSettings = AVCapturePhotoSettings()
        // Set photo settings for our needs
        photoSettings.isAutoStillImageStabilizationEnabled = true
        photoSettings.isHighResolutionPhotoEnabled = true
        photoSettings.flashMode = .auto
        // Call capturePhoto method by passing our photo settings and a
        // delegate implementing AVCapturePhotoCaptureDelegate
        capturePhotoOutput.capturePhoto(with: photoSettings, delegate: self)
    }
    var captureSession: AVCaptureSession?
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?
    var capturePhotoOutput: AVCapturePhotoOutput?
    override func viewDidLoad() {
        super.viewDidLoad()
        //let captureDevice = AVCaptureDevice.default(for: AVMediaType.video)
        let captureDevice = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back)
        do {
            let input = try AVCaptureDeviceInput(device: captureDevice!)
            captureSession = AVCaptureSession()
            captureSession?.addInput(input)
            videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession!)
            videoPreviewLayer?.videoGravity = .resizeAspectFill
            videoPreviewLayer?.frame = view.layer.bounds
            previewView.layer.addSublayer(videoPreviewLayer!)
            capturePhotoOutput = AVCapturePhotoOutput()
            capturePhotoOutput?.isHighResolutionCaptureEnabled = true
            if capturePhotoOutput?.isDepthDataDeliverySupported == true {
                capturePhotoOutput?.isDepthDataDeliveryEnabled = true
            } else {
                print("DEPTH NOT SUPPORTED!")
            }
            // Set the output on the capture session
            captureSession?.addOutput(capturePhotoOutput!)
            captureSession?.startRunning()
        } catch {
            print(error)
        }
    }
    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}
extension RecorderViewController: AVCapturePhotoCaptureDelegate {
    func photoOutput(_ captureOutput: AVCapturePhotoOutput,
                     didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?,
                     previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?,
                     resolvedSettings: AVCaptureResolvedPhotoSettings,
                     bracketSettings: AVCaptureBracketedStillImageSettings?,
                     error: Error?) {
        // Make sure we got a photo sample buffer and no error
        guard error == nil,
            let photoSampleBuffer = photoSampleBuffer else {
                print("Error capturing photo: \(String(describing: error))")
                return
        }
        // Convert the photo sample buffer to JPEG image data using AVCapturePhotoOutput
        guard let imageData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: photoSampleBuffer, previewPhotoSampleBuffer: previewPhotoSampleBuffer) else {
            return
        }
        // Initialise a UIImage with our image data
        let capturedImage = UIImage(data: imageData, scale: 1.0)
        if let image = capturedImage {
            // Save our captured image to the photos album
            UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
        }
    }
}
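
In case it's relevant, I also understand that on iOS 11 the newer delegate callback exposes the depth map directly on AVCapturePhoto, and that the per-capture settings need photoSettings.isDepthDataDeliveryEnabled = true before a capture will include depth. A sketch of how I expect to read it once delivery works (the disparity conversion is just my assumption about a useful format to inspect):

import AVFoundation
import CoreVideo

extension RecorderViewController {
    // iOS 11 variant of the callback: AVCapturePhoto carries the depth data directly.
    func photoOutput(_ output: AVCapturePhotoOutput,
                     didFinishProcessingPhoto photo: AVCapturePhoto,
                     error: Error?) {
        guard error == nil, let depthData = photo.depthData else { return }
        // Convert to 32-bit disparity before inspecting the pixel buffer
        let disparity = depthData.converting(toDepthDataType: kCVPixelFormatType_DisparityFloat32)
        print("depth map:", disparity.depthDataMap)
    }
}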