I've been using a custom camera, and I recently upgraded to Xcode 8 beta along with Swift 3. I originally had this:
var stillImageOutput: AVCaptureStillImageOutput?
However, I am now getting the warning:
'AVCaptureStillImageOutput' is deprecated in iOS 10.0: Use AVCapturePhotoOutput instead
Since this is fairly new, I have not seen much information on it. Here is my current code:
var captureSession: AVCaptureSession?
var stillImageOutput: AVCaptureStillImageOutput?
var previewLayer: AVCaptureVideoPreviewLayer?

func clickPicture() {
    if let videoConnection = stillImageOutput?.connection(withMediaType: AVMediaTypeVideo) {
        videoConnection.videoOrientation = .portrait
        stillImageOutput?.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (sampleBuffer, error) -> Void in
            if sampleBuffer != nil {
                let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
                let dataProvider = CGDataProvider(data: imageData!)
                let cgImageRef = CGImage(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: .defaultIntent)
                let image = UIImage(cgImage: cgImageRef!, scale: 1, orientation: .right)
            }
        })
    }
}
I tried looking at AVCapturePhotoCaptureDelegate, but I am not quite sure how to use it. Does anyone know how to use it? Thanks.
Answer 0 (Score: 54)
Updated to Swift 4
Hi, it is really easy to use AVCapturePhotoOutput.
You need the AVCapturePhotoCaptureDelegate, which returns the CMSampleBuffer. You can also get a preview image if you tell the AVCapturePhotoSettings the previewFormat:
class CameraCaptureOutput: NSObject, AVCapturePhotoCaptureDelegate {

    let cameraOutput = AVCapturePhotoOutput()

    func capturePhoto() {
        let settings = AVCapturePhotoSettings()
        let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first!
        let previewFormat = [kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
                             kCVPixelBufferWidthKey as String: 160,
                             kCVPixelBufferHeightKey as String: 160]
        settings.previewPhotoFormat = previewFormat
        self.cameraOutput.capturePhoto(with: settings, delegate: self)
    }

    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
        if let error = error {
            print(error.localizedDescription)
        }

        if let sampleBuffer = photoSampleBuffer, let previewBuffer = previewPhotoSampleBuffer, let dataImage = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) {
            print(UIImage(data: dataImage)?.size as Any) // Your image
        }
    }
}
For more details, visit https://developer.apple.com/reference/AVFoundation/AVCapturePhotoOutput
Note: You must add the AVCapturePhotoOutput to the AVCaptureSession before taking the picture. So something like: session.addOutput(output), and then: output.capturePhoto(with: settings, delegate: self)
Thanks @BigHeadCreations
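For reference, here is a minimal sketch of the session wiring this note describes (Swift 4-era API; the helper name makePhotoSession is illustrative, not from the original answer):

import AVFoundation

// Minimal sketch: build a session with a photo output attached.
func makePhotoSession(with output: AVCapturePhotoOutput) -> AVCaptureSession? {
    let session = AVCaptureSession()
    session.sessionPreset = .photo
    guard let device = AVCaptureDevice.default(for: .video),
        let input = try? AVCaptureDeviceInput(device: device),
        session.canAddInput(input),
        session.canAddOutput(output) else { return nil }
    session.addInput(input)
    session.addOutput(output) // must happen before output.capturePhoto(with:delegate:)
    session.startRunning()
    return session
}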
Answer 1 (Score: 36)
My full implementation:
import UIKit
import AVFoundation

class ViewController: UIViewController, AVCapturePhotoCaptureDelegate {

    var captureSession: AVCaptureSession!
    var cameraOutput: AVCapturePhotoOutput!
    var previewLayer: AVCaptureVideoPreviewLayer!

    @IBOutlet weak var capturedImage: UIImageView!
    @IBOutlet weak var previewView: UIView!

    override func viewDidLoad() {
        super.viewDidLoad()
        captureSession = AVCaptureSession()
        captureSession.sessionPreset = AVCaptureSessionPresetPhoto
        cameraOutput = AVCapturePhotoOutput()

        let device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
        if let input = try? AVCaptureDeviceInput(device: device) {
            if captureSession.canAddInput(input) {
                captureSession.addInput(input)
                if captureSession.canAddOutput(cameraOutput) {
                    captureSession.addOutput(cameraOutput)
                    previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                    previewLayer.frame = previewView.bounds
                    previewView.layer.addSublayer(previewLayer)
                    captureSession.startRunning()
                }
            } else {
                print("issue here : captureSession.canAddInput")
            }
        } else {
            print("some problem here")
        }
    }

    // Take picture button
    @IBAction func didPressTakePhoto(_ sender: UIButton) {
        let settings = AVCapturePhotoSettings()
        let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first!
        let previewFormat = [
            kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
            kCVPixelBufferWidthKey as String: 160,
            kCVPixelBufferHeightKey as String: 160
        ]
        settings.previewPhotoFormat = previewFormat
        cameraOutput.capturePhoto(with: settings, delegate: self)
    }

    // Callback from taking a picture
    func capture(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?, previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
        if let error = error {
            print("error occurred : \(error.localizedDescription)")
        }
        if let sampleBuffer = photoSampleBuffer,
            let previewBuffer = previewPhotoSampleBuffer,
            let dataImage = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) {
            print(UIImage(data: dataImage)?.size as Any)

            let dataProvider = CGDataProvider(data: dataImage as CFData)
            let cgImageRef: CGImage! = CGImage(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: .defaultIntent)
            let image = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.right)
            self.capturedImage.image = image
        } else {
            print("some error here")
        }
    }

    // Use this method wherever you need to know the camera permission state
    func askPermission() {
        print("here")
        let cameraPermissionStatus = AVCaptureDevice.authorizationStatus(forMediaType: AVMediaTypeVideo)

        switch cameraPermissionStatus {
        case .authorized:
            print("Already Authorized")
        case .denied:
            print("denied")
            let alert = UIAlertController(title: "Sorry :(", message: "But could you please grant permission for camera within device settings", preferredStyle: .alert)
            let action = UIAlertAction(title: "Ok", style: .cancel, handler: nil)
            alert.addAction(action)
            present(alert, animated: true, completion: nil)
        case .restricted:
            print("restricted")
        default:
            AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo, completionHandler: { [weak self] (granted: Bool) -> Void in
                if granted == true {
                    // User granted
                    print("User granted")
                    DispatchQueue.main.async {
                        // Do whatever you need on the main thread
                    }
                } else {
                    // User rejected
                    print("User Rejected")
                    DispatchQueue.main.async {
                        let alert = UIAlertController(title: "WHY?", message: "Camera is the main feature of our application", preferredStyle: .alert)
                        let action = UIAlertAction(title: "Ok", style: .cancel, handler: nil)
                        alert.addAction(action)
                        self?.present(alert, animated: true, completion: nil)
                    }
                }
            })
        }
    }
}
Answer 2 (Score: 6)
I took @Aleksey Timoshchenko's excellent answer and updated it to Swift 4.x.
Note that, for my use-case, I allow the user to take multiple photos, which is why I save them in an images array.
Note that you need to wire up the @IBAction takePhoto method via your storyboard or in code. In my case, I use a storyboard; a sketch of the in-code route follows.
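A minimal sketch of the in-code wiring, assuming the cameraView outlet from the class below (the gesture setup itself is my illustration, not part of the original answer; @IBAction implies @objc, so #selector works):

override func viewDidLoad() {
    super.viewDidLoad()
    // Wire a tap gesture to takePhoto(_:) in code instead of the storyboard.
    let tap = UITapGestureRecognizer(target: self, action: #selector(takePhoto(_:)))
    cameraView.addGestureRecognizer(tap)
}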
As of iOS 11, the AVCapturePhotoOutput.jpegPhotoDataRepresentation used in @Aleksey Timoshchenko's answer is deprecated.
import UIKit
import AVFoundation

class CameraVC: UIViewController {

    @IBOutlet weak var cameraView: UIView!

    var images = [UIImage]()

    var captureSession: AVCaptureSession!
    var cameraOutput: AVCapturePhotoOutput!
    var previewLayer: AVCaptureVideoPreviewLayer!

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        startCamera()
    }

    func startCamera() {
        captureSession = AVCaptureSession()
        captureSession.sessionPreset = AVCaptureSession.Preset.photo
        cameraOutput = AVCapturePhotoOutput()

        if let device = AVCaptureDevice.default(for: .video),
            let input = try? AVCaptureDeviceInput(device: device) {
            if captureSession.canAddInput(input) {
                captureSession.addInput(input)
                if captureSession.canAddOutput(cameraOutput) {
                    captureSession.addOutput(cameraOutput)
                    previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                    previewLayer.frame = cameraView.bounds
                    cameraView.layer.addSublayer(previewLayer)
                    captureSession.startRunning()
                }
            } else {
                print("issue here : captureSession.canAddInput")
            }
        } else {
            print("some problem here")
        }
    }

    @IBAction func takePhoto(_ sender: UITapGestureRecognizer) {
        let settings = AVCapturePhotoSettings()
        let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first!
        let previewFormat = [
            kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
            kCVPixelBufferWidthKey as String: 160,
            kCVPixelBufferHeightKey as String: 160
        ]
        settings.previewPhotoFormat = previewFormat
        cameraOutput.capturePhoto(with: settings, delegate: self)
    }
}

extension CameraVC: AVCapturePhotoCaptureDelegate {
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        if let error = error {
            print("error occurred : \(error.localizedDescription)")
        }
        if let dataImage = photo.fileDataRepresentation() {
            print(UIImage(data: dataImage)?.size as Any)

            let dataProvider = CGDataProvider(data: dataImage as CFData)
            let cgImageRef: CGImage! = CGImage(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: .defaultIntent)
            let image = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: UIImage.Orientation.right)

            // Save the image in the array / do whatever you want to do with the image here
            self.images.append(image)
        } else {
            print("some error here")
        }
    }
}
Answer 3 (Score: 5)
In iOS 11, photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) is deprecated.
Use the following method instead:
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
    let imageData = photo.fileDataRepresentation()
    if let data = imageData, let img = UIImage(data: data) {
        print(img)
    }
}
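As a usage sketch that is not part of the original answer: once you have the UIImage, one common next step is saving it to the photo library (this assumes the NSPhotoLibraryAddUsageDescription key is present in Info.plist):

import UIKit
import Photos

// Illustrative helper, not from the original answer: save a captured image
// to the user's photo library.
func save(_ image: UIImage) {
    PHPhotoLibrary.shared().performChanges({
        PHAssetChangeRequest.creationRequestForAsset(from: image)
    }, completionHandler: { _, error in
        if let error = error {
            print("could not save photo: \(error.localizedDescription)")
        }
    })
}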
Answer 4 (Score: 3)
I found this project on GitHub that helped me understand the initialization of the device and the capture session.
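For illustration, a minimal sketch of explicit device selection on iOS 10+ (the helper name backCamera is mine, and the referenced project may do this differently):

import AVFoundation

// Illustrative: discover the back wide-angle camera explicitly instead of
// relying on AVCaptureDevice.default(for:).
func backCamera() -> AVCaptureDevice? {
    let discovery = AVCaptureDevice.DiscoverySession(
        deviceTypes: [.builtInWideAngleCamera],
        mediaType: .video,
        position: .back)
    return discovery.devices.first
}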
Answer 5 (Score: 3)
The capture delegate function has been changed to photoOutput. Here is the updated function for Swift 4 (note that, as Answer 3 points out, this CMSampleBuffer-based signature is itself deprecated on iOS 11):
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
    if let error = error {
        print(error.localizedDescription)
    }
    if let sampleBuffer = photoSampleBuffer, let previewBuffer = previewPhotoSampleBuffer, let dataImage = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) {
        print("image: \(String(describing: UIImage(data: dataImage)?.size))") // Your image
    }
}