如何使用AVFoundation捕获静止图像并使用Swift

时间:2015-10-22 03:22:08

标签: ios swift uiimageview avfoundation segue

目标:我正在使用AVFoundation来创建自定义相机,其行为类似于Facebook,Instagram和Snapchats相机图像捕捉序列。

以下是具有理想用户体验的控制器:

  • 用户按下加号按钮
  • app使用AVCaptureSession和AVCaptureVideoPreviewLayer切换/过渡到自定义相机View Controller,显示为紫色区域
  • 用户按按钮以捕获图像
  • app切换到静止/图像视图控制器,他们刚拍摄的图片,以便用户可以编辑或其他任何必须发生的事情
  • 用户按使用按钮
  • app保存图像并弹出到根ViewController

Here is my Swift Storyboard of the above

问题:我可以使用AVCapturePreviewLayer获取实时视频,但是一旦我捕获了我的图片,我无法将捕获的UIImage传输到第二个ViewController。我正在使用一个segue,我在结束时触发captureStillImageAsynchronouslyFromConnection完成回调。

这是MasterViewController?

class AddPhotoViewController: UIViewController {


@IBOutlet var previewLayerView: UIView!


var captureSession: AVCaptureSession?
var previewLayer: AVCaptureVideoPreviewLayer?
var stillImageOutput: AVCaptureStillImageOutput?
var imageDetail: UIImage?


@IBAction func cancelCameraBtn(sender: AnyObject) {

    self.navigationController?.popToRootViewControllerAnimated(true)

}



@IBAction func takePhotoBtn(sender: AnyObject) {


    if let videoConnection = stillImageOutput!.connectionWithMediaType(AVMediaTypeVideo) {


        videoConnection.videoOrientation = AVCaptureVideoOrientation.Portrait

        stillImageOutput?.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: {(sampleBuffer, error) in

            if (sampleBuffer != nil) {
                let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
                let dataProvider = CGDataProviderCreateWithCFData(imageData)
                let cgImageRef = CGImageCreateWithJPEGDataProvider(dataProvider, nil, true, CGColorRenderingIntent.RenderingIntentDefault)

                self.imageDetail = UIImage(CGImage: cgImageRef!, scale: 1.0, orientation: UIImageOrientation.Right)

                self.performSegueWithIdentifier("captureSessionDetailSegue", sender: self)
            }
        })
    }
}



override func viewDidLoad() {
    super.viewDidLoad()
    // Do any additional setup after loading the view, typically from a nib.
}

override func viewWillDisappear(animated: Bool) {
    captureSession!.stopRunning()
    self.navigationController?.setNavigationBarHidden(false, animated: false)

}

override func viewWillAppear(animated: Bool) {

    super.viewWillAppear(animated)

    // display properties
    self.navigationController?.setNavigationBarHidden(true, animated: false)

    captureSession = AVCaptureSession()
    captureSession!.sessionPreset = AVCaptureSessionPresetPhoto

    let backCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)

    var error: NSError?
    var input: AVCaptureDeviceInput!
    do {
        input = try AVCaptureDeviceInput(device: backCamera)
    } catch let error1 as NSError {
        error = error1
        input = nil
    }

    if error == nil && captureSession!.canAddInput(input) {
        captureSession!.addInput(input)

        stillImageOutput = AVCaptureStillImageOutput()
        stillImageOutput!.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]

        if captureSession!.canAddOutput(stillImageOutput) {
            captureSession!.addOutput(stillImageOutput)

            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)


            previewLayer!.videoGravity = AVLayerVideoGravityResizeAspectFill
            previewLayer!.connection?.videoOrientation = AVCaptureVideoOrientation.Portrait



            previewLayerView.layer.addSublayer(previewLayer!)
            //previewLayerView.layer.removeAllAnimations()


            captureSession!.startRunning()
        }
    }

}
override func viewDidAppear(animated: Bool) {
    super.viewDidAppear(animated)
    previewLayer!.frame = previewLayerView.bounds
}


override func didReceiveMemoryWarning() {
    super.didReceiveMemoryWarning()
    // Dispose of any resources that can be recreated.
}



// MARK: - Navigation

// In a storyboard-based application, you will often want to do a little preparation before navigation
override func prepareForSegue(segue: UIStoryboardSegue, sender: AnyObject?) {
    // Get the new view controller using segue.destinationViewController.
    // Pass the selected object to the new view controller.

    //if segue.identifier == "captureSessionDetailSegue" {

    let destination = segue.destinationViewController as! CaptureSessionDetailViewController
    destination.capturedImage.image = self.imageDetail

    // returns nil propertyfrom here
    //destination.navigationController!.setNavigationBarHidden(true, animated: false)
    //}
}


}

这是DetailViewController?

class CaptureSessionDetailViewController: UIViewController {

@IBOutlet var capturedImage: UIImageView!

override func viewDidLoad() {
    super.viewDidLoad()

    // Do any additional setup after loading the view.
}

override func didReceiveMemoryWarning() {
    super.didReceiveMemoryWarning()
    // Dispose of any resources that can be recreated.
}


/*
// MARK: - Navigation

// In a storyboard-based application, you will often want to do a little preparation before navigation
override func prepareForSegue(segue: UIStoryboardSegue, sender: AnyObject?) {
    // Get the new view controller using segue.destinationViewController.
    // Pass the selected object to the new view controller.
}
*/

}

我当前的代码产生致命错误:在解包可选值时意外发现nil。我认为这是因为我的prepareForSegue方法设置了一些尚不存在的东西,但我不知道如何将图像转换为所需的DetailViewController。

如何达到理想的效果?

2 个答案:

答案 0 :(得分:2)

我的解决方案使用了上​​述用户(Dharmesh Kheni)和DBCamera custom camera github的设计模式。

AddPhotoViewController

    @IBAction func takePhotoBtn(sender: AnyObject) {


    if let videoConnection = stillImageOutput!.connectionWithMediaType(AVMediaTypeVideo) {

        videoConnection.videoOrientation = AVCaptureVideoOrientation.Portrait

        stillImageOutput?.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: {(sampleBuffer, error) in

            if (sampleBuffer != nil) {
                let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)

                // Setup class variable --> imgMetaData: NSData!
                // Assign and transport to destination ViewController
                self.imgMetaData = imageData

                self.performSegueWithIdentifier("captureSessionDetailSegue", sender: self)
            }
        })
    }
}
    override func prepareForSegue(segue: UIStoryboardSegue, sender: AnyObject?) {

    if segue.identifier == "captureSessionDetailSegue" {

        let destination = segue.destinationViewController as! CaptureSessionDetailViewController

        destination.capturedImageMetaData = self.imgMetaData


    }
}

CaptureSessionDetailViewController

class CaptureSessionDetailViewController: UIViewController {


var capturedImageMetaData: NSData!


@IBOutlet var capturedImage: UIImageView!

override func viewDidLoad() {
    super.viewDidLoad()

    let dataProvider = CGDataProviderCreateWithCFData(capturedImageMetaData)
    let cgImageRef = CGImageCreateWithJPEGDataProvider(dataProvider, nil, true, CGColorRenderingIntent.RenderingIntentDefault)

    let img = UIImage(CGImage: cgImageRef!, scale: 1.0, orientation: UIImageOrientation.Right)


    capturedImage.image = img


    // Do any additional setup after loading the view.
}

来自AVCaptureStillImageOutput的图片数据已分配给imgMetaData: NSData!中的类变量AddPhotoViewController。数据已使用prepareForSegue传输到目标视图控制器CaptureSessionDetailViewController并存储在capturedImageMEtaData: NSData!中。然后,数据在UIImage方法中转换为viewDidLoad

答案 1 :(得分:0)

不要直接分配图像,如:

destination.capturedImage.image = self.imageDetail

但是声明另一个将您的图片保存到CaptureSessionDetailViewController的实例,如下所示:

var capturedImageRef = UIImage()

现在,您可以将CaptureSessionDetailViewController的图片从AddPhotoViewController分配到Segue方法:

destination.capturedImageRef = self.imageDetail

现在viewDidLoad的{​​{1}}你可以将该图片分配给CaptureSessionDetailViewController

imageView