AVCaptureSession真的很慢

时间:2015-12-02 10:00:53

标签: ios swift

我无法弄清楚为什么我的相机应用程序使用AVFoundation框架拍摄的速度非常慢。

每个捕获会话大约需要2/3秒。 我认为这是一个记忆问题。

我使用的是Swift 1.1。 enter image description here

我使用过的代码:

   class ViewController: UIViewController {
    let captureSession = AVCaptureSession()
    var previewLayer : AVCaptureVideoPreviewLayer?
    var captureDevice : AVCaptureDevice?
    var captureConnection: AVCaptureConnection?
    var stillImageOutput = AVCaptureStillImageOutput()
    let targetRegion = CALayer()
    var currentImage: UIImage?

    @IBOutlet weak var cameraView: UIImageView!
    @IBOutlet weak var imageDisplayed: UIImageView!


    override func viewDidLoad() {
        super.viewDidLoad()

        navigationController?.setNavigationBarHidden(true, animated: true)

        captureSession.sessionPreset = AVCaptureSessionPreset1920x1080
        let devices = AVCaptureDevice.devices()
        for device in devices {
            if device.hasMediaType(AVMediaTypeVideo) {
                if device.position == AVCaptureDevicePosition.Back {
                    captureDevice = device as? AVCaptureDevice
                }
            }
        }
        if captureDevice != nil {
            println("Device trovato")
            beginSession()
        }


    }


    func beginSession() {

        var err: NSError? = nil
        captureSession.addInput(AVCaptureDeviceInput(device: captureDevice, error: &err))

        if err != nil {
            println("err \(err?.localizedDescription)")
            return
        }

        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        self.view.layer.addSublayer(previewLayer)

        previewLayer?.frame = CGRect(x: cameraView.frame.origin.x, y: cameraView.frame.origin.y, width: cameraView.frame.size.width, height: cameraView.frame.size.height)

        captureSetup()
        captureSession.startRunning()
    }

    func captureSetup() {

        let outputSetting = NSDictionary(dictionary: [AVVideoCodecKey: AVVideoCodecJPEG])
        self.stillImageOutput.outputSettings = outputSetting
        self.captureSession.addOutput(stillImageOutput)
        for connection:AVCaptureConnection in self.stillImageOutput.connections as [AVCaptureConnection] {

            for port:AVCaptureInputPort in connection.inputPorts! as [AVCaptureInputPort] {
                if port.mediaType == AVMediaTypeVideo {
                    captureConnection = connection as AVCaptureConnection
                    break
                }
            }
            if captureConnection != nil {
                break
            }
        }
    }


    var i = 0;

    func captureScene() {


        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), {
            if self.captureConnection != nil {
                self.stillImageOutput.captureStillImageAsynchronouslyFromConnection(self.captureConnection, completionHandler:{ (imageSampleBuffer:CMSampleBuffer!, _) -> Void in

                    let imageDataJpeg = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageSampleBuffer)
                    var pickedImage: UIImage = UIImage(data:imageDataJpeg)!



                    if let data = UIImagePNGRepresentation(pickedImage) {


                        let filename = self.getDocumentsDirectory().stringByAppendingPathComponent("\(self.i).png")
                        data.writeToFile(filename, atomically: true)

                        self.i++
                    }


                })
            }
        })

    }



    func getDocumentsDirectory() -> NSString {
        let paths = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)
        let documentsDirectory: AnyObject = paths[0]


        return documentsDirectory as NSString
    }




    override func touchesBegan(touches: NSSet, withEvent event: UIEvent) {
        captureScene()
    }

}

0 个答案:

没有答案