捕获会话启动缓慢

时间:2015-05-22 01:32:45

标签: ios swift

我想知道为什么我的捕获会话在应用启动时缓慢启动。每次启动应用程序时都不会发生这种情况,因此我不确定它是否只是实际手机的其他变量或其他内容。我不是一个非常好的并发/并行程序员,所以它很可能是我糟糕的编码:(

如果有人能够确定有时会让它变慢的话,我会非常感激。我已经读过来自捕获会话的所有调用都可以阻塞,所以我已经尽力将这些调用发送到另一个队列而没有任何竞争条件。我正在学习如何以快速形式here

进行编码

这是我的代码,我初始化并启动所有内容:我的队列是串行队列

/**************************************************************************
    VIEW DID LOAD
    ***************************************************************************/
    override func viewDidLoad() {

        super.viewDidLoad()

        println("Initializing the cameraCaptureDevice with MediaTypeVideo")

        //------INIT CAMERA CAPTURE DEVICE TO BEGIN WITH------
        self.cameraCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)

        println("Done initializing camera")

        var error1: NSError? = nil

        println("Getting array of available capture devices")

        //------GRAB ALL OF THE DEVICES------
        let devices = AVCaptureDevice.devices()

        //------FIND THE CAMERA MATCHING THE POSITION------
        for device in devices {

            if device.position == self.cameraCapturePosition {

                self.cameraCaptureDevice = device as? AVCaptureDevice

                println("Back camera has been added")

                self.usingBackCamera = true
            }
        }

        //------ INIT MOVIE FILE OUTPUT ------
        self.movieFileOutput = AVCaptureMovieFileOutput()

        //------SET UP PREVIEW LAYER-----
        self.videoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.session)

        if let preview = self.videoPreviewLayer {

            println("Video Preview Layer set")

            preview.videoGravity = AVLayerVideoGravityResizeAspectFill
        }
        else {

            println("Video Preview Layer is nil!!! Could not set AVLayerVideoGravityResizeAspectFill")
        }

        println("Camera successully can display")

        //------SET JPEG OUTPUT------
        println("Setting JPEG Output")

        self.stillImageOutput = AVCaptureStillImageOutput()

        let outputSettings = [ AVVideoCodecKey : AVVideoCodecJPEG ]

        if let imageOutput = self.stillImageOutput {

            imageOutput.outputSettings = outputSettings
        }
        else {

            println("still image output is nil, could notset output settings")
        }

        println("Successfully configured JPEG Ouput")

        //------SET MOVIE FILE OUPUT MAX DURATION AND MIN FREE DISK SPACE------
        println("Setting Movie File Max Duration")

        let maxDuration:CMTime = CMTimeMakeWithSeconds(self.totalTime, self.preferredTimeScale)

        if let movieOutput = self.movieFileOutput {

            movieOutput.maxRecordedDuration = maxDuration

            println("Successully set movie file max duration")
            println("Setting movie file minimun byte space")

            movieOutput.minFreeDiskSpaceLimit = self.minFreeSpace

            println("Successfully added minium free space")
        }
        else {

            println("Movie file output is nil, could not set maximum recording duration or minimum free space")
        }

        //------  GRAB THE DEVICE'S SUPPORTED FRAME RATE RANGES ------
        if let device = self.cameraCaptureDevice {

            println("Setting frame rates")

            let supportedFrameRateRanges = device.activeFormat.videoSupportedFrameRateRanges

            for range in supportedFrameRateRanges {

                // Workaround until finding a better way
                // frame rate should be 1 - 30

                if (range.minFrameRate >= 1 || range.minFrameRate <= 30) == true && (range.maxFrameRate <= 30 || range.maxFrameRate >= 1) == true {

                    println("Frame rate is supported")

                    self.frameRateSupported = true
                }
                else {

                    println("Frame rate is not supported")

                    self.frameRateSupported = false
                }
            }

            var error: NSError?

            if frameRateSupported && device.lockForConfiguration(&error) {

                device.activeVideoMaxFrameDuration = self.frameDuration
                device.activeVideoMinFrameDuration = self.frameDuration
                device.unlockForConfiguration()

                println("SUCCESS")
            }
            else {

                println("frame rate is not supported or there was an error")

                if let err = error {

                    println("There was an error setting framerate: \(err.description)")
                }
                else {

                    println("Frame rate is not supported")
                }
            }
        }
        else {

            println("camera capture device is nil, could not set frame rate")
        }

        //------ INIT AUDIO CAPTURE DEVICE ------
        self.audioCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)

        var error2: NSError? = nil
        let audioDeviceInput = AVCaptureDeviceInput(device: self.audioCaptureDevice, error: &error2)

        //------ADD CAMERA CAPTURE DEVICE TO CAPTURE SESSION INPUT------
        if let captureDevice = self.cameraCaptureDevice {

            if error1 == nil {

                println("Trying to add video input")

                self.videoDeviceInput = AVCaptureDeviceInput(device: captureDevice, error: &error1)

            }
            else {

                println("Could not create video input")
            }
        }
        else {

            println("Could not create camera capture device")
        }

        //------ ADD INPUTS AND OUTPUTS  AS WELL AS OTHER SESSION CONFIGURATIONS------
        dispatch_async(self.sessionQueue) {

            println("Trying to add audio output")

            if let input = audioDeviceInput {

                self.session.addInput(audioDeviceInput)

                println("Successfully added audio output")
            }
            else {
                println("Could not create audio input")
            }

            if self.session.canAddInput(self.videoDeviceInput) {

                self.session.addInput(self.videoDeviceInput)

                println("Successfully added video input")
            }
            else {

                println("Could not add video input")
            }


            println("initializing video capture session")

            //----- SET THE IMAGE QUALITY / RESOLUTION -----
            //Options:
            //  AVCaptureSessionPresetHigh - Highest recording quality (varies per device)
            //  AVCaptureSessionPresetMedium - Suitable for WiFi sharing (actual values may change)
            //  AVCaptureSessionPresetLow - Suitable for 3G sharing (actual values may change)
            //  AVCaptureSessionPreset640x480 - 640x480 VGA (check its supported before setting it)
            //  AVCaptureSessionPreset1280x720 - 1280x720 720p HD (check its supported before setting it)
            //  AVCaptureSessionPresetPhoto - Full photo resolution (not supported for video output)

            if self.session.canSetSessionPreset(AVCaptureSessionPresetHigh) {

                println("Capture Session preset is set to High Quality")

                self.session.sessionPreset = AVCaptureSessionPresetHigh
            }
            else {

                println("Capture Session preset is set to Medium Quality")

                self.session.sessionPreset = AVCaptureSessionPresetMedium
            }

            //------ADD JPEG OUTPUT AND MOVIE FILE OUTPUT TO SESSION OUTPUT------
            println("Adding still image and movie file output")

            if self.session.canAddOutput(self.stillImageOutput) && self.session.canAddOutput(self.movieFileOutput) {

                self.session.addOutput(self.stillImageOutput)
                self.session.addOutput(self.movieFileOutput)

                println("Successfully added outputs")
            }
            else {

                //------ IF OUTPUTS COULD NOT BE ADDED, THEN APP SHOULD NOT RUN ON DEVICE!!!!! ------
                println("Could Not Add still image and movie file output")
            }

            //------WE CALL A METHOD AS IT ALSO HAS TO BE DONE AFTER CHANGING CAMERA------
            self.setCameraOutputProperties()

            //------DISPLAY PREVIEW LAYER------
            if let videoLayer = self.videoPreviewLayer {

                self.videoPreviewView.layer.addSublayer(self.videoPreviewLayer)

                println("Video Preview Layer Added as sublayer")

                self.videoPreviewLayer!.frame = self.videoPreviewView.layer.frame

                println("Video Preview frame set")
            }
            else {

                println("videoPreviewLayer is nil, could not add sublayer or set frame")
            }

            self.view.sendSubviewToBack(self.videoPreviewView)
        }
    }






    /**************************************************************************
    VIEW DID APPEAR
    ***************************************************************************/
    override func viewDidAppear(animated: Bool) {

        println("About to start the capture session")

        //------INITIALIZE THE CAMERA------
        dispatch_async(self.startSessionQueue) {

            if self.beenHereBefore == false {

                println("Have not seen this view before.... starting the session")

                //------ START THE PREVIEW SESSION ------
                self.startSession()

                /*
                CHECK TO MAKE SURE THAT THIS CODE IS REALLY NEEDED FOR AUTHORIZATION
                */

                // ----- SET MEDIA TYPE ------
                var mediaTypeVideo = AVMediaTypeVideo

                AVCaptureDevice.requestAccessForMediaType(mediaTypeVideo, completionHandler: { (granted) -> Void in

                    //------ GRANTED ACCESS TO MEDIATYPE ------
                    if granted {

                        self.deviceAuthorized = AVAuthorizationStatus.Authorized
                    }
                        //------ NOT GRANTED ACCESS TO MEDIATYPE ------
                    else {

                        dispatch_async(dispatch_get_main_queue()) {

                            UIAlertView(title: "CopWatch", message: "CopWatch does not have permission to use the camera, please change your privacy settings.", delegate: self, cancelButtonTitle: "OK")

                            self.deviceAuthorized = AVAuthorizationStatus.Denied

                            dispatch_resume(dispatch_get_main_queue())
                        }
                    }

                })

            }
            else {

                println("Been Here Before")

                self.session.startRunning()
            }

            self.weAreRecording = false
        }
    }

这是启动视频预览的方法

/**************************************************************************
    START SESSION
    **************************************************************************/
    func startSession() {

        println("Checking to see if the session is already running before starting the session")

        //------ START SESSION IF IT IS NOT ALREADY RUNNING------

        if !self.session.running {

            //------START CAMERA------
            println("Session is not already running, starting the session now")

            self.session.startRunning()
            self.isSessionRunning = true

            println("Capture Session initiated")
        }
        else {

            println("Session is already running, no need to start it again")
        }
    }

1 个答案:

答案 0 :(得分:1)

似乎我找到了答案。

我将videoPreviewLayer添加为子视图,并在异步调度调用中将其发送到视图的后面。显然,应用程序不喜欢这样,导致启动的速度非常非常慢。

我移动此代码

//------DISPLAY PREVIEW LAYER------
        if let videoLayer = self.videoPreviewLayer {

            self.videoPreviewView.layer.addSublayer(self.videoPreviewLayer)

            println("Video Preview Layer Added as sublayer")

            self.videoPreviewLayer!.frame = self.videoPreviewView.layer.frame

            println("Video Preview frame set")
        }
        else {

            println("videoPreviewLayer is nil, could not add sublayer or set frame")
        }

        self.view.sendSubviewToBack(self.videoPreviewView)

就像这样:

//------SET UP PREVIEW LAYER-----
        self.videoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.session)

        if let preview = self.videoPreviewLayer {

            println("Video Preview Layer set")

            preview.videoGravity = AVLayerVideoGravityResizeAspectFill
        }
        else {

            println("Video Preview Layer is nil!!! Could not set AVLayerVideoGravityResizeAspectFill")
        }

        println("Camera successully can display")

        //------DISPLAY PREVIEW LAYER------
        if let videoLayer = self.videoPreviewLayer {

            self.videoPreviewView.layer.addSublayer(self.videoPreviewLayer)

            println("Video Preview Layer Added as sublayer")

            self.videoPreviewLayer!.frame = self.videoPreviewView.layer.frame

            println("Video Preview frame set")

            self.view.sendSubviewToBack(self.videoPreviewView)
        }
        else {

            println("videoPreviewLayer is nil, could not add sublayer or set frame")
        }

我应该能够看到这个问题,但我想这是在错误的时间进行优化时会发生的情况。现在它非常敏感。

故事的道德,如果您使用AVFoundation进行编程,请不要在异步队列中的当前视图控制器中设置和添加视频预览图层作为视图的子视图。