以120 / 240fps保存视频

时间:2015-11-17 09:54:21

标签: ios swift camera avcapture

我正在创建一个应用程序,以设备最大帧速率录制视频(即iPhone 5s中的120fps和6和6s中的240fps)。我已设法配置AVCaptureDevice以设置maxFrameRateDuration,我将currentDevice.activeFormat.videoSupportedFrameRateRanges打印到日志中,一切正常。

但是当我尝试保存视频时,它会保存它,但是以正常的帧速率,而不是120或240fps。

拜托,有人可以帮我吗?任何帮助将不胜感激。

提前致谢。

P.S。:这是我到目前为止的完整代码

import UIKit
import AVFoundation
import AVKit
import AssetsLibrary

class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {

    @IBOutlet weak var cameraButton:UIButton!

    let captureSession = AVCaptureSession()
    var currentDevice:AVCaptureDevice?
    var videoFileOutput:AVCaptureMovieFileOutput?
    var cameraPreviewLayer:AVCaptureVideoPreviewLayer?
    var outputPath: String = ""
    var backgroundRecordId: UIBackgroundTaskIdentifier = UIBackgroundTaskInvalid

    var isRecording = false

    override func viewDidLoad() {
        super.viewDidLoad()

        // Preset the session for taking photo in full resolution
        captureSession.sessionPreset = AVCaptureSessionPresetHigh

        // Get the available devices that is capable of taking video
        let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) as! [AVCaptureDevice]

        // Get the back-facing camera for taking videos
        for device in devices {
            if device.position == AVCaptureDevicePosition.Back {
            currentDevice = device
                configureDevice()
            }
        }

        let captureDeviceInput:AVCaptureDeviceInput
        do {
            captureDeviceInput = try AVCaptureDeviceInput(device: currentDevice)
        } catch {
            print(error)
            return
        }

        // Configure the session with the output for capturing video
        videoFileOutput = AVCaptureMovieFileOutput()

        // Configure the session with the input and the output devices
        captureSession.addInput(captureDeviceInput)
        captureSession.addOutput(videoFileOutput)

        // Provide a camera preview
        cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        view.layer.addSublayer(cameraPreviewLayer!)
        cameraPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
        cameraPreviewLayer?.frame = view.layer.frame

        // Bring the camera button to front
        view.bringSubviewToFront(cameraButton)
        captureSession.startRunning()    
    }

    func configureDevice() {

        var bestFormat: AVCaptureDeviceFormat? = nil
        var bestFrameRateRange: AVFrameRateRange? = nil
        var bestPixelArea: Int32 = 0
        for format in currentDevice!.formats {
            let dims: CMVideoDimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription)
            let pixelArea: Int32 = dims.width * dims.height
            let ranges = format.videoSupportedFrameRateRanges as! [AVFrameRateRange];
            for range in ranges {
                //print ("[",dims.width,",",dims.width,"] : ",range.maxFrameRate);
                if bestFrameRateRange==nil || range.maxFrameRate > bestFrameRateRange!.maxFrameRate || ((range.maxFrameRate == bestFrameRateRange!.maxFrameRate) && (pixelArea > bestPixelArea)) {
                    bestFormat = format as? AVCaptureDeviceFormat
                    bestFrameRateRange = range
                    bestPixelArea = pixelArea
                }
            }
        }            

        do {

            try currentDevice!.lockForConfiguration() {

            currentDevice!.activeFormat = bestFormat
            currentDevice!.activeVideoMinFrameDuration = bestFrameRateRange!.minFrameDuration
            currentDevice!.activeVideoMaxFrameDuration = bestFrameRateRange!.minFrameDuration              

        }
        catch{}         

        print(currentDevice!.activeFormat.videoSupportedFrameRateRanges)

        currentDevice!.unlockForConfiguration()            
    }    


    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    // MARK: - AVCaptureFileOutputRecordingDelegate methods

    func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {


        if error != nil {
            print(error)
            return
        }

        let backgroundRecordId: UIBackgroundTaskIdentifier = self.backgroundRecordId
        self.backgroundRecordId = UIBackgroundTaskInvalid

        ALAssetsLibrary().writeVideoAtPathToSavedPhotosAlbum(outputFileURL, completionBlock: {
            (assetURL:NSURL!, error:NSError!) in
            if error != nil{
                print(error)

            }

            do {
                try NSFileManager.defaultManager().removeItemAtURL(outputFileURL)
            } catch _ {
            }

            if backgroundRecordId != UIBackgroundTaskInvalid {
                UIApplication.sharedApplication().endBackgroundTask(backgroundRecordId)
            }

        })
        performSegueWithIdentifier("playVideo", sender: outputFileURL)
    }

    // MARK: - Segue methods

    override func prepareForSegue(segue: UIStoryboardSegue, sender: AnyObject?) {
        if segue.identifier == "playVideo" {
            let videoPlayerViewController = segue.destinationViewController as! AVPlayerViewController
            let videoFileURL = sender as! NSURL
            videoPlayerViewController.player = AVPlayer(URL: videoFileURL)

        }
    }


    // MARK: - Action methods

    @IBAction func unwindToCamera(segue:UIStoryboardSegue) {

    }

    @IBAction func capture(sender: AnyObject) {
        if !isRecording {
            isRecording = true

            UIView.animateWithDuration(0.5, delay: 0.0, options: [.Repeat, .Autoreverse, .AllowUserInteraction], animations: { () -> Void in
                self.cameraButton.transform = CGAffineTransformMakeScale(0.5, 0.5)
                }, completion: nil)

            let outputPath: String = NSTemporaryDirectory() + "output.mov"
            let outputFileURL = NSURL(fileURLWithPath: outputPath)
            videoFileOutput?.startRecordingToOutputFileURL(outputFileURL, recordingDelegate: self)
        } else {
            isRecording = false

            UIView.animateWithDuration(0.5, delay: 1.0, options: [], animations: { () -> Void in
                self.cameraButton.transform = CGAffineTransformMakeScale(1.0, 1.0)
                }, completion: nil)
            cameraButton.layer.removeAllAnimations()

            videoFileOutput?.stopRecording()

        }
    }        
}

2 个答案:

答案 0 :(得分:1)

您过早地调用configDevice()并且您的配置正在被替换。

在您添加了捕获设备的输入后致电configDevice()

// Configure the session with the input and the output devices
captureSession.addInput(captureDeviceInput)
configureDevice()

答案 1 :(得分:1)

您的问题已经很老了,但我仍然想添加一些内容。 我认为您必须将两者都设置为maxFrameDuration。 尝试一下:

   currentDevice!.activeVideoMinFrameDuration = bestFrameRateRange!.maxFrameDuration
   currentDevice!.activeVideoMaxFrameDuration = bestFrameRateRange!.maxFrameDuration