Hold down a button to record video with AVFoundation, Swift 3

时间:2017-07-20 14:37:03

标签: ios swift video avfoundation video-capture

I'm trying to figure out how to record video with AVFoundation in Swift. I've already created a custom camera, but I only know how to take still pictures with it, and I can't figure out how to record video. I hope you can help me solve this.

I want to hold down the takePhotoButton to record a video, and then preview it where I currently preview my still photos. Your help will really let me continue my project. Thanks a lot!

import UIKit
import AVFoundation

@available(iOS 10.0, *)
class CameraViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    // Outlets referenced below; assumed to be wired up in the storyboard.
    @IBOutlet weak var containerView: UIView!
    @IBOutlet weak var DismissButton: UIButton!

    let photoSettings = AVCapturePhotoSettings()
    var audioPlayer = AVAudioPlayer()
    var captureSession = AVCaptureSession()
    var videoDeviceInput: AVCaptureDeviceInput!
    var previewLayer = AVCaptureVideoPreviewLayer()
    var frontCamera: Bool = false
    var captureDevice:AVCaptureDevice!
    var takePhoto = false

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        prepareCamera()
    }

    func prepareCamera() {
        captureSession.sessionPreset = AVCaptureSessionPresetPhoto

        if let availableDevices = AVCaptureDeviceDiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: .back).devices {
            captureDevice = availableDevices.first
            beginSession()
        } 
    }

    func frontCamera(_ front: Bool) {
        let devices = AVCaptureDevice.devices()

        // Remove the current camera input before switching devices.
        if let currentInput = captureSession.inputs.first as? AVCaptureDeviceInput {
            captureSession.removeInput(currentInput)
        }

        for device in devices! {
            if (device as AnyObject).hasMediaType(AVMediaTypeVideo) {
                if front {
                    if (device as AnyObject).position == AVCaptureDevicePosition.front {
                        captureDevice = device as? AVCaptureDevice

                        do {
                            try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice!))
                        } catch {
                            print("Could not add front camera input: \(error)")
                        }
                        break
                    }
                } else {
                    if (device as AnyObject).position == AVCaptureDevicePosition.back {
                        captureDevice = device as? AVCaptureDevice

                        do {
                            try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice!))
                        } catch {
                            print("Could not add back camera input: \(error)")
                        }
                        break
                    }
                }
            }
        }
    }

    func beginSession() {
        do {
            // Attach the camera input before configuring outputs.
            let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
            captureSession.addInput(captureDeviceInput)
        } catch {
            print("Could not create the capture device input: \(error)")
            return
        }

        if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
            self.previewLayer = previewLayer
            containerView.layer.addSublayer(previewLayer)
            self.previewLayer.frame = self.view.layer.frame
            self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait
            captureSession.startRunning()

            let dataOutput = AVCaptureVideoDataOutput()
            dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString): NSNumber(value: kCVPixelFormatType_32BGRA)]
            dataOutput.alwaysDiscardsLateVideoFrames = true

            if captureSession.canAddOutput(dataOutput) {
                captureSession.addOutput(dataOutput)

                photoSettings.isHighResolutionPhotoEnabled = true
                photoSettings.isAutoStillImageStabilizationEnabled = true
            }

            captureSession.commitConfiguration()

            let queue = DispatchQueue(label: "com.NightOut.captureQueue")
            dataOutput.setSampleBufferDelegate(self, queue: queue)
        }
    }

    @IBAction func takePhoto(_ sender: Any) {
        takePhoto = true

        photoSettings.isHighResolutionPhotoEnabled = true
        photoSettings.isAutoStillImageStabilizationEnabled = true
    }

    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        if takePhoto {
            takePhoto = false
            if let image = self.getImageFromSampleBuffer(buffer: sampleBuffer) {
                let photoVC = UIStoryboard(name: "Main", bundle: nil).instantiateViewController(withIdentifier: "PhotoVC") as! PhotoPreviewViewController

                photoVC.takenPhoto = image

                DispatchQueue.main.async {
                    self.present(photoVC, animated: true, completion: {
                        self.stopCaptureSession()
                    })
                }
            }  
        }
    }

    func getImageFromSampleBuffer(buffer: CMSampleBuffer) -> UIImage? {
        if let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) {
            let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
            let context = CIContext()

            let imageRect = CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer), height: CVPixelBufferGetHeight(pixelBuffer))

            if let image = context.createCGImage(ciImage, from: imageRect) {
                return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .leftMirrored)
            }
        }
        return nil
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)

        self.captureSession.stopRunning()
    }

    func stopCaptureSession () {
        self.captureSession.stopRunning()

        if let inputs = captureSession.inputs as? [AVCaptureDeviceInput] {
            for input in inputs {
                self.captureSession.removeInput(input)
            }
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    @IBAction func DismissButtonAction(_ sender: UIButton) {

        UIView.animate(withDuration: 0.1, animations: {
            self.DismissButton.transform = CGAffineTransform.identity.scaledBy(x: 0.8, y: 0.8)
        }, completion: { (finish) in
            UIView.animate(withDuration: 0.1, animations: {
                self.DismissButton.transform = CGAffineTransform.identity
            })
        })
        performSegue(withIdentifier: "Segue", sender: nil)
    }
}

2 Answers:

Answer 0 (score: 4)

Detecting when a button is held down and released can be done in different ways. The simplest is to add targets for UIControlEvents.touchUpInside and UIControlEvents.touchDown, as shown below.

aButton.addTarget(self, action: #selector(holdRelease(sender:)), for: .touchUpInside)
aButton.addTarget(self, action: #selector(holdDown(sender:)), for: .touchDown)

// target functions
func holdDown(sender: UIButton) {
    // Start recording the video
}

func holdRelease(sender: UIButton) {
    // Stop recording the video
}

There are other approaches as well, such as adding a long-press gesture recognizer to the button and starting/stopping recording based on the recognizer's state; a minimal sketch follows. More information can be found in another SO answer, UIButton with hold down action and release action.
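For reference, here is a hedged sketch of that gesture-recognizer variant, assuming the recognizer is attached to the question's takePhotoButton; the recordGestureChanged name is illustrative, not from the original answer.

// Attach in viewDidLoad; minimumPressDuration defaults to 0.5 seconds.
let longPress = UILongPressGestureRecognizer(target: self,
                                             action: #selector(recordGestureChanged(_:)))
takePhotoButton.addGestureRecognizer(longPress)

func recordGestureChanged(_ recognizer: UILongPressGestureRecognizer) {
    switch recognizer.state {
    case .began:
        // Press-and-hold detected: start recording here.
        break
    case .ended, .cancelled:
        // Finger lifted or gesture interrupted: stop recording here.
        break
    default:
        break
    }
}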

Video recording

You need to add an AVCaptureMovieFileOutput to your capture session and start video recording with the startRecordingToOutputFileURL method.

Things to note

  • Implement the AVCaptureFileOutputRecordingDelegate methods to be notified of the didStart and didFinish recording callbacks (a sketch follows this list).
  • The file path should be meaningful: provide a valid file path that your app has permission to write to.
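A minimal sketch of those delegate callbacks in Swift 3, assuming the question's CameraViewController acts as the delegate (the print statements are placeholders):

extension CameraViewController: AVCaptureFileOutputRecordingDelegate {

    // Optional callback: fires once the file output actually starts writing.
    func capture(_ captureOutput: AVCaptureFileOutput!,
                 didStartRecordingToOutputFileAt fileURL: URL!,
                 fromConnections connections: [Any]!) {
        print("Started recording to \(fileURL)")
    }

    // Required callback: fires when recording stops, normally or with an error.
    func capture(_ captureOutput: AVCaptureFileOutput!,
                 didFinishRecordingToOutputFileAt outputFileURL: URL!,
                 fromConnections connections: [Any]!,
                 error: Error!) {
        if let error = error {
            print("Recording failed: \(error)")
            return
        }
        // outputFileURL now points at the finished movie file.
        print("Finished recording to \(outputFileURL)")
    }
}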

Use this code in the holdDown() method to start recording:

let videoFileOutput = AVCaptureMovieFileOutput()
self.captureSession.addOutput(videoFileOutput)

let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
let filePath = documentsURL.appendingPathComponent("tempMovie")
videoFileOutput.startRecording(toOutputFileURL: filePath, recordingDelegate: self)

To stop recording, use videoFileOutput.stopRecording().
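The question also wants to preview the clip the way the still photo is previewed. One hedged option, not part of this answer, is a small helper method on the camera controller that presents an AVPlayerViewController from the didFinish delegate callback; the AVKit import and the presentPreview name are assumptions.

import AVKit

// Hypothetical helper on the camera view controller: call it from the
// didFinishRecordingToOutputFileAt delegate callback with the movie's URL.
func presentPreview(for recordedURL: URL) {
    let playerVC = AVPlayerViewController()
    playerVC.player = AVPlayer(url: recordedURL)
    DispatchQueue.main.async {
        self.present(playerVC, animated: true) {
            playerVC.player?.play()
        }
    }
}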

Answer 1 (score: 0)

You need to use AVCaptureMovieFileOutput. Add an AVCaptureMovieFileOutput to your capture session with addOutput(_:).
Start recording

You start recording a QuickTime movie with startRecording(to:recordingDelegate:). You need to supply a file-based URL and a delegate. The URL must not identify an existing file, because the movie file output does not overwrite existing resources. You must also have permission to write to the specified location. The delegate must conform to the AVCaptureFileOutputRecordingDelegate protocol and must implement the fileOutput(_:didFinishRecordingTo:from:error:) method.
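Putting those quoted requirements together, a minimal sketch; startMovieRecording and the UUID-based file name are illustrative, and the session and delegate conformance are assumed from the question and the first answer.

let movieOutput = AVCaptureMovieFileOutput()

func startMovieRecording() {
    // Add the output once, before the first recording.
    if captureSession.canAddOutput(movieOutput) {
        captureSession.addOutput(movieOutput)
    }

    // Use a unique name per take: the output will not overwrite existing files.
    let documentsURL = FileManager.default.urls(for: .documentDirectory,
                                                in: .userDomainMask)[0]
    let fileURL = documentsURL.appendingPathComponent("\(UUID().uuidString).mov")

    // Swift 3 spelling of the startRecording(to:recordingDelegate:) call quoted above.
    movieOutput.startRecording(toOutputFileURL: fileURL, recordingDelegate: self)
}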

See the docs for more details.