Automatically recording video with a custom camera in Swift 3.0

Date: 2017-02-08 11:22:17

Tags: ios, swift

How can I record video in iOS automatically, without the user interacting with any camera controls? The requirement is to start recording from the front camera as soon as the view opens, with the camera controls disabled, and to have the video recorded and saved automatically from that view.
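
Note that before any automatic recording can start, the app needs the usual capture permissions: NSCameraUsageDescription (and NSMicrophoneUsageDescription if audio is also captured) must be present in Info.plist, and access can be requested before the capture session is built. A minimal Swift 3 sketch, where the helper name requestCaptureAccess is illustrative and not part of either answer below:

    import AVFoundation

    /// Requests camera and microphone access, then reports the combined result
    /// on the main queue. Call this before building the capture session.
    func requestCaptureAccess(_ completion: @escaping (Bool) -> Void) {
        AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo) { videoGranted in
            AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeAudio) { audioGranted in
                DispatchQueue.main.async {
                    completion(videoGranted && audioGranted)
                }
            }
        }
    }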

2 Answers:

Answer 0 (score: 9)

Swift 3.0

I finally solved the problem. Just copy and paste the whole code below and connect the outlet; it works fine.

import UIKit
import AVFoundation
import SVProgressHUD

// Records from the front camera automatically as soon as the view loads.
class TestViewController: UIViewController {

        @IBOutlet weak var myView: UIView!

        var session: AVCaptureSession?
        var userreponsevideoData = NSData()
        var userreponsethumbimageData = NSData()

        override func viewDidLoad() {
            super.viewDidLoad()
            createSession()
        }

        override func viewDidAppear(animated: Bool) {
            super.viewDidAppear(animated)
        }

        func createSession() {

            var input: AVCaptureDeviceInput?
            let movieFileOutput = AVCaptureMovieFileOutput()
            var prevLayer: AVCaptureVideoPreviewLayer?
            session = AVCaptureSession()
            // Wrap the front camera in a device input; bail out if that fails.
            do {
                input = try AVCaptureDeviceInput(device: self.cameraWithPosition(.Front)!)
            } catch {
                print("camera input error: \(error)")
                return
            }
            session?.addInput(input)
            prevLayer = AVCaptureVideoPreviewLayer(session: session)
            prevLayer?.frame.size = myView.frame.size
            prevLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
            prevLayer?.connection.videoOrientation = .Portrait
            myView.layer.addSublayer(prevLayer!)
            let documentsURL = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
            let  filemainurl = NSURL(string: ("\(documentsURL.URLByAppendingPathComponent("temp")!)" + ".mov"))


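            // CMTimeMake(600, 10) = 600/10 seconds, i.e. the recording is capped at 60 seconds.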
            let maxDuration: CMTime = CMTimeMake(600, 10)
            movieFileOutput.maxRecordedDuration = maxDuration
            movieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024
            if self.session!.canAddOutput(movieFileOutput) {
                self.session!.addOutput(movieFileOutput)
            }
            session?.startRunning()
            movieFileOutput.startRecordingToOutputFileURL(filemainurl, recordingDelegate: self)

        }
        func cameraWithPosition(position: AVCaptureDevicePosition) -> AVCaptureDevice? {
            let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
            for device in devices {
                if device.position == position {
                    return device as? AVCaptureDevice
                }
            }
            return nil
        }
        @IBAction func pressbackbutton(sender: AnyObject) {
            session?.stopRunning()

        }

    }
    extension TestViewController: AVCaptureFileOutputRecordingDelegate
    {
        func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: NSURL!, fromConnections connections: [AnyObject]!) {
            print(fileURL)
        }

        func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
            print(outputFileURL)
            let filemainurl = outputFileURL

            do
            {
                let asset = AVURLAsset(URL: filemainurl, options: nil)
                print(asset)
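                // Grab the first frame of the recorded movie to use as a thumbnail.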
                let imgGenerator = AVAssetImageGenerator(asset: asset)
                imgGenerator.appliesPreferredTrackTransform = true
                let cgImage = try imgGenerator.copyCGImageAtTime(CMTimeMake(0, 1), actualTime: nil)
                let uiImage = UIImage(CGImage: cgImage)
                userreponsethumbimageData = NSData(contentsOfURL: filemainurl)!
                print(userreponsethumbimageData.length)
                print(uiImage)
                // imageData = UIImageJPEGRepresentation(uiImage, 0.1)
            }
            catch let error as NSError
            {
                print(error)
                return
            }

            SVProgressHUD.showWithMaskType(SVProgressHUDMaskType.Clear)
            let VideoFilePath = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("mergeVideo\(arc4random()%1000)d")!.URLByAppendingPathExtension("mp4")!.absoluteString

            if NSFileManager.defaultManager().fileExistsAtPath(VideoFilePath!) {
                do {
                    try NSFileManager.defaultManager().removeItemAtPath(VideoFilePath!)
                } catch { }
            }
            let tempfilemainurl =  NSURL(string: VideoFilePath!)!
            let sourceAsset = AVURLAsset(URL: filemainurl!, options: nil)
            let assetExport: AVAssetExportSession = AVAssetExportSession(asset: sourceAsset, presetName: AVAssetExportPresetMediumQuality)!
            assetExport.outputFileType = AVFileTypeQuickTimeMovie
            assetExport.outputURL = tempfilemainurl
            assetExport.exportAsynchronouslyWithCompletionHandler { () -> Void in
                switch assetExport.status
                {
                case AVAssetExportSessionStatus.Completed:
                    dispatch_async(dispatch_get_main_queue()) {
                        do {
                            SVProgressHUD.dismiss()
                            self.userreponsevideoData = try NSData(contentsOfURL: tempfilemainurl, options: NSDataReadingOptions())
                            print("MB - \(self.userreponsevideoData.length) byte")
                        } catch {
                            SVProgressHUD.dismiss()
                            print(error)
                        }
                    }
                case  AVAssetExportSessionStatus.Failed:
                    print("failed \(assetExport.error)")
                case AVAssetExportSessionStatus.Cancelled:
                    print("cancelled \(assetExport.error)")
                default:
                    print("complete")
                    SVProgressHUD.dismiss()
                }

            }
        }

    }
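
The code above leaves both the raw recording and the exported .mp4 inside the app's sandbox. If "saved automatically" is meant to include the user's photo library, a hedged follow-up sketch using the Photos framework could look like the following; the helper name saveVideoToPhotoLibrary is illustrative, NSPhotoLibraryUsageDescription must be declared in Info.plist, and it could be called from the export session's completion handler (passing tempfilemainurl as URL):

    import Photos

    /// Copies the exported movie at the given file URL into the user's photo library.
    func saveVideoToPhotoLibrary(_ fileURL: URL) {
        PHPhotoLibrary.shared().performChanges({
            _ = PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: fileURL)
        }, completionHandler: { saved, error in
            print("saved to Photos: \(saved), error: \(String(describing: error))")
        })
    }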

Answer 1 (score: 1)

Here is the Swift 4.1 version:

 @objc func longPress(gesture: UILongPressGestureRecognizer) {
    if gesture.state == UIGestureRecognizerState.began {
        print("Long Press")
        captureButton.setImage(UIImage(named: "recording"), for: .normal)
        createSession()
    } else if gesture.state == UIGestureRecognizerState.changed {
        // No-op while the press is held down; recording continues.
    } else if gesture.state == UIGestureRecognizerState.ended {
        captureButton.setImage(UIImage(named: "camaraTap"), for: .normal)
        session?.stopRunning()
    }
}

 func createSession() {

    var input: AVCaptureDeviceInput?
    let movieFileOutput = AVCaptureMovieFileOutput()
    session = AVCaptureSession()
    // Wrap the back camera in a device input; bail out if that fails.
    do {
        input = try AVCaptureDeviceInput(device: self.cameraWithPosition(position: .back)!)
    } catch {
        print("camera input error: \(error)")
        return
    }
    session?.addInput(input!)
    videosPreviewLayer = AVCaptureVideoPreviewLayer(session: session!)
    videosPreviewLayer?.frame.size = self.photoPreviewImageView.frame.size
    videosPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
    videosPreviewLayer?.connection?.videoOrientation = .portrait
    photoPreviewImageView.layer.sublayers?.forEach { $0.removeFromSuperlayer() }
    photoPreviewImageView.layer.addSublayer(videosPreviewLayer!)

    switchCameraButton.isHidden=true
    flashButton.isHidden=true
    msgLabel.isHidden=true
    galleryCollectionView.isHidden=true
    timerLabel.isHidden=false

    let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    fileURL = URL(string:"\(documentsURL.appendingPathComponent("temp"))" + ".mov")
    print("***** fileURL: \(String(describing: fileURL))")

    let maxDuration: CMTime = CMTimeMake(600, 10)
    movieFileOutput.maxRecordedDuration = maxDuration
    movieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024
    if self.session!.canAddOutput(movieFileOutput) {
        self.session!.addOutput(movieFileOutput)
    }
    session?.startRunning()
    movieFileOutput.startRecording(to: fileURL!, recordingDelegate: self)
}

func cameraWithPosition(position: AVCaptureDevice.Position) -> AVCaptureDevice? {
    let devices = AVCaptureDevice.devices(for: AVMediaType.video)
    for device in devices {
        if device.position == position {
            return device
        }
    }
    return nil
}
}
extension SwipeGallerymainViewController: AVCaptureFileOutputRecordingDelegate
{

func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
    print(outputFileURL)
    let filemainurl = outputFileURL

    do
    {
        let asset = AVURLAsset(url:filemainurl as URL, options:nil)
        print(asset)
        let imgGenerator = AVAssetImageGenerator(asset: asset)
        imgGenerator.appliesPreferredTrackTransform = true
        let cgImage = try imgGenerator.copyCGImage(at: CMTimeMake(0, 1), actualTime: nil)
        let uiImage = UIImage(cgImage: cgImage)
        previewImage = uiImage
        userreponsethumbimageData = NSData(contentsOf: filemainurl as URL)!
        print(userreponsethumbimageData.length)
        print(uiImage)
        // imageData = UIImageJPEGRepresentation(uiImage, 0.1)
    }
    catch let error as NSError
    {
        print(error)
        return
    }
    let VideoFilePath = URL(fileURLWithPath:NSTemporaryDirectory()).appendingPathComponent("mergeVideo\(arc4random()%1000)d").appendingPathExtension("mp4").absoluteString
    if FileManager.default.fileExists(atPath: VideoFilePath)
    {
        print("exist")
        do
        {
            try FileManager.default.removeItem(atPath: VideoFilePath)
        }
        catch { }
    }

    let tempfilemainurl =  NSURL(string: VideoFilePath)!
    let sourceAsset = AVURLAsset(url:filemainurl as URL, options:nil)
    let assetExport: AVAssetExportSession = AVAssetExportSession(asset: sourceAsset, presetName: AVAssetExportPresetMediumQuality)!
    assetExport.outputFileType = AVFileType.mov
    assetExport.outputURL = tempfilemainurl as URL
    assetExport.exportAsynchronously { () -> Void in
        switch assetExport.status
        {
        case AVAssetExportSessionStatus.completed:
            DispatchQueue.main.async {
                do
                {
                    //                        SVProgressHUD .dismiss()
                    self.userreponsevideoData = try NSData(contentsOf: tempfilemainurl as URL, options: NSData.ReadingOptions())
                    print("MB - \(self.userreponsevideoData.length) byte")
                    self.isVideoLoad=true
                    self.performSegue(withIdentifier:"previewSegue", sender:self)
                }
                catch
                {
                    // SVProgressHUD .dismiss()
                    print(error)
                }
            }

        case  AVAssetExportSessionStatus.failed:
            print("failed \(String(describing: assetExport.error))")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(String(describing: assetExport.error))")
        default:
            print("complete")
            //                SVProgressHUD .dismiss()
        }

    }
}

func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
    print(fileURL)
}


}
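
One caveat about the long-press handler above: stopping the session also ends an in-flight recording and delivers the delegate callback, but the more explicit way to finish is to call stopRecording() on the AVCaptureMovieFileOutput itself. A hedged sketch, assuming the output created in createSession() is promoted from a local constant to a property named movieFileOutput:

// Sketch only: assumes `movieFileOutput` and `session` are properties of the
// view controller rather than locals inside createSession().
func stopAutomaticRecording() {
    // Finishes the file and triggers fileOutput(_:didFinishRecordingTo:from:error:).
    movieFileOutput.stopRecording()
    session?.stopRunning()
}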