Custom camera video not playing audio in Swift

Date: 2017-08-23 13:40:44

Tags: ios swift camera avfoundation video-recording

I'm new to Swift and new to Stack Overflow; thanks in advance for your attention. Basically, I'm trying to build a custom camera that records video with audio, so that when I play the recorded video back it plays with sound. I've spent the last few days trying to build this custom camera, following tutorials, but something is still missing: as far as I can tell my camera records only the video and no audio, and I don't understand why. I've searched for an answer to this and haven't found one that fits.

Here is what I did:

import UIKit
import AVFoundation
import SVProgressHUD
import MediaPlayer
import MobileCoreServices
import AVKit
var videoUrl = [AnyObject]()


class TestViewController: UIViewController {

@IBOutlet var viewVidioPlayer: UIView!
@IBOutlet weak var myView: UIView!

var session: AVCaptureSession?
var userreponsevideoData = NSData()
var userreponsethumbimageData = NSData()

override func viewDidLoad() {
    super.viewDidLoad()

}

override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)
}

// Here I create the session
func createSession() {

    var input: AVCaptureDeviceInput?
    let  movieFileOutput = AVCaptureMovieFileOutput()
    var prevLayer: AVCaptureVideoPreviewLayer?
    prevLayer?.frame.size = myView.frame.size
    session = AVCaptureSession()
    let error: NSError? = nil
    do {
        input = try AVCaptureDeviceInput(device: self.cameraWithPosition(position: .front)!) } catch {return}
    if error == nil {
        session?.addInput(input)
    } else {
        print("camera input error: \(String(describing: error))")
    }
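    // Note: only the camera input is ever added above; no AVCaptureDeviceInput
    // for an audio device (AVMediaTypeAudio) is created or attached, so the
    // session has no microphone feed to record.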
    prevLayer = AVCaptureVideoPreviewLayer(session: session)
    prevLayer?.frame.size = myView.frame.size
    prevLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
    prevLayer?.connection.videoOrientation = .portrait
    myView.layer.addSublayer(prevLayer!)
    let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let  filemainurl = NSURL(string: ("\(documentsURL.appendingPathComponent("temp"))" + ".mp4"))


    let maxDuration: CMTime = CMTimeMake(600, 10)
    movieFileOutput.maxRecordedDuration = maxDuration
    movieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024
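    // maxRecordedDuration of CMTimeMake(600, 10) = 60 seconds; recording also
    // stops automatically once less than 1 MB of free disk space remains.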
    if self.session!.canAddOutput(movieFileOutput) {
        self.session!.addOutput(movieFileOutput)
    }
    session?.startRunning()
    movieFileOutput.startRecording(toOutputFileURL: filemainurl! as URL, recordingDelegate: self)

}
func cameraWithPosition(position: AVCaptureDevicePosition) -> AVCaptureDevice? {
    let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)
    for device in devices! {
        if (device as AnyObject).position == position {
            return device as? AVCaptureDevice
        }
    }
    return nil
}
@IBAction func pressbackbutton(sender: AnyObject) {
    session?.stopRunning()

}

@IBAction func Record(_ sender: Any) {
    createSession()
}
@IBAction func play(_ sender: Any) {
    self.videoPlay()
}
func videoPlay()
{

    let documentsUrl =  FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!

    do {
        // Get the directory contents urls (including subfolders urls)
        let directoryContents = try FileManager.default.contentsOfDirectory(at: documentsUrl, includingPropertiesForKeys: nil, options: [])
        print(directoryContents)

        // if you want to filter the directory contents you can do like this:
        videoUrl = directoryContents.filter{ $0.pathExtension == "mp4" } as [AnyObject]
        print("mp3 urls:",videoUrl)


        let playerController = AVPlayerViewController()
        playerController.delegate = self as? AVPlayerViewControllerDelegate
        let movieURL = videoUrl[0]


        print(movieURL)

        let player = AVPlayer(url: movieURL as! URL)
        playerController.player = player
        self.addChildViewController(playerController)
        self.view.addSubview(playerController.view)
        playerController.view.frame = self.view.frame

        player.play()
        player.volume = 1.0
        player.rate = 1.0



    } catch let error as NSError {
        print(error.localizedDescription)
    }




}
}

extension TestViewController: AVCaptureFileOutputRecordingDelegate {

func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
    // Swift 3 delegate signature; the private captureOutput(...) variant used
    // before never matched the Objective-C selector and was never called.
    print(fileURL)
}
func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
    let filemainurl = outputFileURL

    do
    {
        let asset = AVURLAsset(url: filemainurl! as URL, options: nil)
        //AVURLAsset(URL: filemainurl as! URL, options: nil)
        print(asset)
        let imgGenerator = AVAssetImageGenerator(asset: asset)
        imgGenerator.appliesPreferredTrackTransform = true
        let cgImage = try imgGenerator.copyCGImage(at: CMTimeMake(0, 1), actualTime: nil)

        let uiImage = UIImage(cgImage: cgImage)

        userreponsethumbimageData = try  NSData(contentsOf: filemainurl! as URL)
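        // Note: this reads the entire movie file into userreponsethumbimageData,
        // not the JPEG bytes of the thumbnail image generated above.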

        print(userreponsethumbimageData.length)
        print(uiImage)
        // imageData = UIImageJPEGRepresentation(uiImage, 0.1)
    }
    catch let error as NSError
    {
        print(error)
        return
    }

    SVProgressHUD.show(with: SVProgressHUDMaskType.clear)
    let VideoFilePath = NSURL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("mergeVideo\(arc4random()%1000)d")!.appendingPathExtension("mp4").absoluteString
    if FileManager.default.fileExists(atPath: VideoFilePath)

    {
        do

        {
            try FileManager.default.removeItem(atPath: VideoFilePath)
        }
        catch { }

    }
    let tempfilemainurl =  NSURL(string: VideoFilePath)!
    let sourceAsset = AVURLAsset(url: filemainurl! as URL, options: nil)
    let assetExport: AVAssetExportSession = AVAssetExportSession(asset: sourceAsset, presetName: AVAssetExportPresetMediumQuality)!
    assetExport.outputFileType = AVFileTypeQuickTimeMovie
    assetExport.outputURL = tempfilemainurl as URL

    assetExport.exportAsynchronously { () -> Void in
        switch assetExport.status
        {
        case AVAssetExportSessionStatus.completed:
            DispatchQueue.main.async(execute: {
                do
                {
                    SVProgressHUD .dismiss()
                    self.userreponsevideoData = try NSData(contentsOf: tempfilemainurl as URL, options: NSData.ReadingOptions())
                    print("MB - \(self.userreponsevideoData.length) byte")


                }
                catch
                {
                    SVProgressHUD .dismiss()
                    print(error)
                }
            })
        case  AVAssetExportSessionStatus.failed:
            print("failed \(assetExport.error)")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(assetExport.error)")
        default:
            print("complete")
            SVProgressHUD .dismiss()
        }

    }



}

}

That's everything I did, so I don't understand what is missing from this code. Why doesn't the video play with audio, or rather, why isn't the audio recorded along with the video?
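For reference, createSession() above only ever adds the camera input, so the movie file is written without an audio track. The usual AVFoundation fix is to also attach a microphone input before recording starts. Below is a minimal sketch in the same Swift 3 style as the code above; placing it inside createSession(), right after the camera input is added, is an assumption:

// Sketch: attach a microphone input next to the camera input (Swift 3 APIs,
// matching the question's code). Without an audio input,
// AVCaptureMovieFileOutput records a movie that has no audio track.
if let audioDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio) {
    do {
        let audioInput = try AVCaptureDeviceInput(device: audioDevice)
        if session!.canAddInput(audioInput) {
            session!.addInput(audioInput)
        }
    } catch {
        print("microphone input error: \(error)")
    }
}

Note that on iOS 10 and later the app also needs an NSMicrophoneUsageDescription entry in Info.plist, or the session will capture no audio at all.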

1 Answer:

Answer 0 (score: 0)

Use this CocoaPod in your project; it makes the job easy. It includes full instructions on how to use it, plus a demo project so you can verify that it works as expected.

SwiftyCam
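
For context, SwiftyCam wraps all of the AVCaptureSession setup, including the microphone input, so recordings come out with sound. A minimal usage sketch follows; the delegate method names are taken from the SwiftyCam README of that era and are assumptions to check against the pod version you install:

import UIKit
import AVFoundation
import SwiftyCam

// Sketch: subclass SwiftyCamViewController instead of configuring the
// capture session by hand; SwiftyCam attaches the audio input itself.
class CameraViewController: SwiftyCamViewController, SwiftyCamViewControllerDelegate {

    override func viewDidLoad() {
        super.viewDidLoad()
        cameraDelegate = self
    }

    func swiftyCam(_ swiftyCam: SwiftyCamViewController, didFinishProcessVideoAt url: URL) {
        // The finished movie at url contains both video and audio; play it
        // back e.g. with AVPlayer / AVPlayerViewController as in videoPlay().
        let player = AVPlayer(url: url)
        player.play()
    }
}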