使用AVCaptureSession获取相机输出并成功添加了音频和视频输入和输出。
{
// Reconfigures `cameraSession`: swaps the camera input (front/back), attaches an
// AVCaptureVideoDataOutput for per-frame processing, and adds a microphone input.
// Written against Swift 2-era AVFoundation APIs (defaultDeviceWithMediaType,
// .Front/.Back enum cases, dispatch_queue_create).
var captureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo) as AVCaptureDevice
var error: NSError? = nil
do {
//remove the previous inputs so the session holds only the inputs added below
let inputs = cameraSession.inputs as! [AVCaptureDeviceInput]
for oldInput:AVCaptureDeviceInput in inputs {
cameraSession.removeInput(oldInput)
}
// All input/output changes are batched between beginConfiguration() and
// commitConfiguration() so the session applies them atomically.
cameraSession.beginConfiguration()
// NOTE(review): stringly-typed camera selection ("Front"/anything else);
// cameraWithPosition(_:) is a helper defined elsewhere in the file and is
// force-unwrapped here — this crashes if the requested camera is missing.
if cameraPosition.isEqualToString("Front") {
captureDevice = cameraWithPosition(.Front)!
}
else {
captureDevice = cameraWithPosition(.Back)!
}
let deviceInput = try AVCaptureDeviceInput(device: captureDevice)
if (cameraSession.canAddInput(deviceInput) == true) {
cameraSession.addInput(deviceInput)
}
// Video frames are delivered as bi-planar full-range 4:2:0 YCbCr ("420f")
// pixel buffers; late frames are dropped rather than queued.
let dataOutput = AVCaptureVideoDataOutput()
dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString) : NSNumber(unsignedInt: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)]
dataOutput.alwaysDiscardsLateVideoFrames = true
if (cameraSession.canAddOutput(dataOutput) == true) {
cameraSession.addOutput(dataOutput)
}
// Bail out early when the device has no microphone (e.g. some simulators).
// NOTE(review): returning here leaves beginConfiguration() uncommitted.
let audioCheck = AVCaptureDevice.devicesWithMediaType(AVMediaTypeAudio)
if audioCheck.isEmpty {
print("no audio device")
return
}
let audioDevice: AVCaptureDevice! = audioCheck.first as! AVCaptureDevice
var audioDeviceInput: AVCaptureDeviceInput?
do {
audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
} catch let error2 as NSError {
// Capture the failure into `error` so it can be surfaced to the user below.
error = error2
audioDeviceInput = nil
} catch {
fatalError()
}
if error != nil{
print(error)
let alert = UIAlertController(title: "Error", message: error!.localizedDescription
, preferredStyle: .Alert)
alert.addAction(UIAlertAction(title: "OK", style: .Default, handler: nil))
self.presentViewController(alert, animated: true, completion: nil)
}
// NOTE(review): if the audio-input creation above failed, audioDeviceInput is
// nil here and the implicit unwrap when adding it would crash — guard on the
// optional instead of relying on canAddInput.
if cameraSession.canAddInput(audioDeviceInput){
cameraSession.addInput(audioDeviceInput)
}
cameraSession.commitConfiguration()
// Sample buffers are delivered to this object (the delegate) on a private
// serial queue, off the main thread.
let queue = dispatch_queue_create("com.invasivecode.videoQueue", DISPATCH_QUEUE_SERIAL)
dataOutput.setSampleBufferDelegate(self, queue: queue)
}
catch let error as NSError {
// AVCaptureDeviceInput(device:) threw for the video device; log and give up.
NSLog("\(error), \(error.localizedDescription)")
}
}
使用 AVCaptureMovieFileOutput 时,可以通过 movieFileOutput.startRecordingToOutputFileURL( outputFilePath, recordingDelegate: self) 开始录制,并将输出视频保存到照片库中。
但是我使用 AVCaptureVideoDataOutput 作为输出,以便对委托(delegate)回调提供的元数据做额外处理,同时尝试录制视频,却找不到任何可以开始和停止录制视频的方法。
请建议如何使用 AVCaptureVideoDataOutput 录制视频。

答案 0(得分:3):
您需要AVCaptureSession来执行此操作:
//First add AVCaptureVideoDataOutput to AVCaptureSession
// (Fragment: "......Configuration......" marks elided setup code.)
AVCaptureSession *_captureSession;
_captureSession = [[AVCaptureSession alloc] init];
......Configuration......
AVCaptureVideoDataOutput *videoOut = [[AVCaptureVideoDataOutput alloc] init];
......Configuration......
// Only attach the output if the session accepts it.
if ( [_captureSession canAddOutput:videoOut] ) {
[_captureSession addOutput:videoOut];
}
//Then use captureSession to start and stop recording
// (startRunning/stopRunning control frame delivery; actual file writing with a
// data output must be done by the delegate, e.g. via AVAssetWriter.)
[_captureSession startRunning];
[_captureSession stopRunning];
请浏览RosyWriterCapturePipeline.m,这是一个很好的例子:
答案 1 :(得分:0)
我发现 Rosy Writer 是演示 AVCaptureSession 用法的一个很好的示例。这里有 Rosy Writer 2.1 的 Swift 版本,它应该能帮助那些在 AVCaptureSession 上遇到困难的人。