我有两个摄像头源进入OSX应用程序,我正在尝试使用AVCaptureMovieFileOutput保存它们。不久之后两路视频就不同步了。经过简短测试,它们就会错开1到5秒;测试一小时后,偏差会超过20秒。我觉得必须有某种简单的解决方案来保持两个输出同步。我们尝试过对会话和输出使用相同的设备,遇到了同样的问题。我们也尝试把帧率降到15 fps,但仍然没有成功。
设置输出
/// Points `captureSession` at `device`, installs a fresh preview layer into
/// `previewView`, and wires up the file output stored at `index`.
///
/// - Parameters:
///   - captureSession: Session to reconfigure (stopped, mutated, restarted).
///   - device: Camera to attach as the session's video input.
///   - previewView: NSView that hosts the live preview layer.
///   - index: Index into `self.outputs` / `self.overlayPreviews` for this camera.
func assignDeviceToPreview(captureSession: AVCaptureSession, device: AVCaptureDevice, previewView: NSView, index: Int){
    captureSession.stopRunning()
    captureSession.beginConfiguration()
    // Clear out old inputs so the session can be re-pointed at the new device.
    for input in captureSession.inputs {
        if let existing = input as? AVCaptureInput {
            captureSession.removeInput(existing)
        }
    }
    let output = self.outputs[index]
    output.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
    // Remove any still-image outputs left over from a previous assignment.
    for o in captureSession.outputs {
        if let oc = o as? AVCaptureStillImageOutput {
            captureSession.removeOutput(oc)
            print("removed image out")
        }
    }
    // Add the new camera input and its preview layer.
    do {
        let videoInput = try AVCaptureDeviceInput(device: device)
        if captureSession.canAddInput(videoInput) {
            captureSession.addInput(videoInput)
        } else {
            print("session cannot accept video input for device \(device)")
        }
        // NSViews are not layer-backed by default on macOS; force-unwrapping
        // previewView.layer crashed in that case. Request a backing layer first.
        previewView.wantsLayer = true
        let camViewLayer = previewView.layer!
        camViewLayer.backgroundColor = CGColorGetConstantColor(kCGColorBlack)
        // BUG FIX: previously every call stacked a new preview layer on top of
        // the old ones. Strip stale preview layers before adding the new one.
        if let stale = camViewLayer.sublayers {
            for layer in stale where layer is AVCaptureVideoPreviewLayer {
                layer.removeFromSuperlayer()
            }
        }
        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = camViewLayer.bounds
        previewLayer.autoresizingMask = [.LayerWidthSizable, .LayerHeightSizable]
        camViewLayer.addSublayer(previewLayer)
        // Re-adding the same overlay view is harmless; AppKit just moves it.
        let overlayPreview = overlayPreviews[index]
        overlayPreview.frame.origin = CGPoint.zero
        previewView.addSubview(overlayPreview)
        // Attach the movie/still output for this camera.
        if captureSession.canAddOutput(output) {
            captureSession.addOutput(output)
        } else {
            print("session cannot accept output at index \(index)")
        }
        // Only session2 records audio; guard the add so a busy/missing mic
        // reports instead of raising NSInvalidArgumentException.
        if captureSession == session2 {
            let audio = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
            do {
                let input = try AVCaptureDeviceInput(device: audio)
                if captureSession.canAddInput(input) {
                    captureSession.addInput(input)
                } else {
                    print("session cannot accept audio input")
                }
            }
        }
    } catch {
        print("Failed to add webcam as AV input")
    }
    captureSession.commitConfiguration()
    captureSession.startRunning()
}
开始录制
/// Starts recording every output in `fileOutputs` to its own .mov file under
/// Documents/Sessions/session_<UUID>/, locking both cameras to 15 fps.
///
/// Side effects: starts the recording timer, creates the session directory,
/// appends each destination URL to `fileURLs`, and kicks off delegate-driven
/// recording on each output.
func startRecording(){
    startRecordingTimer()
    // Build .../Documents/Sessions/session_<UUID>/ for this capture session.
    let base = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
    let appFolder = "Sessions"
    let sessionFolder = "session_" + session.UUID
    let path = base + "/" + appFolder + "/" + sessionFolder
    do {
        try NSFileManager.defaultManager().createDirectoryAtPath(path, withIntermediateDirectories: true, attributes: nil)
    } catch {
        print("issue creating folder")
    }
    // enumerate() replaces the O(n^2) force-unwrapped indexOf() lookup that
    // ran once per output inside the loop.
    for (index, fileOutput) in fileOutputs.enumerate() {
        let fileName = "cam\(index).mov"
        guard let fileURL = NSURL.fileURLWithPathComponents([path, fileName]) else {
            print("could not build file URL for \(fileName)")
            continue
        }
        fileURLs.append(fileURL)
        print(fileURL.absoluteString)
        // An output that is not yet attached to a session has no connections;
        // the old force unwrap crashed here instead of skipping it.
        guard let captureConnection = fileOutput.connections.first as? AVCaptureConnection else {
            print("no capture connection for \(fileName)")
            continue
        }
        // Pin both cameras to exactly 15 fps so the two files advance in step.
        captureConnection.videoMinFrameDuration = CMTimeMake(1, 15)
        captureConnection.videoMaxFrameDuration = CMTimeMake(1, 15)
        if fileOutput == movieFileOutput1 {
            fileOutput.setOutputSettings([AVVideoScalingModeKey: AVVideoScalingModeResize, AVVideoCodecKey: AVVideoCodecH264, AVVideoWidthKey: 1280, AVVideoHeightKey: 720], forConnection: captureConnection)
        } else {
            fileOutput.setOutputSettings([AVVideoScalingModeKey: AVVideoScalingModeResizeAspect, AVVideoCodecKey: AVVideoCodecH264, AVVideoWidthKey: 640, AVVideoHeightKey: 360], forConnection: captureConnection)
        }
        print(fileOutput.outputSettingsForConnection(captureConnection))
        fileOutput.startRecordingToOutputFileURL(fileURL, recordingDelegate: self)
        print("start recording")
    }
}
答案 0(得分:2)
对于精确的时序控制,我认为您需要考虑使用较低级别的AVAssetWriter框架。这允许您控制各个帧的写入和时间。
使用AVAssetWriter.startSession(atSourceTime:CMTime),您可以精确控制每台摄像机的录制开始时间。
在编写过程中,使用AVCaptureVideoDataOutputSampleBufferDelegate,您可以进一步操作生成的CMSampleBuffer来调整其时序信息,并进一步保持两个视频同步。请查看https://developer.apple.com/reference/coremedia/1669345-cmsamplebuffer以获取有关调整CMSampleBuffer的计时部分的参考信息。
话虽如此,我自己从未试过这种做法,也不确定它是否可行,但我相信如果你走这条路,你会更接近你想要实现的目标。