I have a video calling app that uses the latest Quickblox version (Quickblox 2.5, Quickblox-WebRTC 2.0), and I need to save the video streamed during a call to a file. There is an older solution for a previous version of the SDK, but that SDK looks completely different from the current one.
There is nothing about this in the current documentation, and because Quickblox already uses an AVCaptureVideoDataOutput, I cannot start an AVCaptureMovieFileOutput. Is there any way to save the stream to a file?
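Just to show the conflict, this is the naive approach that does not work here; the `cameraCapture` name below only stands for the call's QBRTCCameraCapture and is for illustration:

// Naive attempt: Quickblox's session already has an AVCaptureVideoDataOutput
// attached, and on iOS that cannot be combined with an AVCaptureMovieFileOutput,
// so recording has to go through the data output instead.
let movieOutput = AVCaptureMovieFileOutput()
if cameraCapture.captureSession.canAddOutput(movieOutput) {
    cameraCapture.captureSession.addOutput(movieOutput)
}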
Update:
I managed to write the video to a file. All that is missing now is the audio track.
import Foundation
import AVFoundation
// QBRTCCameraCapture comes from the QuickbloxWebRTC SDK; it is assumed to be
// available here via the SDK's module import or a bridging header.
class VideoManager: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {

    static let sharedInstance = VideoManager()

    var pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor?
    var assetWriterInput: AVAssetWriterInput?
    var assetWriter: AVAssetWriter?
    var frameNumber: Int64 = 0

    // Quickblox's original sample buffer delegate, kept so every frame is still
    // forwarded to WebRTC while we tap the stream.
    var qbDelegate: AVCaptureVideoDataOutputSampleBufferDelegate?

    func startSavingCaptureToFileWithURL(url: NSURL, capture: QBRTCCameraCapture) {
        print("[VideoManager]: startSavingCaptureToFileWithURL")

        guard let dataOutput = getVideoCaptureDataOutput(capture) else { return }

        frameNumber = 0

        // Swap ourselves in as the sample buffer delegate and remember the
        // Quickblox delegate so frames can be passed through to it.
        qbDelegate = dataOutput.sampleBufferDelegate
        dataOutput.setSampleBufferDelegate(self, queue: dataOutput.sampleBufferCallbackQueue)

        // H.264 output at 720x1280 (portrait); adjust to match the capture format.
        let outputSettings: [String : AnyObject] = [
            AVVideoWidthKey : 720,
            AVVideoHeightKey: 1280,
            AVVideoCodecKey : AVVideoCodecH264
        ]

        assetWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings)
        pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(
            assetWriterInput: assetWriterInput!,
            sourcePixelBufferAttributes: [kCVPixelBufferPixelFormatTypeKey as String : NSNumber(unsignedInt: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)])

        do {
            assetWriter = try AVAssetWriter(URL: url, fileType: AVFileTypeMPEG4)
            assetWriter!.addInput(assetWriterInput!)
            assetWriterInput!.expectsMediaDataInRealTime = true
            assetWriter!.startWriting()
            assetWriter!.startSessionAtSourceTime(kCMTimeZero)
        }
        catch {
            print("[VideoManager]: Error persisting stream!")
        }
    }

    func stopSavingVideo() {
        // Finish the file asynchronously; the handler fires once the MP4 is finalized.
        assetWriter?.finishWritingWithCompletionHandler { [weak self] in
            guard let strongSelf = self else { return }
            strongSelf.frameNumber = 0
        }
    }

    // Finds the AVCaptureVideoDataOutput that Quickblox attached to its capture session.
    private func getVideoCaptureDataOutput(videoCapture: QBRTCCameraCapture) -> AVCaptureVideoDataOutput? {
        var output: AVCaptureVideoDataOutput?
        videoCapture.captureSession.outputs.forEach { captureOutput in
            if captureOutput is AVCaptureVideoDataOutput {
                output = captureOutput as? AVCaptureVideoDataOutput
            }
        }
        return output
    }

    // Called for every captured frame: forward it to Quickblox first, then append it to the file.
    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        qbDelegate?.captureOutput?(captureOutput, didOutputSampleBuffer: sampleBuffer, fromConnection: connection)

        guard let assetWriterInput = assetWriterInput else { return }
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }

        if assetWriterInput.readyForMoreMediaData {
            // Presentation time assumes a 25 fps capture: frameNumber / 25 seconds.
            pixelBufferAdaptor?.appendPixelBuffer(imageBuffer, withPresentationTime: CMTimeMake(frameNumber, 25))
        }
        frameNumber += 1
    }

    func getUniqueFileURL() -> NSURL {
        let guid = NSProcessInfo.processInfo().globallyUniqueString
        let fileName = "\(guid).mp4"
        return NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent(fileName)
    }
}
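For completeness, this is roughly how the manager is used; `cameraCapture` below stands for whatever QBRTCCameraCapture instance the app created for the call, so the exact wiring point is app-specific:

// Once the QBRTCCameraCapture for the call exists (here called `cameraCapture`):
let fileURL = VideoManager.sharedInstance.getUniqueFileURL()
VideoManager.sharedInstance.startSavingCaptureToFileWithURL(fileURL, capture: cameraCapture)

// ... and when the call ends:
VideoManager.sharedInstance.stopSavingVideo()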
Any ideas on how to get at the underlying AVCaptureAudioDataOutput of the QBRTCLocalAudioTrack?
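That missing piece aside, the AVFoundation half of the audio path would just be another AVAssetWriterInput plus an AVCaptureAudioDataOutputSampleBufferDelegate. The sketch below only illustrates that half: the AVCaptureAudioDataOutput passed into it is assumed to exist, and obtaining one from the QBRTCLocalAudioTrack is exactly what I have not figured out.

import AVFoundation

// Sketch only: feeds AAC audio into the same AVAssetWriter, assuming an
// AVCaptureAudioDataOutput (`audioOutput`) were somehow available from Quickblox.
class AudioWriterSketch: NSObject, AVCaptureAudioDataOutputSampleBufferDelegate {

    let audioWriterInput: AVAssetWriterInput

    init(writer: AVAssetWriter, audioOutput: AVCaptureAudioDataOutput) {
        let audioSettings: [String : AnyObject] = [
            AVFormatIDKey: NSNumber(unsignedInt: kAudioFormatMPEG4AAC),
            AVNumberOfChannelsKey: 1,
            AVSampleRateKey: 44100.0,
            AVEncoderBitRateKey: 64000
        ]
        audioWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
        audioWriterInput.expectsMediaDataInRealTime = true
        super.init()
        // The input must be added before writer.startWriting() is called.
        writer.addInput(audioWriterInput)
        audioOutput.setSampleBufferDelegate(self, queue: dispatch_queue_create("audio.writer.queue", DISPATCH_QUEUE_SERIAL))
    }

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        // Audio sample buffers carry their own presentation timestamps. Note that
        // VideoManager writes video with synthetic frame-number timestamps, so the
        // two timelines would still have to be reconciled for proper A/V sync.
        if audioWriterInput.readyForMoreMediaData {
            audioWriterInput.appendSampleBuffer(sampleBuffer)
        }
    }
}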
Answer 0 (score: 0):
I am from the QuickbloxWebRTC dev team. This feature is planned for the next major release.