我一直在网上浏览内容,似乎找不到所需的教程或帮助。
使用AVFoundation
和Dlib库,我创建了一个应用程序,可以使用手机上的前置摄像头从实时视频中检测人脸。我正在使用Shape Predictor 68 Face Landmark
来执行此操作。为此,我敢肯定我必须使用AVCaptureVideoDataOutput
而不是AVMovieFileOutput,以便可以分析每个帧。
我现在希望能够将视频保存到文件中,根据我收集的信息,我需要使用AVAssetWriter
来执行此操作。我只是在任何地方都找不到太多有关如何开始使用此方法的信息。我对Swift和iOS编程是完全陌生的,而从阅读Apple文档中并不能真正了解很多。
如果有人可以帮助我,将不胜感激!
答案 0（得分：8）
我能够找到如何使用AVAssetWriter的方法。万一其他人需要帮助,我使用的代码如下:
/// Creates the `AVAssetWriter` with one H.264 video input and one
/// passthrough audio input, then calls `startWriting()` so the writer
/// reaches `.writing` and `canWrite()` can return true once recording starts.
/// Must run before the first sample buffer is appended.
func setUpWriter() {
    do {
        outputFileLocation = videoFileLocation()
        videoWriter = try AVAssetWriter(outputURL: outputFileLocation!, fileType: AVFileType.mov)

        // Video input: H.264, portrait 720x1280, ~2.3 Mbps average bit rate.
        videoWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: [
            AVVideoCodecKey: AVVideoCodecType.h264,
            AVVideoWidthKey: 720,
            AVVideoHeightKey: 1280,
            AVVideoCompressionPropertiesKey: [
                AVVideoAverageBitRateKey: 2300000,
            ],
        ])
        // Required for live-capture sources: the input consumes data as it arrives.
        videoWriterInput.expectsMediaDataInRealTime = true
        if videoWriter.canAdd(videoWriterInput) {
            videoWriter.add(videoWriterInput)
            print("video input added")
        } else {
            print("no input added")
        }

        // Audio input: nil outputSettings keeps the captured format (passthrough).
        audioWriterInput = AVAssetWriterInput(mediaType: AVMediaType.audio, outputSettings: nil)
        audioWriterInput.expectsMediaDataInRealTime = true
        if videoWriter.canAdd(audioWriterInput) {
            videoWriter.add(audioWriterInput)
            print("audio input added")
        } else {
            // The original silently ignored this failure, which made a
            // missing audio track impossible to diagnose from the logs.
            print("no audio input added")
        }

        videoWriter.startWriting()
    } catch {
        debugPrint(error.localizedDescription)
    }
}
/// True while a recording is in progress and the writer is accepting samples.
func canWrite() -> Bool {
    // `videoWriter?.status` already yields nil (≠ .writing) when the writer
    // doesn't exist, so the original's explicit `videoWriter != nil` check
    // was redundant.
    return isRecording && videoWriter?.status == .writing
}
// MARK: Video file location
/// Returns the destination URL for the recording
/// (`Documents/videoFile.mov`), deleting any file left over from a previous
/// recording first — AVAssetWriter fails if the output file already exists.
func videoFileLocation() -> URL {
    // Modern Foundation API instead of NSSearchPathForDirectoriesInDomains +
    // NSString path concatenation.
    let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let videoOutputUrl = documentsURL
        .appendingPathComponent("videoFile")
        .appendingPathExtension("mov")
    do {
        if FileManager.default.fileExists(atPath: videoOutputUrl.path) {
            try FileManager.default.removeItem(at: videoOutputUrl)
            print("file removed")
        }
    } catch {
        print(error)
    }
    return videoOutputUrl
}
// MARK: AVCaptureVideoDataOutputSampleBufferDelegate
/// Delegate callback for both the video and audio data outputs; routes each
/// sample buffer to the matching asset-writer input while recording.
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    let writable = canWrite()

    // Configure the video connection (portrait + mirrored front camera)
    // before any frame is written.
    if output == videoDataOutput {
        connection.videoOrientation = .portrait
        if connection.isVideoMirroringSupported {
            connection.isVideoMirrored = true
        }
    }

    // Anchor the writer session on the first *video* buffer. The original
    // accepted whichever buffer arrived first; anchoring on an audio buffer
    // leaves the movie with black/frozen frames until the first video frame.
    if writable, sessionAtSourceTime == nil, output == videoDataOutput {
        sessionAtSourceTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
        videoWriter.startSession(atSourceTime: sessionAtSourceTime!)
        //print("Writing")
    }

    // Don't append anything until the session is anchored (appending before
    // startSession raises an exception) and the buffer's data is ready.
    guard writable, sessionAtSourceTime != nil, CMSampleBufferDataIsReady(sampleBuffer) else { return }

    if output == videoDataOutput, videoWriterInput.isReadyForMoreMediaData {
        // write video buffer
        videoWriterInput.append(sampleBuffer)
        //print("video buffering")
    } else if output == audioDataOutput, audioWriterInput?.isReadyForMoreMediaData == true {
        // write audio buffer
        audioWriterInput?.append(sampleBuffer)
        //print("audio buffering")
    }
}
// MARK: Start recording
/// Begins a new recording: clears the session anchor, builds the asset
/// writer, and logs the writer's status for debugging.
func start() {
    guard !isRecording else { return }
    isRecording = true
    sessionAtSourceTime = nil
    setUpWriter()
    print(isRecording)
    print(videoWriter as Any) // `as Any` silences the implicit-optional-coercion warning

    // An exhaustive switch over AVAssetWriter.Status replaces the if/else
    // chain; the original's trailing `else` mislabeled any future status
    // value as "completed".
    switch videoWriter.status {
    case .writing:
        print("status writing")
    case .failed:
        print("status failed")
    case .cancelled:
        print("status cancelled")
    case .unknown:
        print("status unknown")
    case .completed:
        print("status completed")
    @unknown default:
        print("status unexpected")
    }
}
// MARK: Stop recording
/// Stops capture and finalizes the movie file, then shows the preview.
func stop() {
    guard isRecording else { return }
    isRecording = false

    // Finish *both* inputs; the original never closed the audio input.
    videoWriterInput.markAsFinished()
    audioWriterInput?.markAsFinished()
    print("marked as finished")

    captureSession.stopRunning()

    videoWriter.finishWriting { [weak self] in
        guard let self = self else { return }
        self.sessionAtSourceTime = nil
        //print("finished writing \(self.outputFileLocation)")
        // finishWriting's completion runs on an arbitrary queue, and the
        // original performed the segue before the file was finalized — the
        // preview could open on an incomplete .mov. Hop to main and segue
        // only after the writer has actually finished.
        DispatchQueue.main.async {
            self.performSegue(withIdentifier: "videoPreview", sender: nil)
        }
    }
}
我现在有另一个问题,当我同时使用AVCaptureMetadataOutput,AVCaptureVideoDataOutput和AVCaptureAudioDataOutput时,此解决方案不起作用。添加AVCaptureAudioDataOutput时,应用程序崩溃。