我正在使用AVAssetWriter用相机记录实时供稿,但是遇到最后一帧丢失的问题。当我点击“停止”按钮时,我的输出视频丢失了0.5秒。
我认为这是因为在我按下“停止”按钮后,视频捕获管道太忙了,无法播放最后几帧。供您参考,当我将稳定模式禁用为off
或将会话捕获预设设置为low
或medium
时,我没有丢失帧。
class ViewController: UIViewController, AVCaptureAudioDataOutputSampleBufferDelegate, AVCaptureVideoDataOutputSampleBufferDelegate {
private var session: AVCaptureSession = AVCaptureSession()
private var deviceInput: AVCaptureDeviceInput?
private var previewLayer: AVCaptureVideoPreviewLayer?
private var videoOutput: AVCaptureVideoDataOutput = AVCaptureVideoDataOutput()
private var audioOutput: AVCaptureAudioDataOutput = AVCaptureAudioDataOutput()
private var videoDevice: AVCaptureDevice = (AVCaptureDevice.default(for: AVMediaType.video))!
private var audioConnection: AVCaptureConnection?
private var videoConnection: AVCaptureConnection?
private var assetWriter: AVAssetWriter?
private var audioInput: AVAssetWriterInput?
private var videoInput: AVAssetWriterInput?
private var fileManager: FileManager = FileManager()
private var recordingURL: URL?
private var isCameraRecording: Bool = false
private var isRecordingSessionStarted: Bool = false
private var recordingQueue = DispatchQueue(label: "recording.queue")
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view, typically from a nib.
self.session.sessionPreset = AVCaptureSession.Preset.high
self.recordingURL = URL(fileURLWithPath: "\(NSTemporaryDirectory() as String)/file.mov")
if self.fileManager.isDeletableFile(atPath: self.recordingURL!.path) {
_ = try? self.fileManager.removeItem(atPath: self.recordingURL!.path)
}
self.assetWriter = try? AVAssetWriter(outputURL: self.recordingURL!,
fileType: AVFileType.mov)
/*
let compression = [//AVVideoAverageBitRateKey : 5000000,
AVVideoMaxKeyFrameIntervalKey : 1]
*/
let videoSettings = [
AVVideoCodecKey : AVVideoCodecType.h264,
AVVideoWidthKey : 1080,
AVVideoHeightKey : 1920,
//AVVideoCompressionPropertiesKey : compression
] as [String : Any]
self.videoInput = AVAssetWriterInput(mediaType: AVMediaType.video,
outputSettings: videoSettings)
self.videoInput?.expectsMediaDataInRealTime = true
if self.assetWriter!.canAdd(self.videoInput!) {
self.assetWriter?.add(self.videoInput!)
}
self.deviceInput = try? AVCaptureDeviceInput(device: self.videoDevice)
self.previewLayer = AVCaptureVideoPreviewLayer(session: self.session)
self.previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
let rootLayer = self.view.layer
self.previewLayer?.frame = CGRect(x: 0, y: 0, width: UIScreen.main.bounds.width, height: UIScreen.main.bounds.width)
rootLayer.insertSublayer(self.previewLayer!, at: 0)
self.session.startRunning()
DispatchQueue.main.async {
self.session.beginConfiguration()
if self.session.canAddInput(self.deviceInput!) {
self.session.addInput(self.deviceInput!)
}
if self.session.canAddOutput(self.videoOutput) {
self.session.addOutput(self.videoOutput)
}
if let connection = self.videoOutput.connection(with: AVMediaType.video) {
if connection.isVideoOrientationSupported {
// Force recording to portrait
connection.videoOrientation = AVCaptureVideoOrientation.portrait
}
}
self.videoConnection = self.videoOutput.connection(with: AVMediaType.video)
if self.videoConnection?.isVideoStabilizationSupported == true {
self.videoConnection?.preferredVideoStabilizationMode = .auto
}
self.session.commitConfiguration()
}
}
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
if !self.isRecordingSessionStarted {
let presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
self.assetWriter?.startSession(atSourceTime: presentationTime)
self.isRecordingSessionStarted = true
}
let description = CMSampleBufferGetFormatDescription(sampleBuffer)!
if CMFormatDescriptionGetMediaType(description) == kCMMediaType_Audio {
if self.audioInput!.isReadyForMoreMediaData {
print("appendSampleBuffer audio");
self.audioInput?.append(sampleBuffer)
}
} else {
if self.videoInput!.isReadyForMoreMediaData {
print("appendSampleBuffer video");
if !self.videoInput!.append(sampleBuffer) {
print("Error writing video buffer");
}
}
}
}
private func startRecording() {
if self.assetWriter?.startWriting() != true {
print("error: \(self.assetWriter?.error.debugDescription ?? "")")
}
self.videoOutput.setSampleBufferDelegate(self, queue: self.recordingQueue)
self.audioOutput.setSampleBufferDelegate(self, queue: self.recordingQueue)
}
private func stopRecording() {
self.videoInput?.markAsFinished()
self.audioInput?.markAsFinished()
self.assetWriter?.finishWriting {
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: self.recordingURL!)
}) { saved, error in
if saved {
let alertController = UIAlertController(title: "Your video was successfully saved", message: nil, preferredStyle: .alert)
let defaultAction = UIAlertAction(title: "OK", style: .default, handler: nil)
alertController.addAction(defaultAction)
DispatchQueue.main.async {
self.present(alertController, animated: true, completion: nil)
self.resetAssetWriter()
}
} else {
print(error.debugDescription)
}
}
print("saved")
}
}
}
您知道如何解决此问题吗?
谢谢