This has been bothering me for a while. I have a video conversion routine that converts videos to the ".mp4" format, but the conversion crashes for some videos, though not all of them.
Here is the crash log:
*** Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: '*** -[AVAssetWriterInput appendSampleBuffer:]
Cannot append sample buffer: First input buffer must have an appropriate kCMSampleBufferAttachmentKey_TrimDurationAtStart since the codec has encoder delay'
Here is my code:
NSURL *uploadURL = [NSURL fileURLWithPath:[[NSTemporaryDirectory() stringByAppendingPathComponent:[self getVideoName]] stringByAppendingString:@".mp4"]];
AVAssetTrack *videoTrack = [[self.avAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGSize videoSize = videoTrack.naturalSize;

NSDictionary *videoWriterCompressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:1250000], AVVideoAverageBitRateKey, nil];
NSDictionary *videoWriterSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey, videoWriterCompressionSettings, AVVideoCompressionPropertiesKey, [NSNumber numberWithFloat:videoSize.width], AVVideoWidthKey, [NSNumber numberWithFloat:videoSize.height], AVVideoHeightKey, nil];

AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
                                        assetWriterInputWithMediaType:AVMediaTypeVideo
                                        outputSettings:videoWriterSettings];
videoWriterInput.expectsMediaDataInRealTime = YES;
videoWriterInput.transform = videoTrack.preferredTransform;

self.assetWriter = [[AVAssetWriter alloc] initWithURL:uploadURL fileType:AVFileTypeQuickTimeMovie error:nil];
[self.assetWriter addInput:videoWriterInput];

//setup video reader
NSDictionary *videoReaderSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
AVAssetReaderTrackOutput *videoReaderOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoReaderSettings];

self.assetReader = [[AVAssetReader alloc] initWithAsset:self.avAsset error:nil];
[self.assetReader addOutput:videoReaderOutput];

//setup audio writer
AVAssetWriterInput* audioWriterInput = [AVAssetWriterInput
                                        assetWriterInputWithMediaType:AVMediaTypeAudio
                                        outputSettings:nil];
audioWriterInput.expectsMediaDataInRealTime = NO;
[self.assetWriter addInput:audioWriterInput];

//setup audio reader
AVAssetTrack* audioTrack = [[self.avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVAssetReaderOutput *audioReaderOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];

AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:self.avAsset error:nil];
[audioReader addOutput:audioReaderOutput];

[self.assetWriter startWriting];
[self.assetReader startReading];
[self.assetWriter startSessionAtSourceTime:kCMTimeZero];

dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue1", NULL);
[videoWriterInput requestMediaDataWhenReadyOnQueue:processingQueue
                                        usingBlock:^{
    while ([videoWriterInput isReadyForMoreMediaData])
    {
        CMSampleBufferRef sampleBuffer = NULL;
        if ([self.assetReader status] == AVAssetReaderStatusReading &&
            (sampleBuffer = [videoReaderOutput copyNextSampleBuffer])) {
            [videoWriterInput appendSampleBuffer:sampleBuffer];
            CFRelease(sampleBuffer);
        }
        else
        {
            [videoWriterInput markAsFinished];
            if ([self.assetReader status] == AVAssetReaderStatusCompleted)
            {
                //start writing from audio reader
                [audioReader startReading];
                [self.assetWriter startSessionAtSourceTime:kCMTimeZero];
                dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue2", NULL);
                [audioWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:^{
                    while (audioWriterInput.readyForMoreMediaData)
                    {
                        CMSampleBufferRef sampleBuffer;
                        if ([audioReader status] == AVAssetReaderStatusReading &&
                            (sampleBuffer = [audioReaderOutput copyNextSampleBuffer]))
                        {
                            if (sampleBuffer) {
                                [audioWriterInput appendSampleBuffer:sampleBuffer];
                            }
                            CFRelease(sampleBuffer);
                        }
                        else
                        {
                            [audioWriterInput markAsFinished];
                            if ([audioReader status] == AVAssetReaderStatusCompleted) {
                                [self.assetWriter finishWritingWithCompletionHandler:^(){
                                    [self createLiveTrailerApiForVideoId:video.dbId];
                                }];
                            }
                        }
                    }
                }];
            }
        }
    }
}];
This is the part that causes the crash:
CMSampleBufferRef sampleBuffer;
if ([audioReader status] == AVAssetReaderStatusReading &&
    (sampleBuffer = [audioReaderOutput copyNextSampleBuffer]))
{
    if (sampleBuffer) {
        [audioWriterInput appendSampleBuffer:sampleBuffer];
    }
    CFRelease(sampleBuffer);
}
I have been searching around, and it seems I need to set kCMSampleBufferAttachmentKey_TrimDurationAtStart on the first buffer, but I cannot find any example of how to set this value.
Please advise. Thanks!
Answer 0 (score: 1)
Like this:
CFDictionaryRef dict = NULL;
if (firstBuffer) {
    firstBuffer = NO;
    // 1024 samples at 44,100 Hz -- the typical AAC encoder priming (the "encoder delay" from the crash message)
    dict = CMTimeCopyAsDictionary(CMTimeMake(1024, 44100), kCFAllocatorDefault);
    CMSetAttachment(sampleBuffer, kCMSampleBufferAttachmentKey_TrimDurationAtStart, dict, kCMAttachmentMode_ShouldNotPropagate);
    // CMTimeCopyAsDictionary follows the Copy rule, so release the dictionary after attaching it
    CFRelease(dict);
}
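For context, here is a minimal sketch of how this attachment could be folded into the audio pass from the question. It is not the original poster's code: it is meant to stand in for the inner audio-writing block (the surrounding [audioReader startReading] and startSessionAtSourceTime: calls stay as they are), it assumes the audioReader, audioReaderOutput, audioWriterInput, and self.assetWriter variables set up above, and it assumes a 44.1 kHz AAC source, which is where the hard-coded 1024/44100 trim duration comes from. The firstAudioBuffer flag and the queue name are illustrative.

__block BOOL firstAudioBuffer = YES; // illustrative flag: only the first appended buffer gets the trim attachment
dispatch_queue_t audioQueue = dispatch_queue_create("audioProcessingQueue", NULL);
[audioWriterInput requestMediaDataWhenReadyOnQueue:audioQueue usingBlock:^{
    while (audioWriterInput.readyForMoreMediaData)
    {
        CMSampleBufferRef sampleBuffer = NULL;
        if ([audioReader status] == AVAssetReaderStatusReading &&
            (sampleBuffer = [audioReaderOutput copyNextSampleBuffer]))
        {
            if (firstAudioBuffer) {
                firstAudioBuffer = NO;
                // Tell the writer how much encoder delay to trim from the start of the track
                CFDictionaryRef trim = CMTimeCopyAsDictionary(CMTimeMake(1024, 44100), kCFAllocatorDefault);
                CMSetAttachment(sampleBuffer, kCMSampleBufferAttachmentKey_TrimDurationAtStart, trim, kCMAttachmentMode_ShouldNotPropagate);
                CFRelease(trim);
            }
            [audioWriterInput appendSampleBuffer:sampleBuffer];
            CFRelease(sampleBuffer);
        }
        else
        {
            [audioWriterInput markAsFinished];
            if ([audioReader status] == AVAssetReaderStatusCompleted) {
                [self.assetWriter finishWritingWithCompletionHandler:^{
                    // continue with whatever post-processing the app needs here
                }];
            }
        }
    }
}];

If the source audio is not 44.1 kHz AAC, the 1024/44100 value would need to be adjusted to the actual sample rate and encoder priming of that format.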