我正在开发直播视频功能:自己实现采集编码,并把视频发送到服务器。我使用 AVCaptureSession
获取音频和视频,然后使用 AVAssetWriter
写入一系列 mp4 文件。当我使用 [writerInput appendSampleBuffer:sampleBuffer]
附加缓冲区时,大部分情况下可以正常工作,但有时会收到如下错误:
[com.ifactorylab.ifassetencoder.encodingqueue]无法附加样本缓冲区:错误域= AVFoundationErrorDomain代码= -11800"操作无法完成" UserInfo = 0x1314e08b0 {NSUnderlyingError = 0x173259ad0"操作无法完成。 (OSStatus错误-12763。)",NSLocalizedFailureReason =发生未知错误(-12763),NSLocalizedDescription =操作无法完成}
之后 sampleBuffer 占用的内存会被逐渐填满,采集回调(capture delegate)随之停止。有人说是 finishWritingWithCompletionHandler
没有生效。关于 -12763,头文件中的描述是:@constant kCMBufferQueueError_EnqueueAfterEndOfData —— 在调用 CMBufferQueueMarkEndOfData 之后(且没有调用 CMBufferQueueReset)又尝试了 CMBufferQueueEnqueue。
我不清楚这个错误是如何发生的,也不知道是我的哪一步操作导致了它,以及应该如何避免。
这是我的主要代码
// Queues one captured sample buffer for asynchronous encoding on the
// encoder's serial queue. Buffers that previously failed to write are
// flushed first so samples always reach the writer in capture order.
- (void)encodeSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(IFCapturedBufferType)mediaType {
  if (!assetEncodingQueue_) {
    DDLogDebug(@"No valid assetEncodingQueue_ exist");
    return;
  }
  // Keep the buffer alive until the async block has consumed it.
  CFRetain(sampleBuffer);
  // We'd like encoding job running asynchronously
  dispatch_async(assetEncodingQueue_, ^{
    // Drain as many previously-pended buffers as the writer will accept.
    NSUInteger flushed = 0;
    if (pendingSampleBuffers_.count > 0) {
      for (KSYPendingSampleBuffer *queued in pendingSampleBuffers_) {
        if (![self writeSampleBuffer:[queued getSampleBuffer] ofType:queued.mediaType]) {
          break;  // Writer not ready again; stop to preserve ordering.
        }
        flushed++;
      }
      // Remove only the buffers that were actually written (oldest first).
      while (flushed > 0) {
        [pendingSampleBuffers_ removeObjectAtIndex:0];
        flushed--;
      }
    }
    // Write the given sample buffer to output file through AVAssetWriter.
    // If older buffers are still pending, queue this one as well rather
    // than writing it out of order.
    if (pendingSampleBuffers_.count > 0 || ![self writeSampleBuffer:sampleBuffer ofType:mediaType]) {
      [pendingSampleBuffers_ addObject:
          [KSYPendingSampleBuffer pendingSampleBuffer:sampleBuffer ofType:mediaType]];
      DDLogDebug(@"pendingSampleBuffers_.count = %@",@(pendingSampleBuffers_.count));
    }
    CFRelease(sampleBuffer);
  });
}
// Writes a single sample buffer through the current AVAssetWriter.
// Returns NO when the encoder is not running or the append failed, so the
// caller can pend the buffer and retry it later in order.
- (BOOL)writeSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(IFCapturedBufferType)mediaType {
  // Even if stream is coming, just do nothing if we are not ready yet.
  if (self.encoderState != kEncoderStateRunning) {
    DDLogDebug(@"stated =%d",self.encoderState);
    return NO;
  }
  // Bug fix: removed the stray fragment "[assetMetaWriter error]);" that
  // was left behind here from a deleted log statement — it is not a valid
  // statement and broke compilation.
  if ([self encodeSampleBuffer:sampleBuffer ofType:mediaType assetWriter:assetWriter]) {
    // Start watching the output file once the first sample lands on disk.
    if (!watchOutputFileReady_) {
      [self watchOutputFile:[outputURL path]];
    }
  } else {
    DDLogDebug(@"*************** Failed to encode given sample buffer");
    return NO;
  }
  return YES;
}
// Feeds one sample buffer into |writer|. On the very first buffer after a
// writer is (re)created, starts writing and opens the session at that
// buffer's presentation timestamp, then routes the buffer to the matching
// audio/video writer input. Returns YES only when the buffer was appended.
- (BOOL)encodeSampleBuffer:(CMSampleBufferRef)sampleBuffer
                    ofType:(IFCapturedBufferType)mediaType
               assetWriter:(AVAssetWriter *)writer {
  if (!CMSampleBufferDataIsReady(sampleBuffer)) {
    DDLogDebug(@"SampleBufferDataIsReady?? no");
    return NO;
  }
  // Lazy start: a fresh writer reports AVAssetWriterStatusUnknown.
  if (writer.status == AVAssetWriterStatusUnknown) {
    if (![writer startWriting]) {
      DDLogDebug(@"Failed to start writing(%@): %@", writer.outputURL,
                 [writer error]);
      return NO;
    }
    @try {
      CMTime sessionStart = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
      [writer startSessionAtSourceTime:sessionStart];
    }
    @catch (NSException *exception) {
      DDLogDebug(@"Couldn't add audio(2)/video(1)--%d input: %@",mediaType, [exception description]);
      return NO;
    }
  }
  // Only a writer in the Writing state can accept samples.
  if (writer.status != AVAssetWriterStatusWriting) {
    return NO;
  }
  if (mediaType == kBufferVideo) {
    return [self appendSampleBuffer:sampleBuffer toWriterInput:self.videoEncoder.assetWriterInput];
  }
  if (mediaType == kBufferAudio) {
    return [self appendSampleBuffer:sampleBuffer toWriterInput:self.audioEncoder.assetWriterInput];
  }
  return NO;
}
// Appends a sample buffer to |writerInput| when it can accept more data.
// Returns YES on success; NO when the input is busy or the append failed,
// so the caller can pend the buffer and retry.
- (BOOL)appendSampleBuffer:(CMSampleBufferRef)sampleBuffer
             toWriterInput:(AVAssetWriterInput *)writerInput {
  if (!writerInput.readyForMoreMediaData) {
    DDLogDebug(@"writerInput.readyForMoreMediaData = no");
    return NO;
  }
  @try {
    // Bug fix: stray markdown "**" markers around this call broke the
    // syntax; removed.
    if (![writerInput appendSampleBuffer:sampleBuffer]) {
      DDLogDebug(@"Failed to append sample buffer: %@", [assetWriter error]);
      // Bug fix: do NOT call -markAsFinished here. These writer inputs are
      // reused with the next AVAssetWriter (see -watchOutputFile:), and a
      // finished input rejects every subsequent append with
      // kCMBufferQueueError_EnqueueAfterEndOfData (OSStatus -12763) — the
      // exact error this encoder keeps hitting. Just report failure and
      // let the caller pend the buffer for a later retry.
      return NO;
    }
    return YES;
  } @catch (NSException *exception) {
    DDLogDebug(@"Couldn't append sample buffer: %@", [exception description]);
    return NO;
  }
}
// Watches the growing output movie file with a vnode dispatch source.
// Once enough bytes have accumulated (threshold derived from the combined
// A/V bitrate), the current AVAssetWriter is finished, the completed mp4
// fragment is parsed (KSYMP4Reader) and delivered to captureHandler, and a
// fresh writer is created so capture continues — a manual segmenter for
// live streaming.
- (void)watchOutputFile:(NSString *)filePath {
dispatch_queue_t queue =
dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
// NOTE(review): bitRate is presumably bits/sec while [chunk length] below
// is bytes — bitrate/10 therefore is not exactly "0.1 s of data"; confirm
// the intended segment size.
double movieBitrate = self.videoEncoder.bitRate + self.audioEncoder.bitRate;
self.outputFileHandle = [NSFileHandle fileHandleForReadingAtPath:filePath];
// Watch every vnode event; only DELETE and EXTEND are acted upon below.
dispatchSource_ = dispatch_source_create(DISPATCH_SOURCE_TYPE_VNODE,
[outputFileHandle fileDescriptor],
DISPATCH_VNODE_DELETE |
DISPATCH_VNODE_WRITE |
DISPATCH_VNODE_EXTEND |
DISPATCH_VNODE_ATTRIB |
DISPATCH_VNODE_LINK |
DISPATCH_VNODE_RENAME |
DISPATCH_VNODE_REVOKE,
queue);
dispatch_source_set_event_handler(dispatchSource_, ^{
unsigned long flags = dispatch_source_get_data(dispatchSource_);
// File deleted out from under us: stop watching.
if (flags & DISPATCH_VNODE_DELETE) {
dispatch_source_cancel(dispatchSource_);
dispatchSource_ = nil;
}
// When file size has changed,
if (flags & DISPATCH_VNODE_EXTEND) {
// Read everything appended since the last read (or last seek-to-0).
NSData *chunk = [outputFileHandle readDataToEndOfFile];
if ([chunk length] > (movieBitrate / 10)) {
if (assetWriter.status == AVAssetWriterStatusWriting) {
// Skip if another segment rollover is already in flight.
if (self.encoderState != kEncoderStateRunning) {
DDLogDebug(@"encoderState =%d",encoderState_);
return;
}
@try {
// Update current encoder status to "EncoderFinishing"
self.encoderState = kEncoderStateFinishing;
DDLogDebug(@"检测到一波写入结束,设为kEncoderStateFinishing");
// Regardless of job failure, we need to reset current encoder
dispatch_source_cancel(dispatchSource_);
dispatchSource_ = nil;
[assetWriter finishWritingWithCompletionHandler:^{
// Meanwhile finishing, if we received stop signal, don't restart
// asset encoder again.
DDLogDebug(@"finishWritingWithCompletionHandler");
BOOL restartEncoder = YES;
if (self.encoderState == kEncoderStateStopped) {
DDLogDebug(@"self.encoderState == kEncoderStateStopped");
restartEncoder = NO;
}
if (assetWriter.status == AVAssetWriterStatusFailed) {
DDLogDebug(@"Failed to finish writing: %@", [assetWriter error]);
} else {
// Load the finished segment (now containing its moov box) so the
// mp4 structure can be parsed and streamed.
NSData *movWithMoov =
[NSData dataWithContentsOfFile:assetWriter.outputURL.path];
NSArray *frames = nil;
KSYMP4Reader *mp4Reader = [[KSYMP4Reader alloc] init];
if ([movWithMoov length] > 0) {
// Let's parse mp4 header
[mp4Reader readData:[KSYBytesData dataWithNSData:movWithMoov]];
frames = [mp4Reader readFrames];
// double pts = [self getOldestPts];
// Deliver the parsed header exactly once per stream.
if (!readMovieMeta_ && metaHeaderHandler) {
readMovieMeta_ = YES;
metaHeaderHandler(mp4Reader);
}
}
if (captureHandler) {
captureHandler(frames, movWithMoov);
}
assetWriter = nil;
}
if (restartEncoder) {
// Once it's done, generate new file name and reinitiate AVAssetWrite
self.outputURL = [NSURL fileURLWithPath:[self getOutputFilePath:fileType]
isDirectory:NO];
NSError *error;
// NOTE(review): |error| is never checked after this init — a nil
// writer here would fail silently on the next segment; confirm.
assetWriter = [[AVAssetWriter alloc] initWithURL:outputURL
fileType:fileType
error:&error];
// setVideoEncoder and setAudioEncoder will retain the given
// encoder objects so we need to reduce reference as it's retained
// in the functions.
// NOTE(review): these AVAssetWriterInput instances already served
// the previous writer. Apple's API expects fresh inputs per writer;
// reusing them (especially one that was ever marked finished) is a
// plausible source of the -12763 EnqueueAfterEndOfData appends —
// consider recreating the inputs per segment.
[assetWriter addInput:self.videoEncoder.assetWriterInput];
[assetWriter addInput:self.audioEncoder.assetWriterInput];
self.encoderState = kEncoderStateRunning;
DDLogDebug(@"新的一波设为kEncoderStateRunning");
// we are good to go.
// A new dispatch source will be armed on the next written sample
// (see -writeSampleBuffer:ofType:).
@synchronized (self) {
watchOutputFileReady_ = NO;
}
} else {
// Update current encoder status to "EncoderFinished", if we
// don't start the encoder again.
self.encoderState = kEncoderStateFinished;
}
}];
} @catch (NSException *exception) {
DDLogDebug(@"Caught exception: %@", [exception description]);
self.encoderState = kEncoderStateFinished;
}
}
} else {
// Not enough data yet for a segment: rewind so the next EXTEND event
// re-reads the whole accumulated chunk from the start.
[outputFileHandle seekToFileOffset:0];
}
}
});
dispatch_source_set_cancel_handler(dispatchSource_, ^(void){
[outputFileHandle closeFile];
});
dispatch_resume(dispatchSource_);
watchOutputFileReady_ = YES;
}