以下是我的代码
崩溃主要发生在下面这两行代码上。
BOOL success = [_avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time]; if (!success) { NSLog(@"警告:无法将缓冲区写入视频"); }
// Drop this frame if the previous frame is still being rendered
// (non-blocking semaphore wait keeps the display link from piling up work).
if (dispatch_semaphore_wait(_frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0) {
    return;
}
dispatch_async(_render_queue, ^{
    if (![_videoWriterInput isReadyForMoreMediaData]) return;

    // Anchor all presentation timestamps to the first display-link tick.
    if (!self.firstTimeStamp) {
        self.firstTimeStamp = _displayLink.timestamp;
    }
    CFTimeInterval elapsed = (_displayLink.timestamp - self.firstTimeStamp);
    CMTime time = CMTimeMakeWithSeconds(elapsed, 1000);

    CVPixelBufferRef pixelBuffer = NULL;
    CGContextRef bitmapContext = [self createPixelBufferAndBitmapContext:&pixelBuffer];
    if (self.delegate) {
        [self.delegate writeBackgroundFrameInContext:&bitmapContext];
    }
    // draw each window into the context (other windows include UIKeyboard, UIAlert)
    // FIX: UIKeyboard is currently only rendered correctly in portrait orientation
    dispatch_sync(dispatch_get_main_queue(), ^{
        UIGraphicsPushContext(bitmapContext); {
            for (UIWindow *window in [[UIApplication sharedApplication] windows]) {
                [window drawViewHierarchyInRect:CGRectMake(0, 0, _viewSize.width, _viewSize.height) afterScreenUpdates:NO];
            }
        } UIGraphicsPopContext();
    });
    // append pixelBuffer on an async dispatch_queue, the next frame is rendered whilst this one appends
    // must not overwhelm the queue with pixelBuffers, therefore:
    // check if _append_pixelBuffer_queue is ready
    // if it's not ready, release pixelBuffer and bitmapContext
    if (dispatch_semaphore_wait(_pixelAppendSemaphore, DISPATCH_TIME_NOW) == 0) {
        dispatch_async(_append_pixelBuffer_queue, ^{
            BOOL success = [_avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
            if (!success) {
                NSLog(@"Warning: Unable to write buffer to video");
            }
            // Release drawing resources only after the adaptor has consumed the buffer.
            CGContextRelease(bitmapContext);
            CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
            CVPixelBufferRelease(pixelBuffer);
            dispatch_semaphore_signal(_pixelAppendSemaphore);
        });
    } else {
        // Append queue busy: drop the frame and release its resources.
        CGContextRelease(bitmapContext);
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        CVPixelBufferRelease(pixelBuffer);
    }
    dispatch_semaphore_signal(_frameRenderingSemaphore);
});
错误堆栈信息如下:
thread #34: tid = 0x4aefc, 0x00000001958952bc libsystem_platform.dylib`_platform_memmove + 108, queue = 'ASScreenRecorder.append_queue', stop reason = EXC_BAD_ACCESS (code=1, address=0x109c60000)
  frame #0: 0x00000001958952bc libsystem_platform.dylib`_platform_memmove + 108
  frame #1: 0x0000000183c32004 CoreMedia`FigNEAtomWriterAppendData + 92
  frame #2: 0x0000000183c31f8c CoreMedia`sbufAtom_appendAtomWithMemoryBlock + 104
  frame #3: 0x0000000183c2f78c CoreMedia`sbufAtom_createSerializedDataForPixelBuffer + 588
  frame #4: 0x0000000183c2f4b8 CoreMedia`FigRemote_CreateSerializedAtomDataForPixelBuffer + 288
  frame #5: 0x0000000185a6af3c MediaToolbox`remoteWriter_AddPixelBuffer + 140
  frame #6: 0x0000000181bcebd4 AVFoundation`-[AVFigAssetWriterTrack addPixelBuffer:atPresentationTime:error:] + 176
  frame #7: 0x0000000181bca848 AVFoundation`-[AVAssetWriterInputWritingHelper appendPixelBuffer:withPresentationTime:] + 124
  frame #8: 0x0000000181bc82fc AVFoundation`-[AVAssetWriterInput _appendPixelBuffer:withPresentationTime:] + 88
  frame #9: 0x0000000181bcdb40 AVFoundation`-[AVAssetWriterInputPixelBufferAdaptor appendPixelBuffer:withPresentationTime:] + 104
  frame #10: 0x00000001000f45e0 Island Guide Aruba`__35-[ASScreenRecorder writeVideoFrame]_block_invoke192(.block_descriptor=<unavailable>) + 132 at ASScreenRecorder.m:301
  frame #11: 0x00000001005b0fd4 libdispatch.dylib`_dispatch_call_block_and_release + 24
  frame #12: 0x00000001005b0f94 libdispatch.dylib`_dispatch_client_callout + 16
  frame #13: 0x00000001005bbdb8 libdispatch.dylib`_dispatch_queue_drain + 780
  frame #14: 0x00000001005b42c4 libdispatch.dylib`_dispatch_queue_invoke + 132
  frame #15: 0x00000001005be5d4 libdispatch.dylib`_dispatch_root_queue_drain + 772
  frame #16: 0x00000001005c0248 libdispatch.dylib`_dispatch_worker_thread3 + 132
  frame #17: 0x000000019589d21c libsystem_pthread.dylib`_pthread_wqthread + 816
=============================================== =========================
答案 0 :(得分:0)
我假设您正在使用 ASScreenRecorder(因为有几行代码与该仓库相同)。我花了几天时间研究这个崩溃,可能找到了问题所在:创建像素缓冲池时存在问题(在 [self createPixelBufferAndBitmapContext:&pixelBuffer] 方法中)。
下面是我修改过的代码:
/// Renders the current screen contents into a freshly created pixel buffer
/// and appends it to the asset writer. Created per-frame with
/// CVPixelBufferCreate instead of a CVPixelBufferPool to avoid the
/// pool-related EXC_BAD_ACCESS in the append path.
- (void)writeVideoFrame{
    dispatch_async(_render_queue, ^{
        if (![_videoWriterInput isReadyForMoreMediaData]){
            return;
        }
        CVPixelBufferRef pixelBuffer = NULL;
        CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, _viewSize.width, _viewSize.height, kCVPixelFormatType_32BGRA, (__bridge CFDictionaryRef _Nullable)(_outputBufferPoolAuxAttributes), &pixelBuffer);
        // Guard: locking or drawing into a NULL buffer is undefined behavior.
        if (status != kCVReturnSuccess || pixelBuffer == NULL) {
            NSLog(@"Warning: CVPixelBufferCreate failed (%d)", (int)status);
            return;
        }
        CVPixelBufferLockBaseAddress( pixelBuffer, 0 );
        // Bitmap context that draws directly into the pixel buffer's memory.
        CGContextRef bitmapContext = CGBitmapContextCreate(CVPixelBufferGetBaseAddress(pixelBuffer),
                                                           CVPixelBufferGetWidth(pixelBuffer),
                                                           CVPixelBufferGetHeight(pixelBuffer),
                                                           8, CVPixelBufferGetBytesPerRow(pixelBuffer), _rgbColorSpace,
                                                           kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst
                                                           );
        if (bitmapContext == NULL) {
            // Guard: don't leak the pixel buffer if the context can't be created.
            CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
            CVPixelBufferRelease(pixelBuffer);
            return;
        }
        CGContextScaleCTM(bitmapContext, _scale, _scale);
        // CoreGraphics origin is bottom-left; flip so UIKit draws right-side up.
        CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, _viewSize.height);
        CGContextConcatCTM(bitmapContext, flipVertical);
        // UIKit drawing must happen on the main thread.
        dispatch_sync(dispatch_get_main_queue(), ^{
            UIGraphicsPushContext(bitmapContext); {
                [[UIApplication sharedApplication].keyWindow drawViewHierarchyInRect:CGRectMake(0, 0, _viewSize.width, _viewSize.height) afterScreenUpdates:NO];
            } UIGraphicsPopContext();
        });
        // Presentation time relative to the first audio timestamp so A/V stay in sync.
        CMTime time = CMTimeAdd(_firstAudioTimeStamp, CMTimeMakeWithSeconds([[NSDate date] timeIntervalSinceDate:_startedAt], _firstAudioTimeStamp.timescale));
        if (![_avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time]) {
            NSLog(@"Warning: Unable to write buffer to video");
        }
        CGContextRelease(bitmapContext);
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        CVPixelBufferRelease(pixelBuffer);
    });
}
如您所见,我不再使用 CVPixelBufferPoolRef 和 CVPixelBufferPoolCreatePixelBuffer,而是直接用 CVPixelBufferCreate 方法创建 CVPixelBufferRef。这样可能会慢一些,但自从这样修改之后,我再也没有遇到过崩溃。
答案 1 :(得分:0)
我也有这个问题。我的解决方法是尝试保留缓冲区:
CVPixelBufferRef pixelBuffer = NULL;
// NOTE(review): CVPixelBufferRetain is called while pixelBuffer is still NULL,
// and retaining NULL is a no-op — so this extra retain happens before the buffer
// exists and likely has no effect on the crash. Presumably the intent was to
// retain the buffer AFTER createPixelBufferAndBitmapContext: fills it in — verify.
CVPixelBufferRetain(pixelBuffer); // hope to retain the buffer
CGContextRef bitmapContext = [self createPixelBufferAndBitmapContext:&pixelBuffer];
它似乎没有崩溃。