我正在编写一个iOS应用程序，它将解码H.264帧并使用AVSampleBufferDisplayLayer进行渲染。我已经修改了帧，使其不带NAL起始码，而是带有4字节的NAL大小字段。这一点已得到验证。
但我在iOS模拟器中看到的只是白屏，没有任何错误；是否有办法转储解码后的帧以进行验证？任何其他调试思路也会有所帮助。
@implementation DecodeClass
/* UIViewController lifecycle hook.
 * BUG FIX: the original omitted [super viewDidLoad]; lifecycle overrides
 * must call through to super so UIKit can finish its own view setup. */
- (void)viewDidLoad {
    [super viewDidLoad];
}
/* method to decode and render a frame */
/* Decodes one AVCC-framed (4-byte NAL length prefixed) H.264 frame read from
 * Documents/input_mod1.264 and enqueues it on an AVSampleBufferDisplayLayer.
 *
 * Debugging note: to dump and verify decoded frames independently of the
 * display layer, decode with VTDecompressionSession instead — its output
 * callback hands you the decoded CVPixelBuffer, which can be written to disk
 * and inspected. AVSampleBufferDisplayLayer gives no access to decoded pixels.
 */
- (void)decodeFrame {
    NSLog(@"Decode Start");

    // SPS/PPS NAL payloads for this stream (raw, no start codes or prefixes).
    uint8_t sps[] = {0x67, 0x42, 0xC0, 0x0D, 0x96, 0x64, 0x0A, 0x0F, 0xDF, 0xF8, 0x00, 0x20, 0x00, 0x18, 0x80, 0x00,
                     0x00, 0x7D, 0x00, 0x00, 0x0B, 0xB5, 0x47, 0x8A, 0x15, 0x50};
    uint8_t pps[] = {0x68, 0xCE, 0x32, 0xC8};
    const uint8_t *props[] = {sps, pps};
    const size_t sizes[] = {sizeof(sps), sizeof(pps)};

    // BUG FIX: "/Documents/input_mod1.264" is an absolute path that does not
    // exist inside the iOS sandbox; resolve the app's Documents directory.
    NSString *docDir = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
    NSString *path = [docDir stringByAppendingPathComponent:@"input_mod1.264"];

    // BUG FIX: check fopen's result BEFORE seeking — the original called
    // fseeko/ftello on a possibly-NULL FILE* — and open in binary mode.
    FILE *pFile = fopen(path.fileSystemRepresentation, "rb");
    if (!pFile) {
        NSLog(@"Can't open file: %@", path);
        return;
    }
    fseeko(pFile, 0, SEEK_END);
    unsigned long fileSize = (unsigned long)ftello(pFile);
    fseeko(pFile, 0, SEEK_SET);

    _dataBuf = (uint8_t *)calloc(fileSize, sizeof(uint8_t));
    if (!_dataBuf) {
        fclose(pFile);
        return;
    }
    size_t bytesRead = fread(_dataBuf, sizeof(uint8_t), fileSize, pFile);
    fclose(pFile);
    if (bytesRead != fileSize) {
        NSLog(@"Short read: %zu of %lu bytes", bytesRead, fileSize);
    }

    // Rewrite Annex-B start codes into 4-byte AVCC length prefixes in place.
    // BUG FIX: the method takes uint32_t*, but the original passed the address
    // of an unsigned long — a silent width mismatch on 64-bit builds.
    uint32_t modifiedSize = (uint32_t)fileSize;
    [self MUX_Modify_AVC_Start_Code:_dataBuf size:&modifiedSize Header:false];
    size_t dataLen = modifiedSize;

    // Build the H.264 format description; 4 = size in bytes of each NAL
    // unit's length prefix, matching the AVCC framing produced above.
    CMVideoFormatDescriptionRef formatDesc = NULL;
    OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault, 2, props, sizes, 4, &formatDesc);
    if (status != noErr) {
        NSLog(@"CMVideoFormatDescriptionCreateFromH264ParameterSets failed: %d", (int)status);
        // BUG FIX: the original logged and then carried on with a NULL
        // format description, guaranteeing an undisplayable sample buffer.
        free(_dataBuf);
        _dataBuf = NULL;
        return;
    }

    // Wrap the frame bytes in a CMBlockBuffer. kCMBlockBufferAlwaysCopyDataFlag
    // makes CoreMedia copy the bytes, so _dataBuf can be freed afterwards.
    CMBlockBufferRef blockBufferOut = NULL;
    CMBlockBufferCreateEmpty(kCFAllocatorDefault, 0, 0, &blockBufferOut);
    CMBlockBufferAppendMemoryBlock(blockBufferOut,
                                   _dataBuf,
                                   dataLen,
                                   NULL,
                                   NULL,
                                   0,
                                   dataLen,
                                   kCMBlockBufferAlwaysCopyDataFlag);

    // One sample spanning the whole block buffer.
    size_t sampleSizeArray[1] = {CMBlockBufferGetDataLength(blockBufferOut)};
    CMSampleTimingInfo timingArray[1] = {
        {CMTimeMake(5, 1), CMTimeMake(5, 1), CMTimeMake(5, 1)}
    };
    CMSampleBufferRef sampBuf = NULL;
    status = CMSampleBufferCreate(kCFAllocatorDefault,
                                  blockBufferOut,
                                  YES,
                                  NULL,
                                  NULL,
                                  formatDesc,
                                  1,
                                  1,
                                  timingArray,
                                  1,
                                  sampleSizeArray,
                                  &sampBuf);
    NSLog(@"Decode End :: CMSampleBufferCreate returned %d", (int)status);

    // BUG FIX (likely cause of the blank layer): the sample's PTS is 5 s into
    // a timebase that starts at zero, so the layer shows nothing for 5 s.
    // Mark the sample for immediate display instead.
    if (sampBuf) {
        CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampBuf, YES);
        if (attachments && CFArrayGetCount(attachments) > 0) {
            CFMutableDictionaryRef attachment = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 0);
            CFDictionarySetValue(attachment, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);
        }
    }

    if (!_dspLayer) {
        _dspLayer = [[AVSampleBufferDisplayLayer alloc] init];
        // BUG FIX: the original set frame 320x240, then overrode bounds to
        // 300x300 and position to (500,500), pushing the layer off-screen on
        // small devices. One consistent geometry is enough.
        [_dspLayer setFrame:CGRectMake(0, 0, 320, 240)];
        _dspLayer.videoGravity = AVLayerVideoGravityResizeAspect;
        _dspLayer.backgroundColor = [UIColor blueColor].CGColor;

        // Drive the layer from the host clock, starting at time zero.
        CMTimebaseRef tmBase = NULL;
        CMTimebaseCreateWithMasterClock(kCFAllocatorDefault, CMClockGetHostTimeClock(), &tmBase);
        _dspLayer.controlTimebase = tmBase;
        CMTimebaseSetTime(_dspLayer.controlTimebase, kCMTimeZero);
        CMTimebaseSetRate(_dspLayer.controlTimebase, 1.0);
        // BUG FIX: the layer retains the timebase; release our CF reference.
        if (tmBase) CFRelease(tmBase);

        // NOTE(review): self.layerView is nil if this runs before the view
        // loads (e.g. straight from main()) — the layer is then never attached
        // and nothing can appear on screen. Trigger decoding from the view
        // controller lifecycle instead.
        [self.layerView.layer addSublayer:_dspLayer];
    }

    // Hand the single frame to the display layer.
    if ([self.dspLayer isReadyForMoreMediaData]) {
        [self.dspLayer enqueueSampleBuffer:sampBuf];
    } else {
        NSLog(@"Display layer not ready for media data");
    }
    [self.dspLayer setNeedsDisplay];

    // BUG FIX: release CF objects and the file buffer — the original leaked
    // formatDesc, blockBufferOut, sampBuf and _dataBuf on every call.
    if (sampBuf) CFRelease(sampBuf);
    if (blockBufferOut) CFRelease(blockBufferOut);
    CFRelease(formatDesc);
    free(_dataBuf);
    _dataBuf = NULL;
}
// Rewrites the buffer's H.264 Annex-B start codes into 4-byte NAL length
// prefixes (AVCC framing) in place, updating *nSize with the resulting
// length; bHeader presumably selects header-specific handling — TODO confirm.
// NOTE(review): body elided in this excerpt ("...."), so the exact contract
// cannot be verified from here.
-(void)MUX_Modify_AVC_Start_Code:(uint8_t*)pData size:(uint32_t *)nSize Header:(bool)bHeader{
....
}
// Scans streamBuf (bufSize bytes) for the next occurrence of the given
// start-code pattern and returns its offset — presumably used by
// MUX_Modify_AVC_Start_Code to locate Annex-B NAL boundaries; TODO confirm
// the return value on "not found". NOTE(review): body elided in this
// excerpt ("....").
-(uint32_t)MUX_FindNextPattern:(uint8_t*)streamBuf buffSize:(uint32_t)bufSize startCode:(uint32_t)startcode{
....
}
/* Clean up non-ARC resources. Under ARC there is no [super dealloc] call;
 * only the malloc'd frame buffer needs releasing here.
 * BUG FIX: the free() was commented out in the original, leaking _dataBuf.
 * free(NULL) is a safe no-op, so this is correct whether or not the buffer
 * was already released elsewhere. */
- (void)dealloc {
    free(_dataBuf);
    _dataBuf = NULL;
}
@end
/* App entry point: hands control to UIKit.
 * BUG FIX: the original allocated a DecodeClass and called -decodeFrame here,
 * BEFORE UIApplicationMain starts the app. At that point no window or view
 * hierarchy exists, the controller's view is never loaded, and layerView is
 * nil — so the decoded frame was enqueued on a layer that is never on screen,
 * a likely cause of the blank display. Trigger decoding from the view
 * controller's lifecycle (e.g. -viewDidAppear:) once the view is attached. */
int main(int argc, char * argv[]) {
    @autoreleasepool {
        return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
    }
}