I'm trying to convert kCVPixelFormatType_420YpCbCr8BiPlanarFullRange to RGB so that I can save the image to a JPG file. This is the code I'm using:
#define clamp(a) (a > 255 ? 255 : (a < 0 ? 0 : a));

- (UIImage *)makeUIImage:(uint8_t *)inBaseAddress bufferInfo:(CVPlanarPixelBufferInfo_YCbCrBiPlanar *)inBufferInfo width:(size_t)inWidth height:(size_t)inHeight bytesPerRow:(size_t)inBytesPerRow
{
    NSUInteger yOffset = EndianU32_BtoN(inBufferInfo->componentInfoY.offset);
    NSUInteger yPitch = EndianU32_BtoN(inBufferInfo->componentInfoY.rowBytes);
    NSUInteger cbCrOffset = EndianU32_BtoN(inBufferInfo->componentInfoCbCr.offset);
    NSUInteger cbCrPitch = EndianU32_BtoN(inBufferInfo->componentInfoCbCr.rowBytes);

    uint8_t *rgbBuffer = malloc(inWidth * inHeight * 4);
    uint8_t *yBuffer = inBaseAddress + yOffset;
    uint8_t *cbCrBuffer = inBaseAddress + cbCrOffset;

    for (int y = 0; y < inHeight; y++)
    {
        uint8_t *rgbBufferLine = &rgbBuffer[y * inWidth * 4];
        uint8_t *yBufferLine = &yBuffer[y * yPitch];
        uint8_t *cbCrBufferLine = &cbCrBuffer[(y >> 1) * cbCrPitch];

        for (int x = 0; x < inWidth; x++)
        {
            uint8_t y = yBufferLine[x];
            uint8_t cb = cbCrBufferLine[x & ~1];
            uint8_t cr = cbCrBufferLine[x | 1];

            uint8_t *rgbOutput = &rgbBufferLine[x * 4];

            int r = (int)round( (y - 16) * 1.164 + (cr - 128) * 1.596 );
            int g = (int)round( (y - 16) * 1.164 + (cb - 128) * -0.391 + (cr - 128) * -0.813 );
            int b = (int)round( (y - 16) * 1.164 + (cb - 128) * 2.018 );

            rgbOutput[0] = 0xff;
            rgbOutput[1] = clamp(r);
            rgbOutput[2] = clamp(g);
            rgbOutput[3] = clamp(b);
        }
    }

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(rgbBuffer, yPitch, inHeight, 8,
        yPitch * 4, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedLast);
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    UIImage *image = [UIImage imageWithCGImage:quartzImage];
    CGImageRelease(quartzImage);

    free(rgbBuffer);

    return image;
}
- (void)createImg:(CMSampleBufferRef)sampleBuffer index:(int)index
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    // Lock the image buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    // Get information about the image
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);

    CVPlanarPixelBufferInfo_YCbCrBiPlanar *bufferInfo = (CVPlanarPixelBufferInfo_YCbCrBiPlanar *)baseAddress;
    baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);

    // Convert the image
    UIImage *img = [self makeUIImage:baseAddress bufferInfo:bufferInfo width:width height:height bytesPerRow:bytesPerRow];

    NSString *videoThumbPath = [NSString stringWithFormat:@"%@/img%02d.jpg", burstFolderPath, index];

    NSData *imageData = [NSData dataWithData:UIImageJPEGRepresentation(img, 0.8f)];
    [imageData writeToFile:videoThumbPath atomically:YES];
}
The image is generated, but the colors are wrong: the picture has a blue tint, and there also seems to be a break along the right edge of the image.

What am I doing wrong?
Answer 0 (score: 1)
// Hope it can help someone!

int bytesPerPixel = 4;
uint8_t *rgbBuffer = (uint8_t *)malloc(width * height * bytesPerPixel);

for (int y = 0; y < height; y++)
{
    uint8_t *rgbBufferLine = &rgbBuffer[y * width * bytesPerPixel];
    uint8_t *yBufferLine = &yBuffer[y * yPitch];
    uint8_t *cbCrBufferLine = &cbCrBuffer[(y >> 1) * cbCrPitch];

    for (int x = 0; x < width; x++)
    {
        int16_t y = yBufferLine[x];
        int16_t cb = cbCrBufferLine[x & ~1] - 128;
        int16_t cr = cbCrBufferLine[x | 1] - 128;

        uint8_t *rgbOutput = &rgbBufferLine[x * bytesPerPixel];

        int16_t r = (int16_t)roundf( y + cr * 1.4 );
        int16_t g = (int16_t)roundf( y + cb * -0.343 + cr * -0.711 );
        int16_t b = (int16_t)roundf( y + cb * 1.765 );

        // ABGR
        rgbOutput[0] = 0xff;
        rgbOutput[1] = clamp(b);
        rgbOutput[2] = clamp(g);
        rgbOutput[3] = clamp(r);
    }
}
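
A way to read the fix: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange carries full-range luma, so the video-range (y - 16) * 1.164 scaling used in the question is not needed, and with kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedLast the pixel bytes land in memory as A, B, G, R, which is why the corrected loop stores blue before red (the question wrote A, R, G, B, which would swap the red and blue channels and could account for the blue tint). Below is a minimal sketch, not the answer author's complete method, of how the loop might be folded back into a makeUIImage-style method; the name makeFullRangeUIImage: is made up for illustration, and the CGBitmapContextCreate call is assumed to describe the RGB buffer itself (inWidth pixels, inWidth * 4 bytes per row) rather than the Y-plane pitch, since a pitch wider than the image width could also explain the break along the right edge.

// A minimal sketch, assuming the same headers and the clamp() macro the
// question's file already uses; the method name is hypothetical.
- (UIImage *)makeFullRangeUIImage:(uint8_t *)inBaseAddress
                       bufferInfo:(CVPlanarPixelBufferInfo_YCbCrBiPlanar *)inBufferInfo
                            width:(size_t)inWidth
                           height:(size_t)inHeight
{
    NSUInteger yOffset    = EndianU32_BtoN(inBufferInfo->componentInfoY.offset);
    NSUInteger yPitch     = EndianU32_BtoN(inBufferInfo->componentInfoY.rowBytes);
    NSUInteger cbCrOffset = EndianU32_BtoN(inBufferInfo->componentInfoCbCr.offset);
    NSUInteger cbCrPitch  = EndianU32_BtoN(inBufferInfo->componentInfoCbCr.rowBytes);

    const int bytesPerPixel = 4;
    uint8_t *rgbBuffer  = (uint8_t *)malloc(inWidth * inHeight * bytesPerPixel);
    uint8_t *yBuffer    = inBaseAddress + yOffset;
    uint8_t *cbCrBuffer = inBaseAddress + cbCrOffset;

    for (int row = 0; row < inHeight; row++)
    {
        uint8_t *rgbBufferLine  = &rgbBuffer[row * inWidth * bytesPerPixel];
        uint8_t *yBufferLine    = &yBuffer[row * yPitch];
        uint8_t *cbCrBufferLine = &cbCrBuffer[(row >> 1) * cbCrPitch];  // chroma is subsampled 2x vertically

        for (int x = 0; x < inWidth; x++)
        {
            int16_t y  = yBufferLine[x];                // full-range luma, no -16 offset
            int16_t cb = cbCrBufferLine[x & ~1] - 128;  // Cb/Cr are interleaved per pixel pair
            int16_t cr = cbCrBufferLine[x | 1]  - 128;

            int16_t r = (int16_t)roundf(y + cr * 1.4f);
            int16_t g = (int16_t)roundf(y + cb * -0.343f + cr * -0.711f);
            int16_t b = (int16_t)roundf(y + cb * 1.765f);

            // A, B, G, R in memory to match 32Little + AlphaPremultipliedLast
            uint8_t *rgbOutput = &rgbBufferLine[x * bytesPerPixel];
            rgbOutput[0] = 0xff;
            rgbOutput[1] = clamp(b);
            rgbOutput[2] = clamp(g);
            rgbOutput[3] = clamp(r);
        }
    }

    // Describe the RGB buffer itself: inWidth pixels per row, inWidth * 4 bytes per row.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(rgbBuffer, inWidth, inHeight, 8,
        inWidth * bytesPerPixel, colorSpace,
        kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedLast);
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    UIImage *image = [UIImage imageWithCGImage:quartzImage];
    CGImageRelease(quartzImage);
    free(rgbBuffer);

    return image;
}

If something like this replaced the original call in createImg:, the pixel buffer would presumably still need a matching CVPixelBufferUnlockBaseAddress(imageBuffer, 0) after the conversion; the question's snippet locks the buffer but never unlocks it.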