我正在尝试测试将Vuforia生成的YUV图像通过iOS Accelerate Framework的vImage调用转换为UIImage的性能。在代码的当前状态下，我只是想先让它跑通。目前转换产生的是带暗色条纹的图像。有没有关于Vuforia在其实现中如何布局YUV格式的公开细节？我最初的假设是他们在iOS设备上使用的是双平面420格式。相关的测试代码如下。
/// Converts a QCAR (Vuforia) camera frame into a UIImage.
///
/// Supported formats: RGB888 and GRAYSCALE (wrapped directly via
/// CGImageCreate), and YUV, which is treated as tri-planar 4:2:0
/// (full-size Y plane followed by quarter-size Cb and Cr planes) and
/// converted to BGRA8888 with vImage before wrapping in a CGImage.
///
/// @param cameraImage Frame obtained from Vuforia; may be NULL.
/// @return An autoreleased UIImage, or nil on NULL input, unsupported
///         format, or conversion failure.
UIImage *imageWithQCARCameraImage(const QCAR::Image *cameraImage)
{
    UIImage *image = nil;
    if (cameraImage) {
        QCAR::PIXEL_FORMAT pixelFormat = cameraImage->getFormat();
        CGColorSpaceRef colorSpace = NULL;
        switch (pixelFormat) {
            case QCAR::YUV:        // converted to RGB below, so RGB color space
            case QCAR::RGB888:
                colorSpace = CGColorSpaceCreateDeviceRGB();
                break;
            case QCAR::GRAYSCALE:
                colorSpace = CGColorSpaceCreateDeviceGray();
                break;
            case QCAR::RGB565:
            case QCAR::RGBA8888:
            case QCAR::INDEXED:
                std::cerr << "Image format conversion not implemented." << std::endl;
                break;
            case QCAR::UNKNOWN_FORMAT:
                std::cerr << "Image format unknown." << std::endl;
                break;
        }
        int bitsPerComponent = 8;
        int width = cameraImage->getWidth();
        int height = cameraImage->getHeight();
        const void *baseAddress = cameraImage->getPixels();
        size_t totalBytes = QCAR::getBufferSize(width, height, pixelFormat);
        CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
        CGImageRef imageRef = NULL;
        if (pixelFormat == QCAR::YUV && colorSpace != NULL) {
            // Plane layout assumed: [Y (w*h)] [Cb (w/2 * h/2)] [Cr (w/2 * h/2)].
            // NOTE(review): if the device actually delivers bi-planar NV12/NV21
            // (interleaved CbCr), this slicing is wrong and produces exactly the
            // striped output described above — confirm against Vuforia docs.
            uint8_t *sourceDataAddress = (uint8_t *)const_cast<void *>(baseAddress);
            size_t lumaBytes = (size_t)width * (size_t)height;
            size_t chrominanceBytes = totalBytes - lumaBytes;
            // The buffers must NOT be static: they hold per-frame pointers and
            // sizes, and static initialization would freeze the first frame's
            // values. Every field — including rowBytes — must be populated.
            vImage_Buffer srcYp = {
                .data = static_cast<void *>(sourceDataAddress),
                .height = static_cast<vImagePixelCount>(height),
                .width = static_cast<vImagePixelCount>(width),
                .rowBytes = static_cast<size_t>(width)
            };
            vImage_Buffer srcCb = {
                .data = static_cast<void *>(sourceDataAddress + lumaBytes),
                .height = static_cast<vImagePixelCount>(height) / 2,
                .width = static_cast<vImagePixelCount>(width) / 2,
                .rowBytes = static_cast<size_t>(width) / 2
            };
            vImage_Buffer srcCr = {
                .data = static_cast<void *>(sourceDataAddress + lumaBytes + (chrominanceBytes / 2)),
                .height = static_cast<vImagePixelCount>(height) / 2,
                .width = static_cast<vImagePixelCount>(width) / 2,
                .rowBytes = static_cast<size_t>(width) / 2
            };
            // Let vImage allocate a correctly sized and aligned 32-bit
            // destination (the original referenced an undefined `imageData`
            // and left rowBytes unset).
            vImage_Buffer dest;
            vImage_Error error = vImageBuffer_Init(&dest,
                                                   static_cast<vImagePixelCount>(height),
                                                   static_cast<vImagePixelCount>(width),
                                                   32,
                                                   kvImageNoFlags);
            if (error == kvImageNoError) {
                // Video-range ITU-R BT.601: Y in [16, 235], Cb/Cr in [16, 240].
                vImage_YpCbCrPixelRange pixelRange = { 16, 128, 235, 240, 255, 0, 255, 0 };
                vImage_YpCbCrToARGB info;
                error = vImageConvert_YpCbCrToARGB_GenerateConversion(kvImage_YpCbCrToARGBMatrix_ITU_R_601_4,
                                                                      &pixelRange,
                                                                      &info,
                                                                      kvImage420Yp8_Cb8_Cr8,
                                                                      kvImageARGB8888,
                                                                      kvImagePrintDiagnosticsToConsole);
                if (error == kvImageNoError) {
                    // Permute ARGB -> BGRA (the byte order iOS composites
                    // natively); force alpha to opaque 255 rather than 1.
                    uint8_t permuteMap[4] = { 3, 2, 1, 0 };
                    error = vImageConvert_420Yp8_Cb8_Cr8ToARGB8888(&srcYp,
                                                                   &srcCb,
                                                                   &srcCr,
                                                                   &dest,
                                                                   &info,
                                                                   permuteMap,
                                                                   255,
                                                                   kvImageNoFlags);
                }
                if (error == kvImageNoError) {
                    // 32 bits per pixel (not 24): the destination is BGRA8888
                    // with a skipped alpha byte, little-endian.
                    vImage_CGImageFormat format = {
                        .bitsPerComponent = static_cast<uint32_t>(bitsPerComponent),
                        .bitsPerPixel = static_cast<uint32_t>(4 * bitsPerComponent),
                        .colorSpace = colorSpace,
                        .bitmapInfo = static_cast<CGBitmapInfo>(kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little),
                        .version = 0,
                        .decode = NULL,
                        .renderingIntent = renderingIntent
                    };
                    // With kvImageNoFlags the CGImage copies the buffer, so
                    // dest.data can be released immediately afterwards.
                    imageRef = vImageCreateCGImageFromBuffer(&dest,
                                                             &format,
                                                             NULL,
                                                             NULL,
                                                             kvImageNoFlags,
                                                             &error);
                }
                free(dest.data);   // vImageBuffer_Init allocated with malloc
            }
            if (error != kvImageNoError) {
                std::cerr << "vImage conversion failed, error " << error << std::endl;
            }
        } else if (colorSpace != NULL) {
            // RGB888 / GRAYSCALE: wrap the camera bytes directly, honoring the
            // row stride reported by Vuforia.
            int bitsPerPixel = QCAR::getBitsPerPixel(pixelFormat);
            int bytesPerRow = cameraImage->getStride();
            CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaNone;
            CGDataProviderRef provider = CGDataProviderCreateWithData(NULL,
                                                                      baseAddress,
                                                                      totalBytes,
                                                                      NULL);
            imageRef = CGImageCreate(width,
                                     height,
                                     bitsPerComponent,
                                     bitsPerPixel,
                                     bytesPerRow,
                                     colorSpace,
                                     bitmapInfo,
                                     provider,
                                     NULL,
                                     false,
                                     renderingIntent);
            CGDataProviderRelease(provider);
        }
        if (imageRef != NULL) {
            image = [UIImage imageWithCGImage:imageRef];
            CGImageRelease(imageRef);
        }
        if (colorSpace != NULL) {
            CGColorSpaceRelease(colorSpace);
        }
    }
    return image;
}
答案 0（得分：0）
// Corrected tri-planar 4:2:0 YUV -> BGRA8888 conversion using vImage.
// NOTE(review): `buffer`, `width` and `height` come from the enclosing
// function, which is not shown in this snippet.
void *baseAddress = buffer;
// 4:2:0 sampling: full-size luma plane + two quarter-size chroma planes.
size_t totalBytes = width * height * 3 / 2;
uint8_t *sourceDataAddress = (uint8_t *)baseAddress;
// Luma plane: one byte per pixel, rows tightly packed (rowBytes == width).
vImage_Buffer srcYp = {
.width = static_cast<vImagePixelCount>(width),
.height = static_cast<vImagePixelCount>(height),
.rowBytes = static_cast<size_t>(width),
.data = const_cast<void *>(baseAddress),
};
size_t lumaBytes = width * height;
size_t chromianceBytes = totalBytes - lumaBytes;
// Cb plane starts right after the luma plane; half resolution on both axes.
vImage_Buffer srcCb = {
.width = static_cast<vImagePixelCount>(width) / 2,
.height = static_cast<vImagePixelCount>(height) / 2,
.rowBytes = static_cast<size_t>(width) / 2,
.data = static_cast<void *>(sourceDataAddress + lumaBytes),
};
// Cr plane occupies the second half of the chrominance bytes.
vImage_Buffer srcCr = {
.width = static_cast<vImagePixelCount>(width) / 2,
.height = static_cast<vImagePixelCount>(height) / 2,
.rowBytes = static_cast<size_t>(width) / 2,
.data = static_cast<void *>(sourceDataAddress + lumaBytes + (chromianceBytes / 2)),
};
// Let vImage allocate a correctly sized/aligned 32-bit destination buffer.
vImage_Buffer dest;
dest.data = NULL;
vImage_Error error = kvImageNoError;
error = vImageBuffer_Init(&dest, height, width, 32, kvImageNoFlags);
// vImage_YpCbCrPixelRange pixelRange = (vImage_YpCbCrPixelRange){ 0, 128, 255, 255, 255, 1, 255, 0 };
// Video-range ITU-R BT.601: Y in [16, 235], Cb/Cr in [16, 240].
vImage_YpCbCrPixelRange pixelRange = { 16, 128, 235, 240, 255, 0, 255, 0 };
vImage_YpCbCrToARGB info;
error = kvImageNoError;
error = vImageConvert_YpCbCrToARGB_GenerateConversion(kvImage_YpCbCrToARGBMatrix_ITU_R_601_4,
&pixelRange,
&info,
kvImage420Yp8_Cb8_Cr8,
kvImageARGB8888,
kvImagePrintDiagnosticsToConsole);
error = kvImageNoError;
uint8_t permuteMap[4] = {3, 2, 1, 0}; // permute ARGB -> BGRA, the byte order iOS renders natively
error = vImageConvert_420Yp8_Cb8_Cr8ToARGB8888(&srcYp,
&srcCb,
&srcCr,
&dest,
&info,
permuteMap, // on iOS the permute map must not be NULL (BGRA required); macOS accepts NULL
255,
kvImageNoFlags);