I've been given the task of writing a program that takes a sample raw YUV file and displays it in a Cocoa OpenGL program.
I'm an intern, and I have little to no idea how to start. I've been reading Wikipedia and articles on YUV, but I can't find any good source code showing how to open a raw YUV file, extract the data, convert it to RGB, and display it in a view window.
Essentially, I need help with the following aspects of the task:
- how to extract the YUV data from a sample YUV file
- how to convert the YUV data to the RGB color space
- how to display the RGB color space in OpenGL (this one I think I can figure out in time, but I really need help with the first two points)
Please tell me which classes to use, or point me to places where I can learn about YUV graphics/video display.
Answer 0 (score: 8)
I've done this with YUV frames captured from a CCD camera. Unfortunately, there are a number of different YUV formats. I believe the one Apple uses for the GL_YCBCR_422_APPLE texture format is technically 2VUY422. To convert an image from the YUV422 frames produced by an IIDC FireWire camera to 2VUY422, I used the following:
void yuv422_2vuy422(const unsigned char *theYUVFrame, unsigned char *the422Frame, const unsigned int width, const unsigned int height)
{
    unsigned int i = 0, j = 0;
    unsigned int numPixels = width * height;
    unsigned int totalNumberOfPasses = numPixels * 2;
    // signed ints, so that subtracting the 128 chroma offset can't wrap around
    int y0, y1, y2, y3, u0, u2, v0, v2;

    while (i < totalNumberOfPasses)
    {
        // Input ordering: U0 Y0 V0 Y1 U2 Y2 V2 Y3
        u0 = theYUVFrame[i++] - 128;
        y0 = theYUVFrame[i++];
        v0 = theYUVFrame[i++] - 128;
        y1 = theYUVFrame[i++];
        u2 = theYUVFrame[i++] - 128;
        y2 = theYUVFrame[i++];
        v2 = theYUVFrame[i++] - 128;
        y3 = theYUVFrame[i++];

        // Remap the values to 2VUY (YUYS?) (Y422) colorspace for OpenGL:
        // Y0 U Y1 V Y2 U Y3 V
        // IIDC cameras are full-range y=[0..255], u,v=[-127..+127], where display is "video range" (y=[16..240], u,v=[16..236])
        the422Frame[j++] = ((y0 * 240) / 255 + 16);
        the422Frame[j++] = ((u0 * 236) / 255 + 128);
        the422Frame[j++] = ((y1 * 240) / 255 + 16);
        the422Frame[j++] = ((v0 * 236) / 255 + 128);
        the422Frame[j++] = ((y2 * 240) / 255 + 16);
        the422Frame[j++] = ((u2 * 236) / 255 + 128);
        the422Frame[j++] = ((y3 * 240) / 255 + 16);
        the422Frame[j++] = ((v2 * 236) / 255 + 128);
    }
}
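To address the first point of the question (getting the data out of the file in the first place): a raw YUV file is typically just the packed frame bytes with no header, so a whole frame can be read with fread(). A minimal sketch, assuming a single packed 4:2:2 frame at 2 bytes per pixel; the loader name, dimensions, and packing are assumptions about your sample file:

#include <stdio.h>
#include <stdlib.h>

// Hypothetical loader: reads one packed 4:2:2 frame (2 bytes/pixel) from a
// headerless raw file and repacks it with yuv422_2vuy422() from above
unsigned char *loadRawYUV422Frame(const char *path, unsigned int width, unsigned int height)
{
    size_t frameBytes = (size_t)width * height * 2;   // 4:2:2 = 2 bytes per pixel
    unsigned char *rawFrame = malloc(frameBytes);
    unsigned char *vuyFrame = malloc(frameBytes);
    if (rawFrame == NULL || vuyFrame == NULL)
    {
        free(rawFrame); free(vuyFrame);
        return NULL;
    }

    FILE *f = fopen(path, "rb");
    if (f == NULL || fread(rawFrame, 1, frameBytes, f) != frameBytes)
    {
        if (f) fclose(f);
        free(rawFrame); free(vuyFrame);
        return NULL;
    }
    fclose(f);

    yuv422_2vuy422(rawFrame, vuyFrame, width, height);  // repack for GL_YCBCR_422_APPLE
    free(rawFrame);
    return vuyFrame;   // caller frees; use this as the videoTexture buffer below
}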
To display the YUV video feed efficiently, you may wish to use Apple's client storage extension, which you can set up with something like the following:
glEnable(GL_TEXTURE_RECTANGLE_EXT);
glBindTexture(GL_TEXTURE_RECTANGLE_EXT, 1);

// Point the texture at the client-side buffer and hint that the driver
// should share it rather than make its own copy
glTextureRangeAPPLE(GL_TEXTURE_RECTANGLE_EXT, videoImageWidth * videoImageHeight * 2, videoTexture);
glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_STORAGE_HINT_APPLE, GL_STORAGE_SHARED_APPLE);
glPixelStorei(GL_UNPACK_CLIENT_STORAGE_APPLE, GL_TRUE);

glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
glTexImage2D(GL_TEXTURE_RECTANGLE_EXT, 0, GL_RGBA, videoImageWidth, videoImageHeight, 0, GL_YCBCR_422_APPLE, GL_UNSIGNED_SHORT_8_8_REV_APPLE, videoTexture);
This lets you quickly swap out the data stored in the client-side video texture before each frame is displayed on screen.
To draw, you could then use code like the following:
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

glEnable(GL_TEXTURE_2D);

glViewport(0, 0, [self frame].size.width, [self frame].size.height);

glMatrixMode(GL_PROJECTION);
glLoadIdentity();
NSRect bounds = NSRectFromCGRect([self bounds]);
glOrtho((GLfloat)NSMinX(bounds), (GLfloat)NSMaxX(bounds), (GLfloat)NSMinY(bounds), (GLfloat)NSMaxY(bounds), -1.0, 1.0);

// Push the new frame data to the texture, then draw a quad with it
glBindTexture(GL_TEXTURE_RECTANGLE_EXT, 1);
glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT, 0, 0, 0, videoImageWidth, videoImageHeight, GL_YCBCR_422_APPLE, GL_UNSIGNED_SHORT_8_8_REV_APPLE, videoTexture);

glMatrixMode(GL_TEXTURE);
glLoadIdentity();

// Rectangle textures use non-normalized (pixel) texture coordinates
glBegin(GL_QUADS);
    glTexCoord2f(0.0f, 0.0f);
    glVertex2f(0.0f, videoImageHeight);

    glTexCoord2f(0.0f, videoImageHeight);
    glVertex2f(0.0f, 0.0f);

    glTexCoord2f(videoImageWidth, videoImageHeight);
    glVertex2f(videoImageWidth, 0.0f);

    glTexCoord2f(videoImageWidth, 0.0f);
    glVertex2f(videoImageWidth, videoImageHeight);
glEnd();
Answer 1 (score: 6)
Adam Rosenfield's comment is incorrect. On the Mac, you can display YCbCr (the digital equivalent of YUV) textures directly using the GL_YCBCR_422_APPLE texture format, as specified in the APPLE_ycbcr_422 extension.
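As a minimal sketch of what that upload looks like (the buffer name and dimensions are placeholders; Answer 0 above shows a full setup around it):

// The driver performs the YCbCr-to-RGB conversion when sampling;
// yuvBuffer holds packed 4:2:2 data, 2 bytes per pixel
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0,
             GL_YCBCR_422_APPLE, GL_UNSIGNED_SHORT_8_8_REV_APPLE, yuvBuffer);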
Answer 2 (score: 1)
This answer is incorrect; see the other answers and comments. The original answer is left below for posterity.
You can't display it directly. You'll need to convert it to an RGB texture. As you may have gathered from Wikipedia, there are lots of variations on the YUV color space. Make sure you're using the right one.
For each pixel, the conversion from YUV to RGB is a straightforward linear transform. You just do the same thing to each pixel independently.
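Written out with the constants used in the code below, that transform is:

R = Y + 1.13983 * V
G = Y - 0.39465 * U - 0.58060 * V
B = Y + 2.03211 * U

(These are the usual BT.601-derived constants, assuming Y in [0..1] and U, V centered on zero.)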
Once you've converted the image to RGB, you can display it by creating a texture. Call glGenTextures() to allocate a texture handle, glBindTexture() to bind the texture to the render context, and glTexImage2D() to upload the texture data to the GPU. To render it, call glBindTexture() again, followed by drawing a quad with its texture coordinates set up properly.
// parameters: image:  pointer to raw YUV input data
//             width:  image width (must be a power of 2)
//             height: image height (must be a power of 2)
// returns: a handle to the resulting RGB texture
GLuint makeTextureFromYUV(const float *image, int width, int height)
{
    float *rgbImage = (float *)malloc(width * height * 3 * sizeof(float));
    if (rgbImage == NULL)
        return 0;
    float *rgbImagePtr = rgbImage;

    // convert from YUV to RGB (floats used here for simplicity; it's a little
    // trickier with 8-bit ints)
    int y, x;
    for (y = 0; y < height; y++)
    {
        for (x = 0; x < width; x++)
        {
            float Y = *image++;
            float U = *image++;
            float V = *image++;
            *rgbImagePtr++ = Y + 1.13983f * V;                  // R
            *rgbImagePtr++ = Y - 0.39465f * U - 0.58060f * V;   // G
            *rgbImagePtr++ = Y + 2.03211f * U;                  // B
        }
    }

    // create texture
    GLuint texture;
    glGenTextures(1, &texture);

    // bind texture to render context
    glBindTexture(GL_TEXTURE_2D, texture);

    // upload texture data
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_FLOAT, rgbImage);

    // don't use mipmapping (since we're not creating any mipmaps); the default
    // minification filter uses mipmapping. Use linear filtering for minification
    // and magnification.
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

    // free data (it's now been copied onto the GPU) and return texture handle
    free(rgbImage);
    return texture;
}
To render:
glBindTexture(GL_TEXTURE_2D, texture);
glBegin(GL_QUADS);
glTexCoord2f(0.0f, 0.0f); glVertex3f( 0.0f, 0.0f, 0.0f);
glTexCoord2f(1.0f, 0.0f); glVertex3f(64.0f, 0.0f, 0.0f);
glTexCoord2f(1.0f, 1.0f); glVertex3f(64.0f, 64.0f, 0.0f);
glTexCoord2f(0.0f, 1.0f); glVertex3f( 0.0f, 64.0f, 0.0f);
glEnd();
Don't forget to call glEnable(GL_TEXTURE_2D) at some point during initialization, and to call glDeleteTextures(1, &texture) during shutdown.
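To tie this back to the original question: raw YUV files store 8-bit samples, while makeTextureFromYUV() above expects interleaved floats, so you'd normalize while loading. A minimal sketch, assuming a headerless, interleaved 4:4:4 layout at 3 bytes per pixel (the loader name, layout, and range conventions are assumptions about your sample file):

#include <stdio.h>
#include <stdlib.h>

// Hypothetical loader: reads interleaved 8-bit 4:4:4 YUV and converts it
// to the float layout expected by makeTextureFromYUV() above
float *loadYUVAsFloats(const char *path, int width, int height)
{
    size_t numSamples = (size_t)width * height * 3;
    unsigned char *raw = malloc(numSamples);
    float *out = malloc(numSamples * sizeof(float));
    if (raw == NULL || out == NULL)
    {
        free(raw); free(out);
        return NULL;
    }

    FILE *f = fopen(path, "rb");
    if (f == NULL || fread(raw, 1, numSamples, f) != numSamples)
    {
        if (f) fclose(f);
        free(raw); free(out);
        return NULL;
    }
    fclose(f);

    for (size_t i = 0; i < numSamples; i += 3)
    {
        out[i]     = raw[i]     / 255.0f;          // Y in [0..1]
        out[i + 1] = raw[i + 1] / 255.0f - 0.5f;   // U centered on zero
        out[i + 2] = raw[i + 2] / 255.0f - 0.5f;   // V centered on zero
    }
    free(raw);
    return out;   // pass to makeTextureFromYUV(), then free()
}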