I want to display pixel data on an OpenGL view, but I can't see anything except an empty pink GL screen. Please check my code and let me know what the mistake is.
@implementation GLView
+ (Class)layerClass
{
    return [CAEAGLLayer class];
}
- (id)initWithFrame:(CGRect)frame
{
    if ((self = [super initWithFrame:frame]))
    {
        // Do OpenGL Core Animation layer setup
        CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
        eaglLayer.opaque = YES;
        eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
                                        [NSNumber numberWithBool:NO], kEAGLDrawablePropertyRetainedBacking,
                                        kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil];

        context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
        if (!context || ![EAGLContext setCurrentContext:context]
            || ![self createFramebuffers])
            return nil;
    }
    return self;
}
- (BOOL)createFramebuffers
{
    glEnable(GL_TEXTURE_2D);
    glDisable(GL_DEPTH_TEST);

    // Onscreen framebuffer object
    glGenFramebuffers(1, &viewFramebuffer);
    glBindFramebuffer(GL_FRAMEBUFFER, viewFramebuffer);

    glGenRenderbuffers(1, &viewRenderbuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, viewRenderbuffer);

    [context renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer *)self.layer];
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &backingWidth);
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &backingHeight);
    NSLog(@"Backing width: %d, height: %d", backingWidth, backingHeight);

    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, viewRenderbuffer);

    if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
    {
        NSLog(@"Failure with framebuffer generation");
        return NO;
    }
    return YES;
}
- (void)setDisplayFramebuffer
{
    if (context)
    {
        // [EAGLContext setCurrentContext:context];
        if (!viewFramebuffer)
            [self createFramebuffers];
        glBindFramebuffer(GL_FRAMEBUFFER, viewFramebuffer);
        // glBindRenderbuffer(GL_RENDERBUFFER, viewRenderbuffer);
        glViewport(0, 0, backingWidth, backingHeight);
    }
}
- (BOOL)presentFramebuffer
{
    BOOL success = FALSE;
    if (context)
    {
        // [EAGLContext setCurrentContext:context];
        glBindRenderbuffer(GL_RENDERBUFFER, viewRenderbuffer);
        success = [context presentRenderbuffer:GL_RENDERBUFFER];
    }
    return success;
}
@end
And in the view controller:
m_glView = [[GLView alloc] initWithFrame:CGRectMake(0, 0, m_viewPlayer.frame.size.width, m_viewPlayer.frame.size.height)];
[m_viewPlayer addSubview:m_glView];
[self loadVertexShader:@"DirectDisplayShader" fragmentShader:@"DirectDisplayShader" forProgram:&m_directDisplayProgram];
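(The DirectDisplayShader sources are not included in the question. For context, a pass-through pair for drawing a textured full-screen quad typically looks roughly like the sketch below; this is a hypothetical reconstruction, so the attribute and uniform names are assumptions and may not match the asker's actual files.)

// DirectDisplayShader.vsh (hypothetical pass-through vertex shader)
attribute vec4 position;
attribute vec2 inputTextureCoordinate;
varying vec2 textureCoordinate;

void main()
{
    gl_Position = position;
    textureCoordinate = inputTextureCoordinate;
}

// DirectDisplayShader.fsh (hypothetical pass-through fragment shader)
varying highp vec2 textureCoordinate;
uniform sampler2D videoFrame;

void main()
{
    gl_FragColor = texture2D(videoFrame, textureCoordinate);
}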
And in the timer loop:
CMSampleBufferRef buf = [m_trackOutput copyNextSampleBuffer];
if (buf == nil)
    return;
[self processFrame:buf]; // draw frame to the OpenGL view
CFRelease(buf);
- (void)processFrame:(CMSampleBufferRef)sampleBuffer
{
    if (m_videoFrameTexture)
        glDeleteTextures(1, &m_videoFrameTexture);

    // Get a CMSampleBuffer's Core Video image buffer for the media data
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    CGFloat width = CVPixelBufferGetWidth(imageBuffer);
    CGFloat height = CVPixelBufferGetHeight(imageBuffer);

    // Create a new texture from the camera frame data, display that using the shaders
    glGenTextures(1, &m_videoFrameTexture);
    glBindTexture(GL_TEXTURE_2D, m_videoFrameTexture);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    // This is necessary for non-power-of-two textures
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    // Using BGRA extension to pull in video frame data directly
    // glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(imageBuffer));
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(imageBuffer));

    [self drawFrame];

    // glDeleteTextures(1, &videoFrameTexture);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}
- (void)drawFrame
{
    [m_glView setDisplayFramebuffer];
    [self drawCapturedScreen];
    [m_glView presentFramebuffer];
}
- (void)drawCapturedScreen
{
    glUseProgram(m_directDisplayProgram);

    glBindTexture(GL_TEXTURE_2D, m_videoFrameTexture);
    glUniform1i(uniforms[UNIFORM_VIDEOFRAME], 0);

    // Update attribute values.
    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };
    glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, squareVertices);
    glEnableVertexAttribArray(ATTRIB_VERTEX);

    float m_fScale = 1.0f;
    GLfloat kRate = 1.0f / m_fScale;
    GLfloat kX = (1.0 - kRate) / 2;
    GLfloat kY = (1.0 - kRate) / 2;
    GLfloat kS = kX + kRate;
    GLfloat kT = kY + kRate;

    GLfloat textureVertices[] = {
        kS, kT,
        kS, kY,
        kX, kT,
        kX, kY,
    };
    glVertexAttribPointer(ATTRIB_TEXTUREPOSITON, 2, GL_FLOAT, 0, 0, textureVertices);
    glEnableVertexAttribArray(ATTRIB_TEXTUREPOSITON);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
Answer (score: 0):
Have you managed to render anything in OpenGL so far? I would suggest breaking what you are doing into smaller steps and getting those working first. Start by rendering with GL_POINTS and see whether you can get anything on screen, then build from there. Make the shaders as simple as possible to rule out problems there. Try to get the smallest possible render working, then build up the complexity (for example, after points try lines, then triangles, then textured triangles). By breaking the rendering down like this, you can isolate what is causing a draw call to display nothing.
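As a concrete starting point, a minimal test along those lines might look like the sketch below (assuming the same GLView and framebuffer setup from the question; testProgram and testVertexAttrib are hypothetical names for a trivial solid-colour shader program and its position attribute, which are not part of the posted code). If even this single point does not appear, the problem is in the context/framebuffer/present path rather than in the video texture upload.

- (void)drawTestPoint
{
    [m_glView setDisplayFramebuffer];

    // Clear to a known colour so a successful present is visible even if the draw fails
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);

    // testProgram is a hypothetical trivial program: the vertex shader passes the
    // position through and writes gl_PointSize (e.g. 20.0), the fragment shader
    // outputs a constant colour. In ES 2.0 the vertex shader must write gl_PointSize
    // for GL_POINTS to have a defined size.
    glUseProgram(testProgram);

    static const GLfloat pointVertex[] = { 0.0f, 0.0f }; // centre of clip space
    glVertexAttribPointer(testVertexAttrib, 2, GL_FLOAT, GL_FALSE, 0, pointVertex);
    glEnableVertexAttribArray(testVertexAttrib);

    glDrawArrays(GL_POINTS, 0, 1);

    // Any GL error here points at the basic setup, not the video path
    GLenum err = glGetError();
    if (err != GL_NO_ERROR)
        NSLog(@"GL error after test draw: 0x%x", err);

    [m_glView presentFramebuffer];
}

Once that point shows up, work back towards the textured quad one step at a time (an untextured triangle strip first, then the texture) to see which step makes the output disappear.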