OpenGL problem when filtering video with image processing

Date: 2012-03-05 10:58:26

Tags: iphone ios xcode opengl-es-2.0

I am trying to use Brad Larson's framework as a reference to get some basic image processing tests going. So far I have implemented a sepia filter, and it sort of works. The problem is that I get a processed image and a non-processed image on alternating render cycles (if I set the fps to 1 frame per second, the first render is processed, the second is not, the third is, and so on). I have spent hours on this and cannot seem to find the reason.

I have two views: the video preview layer, and the GLKView into which I am rendering my buffer.

Here is my code; it is basically just an attempt to condense the aforementioned framework:

The viewDidLoad method:

- (void)viewDidLoad
{
    [super viewDidLoad];

    session = [[AVCaptureSession alloc] init];

    // Add inputs and outputs.
    if ([session canSetSessionPreset:AVCaptureSessionPreset640x480]) {
        session.sessionPreset = AVCaptureSessionPreset640x480;
    }
    else {
        // Handle the failure.
        NSLog(@"Cannot set session preset to 640x480");
    }

    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];

    if (!input) {
        // Handle the error appropriately.
        NSLog(@"Could not create input: %@", error);
    }

    if ([session canAddInput:input]) {
        [session addInput:input];
    }
    else {
        // Handle the failure.
        NSLog(@"Could not add input");
    }

    captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];

    [captureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspect];

    [captureVideoPreviewLayer setBounds:videoLayer.layer.bounds];
    [captureVideoPreviewLayer setPosition:videoLayer.layer.position];

    [videoLayer.layer addSublayer:captureVideoPreviewLayer];

    [session startRunning];

    // OPENGL stuff

    self.imageView.context = [[GPUImageOpenGLESContext sharedImageProcessingOpenGLESContext] context];


    if ([self createFilterProgram]) {
        NSLog(@"Filter Program Created");
    }


    if ([self createOutputProgram]) {
        NSLog(@"Output Program Created");
    }

    // CVOpenGLESTextureCacheCreate is weak-linked; a non-NULL function address
    // means the iOS 5 fast texture cache is available on this device
    if (CVOpenGLESTextureCacheCreate != NULL)
    {
        [GPUImageOpenGLESContext useImageProcessingContext];
        CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)[[GPUImageOpenGLESContext sharedImageProcessingOpenGLESContext] context], NULL, &coreVideoTextureCache);
        if (err) 
        {
            NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreate %d", err);
        }
        else
            NSLog(@"Initial CVOpenGLESTextureCache created");

        // Need to remove the initially created texture
        if (outputTexture)
        {
            glDeleteTextures(1, &outputTexture);
            outputTexture = 0;
        }
    }

    // DATA OUTPUT
    dataOutput = [[AVCaptureVideoDataOutput alloc] init];

    if ([session canAddOutput:dataOutput]) {
        [session addOutput:dataOutput];

        dataOutput.videoSettings = 
        [NSDictionary dictionaryWithObject: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                    forKey: (id)kCVPixelBufferPixelFormatTypeKey];

        AVCaptureConnection *connection = [dataOutput connectionWithMediaType:AVMediaTypeVideo];

        // Cap capture at 1 frame per second so individual renders can be told apart
        [connection setVideoMaxFrameDuration:CMTimeMake(1, 1)];
        [connection setVideoMinFrameDuration:CMTimeMake(1, 1)];

    }
    else {
        // Handle the failure.
        NSLog(@"Could not add output");
    }
    // DATA OUTPUT END

    //dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
    [dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
    //dispatch_release(queue);

}
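
The commented-out lines above hint at a dedicated serial queue for the delegate instead of the main queue. A minimal sketch of that variant, assuming manual retain/release as in the rest of this code (the queue label is made up):

    dispatch_queue_t queue = dispatch_queue_create("com.example.videoFilterQueue", NULL);
    [dataOutput setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue); // the data output retains the queue

Every OpenGL ES call in the delegate would then have to run with the right EAGLContext made current on that queue's thread, so I am keeping the main queue for now.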

And the delegate method for the video capture:

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection 
{

    NSLog(@"Render");
    //CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
    CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
    // CVPixelBufferGetWidth/Height return size_t; cast explicitly
    int bufferWidth = (int)CVPixelBufferGetWidth(cameraFrame);
    int bufferHeight = (int)CVPixelBufferGetHeight(cameraFrame);

    if (CVOpenGLESTextureCacheCreate != NULL)
    {
        NSLog(@"Not Null");
        CVPixelBufferLockBaseAddress(cameraFrame, 0);

        [GPUImageOpenGLESContext useImageProcessingContext];
        CVOpenGLESTextureRef texture = NULL;
        CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, cameraFrame, NULL, GL_TEXTURE_2D, GL_RGBA, bufferWidth, bufferHeight, GL_BGRA, GL_UNSIGNED_BYTE, 0, &texture);

        if (!texture || err) {
            NSLog(@"CVOpenGLESTextureCacheCreateTextureFromImage failed (error: %d)", err);
            CVPixelBufferUnlockBaseAddress(cameraFrame, 0); // don't leave the buffer locked on the early return
            return;
        }

        outputTexture = CVOpenGLESTextureGetName(texture);
        glBindTexture(CVOpenGLESTextureGetTarget(texture), outputTexture);
        // Camera frames are non-power-of-two, so OpenGL ES 2.0 requires
        // clamp-to-edge wrapping and non-mipmapped filtering here
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);


        // "setInputSize" method

        CGSize sizeOfFBO = CGSizeMake(bufferWidth, bufferHeight);

        // "newFrameReady" method

        static const GLfloat squareVertices[] = {
            -1.0f, -1.0f,
            1.0f, -1.0f,
            -1.0f,  1.0f,
            1.0f,  1.0f,
        };

        static const GLfloat squareTextureCoordinates[] = {
            0.0f, 0.0f,
            1.0f, 0.0f,
            0.0f,  1.0f,
            1.0f,  1.0f,
        };

        [GPUImageOpenGLESContext useImageProcessingContext];

        // "setFilterFBO" method
        if (!filterFramebuffer)
        {
            NSLog(@"New Filter Frame buffer");
            glActiveTexture(GL_TEXTURE1);
            glGenFramebuffers(1, &filterFramebuffer);
            glBindFramebuffer(GL_FRAMEBUFFER, filterFramebuffer);

            NSLog(@"Filter size: %f, %f", sizeOfFBO.width, sizeOfFBO.height);

            // Note: no renderbuffer appears to be bound at this point, so this
            // glRenderbufferStorage call likely has no effect on the FBO
            glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA8_OES, (int)sizeOfFBO.width, (int)sizeOfFBO.height);
            // Caution: the filter pass below samples outputTexture while the same
            // texture is attached here as the color target; sampling a texture
            // attached to the current framebuffer is undefined in OpenGL ES 2.0
            glBindTexture(GL_TEXTURE_2D, outputTexture);
            glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (int)sizeOfFBO.width, (int)sizeOfFBO.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
            glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, outputTexture, 0);

            GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);

            NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status);
        }

        glBindFramebuffer(GL_FRAMEBUFFER, filterFramebuffer);
        glViewport(0, 0, (int)sizeOfFBO.width, (int)sizeOfFBO.height);

        // END "setFilterFBO" method

        glUseProgram(filterProgram); // This Program MUST be the filter one 

        glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

        glActiveTexture(GL_TEXTURE2);
        glBindTexture(GL_TEXTURE_2D, outputTexture);

        glUniform1i(uniforms[filterInputTextureUniform], 2);    

        glVertexAttribPointer(position, 2, GL_FLOAT, 0, 0, squareVertices);
        glVertexAttribPointer(inputTextureCoordinate, 2, GL_FLOAT, 0, 0, squareTextureCoordinates);

        glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

        static const GLfloat squareVertices2[] = {
            -1.0f, -1.0f,
            1.0f, -1.0f,
            -1.0f,  1.0f,
            1.0f,  1.0f,
        };

        static const GLfloat textureCoordinates2[] = {
            0.0f, 1.0f,
            1.0f, 1.0f,
            0.0f, 0.0f,
            1.0f, 0.0f,
        };


        [GPUImageOpenGLESContext useImageProcessingContext];

        //[self setDisplayFramebuffer];
        if (!displayFramebuffer)
        {
            NSLog(@"New Display Frame buffer");
            glGenFramebuffers(1, &displayFramebuffer);
            glBindFramebuffer(GL_FRAMEBUFFER, displayFramebuffer);

            glGenRenderbuffers(1, &displayRenderbuffer);
            glBindRenderbuffer(GL_RENDERBUFFER, displayRenderbuffer);

            [[[GPUImageOpenGLESContext sharedImageProcessingOpenGLESContext] context] renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer*)imageView.layer];

            glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &backingWidth);
            glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &backingHeight);
            NSLog(@"Backing width: %d, height: %d", backingWidth, backingHeight);

            glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, displayRenderbuffer);

            GLuint framebufferCreationStatus = glCheckFramebufferStatus(GL_FRAMEBUFFER);
            NSAssert(framebufferCreationStatus == GL_FRAMEBUFFER_COMPLETE, @"Failure with display framebuffer generation");
        }

        glBindFramebuffer(GL_FRAMEBUFFER, displayFramebuffer);

        glViewport(0, 0, backingWidth, backingHeight);

        glUseProgram(outputProgram);

        glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);


        glActiveTexture(GL_TEXTURE4);     
        glBindTexture(GL_TEXTURE_2D, outputTexture);
        glUniform1i(uniformsOutput[outputTextureUniform], 4);   

        glVertexAttribPointer(outputPosition, 2, GL_FLOAT, 0, 0, squareVertices2);
        glVertexAttribPointer(outputTextureCoordinate, 2, GL_FLOAT, 0, 0, textureCoordinates2);

        glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

        glBindRenderbuffer(GL_RENDERBUFFER, displayRenderbuffer);
        [[GPUImageOpenGLESContext sharedImageProcessingOpenGLESContext] presentBufferForDisplay];        

        //glBindRenderbuffer(GL_RENDERBUFFER, 0);

        CVPixelBufferUnlockBaseAddress(cameraFrame, 0);

        glBindTexture(GL_TEXTURE_2D, 0); // the first argument is a target, not a texture name

        // Flush the CVOpenGLESTexture cache and release the texture
        CVOpenGLESTextureCacheFlush(coreVideoTextureCache, 0);
        CFRelease(texture);
        outputTexture = 0;
    }

    else
        NSLog(@"No support for fast CVOpenGLESTextureCacheCreate");
}
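
One thing I should mention about the delegate: it calls glVertexAttribPointer for position and inputTextureCoordinate but never glEnableVertexAttribArray, so those attribute arrays have to be enabled once during setup in createFilterProgram (this is also how GPUImage does it). A minimal sketch of that part of the setup, assuming the attribute names match the shader sources (not shown here):

    // Assumed to live in createFilterProgram; the attribute names are assumptions
    position = glGetAttribLocation(filterProgram, "position");
    inputTextureCoordinate = glGetAttribLocation(filterProgram, "inputTextureCoordinate");

    // Without these, the glVertexAttribPointer/glDrawArrays pairs above
    // would draw with no per-vertex data enabled at all
    glEnableVertexAttribArray(position);
    glEnableVertexAttribArray(inputTextureCoordinate);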

Here is Brad's original framework (it's really cool): http://www.sunsetlakesoftware.com/2012/02/12/introducing-gpuimage-framework

If anyone knows why this is happening, I would appreciate the help.

So far, the things I think could be causing it are:

  • Rendering twice per cycle (I have ruled this out: the callback only fires once per second, and even if a second, very fast render did happen at the end of the cycle, the frame left on screen would be the processed one)
  • Some OpenGL state being enabled or disabled (I think this could be the cause but could not find where; polling glGetError after each step, as sketched after this list, might narrow it down)
  • Not using the buffers correctly (I use the same buffer variable for both steps, which could be causing some trouble)
  • It could be something related to the contexts (I believe the context is where everything gets drawn, so if I am working on the GLKView's context I might be overwriting the picture)
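
For the second point, a minimal error-polling sketch (the GL_CHECK macro name is made up; the glGetError call itself is standard OpenGL ES 2.0):

    // Drain the GL error queue and log where the error was detected
    #define GL_CHECK(label) do { \
        GLenum glErr; \
        while ((glErr = glGetError()) != GL_NO_ERROR) { \
            NSLog(@"GL error 0x%04X after %s", glErr, (label)); \
        } \
    } while (0)

    // Usage, e.g. right after each draw in the delegate:
    //   glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    //   GL_CHECK("filter pass");

Sprinkling this after each glBindFramebuffer, glTexImage2D, and glDrawArrays call would show immediately whether some call is silently failing on alternate frames.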

Please correct me if these ideas are wrong; this is my first attempt at doing processing with OpenGL.

Thanks!

EDIT:

As Brad suggested, I used the OpenGL ES Analyzer and got the following results:

[screenshot: OpenGL ES Analyzer results]

This tool is really great for optimizing OpenGL code.
