Saving a screenshot of OpenGL ES with an overlay image

Asked: 2014-05-30 04:27:08

Tags: ios opengl-es uiimageview augmented-reality vuforia

I am using Vuforia for an augmented reality application. When I detect an image target I can display/render a 3D object and a UIImageView, and I can then take a screenshot of the 3D object, but I cannot save the normal image: the UIImageView is only displayed on screen and does not appear in the screenshot. Do I need to render the 2D image with OpenGL instead of using a plain UIImageView?

Rendering the 3D content:

- (void)renderFrameQCAR
{
    [self setFramebuffer];

    // Clear colour and depth buffers
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Render video background and retrieve tracking state
    QCAR::State state = QCAR::Renderer::getInstance().begin();
    QCAR::Renderer::getInstance().drawVideoBackground();

    glEnable(GL_DEPTH_TEST);
    // We must detect if background reflection is active and adjust the culling direction.
    // If the reflection is active, this means the pose matrix has been reflected as well,
    // therefore standard counter clockwise face culling will result in "inside out" models.
    if (offTargetTrackingEnabled) {
        glDisable(GL_CULL_FACE);
    } else {
        glEnable(GL_CULL_FACE);
    }
    glCullFace(GL_BACK);
    if(QCAR::Renderer::getInstance().getVideoBackgroundConfig().mReflection == QCAR::VIDEO_BACKGROUND_REFLECTION_ON)
        glFrontFace(GL_CW);  //Front camera
    else
        glFrontFace(GL_CCW);   //Back camera


    for (int i = 0; i < state.getNumTrackableResults(); ++i) {
        // Get the trackable

       // _numResults = state.getNumTrackableResults();
        [self performSelectorOnMainThread:@selector(DisplayPhotoButton) withObject:nil waitUntilDone:YES];

        const QCAR::TrackableResult* result = state.getTrackableResult(i);
        const QCAR::Trackable& trackable = result->getTrackable();

        //const QCAR::Trackable& trackable = result->getTrackable();
        QCAR::Matrix44F modelViewMatrix = QCAR::Tool::convertPose2GLMatrix(result->getPose());

        // OpenGL 2
        QCAR::Matrix44F modelViewProjection;

        if (offTargetTrackingEnabled) {
            SampleApplicationUtils::rotatePoseMatrix(90, 1, 0, 0,&modelViewMatrix.data[0]);
            SampleApplicationUtils::scalePoseMatrix(kObjectScaleOffTargetTracking, kObjectScaleOffTargetTracking, kObjectScaleOffTargetTracking, &modelViewMatrix.data[0]);
        } else {
            SampleApplicationUtils::translatePoseMatrix(0.0f, 0.0f, kObjectScaleNormal, &modelViewMatrix.data[0]);
            SampleApplicationUtils::scalePoseMatrix(kObjectScaleNormal, kObjectScaleNormal, kObjectScaleNormal, &modelViewMatrix.data[0]);
        }

        SampleApplicationUtils::multiplyMatrix(&vapp.projectionMatrix.data[0], &modelViewMatrix.data[0], &modelViewProjection.data[0]);

        glUseProgram(shaderProgramID);

        if (offTargetTrackingEnabled) {
            glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0, (const GLvoid*)buildingModel.vertices);
            glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0, (const GLvoid*)buildingModel.normals);
            glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0, (const GLvoid*)buildingModel.texCoords);
        } else {
            glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0, (const GLvoid*)teapotVertices);
            glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0, (const GLvoid*)teapotNormals);
            glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0, (const GLvoid*)teapotTexCoords);
        }

        glEnableVertexAttribArray(vertexHandle);
        glEnableVertexAttribArray(normalHandle);
        glEnableVertexAttribArray(textureCoordHandle);

        // Choose the texture based on the target name
        int targetIndex = 0; // "stones"
        if (!strcmp(trackable.getName(), "chips"))
            targetIndex = 1;
        else if (!strcmp(trackable.getName(), "tarmac"))
            targetIndex = 2;

        glActiveTexture(GL_TEXTURE0);

        if (offTargetTrackingEnabled) {
            glBindTexture(GL_TEXTURE_2D, augmentationTexture[3].textureID);
        } else {
            glBindTexture(GL_TEXTURE_2D, augmentationTexture[targetIndex].textureID);
        }
        glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE, (const GLfloat*)&modelViewProjection.data[0]);
        glUniform1i(texSampler2DHandle, 0 /*GL_TEXTURE0*/);

        if (offTargetTrackingEnabled) {
            glDrawArrays(GL_TRIANGLES, 0, buildingModel.numVertices);
        } else {
            glDrawElements(GL_TRIANGLES, NUM_TEAPOT_OBJECT_INDEX, GL_UNSIGNED_SHORT, (const GLvoid*)teapotIndices);
        }

        SampleApplicationUtils::checkGlError("EAGLView renderFrameQCAR");

    }

    glDisable(GL_DEPTH_TEST);
    glDisable(GL_CULL_FACE);

    glDisableVertexAttribArray(vertexHandle);
    glDisableVertexAttribArray(normalHandle);
    glDisableVertexAttribArray(textureCoordHandle);

    QCAR::Renderer::getInstance().end();
    [self presentFramebuffer];
}

Displaying the UIImageView when the camera view opens:

- (id)initWithFrame:(CGRect)frame appSession:(SampleApplicationSession *) app
{
    self = [super initWithFrame:frame];

    if (self) {
        vapp = app;

        // takePhotoFlag = NO;
        // [self DisplayPhotoButton];

        // Enable retina mode if available on this device
        if (YES == [vapp isRetinaDisplay]) {
            [self setContentScaleFactor:2.0f];
        }

        // Load the augmentation textures
        for (int i = 0; i < NUM_AUGMENTATION_TEXTURES; ++i) {
            augmentationTexture[i] = [[Texture alloc] initWithImageFile:[NSString stringWithCString:textureFilenames[i] encoding:NSASCIIStringEncoding]];
        }

        // Create the OpenGL ES context
        context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];

        // The EAGLContext must be set for each thread that wishes to use it.
        // Set it the first time this method is called (on the main thread)
        if (context != [EAGLContext currentContext]) {
            [EAGLContext setCurrentContext:context];
        }

        // Generate the OpenGL ES texture and upload the texture data for use
        // when rendering the augmentation
        for (int i = 0; i < NUM_AUGMENTATION_TEXTURES; ++i) {
            GLuint textureID;
            glGenTextures(1, &textureID);
            [augmentationTexture[i] setTextureID:textureID];
            glBindTexture(GL_TEXTURE_2D, textureID);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
            glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, [augmentationTexture[i] width], [augmentationTexture[i] height], 0, GL_RGBA, GL_UNSIGNED_BYTE, (GLvoid*)[augmentationTexture[i] pngData]);
        }

        offTargetTrackingEnabled = NO;

        [self loadBuildingsModel];
        [self initShaders];

        _takePhotoFlag = NO;

        [self DisplayPhotoButton];
    }

    return self;
}


- (void)DisplayPhotoButton
{
    UIImage *closeButtonImage = [UIImage imageNamed:@"back.png"];
    // UIImage *closeButtonTappedImage = [UIImage imageNamed:@"button_close_pressed.png"];

    CGRect aRect = CGRectMake(20, 20,
                              closeButtonImage.size.width,
                              closeButtonImage.size.height);

    photo = [UIButton buttonWithType:UIButtonTypeCustom];
    photo.frame = aRect;
    photo.userInteractionEnabled = YES;
    [photo setImage:closeButtonImage forState:UIControlStateNormal];
    // photo.autoresizingMask = UIViewAutoresizingFlexibleLeftMargin;
    [photo addTarget:self action:@selector(takePhoto) forControlEvents:UIControlEventTouchUpInside];
    [self addSubview:photo];

    UIButton *arrowButton = [UIButton buttonWithType:UIButtonTypeCustom];
    [arrowButton setImage:[UIImage imageNamed:@"back_btn.png"] forState:UIControlStateNormal];
    // [overlayButton setFrame:CGRectMake(80, 420, 60, 30)];
    [arrowButton setFrame:CGRectMake(100, 10, 40, 40)];
    [arrowButton addTarget:self action:@selector(showActionSheet:forEvent:) forControlEvents:UIControlEventTouchUpInside];
    [self addSubview:arrowButton];
}

Taking the OpenGL screenshot:

- (UIImage *)glToUIImage
{
    UIImage *outputImage = nil;

    CGFloat scale = [[UIScreen mainScreen] scale];
    CGRect s = CGRectMake(0, 0, 320.0f * scale, 480.0f * scale);
    uint8_t *buffer = (uint8_t *) malloc(s.size.width * s.size.height * 4);

    glReadPixels(0, 0, s.size.width, s.size.height, GL_RGBA, GL_UNSIGNED_BYTE, buffer);

    CGDataProviderRef ref = CGDataProviderCreateWithData(NULL, buffer, s.size.width * s.size.height * 4, NULL);

    CGImageRef iref = CGImageCreate(s.size.width, s.size.height, 8, 32, s.size.width * 4, CGColorSpaceCreateDeviceRGB(), kCGBitmapByteOrderDefault, ref, NULL, true, kCGRenderingIntentDefault);

    size_t width = CGImageGetWidth(iref);
    size_t height = CGImageGetHeight(iref);
    size_t length = width * height * 4;
    uint32_t *pixels = (uint32_t *)malloc(length);

    CGContextRef context1 = CGBitmapContextCreate(pixels, width, height, 8, width * 4,
                                                  CGImageGetColorSpace(iref), kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Big);

    CGAffineTransform transform = CGAffineTransformIdentity;
    transform = CGAffineTransformMakeTranslation(0.0f, height);
    transform = CGAffineTransformScale(transform, 1.0, -1.0);
    CGContextConcatCTM(context1, transform);
    CGContextDrawImage(context1, CGRectMake(0.0f, 0.0f, width, height), iref);
    CGImageRef outputRef = CGBitmapContextCreateImage(context1);

    outputImage = [UIImage imageWithCGImage:outputRef];

    CGDataProviderRelease(ref);
    CGImageRelease(iref);
    CGContextRelease(context1);
    CGImageRelease(outputRef);
    free(pixels);
    free(buffer);

    UIImageWriteToSavedPhotosAlbum(outputImage, nil, nil, nil);
    return outputImage;
}

Saving from the current framebuffer:

- (BOOL)presentFramebuffer
{
    if (_takePhotoFlag)
    {
        [self glToUIImage];
        _takePhotoFlag = NO;
    }

    // setFramebuffer must have been called before presentFramebuffer, therefore
    // we know the context is valid and has been set for this (render) thread

    // Bind the colour render buffer and present it
    glBindRenderbuffer(GL_RENDERBUFFER, colorRenderbuffer);

    return [context presentRenderbuffer:GL_RENDERBUFFER];
}
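
For completeness, the takePhoto action that DisplayPhotoButton wires to the button presumably only raises the flag that presentFramebuffer checks on the render thread, along these lines (an assumption, since the original method is not included):

- (void)takePhoto
{
    // Assumption: the button action only requests a capture; the actual
    // glReadPixels call happens on the render thread in presentFramebuffer
    _takePhotoFlag = YES;
}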

1 Answer:

Answer 0 (score: 1):

You can take either an OpenGL screenshot or a UI screenshot. To combine the two, I suggest you take both images. It sounds silly, but it is probably the quickest and most robust way:

  • Take a screenshot from OpenGL (as you already do)
  • Create an image view from that screenshot
  • Insert that image view into the main view
  • Take a UI screenshot* of the main view
  • Remove the image view

*By a UI screenshot I mean something like this:

+ (UIImage *)imageFromView:(UIView *)view {
    UIGraphicsBeginImageContextWithOptions(view.bounds.size, view.opaque, .0);
    [view.layer renderInContext:UIGraphicsGetCurrentContext()];
    UIImage * img = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return img;
}
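
Putting those steps together, a minimal sketch could look like the following (the method and parameter names are placeholders, ARC is assumed, and imageFromView: is the helper from the snippet above, assumed to be available on the same class):

- (UIImage *)combinedScreenshotWithGLImage:(UIImage *)glImage onView:(UIView *)mainView
{
    // 1. Wrap the OpenGL screenshot in an image view covering the whole view
    UIImageView *glImageView = [[UIImageView alloc] initWithImage:glImage];
    glImageView.frame = mainView.bounds;

    // 2. Insert it behind the UIKit overlays so they stay visible on top
    //    (adjust the index to match your actual view hierarchy)
    [mainView insertSubview:glImageView atIndex:0];

    // 3. Take a UI screenshot of the whole hierarchy (OpenGL content plus overlays)
    UIImage *combined = [[self class] imageFromView:mainView];

    // 4. Remove the temporary image view again
    [glImageView removeFromSuperview];

    return combined;
}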

If you run into a problem such as seeing a black background instead of the camera background image, it is probably because somewhere in the pipeline a CGImage is generated that drops or premultiplies the alpha channel (a very common mistake).

EDIT: getting an image from the RGBA data read back with glReadPixels:

This is what I use to get a UIImage from raw RGBA data. Note that it does not handle any orientation, but you can modify it slightly to take the orientation as a parameter and then use imageWithCGImage:scale:orientation: (a variant along those lines is sketched after the function below).

UIImage *imageFromRawData(uint8_t *data, int width, int height) {
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, data, width * height * 4, NULL);
    CGImageRef imageRef = CGImageCreate(width, height, 8, 32, width * 4, colorSpace, kCGImageAlphaLast, provider, NULL, NO, kCGRenderingIntentDefault);

    UIImage *newImage = [UIImage imageWithCGImage:imageRef];

    // Release the Core Graphics objects; the UIImage keeps what it needs
    CGImageRelease(imageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);
    return newImage;
}
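
As mentioned above, a variant that also takes a scale and an orientation (a hypothetical extension, not part of the original answer) can simply pass them on to UIImage's imageWithCGImage:scale:orientation: method, for example:

UIImage *imageFromRawDataWithOrientation(uint8_t *data, int width, int height,
                                         CGFloat scale, UIImageOrientation orientation) {
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, data, width * height * 4, NULL);
    CGImageRef imageRef = CGImageCreate(width, height, 8, 32, width * 4, colorSpace, kCGImageAlphaLast, provider, NULL, NO, kCGRenderingIntentDefault);

    // Same conversion as above, but the scale and orientation are applied here
    UIImage *newImage = [UIImage imageWithCGImage:imageRef scale:scale orientation:orientation];

    CGImageRelease(imageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);
    return newImage;
}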