Multiple Render Targets (MRT) and OSG

Date: 2018-05-13 23:12:56

Tags: openscenegraph render-to-texture deferred-rendering deferred-shading

Folks,

I have been studying FBOs, RTT and MRT in order to add this capability to my application, but I have run into some problems and doubts that I could not resolve while searching. My scenario is described below. I would appreciate any help.

What do I want to do?

  • Attach two render textures (one for the color buffer, one for the depth buffer) to the same camera;
  • Display only the color buffer in a post-render camera;
  • Read back images from both the depth and the color buffer in a final draw callback (a read-back sketch follows this list);
  • Write the collected floating-point images to disk.
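
For the read-back part, one alternative I am considering is to attach osg::Image objects to the camera buffers so that OSG copies the FBO contents back once the camera has rendered, instead of calling readPixels() inside the callback. This is only a minimal sketch (not yet verified in my application); mrtCamera, winW and winH stand for the objects in my code, and as far as I understand an image attachment replaces a texture attachment on the same buffer:

osg::ref_ptr<osg::Image> colorImage = new osg::Image;
colorImage->allocateImage( winW, winH, 1, GL_RGBA, GL_FLOAT );

osg::ref_ptr<osg::Image> depthImage = new osg::Image;
depthImage->allocateImage( winW, winH, 1, GL_DEPTH_COMPONENT, GL_FLOAT );

// attaching images makes OSG read the buffers back into them
// after the camera has finished rendering
mrtCamera->attach( osg::Camera::COLOR_BUFFER, colorImage.get() );
mrtCamera->attach( osg::Camera::DEPTH_BUFFER, depthImage.get() );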

What have I got so far?

  • Rendering either the color buffer or the depth buffer, but not both at the same time;
  • Displaying the color buffer in the post-render camera;
  • Reading either the color buffer or the depth buffer in the final draw callback;
  • Writing the collected image (color or depth) to disk, but only when the image is GL_UNSIGNED_BYTE. For GL_FLOAT images the following error appears (a conversion sketch follows the error message):

Error writing file ./Test-depth.png: Warning: Error in writing to "./Test-depth.png".

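My current suspicion (not verified against the plugin sources) is that the PNG writer simply does not accept GL_FLOAT pixel data, so the float images would either have to go to a file format that supports floating point or be converted to GL_UNSIGNED_BYTE first. Below is a minimal conversion sketch I am considering, assuming a GL_RGBA / GL_FLOAT source image with values in the 0..1 range; toByteImage is a hypothetical helper, not part of the code further down:

#include <osg/Image>
#include <osg/Math>

osg::ref_ptr<osg::Image> toByteImage( const osg::Image& src )
{
    osg::ref_ptr<osg::Image> dst = new osg::Image;
    dst->allocateImage( src.s(), src.t(), 1, GL_RGBA, GL_UNSIGNED_BYTE );

    const float* in = reinterpret_cast<const float*>( src.data() );
    unsigned char* out = dst->data();
    const unsigned int n = src.s() * src.t() * 4;   // 4 channels per pixel
    for ( unsigned int i = 0; i < n; ++i )
        out[i] = static_cast<unsigned char>( osg::clampTo( in[i], 0.0f, 1.0f ) * 255.0f );

    return dst;
}

// usage: osgDB::writeImageFile( *toByteImage( *_image ), "./Test-color.png" );
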
What are my doubts? (Help!)

  • How do I correctly render both textures (color and depth buffer) with the same camera?
  • How do I correctly read both the depth and the color buffer in the final draw callback?
  • Why does the write-to-disk error occur only for images allocated as GL_FLOAT, and not for GL_UNSIGNED_BYTE?
  • In this process, is attaching the render textures to an osg::Geode mandatory or optional? Do I need to create two osg::Geode instances (one per buffer), or just one osg::Geode for both buffers? (A camera setup sketch follows this list.)
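
For reference, this is the kind of setup I have in mind based on the osgprerender and osgmultiplerendertargets examples: a dedicated PRE_RENDER camera that carries both attachments itself, so that (as far as I understand) no osg::Geode is needed on the RTT side, only for the fullscreen quad in the post-render camera. This is just a sketch; colorTex, depthTex, winW, winH, scene and root stand for the corresponding objects in my code:

osg::ref_ptr<osg::Camera> rttCamera = new osg::Camera;
rttCamera->setRenderTargetImplementation( osg::Camera::FRAME_BUFFER_OBJECT );
rttCamera->setRenderOrder( osg::Camera::PRE_RENDER );
rttCamera->setClearMask( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
rttCamera->setViewport( 0, 0, winW, winH );

// both attachments live on the camera itself, no Geode involved here
rttCamera->attach( osg::Camera::COLOR_BUFFER, colorTex );
rttCamera->attach( osg::Camera::DEPTH_BUFFER, depthTex );

// the subgraph to render into the FBO (e.g. the cow model)
rttCamera->addChild( scene );
root->addChild( rttCamera.get() );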

Please take a look at my current source code (what am I doing wrong here?):

// OSG includes
#include <osgDB/ReadFile>
#include <osgDB/WriteFile>
#include <osgViewer/Viewer>
#include <osg/Camera>
#include <osg/Geode>
#include <osg/Geometry>
#include <osg/Texture2D>

struct SnapImage : public osg::Camera::DrawCallback {
    SnapImage(osg::GraphicsContext* gc) {
        _image = new osg::Image;
        _depth = new osg::Image;
        if (gc->getTraits()) {
            int width = gc->getTraits()->width;
            int height = gc->getTraits()->height;
            _image->allocateImage(width, height, 1, GL_RGBA, GL_FLOAT);
            _depth->allocateImage(width, height, 1, GL_DEPTH_COMPONENT, GL_FLOAT);
        }
    }

    virtual void operator () (osg::RenderInfo& renderInfo) const {
        osg::Camera* camera = renderInfo.getCurrentCamera();
        osg::GraphicsContext* gc = camera->getGraphicsContext();
        if (gc->getTraits() && _image.valid()) {
            int width = gc->getTraits()->width;
            int height = gc->getTraits()->height;
            _image->readPixels(0, 0, width, height, GL_RGBA, GL_FLOAT);
            _depth->readPixels(0, 0, width, height, GL_DEPTH_COMPONENT, GL_FLOAT);
            osgDB::writeImageFile(*_image,  "./Test-color.png");
            osgDB::writeImageFile(*_depth,  "./Test-depth.png");
        }
    }

    osg::ref_ptr<osg::Image> _image;
    osg::ref_ptr<osg::Image> _depth;
};

osg::Camera* setupMRTCamera( osg::ref_ptr<osg::Camera> camera, std::vector<osg::Texture2D*>& attachedTextures, int w, int h ) {
    camera->setClearColor( osg::Vec4() );
    camera->setClearMask( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
    camera->setRenderTargetImplementation( osg::Camera::FRAME_BUFFER_OBJECT );
    camera->setRenderOrder( osg::Camera::PRE_RENDER );
    camera->setViewport( 0, 0, w, h );

    osg::Texture2D* tex = new osg::Texture2D;
    tex->setTextureSize( w, h );
    tex->setSourceType( GL_FLOAT );
    tex->setSourceFormat( GL_RGBA );
    tex->setInternalFormat( GL_RGBA32F_ARB );
    tex->setResizeNonPowerOfTwoHint( false );
    tex->setFilter( osg::Texture2D::MIN_FILTER, osg::Texture2D::LINEAR );
    tex->setFilter( osg::Texture2D::MAG_FILTER, osg::Texture2D::LINEAR );
    attachedTextures.push_back( tex );
    camera->attach( osg::Camera::COLOR_BUFFER, tex );

    tex = new osg::Texture2D;
    tex->setTextureSize( w, h );
    tex->setSourceType( GL_FLOAT );
    tex->setSourceFormat( GL_DEPTH_COMPONENT );
    tex->setInternalFormat( GL_DEPTH_COMPONENT32 );
    tex->setResizeNonPowerOfTwoHint( false );
    attachedTextures.push_back( tex );
    camera->attach( osg::Camera::DEPTH_BUFFER, tex );
    return camera.release();
}


int main() {
    osg::ref_ptr< osg::Group > root( new osg::Group );
    root->addChild( osgDB::readNodeFile( "cow.osg" ) );
    unsigned int winW = 800;
    unsigned int winH = 600;

    osgViewer::Viewer viewer;
    viewer.setUpViewInWindow( 0, 0, winW, winH );
    viewer.setSceneData( root.get() );
    viewer.realize();

    // setup MRT camera
    std::vector<osg::Texture2D*> attachedTextures;
    osg::Camera* mrtCamera ( viewer.getCamera() );
    setupMRTCamera( mrtCamera, attachedTextures, winW, winH );

    // set RTT textures to quad
    osg::Geode* geode( new osg::Geode );
    geode->addDrawable( osg::createTexturedQuadGeometry(
        osg::Vec3(-1,-1,0), osg::Vec3(2.0,0.0,0.0), osg::Vec3(0.0,2.0,0.0)) );
    geode->getOrCreateStateSet()->setTextureAttributeAndModes( 0, attachedTextures[0] );
    geode->getOrCreateStateSet()->setMode( GL_LIGHTING, osg::StateAttribute::OFF );
    geode->getOrCreateStateSet()->setMode( GL_DEPTH_TEST, osg::StateAttribute::OFF );

    // configure postRenderCamera to draw fullscreen textured quad
    osg::Camera* postRenderCamera( new osg::Camera );
    postRenderCamera->setClearMask( 0 );
    postRenderCamera->setRenderTargetImplementation( osg::Camera::FRAME_BUFFER, osg::Camera::FRAME_BUFFER );
    postRenderCamera->setReferenceFrame( osg::Camera::ABSOLUTE_RF );
    postRenderCamera->setRenderOrder( osg::Camera::POST_RENDER );
    postRenderCamera->setViewMatrix( osg::Matrixd::identity() );
    postRenderCamera->setProjectionMatrix( osg::Matrixd::identity() );
    postRenderCamera->addChild( geode );
    root->addChild(postRenderCamera);

    // setup the callback
    SnapImage* finalDrawCallback = new SnapImage(viewer.getCamera()->getGraphicsContext());
    mrtCamera->setFinalDrawCallback(finalDrawCallback);

    return (viewer.run());
}

Thanks in advance,

Rômulo Cerqueira

0 Answers

No answers yet.