I am trying to render a scene to two textures (left and right) for use with the Oculus Rift. When I set the render target to a render target view of a 2D texture and call DrawIndexed(), it renders to the back buffer instead of the texture. I am using Visual Studio and have run Graphics Diagnostics on it. In the DrawIndexed() event it shows the render target as the texture, but the pixel history does not show the event. If I do not clear the back buffer, the scene shows up on screen.
In the code below, RenderLeft() should render an image onto a plane over a green background, with the render target set to the left render texture. RenderRight() should then take the texture RenderLeft() rendered to, render it onto the plane, and output that to the back buffer. (Note: this is not the normal setup; it is only meant to help see whether the texture is being rendered at all.)
In the final output, the left side of the screen should be empty, and the right side should show the source image inside a green rectangle on a black background.
Instead, I get this: http://i.imgur.com/dHX5Ed3.png?1
RenderLeft renders to the back buffer even though the render target is the texture, so the texture used by RenderRight only contains the color it was cleared with.
Here is the code I am currently using. I think I have included everything relevant.
// this is the function used to render a single frame
void Direct3D::RenderFrame()
{
    CreateTransforms();  // this creates matFinalLeft and matFinalRight, which is (world matrix)*(view matrix)*(projection matrix) with the proper offsets for a stereoscopic view
    setVertices();       // this sets the vertex and index buffers
    setMainShaders();    // this sets the shaders used to render the 3D scene

    RenderLeft(pTextureLeftRenderView, matFinalLeft, viewportLeft, true);  // this renders an image to a plane on a green background. It SHOULD render to a texture.
    RenderRight(backbuffer, matFinalRight, viewportRight, false);          // this renders the render target from RenderLeft to the plane and renders to the back buffer

    swapchain->Present(0, 0);  // output back buffer to screen
}
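CreateTransforms() itself is not shown above; conceptually it builds something like the following. This is only a simplified sketch for reference, not my actual code: the real eye offsets come from the Oculus SDK and my Camera objects, and eyeSeparation and the projection parameters below are placeholder values.

// Hypothetical sketch of CreateTransforms() -- placeholder values, the real
// offsets come from the Oculus SDK / Camera members.
void Direct3D::CreateTransforms()
{
    const float eyeSeparation = 0.064f;  // placeholder interpupillary distance

    D3DXMATRIX matWorld, matView, matProj;
    D3DXMatrixIdentity(&matWorld);
    D3DXMatrixPerspectiveFovLH(&matProj, D3DX_PI / 4.0f,
        (FLOAT)screen_width / (FLOAT)screen_height, 0.1f, 100.0f);

    D3DXVECTOR3 up(0.0f, 1.0f, 0.0f);

    // left eye: camera shifted half the eye separation to the left
    D3DXVECTOR3 eyeLeft(-eyeSeparation * 0.5f, 0.0f, 0.0f);
    D3DXVECTOR3 atLeft(-eyeSeparation * 0.5f, 0.0f, 1.0f);
    D3DXMatrixLookAtLH(&matView, &eyeLeft, &atLeft, &up);
    matFinalLeft = matWorld * matView * matProj;  // order/transpose must match mul(matFinal, position) in the vertex shader

    // right eye: mirrored offset
    D3DXVECTOR3 eyeRight(eyeSeparation * 0.5f, 0.0f, 0.0f);
    D3DXVECTOR3 atRight(eyeSeparation * 0.5f, 0.0f, 1.0f);
    D3DXMatrixLookAtLH(&matView, &eyeRight, &atRight, &up);
    matFinalRight = matWorld * matView * matProj;
}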
This part should render a rectangle with an image on it to the left side of the render texture.
//Render the scene to the left side of a texture
void Direct3D::RenderLeft(ID3D11RenderTargetView *RenderTarget, D3DXMATRIX matFinal, D3D11_VIEWPORT viewport, bool clearRenderTarget){
    devcon->OMSetRenderTargets(1, &RenderTarget, zbuffer);
    devcon->RSSetViewports(1, &viewport);

    // update shader resources
    devcon->UpdateSubresource(pCBufferPrimaryShader, 0, 0, &matFinal, 0, 0);
    devcon->PSSetShaderResources(0, 1, &pTextureLeftResourceView);

    // clear the depth buffer and render target texture
    devcon->ClearDepthStencilView(zbuffer, D3D11_CLEAR_DEPTH, 1.0f, 0);
    if (clearRenderTarget){
        devcon->ClearRenderTargetView(RenderTarget, D3DXCOLOR(0.0f, 1.0f, 0.0f, 1.0f));
    }

    // render to texture on left side (oculus) or full texture
    devcon->DrawIndexed(6, 0, 0);
}
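One thing I am aware of is that D3D11 will not allow the same texture to be bound as a render target and as a shader resource at the same time. Since the render textures are used both ways across frames, a defensive pattern would be to clear the pixel shader SRV slot before binding the render target. This is only a sketch of that pattern, it is not currently in my code:

    // Sketch: explicitly unbind whatever is in PS slot 0 before re-binding the
    // render target, so a texture still bound as an SRV from a previous pass
    // cannot conflict with its use as a render target here.
    ID3D11ShaderResourceView *nullSRV = NULL;
    devcon->PSSetShaderResources(0, 1, &nullSRV);
    devcon->OMSetRenderTargets(1, &RenderTarget, zbuffer);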
This part should render a rectangle, textured with the output of RenderLeft(), to the back buffer.
//Render the scene to the right side of the back buffer
void Direct3D::RenderRight(ID3D11RenderTargetView *RenderTarget, D3DXMATRIX matFinal, D3D11_VIEWPORT viewport, bool clearRenderTarget){
    //render to texture
    devcon->OMSetRenderTargets(1, &RenderTarget, zbuffer);
    devcon->RSSetViewports(1, &viewport);

    // update shader resources
    devcon->UpdateSubresource(pCBufferPrimaryShader, 0, 0, &matFinal, 0, 0);
    devcon->PSSetShaderResources(0, 1, &pRenderTextureLeftResourceView);

    // clear the depth buffer and render target texture
    devcon->ClearDepthStencilView(zbuffer, D3D11_CLEAR_DEPTH, 1.0f, 0);
    if (clearRenderTarget){
        devcon->ClearRenderTargetView(RenderTarget, D3DXCOLOR(0.0f, 0.0f, 1.0f, 1.0f));
    }

    // render to texture on left side (oculus) or full texture
    devcon->DrawIndexed(6, 0, 0);
}
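To double-check what the output merger actually has bound (rather than what Graphics Diagnostics reports), one check I could add right after the OMSetRenderTargets call in either render function is to read the binding back. This is only a sketch, not something in my current code:

    // Sketch: read back the currently bound render target right after
    // OMSetRenderTargets and compare it with what was requested.
    ID3D11RenderTargetView *boundRTV = NULL;
    ID3D11DepthStencilView *boundDSV = NULL;
    devcon->OMGetRenderTargets(1, &boundRTV, &boundDSV);
    if (boundRTV != RenderTarget){
        OutputDebugStringA("Render target binding did not take effect!\n");
    }
    // OMGetRenderTargets adds references, so release them when done
    if (boundRTV) boundRTV->Release();
    if (boundDSV) boundDSV->Release();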
Finally, the code that creates the various views and viewports:
void Direct3D::InitD3D(HWND hWnd)
{
    // create a struct to hold information about the swap chain
    DXGI_SWAP_CHAIN_DESC scd;

    // clear out the struct for use
    ZeroMemory(&scd, sizeof(DXGI_SWAP_CHAIN_DESC));

    // fill the swap chain description struct
    scd.BufferCount = 1;                                 // one back buffer
    scd.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;  // use 32-bit color
    scd.BufferDesc.Width = screen_width;
    scd.BufferDesc.Height = screen_height;
    scd.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;   // how swap chain is to be used
    scd.OutputWindow = hWnd;                             // the window to be used
    scd.SampleDesc.Count = 4;                            // how many multisamples
    scd.Windowed = TRUE;                                 // windowed/full-screen mode
    scd.Flags = DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH;

    // create a device, device context and swap chain using the information in the scd struct
    D3D11CreateDeviceAndSwapChain(NULL,
        D3D_DRIVER_TYPE_HARDWARE,
        NULL,
        NULL,
        NULL,
        NULL,
        D3D11_SDK_VERSION,
        &scd,
        &swapchain,
        &dev,
        NULL,
        &devcon);

    // create the depth buffer texture
    D3D11_TEXTURE2D_DESC texd;
    ZeroMemory(&texd, sizeof(texd));
    texd.Width = screen_width;
    texd.Height = screen_height;
    texd.ArraySize = 1;
    texd.MipLevels = 1;
    texd.SampleDesc.Count = 4;
    texd.Format = DXGI_FORMAT_D32_FLOAT;
    texd.BindFlags = D3D11_BIND_DEPTH_STENCIL;

    ID3D11Texture2D *pDepthBuffer;
    dev->CreateTexture2D(&texd, NULL, &pDepthBuffer);

    // create the depth buffer
    D3D11_DEPTH_STENCIL_VIEW_DESC dsvd;
    ZeroMemory(&dsvd, sizeof(dsvd));
    dsvd.Format = DXGI_FORMAT_D32_FLOAT;
    dsvd.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2DMS;
    dev->CreateDepthStencilView(pDepthBuffer, &dsvd, &zbuffer);
    pDepthBuffer->Release();

    // get the address of the back buffer
    ID3D11Texture2D *pBackBuffer;
    swapchain->GetBuffer(0, __uuidof(ID3D11Texture2D), (LPVOID*)&pBackBuffer);

    // use the back buffer address to create the render target
    dev->CreateRenderTargetView(pBackBuffer, NULL, &backbuffer);
    pBackBuffer->Release();

    // create intermediate render textures
    ID3D11Texture2D *pRenderTextureLeft;
    D3D11_TEXTURE2D_DESC textureDesc;
    D3D11_RENDER_TARGET_VIEW_DESC renderTargetViewDesc;
    D3D11_SHADER_RESOURCE_VIEW_DESC shaderResourceViewDesc;

    ZeroMemory(&textureDesc, sizeof(textureDesc));
    textureDesc.Width = screen_width;
    textureDesc.Height = screen_height;
    if (oculus){
        textureDesc.Width = (UINT)((FLOAT)textureDesc.Width * oculus->renderScale);
        textureDesc.Height = (UINT)((FLOAT)textureDesc.Height * oculus->renderScale);
    }
    textureDesc.MipLevels = 1;
    textureDesc.ArraySize = 1;
    textureDesc.Format = DXGI_FORMAT_R32G32B32A32_FLOAT;
    textureDesc.SampleDesc.Count = 1;
    textureDesc.Usage = D3D11_USAGE_DEFAULT;
    textureDesc.BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE;
    textureDesc.CPUAccessFlags = 0;
    textureDesc.MiscFlags = 0;
    dev->CreateTexture2D(&textureDesc, NULL, &pRenderTextureLeft);

    renderTargetViewDesc.Format = textureDesc.Format;
    renderTargetViewDesc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
    renderTargetViewDesc.Texture2D.MipSlice = 0;
    dev->CreateRenderTargetView(pRenderTextureLeft, &renderTargetViewDesc, &pTextureLeftRenderView);

    shaderResourceViewDesc.Format = textureDesc.Format;
    shaderResourceViewDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
    shaderResourceViewDesc.Texture2D.MostDetailedMip = 0;
    shaderResourceViewDesc.Texture2D.MipLevels = 1;
    dev->CreateShaderResourceView(pRenderTextureLeft, &shaderResourceViewDesc, &pRenderTextureLeftResourceView);

    ID3D11Texture2D *pRenderTextureRight;
    dev->CreateTexture2D(&textureDesc, NULL, &pRenderTextureRight);
    dev->CreateRenderTargetView(pRenderTextureRight, &renderTargetViewDesc, &pTextureRightRenderView);
    dev->CreateShaderResourceView(pRenderTextureRight, &shaderResourceViewDesc, &pRenderTextureRightResourceView);

    /*if (oculus){
        pOculusOutputDevice = oculus->searchForOculusDisplay(oculus->hmd.DisplayDeviceName);
        swapchain->SetFullscreenState(TRUE, pOculusOutputDevice);
    }*/

    // Set the viewport
    ZeroMemory(&viewportLeft, sizeof(D3D11_VIEWPORT));
    ZeroMemory(&viewportRight, sizeof(D3D11_VIEWPORT));
    ZeroMemory(&viewportCenter, sizeof(D3D11_VIEWPORT));

    viewportCenter.TopLeftX = 0.0f;
    viewportCenter.TopLeftY = 0.0f;
    if (oculus){
        viewportCenter.Width = (FLOAT)screen_width * oculus->renderScale;
        viewportCenter.Height = (FLOAT)screen_height * oculus->renderScale;
    }
    else{
        viewportCenter.Width = (FLOAT)screen_width;
        viewportCenter.Height = (FLOAT)screen_height;
    }
    viewportCenter.MinDepth = 0.0f;
    viewportCenter.MaxDepth = 1.0f;

    if (dual_mode){
        viewportLeft.TopLeftX = 0.0f;
        viewportLeft.TopLeftY = 0.0f;
        viewportLeft.Width = (FLOAT)screen_width / 2.0f;
        viewportLeft.Height = (FLOAT)screen_height;
        viewportLeft.MinDepth = 0.0f;
        viewportLeft.MaxDepth = 1.0f;

        viewportRight.TopLeftX = (FLOAT)screen_width / 2.0f;
        viewportRight.TopLeftY = 0.0f;
        viewportRight.Width = (FLOAT)screen_width / 2.0f;
        viewportRight.Height = (FLOAT)screen_height;
        viewportRight.MinDepth = 0.0f;
        viewportRight.MaxDepth = 1.0f;
    }

    devcon->RSSetViewports(1, &viewportCenter);

    InitPipeline();
    InitGraphics();
}
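None of the Create* calls above have their HRESULTs checked, and the device is created without the debug layer, so a failing view creation or an invalid binding would be silent. A sketch of how the code above could be changed to enable debugging and verify the render-texture view (the debug flag goes in the Flags parameter that is currently NULL):

    // Sketch: create the device with the debug layer so the runtime reports
    // invalid bindings to the Visual Studio output window, and check that the
    // render-texture views are actually created.
    HRESULT hr = D3D11CreateDeviceAndSwapChain(NULL,
        D3D_DRIVER_TYPE_HARDWARE,
        NULL,
        D3D11_CREATE_DEVICE_DEBUG,  // Flags: enable the debug layer (debug builds only)
        NULL,
        0,
        D3D11_SDK_VERSION,
        &scd,
        &swapchain,
        &dev,
        NULL,
        &devcon);
    if (FAILED(hr)){ /* handle device creation failure */ }

    hr = dev->CreateRenderTargetView(pRenderTextureLeft, &renderTargetViewDesc, &pTextureLeftRenderView);
    if (FAILED(hr)){ /* the view was not created, so pTextureLeftRenderView is unusable */ }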
As requested, here is some more code:
I have included the entire Direct3D class header so you can see which variables are members.
#pragma once
#include "Oculus.h"
#include <OVR.h>
#include "Camera.h"
#include <d3d11.h>
#include <D3DX11.h>
#include <D3DX10.h>
#pragma comment (lib, "d3d11.lib")
#pragma comment (lib, "d3dx11.lib")
#pragma comment (lib, "d3dx10.lib")

class Direct3D
{
public:
    struct VERTEX{ FLOAT X, Y, Z; D3DXCOLOR Color; FLOAT U, V; };
    struct DISTORTION{
        FLOAT LensCenter[2];
        FLOAT ScreenCenter[2];
        FLOAT Scale[2];
        FLOAT ScaleIn[2];
        FLOAT HmdWarpParam[4];
    };

    IDXGISwapChain *swapchain;            // the pointer to the swap chain interface
    ID3D11Device *dev;                    // the pointer to our Direct3D device interface
    ID3D11DeviceContext *devcon;          // the pointer to our Direct3D device context
    ID3D11RenderTargetView *backbuffer;
    IDXGIOutput* pOculusOutputDevice;
    ID3D11VertexShader *pVS_Primary;      // the vertex shader
    ID3D11PixelShader *pPS_Primary;       // the pixel shader
    ID3D11VertexShader *pVS_Distortion;
    ID3D11PixelShader *pPS_Distortion;    // the pixel shader
    ID3D11Buffer *pVBuffer;               // vertex buffer
    ID3D11Buffer *pIBuffer;
    ID3D11InputLayout *pLayout_Primary;
    ID3D11InputLayout *pLayout_Distortion;
    D3D11_VIEWPORT viewportLeft;
    D3D11_VIEWPORT viewportRight;
    D3D11_VIEWPORT viewportCenter;
    ID3D11Buffer *pCBufferPrimaryShader;
    ID3D11Buffer *pCBufferDistortionShader;
    ID3D11DepthStencilView *zbuffer;      // the pointer to our depth buffer
    ID3D11ShaderResourceView *pTextureLeftResourceView;  // the pointer to the texture
    ID3D11ShaderResourceView *pTextureRightResourceView;
    ID3D11ShaderResourceView *pRenderTextureLeftResourceView;
    ID3D11ShaderResourceView *pRenderTextureRightResourceView;
    ID3D11RenderTargetView *pTextureLeftRenderView;
    ID3D11RenderTargetView *pTextureRightRenderView;
    D3DXMATRIX matFinalLeft;
    D3DXMATRIX matFinalRight;
    Camera cameraLeft, cameraRight;
    int screen_width;
    int screen_height;
    bool dual_mode;
    Oculus* oculus;

    Direct3D(Oculus* oculus);
    Direct3D();
    ~Direct3D();
    void InitD3D(HWND hWnd);    // sets up and initializes Direct3D
    void CleanD3D(void);        // closes Direct3D and releases memory
    void RenderFrame();
    void InitPipeline();
    void InitGraphics();
    void RenderLeft(ID3D11RenderTargetView *RenderTarget, D3DXMATRIX matFinal, D3D11_VIEWPORT viewport, bool clearRenderTarget);
    void RenderRight(ID3D11RenderTargetView *RenderTarget, D3DXMATRIX matFinal, D3D11_VIEWPORT viewport, bool clearRenderTarget);
    void DistortionCorrection(ID3D11RenderTargetView *RenderTarget);
    void CreateTransforms();
    void setVertices();
    void setMainShaders();
    void OVRMatrix4fToD3DXMatrix(OVR::Matrix4f& source, D3DXMATRIX& dest);
};
Here is the code that initializes the image textures. (Right now it loads the same image into two different textures; eventually these will be the two halves of a 3D image, once I figure out how to access the second image in the file.)
FILENAME is #defined as the name of the image file I am displaying.
void Direct3D::InitGraphics()
{
    D3DX11CreateShaderResourceViewFromFile(dev,   // the Direct3D device
        FILENAME,                                 // load the image file from the local folder
        NULL,                                     // no additional information
        NULL,                                     // no multithreading
        &pTextureLeftResourceView,                // address of the shader-resource-view
        NULL);                                    // no HRESULT output
    D3DX11CreateShaderResourceViewFromFile(dev,   // the Direct3D device
        FILENAME,                                 // load the image file from the local folder
        NULL,                                     // no additional information
        NULL,                                     // no multithreading
        &pTextureRightResourceView,               // address of the shader-resource-view
        NULL);                                    // no HRESULT output

    // get image size for rectangle mesh size
    D3DX11_IMAGE_INFO info;
    D3DX11GetImageInfoFromFile(FILENAME, NULL, &info, NULL);
    FLOAT textureWidth = info.Width * 0.001f;
    FLOAT textureHeight = info.Height * 0.001f;

    // create vertices to represent the corners of the textured quad
    VERTEX OurVertices[] =
    {
        { -textureWidth, -textureHeight, 2.0f, D3DXCOLOR(1.0f, 1.0f, 1.0f, 1.0f), 1.0f, 1.0f },
        { textureWidth, -textureHeight, 2.0f, D3DXCOLOR(1.0f, 1.0f, 1.0f, 1.0f), 0.0f, 1.0f },
        { -textureWidth, textureHeight, 2.0f, D3DXCOLOR(1.0f, 1.0f, 1.0f, 1.0f), 1.0f, 0.0f },
        { textureWidth, textureHeight, 2.0f, D3DXCOLOR(1.0f, 1.0f, 1.0f, 1.0f), 0.0f, 0.0f }
    };

    // create the vertex buffer
    D3D11_BUFFER_DESC bd;
    ZeroMemory(&bd, sizeof(bd));
    bd.Usage = D3D11_USAGE_DYNAMIC;
    bd.ByteWidth = sizeof(VERTEX) * 4;
    bd.BindFlags = D3D11_BIND_VERTEX_BUFFER;
    bd.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
    dev->CreateBuffer(&bd, NULL, &pVBuffer);

    // copy the vertices into the buffer
    D3D11_MAPPED_SUBRESOURCE ms;
    devcon->Map(pVBuffer, NULL, D3D11_MAP_WRITE_DISCARD, NULL, &ms);  // map the buffer
    memcpy(ms.pData, OurVertices, sizeof(OurVertices));               // copy the data
    devcon->Unmap(pVBuffer, NULL);

    // create the index buffer out of DWORDs
    DWORD OurIndices[] =
    {
        0, 1, 2,  // side 1
        2, 1, 3,
    };

    // create the index buffer
    bd.Usage = D3D11_USAGE_DYNAMIC;
    bd.ByteWidth = sizeof(DWORD) * 6;
    bd.BindFlags = D3D11_BIND_INDEX_BUFFER;
    bd.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
    bd.MiscFlags = 0;
    dev->CreateBuffer(&bd, NULL, &pIBuffer);
    devcon->Map(pIBuffer, NULL, D3D11_MAP_WRITE_DISCARD, NULL, &ms);  // map the buffer
    memcpy(ms.pData, OurIndices, sizeof(OurIndices));                 // copy the data
    devcon->Unmap(pIBuffer, NULL);
}
In case you need it, here is the initialization of the rendering pipeline.
void Direct3D::InitPipeline()
{
    // compile the shaders
    ID3D10Blob *VS_Primary, *PS_Primary, *VS_Distortion, *PS_Distortion;
    D3DX11CompileFromFile("vs_primary.hlsl", 0, 0, "VShader", "vs_5_0", 0, 0, 0, &VS_Primary, 0, 0);
    D3DX11CompileFromFile("ps_primary.hlsl", 0, 0, "PShader", "ps_5_0", 0, 0, 0, &PS_Primary, 0, 0);
    D3DX11CompileFromFile("vs_distortion.hlsl", 0, 0, "VShader", "vs_5_0", 0, 0, 0, &VS_Distortion, 0, 0);
    D3DX11CompileFromFile("ps_distortion.hlsl", 0, 0, "main", "ps_5_0", 0, 0, 0, &PS_Distortion, 0, 0);

    // create the shader objects
    dev->CreateVertexShader(VS_Primary->GetBufferPointer(), VS_Primary->GetBufferSize(), NULL, &pVS_Primary);
    dev->CreatePixelShader(PS_Primary->GetBufferPointer(), PS_Primary->GetBufferSize(), NULL, &pPS_Primary);
    dev->CreateVertexShader(VS_Distortion->GetBufferPointer(), VS_Distortion->GetBufferSize(), NULL, &pVS_Distortion);
    dev->CreatePixelShader(PS_Distortion->GetBufferPointer(), PS_Distortion->GetBufferSize(), NULL, &pPS_Distortion);

    // set the shader objects
    devcon->VSSetShader(pVS_Primary, 0, 0);
    devcon->PSSetShader(pPS_Primary, 0, 0);

    // create the input element object
    D3D11_INPUT_ELEMENT_DESC ied[] =
    {
        { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
        { "COLOR", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 },
        { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 28, D3D11_INPUT_PER_VERTEX_DATA, 0 },
    };

    // use the input element descriptions to create the input layout
    dev->CreateInputLayout(ied, 3, VS_Primary->GetBufferPointer(), VS_Primary->GetBufferSize(), &pLayout_Primary);
    devcon->IASetInputLayout(pLayout_Primary);
    dev->CreateInputLayout(ied, 3, VS_Distortion->GetBufferPointer(), VS_Distortion->GetBufferSize(), &pLayout_Distortion);
    devcon->IASetInputLayout(pLayout_Distortion);

    // create the constant buffer
    D3D11_BUFFER_DESC bd;
    ZeroMemory(&bd, sizeof(bd));
    bd.Usage = D3D11_USAGE_DEFAULT;
    bd.ByteWidth = 64;
    bd.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
    dev->CreateBuffer(&bd, NULL, &pCBufferPrimaryShader);
    devcon->VSSetConstantBuffers(0, 1, &pCBufferPrimaryShader);

    ZeroMemory(&bd, sizeof(bd));
    bd.Usage = D3D11_USAGE_DEFAULT;
    bd.ByteWidth = 48;
    bd.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
    dev->CreateBuffer(&bd, NULL, &pCBufferDistortionShader);
}
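The D3DX11CompileFromFile calls pass 0 for the error blob, so if a shader fails to compile the failure is silent. A sketch of capturing and printing the compiler output instead (shown for the primary vertex shader only):

    // Sketch: capture the HLSL compiler's error messages instead of discarding them.
    ID3D10Blob *VS_Primary = NULL, *errors = NULL;
    HRESULT hr = D3DX11CompileFromFile("vs_primary.hlsl", 0, 0, "VShader", "vs_5_0",
        0, 0, 0, &VS_Primary, &errors, 0);
    if (errors){
        OutputDebugStringA((char*)errors->GetBufferPointer());  // compiler warnings/errors
        errors->Release();
    }
    if (FAILED(hr)){ /* shader did not compile, so VS_Primary is NULL */ }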
Pixel Shader:
Texture2D Texture;
SamplerState ss;

float4 PShader(float4 color : COLOR, float2 texcoord : TEXCOORD0) : SV_TARGET
{
    return color * Texture.Sample(ss, texcoord);
}
Vertex Shader:
cbuffer ConstantBuffer
{
    float4x4 matFinal;
}

struct VOut
{
    float4 color : COLOR;
    float2 texcoord : TEXCOORD0;
    float4 position : SV_POSITION;
};

VOut VShader(float4 position : POSITION, float4 color : COLOR, float2 texcoord : TEXCOORD0)
{
    VOut output;
    output.position = mul(matFinal, position);
    output.color = color;
    output.texcoord = texcoord;
    return output;
}
Answer 0 (score: 0)
From the code below, I do not see how you pass the texture from RenderLeft() to RenderRight(). You only pass the backbuffer to RenderRight().
RenderLeft(pTextureLeftRenderView, matFinalLeft, viewportLeft, true);
RenderRight(backbuffer, matFinalRight, viewportRight, false);
So the result is the texture rendered to the left viewport, while the right viewport only shows the color the back buffer was cleared with (green).