Shadow mapping in DirectX: sampled depth texture always returns 0

Date: 2014-12-17 09:30:00

Tags: c++ graphics 3d directx directx-11

I am currently working through a DirectX 11 shadow mapping example.

Here are my steps:

  • Create two vertex HLSL files and two pixel HLSL files: one pair for standard drawing, the other for rendering the shadow depth into the depth buffer.
  • Create the depth-stencil view for the shadow map.
  • Create the texture and bind it with: D3D11_BIND_DEPTH_STENCIL | D3D11_BIND_SHADER_RESOURCE
  • Every frame:
    • Set the new render target for the shadow pass: OMSetRenderTargets(0, nullptr, m_shadowDepthStencilView.Get());
    • Clear the depth buffer: ClearDepthStencilView(m_depthStencilView.Get(), D3D11_CLEAR_DEPTH, 1.0f, 0);
    • Render all objects.
    • Reset the render target back to the standard render target view: OMSetRenderTargets(1, renderTarget.GetAddressOf(), m_standarDepthStencilView.Get());
    • Bind the depth texture (written during the previous pass) as a pixel shader resource: PSSetShaderResources(1, 1, m_directLightShadowResourceView.GetAddressOf());
    • Render all objects again (see the sketch right after this list).
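
A minimal sketch of that per-frame sequence, under these assumptions: deviceContext is the immediate context, RenderAllObjects() is a hypothetical helper, and the other names are the ComPtr members quoted above (the clear here assumes the shadow depth buffer is the one being cleared):

// Shadow pass: depth only, no color target.
deviceContext->OMSetRenderTargets(0, nullptr, m_shadowDepthStencilView.Get());
deviceContext->ClearDepthStencilView(m_shadowDepthStencilView.Get(), D3D11_CLEAR_DEPTH, 1.0f, 0);
RenderAllObjects(/* shadow shaders */);

// Main pass: switch back to the standard targets first, so the shadow depth
// texture is no longer bound to the output-merger stage...
deviceContext->OMSetRenderTargets(1, renderTarget.GetAddressOf(), m_standarDepthStencilView.Get());

// ...and only then bind it as a pixel shader resource for sampling.
deviceContext->PSSetShaderResources(1, 1, m_directLightShadowResourceView.GetAddressOf());
RenderAllObjects(/* standard shaders */);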

Everything seems to work, except when I read the depth value in the pixel shader using:

float depthValue = depthTexture.Sample(samplerClamp,projectTexCoord).r;

depthValue always returns 0, nothing else. I inspected my depthTexture with the Visual Studio graphics diagnostics and it looks fine.

But when I sample that texture, the result is always 0.

I don't know why; I have been stuck on this for a long time, so please help me.

Please note:

  • depthTexture format: DXGI_FORMAT_R24G8_TYPELESS
  • shaderResourceView format: DXGI_FORMAT_R24_UNORM_X8_TYPELESS (a creation sketch follows this list)
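
For reference, a minimal sketch of how a depth texture with those two formats and both views might be created (width, height, and variable names are placeholders; error handling is omitted):

#include <d3d11.h>
#include <wrl/client.h>
using Microsoft::WRL::ComPtr;

// Sketch: create a shadow depth texture usable both as a depth-stencil target
// and as a shader resource. Names and sizes are placeholders.
void CreateShadowDepthTexture(ID3D11Device* device, UINT width, UINT height,
                              ComPtr<ID3D11Texture2D>& shadowTexture,
                              ComPtr<ID3D11DepthStencilView>& shadowDSV,
                              ComPtr<ID3D11ShaderResourceView>& shadowSRV)
{
	D3D11_TEXTURE2D_DESC texDesc = {};
	texDesc.Width = width;
	texDesc.Height = height;
	texDesc.MipLevels = 1;
	texDesc.ArraySize = 1;
	texDesc.Format = DXGI_FORMAT_R24G8_TYPELESS;	// typeless, so two views can reinterpret it
	texDesc.SampleDesc.Count = 1;
	texDesc.Usage = D3D11_USAGE_DEFAULT;
	texDesc.BindFlags = D3D11_BIND_DEPTH_STENCIL | D3D11_BIND_SHADER_RESOURCE;
	device->CreateTexture2D(&texDesc, nullptr, &shadowTexture);

	// Depth-stencil view: interpret the data as 24-bit depth + 8-bit stencil for rendering.
	D3D11_DEPTH_STENCIL_VIEW_DESC dsvDesc = {};
	dsvDesc.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
	dsvDesc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2D;
	device->CreateDepthStencilView(shadowTexture.Get(), &dsvDesc, &shadowDSV);

	// Shader resource view: expose the 24 depth bits in the red channel for sampling.
	D3D11_SHADER_RESOURCE_VIEW_DESC srvDesc = {};
	srvDesc.Format = DXGI_FORMAT_R24_UNORM_X8_TYPELESS;
	srvDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
	srvDesc.Texture2D.MipLevels = 1;
	device->CreateShaderResourceView(shadowTexture.Get(), &srvDesc, &shadowSRV);
}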

Here are my HLSL files.

Standard vertex shader



/////////////////////
// CONSTANT BUFFER
cbuffer worldMatrixBuffer : register(b0)
{
	float4x4 worldMatrix[10];
};

cbuffer cameraBuffer : register(b1)
{
	float4x4 viewProjectionMatrix;
};

cbuffer directLightBuffer : register(b2)
{
	float4x4 directLightViewProjectionMatrix;
};


////////////////////////////
// INPUT VERTEX STRUCTURE
struct VIn
{
	float4 position : POSITION;
	float4 normal : NORMAL;
	float worldMatrixIndex : TEXCOORD0;
	float2 texcoord : TEXCOORD1;
};


//////////////////////
// OUTPUT STRUCTURE
struct VOut
{
	float3 normal : NORMAL;
	float4 position : SV_POSITION;
	float2 texcoord : TEXCOORD0;
	float4 lightViewPosition : TEXCOORD1;
};


////////////////////////
// MAIN VERTEX SHADER
VOut main(VIn vertex)
{
	// Create output struct
	VOut output;

	// Change the position vector to be 4 units for proper matrix calculations.
	vertex.position.w = 1.0f;

	// Get the world matrix for this object
	float4x4 currentWorldMatrix = worldMatrix[vertex.worldMatrixIndex];

	// vertex from object space to light clip space
	output.lightViewPosition = mul(currentWorldMatrix, vertex.position);
	output.lightViewPosition = mul(directLightViewProjectionMatrix, output.lightViewPosition);

	// vertex from object space to camera clip space
	output.position = mul(currentWorldMatrix, vertex.position);
	output.position = mul(viewProjectionMatrix, output.position);

	// transform normal vector from object-space to world space
	output.normal = mul((float3x3)currentWorldMatrix,	// convert to matrix 3x3 is important because
														// we don't want to translate normal vector
		vertex.normal);

	// we need to normalize the normal vector to perform the next calculation
	output.normal = normalize(output.normal);

	// Set texture coordinate
	output.texcoord = vertex.texcoord;

	// Return output values
	return output;
}




Standard pixel shader



/////////////////////
// CONSTANT BUFFER
cbuffer lightBuffer : register(b0)
{
	// Light information
	float4 lightDirection;
	float4 diffuseColor;
	float4 ambientColor;
};


//////////////////////
// INPUT STRUCTURE
struct PIn
{
	float3 normal : NORMAL;
	float4 position : SV_POSITION;
	float2 texcoord : TEXCOORD0;
	float4 lightViewPosition : TEXCOORD1;
};


//////////////////////
// TEXTURE RESOURCE
Texture2D objectTexture : register(t0);	// register(t...) tells the shader that this variable
										// retrieves data from texture register 0.
Texture2D depthTexture : register(t1);


/////////////
// SAMPLER
SamplerState samplerWrap : register(s0);
SamplerState samplerClamp : register(s1);


///////////////////////
// MAIN PIXEL SHADER
float4 main(PIn pixel) : SV_TARGET
{
	//----------------------------------------------------------------------------------
	// Set the bias value for fixing the floating point precision issues.
	float bias = 0.001f;

	// set minimum color to ambient color
	float4 color = ambientColor;

	// Calculate the projected texture coordinates.
	float2 projectTexCoord;
	projectTexCoord.x = pixel.lightViewPosition.x / pixel.lightViewPosition.w / 2.0f + 0.5f;
	projectTexCoord.y = -pixel.lightViewPosition.y / pixel.lightViewPosition.w / 2.0f + 0.5f;
	// Dividing x and y by w converts lightViewPosition from clip space to the -1 to 1 range;
	// dividing by 2 and adding 0.5 then remaps it to the 0 -> 1 texture coordinate range.

	if (saturate(projectTexCoord.x) == projectTexCoord.x
		&& saturate(projectTexCoord.y) == projectTexCoord.y)
	{
		// Sample the shadow map depth value from the depth texture using the sampler at
		// the projected texture coordinate location.
		float depthValue = depthTexture.Sample(samplerClamp, projectTexCoord).r;

		// Calculate the depth of the light.
		float lightDepthValue = pixel.lightViewPosition.z / pixel.lightViewPosition.w;

		// Subtract the bias from the lightDepthValue.
		lightDepthValue = lightDepthValue - bias;

		// Compare the depth of the shadow map value and the depth of the light to determine
		// whether to shadow or to light this pixel. If the light is in front of the object
		// then light the pixel, if not then shadow this pixel since an object (occluder)
		// is casting a shadow on it.
		if (lightDepthValue > depthValue)
		{
			float4 normalLightDir = normalize(lightDirection);
			float diffuseBrightness = saturate(				// clamp the result to between 0 and 1
				dot(pixel.normal, normalLightDir));			// dot product of normal vector and light direction

			// add diffuse light to output color
			color += diffuseColor * diffuseBrightness;
		}
	}

	// Multiply the final color with its texture color
	if (pixel.texcoord.x > 1)
	{
		return color * float4(1.0f, 1.0f, 1.0f, 1.0f);
	}
	else
	{
		return color * objectTexture.Sample(samplerWrap, pixel.texcoord);
	}
}




Shadow vertex shader



/////////////////////
// CONSTANT BUFFER
cbuffer worldMatrixBuffer : register(b0)
{
	float4x4 worldMatrix[10];
};

cbuffer directLightBuffer : register(b2)
{
	float4x4 directLightViewProjectionMatrix;
};


////////////////////////////
// INPUT VERTEX STRUCTURE
struct VIn
{
	float4 position : POSITION;
	float4 normal : NORMAL;
	float worldMatrixIndex : TEXCOORD0;
	float2 texcoord : TEXCOORD1;
};


//////////////////////
// OUTPUT STRUCTURE
struct VOut
{
	float4 position : SV_POSITION;
};


////////////////////////
// MAIN VERTEX SHADER
VOut main(VIn vertex)
{
	// Create output struct
	VOut output;

	// Get the world matrix for this object
	float4x4 currentWorldMatrix = worldMatrix[vertex.worldMatrixIndex];

	// vertex from object space to light clip space
	output.position = mul(currentWorldMatrix, vertex.position);
	output.position = mul(directLightViewProjectionMatrix, output.position);

	return output;
}




Its pixel shader, which does nothing special



////////////////////////
// PIXEL INPUT STRUCT
struct PIn
{
	float4 position : SV_POSITION;
};

float4 main(PIn pixel) : SV_TARGET
{
	return float4(1.0f, 1.0f, 1.0f, 1.0f);
}




1 Answer:

Answer 0 (score: 0)

Sorry everyone, I have figured it out. I feel so stupid.

  • The reason I could not read the depth buffer is that I called PSSetShaderResource before OMSetRenderTarget. That means my depth buffer was still bound to the output-merger (OM) stage, so it could not also be bound to the pixel shader (PS) stage.

  • I only had to move those calls a few lines and everything works correctly (a sketch follows below).
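
In case it helps anyone else, a minimal sketch of the corrected ordering for the main pass, using the member names from the question (deviceContext is assumed to be the immediate context); with the debug layer enabled, D3D11 also reports this kind of read/write hazard when it forces the conflicting binding to null:

// Main pass, corrected order.
// 1. Rebind the standard targets FIRST; this unbinds the shadow depth buffer
//    from the output-merger stage.
deviceContext->OMSetRenderTargets(1, renderTarget.GetAddressOf(), m_standarDepthStencilView.Get());

// 2. Only now bind the shadow depth texture as a pixel shader resource. If it
//    were still bound as the depth-stencil target, the runtime would force this
//    slot to null and the shader would sample 0.
deviceContext->PSSetShaderResources(1, 1, m_directLightShadowResourceView.GetAddressOf());

// Optionally, unbind the shader resource again before the next shadow pass.
ID3D11ShaderResourceView* nullSRV = nullptr;
deviceContext->PSSetShaderResources(1, 1, &nullSRV);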

I am very sorry to everyone who spent time on my question.