Edit: In hindsight those images may actually be correct, since they only show vector differences, so assuming that part is fine the problem is really somewhere in the BRDF code. I've added the full shader code and attached a new screenshot showing the artifact I'm seeing. It appears oversaturated at certain angles.. the problem may be in the distribution term. I also tried a Beckmann distribution model and it shows the same kind of problem..
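For reference, a standard Beckmann normal distribution term looks roughly like the sketch below (a sketch only, not necessarily the exact variant that was tried here; alpha is assumed to play the same role as MFD in the fragment shader further down):

// Sketch of a Beckmann normal distribution term, for comparison with the GGX
// term in calcBRDF below. 'alpha' is a roughness parameter (assumed equivalent
// to MFD); NdotH must be greater than zero.
float beckmannD(float NdotH, float alpha)
{
    float a2 = alpha * alpha;
    float NdotH2 = NdotH * NdotH;
    float e = (NdotH2 - 1.0) / (a2 * NdotH2);
    return exp(e) / (3.1415926535 * a2 * NdotH2 * NdotH2);
}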
Seen here, with the light source moving down across the terrain... it's oversaturated on the right side..
I'm having trouble computing directions in the vertex shader; the directions end up skewed toward one corner (the origin). I create the terrain with instancing, but the same problem appears if I just use a static plane.
My vertex shader looks like this (using Ogre3D):
#version 330 compatibility
#define MAP_HEIGHT_FACTOR 50000
#define MAP_SCALE_FACTOR 100

// attributes
in vec4 blendIndices;
in vec4 uv0;
in vec4 uv1;
in vec4 uv2;
in vec4 position;
in vec2 vtx_texcoord0;

uniform mat4 viewProjMatrix;
uniform mat4 modelMatrix;
uniform mat4 modelViewMatrix;
uniform mat4 projectionMatrix;
uniform mat4 viewMatrix;
uniform mat4 worldMatrix;
uniform mat4 worldViewProjMatrix;
uniform vec3 cameraPosition;
uniform vec3 sunPosition;
uniform sampler2D heightmap;

out vec4 vtxPosWorld;
out vec3 lightDirection;
out vec3 viewVector;
out vec2 l_texcoord0;

void main()
{
    // scale the instanced vertex and add the per-instance offsets stored in uv0.w / uv1.w / uv2.w
    // NOTE: this local declaration shadows the 'out vec4 vtxPosWorld' above
    vec4 vtxPosWorld = vec4((gl_Vertex.x * MAP_SCALE_FACTOR) + uv0.w,
                            (gl_Vertex.y * MAP_SCALE_FACTOR) + uv1.w,
                            (gl_Vertex.z * MAP_SCALE_FACTOR) + uv2.w,
                            1.0) * worldMatrix;

    l_texcoord0 = vec2(vtxPosWorld.x / (8192 * MAP_SCALE_FACTOR),
                       vtxPosWorld.z / (8192 * MAP_SCALE_FACTOR));

    // take the height from the heightmap
    vec4 hmt = texture(heightmap, l_texcoord0);
    float height = hmt.x * MAP_HEIGHT_FACTOR;
    vtxPosWorld = vec4(vtxPosWorld.x, height, vtxPosWorld.z, vtxPosWorld.w);

    // light and view directions, taken into view space
    lightDirection = (normalize(vec4(sunPosition, 1.0)) * viewMatrix).xyz;
    viewVector = normalize((vec4(cameraPosition, 1.0) * viewMatrix).xyz - (vtxPosWorld * viewMatrix).xyz);

    gl_Position = worldViewProjMatrix * vtxPosWorld;
}
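One common source of skewed directions is mixing coordinate spaces: sunPosition and cameraPosition are world-space uniforms here, while the directions are pushed through viewMatrix with a row-vector multiply. For comparison, a minimal sketch that keeps both directions in world space (assuming sunPosition and cameraPosition really are world-space positions, as their names suggest; this is an alternative, not the code actually used above) would be:

// Sketch: compute both directions per vertex, entirely in world space.
// Assumes sunPosition and cameraPosition are world-space positions.
lightDirection = normalize(sunPosition - vtxPosWorld.xyz);    // surface -> light
viewVector     = normalize(cameraPosition - vtxPosWorld.xyz); // surface -> camera

Whichever space is chosen, the normal used in the fragment shader has to be expressed in the same space for the dot products in the BRDF to be meaningful.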
The fragment shader:
#version 330 compatibility
#define TERRAIN_SIZE 8192.0
#define HEIGHT_SCALE_FACTOR 50000
#define MAP_SCALE_FACTOR 100
#define M_PI 3.1415926535897932384626433832795

in vec2 l_texcoord0;
in vec4 vtxPosWorld;
in vec3 viewVector;
in vec3 lightDirection;

uniform vec3 sunPosition;
uniform vec3 cameraPosition;
uniform sampler2D heightmap;

// Smith visibility helper
float G1V(float dotP, float k)
{
    return 1.0 / (dotP * (1.0 - k) + k);
}

float calcBRDF(vec3 normal, float fresnel, float MFD, vec3 sunColor)
{
    float F = fresnel;
    vec3 Nn = normalize(normal.xyz);
    vec3 Vn = viewVector;
    vec3 Ln = lightDirection;
    vec3 Hn = normalize(viewVector + lightDirection);

    float NdotV = max(dot(Nn, Vn), 0.0);
    float NdotL = max(dot(Nn, Ln), 0.0);
    float NdotH = max(dot(Nn, Hn), 0.1);
    float VdotH = max(dot(Vn, Hn), 0.0);
    float LdotH = max(dot(Ln, Hn), 0.0);

    // Microfacet distribution
    float NdotHSqr = NdotH * NdotH;
    float alphaSqr = MFD * MFD;

    // GGX distribution (better performance)
    float denom = NdotHSqr * (alphaSqr - 1.0) + 1.0;
    float GGXD = alphaSqr / (M_PI * denom * denom);

    // Smith geometry term
    float k = MFD / 2.0;
    float GGX = G1V(NdotL, k) * G1V(NdotV, k);

    return F * GGXD * GGX;
}

float calcFresnel(float R)
{
    vec3 Hn = normalize(viewVector + lightDirection);
    vec3 Vn = viewVector;
    vec3 Ln = lightDirection;
    float VdotH = dot(Vn, Hn);
    float NdotL = dot(Hn, Vn); // note: despite the name, this is dot(H, V)

    // Schlick approximation
    float fresnel = R + (1.0 - R) * pow(1.0 - NdotL, 5.0);
    return fresnel;
}

vec3 calcNormal(sampler2D heightmap, vec2 texcoord)
{
    const vec2 size = vec2(MAP_SCALE_FACTOR, 0.0);
    vec3 off = vec3(1.0, 0.0, 1.0) / TERRAIN_SIZE;

    float hL = texture2D(heightmap, texcoord - off.xy).x * HEIGHT_SCALE_FACTOR;
    float hR = texture2D(heightmap, texcoord + off.xy).x * HEIGHT_SCALE_FACTOR;
    float hD = texture2D(heightmap, texcoord - off.yz).x * HEIGHT_SCALE_FACTOR;
    float hU = texture2D(heightmap, texcoord + off.yz).x * HEIGHT_SCALE_FACTOR;

    vec3 va = normalize(vec3(size.xy, hL - hR));
    vec3 vb = normalize(vec3(size.yx, hD - hU));

    // debug: currently returns a constant normal; the cross-product normal
    // below (remapped into [0,1]) is never reached
    return vec3(1.0, 1.0, 1.0);
    return normalize(cross(va, vb) / 2.0 + 0.5);
}

void main()
{
    vec3 normal = calcNormal(heightmap, l_texcoord0);
    float N = 1.69;                     // index of refraction
    float microFacetDistribution = 1.5; // roughness parameter fed to the BRDF
    vec3 sunColor = vec3(1.0, 1.0, 1.0);

    float Rfactor = calcFresnelReflectance(N); // not shown in the post
    float fresnel = calcFresnel(Rfactor);
    float brdf = calcBRDF(normal, fresnel, microFacetDistribution, sunColor);
    float conservedBrdf = clamp(brdf, 0.0, fresnel);

    gl_FragColor = vec4(vec3(0.5) * conservedBrdf, 1.0);
}
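calcFresnelReflectance isn't shown in the post; a typical way to get the normal-incidence reflectance R0 from an index of refraction is the sketch below (assuming the outside medium is air, n = 1.0; the actual implementation used here may differ):

// Sketch only: normal-incidence reflectance from an index of refraction,
// assuming the surrounding medium is air (n = 1.0).
float calcFresnelReflectance(float ior)
{
    float r0 = (ior - 1.0) / (ior + 1.0);
    return r0 * r0;
}

For N = 1.69 this gives R0 ≈ 0.066, which is then fed to calcFresnel as R.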
I've tried using view space, world space, etc. This seems like a simple/stupid problem, but I can't figure it out: any suggestions appreciated..
Answer 0 (score: 0)
Of course the answer was a silly one. First, the normals were incorrect. That skewed the light direction, making the light appear to come from only one direction.
Second, the light direction itself needed to be negated.
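The answer doesn't show the exact code, but a sketch of what those two fixes could look like, using the same defines and sampling pattern as the fragment shader above (one plausible correction, not necessarily the answerer's exact code), is:

// Sketch of the fix: return a real, signed normal (no [0,1] remap, no constant),
// with Y up to match the heightmap displacement in the vertex shader.
vec3 calcNormal(sampler2D heightmap, vec2 texcoord)
{
    vec3 off = vec3(1.0, 0.0, 1.0) / TERRAIN_SIZE;
    float hL = texture2D(heightmap, texcoord - off.xy).x * HEIGHT_SCALE_FACTOR;
    float hR = texture2D(heightmap, texcoord + off.xy).x * HEIGHT_SCALE_FACTOR;
    float hD = texture2D(heightmap, texcoord - off.yz).x * HEIGHT_SCALE_FACTOR;
    float hU = texture2D(heightmap, texcoord + off.yz).x * HEIGHT_SCALE_FACTOR;
    // central differences: X and Z slopes, Y up; one texel step spans
    // MAP_SCALE_FACTOR world units, hence the 2.0 * MAP_SCALE_FACTOR term
    return normalize(vec3(hL - hR, 2.0 * MAP_SCALE_FACTOR, hD - hU));
}

// ...and in the vertex shader, negate the light direction:
lightDirection = -(normalize(vec4(sunPosition, 1.0)) * viewMatrix).xyz;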