我正在渲染 2 个平面，每个平面混合 2 张纹理。但是，当我把其中一张纹理留空时，GPU 会用来历不明的纹理顶替这张不存在的纹理，甚至会复用前一个平面的纹理。我在着色器中写了一个变通方案：用 uniform 标志检测 sampler2D textureN 是否已被赋值：
void main()
{
    // Start from opaque black; each enabled texture contributes half.
    vec4 finalPx = vec4(0.0, 0.0, 0.0, 1.0);
    // Sample unconditionally: calling texture2D inside non-uniform control
    // flow yields undefined results when the bound texture uses mipmapping
    // or anisotropic filtering (implicit derivatives are required).
    vec4 color1 = texture2D(tex_1, uv_1) * 0.5;
    vec4 color2 = texture2D(tex_2, uv_2) * 0.5;
    // Only the accumulation is conditional, which is well-defined.
    if(has_tex_1 > 0.0)
    {
        finalPx += color1;
    }
    if(has_tex_2 > 0.0)
    {
        finalPx += color2;
    }
    gl_FragColor = finalPx;
}
……但是我担心这个变通方案会让我以后的着色器编程变得复杂。我原以为 WebGL 会自动遵循应用逻辑：如果应用没有为某个采样器加载纹理，着色器去采样它时就应该什么也读不到。
这是我的应用程序
// Shared quad geometry: two triangles forming a square, with two separate
// UV channels so each texture can be mapped independently on the same quad.
const PICTURES = {
// Two triangles (0,1,2) and (0,3,1) indexing into the four corners below.
indices: [
0, 1, 2,
0, 3, 1,
],
// Quad corners in the XY plane (z = 0), spanning [-1, 1] in x and y.
vertices: [
1.000000, -1.000000, 0.000000,
-1.000000, 1.000000, 0.000000,
-1.000000, -1.000000, 0.000000,
1.000000, 1.000000, 0.000000,
],
// One UV pair per vertex, per channel. Values outside [0, 1] rely on the
// texture's wrap mode (repeat for power-of-2 images, clamp otherwise).
uv_layers: {
'uv_1': [
1.000000, 1.077309,
0.000000, -0.077309,
0.000000, 1.077309,
1.000000, -0.077309,
],
'uv_2': [
1.500000, 1.500000,
-0.500000, -0.500000,
-0.500000, 1.500000,
1.500000, -0.500000,
],
},
};
// GLSL vertex shader source: transforms each vertex by model, view and
// projection matrices and forwards the two per-vertex UV channels
// (attribute uv -> varying UV, attribute uv2 -> varying UV2) to the
// fragment shader.
const vertexShaderCode = `
precision mediump float;
attribute vec4 vertex;
attribute vec2 uv;
varying vec2 UV;
attribute vec2 uv2;
varying vec2 UV2;
uniform mat4 projectionMatrix, viewMatrix, modelMatrix;
void main()
{
UV = uv;
UV2 = uv2;
gl_Position = projectionMatrix * viewMatrix * modelMatrix * vertex;
}`;
// GLSL fragment shader source: samples two textures through their own UV
// channels and averages them (0.5 + 0.5). Note there is no "has texture"
// guard here — whatever is bound to texture units 0 and 1 at draw time
// (WebGL is a state machine) will be sampled.
const fragmentShaderCode = `
precision mediump float;
varying vec2 UV;
varying vec2 UV2;
uniform sampler2D bitmap_1;
uniform sampler2D bitmap_2;
void main() {
// Mix the bitmaps in equal proportions
vec4 px1 = texture2D(bitmap_1, UV) * 0.5;
vec4 px2 = texture2D(bitmap_2, UV2) * 0.5;
gl_FragColor = px1 + px2;
}`;
/**
 * Compiles a vertex/fragment shader pair from source strings.
 *
 * @param {WebGLRenderingContext} gl - The WebGL context.
 * @param {string} vertexShaderCode - GLSL source for the vertex shader.
 * @param {string} fragmentShaderCode - GLSL source for the fragment shader.
 * @returns {[WebGLShader, WebGLShader]|null} [vertexShader, fragmentShader],
 *   or null if either stage fails to compile (an alert is shown with the log).
 */
function loadShader(gl, vertexShaderCode, fragmentShaderCode) {
  // Compiles one shader stage; alerts and returns null on failure.
  // Extracted to remove the copy-pasted vertex/fragment sequences.
  function compile(type, source) {
    const shader = gl.createShader(type);
    gl.shaderSource(shader, source);
    gl.compileShader(shader);
    if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
      alert('An error occurred compiling the shaders: ' + gl.getShaderInfoLog(shader));
      gl.deleteShader(shader);
      return null;
    }
    return shader;
  }
  const vertexShader = compile(gl.VERTEX_SHADER, vertexShaderCode);
  if (vertexShader === null) {
    return null;
  }
  const fragmentShader = compile(gl.FRAGMENT_SHADER, fragmentShaderCode);
  if (fragmentShader === null) {
    return null;
  }
  return [vertexShader, fragmentShader];
}
/**
 * Creates a WebGL texture and fills it asynchronously from an image URL.
 *
 * Because images have to be downloaded over the internet they might take a
 * moment until they are ready. Until then a single magenta pixel is put in
 * the texture so it can be used immediately; when the image has finished
 * downloading the texture is updated with its contents.
 *
 * @param {WebGLRenderingContext} gl - The WebGL context.
 * @param {string} url - Image URL to load.
 * @returns {WebGLTexture} The texture (placeholder until the image loads).
 */
function loadTexture(gl, url) {
  function isPowerOf2(value) { return (value & (value - 1)) == 0; }
  const texture = gl.createTexture();
  gl.bindTexture(gl.TEXTURE_2D, texture);
  const level = 0;
  const internalFormat = gl.RGBA;
  const width = 1;
  const height = 1;
  const border = 0;
  const srcFormat = gl.RGBA;
  const srcType = gl.UNSIGNED_BYTE;
  // BUG FIX: with UNSIGNED_BYTE each channel is 0-255, not 0.0-1.0.
  // [1, 0, 1, 1] was almost black; use 255s so the warning color really
  // is magenta as intended.
  const pixel = new Uint8Array([255, 0, 255, 255]); // magenta to warn if there is no texture
  gl.texImage2D(gl.TEXTURE_2D, level, internalFormat, width, height, border, srcFormat, srcType, pixel);
  const image = new Image();
  image.onload = function() {
    gl.bindTexture(gl.TEXTURE_2D, texture);
    gl.texImage2D(gl.TEXTURE_2D, level, internalFormat, srcFormat, srcType, image);
    // WebGL1 has different requirements for power of 2 images vs non power
    // of 2 images, so check if the image is a power of 2 in both dimensions.
    if (isPowerOf2(image.width) && isPowerOf2(image.height)) {
      // Yes, it's a power of 2. Generate mips.
      gl.generateMipmap(gl.TEXTURE_2D);
    } else {
      // No, it's not a power of 2. Turn off mips and set wrapping to clamp
      // to edge (required by WebGL1 for NPOT textures).
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
    }
  };
  image.src = url;
  return texture;
}
/**
 * Wraps a linked WebGL shader program.
 *
 * @constructor
 * @param {WebGLRenderingContext} gl - The WebGL context.
 * @param {[WebGLShader, WebGLShader]} shaders - [vertexShader, fragmentShader]
 *   as returned by loadShader.
 * @throws {Error} If the program fails to link (also shows an alert).
 */
function Material(gl, shaders) {
  this.shaderProgram = gl.createProgram();
  gl.attachShader(this.shaderProgram, shaders[0]); // Vertex shader
  gl.attachShader(this.shaderProgram, shaders[1]); // Fragment shader
  gl.linkProgram(this.shaderProgram);
  if (!gl.getProgramParameter(this.shaderProgram, gl.LINK_STATUS)) {
    const log = gl.getProgramInfoLog(this.shaderProgram);
    alert('Unable to initialize the shader program: ' + log);
    // BUG FIX: `return null` in a constructor invoked with `new` is ignored
    // (the instance is returned anyway), so callers could never detect the
    // failure. Throw instead so the error cannot be silently carried on.
    throw new Error('Unable to initialize the shader program: ' + log);
  }
}
/**
 * Entry point: initializes WebGL on #glcanvas, builds two textured quads
 * (each blending two textures through its own pair of UV channels), and
 * starts the render loop.
 */
function main() {
  const surface = document.getElementById('glcanvas');
  const gl = surface.getContext('webgl');
  if (!gl) {
    alert('Unable to initialize WebGL. Your browser or machine may not support it.');
    return;
  }
  // One material for two textures mapped within its own uv maps, like any 3D software could allow
  const shaders = loadShader(gl, vertexShaderCode, fragmentShaderCode);

  // Uploads the shared quad geometry plus both UV channels, loads the two
  // textures, and bundles everything one draw call needs. Extracted to
  // remove the copy-pasted "mesh 1" / "mesh 2" setup blocks.
  function buildMesh(textureFileA, textureFileB, offsetX) {
    // Index buffer: triangle ordering for the quad.
    const indexBuffer = gl.createBuffer();
    gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indexBuffer);
    gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(PICTURES.indices), gl.STATIC_DRAW);
    // Vertex positions.
    const vertexBuffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(PICTURES.vertices), gl.STATIC_DRAW);
    // Two UV channels, one per bitmap.
    const uvBufferA = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, uvBufferA);
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(PICTURES.uv_layers['uv_1']), gl.STATIC_DRAW);
    const uvBufferB = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, uvBufferB);
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(PICTURES.uv_layers['uv_2']), gl.STATIC_DRAW);
    // Per-mesh model transform: translate along X.
    const modelMatrix = mat4.create();
    mat4.translate(modelMatrix, modelMatrix, [offsetX, 0.0, 0.0]);
    return {
      indices: PICTURES.indices,
      indexBuffer: indexBuffer,
      vertexBuffer: vertexBuffer,
      uvBuffer_A: uvBufferA,
      uvBuffer_B: uvBufferB,
      material: new Material(gl, shaders),
      texture_A: loadTexture(gl, TEXTURES_FOLDER + textureFileA),
      texture_B: loadTexture(gl, TEXTURES_FOLDER + textureFileB),
      modelMatrix: modelMatrix,
    };
  }

  const meshData_1 = buildMesh('old_fashioned_portrait_1.jpg', 'demon.jpg', -1.5);
  const meshData_2 = buildMesh('old_fashioned_portrait_2.jpg', 'phantom_girl.jpg', 1.5);
  // Kick off the render loop; render() re-schedules itself each frame.
  requestAnimationFrame(function () { render(gl, meshData_1, meshData_2, Date.now() * 0.001); });
}
// Per-frame draw: clears the canvas, rebuilds the camera matrices, then
// draws both quads. NOTE(review): WebGL is a state machine — texture
// bindings made while drawing mesh 1 persist into mesh 2's draw call
// unless they are explicitly rebound or unbound, which is what the
// commented-out section below demonstrates.
function render(gl, meshData_1, meshData_2, before)
{
// Frame timing in seconds.
var now = Date.now()*0.001;
var delta = now-before;
// NOTE(review): 'rotation' is never declared anywhere in this file, so
// this line throws a ReferenceError on the first frame — declare it at
// module scope (and actually use it) or remove this statement.
rotation += delta;
// ------------------------------------canvas----------------------------------------
gl.clearColor(0.0, 0.5, 0.5, 1.0);
gl.clearDepth(1.0);
gl.enable(gl.DEPTH_TEST);
gl.depthFunc(gl.LEQUAL);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
// Perspective camera pulled back 5 units on Z.
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const fieldOfView = 45 * Math.PI / 180;
const zNear = 0.1;
const zFar = 100.0;
let projectionMatrix = mat4.create();
mat4.perspective(projectionMatrix, fieldOfView, aspect, zNear, zFar);
let viewMatrix = mat4.create();
mat4.translate(viewMatrix, viewMatrix, [0.0, 0.0, -5.0]);
// -------------------------------------mesh 1---------------------------------------
gl.useProgram(meshData_1.material.shaderProgram);
// Vertices transformations
gl.uniformMatrix4fv(gl.getUniformLocation(meshData_1.material.shaderProgram, 'projectionMatrix'), false, projectionMatrix);
gl.uniformMatrix4fv(gl.getUniformLocation(meshData_1.material.shaderProgram, 'viewMatrix'), false, viewMatrix);
gl.uniformMatrix4fv(gl.getUniformLocation(meshData_1.material.shaderProgram, 'modelMatrix'), false, meshData_1.modelMatrix);
// Give the GPU the order of the points to form the triangles
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, meshData_1.indexBuffer);
// Give the GPU the transformed vertices
gl.bindBuffer(gl.ARRAY_BUFFER, meshData_1.vertexBuffer);
gl.vertexAttribPointer(gl.getAttribLocation(meshData_1.material.shaderProgram, 'vertex'), 3, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(gl.getAttribLocation(meshData_1.material.shaderProgram, 'vertex'));
// First texture mapped in the triangles with the first uv coordinates
gl.bindBuffer(gl.ARRAY_BUFFER, meshData_1.uvBuffer_A);
gl.vertexAttribPointer(gl.getAttribLocation(meshData_1.material.shaderProgram, 'uv'), 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(gl.getAttribLocation(meshData_1.material.shaderProgram, 'uv'));
// Texture unit 0 <- texture_A, fed to sampler uniform 'bitmap_1'.
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, meshData_1.texture_A);
gl.uniform1i(gl.getUniformLocation(meshData_1.material.shaderProgram, 'bitmap_1'), 0);
// Second texture mapped in the triangles with the second uv coordinates
gl.bindBuffer(gl.ARRAY_BUFFER, meshData_1.uvBuffer_B);
gl.vertexAttribPointer(gl.getAttribLocation(meshData_1.material.shaderProgram, 'uv2'), 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(gl.getAttribLocation(meshData_1.material.shaderProgram, 'uv2'));
// Texture unit 1 <- texture_B, fed to sampler uniform 'bitmap_2'.
gl.activeTexture(gl.TEXTURE1);
gl.bindTexture(gl.TEXTURE_2D, meshData_1.texture_B);
gl.uniform1i(gl.getUniformLocation(meshData_1.material.shaderProgram, 'bitmap_2'), 1);
gl.drawElements(gl.TRIANGLES, meshData_1.indices.length, gl.UNSIGNED_SHORT, 0);
gl.useProgram(null);
// -------------------------------------mesh 2---------------------------------------
gl.useProgram(meshData_2.material.shaderProgram);
// Vertices transformations
gl.uniformMatrix4fv(gl.getUniformLocation(meshData_2.material.shaderProgram, 'projectionMatrix'), false, projectionMatrix);
gl.uniformMatrix4fv(gl.getUniformLocation(meshData_2.material.shaderProgram, 'viewMatrix'), false, viewMatrix);
gl.uniformMatrix4fv(gl.getUniformLocation(meshData_2.material.shaderProgram, 'modelMatrix'), false, meshData_2.modelMatrix);
// Give the GPU the order of the points to form the triangles
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, meshData_2.indexBuffer);
// Give the GPU the transformed vertices
gl.bindBuffer(gl.ARRAY_BUFFER, meshData_2.vertexBuffer);
gl.vertexAttribPointer(gl.getAttribLocation(meshData_2.material.shaderProgram, 'vertex'), 3, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(gl.getAttribLocation(meshData_2.material.shaderProgram, 'vertex'));
// First texture mapped in the triangles with the first uv coordinates
在继续执行代码之前,我指出了这个问题:
如果我发表评论,则下一个纹理渲染就像应用程序(GPU?)一样,将变量'uniform sampler2D bitmap_1'与上一个平面的第一个纹理一起馈入:'old_fashioned_portrait_1.jpg'
// NOTE(review): with this block commented out, texture unit 0 still holds
// mesh 1's texture_A from the draw above, so mesh 2 samples
// 'old_fashioned_portrait_1.jpg' through 'bitmap_1' — the state-machine
// behavior the question is about.
/*
gl.bindBuffer(gl.ARRAY_BUFFER, meshData_2.uvBuffer_A);
gl.vertexAttribPointer(gl.getAttribLocation(meshData_2.material.shaderProgram, 'uv'), 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(gl.getAttribLocation(meshData_2.material.shaderProgram, 'uv'));
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, meshData_2.texture_A);
gl.uniform1i(gl.getUniformLocation(meshData_2.material.shaderProgram, 'bitmap_1'), 0);
*/
// Second texture mapped in the triangles with the second uv coordinates
gl.bindBuffer(gl.ARRAY_BUFFER, meshData_2.uvBuffer_B);
gl.vertexAttribPointer(gl.getAttribLocation(meshData_2.material.shaderProgram, 'uv2'), 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(gl.getAttribLocation(meshData_2.material.shaderProgram, 'uv2'));
gl.activeTexture(gl.TEXTURE1);
gl.bindTexture(gl.TEXTURE_2D, meshData_2.texture_B);
gl.uniform1i(gl.getUniformLocation(meshData_2.material.shaderProgram, 'bitmap_2'), 1);
gl.drawElements(gl.TRIANGLES, meshData_2.indices.length, gl.UNSIGNED_SHORT, 0);
gl.useProgram(null);
// Schedule the next frame, passing 'now' as the next frame's 'before'.
requestAnimationFrame(function(){render(gl, meshData_1, meshData_2, now);});
}
main();
答案 0（得分：1）
LJ 说得对：WebGL 是一个状态机——它有一大堆状态，在你把它设置成别的值之前会一直保持不变——但我想再补充几点。
可以说，最佳实践是尽量避免在着色器里使用分支。
如果你需要着色器有时做这件事、有时做那件事，通常更好的做法是写成两个着色器：一个只做这件事，另一个只做那件事。Unreal、Unity 之类的引擎会生成成千上万个着色器变体，连 three.js 也会生成很多变体。
在非一致（non-uniform）的分支里进行纹理采样会出问题（will break）。
该链接的重要部分是
如果与 someOtherSampler 关联的纹理使用了任何形式的 mipmap 或各向异性过滤，那么任何需要隐式导数的纹理函数，在片段着色器中的一致（uniform）控制流之外调用时，都会得到未定义的结果。……
注意:GLSL编译器不会为此提供错误。这是完全合法的GLSL代码,并且仅基于与someOtherSampler关联的纹理和采样器对象产生未定义的行为。
换句话说,这段代码
vec4 finalPx = vec4(0.0, 0.0, 0.0, 1.0);
if(has_tex_1 > 0.0)
{
finalPx += texture2D(tex_1, uv_1) * 0.5;
}
if(has_tex_2 > 0.0)
{
finalPx += texture2D(tex_2, uv_2) * 0.5;
}
真的需要这样
vec4 finalPx = vec4(0.0, 0.0, 0.0, 1.0);
vec4 color1 = texture2D(tex_1, uv_1) * 0.5;
vec4 color2 = texture2D(tex_2, uv_2) * 0.5;
if(has_tex_1 > 0.0)
{
finalPx += color1;
}
if(has_tex_2 > 0.0)
{
finalPx += color2;
}
也就是说，把用 uv_1 和 uv_2 从纹理里采样的部分移出条件判断、无条件执行，只让“是否把结果累加进去”受条件控制。
您通常可以设计一个着色器,以便可以在不分支的情况下使用它。
在您的示例中,您可以这样做
vec4 finalPx = vec4(0.0, 0.0, 0.0, 1.0);
finalPx += texture2D(tex_1, uv_1) * 0.5;
finalPx += texture2D(tex_2, uv_2) * 0.5;
然后只需像这样创建一张 1×1 的黑色纹理：
const blackTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, blackTexture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE,
new Uint8Array([0, 0, 0, 0]));
现在，当你只想使用一张纹理时，把 blackTexture 绑定到另一个纹理单元即可。因为它的值是 0,0,0,0，着色器里的数学运算只是加上 0.0，不会产生任何效果。这样也就不再需要 has_tex_1 和 has_tex_2 了。
这比依赖“未绑定的纹理恰好返回 0,0,0,1”要好得多——那样做还会在浏览器中产生警告。
答案 1（得分：0）
WebGL是状态机,您设置的每个状态都会保留,直到您设置了其他内容为止。如果要取消绑定纹理,则必须使用:
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, null);
从未绑定的纹理单元读取的任何内容都会产生vec4(0,0,0,1)