How to use framebuffers in WebGL?

Asked: 2017-01-24 09:34:13

Tags: javascript webgl

I have been trying to understand framebuffers in WebGL/OpenGL ES. I know that we can blend multiple textures using a framebuffer.

So, to understand it, I wrote a sample using a 1×1 texture and tried to apply the framebuffer logic on top of it. But it didn't work.

See the snippet at the bottom: if you click "Mix Red and Blue", the image is not rendered. What am I doing wrong?

Code:

var canvas, gl, attrPosition, texture, program, vertexBuffer, textureBuffer, vertices, texVertices, attrPos, attrTexPos, textures = [], framebuffers = [];

canvas = document.getElementById('canvas');
    gl = getWebGL();
    vertices = new Float32Array([
        -1.0, -1.0,  
         1.0, -1.0, 
         1.0,  1.0, 
        -1.0,  1.0, 
        -1.0, -1.0, 
    ]);

    texVertices = new Float32Array([
        0.0, 0.0,
        1.0, 0.0,
        1.0, 1.0,
        0.0, 1.0,
        0.0, 0.0
    ]);
    var getProgram = function () {
        var vs = createVertexShader([
            'attribute vec2 attrPos;',
            'attribute vec2 attrTexPos;',
            'varying highp vec2 vTexCoord;',
            'void main() {',
                '\tgl_Position = vec4(attrPos, 0.0, 1.0);',
            '}'
        ].join('\n'));

        var fs = createFragmentShader([
            'varying highp vec2 vTexCoord;',
            'uniform sampler2D uImage;',
            'void main() {',
                '\tgl_FragColor = texture2D(uImage, vTexCoord);',
            '}'
        ].join('\n'));
        return createAndLinkPrograms(vs, fs);
    };

    var render = function () {
        gl.clear(gl.DEPTH_BUFFER_BIT|gl.COLOR_BUFFER_BIT);
        gl.bindTexture(gl.TEXTURE_2D, texture);
        gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
        gl.vertexAttribPointer(attrPos, 2, gl.FLOAT, gl.FALSE, 0, 0);
        gl.bindBuffer(gl.ARRAY_BUFFER, textureBuffer);
        gl.vertexAttribPointer(attrTexPos, 2, gl.FLOAT, gl.FALSE, 0, 0);
        gl.drawArrays(gl.TRIANGLE_STRIP, 0, 5);
    };
    if (gl) {
        gl.clearColor(0.1, 0.5, 1.0, 1.0);
        render();
        program = getProgram();
        texture = createAndSetupTexture();
        vertexBuffer = createAndBindBuffer(vertices, gl.ARRAY_BUFFER);
        attrPos = gl.getUniformLocation(program, 'attrPos');
        gl.enableVertexAttribArray(attrPos);

        textureBuffer = createAndBindBuffer(texVertices, gl.ARRAY_BUFFER);
        attrTexPos = gl.getUniformLocation(program, 'attrTexPos');
        gl.enableVertexAttribArray(attrTexPos);

        gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([123, 0, 60, 255]));
        render();
    }

    var initPingPongTextures = function(textures, framebuffers) {
        for (var i = 0; i < 2; ++i) {
            var tex = createAndSetupTexture(gl);
            textures.push(tex);
            // make the texture the same size as the image
            gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
            // Create a framebuffer
            var fbo = gl.createFramebuffer();
            framebuffers.push(fbo);
            gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
            // Attach a texture to it.
            gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
        }
    }

    var setFramebuffer = function(fbo, width, height) {
        gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
        gl.viewport(0, 0, width, height);
    };

    var mixRedAndBlue = function () {
        gl.activeTexture(gl.TEXTURE0);
        gl.bindTexture(gl.TEXTURE_2D, texture);

        setFramebuffer(framebuffers[0], 1, 1);
        gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([255, 0, 0, 255]));
        render();
        gl.bindTexture(gl.TEXTURE_2D, textures[0]);

        setFramebuffer(framebuffers[1], 1, 1);
        gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([0, 255, 0, 255]));
        render();
        gl.bindTexture(gl.TEXTURE_2D, textures[1]);

        setFramebuffer(null, 1, 1);
        render();
    };

var getWebGLContext = function(canvas) {
	var webglContextParams = ['webgl', 'experimental-webgl', 'webkit-3d', 'moz-webgl'];
	var webglContext = null;
	for (var index = 0; index < webglContextParams.length; index++) {
		try {
			webglContext = canvas.getContext(webglContextParams[index]);
			if(webglContext) {
				//breaking as we got our context
				break;
			}
		} catch (E) {
			console.log(E);
		}
	}
	if(webglContext === null) {
		alert('WebGL is not supported on your browser.');
	} else {
		//WebGL is supported in your browser, lets render the texture
	}
	fillGLForCleanUp(webglContext);
	return webglContext;
}
var createVertexShader = function (vertexShaderSource) {
	console.log(vertexShaderSource);
	var vertexShader = gl.createShader(gl.VERTEX_SHADER);
	gl.shaderSource(vertexShader, vertexShaderSource);
	gl.compileShader(vertexShader);
	return vertexShader;
}

var createFragmentShader = function (fragmentShaderSource) {
	console.log(fragmentShaderSource);
	var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
	gl.shaderSource(fragmentShader, fragmentShaderSource);
	gl.compileShader(fragmentShader);
	return fragmentShader;
}


var createAndLinkPrograms = function (vertexShader, fragmentShader) {
	var program = gl.createProgram();
	gl.attachShader(program, vertexShader);
	gl.attachShader(program, fragmentShader);
	gl.linkProgram(program);
    if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
        alert('Could not initialise shaders');
    }
	gl.useProgram(program);
	return program;
}

var createAndBindBuffer = function (verticesOrIndices, bufferType) {
	var buffer = gl.createBuffer();
	gl.bindBuffer(bufferType, buffer);
	gl.bufferData(bufferType, verticesOrIndices, gl.STATIC_DRAW);
	//clear memory
	gl.bindBuffer(bufferType, null);
	return buffer;
}

var allowAllImageSizes = function() {
	  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
	  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
	  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
	  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
	 // gl.bindTexture(gl.TEXTURE_2D, null);
} 

var createAndSetupTexture = function() {
	var texture = gl.createTexture();
	gl.bindTexture(gl.TEXTURE_2D, texture);
	allowAllImageSizes();
	gl.textures.push(texture);
	return texture;
}

var getWebGL = function (canvas, width, height) {
	if(!canvas) {
		canvas = document.createElement('canvas');
		canvas.id = 'canvas';
		canvas.width = !width ? 512 : width;
		canvas.height = !height ? 512 : height;
		document.body.appendChild(canvas);
	} else {
		canvas.width = !width ? 512 : width;
		canvas.height = !height ? 512 : height;
	}
	return getWebGLContext(canvas);
}

var fillGLForCleanUp = function (gl) {
	gl.textures = [];
	gl.framebuffers = [];
	gl.array_buffer = [];
	gl.element_array_buffers = [];
}


var canvas, gl, attrPosition, texture, program, 
vertexBuffer, textureBuffer, vertices, texVertices,
attrPos, attrTexPos, textures = [], framebuffers = [];
canvas = document.getElementById('canvas');
gl = getWebGL(canvas);
vertices = new Float32Array([
	-1.0, -1.0,  
	 1.0, -1.0, 
	 1.0,  1.0, 
	-1.0,  1.0, 
	-1.0, -1.0, 
]);

texVertices = new Float32Array([
	0.0, 0.0,
	1.0, 0.0,
	1.0, 1.0,
	0.0, 1.0,
	0.0, 0.0
]);
var getProgram = function () {
	var vs = createVertexShader([
		'attribute vec2 attrPos;',
		'attribute vec2 attrTexPos;',
		'varying highp vec2 vTexCoord;',
		'void main() {',
			'\tgl_Position = vec4(attrPos, 0.0, 1.0);',
		'}'
	].join('\n'));

	var fs = createFragmentShader([
		'varying highp vec2 vTexCoord;',
		'uniform sampler2D uImage;',
		'void main() {',
			'\tgl_FragColor = texture2D(uImage, vTexCoord);',
		'}'
	].join('\n'));
	return createAndLinkPrograms(vs, fs);
};

var render = function () {
	gl.clear(gl.DEPTH_BUFFER_BIT|gl.COLOR_BUFFER_BIT);
	gl.bindTexture(gl.TEXTURE_2D, texture);
	gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
	gl.vertexAttribPointer(attrPos, 2, gl.FLOAT, gl.FALSE, 0, 0);
	gl.bindBuffer(gl.ARRAY_BUFFER, textureBuffer);
	gl.vertexAttribPointer(attrTexPos, 2, gl.FLOAT, gl.FALSE, 0, 0);
	gl.drawArrays(gl.TRIANGLE_STRIP, 0, 5);
};
if (gl) {
	gl.clearColor(0.1, 0.5, 1.0, 1.0);
	render();
	program = getProgram();
	texture = createAndSetupTexture();
	vertexBuffer = createAndBindBuffer(vertices, gl.ARRAY_BUFFER);
	attrPos = gl.getUniformLocation(program, 'attrPos');
	gl.enableVertexAttribArray(attrPos);

	textureBuffer = createAndBindBuffer(texVertices, gl.ARRAY_BUFFER);
	attrTexPos = gl.getUniformLocation(program, 'attrTexPos');
	gl.enableVertexAttribArray(attrTexPos);

	gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([123, 0, 60, 255]));
	render();
}

var initPingPongTextures = function(textures, framebuffers) {
	for (var i = 0; i < 2; ++i) {
		var tex = createAndSetupTexture(gl);
		textures.push(tex);
		// make the texture the same size as the image
		gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
		// Create a framebuffer
		var fbo = gl.createFramebuffer();
		framebuffers.push(fbo);
		gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
		// Attach a texture to it.
		gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
	}
}

var setFramebuffer = function(fbo, width, height) {
	gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
	gl.viewport(0, 0, width, height);
};

var mixRedAndBlue = function () {
	gl.activeTexture(gl.TEXTURE0);
	gl.bindTexture(gl.TEXTURE_2D, texture);

	setFramebuffer(framebuffers[0], 1, 1);
	gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([255, 0, 0, 255]));
	render();
	gl.bindTexture(gl.TEXTURE_2D, textures[0]);

	setFramebuffer(framebuffers[1], 1, 1);
	gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([0, 255, 0, 255]));
	render();
	gl.bindTexture(gl.TEXTURE_2D, textures[1]);

	setFramebuffer(null, 1, 1);
	render();
};
<button id="redImg" onclick="mixRedAndBlue()">Mix Red and blue</button><hr/>
<canvas id="canvas" width=512 height=512></canvas>

Edit 1:

I'm trying to achieve the same thing with multiple programs using multiple fragment shaders, because if/else statements are discouraged inside a fragment shader, since it runs for every pixel.

Shaders.prototype.VS_Base = [
        'attribute vec3 verticesPosition;',
        'attribute vec2 texturePosition;',
        'varying highp vec2 vTextureCoord;',
        'void main(void) {',
        '\tgl_Position = vec4(verticesPosition * vec3(1.0, -1.0, 1.0), 0.5);',
        '\tvTextureCoord = texturePosition;',
        '}'
].join('\n');

Shaders.prototype.FS_Base_Image_RED = [
        '#ifdef GL_ES',
        'precision highp float;',
        '#endif',
        'uniform sampler2D uImage;',
        'varying highp vec2 vTextureCoord;',
        'void main (void) {',
        '\tgl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);//texture2D(uImage, vTextureCoord);',
        '}'
].join('\n');

Shaders.prototype.FS_Base_Image_BLUE = [
        '#ifdef GL_ES',
        'precision highp float;',
        '#endif',
        'uniform sampler2D uImage;',
        'varying highp vec2 vTextureCoord;',
        'void main (void) {',
        '\tgl_FragColor = vec4(0.0, 0.0, 1.0, 1.0);//texture2D(uImage, vTextureCoord);',
        '}'
].join('\n');

Now that I have two separate programs with separate fragment shaders, I need to use framebuffers to mix the red and the blue. I'm not looking for mix(), because the real scenario is quite complex, which is exactly why I'm using multiple programs with separate fragment shaders instead of conditional if/else statements.
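The flow I'm after is roughly the following (an outline only; programRed, programBlue, combineProgram, fb0/fb1 and texWidth/texHeight are placeholder names for the programs built from the shaders above, framebuffers with textures[0]/textures[1] attached, and a third program that samples both textures — the first answer below shows a working version of this pattern):

// 1. render the "red" pass into the first framebuffer's texture
gl.useProgram(programRed);
gl.bindFramebuffer(gl.FRAMEBUFFER, fb0);
gl.viewport(0, 0, texWidth, texHeight);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 5);   // result now lives in textures[0]

// 2. render the "blue" pass into the second framebuffer's texture
gl.useProgram(programBlue);
gl.bindFramebuffer(gl.FRAMEBUFFER, fb1);
gl.viewport(0, 0, texWidth, texHeight);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 5);   // result now lives in textures[1]

// 3. draw to the canvas with a program that samples BOTH textures
//    (attribute and sampler-uniform setup omitted for brevity)
gl.useProgram(combineProgram);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, textures[0]);
gl.activeTexture(gl.TEXTURE1);
gl.bindTexture(gl.TEXTURE_2D, textures[1]);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 5);   // final combined result on the canvas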

2 Answers:

Answer 0 (score: 4)

It's not clear what you're trying to do. A framebuffer is just a list of attachments (textures and renderbuffers). You use them to render to a texture and/or a renderbuffer. Then you can use the texture you just rendered to as input to some other render.

Here's an example with no framebuffers. It blends 2 textures.

var vs = `
attribute vec4 position;

varying vec2 v_texcoord;

void main() {
  gl_Position = position;
  v_texcoord = position.xy * .5 + .5;
}
`;

var fs = `
precision mediump float;

varying vec2 v_texcoord;

uniform sampler2D tex1;
uniform sampler2D tex2;

void main() {
  vec4 color1 = texture2D(tex1, v_texcoord);
  vec4 color2 = texture2D(tex2, v_texcoord);
  gl_FragColor = mix(color1, color2, 0.5);
}
`;

const gl = document.querySelector("canvas").getContext("webgl");
const program = twgl.createProgramFromSources(gl, [vs, fs]);

// make 2 textures with canvas 2d
const ctx = document.createElement("canvas").getContext("2d");
ctx.canvas.width = 64;
ctx.canvas.height = 64;

// first texture has a circle
ctx.fillStyle = "blue";
ctx.fillRect(0, 0, 64, 64);
ctx.strokeStyle = "yellow";
ctx.beginPath();
ctx.arc(32, 32, 20, 0, Math.PI * 2, false);
ctx.lineWidth = 12;
ctx.stroke();

const tex1 = createTextureFromCanvas(gl, ctx.canvas);

// second texture has a diamond (diagonal square)
ctx.fillStyle = "red";
ctx.fillRect(0, 0, 64, 64);
ctx.fillStyle = "cyan";
ctx.beginPath();
ctx.moveTo(32, 6);
ctx.lineTo(58, 32);
ctx.lineTo(32, 58);
ctx.lineTo(6, 32);
ctx.lineTo(32, 6);
ctx.fill();

const tex2 = createTextureFromCanvas(gl, ctx.canvas);

const buf = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buf);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
  -1, -1,
   1, -1,
  -1,  1,
  -1,  1,
   1, -1,
   1,  1,
]), gl.STATIC_DRAW);


const positionLoc = gl.getAttribLocation(program, "position");
gl.enableVertexAttribArray(positionLoc);
gl.vertexAttribPointer(positionLoc, 2, gl.FLOAT, false, 0, 0);

const tex1Loc = gl.getUniformLocation(program, "tex1");  
const tex2Loc = gl.getUniformLocation(program, "tex2");  
  
gl.useProgram(program);

gl.uniform1i(tex1Loc, 0);
gl.uniform1i(tex2Loc, 1);
gl.activeTexture(gl.TEXTURE0 + 0);
gl.bindTexture(gl.TEXTURE_2D, tex1);
gl.activeTexture(gl.TEXTURE0 + 1);
gl.bindTexture(gl.TEXTURE_2D, tex2);

gl.drawArrays(gl.TRIANGLES, 0, 6);

function createTextureFromCanvas(gl, canvas) {
  const tex = gl.createTexture();
  gl.bindTexture(gl.TEXTURE_2D, tex);
  gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, ctx.canvas);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
  return tex;
}
canvas { border: 1px solid black; }
<script src="https://twgljs.org/dist/2.x/twgl.min.js"></script>
<canvas></canvas>

For your purposes the mixing part is no different; the only difference is where the textures get their data. Above, the textures were created with a 2D canvas. Instead, you can use a framebuffer to render to a texture. AFTER you've rendered to the texture, you can then use that texture in some other render, just like above.

To render to a texture, first you create a framebuffer

var fb = gl.createFramebuffer();

Then you attach a texture to it

gl.bindFramebuffer(gl.FRAMEBUFFER, fb);  
gl.framebufferTexture2D(
    gl.FRAMEBUFFER, 
    gl.COLOR_ATTACHMENT0,  // attach texture as COLOR_ATTACHMENT0
    gl.TEXTURE_2D,         // attach a 2D texture
    someTexture,           // the texture to attach
    0);                    // the mip level to render to (must be 0 in WebGL1)

Depending on your attachments, you should check whether they work.

if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) !== gl.FRAMEBUFFER_COMPLETE) {
  // these attachments don't work
}

The WebGL spec lists 3 combinations of attachments that are guaranteed to work. The example below uses one of those combinations, so there's no need to check.

Now, if you bind the framebuffer

gl.bindFramebuffer(gl.FRAMEBUFFER, fb);

then when you call any gl.drawXXX function or gl.clear, it will draw to someTexture instead of the canvas. To start drawing to the canvas again, bind null

gl.bindFramebuffer(gl.FRAMEBUFFER, null);

Remember that if the canvas and the texture are different sizes, you'll need to call gl.viewport to render correctly.
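For example, a minimal sketch of that target switch (assuming fb is a framebuffer with a 64×64 texture attached; the names are only illustrative):

// render into the 64x64 texture attached to fb
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.viewport(0, 0, 64, 64);                             // match the attached texture's size
// ... gl.clear / gl.drawXXX calls here go to the texture ...

// switch back to the canvas and restore the viewport
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);  // match the canvas size
// ... gl.clear / gl.drawXXX calls here go to the canvas ...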

var vs = `
attribute vec4 position;

uniform mat4 matrix;

varying vec2 v_texcoord;

void main() {
  gl_Position = matrix * position;
  v_texcoord = position.xy * .5 + .5;
}
`;

var colorFS = `
precision mediump float;

uniform vec4 color;

void main() {
  gl_FragColor = color;
}
`;

var mixFS = `
precision mediump float;

varying vec2 v_texcoord;

uniform sampler2D tex1;
uniform sampler2D tex2;

void main() {
  // probably should use different texture coords for each
  // texture for more flexibility but I'm lazy
  vec4 color1 = texture2D(tex1, v_texcoord);
  vec4 color2 = texture2D(tex2, v_texcoord);
  gl_FragColor = mix(color1, color2, 0.5);
}
`;

const gl = document.querySelector("canvas").getContext("webgl");
const colorProgram = twgl.createProgramFromSources(gl, [vs, colorFS]);
const mixProgram = twgl.createProgramFromSources(gl, [vs, mixFS]);

// make 2 textures by attaching them to framebuffers and rendering to them
const texFbPair1 = createTextureAndFramebuffer(gl, 64, 64);
const texFbPair2 = createTextureAndFramebuffer(gl, 64, 64);

const buf = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buf);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
  -1, -1,
   1, -1,
  -1,  1,
  -1,  1,
   1, -1,
   1,  1,
]), gl.STATIC_DRAW);

function setAttributes(buf, positionLoc) {
  gl.enableVertexAttribArray(positionLoc);
  gl.vertexAttribPointer(positionLoc, 2, gl.FLOAT, false, 0, 0);
}
  
const colorPrgPositionLoc = gl.getAttribLocation(colorProgram, "position");
setAttributes(buf, colorPrgPositionLoc);
const colorLoc = gl.getUniformLocation(colorProgram, "color");
const colorProgMatrixLoc = gl.getUniformLocation(colorProgram, "matrix");

// draw red rect to first texture through the framebuffer it's attached to
gl.useProgram(colorProgram);
  
gl.bindFramebuffer(gl.FRAMEBUFFER, texFbPair1.fb);
gl.viewport(0, 0, 64, 64);
gl.uniform4fv(colorLoc, [1, 0, 0, 1]);
gl.uniformMatrix4fv(colorProgMatrixLoc, false, [
  0.5, 0, 0, 0,
    0,.25, 0, 0,
    0, 0, 1, 0,
   .2,.3, 0, 1,
]);

gl.drawArrays(gl.TRIANGLES, 0, 6);

// Draw a blue rect to the second texture through the framebuffer it's attached to
gl.bindFramebuffer(gl.FRAMEBUFFER, texFbPair2.fb);
gl.viewport(0, 0, 64, 64);
gl.uniform4fv(colorLoc, [0, 0, 1, 1]);
gl.uniformMatrix4fv(colorProgMatrixLoc, false, [
  0.25, 0, 0, 0,
    0,.5, 0, 0,
    0, 0, 1, 0,
   .2,.3, 0, 1,
]);

gl.drawArrays(gl.TRIANGLES, 0, 6);

// Draw both textures to the canvas
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
  
const mixPrgPositionLoc = gl.getAttribLocation(mixProgram, "position");
setAttributes(buf, mixPrgPositionLoc);
  
const mixProgMatrixLoc = gl.getUniformLocation(mixProgram, "matrix");

const tex1Loc = gl.getUniformLocation(mixProgram, "tex1");  
const tex2Loc = gl.getUniformLocation(mixProgram, "tex2");  
  
gl.useProgram(mixProgram);

gl.uniform1i(tex1Loc, 0);
gl.uniform1i(tex2Loc, 1);
gl.activeTexture(gl.TEXTURE0 + 0);
gl.bindTexture(gl.TEXTURE_2D, texFbPair1.tex);
gl.activeTexture(gl.TEXTURE0 + 1);
gl.bindTexture(gl.TEXTURE_2D, texFbPair2.tex);
gl.uniformMatrix4fv(mixProgMatrixLoc, false, [
    1, 0, 0, 0,
    0, 1, 0, 0,
    0, 0, 1, 0,
    0, 0, 0, 1,
]);  

gl.drawArrays(gl.TRIANGLES, 0, 6);

function createTextureAndFramebuffer(gl, width, height) {
  const tex = gl.createTexture();
  gl.bindTexture(gl.TEXTURE_2D, tex);
  gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
  const fb = gl.createFramebuffer();
  gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
  gl.framebufferTexture2D(
     gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
  return {tex: tex, fb: fb};
}
canvas { border: 1px solid black; }
<script src="https://twgljs.org/dist/2.x/twgl.min.js"></script>
<canvas></canvas>

The only functional difference between the first example and the second is how the textures get their data. In the first example the textures get their data from a 2D canvas. In the second example the textures get their data by rendering to them with WebGL.

As for why your example isn't mixing the textures: in order to mix 2 textures you need a shader that uses both textures.
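For reference, a minimal sketch of such a shader in the style of the question's code (fsMix, mixProgram, uImage2 and vsWithTexCoord are made-up names for illustration, not something from the original post):

var fsMix = createFragmentShader([
    'precision mediump float;',
    'varying vec2 vTexCoord;',
    'uniform sampler2D uImage;',    // sampled from texture unit 0
    'uniform sampler2D uImage2;',   // sampled from texture unit 1
    'void main() {',
    '\tvec4 a = texture2D(uImage, vTexCoord);',
    '\tvec4 b = texture2D(uImage2, vTexCoord);',
    '\tgl_FragColor = (a + b) * 0.5;',   // or whatever combine logic you need
    '}'
].join('\n'));
var mixProgram = createAndLinkPrograms(vsWithTexCoord, fsMix);  // vsWithTexCoord: a vertex shader that actually writes vTexCoord

// at draw time, bind one texture per unit and point each sampler at its unit
gl.useProgram(mixProgram);
gl.uniform1i(gl.getUniformLocation(mixProgram, 'uImage'), 0);
gl.uniform1i(gl.getUniformLocation(mixProgram, 'uImage2'), 1);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, textures[0]);
gl.activeTexture(gl.TEXTURE1);
gl.bindTexture(gl.TEXTURE_2D, textures[1]);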

Answer 1 (score: -2)

Make it easy on yourself by using the following code. Take a look at this example, and good luck!

<html>
<body>
<canvas id="myCanvas" width="400" height="400">
</canvas>
<script src="https://www.drawing3d.de/bridge.js"></script>
<script src="https://www.drawing3d.de/WebDrawing3d.js"></script>
<script src="https://www.drawing3d.de/Classes.js"></script>
<script>   
 var canvas = document.getElementById("myCanvas");
 var WebDevice = new Device(canvas);
 WebDevice.BackGround=Color.WhiteSmoke;
 var fbu = new FrameBuffer(WebDevice);
 fbu.BackGround = Color.Red;
 fbu.Init(WebDevice.ViewPortHeight);
 fbu.StartWriting();
 WebDevice.drawSphere(10); // draw something
 fbu.EndWriting();
 var _texture =fbu.FBOTexture;
 WebDevice.ActivateTexture(_texture); // set the texture
 WebDevice.Paint = draw;
 WebDevice.Refresh();
 function draw() {
 WebDevice.drawBox(new xyz(-10,-2,0), new xyz(10,10,1), new xyz(1,1,1));
 }
 </script>
 </body>
 </html>