WebGL Droste效果

时间:2019-07-01 18:55:36

标签: webgl

我正在尝试使用WebGL在立方体的表面上实现Droste效果(Droste effect)。视口中只有一个网格——一个立方体,它的所有面共享同一张纹理。为了实现Droste效果,我在每一帧更新纹理:实际上就是对即将绘制的WebGL画布拍一张快照,随着快照中嵌套的历史帧越来越多,随时间推移就会形成Droste效果。

这里有一个我现在正在操作的演示:

https://tomashubelbauer.github.io/webgl-op-1/?cubeTextured

有问题的代码如下:

// Set up fragment and vertex shader and attach them to a program, link the program
// Create a vertex buffer, an index buffer and a texture coordinate buffer
// Tesselate the cube's vertices and fill in the index and texture coordinate buffers

// Scratch 2D canvas used to snapshot the WebGL canvas each frame.
const textureCanvas = document.createElement('canvas');
textureCanvas.width = 256;
textureCanvas.height = 256;
const textureContext = textureCanvas.getContext('2d');

// Create the texture ONCE, outside the render loop. The original code called
// `createTexture` every frame without ever deleting the old texture, which
// leaks GPU memory and forces a fresh allocation per frame.
const texture = context.createTexture();

// In every `requestAnimationFrame`:
// Snapshot the WebGL canvas into the 2D canvas, then re-upload it into the
// (reused) texture object.
textureContext.drawImage(context.canvas, 0, 0);
context.bindTexture(context.TEXTURE_2D, texture);
context.texImage2D(context.TEXTURE_2D, 0, context.RGBA, context.RGBA, context.UNSIGNED_BYTE, textureCanvas);
context.generateMipmap(context.TEXTURE_2D);
// Clear the viewport completely (depth and color buffers)
// Set up attribute and uniform values, the projection and model view matrices
context.activeTexture(context.TEXTURE0);
context.bindTexture(context.TEXTURE_2D, texture);
context.uniform1i(fragmentShaderTextureSamplerUniformLocation, 0);
context.drawElements(context.TRIANGLES, 36, context.UNSIGNED_SHORT, 0);

以上就是全部内容:有一个独立于WebGL画布的2D画布,每一帧先把WebGL画布绘制到它上面,然后用它作为该帧的纹理来源;纹理根据纹理坐标缓冲区以及传给片段着色器的纹理采样器uniform应用到立方体表面(片段着色器只执行 gl_FragColor = texture2D(textureSampler, textureCoordinate))。

但这太慢了(在这个只有一个立方体网格的简单演示中帧率掉到了约30 FPS,而我其他三角形数量高出一个数量级的演示仍能跑满 requestAnimationFrame 限制的60 FPS)。

当我觉得单独使用WebGL应该可以实现时,通过使用外部画布在WebGL的“外部”进行操作也很奇怪。

我知道WebGL保留两个缓冲区:一个用于当前帧,另一个是保存最近已绘制帧的后备缓冲区(back buffer),两者在每帧之间交换以实现即时的屏幕更新。是否有办法访问这个后备缓冲区并将其用作纹理?您能否提供示例代码说明如何实现?

1 个答案:

答案 0 :(得分:1)

来自this article

通常的方法是通过将纹理附加到帧缓冲区来渲染纹理。

const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(
    gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0 /* level */);

现在可以渲染到纹理

// Create a framebuffer and attach the texture `tex` as its color attachment,
// so subsequent draws render into the texture instead of the canvas.
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(
    gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0 /* level */) 

渲染到画布

// Render to the texture: bind its framebuffer and size the viewport to the texture.
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.viewport(0, 0, textureWidth, textureHeight);

要执行所需的操作,您需要2个纹理,因为您无法同时读取和写入相同的纹理,因此您只能说

  • 将图像绘制到TextureA
  • 将前一帧(TextureB)绘制到TextureA
  • 使用TextureA作为纹理,将立方体绘制到TextureB
  • 将TextureB绘制到画布上

// Render to the canvas again: binding null restores the default framebuffer;
// the viewport is set back to the canvas size.
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
"use strict";

// Droste-effect demo: ping-pongs between two framebuffer-backed textures so
// the cube is always textured with the previous frame's rendering (a texture
// cannot be read and written at the same time).
function main() {
  const m4 = twgl.m4;
  const gl = document.querySelector('canvas').getContext('webgl');

  // Minimal shader pair: transform positions by a single matrix, sample a
  // single texture with the interpolated texture coordinates.
  const vs = `
  attribute vec4 position;
  attribute vec2 texcoord;
  uniform mat4 u_matrix;
  varying vec2 v_texcoord;
  void main() {
    gl_Position = u_matrix * position;
    v_texcoord = texcoord;
  }
  `;
  
  const fs = `
  precision mediump float;
  varying vec2 v_texcoord;
  uniform sampler2D u_tex;
  void main() {
    gl_FragColor = texture2D(u_tex, v_texcoord);
  }
  `;
  
  // compile shaders, link program, look up locations
  const programInfo = twgl.createProgramInfo(gl, [vs, fs]);

  // gl.createBuffer, gl.bufferData for positions and texcoords of a cube
  const cubeBufferInfo = twgl.primitives.createCubeBufferInfo(gl, 1);
  // gl.createBuffer, gl.bufferData for positions and texcoords of a quad
  const quadBufferInfo = twgl.primitives.createXYQuadBufferInfo(gl, 2);

  // all the normal stuff for setting up a texture
  const imageTexture = twgl.createTexture(gl, {
    src: 'https://i.imgur.com/ZKMnXce.png',
  });

  // Creates a width x height RGBA texture and a framebuffer with that texture
  // as its color attachment, so the texture can be rendered into.
  function makeFramebufferAndTexture(gl, width, height) {
    const framebuffer = gl.createFramebuffer();
    gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
    
    const texture = gl.createTexture();
    gl.bindTexture(gl.TEXTURE_2D, texture);
    gl.texImage2D(gl.TEXTURE_2D,
       0,       // level
       gl.RGBA, // internal format
       width,
       height,
       0,       // border
       gl.RGBA, // format
       gl.UNSIGNED_BYTE, // type
       null,    // data (no data needed)
    );
    // LINEAR min filter: the texture is "complete" without mipmaps.
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
    
    gl.framebufferTexture2D(
       gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
       gl.TEXTURE_2D, texture, 0 /* level */);
  
    // note: depending on what you're rendering you might want to attach
    // a depth renderbuffer or depth texture. See linked article
    
    return {
      framebuffer,
      texture,
      width,
      height,
    };
  }
  
  // Binds fbi's framebuffer (or the canvas when fbi is null) and sets the
  // viewport to match its size.
  function bindFramebufferAndSetViewport(gl, fbi) {
    gl.bindFramebuffer(gl.FRAMEBUFFER, fbi ? fbi.framebuffer : null);
    const {width, height} = fbi || gl.canvas;
    gl.viewport(0, 0, width, height);
  }

  // The two ping-pong render targets (never reassigned, hence const).
  const fbiA = makeFramebufferAndTexture(gl, 512, 512);
  const fbiB = makeFramebufferAndTexture(gl, 512, 512);
  
  // Step 1: compose the cube's texture for this frame into B — the source
  // image full-size, then the previous frame (A) scaled down on top of it.
  function drawImageAndPreviousFrameToTextureB() {
    bindFramebufferAndSetViewport(gl, fbiB);
    
    // calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
    // for each attribute
    twgl.setBuffersAndAttributes(gl, programInfo, quadBufferInfo);

    // calls gl.activeTexture, gl.bindTexture, gl.uniform 
    twgl.setUniforms(programInfo, {
      u_tex: imageTexture,
      u_matrix: m4.identity(),
    });

    // calls gl.drawArrays or gl.drawElements
    twgl.drawBufferInfo(gl, quadBufferInfo);
    
    // ---------
    
    // draw previous cube texture into current cube texture
    {
      twgl.setUniforms(programInfo, {
        u_tex: fbiA.texture,
        u_matrix: m4.scaling([0.8, 0.8, 1]),
      });
      twgl.drawBufferInfo(gl, quadBufferInfo);
    }
  }    
    
  // Step 2: draw the spinning cube, textured with B, into A.
  function drawTexturedCubeToTextureA(time) {
    // ---------   
    // draw cube to "new" dstFB using srcFB.texture on cube
    bindFramebufferAndSetViewport(gl, fbiA);
    // the FBO has no depth attachment, so only color needs clearing
    gl.clear(gl.COLOR_BUFFER_BIT);
    
    twgl.setBuffersAndAttributes(gl, programInfo, cubeBufferInfo);
    
    {
      const fov = 60 * Math.PI / 180;
      const aspect = fbiA.width / fbiA.height;
      const near = 0.1;
      const far = 100;
      let mat = m4.perspective(fov, aspect, near, far); 
      mat = m4.translate(mat, [0, 0, -2]);
      mat = m4.rotateX(mat, time);
      mat = m4.rotateY(mat, time * 0.7);

      twgl.setUniforms(programInfo, {
        u_tex: fbiB.texture,
        u_matrix: mat,
      });
    }
    
    twgl.drawBufferInfo(gl, cubeBufferInfo);
  }
  
  // Step 3: present A on the canvas with an aspect-correct orthographic quad.
  function drawTextureAToCanvas() {
    // --------
    // draw dstFB.texture to canvas
    bindFramebufferAndSetViewport(gl, null);
    
    twgl.setBuffersAndAttributes(gl, programInfo, quadBufferInfo);
    
    {
      const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
      const near = -1;
      const far = 1;
      let mat = m4.ortho(-aspect, aspect, -1, 1, near, far);

      twgl.setUniforms(programInfo, {
        u_tex: fbiA.texture,
        u_matrix: mat,
      });
    }
    
    twgl.drawBufferInfo(gl, quadBufferInfo);
  }  
  
  function render(time) {
    time *= 0.001; // convert to seconds;
    
    twgl.resizeCanvasToDisplaySize(gl.canvas);
    
    gl.enable(gl.DEPTH_TEST);
    gl.enable(gl.CULL_FACE);
    
    // there's only one shader program so let's set it here
    gl.useProgram(programInfo.program);
  
    drawImageAndPreviousFrameToTextureB();
    drawTexturedCubeToTextureA(time);
    drawTextureAToCanvas();
  
    requestAnimationFrame(render);
  }
  requestAnimationFrame(render);
}

main();
/* Make the canvas fill the viewport with no margins or scrollbars. */
body { margin: 0; }
canvas { width: 100vw; height: 100vh; display: block; }

对于画布及其2个缓冲区:不能直接把它们用作纹理。不过您可以调用 gl.copyTexImage2D 或 gl.copyTexSubImage2D 将画布的一部分复制到纹理中,这是另一种解决方案;它不够灵活,而且我相信比帧缓冲区方法更慢。

运行上面代码段所需的HTML:

<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas></canvas>