How do you determine the average scene brightness in WebGL?

Date: 2012-04-15 15:36:36

Tags: image-processing opengl-es opengl-es-2.0 webgl

I'm currently doing straightforward direct-to-screen rendering in WebGL (no multiple passes or post-processing). I would like to determine the average luminance/brightness of the entire rendered image (i.e. a single number), in a way that is efficient enough to do every frame.

What I want to accomplish is an "exposure" adjustment for the scene (as a camera or the human eye would do), so that indoor and outdoor scenes can be viewed with realistic lighting and without jarring transitions: the brightness of the current frame would act as negative feedback on the brightness of the next frame.
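For illustration, the feedback side of this might look roughly like the following (only a sketch; the target value, adaptation rate, and names are placeholders, and avgLuminance stands for whatever per-frame measurement ends up being available):

// Hypothetical per-frame exposure feedback: nudge the exposure toward a target
// average luminance, so a bright frame lowers the exposure used for the next one.
const TARGET_LUMINANCE = 0.5;  // desired mid-grey average (placeholder)
const ADAPT_RATE = 0.05;       // how quickly the "eye" adapts per frame (placeholder)

let exposure = 1.0;

function updateExposure(avgLuminance) {
  const error = TARGET_LUMINANCE - avgLuminance * exposure;
  exposure = Math.max(0.01, exposure + error * ADAPT_RATE);
  return exposure;
}

// each frame, before drawing the scene:
//   gl.uniform1f(exposureLoc, updateExposure(measuredAvgLuminance));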

I'm currently computing a very rough approximation on the CPU by casting a few rays through the scene data and finding the brightness at those points; this works, but there are too few samples for it to be stable (the measured brightness varies noticeably with view angle as rays sweep across light sources). I would rather offload the work to the GPU if at all possible, since my application is usually CPU-bound.

2 answers:

Answer 0 (score: 3)

> I just thought of a horrible kludge, namely rendering to a texture and generating mipmaps on it, then reading the smallest level. I'm hoping there is a better way.

What's wrong with that? It runs almost entirely on the GPU, fits nicely into an existing rendering pipeline, and should give reasonable results. I don't know of any reason to argue against it.
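As a rough sketch of that idea (not from the original answer): in WebGL2 the 1x1 mip level can be attached to a framebuffer and read back directly. Here sceneTex is assumed to be a square power-of-2 RGBA texture the frame was rendered into, and texSize its width/height; in WebGL1 only mip level 0 can be attached to a framebuffer, so there the smallest level would instead be sampled in a shader.

// Sketch: average the whole frame via mipmaps, then read the 1x1 level (WebGL2).
gl.bindTexture(gl.TEXTURE_2D, sceneTex);
gl.generateMipmap(gl.TEXTURE_2D);

const lastLevel = Math.log2(texSize);            // the 1x1 mip
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
                        gl.TEXTURE_2D, sceneTex, lastLevel);

const px = new Uint8Array(4);
gl.readPixels(0, 0, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE, px);    // average color
const avgLuminance = (0.2126 * px[0] + 0.7152 * px[1] + 0.0722 * px[2]) / 255;

Note that readPixels stalls the pipeline; if the average is only fed back into other shaders, the readback can be skipped and the small mip sampled directly, which is essentially what the second answer below does.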

Answer 1 (score: 2)

I know this question is 8 years old, but...

First off, in WebGL1 generateMipmap only works for power-of-2 textures.

I'd suggest (1) generating a simple shader like this:

function createShader(texWidth, texHeight) {
  return `
  precision mediump float;
  uniform sampler2D tex;

  void main() {
    vec2 size = vec2(${texWidth}, ${texHeight});
    float totalBrightness = 0.0;
    float minBrightness = 1.0;
    float maxBrightness = 0.0;
    for (int y = 0; y < ${texHeight}; ++y) {
      for (int x = 0; x < ${texWidth}; ++x) {
        vec4 color = texture2D(tex, (vec2(x, y) + 0.5) / size);
        vec3 adjusted = color.rgb * vec3(0.2126, 0.7152, 0.0722);
        float brightness = adjusted.r + adjusted.g + adjusted.b;
        totalBrightness += brightness;
        minBrightness = min(brightness, minBrightness);
        maxBrightness = max(brightness, maxBrightness);
      }
    }
    float averageBrightness = totalBrightness / (size.x * size.y);
    gl_FragColor = vec4(averageBrightness, minBrightness, maxBrightness, 0);
  }
  `;
}

const startElem = document.querySelector('button');
startElem.addEventListener('click', main, {once: true});

function createShader(texWidth, texHeight) {
  return `
  precision mediump float;
  uniform sampler2D tex;

  void main() {
    vec2 size = vec2(${texWidth}, ${texHeight});
    float totalBrightness = 0.0;
    float minBrightness = 1.0;
    float maxBrightness = 0.0;
    for (int y = 0; y < ${texHeight}; ++y) {
      for (int x = 0; x < ${texWidth}; ++x) {
        vec4 color = texture2D(tex, (vec2(x, y) + 0.5) / size);
        vec3 adjusted = color.rgb * vec3(0.2126, 0.7152, 0.0722);
        float brightness = adjusted.r + adjusted.g + adjusted.b;
        totalBrightness += brightness;
        minBrightness = min(brightness, minBrightness);
        maxBrightness = max(brightness, maxBrightness);
      }
    }
    float averageBrightness = totalBrightness / (size.x * size.y);
    gl_FragColor = vec4(averageBrightness, minBrightness, maxBrightness, 0);
  }
  `;
}

const prgs = {}
function getAverageProgram(gl, width, height) {
  const id = `${width}x${height}`;
  const prg = prgs[id];
  if (prg) {
    return prg;
  }
  const vs = `
  attribute vec4 position;
  void main() {
    gl_Position = position;
  }
  `;
  const fs = createShader(width, height);
  // compile shaders, link program, look up uniforms
  const newPrg = twgl.createProgramInfo(gl, [vs, fs]);
  prgs[id] = newPrg;
  return newPrg;
}


function main() {
  const gl = document.querySelector('canvas').getContext('webgl');
  
  let updateTexture = false;
  const video = document.createElement('video');
  video.crossOrigin = 'anonymous';
  video.loop = true;
  video.src = 'https://webglsamples.org/color-adjust/sample-video.mp4';
  if (video.requestVideoFrameCallback) {
    function update() {
      draw();
      video.requestVideoFrameCallback(update);
    };
    video.requestVideoFrameCallback(update);
  } else {
    function update() {
      if (video.currentTime > 0) {
        draw();
      }
      requestAnimationFrame(update);
    }
    requestAnimationFrame(update);
  }
  video.volume = 0;
  video.play();
  
  // create a 1x1 pixel RGBA/UNSIGNED_BYTE framebuffer
  const fbi = twgl.createFramebufferInfo(gl, [
    { internalFormat: gl.RGBA },
  ], 1, 1);
  
  const tVS = `
  attribute vec4 position;
  attribute vec2 texcoord;
  varying vec2 v_texcoord;
  void main() {
    gl_Position = position;
    v_texcoord = texcoord;
  }
  `;
  const tFS = `
  precision mediump float;
  uniform sampler2D tex;
  varying vec2 v_texcoord;
  void main() {
    gl_FragColor = texture2D(tex, v_texcoord);
  }
  `;
  // compile shaders, link program, look up uniforms
  const textureProgInfo = twgl.createProgramInfo(gl, [tVS, tFS]);
  
  const avgMinMaxVS = `
  attribute float id;
  varying float v_id;
  uniform sampler2D avgMinMaxTex;
  void main() {
    vec4 avgMinMax = texture2D(avgMinMaxTex, vec2(0.5));
    float v = id < 1.0
       ? avgMinMax.x
       : id < 2.0
          ? avgMinMax.y
          : avgMinMax.z;
    gl_Position = vec4(1. - (id + 1.0) / 10., v * 2. - 1., 0, 1);
    gl_PointSize = 10.0;
    v_id = id;
  }
  `;
  const avgMinMaxFS = `
  precision mediump float;
  varying float v_id;
  void main() {
    gl_FragColor = vec4(1., v_id / 2., 1. - v_id / 2., 1);
  }
  `;
  // compile shaders, link program, look up uniforms
  const avgMinMaxPrgInfo = twgl.createProgramInfo(gl, [avgMinMaxVS, avgMinMaxFS]);
  
  const planeBufferInfo = twgl.primitives.createXYQuadBufferInfo(gl);
  const idBufferInfo = twgl.createBufferInfoFromArrays(gl, {
    id: {
      data: [0, 1, 2],
      numComponents: 1,
    },
  });
  
  const videoTex = gl.createTexture();
  gl.bindTexture(gl.TEXTURE_2D, videoTex);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
  
  gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);

  function draw() {
    // copy video to texture
    gl.bindTexture(gl.TEXTURE_2D, videoTex);
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, video);
    
    // --- [ compute average, min, max to single pixel ] ---
    
    const averagePrgInfo = getAverageProgram(gl, video.videoWidth, video.videoHeight);
    gl.useProgram(averagePrgInfo.program);
    
    // calls gl.bindFramebuffer and gl.viewport
    twgl.bindFramebufferInfo(gl, fbi);
    
    // calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
    twgl.setBuffersAndAttributes(gl, averagePrgInfo, planeBufferInfo);
    
    // calls gl.drawArrays or gl.drawElements
    twgl.drawBufferInfo(gl, planeBufferInfo);
    
    // --- [ draw video to texture ] ---
    
    // calls gl.bindFramebuffer and gl.viewport
    twgl.bindFramebufferInfo(gl, null);
    
    gl.useProgram(textureProgInfo.program);
    
    // calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
    twgl.setBuffersAndAttributes(gl, textureProgInfo, planeBufferInfo);
    
    // calls gl.drawArrays or gl.drawElements
    twgl.drawBufferInfo(gl, planeBufferInfo);
    
    // -- [ draw 3 points showing avg, min, max] ---
    
    gl.useProgram(avgMinMaxPrgInfo.program);

    gl.bindTexture(gl.TEXTURE_2D, fbi.attachments[0]);
    
    // calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
    twgl.setBuffersAndAttributes(gl, avgMinMaxPrgInfo, idBufferInfo);    

    // calls gl.drawArrays or gl.drawElements
    twgl.drawBufferInfo(gl, idBufferInfo, gl.POINTS);
  }
}
body {
  background: #444;
}
canvas {
  border: 1px solid black;
  display: block;
}
<canvas></canvas>
<button type="button">start</button>
<span style="color: #FF0">■ max brightness</span>
<span style="color: #F80">■ min brightness</span>
<span style="color: #F0F">■ average brightness</span>
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>

The only problem with this solution is that, AFAIK, it can't be parallelized by the GPU, so (2) I might test doing something similar to generating mipmaps: make a shader that processes, say, a 16x16 cell per output pixel and writes a smaller texture, repeating until I reach 1x1. I'd have to test whether that is actually faster, and which cell size (2x2, 4x4, 16x16, etc.) works best.
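One such reduction pass might look roughly like the following (a sketch only, using a fixed 16x16 cell; srcTex and srcSize are assumed uniforms, and the very first pass would compute per-pixel brightness from the video instead of reading already-reduced values):

// Sketch of one reduction pass: each output pixel folds a 16x16 cell of the
// source into (average, min, max). The pass is repeated on its own output until 1x1.
const reduceFS = `
precision highp float;
uniform sampler2D srcTex;   // previous level (already holds avg/min/max in r/g/b)
uniform vec2 srcSize;       // size of the texture being reduced

void main() {
  vec2 cellOrigin = floor(gl_FragCoord.xy) * 16.0;
  float total = 0.0;
  float minB = 1.0;
  float maxB = 0.0;
  for (int y = 0; y < 16; ++y) {
    for (int x = 0; x < 16; ++x) {
      vec2 uv = (cellOrigin + vec2(x, y) + 0.5) / srcSize;
      vec3 c = texture2D(srcTex, uv).rgb;
      total += c.r;            // r = average of the cell below
      minB = min(minB, c.g);   // g = min
      maxB = max(maxB, c.b);   // b = max
    }
  }
  gl_FragColor = vec4(total / (16.0 * 16.0), minB, maxB, 0);
}
`;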

Finally, as in the example above, if you don't actually need the result on the CPU, you can just pass the 1x1 texture as input to other shaders. The example only draws 3 points, but you could of course feed those values into the shader that draws the video and do some image processing, like raising the exposure when the brightness is low, or trying to auto-level the image based on the min and max brightness, etc.
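For example (a sketch, not part of the snippet above), the fragment shader that draws the video could take the 1x1 result texture as a second sampler and scale toward a target average; the 0.5 target and the clamp range are arbitrary placeholders:

// Sketch: exposure-adjusting fragment shader driven by the 1x1 avg/min/max texture.
const exposureFS = `
precision mediump float;
uniform sampler2D tex;            // the video frame
uniform sampler2D avgMinMaxTex;   // 1x1 texture: (average, min, max) brightness
varying vec2 v_texcoord;
void main() {
  vec3 stats = texture2D(avgMinMaxTex, vec2(0.5)).rgb;
  float exposure = clamp(0.5 / max(stats.x, 0.001), 0.25, 4.0);
  gl_FragColor = vec4(texture2D(tex, v_texcoord).rgb * exposure, 1.0);
}
`;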

Note that in WebGL2 you don't have to generate a different shader for each size, because in GLSL ES 3.0 loops don't have to be based on constant values.

const startElem = document.querySelector('button');
startElem.addEventListener('click', main, {once: true});

function main() {
  const gl = document.querySelector('canvas').getContext('webgl2');
  if (!gl) {
    return alert('need WebGL2')
  }
  
  let updateTexture = false;
  const video = document.createElement('video');
  video.crossOrigin = 'anonymous';
  video.loop = true;
  video.src = 'https://webglsamples.org/color-adjust/sample-video.mp4';
  if (video.requestVideoFrameCallback) {
    function update() {
      draw();
      video.requestVideoFrameCallback(update);
    };
    video.requestVideoFrameCallback(update);
  } else {
    function update() {
      if (video.currentTime > 0) {
        draw();
      }
      requestAnimationFrame(update);
    }
    requestAnimationFrame(update);
  }
  video.volume = 0;
  video.play();
  
  // create a 1x1 pixel RGBA/UNSIGNED_BYTE framebuffer
  const fbi = twgl.createFramebufferInfo(gl, [
    { internalFormat: gl.RGBA },
  ], 1, 1);
  
  const avgVS = `#version 300 es
  in vec4 position;
  void main() {
    gl_Position = position;
  }
  `;
  
  const avgFS = `#version 300 es
  precision highp float;
  uniform sampler2D tex;
  out vec4 result;
  void main() {
    ivec2 size = textureSize(tex, 0);
    float totalBrightness = 0.0;
    float minBrightness = 1.0;
    float maxBrightness = 0.0;
    for (int y = 0; y < size.y; ++y) {
      for (int x = 0; x < size.x; ++x) {
        vec4 color = texelFetch(tex, ivec2(x, y), 0);
        vec3 adjusted = color.rgb * vec3(0.2126, 0.7152, 0.0722);
        float brightness = adjusted.r + adjusted.g + adjusted.b;
        totalBrightness += brightness;
        minBrightness = min(brightness, minBrightness);
        maxBrightness = max(brightness, maxBrightness);
      }
    }
    float averageBrightness = totalBrightness / float(size.x * size.y);
    result = vec4(averageBrightness, minBrightness, maxBrightness, 0);
  }
  `;  
  
  // compile shaders, link program, look up uniforms
  const averagePrgInfo = twgl.createProgramInfo(gl, [avgVS, avgFS]);


  const tVS = `#version 300 es
  in vec4 position;
  in vec2 texcoord;
  out vec2 v_texcoord;
  void main() {
    gl_Position = position;
    v_texcoord = texcoord;
  }
  `;
  const tFS = `#version 300 es
  precision mediump float;
  uniform sampler2D tex;
  in vec2 v_texcoord;
  out vec4 fragColor;
  void main() {
    fragColor = texture(tex, v_texcoord);
  }
  `;
  // compile shaders, link program, look up uniforms
  const textureProgInfo = twgl.createProgramInfo(gl, [tVS, tFS]);
  
  const avgMinMaxVS = `#version 300 es
  out float v_id;
  uniform sampler2D avgMinMaxTex;
  void main() {
    vec4 avgMinMax = texelFetch(avgMinMaxTex, ivec2(0), 0);
    float v = gl_VertexID == 0
       ? avgMinMax.x
       : gl_VertexID == 1
          ? avgMinMax.y
          : avgMinMax.z;
    gl_Position = vec4(1. - (float(gl_VertexID) + 1.0) / 10., v * 2. - 1., 0, 1);
    gl_PointSize = 10.0;
    v_id = float(gl_VertexID);
  }
  `;
  const avgMinMaxFS = `#version 300 es
  precision mediump float;
  in float v_id;
  out vec4 fragColor;
  void main() {
    fragColor = vec4(1., v_id / 2., 1. - v_id / 2., 1);
  }
  `;
  // compile shaders, link program, look up uniforms
  const avgMinMaxPrgInfo = twgl.createProgramInfo(gl, [avgMinMaxVS, avgMinMaxFS]);
  
  // creates buffers with positions and texcoords for a -1 to +1 quad
  const planeBufferInfo = twgl.primitives.createXYQuadBufferInfo(gl);
  
  const videoTex = gl.createTexture();
  gl.bindTexture(gl.TEXTURE_2D, videoTex);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
  
  gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);

  function draw() {
    // copy video to texture
    gl.bindTexture(gl.TEXTURE_2D, videoTex);
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, video);
    
    // --- [ compute average, min, max to single pixel ] ---
    
    gl.useProgram(averagePrgInfo.program);
    
    // calls gl.bindFramebuffer and gl.viewport
    twgl.bindFramebufferInfo(gl, fbi);
    
    // calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
    twgl.setBuffersAndAttributes(gl, averagePrgInfo, planeBufferInfo);
    
    // calls gl.drawArrays or gl.drawElements
    twgl.drawBufferInfo(gl, planeBufferInfo);
    
    // --- [ draw video to texture ] ---
    
    // calls gl.bindFramebuffer and gl.viewport
    twgl.bindFramebufferInfo(gl, null);
    
    gl.useProgram(textureProgInfo.program);
    
    // calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
    twgl.setBuffersAndAttributes(gl, textureProgInfo, planeBufferInfo);
    
    // calls gl.drawArrays or gl.drawElements
    twgl.drawBufferInfo(gl, planeBufferInfo);
    
    // -- [ draw 3 points showing avg, min, max] ---
    
    gl.useProgram(avgMinMaxPrgInfo.program);

    gl.bindTexture(gl.TEXTURE_2D, fbi.attachments[0]);

    // draw 3 points
    gl.drawArrays(gl.POINTS, 0, 3);
  }
}
body {
  background: #444;
}
canvas {
  border: 1px solid black;
  display: block;
}
<canvas></canvas>
<button type="button">start</button>
<span style="color: #FF0">■ max brightness</span>
<span style="color: #F80">■ min brightness</span>
<span style="color: #F0F">■ average brightness</span>
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>