1

我目前正在 WebGL 中直接渲染到屏幕(没有多次传递或后处理)。我想确定整个渲染图像的平均亮度(即单个数字),并且方式要足够高效,可以每一帧都执行。

我想要完成的是在场景中实现“曝光”调整(就像摄像机或人眼一样),以便以逼真的照明和没有过渡的方式查看室内和室外场景 - 当前的亮度帧将负反馈到下一帧的亮度。

我目前正在通过我的场景数据发送几条光线来计算 CPU 端的一个非常粗略的近似值,以找到这些点的亮度;这可行,但样本太少而无法稳定(当光线穿过光源时,亮度随视角显着变化)。如果可能的话,我宁愿将工作卸载到 GPU 上,因为我的应用程序通常受 CPU 限制。

4

2 回答 2

3

我只想到了一个糟糕的变通办法(kludge):渲染到纹理并为其生成 Mipmap,然后读取最小的那一级。我希望有更好的方法。

那有什么问题?这种方式几乎完全在 GPU 上完成,可以很好地用于现有的渲染管道,并且应该给出合理的结果。我不知道有什么理由反对它。

于 2012-04-15T22:39:03.153 回答
2

我知道这个问题已经有 8 年历史了,但是嘿....

首先,在 WebGL1 中,generateMipmap 仅适用于尺寸为 2 的幂的图像。

我建议(1)生成一个像这样的简单着色器

/**
 * Builds the source text of a WebGL1 fragment shader that reads every texel
 * of a texWidth x texHeight texture and writes a single pixel whose r, g, b
 * channels hold the (average, min, max) brightness of the whole texture.
 *
 * The loop bounds are baked in as literals because GLSL ES 1.0 only allows
 * loops with constant bounds, so a separate shader is needed per size.
 *
 * FIX: was `precision mediump float` — mediump may be a half float whose max
 * value is ~65504, so `totalBrightness` can overflow for textures larger than
 * roughly 256x256. highp matches the WebGL2 version of this shader.
 * (Caveat: WebGL1 does not guarantee highp in fragment shaders on all
 * devices — presumably fine on modern GPUs; confirm on target hardware.)
 *
 * @param {number} texWidth  texture width in pixels
 * @param {number} texHeight texture height in pixels
 * @returns {string} GLSL ES 1.0 fragment shader source
 */
function createShader(texWidth, texHeight) {
  return `
  precision highp float;
  uniform sampler2D tex;

  void main() {
    vec2 size = vec2(${texWidth}, ${texHeight});
    float totalBrightness = 0.0;
    float minBrightness = 1.0;
    float maxBrightness = 0.0;
    for (int y = 0; y < ${texHeight}; ++y) {
      for (int x = 0; x < ${texWidth}; ++x) {
        vec4 color = texture2D(tex, (vec2(x, y) + 0.5) / size);
        vec3 adjusted = color.rgb * vec3(0.2126, 0.7152, 0.0722);
        float brightness = adjusted.r + adjusted.g + adjusted.b;
        totalBrightness += brightness;
        minBrightness = min(brightness, minBrightness);
        maxBrightness = max(brightness, maxBrightness);
      }
    }
    float averageBrightness = totalBrightness / (size.x * size.y);
    gl_FragColor = vec4(averageBrightness, minBrightness, maxBrightness, 0);
  }
  `;
}

// Start on user click — browsers require a gesture before video playback,
// and {once: true} removes the listener after the first click.
document.querySelector('button').addEventListener('click', main, {once: true});

/**
 * Generates GLSL ES 1.0 fragment-shader source that folds an entire
 * texWidth x texHeight texture down to one pixel: r = average brightness,
 * g = minimum brightness, b = maximum brightness (Rec. 709 luma weights).
 *
 * GLSL ES 1.0 loops need constant bounds, hence the dimensions are
 * interpolated into the source and a shader is generated per size.
 *
 * FIX: was `precision mediump float` — mediump may be a 16-bit float
 * (max ~65504), so the running sum overflows for textures bigger than
 * about 256x256. highp matches the WebGL2 variant below. (Caveat: highp
 * fragment support is optional in WebGL1 — confirm on target devices.)
 *
 * @param {number} texWidth  texture width in pixels
 * @param {number} texHeight texture height in pixels
 * @returns {string} fragment shader source
 */
function createShader(texWidth, texHeight) {
  return `
  precision highp float;
  uniform sampler2D tex;

  void main() {
    vec2 size = vec2(${texWidth}, ${texHeight});
    float totalBrightness = 0.0;
    float minBrightness = 1.0;
    float maxBrightness = 0.0;
    for (int y = 0; y < ${texHeight}; ++y) {
      for (int x = 0; x < ${texWidth}; ++x) {
        vec4 color = texture2D(tex, (vec2(x, y) + 0.5) / size);
        vec3 adjusted = color.rgb * vec3(0.2126, 0.7152, 0.0722);
        float brightness = adjusted.r + adjusted.g + adjusted.b;
        totalBrightness += brightness;
        minBrightness = min(brightness, minBrightness);
        maxBrightness = max(brightness, maxBrightness);
      }
    }
    float averageBrightness = totalBrightness / (size.x * size.y);
    gl_FragColor = vec4(averageBrightness, minBrightness, maxBrightness, 0);
  }
  `;
}

// Cache of compiled averaging programs, keyed by "widthxheight", so each
// texture size is only compiled and linked once.
const prgs = {};

/**
 * Returns (compiling and caching on first use) the program that averages a
 * width x height texture down to a single pixel.
 */
function getAverageProgram(gl, width, height) {
  const id = `${width}x${height}`;
  if (!prgs[id]) {
    const vs = `
  attribute vec4 position;
  void main() {
    gl_Position = position;
  }
  `;
    // compile shaders, link program, look up uniforms
    prgs[id] = twgl.createProgramInfo(gl, [vs, createShader(width, height)]);
  }
  return prgs[id];
}


function main() {
  // WebGL1 version: reduces each video frame to a single (avg, min, max)
  // brightness pixel entirely on the GPU, then feeds that 1x1 texture back
  // into a draw call — the result never has to be read to the CPU.
  const gl = document.querySelector('canvas').getContext('webgl');

  const video = document.createElement('video');
  video.crossOrigin = 'anonymous';
  video.loop = true;
  video.src = 'https://webglsamples.org/color-adjust/sample-video.mp4';
  // Prefer requestVideoFrameCallback so we only redraw when the video
  // actually produces a new frame; otherwise fall back to rAF polling.
  if (video.requestVideoFrameCallback) {
    const update = () => {
      draw();
      video.requestVideoFrameCallback(update);
    };
    video.requestVideoFrameCallback(update);
  } else {
    const update = () => {
      // currentTime > 0 implies at least one frame has been decoded.
      if (video.currentTime > 0) {
        draw();
      }
      requestAnimationFrame(update);
    };
    requestAnimationFrame(update);
  }
  video.volume = 0;
  video.play();

  // create a 1x1 pixel RGBA/UNSIGNED_BYTE framebuffer that receives the
  // (average, min, max) brightness in its r, g, b channels.
  // FIX: the option key was misspelled `internalForamt`, so twgl silently
  // ignored it (presumably falling back to its RGBA default — confirm).
  const fbi = twgl.createFramebufferInfo(gl, [
    { internalFormat: gl.RGBA },
  ], 1, 1);

  // Plain textured-quad program used to draw the video to the canvas.
  const tVS = `
  attribute vec4 position;
  attribute vec2 texcoord;
  varying vec2 v_texcoord;
  void main() {
    gl_Position = position;
    v_texcoord = texcoord;
  }
  `;
  const tFS = `
  precision mediump float;
  uniform sampler2D tex;
  varying vec2 v_texcoord;
  void main() {
    gl_FragColor = texture2D(tex, v_texcoord);
  }
  `;
  // compile shaders, link program, look up uniforms
  const textureProgInfo = twgl.createProgramInfo(gl, [tVS, tFS]);

  // Program that draws 3 points whose vertical positions are read directly
  // from the 1x1 avg/min/max texture in the vertex shader.
  const avgMinMaxVS = `
  attribute float id;
  varying float v_id;
  uniform sampler2D avgMinMaxTex;
  void main() {
    vec4 avgMinMax = texture2D(avgMinMaxTex, vec2(0.5));
    float v = id < 1.0
       ? avgMinMax.x
       : id < 2.0
          ? avgMinMax.y
          : avgMinMax.z;
    gl_Position = vec4(1. - (id + 1.0) / 10., v * 2. - 1., 0, 1);
    gl_PointSize = 10.0;
    v_id = id;
  }
  `;
  const avgMinMaxFS = `
  precision mediump float;
  varying float v_id;
  void main() {
    gl_FragColor = vec4(1., v_id / 2., 1. - v_id / 2., 1);
  }
  `;
  // compile shaders, link program, look up uniforms
  const avgMinMaxPrgInfo = twgl.createProgramInfo(gl, [avgMinMaxVS, avgMinMaxFS]);

  // -1 to +1 quad with texcoords, shared by both full-surface passes.
  const planeBufferInfo = twgl.primitives.createXYQuadBufferInfo(gl);
  // Vertex ids 0..2 select avg/min/max in the point-drawing shader.
  const idBufferInfo = twgl.createBufferInfoFromArrays(gl, {
    id: {
      data: [0, 1, 2],
      numComponents: 1,
    },
  });

  // Non-mipmapped, clamped texture — required for non-power-of-2 video
  // frames in WebGL1.
  const videoTex = gl.createTexture();
  gl.bindTexture(gl.TEXTURE_2D, videoTex);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);

  gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);

  function draw() {
    // copy the current video frame to the texture
    gl.bindTexture(gl.TEXTURE_2D, videoTex);
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, video);

    // --- [ compute average, min, max to single pixel ] ---

    // The shader is size-specific (GLSL ES 1.0 needs constant loop bounds),
    // so fetch/compile one for the current video dimensions.
    const averagePrgInfo = getAverageProgram(gl, video.videoWidth, video.videoHeight);
    gl.useProgram(averagePrgInfo.program);

    // calls gl.bindFramebuffer and gl.viewport
    twgl.bindFramebufferInfo(gl, fbi);

    // calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
    twgl.setBuffersAndAttributes(gl, averagePrgInfo, planeBufferInfo);

    // calls gl.drawArrays or gl.drawElements
    // NOTE: the `tex` sampler uniform is never set explicitly; it defaults
    // to texture unit 0, where videoTex is currently bound.
    twgl.drawBufferInfo(gl, planeBufferInfo);

    // --- [ draw video to the canvas ] ---

    // calls gl.bindFramebuffer and gl.viewport
    twgl.bindFramebufferInfo(gl, null);

    gl.useProgram(textureProgInfo.program);

    // calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
    twgl.setBuffersAndAttributes(gl, textureProgInfo, planeBufferInfo);

    // calls gl.drawArrays or gl.drawElements
    twgl.drawBufferInfo(gl, planeBufferInfo);

    // -- [ draw 3 points showing avg, min, max] ---

    gl.useProgram(avgMinMaxPrgInfo.program);

    // Bind the 1x1 result texture; the sampler again defaults to unit 0.
    gl.bindTexture(gl.TEXTURE_2D, fbi.attachments[0]);

    // calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
    twgl.setBuffersAndAttributes(gl, avgMinMaxPrgInfo, idBufferInfo);

    // calls gl.drawArrays or gl.drawElements
    twgl.drawBufferInfo(gl, idBufferInfo, gl.POINTS);
  }
}
/* Snippet styling: dark page background so the canvas stands out. */
body {
  background: #444;
}
canvas {
  border: 1px solid black;
  display: block;
}
<!-- Click "start" to begin: video playback requires a user gesture.
     The colored squares are a legend for the three indicator points. -->
<canvas></canvas>
<button type="button">start</button>
<span style="color: #FF0">■ max brightness</span>
<span style="color: #F80">■ min brightness, </span>
<span style="color: #F0F">■ average brightness, </span>
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>

这个解决方案的唯一问题是,据我所知它无法被 GPU 并行化,所以 (2) 我可能会测试一种类似生成 mipmap 的做法:比如写一个处理 16x16 像素单元的着色器,以它为渲染目标生成更小的纹理,并重复这一过程直到得到 1x1。我需要实际测试这是否真的更快,以及 2x2、4x4、16x16 等哪种单元大小最好。

最后,如果可能的话,就像上面的例子一样,如果我实际上不需要 CPU 上的结果,那么只需将该 1x1 纹理作为输入传递给其他着色器。该示例仅绘制 3 个点,但当然您可以将这些值输入到绘制视频的着色器中以进行一些图像处理,例如在亮度低时提高曝光度尝试根据最小和最大亮度等自动调整图像...

请注意,在 WebGL2 中,你不必像 WebGL1 那样为每个尺寸生成不同的着色器;或者更确切地说,在 GLSL ES 3.0 中,循环的边界可以不是常量值。

// Kick off on the first click only — a user gesture is needed before the
// browser will allow video playback.
document.querySelector('button').addEventListener('click', main, {once: true});

function main() {
  // WebGL2 version: textureSize/texelFetch plus non-constant loop bounds
  // (GLSL ES 3.0) let a single shader average any video size, so no
  // per-size shader generation is needed.
  const gl = document.querySelector('canvas').getContext('webgl2');
  if (!gl) {
    return alert('need WebGL2')
  }

  const video = document.createElement('video');
  video.crossOrigin = 'anonymous';
  video.loop = true;
  video.src = 'https://webglsamples.org/color-adjust/sample-video.mp4';
  // Prefer requestVideoFrameCallback so we only redraw when the video
  // actually produces a new frame; otherwise fall back to rAF polling.
  if (video.requestVideoFrameCallback) {
    const update = () => {
      draw();
      video.requestVideoFrameCallback(update);
    };
    video.requestVideoFrameCallback(update);
  } else {
    const update = () => {
      // currentTime > 0 implies at least one frame has been decoded.
      if (video.currentTime > 0) {
        draw();
      }
      requestAnimationFrame(update);
    };
    requestAnimationFrame(update);
  }
  video.volume = 0;
  video.play();

  // create a 1x1 pixel RGBA/UNSIGNED_BYTE framebuffer that receives the
  // (average, min, max) brightness in its r, g, b channels.
  // FIX: the option key was misspelled `internalForamt`, so twgl silently
  // ignored it (presumably falling back to its RGBA default — confirm).
  const fbi = twgl.createFramebufferInfo(gl, [
    { internalFormat: gl.RGBA },
  ], 1, 1);

  const avgVS = `#version 300 es
  in vec4 position;
  void main() {
    gl_Position = position;
  }
  `;

  // Brightness reduction shader: loops over every texel of the bound
  // texture (size queried at runtime with textureSize) and emits one pixel
  // holding (average, min, max) brightness.
  const avgFS = `#version 300 es
  precision highp float;
  uniform sampler2D tex;
  out vec4 result;
  void main() {
    ivec2 size = textureSize(tex, 0);
    float totalBrightness = 0.0;
    float minBrightness = 1.0;
    float maxBrightness = 0.0;
    for (int y = 0; y < size.y; ++y) {
      for (int x = 0; x < size.x; ++x) {
        vec4 color = texelFetch(tex, ivec2(x, y), 0);
        vec3 adjusted = color.rgb * vec3(0.2126, 0.7152, 0.0722);
        float brightness = adjusted.r + adjusted.g + adjusted.b;
        totalBrightness += brightness;
        minBrightness = min(brightness, minBrightness);
        maxBrightness = max(brightness, maxBrightness);
      }
    }
    float averageBrightness = totalBrightness / float(size.x * size.y);
    result = vec4(averageBrightness, minBrightness, maxBrightness, 0);
  }
  `;  

  // compile shaders, link program, look up uniforms
  const averagePrgInfo = twgl.createProgramInfo(gl, [avgVS, avgFS]);


  // Plain textured-quad program used to draw the video to the canvas.
  const tVS = `#version 300 es
  in vec4 position;
  in vec2 texcoord;
  out vec2 v_texcoord;
  void main() {
    gl_Position = position;
    v_texcoord = texcoord;
  }
  `;
  const tFS = `#version 300 es
  precision mediump float;
  uniform sampler2D tex;
  in vec2 v_texcoord;
  out vec4 fragColor;
  void main() {
    fragColor = texture(tex, v_texcoord);
  }
  `;
  // compile shaders, link program, look up uniforms
  const textureProgInfo = twgl.createProgramInfo(gl, [tVS, tFS]);

  // Draws 3 points whose heights come from the 1x1 avg/min/max texture;
  // gl_VertexID selects the channel so no attribute buffer is needed.
  const avgMinMaxVS = `#version 300 es
  out float v_id;
  uniform sampler2D avgMinMaxTex;
  void main() {
    vec4 avgMinMax = texelFetch(avgMinMaxTex, ivec2(0), 0);
    float v = gl_VertexID == 0
       ? avgMinMax.x
       : gl_VertexID == 1
          ? avgMinMax.y
          : avgMinMax.z;
    gl_Position = vec4(1. - (float(gl_VertexID) + 1.0) / 10., v * 2. - 1., 0, 1);
    gl_PointSize = 10.0;
    v_id = float(gl_VertexID);
  }
  `;
  const avgMinMaxFS = `#version 300 es
  precision mediump float;
  in float v_id;
  out vec4 fragColor;
  void main() {
    fragColor = vec4(1., v_id / 2., 1. - v_id / 2., 1);
  }
  `;
  // compile shaders, link program, look up uniforms
  const avgMinMaxPrgInfo = twgl.createProgramInfo(gl, [avgMinMaxVS, avgMinMaxFS]);

  // creates buffers with positions and texcoords for a -1 to +1 quad
  const planeBufferInfo = twgl.primitives.createXYQuadBufferInfo(gl);

  // Non-mipmapped, clamped texture for the incoming video frames.
  const videoTex = gl.createTexture();
  gl.bindTexture(gl.TEXTURE_2D, videoTex);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);

  gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);

  function draw() {
    // copy the current video frame to the texture
    gl.bindTexture(gl.TEXTURE_2D, videoTex);
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, video);

    // --- [ compute average, min, max to single pixel ] ---

    gl.useProgram(averagePrgInfo.program);

    // calls gl.bindFramebuffer and gl.viewport
    twgl.bindFramebufferInfo(gl, fbi);

    // calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
    twgl.setBuffersAndAttributes(gl, averagePrgInfo, planeBufferInfo);

    // calls gl.drawArrays or gl.drawElements
    // NOTE: the `tex` sampler uniform is never set explicitly; it defaults
    // to texture unit 0, where videoTex is currently bound.
    twgl.drawBufferInfo(gl, planeBufferInfo);

    // --- [ draw video to the canvas ] ---

    // calls gl.bindFramebuffer and gl.viewport
    twgl.bindFramebufferInfo(gl, null);

    gl.useProgram(textureProgInfo.program);

    // calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
    twgl.setBuffersAndAttributes(gl, textureProgInfo, planeBufferInfo);

    // calls gl.drawArrays or gl.drawElements
    twgl.drawBufferInfo(gl, planeBufferInfo);

    // -- [ draw 3 points showing avg, min, max] ---

    gl.useProgram(avgMinMaxPrgInfo.program);

    // Bind the 1x1 result texture; the sampler again defaults to unit 0.
    gl.bindTexture(gl.TEXTURE_2D, fbi.attachments[0]);

    // draw 3 points (vertices synthesized from gl_VertexID, no buffers)
    gl.drawArrays(gl.POINTS, 0, 3);
  }
}
/* Snippet styling: dark page background so the canvas stands out. */
body {
  background: #444;
}
canvas {
  border: 1px solid black;
  display: block;
}
<!-- Click "start" to begin: video playback requires a user gesture.
     The colored squares are a legend for the three indicator points. -->
<canvas></canvas>
<button type="button">start</button>
<span style="color: #FF0">■ max brightness</span>
<span style="color: #F80">■ min brightness, </span>
<span style="color: #F0F">■ average brightness, </span>
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>

于 2020-08-02T11:47:15.287 回答