WebGL Droste effect

I am trying to achieve the Droste effect on a cube's faces using WebGL. There is a single mesh in the viewport, a cube, and all of its faces share the same texture. To achieve the Droste effect, I update the texture on each frame by taking a snapshot of the canvas of the WebGL context I am drawing on; over time this produces the Droste effect, since the snapshot contains more and more nested past frames.

Here is a demo of what I have right now:

https://tomashubelbauer.github.io/webgl-op-1/?cubeTextured

The code in question looks like this:

// Set up fragment and vertex shader and attach them to a program, link the program
// Create a vertex buffer, an index buffer and a texture coordinate buffer
// Tesselate the cube's vertices and fill in the index and texture coordinate buffers
const textureCanvas = document.createElement('canvas');
textureCanvas.width = 256;
textureCanvas.height = 256;
const textureContext = textureCanvas.getContext('2d');

// In every `requestAnimationFrame`:
textureContext.drawImage(context.canvas, 0, 0);
const texture = context.createTexture();
context.bindTexture(context.TEXTURE_2D, texture);
context.texImage2D(context.TEXTURE_2D, 0, context.RGBA, context.RGBA, context.UNSIGNED_BYTE, textureCanvas);
context.generateMipmap(context.TEXTURE_2D);
// Clear the viewport completely (depth and color buffers)
// Set up attribute and uniform values, the projection and model view matrices
context.activeTexture(context.TEXTURE0);
context.bindTexture(context.TEXTURE_2D, texture);
context.uniform1i(fragmentShaderTextureSamplerUniformLocation, 0);
context.drawElements(context.TRIANGLES, 36, context.UNSIGNED_SHORT, 0);

And that's the whole of it: there is a separate canvas onto which the WebGL canvas is drawn before each WebGL frame. That canvas is then used to create the texture for the given frame, and the texture is applied to the cube's faces according to the texture coordinate buffer and the texture sampler uniform provided to the fragment shader, which simply uses gl_FragColor = texture2D(textureSampler, textureCoordinate), just as you would expect.

But this is dreadfully slow (this simple demo with a single cube mesh runs at around 30 FPS, whereas all my other demos, some with an order of magnitude more tris, still hit the 60 FPS requestAnimationFrame cap).
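
I suspect part of the cost is allocating a brand-new texture and generating a full mipmap chain on every frame. Here is a sketch of how a single texture could be reused instead (using the same context and textureCanvas as above; snapshotTexture is just an illustrative name), although the 2D-canvas readback itself still happens every frame:

// Created once, outside the render loop:
const snapshotTexture = context.createTexture();
context.bindTexture(context.TEXTURE_2D, snapshotTexture);
// Clamp and use linear filtering so the texture is complete without mipmaps
context.texParameteri(context.TEXTURE_2D, context.TEXTURE_WRAP_S, context.CLAMP_TO_EDGE);
context.texParameteri(context.TEXTURE_2D, context.TEXTURE_WRAP_T, context.CLAMP_TO_EDGE);
context.texParameteri(context.TEXTURE_2D, context.TEXTURE_MIN_FILTER, context.LINEAR);

// In every `requestAnimationFrame`, re-upload into the same texture object:
textureContext.drawImage(context.canvas, 0, 0);
context.bindTexture(context.TEXTURE_2D, snapshotTexture);
context.texImage2D(context.TEXTURE_2D, 0, context.RGBA, context.RGBA, context.UNSIGNED_BYTE, textureCanvas);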

It also feels weird to do this by reaching "outside" of WebGL and using an external canvas, when I feel it should be achievable with WebGL alone.

I know WebGL keeps two buffers: one for the active frame and a back buffer for the most recently drawn frame, and the two are swapped on each frame for instant screen updates. Is it possible to tap into this back buffer and use it as a texture? If so, could you provide example code of how that would be done?

From this article:

The normal way to do this is to render to a texture by attaching that texture to a framebuffer.

const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(
    gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0 /* level */);

Now to render to the texture:

gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.viewport(0, 0, textureWidth, textureHeight);

To render to the canvas:

gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);

To do what you want, you need two textures, since you cannot read from and write to the same texture at the same time. So you draw, say:

  • Draw the image to TextureA
  • Draw the previous frame (TextureB) to TextureA
  • Draw the cube with TextureA to TextureB
  • Draw TextureB to the canvas

"use strict";

function main() {
  const m4 = twgl.m4;
  const gl = document.querySelector('canvas').getContext('webgl');

  const vs = `
  attribute vec4 position;
  attribute vec2 texcoord;
  uniform mat4 u_matrix;
  varying vec2 v_texcoord;
  void main() {
    gl_Position = u_matrix * position;
    v_texcoord = texcoord;
  }
  `;
  
  const fs = `
  precision mediump float;
  varying vec2 v_texcoord;
  uniform sampler2D u_tex;
  void main() {
    gl_FragColor = texture2D(u_tex, v_texcoord);
  }
  `;
  
  // compile shaders, link program, look up locations
  const programInfo = twgl.createProgramInfo(gl, [vs, fs]);

  // gl.createBuffer, gl.bufferData for positions and texcoords of a cube
  const cubeBufferInfo = twgl.primitives.createCubeBufferInfo(gl, 1);
  // gl.createBuffer, gl.bufferData for positions and texcoords of a quad
  const quadBufferInfo = twgl.primitives.createXYQuadBufferInfo(gl, 2);

  // all the normal stuff for setting up a texture
  const imageTexture = twgl.createTexture(gl, {
    src: 'https://i.imgur.com/ZKMnXce.png',
  });

  function makeFramebufferAndTexture(gl, width, height) {
    const framebuffer = gl.createFramebuffer();
    gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
    
    const texture = gl.createTexture();
    gl.bindTexture(gl.TEXTURE_2D, texture);
    gl.texImage2D(gl.TEXTURE_2D,
       0,       // level
       gl.RGBA, // internal format
       width,
       height,
       0,       // border
       gl.RGBA, // format
       gl.UNSIGNED_BYTE, // type
       null,    // data (no data needed)
    );
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
    
    gl.framebufferTexture2D(
       gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
       gl.TEXTURE_2D, texture, 0 /* level */);
  
    // note: depending on what you're rendering you might want to attach
    // a depth renderbuffer or depth texture. See linked article
    
    return {
      framebuffer,
      texture,
      width,
      height,
    };
  }
  
  function bindFramebufferAndSetViewport(gl, fbi) {
    gl.bindFramebuffer(gl.FRAMEBUFFER, fbi ? fbi.framebuffer : null);
    const {width, height} = fbi || gl.canvas;
    gl.viewport(0, 0, width, height);
  }

  let fbiA = makeFramebufferAndTexture(gl, 512, 512);
  let fbiB = makeFramebufferAndTexture(gl, 512, 512);
  
  function drawImageAndPreviousFrameToTextureB() {
    bindFramebufferAndSetViewport(gl, fbiB);
    
    // calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
    // for each attribute
    twgl.setBuffersAndAttributes(gl, programInfo, quadBufferInfo);

    // calls gl.activeTexture, gl.bindTexture, gl.uniform 
    twgl.setUniforms(programInfo, {
      u_tex: imageTexture,
      u_matrix: m4.identity(),
    });

    // calls gl.drawArrays or gl.drawElements
    twgl.drawBufferInfo(gl, quadBufferInfo);
    
    // ---------
    
    // draw previous cube texture into current cube texture
    {
      twgl.setUniforms(programInfo, {
        u_tex: fbiA.texture,
        u_matrix: m4.scaling([0.8, 0.8, 1]),
      });
      twgl.drawBufferInfo(gl, quadBufferInfo);
    }
  }    
    
  function drawTexturedCubeToTextureA(time) {
    // ---------   
    // draw cube to "new" dstFB using srcFB.texture on cube
    bindFramebufferAndSetViewport(gl, fbiA);
    gl.clear(gl.COLOR_BUFFER_BIT);
    
    twgl.setBuffersAndAttributes(gl, programInfo, cubeBufferInfo);
    
    {
      const fov = 60 * Math.PI / 180;
      const aspect = fbiA.width / fbiA.height;
      const near = 0.1;
      const far = 100;
      let mat = m4.perspective(fov, aspect, near, far); 
      mat = m4.translate(mat, [0, 0, -2]);
      mat = m4.rotateX(mat, time);
      mat = m4.rotateY(mat, time * 0.7);

      twgl.setUniforms(programInfo, {
        u_tex: fbiB.texture,
        u_matrix: mat,
      });
    }
    
    twgl.drawBufferInfo(gl, cubeBufferInfo);
  }
  
  function drawTextureAToCanvas() {
    // --------
    // draw dstFB.texture to canvas
    bindFramebufferAndSetViewport(gl, null);
    
    twgl.setBuffersAndAttributes(gl, programInfo, quadBufferInfo);
    
    {
      const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
      const near = -1;
      const far = 1;
      let mat = m4.ortho(-aspect, aspect, -1, 1, near, far);

      twgl.setUniforms(programInfo, {
        u_tex: fbiA.texture,
        u_matrix: mat,
      });
    }
    
    twgl.drawBufferInfo(gl, quadBufferInfo);
  }  
  
  function render(time) {
    time *= 0.001; // convert to seconds;
    
    twgl.resizeCanvasToDisplaySize(gl.canvas);
    
    gl.enable(gl.DEPTH_TEST);
    gl.enable(gl.CULL_FACE);
    
    // there's only one shader program so let's set it here
    gl.useProgram(programInfo.program);
  
    drawImageAndPreviousFrameToTextureB();
    drawTexturedCubeToTextureA(time);
    drawTextureAToCanvas();
  
    requestAnimationFrame(render);
  }
  requestAnimationFrame(render);
}

main();

body { margin: 0; }
canvas { width: 100vw; height: 100vh; display: block; }

<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas></canvas>
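
As the comment in makeFramebufferAndTexture notes, if you need depth testing while rendering into these framebuffer textures you would also add a depth attachment. A minimal sketch, assuming it is added inside makeFramebufferAndTexture while the framebuffer is still bound (depthBuffer is just an illustrative name; gl, width, and height are the function's own):

const depthBuffer = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, depthBuffer);
// Allocate 16-bit depth storage matching the color attachment's size
gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, width, height);
// Attach it to the currently bound framebuffer
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.RENDERBUFFER, depthBuffer);

You would then also clear gl.DEPTH_BUFFER_BIT along with the color buffer when rendering into that framebuffer.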

As for the canvas and its 2 buffers: no, you cannot directly use them as textures. You can call gl.copyTexImage2D or gl.copyTexSubImage2D to copy a portion of the canvas into a texture, so that is another solution. It is less flexible and, I believe, slower than the framebuffer approach.
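
A minimal sketch of that copy approach (copyTexture here is a hypothetical texture created once with CLAMP_TO_EDGE wrapping and LINEAR filtering); note that the copy has to happen in the same callback that drew the frame, before control returns to the browser, unless the context was created with preserveDrawingBuffer: true:

// Right after drawing the frame, with the default framebuffer (the canvas) bound:
gl.bindTexture(gl.TEXTURE_2D, copyTexture);
// Copy the current drawing buffer into the texture, (re)allocating its storage
gl.copyTexImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 0, 0, gl.canvas.width, gl.canvas.height, 0);
// Or, if the texture already has storage of the right size:
// gl.copyTexSubImage2D(gl.TEXTURE_2D, 0, 0, 0, 0, 0, gl.canvas.width, gl.canvas.height);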