Implementing depth testing for semi-transparent objects
For the past two days I've been scouring the web trying to understand depth testing for semi-transparent objects. I've read many papers/tutorials on the subject, and in theory I believe I understand how it works. But none of them gave me actual example code.
I have three requirements for my depth testing of semi-transparent objects:
- It should be order-independent.
- It should work when two quads of the same object intersect each other, both semi-transparent. Imagine a grass object that looks like an X when viewed from above:
- It should correctly render the semi-transparent player rgba(0, 1, 0, 0.5) behind the building window rgba(0, 0, 1, 0.5), but in front of the background object rgba(1, 0, 0, 1):
The leftmost line is how I imagine the light/color changing as it travels through the semi-transparent objects toward the camera.
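For reference, here is what I would expect standard back-to-front "over" blending to produce with these exact colors (camera order: window, then player, then background; my own arithmetic):
// "over" operator, applied back to front: out = src.rgb * src.a + dst.rgb * (1 - src.a)
// start with the opaque background:           (1.00, 0.00, 0.00)
// player rgba(0, 1, 0, 0.5) over background:  0.5*(0,1,0) + 0.5*(1.00, 0.00, 0.00) = (0.50, 0.50, 0.00)
// window rgba(0, 0, 1, 0.5) over that:        0.5*(0,0,1) + 0.5*(0.50, 0.50, 0.00) = (0.25, 0.25, 0.50)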
Final thoughts
I suspect the best approach would be depth peeling, but I'm still missing an implementation/example. I'm leaning toward that approach because the game is 2.5D, and since peeling many layers could be dangerous for performance, the "peel" would never need to go more than two semi-transparent objects deep.
I'm already familiar with framebuffers and how to code them (I've done some post-processing effects with them). I'd be using those, right?
Most of my OpenGL knowledge comes from this tutorial, but it covers depth testing and semi-transparency separately. Sadly it also doesn't cover order-independent transparency at all (see the bottom of its blending page).
Finally, please don't answer just in theory, e.g.:
Draw opaque, draw transparent, draw opaque again, etc.
My ideal answer would contain the code for how the buffers are configured, the shaders, and screenshots of each pass, with an explanation of what it does.
The programming language used isn't too important either, as long as it uses OpenGL 4 or newer. Non-OpenGL code can be pseudocode (I don't care how you sort an array or create a GLFW window).
Edit:
I'm updating my question to provide just an example of the current state of my code. This example draws the semi-transparent player (green) first, the opaque background (red) second, and then the semi-transparent window (blue). However, the depth should be determined by the squares' Z positions, not by the order in which they are drawn.
(function() {
// your page initialization code here
// the DOM will be available here
var script = document.createElement('script');
script.onload = function () {
main();
};
script.src = 'https://mdn.github.io/webgl-examples/tutorial/gl-matrix.js';
document.head.appendChild(script); // or something to that effect
})();
//
// Start here
//
function main() {
const canvas = document.querySelector('#glcanvas');
const gl = canvas.getContext('webgl', {alpha:false});
// If we don't have a GL context, give up now
if (!gl) {
alert('Unable to initialize WebGL. Your browser or machine may not support it.');
return;
}
// Vertex shader program
const vsSource = `
attribute vec4 aVertexPosition;
attribute vec4 aVertexColor;
uniform mat4 uModelViewMatrix;
uniform mat4 uProjectionMatrix;
varying lowp vec4 vColor;
void main(void) {
gl_Position = uProjectionMatrix * uModelViewMatrix * aVertexPosition;
vColor = aVertexColor;
}
`;
// Fragment shader program
const fsSource = `
varying lowp vec4 vColor;
void main(void) {
gl_FragColor = vColor;
}
`;
// Initialize a shader program; this is where all the lighting
// for the vertices and so forth is established.
const shaderProgram = initShaderProgram(gl, vsSource, fsSource);
// Collect all the info needed to use the shader program.
// Look up which attributes our shader program is using
// for aVertexPosition, aVertexColor and also
// look up uniform locations.
const programInfo = {
program: shaderProgram,
attribLocations: {
vertexPosition: gl.getAttribLocation(shaderProgram, 'aVertexPosition'),
vertexColor: gl.getAttribLocation(shaderProgram, 'aVertexColor'),
},
uniformLocations: {
projectionMatrix: gl.getUniformLocation(shaderProgram, 'uProjectionMatrix'),
modelViewMatrix: gl.getUniformLocation(shaderProgram, 'uModelViewMatrix'),
},
};
// Here's where we call the routine that builds all the
// objects we'll be drawing.
const buffers = initBuffers(gl);
// Draw the scene
drawScene(gl, programInfo, buffers);
}
//
// initBuffers
//
// Initialize the buffers we'll need. For this demo, we just
// have one object -- a simple two-dimensional square.
//
function initBuffers(gl) {
// Create a buffer for the square's positions.
const positionBuffer0 = gl.createBuffer();
// Select the positionBuffer as the one to apply buffer
// operations to from here out.
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer0);
// Now create an array of positions for the square.
var positions = [
0.5, 0.5,
-0.5, 0.5,
0.5, -0.5,
-0.5, -0.5,
];
// Now pass the list of positions into WebGL to build the
// shape. We do this by creating a Float32Array from the
// JavaScript array, then use it to fill the current buffer.
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions), gl.STATIC_DRAW);
// Now set up the colors for the vertices
var colors = [
0.0, 1.0, 0.0, 0.5, // green, 50% alpha
0.0, 1.0, 0.0, 0.5, // green, 50% alpha
0.0, 1.0, 0.0, 0.5, // green, 50% alpha
0.0, 1.0, 0.0, 0.5, // green, 50% alpha
];
const colorBuffer0 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, colorBuffer0);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(colors), gl.STATIC_DRAW);
// Create a buffer for the square's positions.
const positionBuffer1 = gl.createBuffer();
// Select the positionBuffer as the one to apply buffer
// operations to from here out.
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer1);
// Now create an array of positions for the square.
positions = [
2.0, 0.4,
-2.0, 0.4,
2.0, -2.0,
-2.0, -2.0,
];
// Now pass the list of positions into WebGL to build the
// shape. We do this by creating a Float32Array from the
// JavaScript array, then use it to fill the current buffer.
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions), gl.STATIC_DRAW);
// Now set up the colors for the vertices
colors = [
1.0, 0.0, 0.0, 1.0, // red, opaque
1.0, 0.0, 0.0, 1.0, // red, opaque
1.0, 0.0, 0.0, 1.0, // red, opaque
1.0, 0.0, 0.0, 1.0, // red, opaque
];
const colorBuffer1 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, colorBuffer1);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(colors), gl.STATIC_DRAW);
// Create a buffer for the square's positions.
const positionBuffer2 = gl.createBuffer();
// Select the positionBuffer as the one to apply buffer
// operations to from here out.
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer2);
// Now create an array of positions for the square.
positions = [
1.0, 1.0,
-0.0, 1.0,
1.0, -1.0,
-0.0, -1.0,
];
// Now pass the list of positions into WebGL to build the
// shape. We do this by creating a Float32Array from the
// JavaScript array, then use it to fill the current buffer.
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions), gl.STATIC_DRAW);
// Now set up the colors for the vertices
colors = [
0.0, 0.0, 1.0, 0.5, // blue, 50% alpha
0.0, 0.0, 1.0, 0.5, // blue, 50% alpha
0.0, 0.0, 1.0, 0.5, // blue, 50% alpha
0.0, 0.0, 1.0, 0.5, // blue, 50% alpha
];
const colorBuffer2 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, colorBuffer2);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(colors), gl.STATIC_DRAW);
return {
position0: positionBuffer0,
color0: colorBuffer0,
position1: positionBuffer1,
color1: colorBuffer1,
position2: positionBuffer2,
color2: colorBuffer2,
};
}
//
// Draw the scene.
//
function drawScene(gl, programInfo, buffers) {
gl.clearColor(0.0, 0.0, 0.0, 1.0); // Clear to black, fully opaque
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
//gl.clearDepth(1.0); // Clear everything
gl.disable(gl.DEPTH_TEST)
gl.enable(gl.BLEND)
gl.blendEquation(gl.FUNC_ADD)
gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA)
// Clear the canvas before we start drawing on it.
// Create a perspective matrix, a special matrix that is
// used to simulate the distortion of perspective in a camera.
// Our field of view is 45 degrees, with a width/height
// ratio that matches the display size of the canvas
// and we only want to see objects between 0.1 units
// and 100 units away from the camera.
const fieldOfView = 45 * Math.PI / 180; // in radians
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const zNear = 0.1;
const zFar = 100.0;
const projectionMatrix = mat4.create();
// note: glmatrix.js always has the first argument
// as the destination to receive the result.
mat4.perspective(projectionMatrix,
fieldOfView,
aspect,
zNear,
zFar);
// Set the drawing position to the "identity" point, which is
// the center of the scene.
const modelViewMatrix = mat4.create();
// Now move the drawing position a bit to where we want to
// start drawing the square.
mat4.translate(modelViewMatrix, // destination matrix
modelViewMatrix, // matrix to translate
[-0.0, 0.0, -6.0]); // amount to translate
function drawSquare(positionbuffer, colorbuffer) {
// Tell WebGL how to pull out the positions from the position
// buffer into the vertexPosition attribute
{
const numComponents = 2;
const type = gl.FLOAT;
const normalize = false;
const stride = 0;
const offset = 0;
gl.bindBuffer(gl.ARRAY_BUFFER, positionbuffer);
gl.vertexAttribPointer(
programInfo.attribLocations.vertexPosition,
numComponents,
type,
normalize,
stride,
offset);
gl.enableVertexAttribArray(
programInfo.attribLocations.vertexPosition);
}
// Tell WebGL how to pull out the colors from the color buffer
// into the vertexColor attribute.
{
const numComponents = 4;
const type = gl.FLOAT;
const normalize = false;
const stride = 0;
const offset = 0;
gl.bindBuffer(gl.ARRAY_BUFFER, colorbuffer);
gl.vertexAttribPointer(
programInfo.attribLocations.vertexColor,
numComponents,
type,
normalize,
stride,
offset);
gl.enableVertexAttribArray(
programInfo.attribLocations.vertexColor);
}
// Tell WebGL to use our program when drawing
gl.useProgram(programInfo.program);
// Set the shader uniforms
gl.uniformMatrix4fv(
programInfo.uniformLocations.projectionMatrix,
false,
projectionMatrix);
gl.uniformMatrix4fv(
programInfo.uniformLocations.modelViewMatrix,
false,
modelViewMatrix);
{
const offset = 0;
const vertexCount = 4;
gl.drawArrays(gl.TRIANGLE_STRIP, offset, vertexCount);
}
}
drawSquare(buffers.position0, buffers.color0); // Player
drawSquare(buffers.position1, buffers.color1); // Background
drawSquare(buffers.position2, buffers.color2); // Window
}
//
// Initialize a shader program, so WebGL knows how to draw our data
//
function initShaderProgram(gl, vsSource, fsSource) {
const vertexShader = loadShader(gl, gl.VERTEX_SHADER, vsSource);
const fragmentShader = loadShader(gl, gl.FRAGMENT_SHADER, fsSource);
// Create the shader program
const shaderProgram = gl.createProgram();
gl.attachShader(shaderProgram, vertexShader);
gl.attachShader(shaderProgram, fragmentShader);
gl.linkProgram(shaderProgram);
// If creating the shader program failed, alert
if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
alert('Unable to initialize the shader program: ' + gl.getProgramInfoLog(shaderProgram));
return null;
}
return shaderProgram;
}
//
// creates a shader of the given type, uploads the source and
// compiles it.
//
function loadShader(gl, type, source) {
const shader = gl.createShader(type);
// Send the source to the shader object
gl.shaderSource(shader, source);
// Compile the shader program
gl.compileShader(shader);
// See if it compiled successfully
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
alert('An error occurred compiling the shaders: ' + gl.getShaderInfoLog(shader));
gl.deleteShader(shader);
return null;
}
return shader;
}
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width">
<title></title>
</head>
<body>
<canvas id="glcanvas" width="640" height="480"></canvas>
</body>
</html>
> I have three requirements for my depth testing of semi-transparent objects
There are actually very few cases of geometry that both self-intersects and is partially transparent (actually blended). The common case for self-intersecting geometry is grass and leaves. However, in those cases the areas actually covered by the grass and leaves are not transparent; they are fully opaque.
The common solution here is alpha testing. Render the leaves as opaque (non-blended) quads (with normal depth testing and writing), and discard fragments with insufficient alpha (e.g. because they are outside the leaf). Because the individual samples here are opaque, you get order independence for free, since depth testing works exactly as you would expect for opaque objects.
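As a minimal sketch, the fragment shader side of such an alpha test could look like this (u_leafTexture and the 0.5 cutoff are just placeholders, not from any particular engine):
const alphaTestFS = `
precision mediump float;
varying vec2 v_texCoord;
uniform sampler2D u_leafTexture;  // hypothetical foliage texture
void main(void) {
  vec4 color = texture2D(u_leafTexture, v_texCoord);
  if (color.a < 0.5) discard;     // outside the leaf: no color write, no depth write
  gl_FragColor = vec4(color.rgb, 1.0); // surviving samples are fully opaque
}
`;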
If you want blended edges, enable alpha-to-coverage and let the multisample resolve clean up the edges a little.
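In WebGL/OpenGL that's a single state toggle, though it only has a visible effect when rendering to a multisampled target (sketch; drawFoliage is a hypothetical draw call):
// Let the GPU turn fragment alpha into an MSAA coverage mask instead of blending.
gl.enable(gl.SAMPLE_ALPHA_TO_COVERAGE);
drawFoliage(); // hypothetical: draw the alpha-tested quads here
gl.disable(gl.SAMPLE_ALPHA_TO_COVERAGE);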
For the small amount of genuinely transparent stuff you have left, you typically do a back-to-front sort on the CPU and render it after the opaque pass.
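A sketch of that CPU sort, assuming each object carries a hypothetical worldPosition and draw method, and viewMatrix is a column-major 4x4 array (gl-matrix/twgl convention):
// Draw transparent objects after the opaque pass, farthest first.
function drawTransparentsBackToFront(gl, objects, viewMatrix) {
  const viewZ = (p) => // view-space z: more negative = farther from the camera
    viewMatrix[2] * p[0] + viewMatrix[6] * p[1] + viewMatrix[10] * p[2] + viewMatrix[14];
  objects
    .slice() // don't mutate the caller's array
    .sort((a, b) => viewZ(a.worldPosition) - viewZ(b.worldPosition)) // ascending: farthest first
    .forEach((obj) => obj.draw(gl));
  // depth testing stays on, but depth writes should be off (gl.depthMask(false))
}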
Proper OIT is possible, but it is generally a fairly expensive technique, so I have yet to see anyone actually use it outside of academic settings (at least on mobile OpenGL ES implementations).
This seems to be what the paper linked by ripi2 is doing:
function main() {
const m4 = twgl.m4;
const gl = document.querySelector('canvas').getContext('webgl2', {alpha: false});
if (!gl) {
alert('need WebGL2');
return;
}
const ext = gl.getExtension('EXT_color_buffer_float');
if (!ext) {
alert('EXT_color_buffer_float');
return;
}
const vs = `
#version 300 es
layout(location=0) in vec4 position;
uniform mat4 u_matrix;
void main() {
gl_Position = u_matrix * position;
}
`;
const checkerFS = `
#version 300 es
precision highp float;
uniform vec4 color1;
uniform vec4 color2;
out vec4 fragColor;
void main() {
ivec2 grid = ivec2(gl_FragCoord.xy) / 32;
fragColor = mix(color1, color2, float((grid.x + grid.y) % 2));
}
`;
const transparentFS = `
#version 300 es
precision highp float;
uniform vec4 Ci;
out vec4 fragData[2];
float w(float z, float a) {
return a * max(pow(10.0,-2.0),3.0*pow(10.0,3.0)*pow((1.0 - z), 3.));
}
void main() {
float ai = Ci.a;
float zi = gl_FragCoord.z;
float wresult = w(zi, ai);
fragData[0] = vec4(Ci.rgb * wresult, ai);
fragData[1].r = ai * wresult;
}
`;
const compositeFS = `
#version 300 es
precision highp float;
uniform sampler2D ATexture;
uniform sampler2D BTexture;
out vec4 fragColor;
void main() {
vec4 accum = texelFetch(ATexture, ivec2(gl_FragCoord.xy), 0);
float r = accum.a;
accum.a = texelFetch(BTexture, ivec2(gl_FragCoord.xy), 0).r;
fragColor = vec4(accum.rgb / clamp(accum.a, 1e-4, 5e4), r);
}
`;
const checkerProgramInfo = twgl.createProgramInfo(gl, [vs, checkerFS]);
const transparentProgramInfo = twgl.createProgramInfo(gl, [vs, transparentFS]);
const compositeProgramInfo = twgl.createProgramInfo(gl, [vs, compositeFS]);
const bufferInfo = twgl.primitives.createXYQuadBufferInfo(gl);
const fbi = twgl.createFramebufferInfo(
gl,
[
{ internalFormat: gl.RGBA32F, minMag: gl.NEAREST },
{ internalFormat: gl.R32F, minMag: gl.NEAREST },
]);
function render(time) {
time *= 0.001;
twgl.setBuffersAndAttributes(gl, transparentProgramInfo, bufferInfo);
// drawOpaqueSurfaces();
gl.useProgram(checkerProgramInfo.program);
gl.disable(gl.BLEND);
twgl.setUniforms(checkerProgramInfo, {
color1: [.5, .5, .5, 1],
color2: [.7, .7, .7, 1],
u_matrix: m4.identity(),
});
twgl.drawBufferInfo(gl, bufferInfo);
twgl.bindFramebufferInfo(gl, fbi);
gl.drawBuffers([gl.COLOR_ATTACHMENT0, gl.COLOR_ATTACHMENT1]);
gl.clearBufferfv(gl.COLOR, 0, new Float32Array([0, 0, 0, 1]));
gl.clearBufferfv(gl.COLOR, 1, new Float32Array([1, 1, 1, 1]));
gl.depthMask(false);
gl.enable(gl.BLEND);
gl.blendFuncSeparate(gl.ONE, gl.ONE, gl.ZERO, gl.ONE_MINUS_SRC_ALPHA);
gl.useProgram(transparentProgramInfo.program);
// drawTransparentSurfaces();
const quads = [
[ .4, 0, 0, .4],
[ .4, .4, 0, .4],
[ 0, .4, 0, .4],
[ 0, .4, .4, .4],
[ 0, .0, .4, .4],
[ .4, .0, .4, .4],
];
quads.forEach((color, ndx) => {
const u = ndx / (quads.length - 1);
// change the order every second
const v = ((ndx + time | 0) % quads.length) / (quads.length - 1);
const xy = (u * 2 - 1) * .25;
const z = (v * 2 - 1) * .25;
let mat = m4.identity();
mat = m4.translate(mat, [xy, xy, z]);
mat = m4.scale(mat, [.3, .3, 1]);
twgl.setUniforms(transparentProgramInfo, {
Ci: color,
u_matrix: mat,
});
twgl.drawBufferInfo(gl, bufferInfo);
});
twgl.bindFramebufferInfo(gl, null);
gl.drawBuffers([gl.BACK]);
gl.blendFunc(gl.ONE_MINUS_SRC_ALPHA, gl.SRC_ALPHA);
gl.useProgram(compositeProgramInfo.program);
twgl.setUniforms(compositeProgramInfo, {
ATexture: fbi.attachments[0],
BTexture: fbi.attachments[1],
u_matrix: m4.identity(),
});
twgl.drawBufferInfo(gl, bufferInfo);
/* only needed if {alpha: false} not passed into getContext
gl.colorMask(false, false, false, true);
gl.clearColor(1, 1, 1, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.colorMask(true, true, true, true);
*/
requestAnimationFrame(render);
}
requestAnimationFrame(render);
}
main();
<canvas></canvas>
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
A few notes:
- It's using WebGL2, but it should work in WebGL1; you would have to change the shaders to use GLSL ES 1.0.
- It's using floating point textures. The paper mentions you can use half-float textures as well. Note that rendering to half-float and float textures is an optional feature even in WebGL2. I believe most mobile hardware can render to half but not to float.
- It's using weight equation 10 from the paper. There are 4 weight equations in the paper: 7, 8, 9 and 10. To do 7, 8 or 9 you would need to pass the view-space z from the vertex shader to the fragment shader (see the sketch after these notes).
- It switches the drawing order every second.
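A minimal sketch of that view-space z plumbing (my adaptation; it is not part of the snippet above):
// Vertex shader additionally outputs view-space z for weight eqs 7/8/9:
const vsWithViewZ = `
#version 300 es
layout(location=0) in vec4 position;
uniform mat4 u_modelView;
uniform mat4 u_projection;
out float v_viewZ;                 // view-space z, negative in front of the camera
void main() {
  vec4 viewPos = u_modelView * position;
  v_viewZ = viewPos.z;
  gl_Position = u_projection * viewPos;
}
`;
// The fragment shader's w(z, a) then takes abs(v_viewZ) (distance from the
// camera) instead of gl_FragCoord.z.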
The code itself is fairly straightforward.
It creates 3 shaders. One draws a checkerboard, so that we have something opaque to see the transparent stuff drawn against. One is the shader for the transparent objects. The last is the shader that composites the transparent stuff into the scene.
Next it makes 2 textures: a floating point RGBA32F texture and a floating point R32F texture (red channel only). Those are attached to a framebuffer. (That's all done in the single function twgl.createFramebufferInfo.) By default that function makes the textures the same size as the canvas.
We make a quad that goes from -1 to +1.
We use that quad to draw the checkerboard into the canvas.
Then we turn on blending, set up the blend functions as the paper describes, switch to rendering into our framebuffer, and clear that framebuffer. Note that attachments 0 and 1 are cleared to 0,0,0,1 and 1 respectively. This is the version for when we don't have a separate blend function per draw buffer. If you switch to the version that can use a separate blend function per draw buffer, then you need to clear to different values and use a different shader (see the paper).
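Spelled out, with gl.blendFuncSeparate(gl.ONE, gl.ONE, gl.ZERO, gl.ONE_MINUS_SRC_ALPHA) each transparent fragment updates the two attachments like this (my reading of the code above):
// Attachment 0 ("accumulate"), cleared to (0, 0, 0, 1):
//   rgb: dst.rgb + Ci.rgb * w    (ONE, ONE)                  -> weighted color sum
//   a:   dst.a * (1.0 - Ci.a)    (ZERO, ONE_MINUS_SRC_ALPHA) -> product of (1 - ai)
// Attachment 1 ("revealage"), cleared to 1:
//   r:   dst.r + Ci.a * w        (ONE, ONE)                  -> weight sum on top of the cleared 1
// The composite pass divides the color sum by the weight sum and uses the
// (1 - ai) product as the source alpha for the final canvas blend.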
Using our transparency shader, we draw 6 rectangles with that same quad, each in a solid color. I just used solid colors to keep it simple. Each is at a different Z, and the Zs change every second, just to see the result change with Z.
In the shader, Ci is the input color. Per the paper it is expected to be a premultiplied alpha color. fragData[0] is the "accumulate" texture and fragData[1] is the "revealage" texture, which is only one channel: red. The w function represents equation 10 from the paper.
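If your source colors use straight (non-premultiplied) alpha, the conversion to the form the paper expects is trivial (illustrative GLSL):
// straight alpha -> premultiplied alpha (what the paper's Ci expects)
vec4 premultiply(vec4 c) {
  return vec4(c.rgb * c.a, c.a);
}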
After drawing all 6 quads, we switch back to rendering to the canvas and use the compositing shader to composite the transparency result with the non-transparent canvas contents.
Here's an example with some geometry in it. Differences:
- It uses equation (7) from the paper instead of equation (10).
- To do proper z-buffering, the depth buffer needs to be shared between the opaque and transparent passes, so there are 2 framebuffers: one with RGBA8 + depth, the other with RGBA32F + R32F + depth. The depth buffer itself is shared.
- The transparency renderer does simple lighting and then uses the result as the Ci value from the paper.
- After compositing the transparency into the opaque framebuffer, we still need to copy the opaque buffer into the canvas to see the result.
function main() {
const m4 = twgl.m4;
const v3 = twgl.v3;
const gl = document.querySelector('canvas').getContext('webgl2', {alpha: false});
if (!gl) {
alert('need WebGL2');
return;
}
const ext = gl.getExtension('EXT_color_buffer_float');
if (!ext) {
alert('EXT_color_buffer_float');
return;
}
const vs = `
#version 300 es
layout(location=0) in vec4 position;
layout(location=1) in vec3 normal;
uniform mat4 u_projection;
uniform mat4 u_modelView;
out vec4 v_viewPosition;
out vec3 v_normal;
void main() {
gl_Position = u_projection * u_modelView * position;
v_viewPosition = u_modelView * position;
v_normal = (u_modelView * vec4(normal, 0)).xyz;
}
`;
const checkerFS = `
#version 300 es
precision highp float;
uniform vec4 color1;
uniform vec4 color2;
out vec4 fragColor;
void main() {
ivec2 grid = ivec2(gl_FragCoord.xy) / 32;
fragColor = mix(color1, color2, float((grid.x + grid.y) % 2));
}
`;
const opaqueFS = `
#version 300 es
precision highp float;
in vec4 v_viewPosition;
in vec3 v_normal;
uniform vec4 u_color;
uniform vec3 u_lightDirection;
out vec4 fragColor;
void main() {
float light = abs(dot(normalize(v_normal), u_lightDirection));
fragColor = vec4(u_color.rgb * light, u_color.a);
}
`;
const transparentFS = `
#version 300 es
precision highp float;
uniform vec4 u_color;
uniform vec3 u_lightDirection;
in vec4 v_viewPosition;
in vec3 v_normal;
out vec4 fragData[2];
// eq (7)
float w(float z, float a) {
return a * max(
pow(10.0, -2.0),
min(
3.0 * pow(10.0, 3.0),
10.0 /
(pow(10.0, -5.0) +
pow(abs(z) / 5.0, 2.0) +
pow(abs(z) / 200.0, 6.0)
)
)
);
}
void main() {
float light = abs(dot(normalize(v_normal), u_lightDirection));
vec4 Ci = vec4(u_color.rgb * light, u_color.a);
float ai = Ci.a;
float zi = gl_FragCoord.z;
float wresult = w(zi, ai);
fragData[0] = vec4(Ci.rgb * wresult, ai);
fragData[1].r = ai * wresult;
}
`;
const compositeFS = `
#version 300 es
precision highp float;
uniform sampler2D ATexture;
uniform sampler2D BTexture;
out vec4 fragColor;
void main() {
vec4 accum = texelFetch(ATexture, ivec2(gl_FragCoord.xy), 0);
float r = accum.a;
accum.a = texelFetch(BTexture, ivec2(gl_FragCoord.xy), 0).r;
fragColor = vec4(accum.rgb / clamp(accum.a, 1e-4, 5e4), r);
}
`;
const blitFS = `
#version 300 es
precision highp float;
uniform sampler2D u_texture;
out vec4 fragColor;
void main() {
fragColor = texelFetch(u_texture, ivec2(gl_FragCoord.xy), 0);
}
`;
const checkerProgramInfo = twgl.createProgramInfo(gl, [vs, checkerFS]);
const opaqueProgramInfo = twgl.createProgramInfo(gl, [vs, opaqueFS]);
const transparentProgramInfo = twgl.createProgramInfo(gl, [vs, transparentFS]);
const compositeProgramInfo = twgl.createProgramInfo(gl, [vs, compositeFS]);
const blitProgramInfo = twgl.createProgramInfo(gl, [vs, blitFS]);
const xyQuadVertexArrayInfo = makeVAO(checkerProgramInfo, twgl.primitives.createXYQuadBufferInfo(gl));
const sphereVertexArrayInfo = makeVAO(transparentProgramInfo, twgl.primitives.createSphereBufferInfo(gl, 1, 16, 12));
const cubeVertexArrayInfo = makeVAO(opaqueProgramInfo, twgl.primitives.createCubeBufferInfo(gl, 1, 1));
function makeVAO(programInfo, bufferInfo) {
return twgl.createVertexArrayInfo(gl, programInfo, bufferInfo);
}
// In order to do proper zbuffering we need to share
// the depth buffer
const opaqueAttachments = [
{ internalFormat: gl.RGBA8, minMag: gl.NEAREST },
{ format: gl.DEPTH_COMPONENT16, minMag: gl.NEAREST },
];
const opaqueFBI = twgl.createFramebufferInfo(gl, opaqueAttachments);
const transparentAttachments = [
{ internalFormat: gl.RGBA32F, minMag: gl.NEAREST },
{ internalFormat: gl.R32F, minMag: gl.NEAREST },
{ format: gl.DEPTH_COMPONENT16, minMag: gl.NEAREST, attachment: opaqueFBI.attachments[1] },
];
const transparentFBI = twgl.createFramebufferInfo(gl, transparentAttachments);
function render(time) {
time *= 0.001;
if (twgl.resizeCanvasToDisplaySize(gl.canvas)) {
// if the canvas is resized also resize the framebuffer
// attachments (the depth buffer will be resized twice
// but I'm too lazy to fix it)
twgl.resizeFramebufferInfo(gl, opaqueFBI, opaqueAttachments);
twgl.resizeFramebufferInfo(gl, transparentFBI, transparentAttachments);
}
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const fov = 45 * Math.PI / 180;
const zNear = 0.1;
const zFar = 500;
const projection = m4.perspective(fov, aspect, zNear, zFar);
const eye = [0, 0, -5];
const target = [0, 0, 0];
const up = [0, 1, 0];
const camera = m4.lookAt(eye, target, up);
const view = m4.inverse(camera);
const lightDirection = v3.normalize([1, 3, 5]);
twgl.bindFramebufferInfo(gl, opaqueFBI);
gl.drawBuffers([gl.COLOR_ATTACHMENT0]);
gl.depthMask(true);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.bindVertexArray(xyQuadVertexArrayInfo.vertexArrayObject);
// drawOpaqueSurfaces();
// draw checkerboard
gl.useProgram(checkerProgramInfo.program);
gl.disable(gl.DEPTH_TEST);
gl.disable(gl.BLEND);
twgl.setUniforms(checkerProgramInfo, {
color1: [.5, .5, .5, 1],
color2: [.7, .7, .7, 1],
u_projection: m4.identity(),
u_modelView: m4.identity(),
});
twgl.drawBufferInfo(gl, xyQuadVertexArrayInfo);
// draw a cube with depth buffer
gl.enable(gl.DEPTH_TEST);
{
gl.useProgram(opaqueProgramInfo.program);
gl.bindVertexArray(cubeVertexArrayInfo.vertexArrayObject);
let mat = view;
mat = m4.rotateX(mat, time * .1);
mat = m4.rotateY(mat, time * .2);
mat = m4.scale(mat, [1.5, 1.5, 1.5]);
twgl.setUniforms(opaqueProgramInfo, {
u_color: [1, .5, .2, 1],
u_lightDirection: lightDirection,
u_projection: projection,
u_modelView: mat,
});
twgl.drawBufferInfo(gl, cubeVertexArrayInfo);
}
twgl.bindFramebufferInfo(gl, transparentFBI);
gl.drawBuffers([gl.COLOR_ATTACHMENT0, gl.COLOR_ATTACHMENT1]);
// these values change if using separate blend functions
// per attachment (something WebGL2 does not support)
gl.clearBufferfv(gl.COLOR, 0, new Float32Array([0, 0, 0, 1]));
gl.clearBufferfv(gl.COLOR, 1, new Float32Array([1, 1, 1, 1]));
gl.depthMask(false); // don't write to depth buffer (but still testing)
gl.enable(gl.BLEND);
// this changes if using separate blend functions per attachment
gl.blendFuncSeparate(gl.ONE, gl.ONE, gl.ZERO, gl.ONE_MINUS_SRC_ALPHA);
gl.useProgram(transparentProgramInfo.program);
gl.bindVertexArray(sphereVertexArrayInfo.vertexArrayObject);
// drawTransparentSurfaces();
const spheres = [
[ .4, 0, 0, .4],
[ .4, .4, 0, .4],
[ 0, .4, 0, .4],
[ 0, .4, .4, .4],
[ 0, .0, .4, .4],
[ .4, .0, .4, .4],
];
spheres.forEach((color, ndx) => {
const u = ndx + 2;
let mat = view;
mat = m4.rotateX(mat, time * u * .1);
mat = m4.rotateY(mat, time * u * .2);
mat = m4.translate(mat, [0, 0, 1 + ndx * .1]);
twgl.setUniforms(transparentProgramInfo, {
u_color: color,
u_lightDirection: lightDirection,
u_projection: projection,
u_modelView: mat,
});
twgl.drawBufferInfo(gl, sphereVertexArrayInfo);
});
// composite transparent results with opaque
twgl.bindFramebufferInfo(gl, opaqueFBI);
gl.drawBuffers([gl.COLOR_ATTACHMENT0]);
gl.disable(gl.DEPTH_TEST);
gl.blendFunc(gl.ONE_MINUS_SRC_ALPHA, gl.SRC_ALPHA);
gl.useProgram(compositeProgramInfo.program);
gl.bindVertexArray(xyQuadVertexArrayInfo.vertexArrayObject);
twgl.setUniforms(compositeProgramInfo, {
ATexture: transparentFBI.attachments[0],
BTexture: transparentFBI.attachments[1],
u_projection: m4.identity(),
u_modelView: m4.identity(),
});
twgl.drawBufferInfo(gl, xyQuadVertexArrayInfo);
/* only needed if {alpha: false} not passed into getContext
gl.colorMask(false, false, false, true);
gl.clearColor(1, 1, 1, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.colorMask(true, true, true, true);
*/
// draw opaque color buffer into canvas
// could probably use gl.blitFramebuffer
gl.disable(gl.BLEND);
twgl.bindFramebufferInfo(gl, null);
gl.useProgram(blitProgramInfo.program);
gl.bindVertexArray(xyQuadVertexArrayInfo.vertexArrayObject);
twgl.setUniforms(blitProgramInfo, {
u_texture: opaqueFBI.attachments[0],
u_projection: m4.identity(),
u_modelView: m4.identity(),
});
twgl.drawBufferInfo(gl, xyQuadVertexArrayInfo);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
}
main();
body { margin: 0; }
canvas { width: 100vw; height: 100vh; display: block; }
<canvas></canvas>
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
It occurs to me that rather than using standard OpenGL blending for the last 2 steps (composite followed by blit), we could change the composite shader so that it takes 3 textures (ATexture, BTexture, opaqueTexture) and does the blending in the shader, outputting directly to the canvas. That would be faster.
function main() {
const m4 = twgl.m4;
const v3 = twgl.v3;
const gl = document.querySelector('canvas').getContext('webgl2', {alpha: false});
if (!gl) {
alert('need WebGL2');
return;
}
const ext = gl.getExtension('EXT_color_buffer_float');
if (!ext) {
alert('EXT_color_buffer_float');
return;
}
const vs = `
#version 300 es
layout(location=0) in vec4 position;
layout(location=1) in vec3 normal;
uniform mat4 u_projection;
uniform mat4 u_modelView;
out vec4 v_viewPosition;
out vec3 v_normal;
void main() {
gl_Position = u_projection * u_modelView * position;
v_viewPosition = u_modelView * position;
v_normal = (u_modelView * vec4(normal, 0)).xyz;
}
`;
const checkerFS = `
#version 300 es
precision highp float;
uniform vec4 color1;
uniform vec4 color2;
out vec4 fragColor;
void main() {
ivec2 grid = ivec2(gl_FragCoord.xy) / 32;
fragColor = mix(color1, color2, float((grid.x + grid.y) % 2));
}
`;
const opaqueFS = `
#version 300 es
precision highp float;
in vec4 v_viewPosition;
in vec3 v_normal;
uniform vec4 u_color;
uniform vec3 u_lightDirection;
out vec4 fragColor;
void main() {
float light = abs(dot(normalize(v_normal), u_lightDirection));
fragColor = vec4(u_color.rgb * light, u_color.a);
}
`;
const transparentFS = `
#version 300 es
precision highp float;
uniform vec4 u_color;
uniform vec3 u_lightDirection;
in vec4 v_viewPosition;
in vec3 v_normal;
out vec4 fragData[2];
// eq (7)
float w(float z, float a) {
return a * max(
pow(10.0, -2.0),
min(
3.0 * pow(10.0, 3.0),
10.0 /
(pow(10.0, -5.0) +
pow(abs(z) / 5.0, 2.0) +
pow(abs(z) / 200.0, 6.0)
)
)
);
}
void main() {
float light = abs(dot(normalize(v_normal), u_lightDirection));
vec4 Ci = vec4(u_color.rgb * light, u_color.a);
float ai = Ci.a;
float zi = gl_FragCoord.z;
float wresult = w(zi, ai);
fragData[0] = vec4(Ci.rgb * wresult, ai);
fragData[1].r = ai * wresult;
}
`;
const compositeFS = `
#version 300 es
precision highp float;
uniform sampler2D ATexture;
uniform sampler2D BTexture;
uniform sampler2D opaqueTexture;
out vec4 fragColor;
void main() {
vec4 accum = texelFetch(ATexture, ivec2(gl_FragCoord.xy), 0);
float r = accum.a;
accum.a = texelFetch(BTexture, ivec2(gl_FragCoord.xy), 0).r;
vec4 transparentColor = vec4(accum.rgb / clamp(accum.a, 1e-4, 5e4), r);
vec4 opaqueColor = texelFetch(opaqueTexture, ivec2(gl_FragCoord.xy), 0);
// gl.blendFunc(gl.ONE_MINUS_SRC_ALPHA, gl.SRC_ALPHA);
fragColor = transparentColor * (1. - r) + opaqueColor * r;
}
`;
const checkerProgramInfo = twgl.createProgramInfo(gl, [vs, checkerFS]);
const opaqueProgramInfo = twgl.createProgramInfo(gl, [vs, opaqueFS]);
const transparentProgramInfo = twgl.createProgramInfo(gl, [vs, transparentFS]);
const compositeProgramInfo = twgl.createProgramInfo(gl, [vs, compositeFS]);
const xyQuadVertexArrayInfo = makeVAO(checkerProgramInfo, twgl.primitives.createXYQuadBufferInfo(gl));
const sphereVertexArrayInfo = makeVAO(transparentProgramInfo, twgl.primitives.createSphereBufferInfo(gl, 1, 16, 12));
const cubeVertexArrayInfo = makeVAO(opaqueProgramInfo, twgl.primitives.createCubeBufferInfo(gl, 1, 1));
function makeVAO(programInfo, bufferInfo) {
return twgl.createVertexArrayInfo(gl, programInfo, bufferInfo);
}
// In order to do proper zbuffering we need to share
// the depth buffer
const opaqueAttachments = [
{ internalFormat: gl.RGBA8, minMag: gl.NEAREST },
{ format: gl.DEPTH_COMPONENT16, minMag: gl.NEAREST },
];
const opaqueFBI = twgl.createFramebufferInfo(gl, opaqueAttachments);
const transparentAttachments = [
{ internalFormat: gl.RGBA32F, minMag: gl.NEAREST },
{ internalFormat: gl.R32F, minMag: gl.NEAREST },
{ format: gl.DEPTH_COMPONENT16, minMag: gl.NEAREST, attachment: opaqueFBI.attachments[1] },
];
const transparentFBI = twgl.createFramebufferInfo(gl, transparentAttachments);
function render(time) {
time *= 0.001;
if (twgl.resizeCanvasToDisplaySize(gl.canvas)) {
// if the canvas is resized also resize the framebuffer
// attachments (the depth buffer will be resized twice
// but I'm too lazy to fix it)
twgl.resizeFramebufferInfo(gl, opaqueFBI, opaqueAttachments);
twgl.resizeFramebufferInfo(gl, transparentFBI, transparentAttachments);
}
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const fov = 45 * Math.PI / 180;
const zNear = 0.1;
const zFar = 500;
const projection = m4.perspective(fov, aspect, zNear, zFar);
const eye = [0, 0, -5];
const target = [0, 0, 0];
const up = [0, 1, 0];
const camera = m4.lookAt(eye, target, up);
const view = m4.inverse(camera);
const lightDirection = v3.normalize([1, 3, 5]);
twgl.bindFramebufferInfo(gl, opaqueFBI);
gl.drawBuffers([gl.COLOR_ATTACHMENT0]);
gl.depthMask(true);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.bindVertexArray(xyQuadVertexArrayInfo.vertexArrayObject);
// drawOpaqueSurfaces();
// draw checkerboard
gl.useProgram(checkerProgramInfo.program);
gl.disable(gl.DEPTH_TEST);
gl.disable(gl.BLEND);
twgl.setUniforms(checkerProgramInfo, {
color1: [.5, .5, .5, 1],
color2: [.7, .7, .7, 1],
u_projection: m4.identity(),
u_modelView: m4.identity(),
});
twgl.drawBufferInfo(gl, xyQuadVertexArrayInfo);
// draw a cube with depth buffer
gl.enable(gl.DEPTH_TEST);
{
gl.useProgram(opaqueProgramInfo.program);
gl.bindVertexArray(cubeVertexArrayInfo.vertexArrayObject);
let mat = view;
mat = m4.rotateX(mat, time * .1);
mat = m4.rotateY(mat, time * .2);
mat = m4.scale(mat, [1.5, 1.5, 1.5]);
twgl.setUniforms(opaqueProgramInfo, {
u_color: [1, .5, .2, 1],
u_lightDirection: lightDirection,
u_projection: projection,
u_modelView: mat,
});
twgl.drawBufferInfo(gl, cubeVertexArrayInfo);
}
twgl.bindFramebufferInfo(gl, transparentFBI);
gl.drawBuffers([gl.COLOR_ATTACHMENT0, gl.COLOR_ATTACHMENT1]);
// these values change if using separate blend functions
// per attachment (something WebGL2 does not support)
gl.clearBufferfv(gl.COLOR, 0, new Float32Array([0, 0, 0, 1]));
gl.clearBufferfv(gl.COLOR, 1, new Float32Array([1, 1, 1, 1]));
gl.depthMask(false); // don't write to depth buffer (but still testing)
gl.enable(gl.BLEND);
// this changes if using separate blend functions per attachment
gl.blendFuncSeparate(gl.ONE, gl.ONE, gl.ZERO, gl.ONE_MINUS_SRC_ALPHA);
gl.useProgram(transparentProgramInfo.program);
gl.bindVertexArray(sphereVertexArrayInfo.vertexArrayObject);
// drawTransparentSurfaces();
const spheres = [
[ .4, 0, 0, .4],
[ .4, .4, 0, .4],
[ 0, .4, 0, .4],
[ 0, .4, .4, .4],
[ 0, .0, .4, .4],
[ .4, .0, .4, .4],
];
spheres.forEach((color, ndx) => {
const u = ndx + 2;
let mat = view;
mat = m4.rotateX(mat, time * u * .1);
mat = m4.rotateY(mat, time * u * .2);
mat = m4.translate(mat, [0, 0, 1 + ndx * .1]);
twgl.setUniforms(transparentProgramInfo, {
u_color: color,
u_lightDirection: lightDirection,
u_projection: projection,
u_modelView: mat,
});
twgl.drawBufferInfo(gl, sphereVertexArrayInfo);
});
// composite transparent results with opaque
twgl.bindFramebufferInfo(gl, null);
gl.disable(gl.DEPTH_TEST);
gl.disable(gl.BLEND);
gl.useProgram(compositeProgramInfo.program);
gl.bindVertexArray(xyQuadVertexArrayInfo.vertexArrayObject);
twgl.setUniforms(compositeProgramInfo, {
ATexture: transparentFBI.attachments[0],
BTexture: transparentFBI.attachments[1],
opaqueTexture: opaqueFBI.attachments[0],
u_projection: m4.identity(),
u_modelView: m4.identity(),
});
twgl.drawBufferInfo(gl, xyQuadVertexArrayInfo);
/* only needed if {alpha: false} not passed into getContext
gl.colorMask(false, false, false, true);
gl.clearColor(1, 1, 1, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.colorMask(true, true, true, true);
*/
requestAnimationFrame(render);
}
requestAnimationFrame(render);
}
main();
body { margin: 0; }
canvas { width: 100vw; height: 100vh; display: block; }
<canvas></canvas>
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>