How to read only a single channel (R component) from the WebGL context readPixels() API?
I have converted an RGBA image to grayscale using WebGL. When I read the pixels with gl.readPixels() using the gl.RGBA format, I get each pixel as YYYA, because the RGBA pixel is converted to YYYA and assigned to gl_FragColor. I only want the 1-byte Y component per pixel, not 4 bytes.

I tried reading the pixels with the gl.RED format (instead of gl.RGBA):

gl.readPixels(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight, gl.RED, gl.UNSIGNED_BYTE, pixels);

but on Chrome I get the following error and only zeros back:

WebGL: INVALID_ENUM: readPixels: invalid format

- Is it possible to make gl_FragColor output 1 byte per pixel (LUMINANCE) instead of RGBA, while the input texture stays RGBA?
- If the rendering format cannot be changed, is it possible to read only the first byte of each 4-byte pixel when calling gl.readPixels()?

Notes:

1. I already copy the gl.readPixels() output into another array by stepping over every 4th byte (see the sketch after these notes), but I would like to avoid this copy because it takes extra time.
2. The solution also needs to work on mobile browsers (iOS Safari and Android Chrome).
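For reference, the per-pixel copy mentioned in note 1 looks roughly like the sketch below; rgbaPixels and grayPixels are illustrative names, not taken from the code that follows:

// Illustrative sketch of the copy described in note 1: keep only the
// R (= Y) byte of every RGBA pixel returned by gl.readPixels().
var rgbaPixels = new Uint8Array(srcCanvas.width * srcCanvas.height * 4); // filled by gl.readPixels()
var grayPixels = new Uint8Array(srcCanvas.width * srcCanvas.height);     // 1 byte per pixel
for (var i = 0, j = 0; j < grayPixels.length; i += 4, ++j) {
    grayPixels[j] = rgbaPixels[i]; // copy only the R component
}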
function webGL() {
    var gTexture;
    var gFramebuffer;
    var srcCanvas = null;
    var programs = {};
    var program;
    var pixels;

    this.convertRGBA2Gray = function(inCanvas, inArray) {
        // Y component from YCbCr
        const shaderSourceRGB2GRAY = `
            precision mediump float;
            uniform sampler2D u_image;
            uniform vec2 u_textureSize;
            vec4 scale = vec4(0.299, 0.587, 0.114, 0.0);
            void main() {
                vec4 color = texture2D(u_image, gl_FragCoord.xy / u_textureSize);
                gl_FragColor = vec4(vec3(dot(color,scale)), color.a);
            }`;
        if (srcCanvas === null) {
            console.log('Setting up webgl');
            srcCanvas = inCanvas;
            _initialize(srcCanvas.width, srcCanvas.height);
            program = _createProgram("rgb2grey", shaderSourceRGB2GRAY);
        }
        pixels = inArray;
        _run(program);
    }

    ///////////////////////////////////////
    // Private functions
    var _getWebGLContext = function(canvas) {
        try {
            return (canvas.getContext("webgl", {premultipliedAlpha: false, preserveDrawingBuffer: true}) ||
                    canvas.getContext("experimental-webgl", {premultipliedAlpha: false, preserveDrawingBuffer: true}));
        }
        catch(e) {
            console.log("ERROR: %o", e);
        }
        return null;
    }

    var gl = _getWebGLContext(document.createElement('canvas'));

    var _initialize = function(width, height) {
        var canvas = gl.canvas;
        canvas.width = width;
        canvas.height = height;
        if (this.originalImageTexture) {
            return;
        }
        this.originalImageTexture = gl.createTexture();
        gl.bindTexture(gl.TEXTURE_2D, this.originalImageTexture);
        gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
        gTexture = gl.createTexture();
        gl.bindTexture(gl.TEXTURE_2D, gTexture);
        gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
        gl.texImage2D(
            gl.TEXTURE_2D, 0, gl.RGBA, canvas.width, canvas.height, 0,
            gl.RGBA, gl.UNSIGNED_BYTE, null);
        gFramebuffer = gl.createFramebuffer();
        gl.bindFramebuffer(gl.FRAMEBUFFER, gFramebuffer);
        var positionBuffer = gl.createBuffer();
        gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
        gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
            -1.0,  1.0,
             1.0,  1.0,
             1.0, -1.0,
            -1.0,  1.0,
             1.0, -1.0,
            -1.0, -1.0
        ]), gl.STATIC_DRAW);
        gl.framebufferTexture2D(
            gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, gTexture, 0);
        gl.bindTexture(gl.TEXTURE_2D, this.originalImageTexture);
        gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, srcCanvas);
    }

    var _createProgram = function(name, fragmentSource, vertexSource) {
        shaderProgram = programs[name];
        if (shaderProgram) {
            console.log('Reusing program');
            gl.useProgram(shaderProgram);
            return shaderProgram;
        }
        function createShader(type, source) {
            var shader = gl.createShader(type);
            gl.shaderSource(shader, source);
            gl.compileShader(shader);
            return shader;
        }
        var vertexShader, fragmentShader;
        if (!vertexSource) {
            vertexShader = createShader(gl.VERTEX_SHADER, `attribute vec2 a_position;
                void main() { gl_Position = vec4(a_position, 0.0, 1.0); }`
            );
        } else {
            vertexShader = createShader(gl.VERTEX_SHADER, vertexSource);
        }
        fragmentShader = createShader(gl.FRAGMENT_SHADER, fragmentSource);
        shaderProgram = gl.createProgram();
        gl.attachShader(shaderProgram, vertexShader);
        gl.attachShader(shaderProgram, fragmentShader);
        gl.linkProgram(shaderProgram);
        gl.useProgram(shaderProgram);
        return shaderProgram;
    }

    var _render = function(gl, program) {
        var positionLocation = gl.getAttribLocation(program, "a_position");
        var u_imageLoc = gl.getUniformLocation(program, "u_image");
        var textureSizeLocation = gl.getUniformLocation(program, "u_textureSize");
        gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);
        gl.enableVertexAttribArray(positionLocation);
        var width = gl.canvas.width,
            height = gl.canvas.height;
        gl.bindFramebuffer(gl.FRAMEBUFFER, gFramebuffer);
        gl.uniform2f(textureSizeLocation, width, height);
        gl.uniform1i(u_imageLoc, 0);
        gl.viewport(0, 0, width, height);
        gl.drawArrays(gl.TRIANGLES, 0, 6);
    }

    var _run = function(program) {
        let t0 = performance.now();
        _render(gl, program);
        gl.bindTexture(gl.TEXTURE_2D, gTexture);
        let t1 = performance.now();
        // gl.readPixels(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
        gl.readPixels(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight, gl.RED, gl.UNSIGNED_BYTE, pixels);
        let t2 = performance.now();
        console.log('_render dur = ' + Number((t1-t0).toFixed(3)) + ' ms');
        console.log('_run dur = ' + Number((t2-t0).toFixed(3)) + ' ms');
    }
};
<div>
    <canvas id="source"></canvas>
</div>
<script src="webgl.js" type="text/javascript"></script>
<script>
window.addEventListener('load', function(e) {
    var srcImg = new Image();
    srcImg.crossOrigin = "anonymous";
    srcImg.src = "https://i.picsum.photos/id/17/480/480.jpg";
    srcImg.width = 480;
    srcImg.height = 480;
    srcImg.onload = function() {
        // image has been loaded
        let srcCanvas = document.getElementById("source");
        srcCanvas.width = srcImg.width;
        srcCanvas.height = srcImg.height;
        let ctx = srcCanvas.getContext('2d');
        ctx.drawImage(srcImg, 0, 0, srcImg.width, srcImg.height);
        var webgl = new webGL();
        let pixels = new Uint8Array(srcCanvas.width * srcCanvas.height * 4);
        webgl.convertRGBA2Gray(srcCanvas, pixels);
        var outData = ctx.createImageData(srcCanvas.width, srcCanvas.height);
        console.log('\n');
        for (let k = 0; k < 12; ++k) {
            console.log(pixels[k] + ', ');
        }
        console.log('\n');
        // Luminance plot
        for (let i = 0, j = 0; i < (srcCanvas.width * srcCanvas.height * 4); i += 4, ++j) {
            outData.data[i] = outData.data[i+1] = outData.data[i+2] = pixels[j];
            outData.data[i+3] = 255;
        }
        // RGB plot
        // for ( let i = 0; i < (srcCanvas.width * srcCanvas.height * 4); ++i ) {
        //     outData.data[i] = pixels[i];
        // }
        srcCanvas.getContext('2d').putImageData(outData, 0, 0);
    };
}, true);
</script>
Is it possible to make gl_FragColor output 1 byte per pixel (LUMINANCE) instead of RGBA, while the input texture stays RGBA?
Not portably. The WebGL1 spec only requires gl.RGBA / gl.UNSIGNED_BYTE to be supported for rendering to a texture. All other formats are optional.
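The only portable way to find out whether a particular implementation happens to support a single-channel render target is to try it and check framebuffer completeness. A minimal sketch, assuming an existing WebGL1 context gl (the 480x480 size is just an example):

// Hypothetical feature test: attach a LUMINANCE texture to a framebuffer
// and ask the implementation whether it is actually renderable.
var tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, 480, 480, 0,
              gl.LUMINANCE, gl.UNSIGNED_BYTE, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
var fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
                        gl.TEXTURE_2D, tex, 0);
var renderable =
    gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE;
console.log('LUMINANCE attachment renderable: ' + renderable);

If the status is not FRAMEBUFFER_COMPLETE, the device does not allow rendering to that format and you have to fall back to the RGBA path.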
If the format of the rendering cannot be changed, is it possible to read only the first byte of each 4-byte pixel when calling gl.readPixels()?
No. Section 4.3.1 of the spec says only gl.RGBA / gl.UNSIGNED_BYTE is required to be supported; all other readPixels formats are optional and implementation-dependent. The same applies to WebGL2: even if you create an R8 texture (red only, 8 bits), whether you can read it back as gl.RED / gl.UNSIGNED_BYTE is up to the implementation.
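You can, however, ask the implementation which extra readPixels format/type combination it supports (on top of the mandatory gl.RGBA / gl.UNSIGNED_BYTE) and only take the single-channel read when it is actually advertised. A hedged sketch, assuming a WebGL2 context gl with the target framebuffer already bound (gl.RED does not exist on a WebGL1 context):

// Hypothetical sketch: query the implementation-defined readPixels
// combination and use gl.RED / gl.UNSIGNED_BYTE only if it is offered.
var readFormat = gl.getParameter(gl.IMPLEMENTATION_COLOR_READ_FORMAT);
var readType   = gl.getParameter(gl.IMPLEMENTATION_COLOR_READ_TYPE);
if (readFormat === gl.RED && readType === gl.UNSIGNED_BYTE) {
    // 1 byte per pixel, only on implementations that allow it.
    var gray = new Uint8Array(gl.drawingBufferWidth * gl.drawingBufferHeight);
    gl.readPixels(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight,
                  gl.RED, gl.UNSIGNED_BYTE, gray);
} else {
    // Guaranteed path: read RGBA and keep every 4th byte on the CPU.
    var rgba = new Uint8Array(gl.drawingBufferWidth * gl.drawingBufferHeight * 4);
    gl.readPixels(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight,
                  gl.RGBA, gl.UNSIGNED_BYTE, rgba);
}

On most devices the query will simply report gl.RGBA / gl.UNSIGNED_BYTE, so the CPU-side copy remains the portable fallback.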