Trouble mapping image to a point material buffer geometry Three.JS
I've been working on a project with three.js that creates a points-material object that can then be interacted with. I'm trying to map the pixels of an image onto the vertices of a buffer geometry so that the image is displayed as a set of points (something like a point cloud). The image in question is a map of the Earth (scaled down to 106 x 53 pixels).
I do this by drawing the image onto a canvas, extracting the image data, building colors from that data (based on pixel coordinates), and then setting the color attribute of the geometry (in this case a sphere buffer geometry). My question is: where does the mapping go wrong?
Here is the code that extracts the colors and pushes them into the geometry's color array:
let colors = [];
let color = new THREE.Color();
let positionAttribute = g.attributes.position; // g being the geometry
for (let x = 0; x < img.width; x++) {
  for (let y = 0; y < img.height; y++) {
    let c = earthCanvas.getContext("2d");
    let p = c.getImageData(x, y, 1, 1).data;
    let hex = "#" + ("000000" + rgbToHex(p[0], p[1], p[2])).slice(-6);
    color.set(hex);
    console.log("set");
    colors.push(color.r, color.g, color.b);
  }
}
g.setAttribute("color", new THREE.Float32BufferAttribute(colors, 3));
Which results in this:
Is there any way to make it look like the Earth? Am I simply putting the pixel coordinates in the wrong place?
The code for the geometry itself looks like this:
g = new THREE.SphereBufferGeometry(3, 104, 52);
count = g.attributes.position.count;
console.log(count);
g.center();
let pointShape = new THREE.TextureLoader().load("./models/particle.png");
m = new THREE.PointsMaterial({
  size: pointSize,
  map: pointShape,
  vertexColors: true,
  //blending: THREE.AdditiveBlending,
  depthTest: false,
  opacity: 1
});
The JS for the HTML and the canvas looks like this:
function drawImageSource(source, canvas) {
  img = new Image();
  img.addEventListener("load", function () {
    // The image can be drawn from any source
    canvas
      .getContext("2d")
      .drawImage(
        img,
        0,
        0,
        img.width,
        img.height,
        0,
        0,
        canvas.width,
        canvas.height
      );
  });
  img.crossOrigin = "Anonymous";
  img.setAttribute("src", source);
}
<div id="canvasDiv">
  <canvas id="earthCanvas" width="106" height="53" hidden></canvas>
</div>
Any help would be greatly appreciated, thanks!
The link to the CodeSandbox project is here:
https://codesandbox.io/s/small-hill-mvhgc?file=/src/index.js:5956-6389
(I apologize for the messy code, it's a prototype.)
I would take a different approach: modify the existing PointsMaterial with .onBeforeCompile(), passing the texture in a uniform.
body {
  overflow: hidden;
  margin: 0;
}
<script type="module">
import * as THREE from "https://cdn.skypack.dev/three@0.136.0";
import {OrbitControls} from "https://cdn.skypack.dev/three@0.136.0/examples/jsm/controls/OrbitControls";

let scene = new THREE.Scene();
let camera = new THREE.PerspectiveCamera(60, innerWidth / innerHeight, 1, 1000);
camera.position.set(0, 0, 10);
let renderer = new THREE.WebGLRenderer();
renderer.setSize(innerWidth, innerHeight);
document.body.appendChild(renderer.domElement);

let controls = new OrbitControls(camera, renderer.domElement);

let g = new THREE.SphereGeometry(4, 360, 180);
let m = new THREE.PointsMaterial({
  size: 0.05,
  onBeforeCompile: shader => {
    shader.uniforms.tex = {value: new THREE.TextureLoader().load("https://threejs.org/examples/textures/uv_grid_opengl.jpg")};
    shader.vertexShader = `
      varying vec2 vUv;
      ${shader.vertexShader}
    `.replace(
      `#include <begin_vertex>`,
      `#include <begin_vertex>
        vUv = uv;
      `
    );
    //console.log(shader.vertexShader);
    shader.fragmentShader = `
      uniform sampler2D tex;
      varying vec2 vUv;
      ${shader.fragmentShader}
    `.replace(
      `vec4 diffuseColor = vec4( diffuse, opacity );`,
      `
      vec3 col = texture2D(tex, vUv).rgb;
      col *= diffuse;
      vec4 diffuseColor = vec4( col, opacity );`
    );
    //console.log(shader.fragmentShader);
  }
});
let p = new THREE.Points(g, m);
scene.add(p);

renderer.setAnimationLoop(() => {
  renderer.render(scene, camera);
});
</script>
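To drive this with the question's 106 x 53 earth image instead of the demo UV-grid texture, one option is to wrap the already-drawn canvas in a THREE.CanvasTexture and hand it to the shader as the uniform value. A minimal sketch, assuming the image has finished drawing onto earthCanvas before the material compiles:
// Sketch only: reuse the question's earthCanvas as the sampled texture.
// Assumes the earth image has already been drawn onto earthCanvas.
const earthTexture = new THREE.CanvasTexture(earthCanvas);
earthTexture.magFilter = THREE.NearestFilter; // keep the low-res pixels crisp instead of blurred

// Then, inside onBeforeCompile, use it in place of the demo grid:
shader.uniforms.tex = { value: earthTexture };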
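If you would rather keep the vertex-color approach from the question, the mapping goes wrong because the nested x/y loop walks the image column by column while the sphere's vertices are laid out row by row, and the vertex count (105 x 53 for SphereBufferGeometry(3, 104, 52)) does not match the pixel count (106 x 53). A sketch of one way to line them up, assuming the 106 x 53 image is already drawn on earthCanvas: read the canvas once, then look up one pixel per vertex through the geometry's uv attribute.
// Sketch: one color per vertex, looked up via the sphere's UVs.
// Assumes `g` is the sphere geometry and earthCanvas already contains the 106 x 53 image.
const ctx = earthCanvas.getContext("2d");
const { width, height } = earthCanvas;
const pixels = ctx.getImageData(0, 0, width, height).data; // read the whole canvas once

const uv = g.attributes.uv;
const colors = [];
for (let i = 0; i < uv.count; i++) {
  // uv.y is 1 at the top of the sphere; image row 0 is the top of the picture
  const px = Math.min(width - 1, Math.floor(uv.getX(i) * width));
  const py = Math.min(height - 1, Math.floor((1 - uv.getY(i)) * height));
  const o = (py * width + px) * 4;
  colors.push(pixels[o] / 255, pixels[o + 1] / 255, pixels[o + 2] / 255);
}
g.setAttribute("color", new THREE.Float32BufferAttribute(colors, 3));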