360视频播放器世界坐标
360 video player world coordinate
我使用 three.js 创建了一个 360 视频播放器。每当我单击鼠标时,我都会使用光线投射来查找 3D 世界坐标,并且我成功地获得了这些坐标。
有没有办法将这些 3D 世界坐标映射到展开的(全景、等矩形)视频坐标。我想到了将 3D 世界坐标转换为屏幕坐标,但是那些坐标映射并不对应于实际的 3D 点。任何帮助将不胜感激。
这是我使用的代码:
<body>
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/r73/three.min.js"></script>
<script src="https://s3-us-west-2.amazonaws.com/s.cdpn.io/211120/orbitControls.js"></script>
<a href="#" id="testButton">TEST</a>
<!-- <a href="#" id="testButton2">TEST2</a> -->
<script>
// --- Renderer / scene bootstrap for the 360° video sphere ---
var width = window.innerWidth;
var height = window.innerHeight;

var renderer = new THREE.WebGLRenderer({ antialias: true });
// NOTE(review): THREE.Projector is deprecated as of r73; it is kept only
// because the click handler below may still reference it. The modern path
// is Raycaster.setFromCamera / Vector3.unproject(camera).
var projector = new THREE.Projector();
renderer.setSize(width, height);
document.body.appendChild(renderer.domElement);

// Hidden <video> element that feeds the sphere texture every frame.
var video = document.createElement('video');
video.autoplay = true;
video.src = 'https://streams.kolor.com/streams/833ec36d-b115-43a2-bbf1-aaca49046bab/source.02-720p_HD.mp4';
video.crossOrigin = '';

// BUG FIX: videoTexture was an implicit global (missing `var`).
var videoTexture = new THREE.Texture(video);
videoTexture.minFilter = THREE.LinearFilter;
videoTexture.magFilter = THREE.LinearFilter;
videoTexture.format = THREE.RGBFormat;

var scene = new THREE.Scene();

// Equirectangular projection sphere. The camera sits inside it, so only
// the back faces are rendered. (Renamed from the misleading `cube`.)
var sphereGeometry = new THREE.SphereGeometry(500, 60, 40);
var sphereMat = new THREE.MeshBasicMaterial({ map: videoTexture });
sphereMat.side = THREE.BackSide;
var sphere = new THREE.Mesh(sphereGeometry, sphereMat);
scene.add(sphere);

var camera = new THREE.PerspectiveCamera(45, width / height, 0.1, 10000);
camera.position.set(0, 0, 50);
scene.add(camera);

// Orbit controls: the target is offset slightly along x so the controls
// have a non-degenerate look direction from the camera position.
var controls = new THREE.OrbitControls( camera);
controls.rotateUp(Math.PI / 4);
controls.target.set(
  camera.position.x + 0.1,
  camera.position.y,
  camera.position.z
);
// Click handler: raycasts from the mouse into the scene and logs the 3D
// hit point, the hit's UV coordinate on the sphere (which IS the position
// in the flat equirectangular video), and the screen-space projection.
function onMouseMove( event ) {
  // Mouse position in normalized device coordinates (-1 to +1).
  var mouse = new THREE.Vector2(
    ( event.clientX / window.innerWidth ) * 2 - 1,
    - ( event.clientY / window.innerHeight ) * 2 + 1
  );

  // setFromCamera replaces the deprecated Projector.unprojectVector path.
  var raycaster = new THREE.Raycaster();
  raycaster.setFromCamera( mouse, camera );

  var intersects = raycaster.intersectObjects( scene.children, true );
  // BUG FIX: guard against an empty hit list before dereferencing [0].
  if ( intersects.length === 0 ) return;

  var hit = intersects[0];
  console.log( hit.point );

  // The UV coordinate maps directly onto the flat video:
  // (0, 0) is the top-left corner, (1, 1) the bottom-right.
  if ( hit.uv ) console.log( 'video uv:', hit.uv );

  // Project the world-space hit point back to pixel coordinates.
  // (clone() because Vector3.project mutates in place.)
  var widthHalf = window.innerWidth / 2;
  var heightHalf = window.innerHeight / 2;
  var screenPos = hit.point.clone().project( camera );
  screenPos.x = ( screenPos.x * widthHalf ) + widthHalf;
  screenPos.y = - ( screenPos.y * heightHalf ) + heightHalf;
  console.log( screenPos );
}
// Per-frame loop: refresh the video texture once the element has buffered
// enough data, advance the controls, then draw and reschedule.
function render() {
  var videoReady = video.readyState === video.HAVE_ENOUGH_DATA;
  if ( videoReady ) {
    videoTexture.needsUpdate = true;
  }
  controls.update();
  renderer.render( scene, camera );
  requestAnimationFrame( render );
}
render();
// Despite the handler's name, it is wired to mousedown (i.e. clicks).
window.addEventListener( 'mousedown', onMouseMove, false );
</script>
</body>
</html>
如果我正确理解你的问题,你甚至不需要 3d 坐标来获取平面视频中的坐标:你可以只使用 raycaster 报告的球体交点的 UV 坐标。
坐标 (0, 0) 是视频的左上角,坐标 (1, 1) 是视频的右下角(该 UV 坐标可以通过 `intersects[0].uv` 直接获取)。
我使用 three.js 创建了一个 360 视频播放器。每当我单击鼠标时,我都会使用光线投射来查找 3D 世界坐标,并且我成功地获得了这些坐标。
有没有办法将这些 3D 世界坐标映射到展开的(全景、等矩形)视频坐标。我想到了将 3D 世界坐标转换为屏幕坐标,但是那些坐标映射并不对应于实际的 3D 点。任何帮助将不胜感激。
这是我使用的代码:
<body>
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/r73/three.min.js"></script>
<script src="https://s3-us-west-2.amazonaws.com/s.cdpn.io/211120/orbitControls.js"></script>
<a href="#" id="testButton">TEST</a>
<!-- <a href="#" id="testButton2">TEST2</a> -->
<script>
// --- Renderer / scene bootstrap for the 360° video sphere ---
var width = window.innerWidth;
var height = window.innerHeight;

var renderer = new THREE.WebGLRenderer({ antialias: true });
// NOTE(review): THREE.Projector is deprecated as of r73; it is kept only
// because the click handler below may still reference it. The modern path
// is Raycaster.setFromCamera / Vector3.unproject(camera).
var projector = new THREE.Projector();
renderer.setSize(width, height);
document.body.appendChild(renderer.domElement);

// Hidden <video> element that feeds the sphere texture every frame.
var video = document.createElement('video');
video.autoplay = true;
video.src = 'https://streams.kolor.com/streams/833ec36d-b115-43a2-bbf1-aaca49046bab/source.02-720p_HD.mp4';
video.crossOrigin = '';

// BUG FIX: videoTexture was an implicit global (missing `var`).
var videoTexture = new THREE.Texture(video);
videoTexture.minFilter = THREE.LinearFilter;
videoTexture.magFilter = THREE.LinearFilter;
videoTexture.format = THREE.RGBFormat;

var scene = new THREE.Scene();

// Equirectangular projection sphere. The camera sits inside it, so only
// the back faces are rendered. (Renamed from the misleading `cube`.)
var sphereGeometry = new THREE.SphereGeometry(500, 60, 40);
var sphereMat = new THREE.MeshBasicMaterial({ map: videoTexture });
sphereMat.side = THREE.BackSide;
var sphere = new THREE.Mesh(sphereGeometry, sphereMat);
scene.add(sphere);

var camera = new THREE.PerspectiveCamera(45, width / height, 0.1, 10000);
camera.position.set(0, 0, 50);
scene.add(camera);

// Orbit controls: the target is offset slightly along x so the controls
// have a non-degenerate look direction from the camera position.
var controls = new THREE.OrbitControls( camera);
controls.rotateUp(Math.PI / 4);
controls.target.set(
  camera.position.x + 0.1,
  camera.position.y,
  camera.position.z
);
// Click handler: raycasts from the mouse into the scene and logs the 3D
// hit point, the hit's UV coordinate on the sphere (which IS the position
// in the flat equirectangular video), and the screen-space projection.
function onMouseMove( event ) {
  // Mouse position in normalized device coordinates (-1 to +1).
  var mouse = new THREE.Vector2(
    ( event.clientX / window.innerWidth ) * 2 - 1,
    - ( event.clientY / window.innerHeight ) * 2 + 1
  );

  // setFromCamera replaces the deprecated Projector.unprojectVector path.
  var raycaster = new THREE.Raycaster();
  raycaster.setFromCamera( mouse, camera );

  var intersects = raycaster.intersectObjects( scene.children, true );
  // BUG FIX: guard against an empty hit list before dereferencing [0].
  if ( intersects.length === 0 ) return;

  var hit = intersects[0];
  console.log( hit.point );

  // The UV coordinate maps directly onto the flat video:
  // (0, 0) is the top-left corner, (1, 1) the bottom-right.
  if ( hit.uv ) console.log( 'video uv:', hit.uv );

  // Project the world-space hit point back to pixel coordinates.
  // (clone() because Vector3.project mutates in place.)
  var widthHalf = window.innerWidth / 2;
  var heightHalf = window.innerHeight / 2;
  var screenPos = hit.point.clone().project( camera );
  screenPos.x = ( screenPos.x * widthHalf ) + widthHalf;
  screenPos.y = - ( screenPos.y * heightHalf ) + heightHalf;
  console.log( screenPos );
}
// Per-frame loop: refresh the video texture once the element has buffered
// enough data, advance the controls, then draw and reschedule.
function render() {
  var videoReady = video.readyState === video.HAVE_ENOUGH_DATA;
  if ( videoReady ) {
    videoTexture.needsUpdate = true;
  }
  controls.update();
  renderer.render( scene, camera );
  requestAnimationFrame( render );
}
render();
// Despite the handler's name, it is wired to mousedown (i.e. clicks).
window.addEventListener( 'mousedown', onMouseMove, false );
</script>
</body>
</html>
如果我正确理解你的问题,你甚至不需要 3d 坐标来获取平面视频中的坐标:你可以只使用 raycaster 报告的球体交点的 UV 坐标。
坐标 (0, 0) 是视频的左上角,坐标 (1, 1) 是视频的右下角(该 UV 坐标可以通过 `intersects[0].uv` 直接获取)。