
m3u8 360 virtual reality video with Three js

Is there a way to play a 360 video from an m3u8 file with Three.js?

I found a similar question here, but it has no answer: https://github.com/mrdoob/three.js/issues/8216

https://threejs.org/examples/webgl_video_panorama_equirectangular.html

I used the code from your site (the example above) to play the 360 video. It works fine with a normal mp4 video URL, but when I try to use an m3u8 it fails with error.MEDIA_ERR_SRC_NOT_SUPPORTED:

  The video could not be loaded, either because the server or network failed or because the format is not supported.

Here is the code:

<video id="video" width=960 height=540 style="display:none">
  <source src="https://bitmovin.com/player-content/playhouse-vr/m3u8s/105560.m3u8" type="application/x-mpegURL">
</video>



        var camera, scene, renderer;

        var isUserInteracting = false,
            lon = 0, lat = 0,
            phi = 0, theta = 0,
            distance = 50,
            onPointerDownPointerX = 0,
            onPointerDownPointerY = 0,
            onPointerDownLon = 0,
            onPointerDownLat = 0;

        init();
        animate();

        function init() {

            var container, mesh;

            container = document.getElementById( 'container' );

            camera = new THREE.PerspectiveCamera( 75, window.innerWidth / window.innerHeight, 1, 1100 );
            camera.target = new THREE.Vector3( 0, 0, 0 );

            scene = new THREE.Scene();

            var geometry = new THREE.SphereBufferGeometry( 500, 60, 40 );
            // invert the geometry on the x-axis so that all of the faces point inward
            geometry.scale( - 1, 1, 1 );

            var video = document.getElementById( 'video' );
            video.play();

            var texture = new THREE.VideoTexture( video );
            var material = new THREE.MeshBasicMaterial( { map: texture } );

            mesh = new THREE.Mesh( geometry, material );

            scene.add( mesh );

            renderer = new THREE.WebGLRenderer();
            renderer.setPixelRatio( window.devicePixelRatio );
            renderer.setSize( window.innerWidth, window.innerHeight );
            container.appendChild( renderer.domElement );

            document.addEventListener( 'mousedown', onDocumentMouseDown, false );
            document.addEventListener( 'mousemove', onDocumentMouseMove, false );
            document.addEventListener( 'mouseup', onDocumentMouseUp, false );
            document.addEventListener( 'wheel', onDocumentMouseWheel, false );

            //

            window.addEventListener( 'resize', onWindowResize, false );

        }

        function onWindowResize() {

            camera.aspect = window.innerWidth / window.innerHeight;
            camera.updateProjectionMatrix();

            renderer.setSize( window.innerWidth, window.innerHeight );

        }

        function onDocumentMouseDown( event ) {

            event.preventDefault();

            isUserInteracting = true;

            onPointerDownPointerX = event.clientX;
            onPointerDownPointerY = event.clientY;

            onPointerDownLon = lon;
            onPointerDownLat = lat;

        }

        function onDocumentMouseMove( event ) {

            if ( isUserInteracting === true ) {

                lon = ( onPointerDownPointerX - event.clientX ) * 0.1 + onPointerDownLon;
                lat = ( onPointerDownPointerY - event.clientY ) * 0.1 + onPointerDownLat;

            }

        }

        function onDocumentMouseUp() {

            isUserInteracting = false;

        }

        function onDocumentMouseWheel( event ) {

            distance += event.deltaY * 0.05;

            distance = THREE.MathUtils.clamp( distance, 1, 50 );

        }

        function animate() {

            requestAnimationFrame( animate );
            update();

        }

        function update() {

            lat = Math.max( - 85, Math.min( 85, lat ) );
            phi = THREE.MathUtils.degToRad( 90 - lat );
            theta = THREE.MathUtils.degToRad( lon );

            camera.position.x = distance * Math.sin( phi ) * Math.cos( theta );
            camera.position.y = distance * Math.cos( phi );
            camera.position.z = distance * Math.sin( phi ) * Math.sin( theta );

            camera.lookAt( camera.target );

            renderer.render( scene, camera );

        }

    

The m3u8 plays in Chrome (and some other browsers); I have been using it for years. The problem is combining the m3u8 format with Three.js.

You can see the m3u8 working here: https://hls-js.netlify.app/demo/ (enter this URL: https://bitmovin.com/player-content/playhouse-vr/m3u8s/105560.m3u8).

HLS (the .m3u8 file type) and MPEG-DASH (the .mpd file type) are adaptive bitrate (ABR) streaming protocols.

ABR creates multiple bitrate renditions of your content and breaks them into chunks, so the client can request each successive chunk at whatever bitrate best suits the device and the current network conditions.
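For illustration, a hypothetical HLS master playlist is just a text file listing the available renditions; the player requests each successive chunk from whichever rendition fits the current bandwidth (the paths and bandwidth figures below are made up):

    #EXTM3U
    #EXT-X-STREAM-INF:BANDWIDTH=800000,RESOLUTION=640x360
    360p/playlist.m3u8
    #EXT-X-STREAM-INF:BANDWIDTH=2500000,RESOLUTION=1280x720
    720p/playlist.m3u8
    #EXT-X-STREAM-INF:BANDWIDTH=5000000,RESOLUTION=1920x1080
    1080p/playlist.m3u8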

To play an HLS or DASH stream you generally use a JavaScript-based video player, which in turn relies on the HTML5 Media Source Extensions API (HTML5 MSE):

The JavaScript video player downloads the video chunks, choosing the most appropriate bitrate for each chunk, reassembles them and passes them to the HTML5 video element.
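As a rough, untested sketch of how this could be combined with the code above: let a player library such as hls.js feed the existing <video> element through MSE, and keep handing that element to THREE.VideoTexture exactly as before (this assumes hls.js is loaded on the page via a <script> tag):

    var video = document.getElementById( 'video' );

    if ( Hls.isSupported() ) {

        // hls.js fetches the m3u8 playlist and the segments, picks a bitrate,
        // and pushes the data into the <video> element through Media Source Extensions
        var hls = new Hls();
        hls.loadSource( 'https://bitmovin.com/player-content/playhouse-vr/m3u8s/105560.m3u8' );
        hls.attachMedia( video );
        hls.on( Hls.Events.MANIFEST_PARSED, function () {

            video.play();

        } );

    } else if ( video.canPlayType( 'application/vnd.apple.mpegurl' ) ) {

        // Safari plays HLS natively, so a plain src is enough there
        video.src = 'https://bitmovin.com/player-content/playhouse-vr/m3u8s/105560.m3u8';
        video.play();

    }

    // the rest of the panorama example is unchanged: the <video> element still backs the texture
    var texture = new THREE.VideoTexture( video );

The only change is how the <video> element is fed; everything on the Three.js side (sphere geometry, material, render loop) stays the same.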

Many JavaScript video players also support 360 video, and you may find it easier to simply integrate one of these into your project. For example, an extension to the widely used Video.js player adds 360 video support:
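Presumably this refers to the videojs-vr plugin. A minimal, unverified sketch, assuming Video.js (version 7 or later, which bundles HLS support) and videojs-vr are both included on the page:

    <video id="vr-video" class="video-js" controls playsinline crossorigin="anonymous" width="960" height="540">
      <source src="https://bitmovin.com/player-content/playhouse-vr/m3u8s/105560.m3u8" type="application/x-mpegURL">
    </video>

    <script>
      // initialise the player, then tell the VR plugin the footage is an equirectangular 360 projection
      var player = videojs( 'vr-video' );
      player.vr( { projection: '360' } );
    </script>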