How to update THREE.DataTexture in React from an ArrayBuffer

I need to render a video stream that arrives over a WebSocket as an array buffer. I can generate the texture and the material, but when a new chunk arrives the texture does not update.

import React, {Component} from 'react';
import * as THREE from 'three';
import DragControls from 'three-dragcontrols';
import {MeshBasicMaterial} from "three";

const OrbitControls = require('three-orbitcontrols');

class Streamer extends Component {
  constructor(props) {
    super(props);
    this.state = {
      videoData: null,
      resolution: {
        resolutionX: 320,
        resolutionY: 240,
      },
    };

    this.start = this.start.bind(this);
    this.animate = this.animate.bind(this);
  };

  // Check whether the incoming data has changed

  static getDerivedStateFromProps(props, state) {
    if (props.videoData !== state.videoData) {
      return {
        videoData: new Uint8Array(props.videoData),
      };
    }
    return null;
  }

  async componentDidMount() {
    const width = 1024;
    const height = 768;
    let scene = new THREE.Scene();
    const camera = new THREE.PerspectiveCamera(
      90,
      width / height,
      0.1,
      1000
    );

    const renderer = new THREE.WebGLRenderer({antialias: false});
    let ambientLight = new THREE.AmbientLight(0xffffff, 0.7);

    let controls = new OrbitControls(camera, renderer.domElement);
    controls.enableRotate = false;

    // Build the mesh asynchronously and wait for the texture to be created

    let plane = this.buildTexture(this.state.videoData).then((texture) => {

      // Map the texture onto the material

      return new MeshBasicMaterial({color: 0xFFFFFF, map: texture});
    }).then((material) => {
      let geometry = new THREE.PlaneBufferGeometry(320, 240);
      return new THREE.Mesh(geometry, material);
    });

    scene.add(await plane, ambientLight);

    camera.position.z = 150;
    renderer.setSize(width, height);
    renderer.setPixelRatio(window.devicePixelRatio);

    this.scene = scene;
    this.camera = camera;
    this.renderer = renderer;

    this.mount.appendChild(this.renderer.domElement);
    // window.addEventListener('resize', () => {
    //   camera.aspect = window.innerWidth / window.innerHeight;
    //   camera.updateProjectionMatrix();
    //   renderer.setSize(window.innerWidth, window.innerHeight);
    // }, false);
    this.start();
  }

  async buildTexture(data) {
    let texture = new THREE.DataTexture(data, 320, 240, THREE.LuminanceAlphaFormat);

    // I thought this was what makes the texture change

    texture.needsUpdate = true;
    return texture;
  }

  start = () => {
    if (!this.frameId) {
      this.frameId = requestAnimationFrame(this.animate);
    }
  };

  animate() {
    this.renderScene();
    this.frameId = window.requestAnimationFrame(this.animate);
  };

  renderScene = () => {
    this.renderer.render(this.scene, this.camera);
  };

  render() {
    return (
      <div ref={(mount) => {
        this.mount = mount;
      }}
           style={{
             marginTop: '10%',
           }}>
      </div>
    );
  }
}

export default (Streamer);
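
For context, the videoData prop comes in over the WebSocket roughly like this (a rough sketch; the URL and the wrapping component are placeholders, not part of the actual setup):

import React, { Component } from 'react';
import Streamer from './Streamer';

// Hypothetical parent: forwards each binary WebSocket message to <Streamer /> as an ArrayBuffer.
class StreamContainer extends Component {
  state = { videoData: null };

  componentDidMount() {
    this.socket = new WebSocket('ws://localhost:8080/stream'); // placeholder URL
    this.socket.binaryType = 'arraybuffer';                    // deliver frames as ArrayBuffer, not Blob
    this.socket.onmessage = (event) => {
      this.setState({ videoData: event.data });                // re-renders Streamer with the new chunk
    };
  }

  componentWillUnmount() {
    if (this.socket) {
      this.socket.close();
    }
  }

  render() {
    return <Streamer videoData={this.state.videoData} />;
  }
}

export default StreamContainer;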

I figured it out, so in case anyone is interested:

import React, { Component } from 'react';

import * as THREE from 'three';
import DragControls from 'three-dragcontrols';
import { MeshBasicMaterial } from "three";

const OrbitControls = require('three-orbitcontrols');

class Streamer extends Component {
  constructor(props) {
    super(props);
    this.state = {
      videoData: null,
      resolution: {
        resolutionX: 100,
        resolutionY: 100,
      },
    };

    this.start = this.start.bind(this);
    this.animate = this.animate.bind(this);
  };


  static getDerivedStateFromProps(props, state) {
    if (props.videoData !== state.videoData) {
      return {
        videoData: new Uint8Array(props.videoData),
      };
    }
    return null;
  }

  async componentDidMount() {
    const width = window.innerWidth;
    const height = window.innerHeight;
    let scene = new THREE.Scene();
    const camera = new THREE.PerspectiveCamera(
      90,
      width / height,
      0.1,
      1000
    );

    const renderer = new THREE.WebGLRenderer({ antialias: false });
    let ambientLight = new THREE.AmbientLight(0xffffff, 0.7);

    this.renderer = renderer;
    this.scene = scene;
    this.camera = camera;

    let controls = new OrbitControls(this.camera, this.renderer.domElement);
    controls.enableRotate = false;

    this.texture = await this.buildTexture(this.state.videoData);    
    this.material = new MeshBasicMaterial({ color: 0xFFFFFF, map: this.texture });
    this.geometry = new THREE.PlaneBufferGeometry(800, 600);
    this.mesh = new THREE.Mesh(this.geometry, this.material);

    scene.add(this.mesh, ambientLight);
    new DragControls([this.mesh], this.camera, this.renderer.domElement);

    camera.position.z = 150;
    renderer.setSize(width, height);
    renderer.setPixelRatio(window.devicePixelRatio);
    renderer.setClearColor('#FFFFFF');

    this.mount.appendChild(this.renderer.domElement);
    this.start();
  }

  async buildTexture(data) {
    let texture = new THREE.DataTexture(data, 800, 600, THREE.LuminanceFormat);
    texture.needsUpdate = true;
    return texture;
  }

  start = () => {
    if (!this.frameId) {
      this.frameId = requestAnimationFrame(this.animate);
    }
  };

  async renderStuff() {
    // Rebuild the texture from the latest chunk and swap it onto the material
    this.texture = await this.buildTexture(this.state.videoData);
    this.material.map = this.texture;
  }

  animate() {
    this.renderScene();
    this.frameId = window.requestAnimationFrame(this.animate);
  };

  renderScene = () => {
    this.renderer.render(this.scene, this.camera);
  };

  async componentDidUpdate(prevProps, prevState, snapshot) {
    await this.renderStuff()
  };

  render() {
    return (
      <div ref={ (mount) => {
        this.mount = mount;
      } }
           >
      </div>
    );
  }
}

export default (Streamer);
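
One possible refinement of the approach above (not what the answer does): instead of constructing a new DataTexture on every update, the same texture can be kept alive and its backing array overwritten, then flagged with needsUpdate so three.js re-uploads it. This is only a sketch and assumes every chunk holds exactly width * height bytes in the single-channel format used here:

import * as THREE from 'three';

// Sketch: one long-lived texture whose pixel buffer is rewritten per frame.
// The 800x600 size and THREE.LuminanceFormat mirror the answer above and are assumptions.
const width = 800;
const height = 600;
const pixels = new Uint8Array(width * height);      // one byte per pixel for LuminanceFormat
const texture = new THREE.DataTexture(pixels, width, height, THREE.LuminanceFormat);
texture.needsUpdate = true;

// Call this with each ArrayBuffer chunk instead of rebuilding the texture.
function onChunk(arrayBuffer) {
  pixels.set(new Uint8Array(arrayBuffer));          // copy the new frame into the existing buffer
  texture.needsUpdate = true;                       // tell three.js to re-upload the data to the GPU
}

With this variant, the material created in componentDidMount keeps pointing at the same texture, so renderStuff would only need to copy the bytes rather than await a new buildTexture.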