Change playout delay in WebRTC stream

I'm trying to cast a live MediaStream (eventually coming from a camera) from peerA to peerB, and I want peerB to receive the live stream in real time and then replay it with an added delay. Unfortunately it isn't possible to simply pause the stream and resume playing later, because it jumps to the live moment.
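
For clarity, this is roughly the naive pause/resume idea that does not work (just a sketch; remoteStream stands in for the incoming MediaStream and video for whichever video element shows it):

const video = document.querySelector('video') // any <video> element showing the live stream
video.srcObject = remoteStream                // remoteStream: the incoming MediaStream (hypothetical name)
video.play()

// later: try to "rewind" by pausing for 10 seconds
video.pause()
setTimeout(() => video.play(), 10000) // resumes at the live edge, not 10 s behind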

So I figured out that I could use MediaRecorder + SourceBuffer to rewatch the live stream: record the stream, append the buffers to MSE (SourceBuffer) and play it 5 seconds later. This works with the local device (stream). But when I try to use MediaRecorder on the receiver's MediaStream (from pc.onaddstream) it looks like it gets some data and is able to append buffers to the sourceBuffer, but it doesn't replay. Sometimes I get just one frame.

const [pc1, pc2] = localPeerConnectionLoop()
const canvasStream = canvas.captureStream(200)

videoA.srcObject = canvasStream
videoA.play()

// Note: using two MediaRecorders at the same time seems problematic
// But this one works
// stream2mediaSorce(canvasStream, videoB)
// setTimeout(videoB.play.bind(videoB), 5000)

pc1.addTransceiver(canvasStream.getTracks()[0], {
  streams: [ canvasStream ]
})

pc2.onaddstream = (evt) => {
  videoC.srcObject = evt.stream
  videoC.play()

  // Note: using two MediaRecorders at the same time seems problematic
  // THIS DOES NOT WORK
  stream2mediaSorce(evt.stream, videoD)
  setTimeout(() => videoD.play(), 2000)
}

/**
 * Turn a MediaStream into a SourceBuffer
 * 
 * @param  {MediaStream}      stream   Live Stream to record
 * @param  {HTMLVideoElement} videoElm Video element to play the recorded video in
 * @return {undefined}
 */
function stream2mediaSorce (stream, videoElm) {
  const RECORDER_MIME_TYPE = 'video/webm;codecs=vp9'
  const recorder = new MediaRecorder(stream, { mimeType : RECORDER_MIME_TYPE })

  const mediaSource = new MediaSource()
  videoElm.src = URL.createObjectURL(mediaSource)
  mediaSource.onsourceopen = (e) => {
    const sourceBuffer = mediaSource.addSourceBuffer(RECORDER_MIME_TYPE)

    const fr = new FileReader()
    fr.onerror = console.log
    fr.onload = ({ target }) => {
      console.log(target.result)
      sourceBuffer.appendBuffer(target.result)
    }
    recorder.ondataavailable = ({ data }) => {
      console.log(data)
      fr.readAsArrayBuffer(data)
    }
    setInterval(recorder.requestData.bind(recorder), 1000)
  }

  console.log('Recorder created')
  recorder.start() 
}

Do you know why it doesn't play the video?

I have created a fiddle with all the code needed to try it out; the JavaScript tab is the same code as above (the HTML is mostly irrelevant and doesn't need to be changed).

Some people try to reduce the latency, but I actually want to increase it to about 10 seconds, to rewatch what you did wrong in a golf swing or something like that, and to avoid MediaRecorder altogether if possible.

EDIT: I found something called "playout-delay" in some RTC extension

that allows the sender to control the minimum and maximum latency from capture to render time

How do I use it? Would it be of any help to me?
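
In the meantime, a rough way to check whether the browser negotiates that header extension at all is to look for its URI in a generated offer (just a probe sketch; the URI below is the experimental one used by Chrome's WebRTC stack, and even when it is present there is no JavaScript knob for setting the actual delay values through it):

const probePc = new RTCPeerConnection()
probePc.addTransceiver('video')
probePc.createOffer().then(offer => {
  // Look for the playout-delay RTP header extension in the offered SDP
  const offered = offer.sdp.includes('http://www.webrtc.org/experiments/rtp-hdrext/playout-delay')
  console.log('playout-delay extension offered:', offered)
  probePc.close()
})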

UPDATE: there is a new feature that will enable this, called playoutDelayHint.

We want to provide means for javascript applications to set their preferences on how fast they want to render audio or video data. As fast as possible might be beneficial for applications which concentrates on real time experience. For others additional data buffering may provide smother experience in case of network issues.

Refs:
https://discourse.wicg.io/t/hint-attribute-in-webrtc-to-influence-underlying-audio-video-buffering/4038

https://bugs.chromium.org/p/webrtc/issues/detail?id=10287
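
The essential part is a single assignment per RTP receiver (a minimal sketch, assuming pc2 is the receiving RTCPeerConnection as in the demo below; playoutDelayHint is a non-standard hint, so the UA may clamp or ignore it):

// Ask the receiving side to buffer roughly 5 seconds before rendering
const DELAY_SECONDS = 5
pc2.getReceivers().forEach(receiver => {
  receiver.playoutDelayHint = DELAY_SECONDS
})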

Demo: https://jsfiddle.net/rvekxns5/ I could only set a maximum of 10 s in my browser, but it is more up to the UA vendor to do its best with the available resources.

import('https://jimmy.warting.se/packages/dummycontent/canvas-clock.js')
.then(({AnalogClock}) => {
  const {canvas} = new AnalogClock(100)
  document.querySelector('canvas').replaceWith(canvas)
  
  const [pc1, pc2] = localPeerConnectionLoop()
  const canvasStream = canvas.captureStream(200)

  videoA.srcObject = canvasStream
  videoA.play()

  pc1.addTransceiver(canvasStream.getTracks()[0], {
    streams: [ canvasStream ]
  })

  pc2.onaddstream = (evt) => {
    videoC.srcObject = evt.stream
    videoC.play()
  }

  $dur.onchange = () => {
    pc2.getReceivers()[0].playoutDelayHint = $dur.valueAsNumber
  }
})
<!-- all the irrelevant part, that you don't need to know anything about -->
<h3 style="border-bottom: 1px solid">Original canvas</h3>
<canvas id="canvas" width="100" height="100"></canvas>
<script>
function localPeerConnectionLoop(cfg = {sdpSemantics: 'unified-plan'}) {
  const setD = (d, a, b) => Promise.all([a.setLocalDescription(d), b.setRemoteDescription(d)]);
  return [0, 1].map(() => new RTCPeerConnection(cfg)).map((pc, i, pcs) => Object.assign(pc, {
    onicecandidate: e => e.candidate && pcs[i ^ 1].addIceCandidate(e.candidate),
    onnegotiationneeded: async e => {
      try {
        await setD(await pc.createOffer(), pc, pcs[i ^ 1]);
        await setD(await pcs[i ^ 1].createAnswer(), pcs[i ^ 1], pc);
      } catch (e) {
        console.log(e);
      }
    }
  }));
}
</script>
<h3 style="border-bottom: 1px solid">Local peer (PC1)</h3>
<video id="videoA" muted width="100" height="100"></video>

<h3 style="border-bottom: 1px solid">Remote peer (PC2)</h3>
<video id="videoC" muted width="100" height="100"></video>
<label> Change playoutDelayHint
<input type="number" value="1" id="$dur">
</label>