MediaStream Capture Canvas and Audio Simultaneously
I'm working on a project in which I'd like to:
- Load a video with JS and display it on a canvas.
- Use filters to alter the appearance of the canvas (and therefore of the video); a rough sketch of these first two steps follows this list.
- Use the MediaStream captureStream() method and a MediaRecorder object to record the surface of the canvas and the audio of the original video.
- Play back the stream of both the canvas and the audio in an HTML video element.
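Not part of the original question, just a minimal sketch of those first two steps; the element ids and the filter value are purely illustrative:
// hypothetical markup: <video id="source-video"> and <canvas id="canvas">
var vid = document.getElementById('source-video');
var canvas = document.getElementById('canvas');
var ctx = canvas.getContext('2d');
function drawFrame() {
  // ctx.filter applies a CSS-style filter to everything drawn afterwards
  ctx.filter = 'grayscale(100%)';
  ctx.drawImage(vid, 0, 0, canvas.width, canvas.height);
  requestAnimationFrame(drawFrame);
}
vid.addEventListener('play', drawFrame);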
I've been able to display the canvas recording in a video element by tweaking this WebRTC demo code: https://webrtc.github.io/samples/src/content/capture/canvas-record/
That said, I can't figure out how to record the video's audio alongside the canvas. Is it possible to create a MediaStream containing MediaStreamTrack instances from two different sources/elements?
According to the specification of the MediaStream API, there should theoretically be some way to accomplish this:
https://w3c.github.io/mediacapture-main/#introduction
"The two main components in the MediaStream API are the MediaStreamTrack and MediaStream interfaces. The MediaStreamTrack object represents media of a single type that originates from one media source in the User Agent, e.g. video produced by a web camera. A MediaStream is used to group several MediaStreamTrack objects into one unit that can be recorded or rendered in a media element."
Is it possible to create a MediaStream containing MediaStreamTrack instances from two different sources/elements?
Yes, you can do it using the MediaStream.addTrack() method.
But Firefox will only use the initial stream's tracks in the recorder until this bug has been fixed.
The OP already knew how to get all of these, but here is a reminder for future readers:
To get a video stream track from the canvas, you can call the canvas.captureStream(framerate) method.
To get an audio stream track from the video element, you can use the WebAudio API and its createMediaStreamDestination method. This will return a MediaStreamDestination node (dest) containing our audio stream. You will then have to connect a MediaElementSource created from your video element to this dest. If you need to add more audio tracks to this stream, you should connect all those sources to dest.
Now that we have two streams, one for the canvas video and one for the audio, we can use canvasStream.addTrack(audioStream.getAudioTracks()[0]) before initializing new MediaRecorder(canvasStream).
Here is a complete example, working only in Chrome for now, and probably soon in Firefox, once they have fixed the bug:
var cStream,
aStream,
vid,
recorder,
analyser,
dataArray,
bufferLength,
chunks = [];
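// when the record button is clicked: capture the canvas, attach the audio track and start the recorder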
function clickHandler() {
this.textContent = 'stop recording';
cStream = canvas.captureStream(30);
cStream.addTrack(aStream.getAudioTracks()[0]);
recorder = new MediaRecorder(cStream);
recorder.start();
recorder.ondataavailable = saveChunks;
recorder.onstop = exportStream;
this.onclick = stopRecording;
};
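// once the recorder stops, build a playable <video> element from the saved chunks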
function exportStream(e) {
if (chunks.length) {
var blob = new Blob(chunks)
var vidURL = URL.createObjectURL(blob);
var vid = document.createElement('video');
vid.controls = true;
vid.src = vidURL;
vid.onended = function() {
URL.revokeObjectURL(vidURL);
}
document.body.insertBefore(vid, canvas);
} else {
document.body.insertBefore(document.createTextNode('no data saved'), canvas);
}
}
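// store every non-empty chunk the recorder hands us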
function saveChunks(e) {
e.data.size && chunks.push(e.data);
}
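// pause the source video, remove the button and stop the recorder (which triggers exportStream)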
function stopRecording() {
vid.pause();
this.parentNode.removeChild(this);
recorder.stop();
}
function initAudioStream(evt) {
var audioCtx = new AudioContext();
// create a stream from our AudioContext
var dest = audioCtx.createMediaStreamDestination();
aStream = dest.stream;
// connect our video element's output to the stream
var sourceNode = audioCtx.createMediaElementSource(this);
sourceNode.connect(dest)
// start the video
this.play();
// just for the fancy canvas drawings
analyser = audioCtx.createAnalyser();
sourceNode.connect(analyser);
analyser.fftSize = 2048;
bufferLength = analyser.frequencyBinCount;
dataArray = new Uint8Array(bufferLength);
analyser.getByteTimeDomainData(dataArray);
// output to our headphones
sourceNode.connect(audioCtx.destination)
startCanvasAnim();
rec.onclick = clickHandler;
rec.disabled = false;
};
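// create the source <video> element; once it can play, initAudioStream sets everything up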
var loadVideo = function() {
vid = document.createElement('video');
vid.crossOrigin = 'anonymous';
vid.oncanplay = initAudioStream;
vid.src = 'https://dl.dropboxusercontent.com/s/bch2j17v6ny4ako/movie720p.mp4';
}
function startCanvasAnim() {
// from MDN https://developer.mozilla.org/en/docs/Web/API/AnalyserNode#Examples
var canvasCtx = canvas.getContext('2d');
canvasCtx.fillStyle = 'rgb(200, 200, 200)';
canvasCtx.lineWidth = 2;
canvasCtx.strokeStyle = 'rgb(0, 0, 0)';
var draw = function() {
var drawVisual = requestAnimationFrame(draw);
analyser.getByteTimeDomainData(dataArray);
canvasCtx.fillRect(0, 0, canvas.width, canvas.height);
canvasCtx.beginPath();
var sliceWidth = canvas.width * 1.0 / bufferLength;
var x = 0;
for (var i = 0; i < bufferLength; i++) {
var v = dataArray[i] / 128.0;
var y = v * canvas.height / 2;
if (i === 0) {
canvasCtx.moveTo(x, y);
} else {
canvasCtx.lineTo(x, y);
}
x += sliceWidth;
}
canvasCtx.lineTo(canvas.width, canvas.height / 2);
canvasCtx.stroke();
};
draw();
}
loadVideo();
<canvas id="canvas" width="500" height="200"></canvas>
<button id="rec" disabled>record</button>
P.S.: Since the FF team seems to be taking some time to fix the bug, here is a quick fix to make it work on FF too.
You can also mix the two tracks by using new MediaStream([track1, track2]).
However, Chrome currently prefixes this constructor, but since it does support addTrack, it isn't really needed, and we can come up with something as ugly as:
var mixedStream = 'MediaStream' in window ?
new MediaStream([cStream.getVideoTracks()[0], aStream.getAudioTracks()[0]]) :
cStream;
recorder = new MediaRecorder(mixedStream);
Kaiido's demo is brilliant. For those who are just looking for the tl;dr code to add an audio stream to their existing canvas stream:
let videoOrAudioElement = /* your audio source element */;
// get the audio track:
let ctx = new AudioContext();
let dest = ctx.createMediaStreamDestination();
let sourceNode = ctx.createMediaElementSource(videoOrAudioElement);
sourceNode.connect(dest);
sourceNode.connect(ctx.destination);
let audioTrack = dest.stream.getAudioTracks()[0];
// add it to your canvas stream:
canvasStream.addTrack(audioTrack);
// use your canvas stream like you would normally:
let recorder = new MediaRecorder(canvasStream);
// ...
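One caveat that applies to both approaches above: current browsers may keep the AudioContext in a suspended state until a user gesture, which makes the captured audio track silent. A small, hedged addition (recordButton stands for whatever UI starts the recording; ctx is the AudioContext from the snippet above):
recordButton.addEventListener('click', function() {
  // a suspended context produces silence on the MediaStreamDestination
  if (ctx.state === 'suspended') {
    ctx.resume();
  }
});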