在 MediaSource HTML5 中播放 MediaRecorder 块 -- 视频冻结
Play MediaRecorder chunks in MediaSource HTML5 -- video frozen
我有这个简单的代码来获取视频流块并在 MediaSource 中播放它们。我看到视频,但有时它会停止。它可能会工作几秒钟或几分钟。但最后它在某个时刻停止了。 chrome://media-internals/ 显示没有错误。
这里有什么问题?
// Capture webcam audio/video with MediaRecorder and feed the chunks into an
// MSE SourceBuffer for live playback.
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
var mediaSource = new MediaSource();
var constraints = {
    "audio": true,
    "video": {
        "mandatory": {
            "minWidth": 320, "maxWidth": 320,
            "minHeight": 240, "maxHeight": 240
        }, "optional": []
    }
};
window.mediaSource = mediaSource;
var sourceBuffer;
var video = document.querySelector('#video');
window.video = video;
video.src = window.URL.createObjectURL(mediaSource);
// FIX: appendBuffer() throws InvalidStateError when the SourceBuffer is still
// processing a previous append (sourceBuffer.updating === true), which is the
// usual cause of this demo freezing. Queue incoming chunks and drain them one
// at a time on 'updateend' so appends never overlap and stay in order.
var pendingChunks = [];
function appendNextChunk() {
    if (!sourceBuffer || sourceBuffer.updating || pendingChunks.length === 0) {
        return;
    }
    sourceBuffer.appendBuffer(pendingChunks.shift());
}
mediaSource.addEventListener('sourceopen', function (e) {
    console.log("sourceopen");
    sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vorbis,vp8"');
    window.sourceBuffer = sourceBuffer;
    sourceBuffer.addEventListener('updateend', appendNextChunk);
    // Flush anything recorded before the MediaSource opened.
    appendNextChunk();
}, false);
mediaSource.addEventListener('error', function (e) {
    console.log("error", e)
}, false);
video.play();
navigator.getUserMedia(constraints, function (stream) {
    console.log("stream", stream);
    mediaRecorder = new MediaRecorder(stream);
    mediaRecorder.ondataavailable = function (e) {
        var reader = new FileReader();
        reader.addEventListener("loadend", function () {
            // Enqueue instead of appending directly so an in-flight append is
            // never interrupted. Chunks are pushed in dataavailable order.
            pendingChunks.push(new Uint8Array(reader.result));
            appendNextChunk();
        });
        reader.readAsArrayBuffer(e.data);
    };
    mediaRecorder.start(100);
}, function (e) {
    console.log(e)
});
这里是 JSFIDDLE,它将尝试这样做:
https://jsfiddle.net/stivyakovenko/fkt89cLu/6/
我正在使用 Chrome 作为我的主要目标。
看起来这是 Chrome 中的错误...
https://bugs.chromium.org/p/chromium/issues/detail?id=606000
我也在尝试这样做,但是我根本没有收到任何视频。你的 jsfiddle 在 chrome 或 firefox 上对我不起作用(在 ubuntu 14.04 和 windows 7 上测试)。
经过一些研究(主要是在录制后流回文件),我发现该文件没有正确地分段以供 MSE 播放。
@Steve:我很想知道您是如何使用 ffmpeg 进行分段的。
作为旁注,我在这里也有一个类似的问题:Display getUserMediaStream live video with media stream extensions (MSE),错误描述来自 chrome://media-internals.
mediarecorder 将在 ondataavailable 回调中为您提供整个 webm 文件的一部分。看起来这种东西不适用于 mediaSource。它在我的 chrome 66 中根本无法工作。
这里是一种类似于 "video chat" 或 "live stream" 的方法,使用没有 ffmpeg 的 MediaRecorder:
- 您可以使用 ajax 将数据部分地发送到您的服务器。
- 服务器可以在一个长时间保持打开的响应中,把“整个 webm 文件”持续返回给你的 Chrome 浏览器。服务器每从客户端收到一些新数据,就可以把这部分数据追加到该响应中继续返回。
而且这种解决方法也只适用于 html:
- 您可以使用 blob 列表来收集来自 ondataavailable 的所有 blob。
- 然后再设置video.src。
这是一个有效的 jsfiddle:
// HTML-only workaround: record the webcam and replay it in a second <video>
// by rebuilding a Blob from every chunk recorded so far each time playback
// reaches the end.
const constraints = {video: true};
const video1 = document.querySelector('.real1');
const video2 = document.querySelector('.real2');
var blobList = [];
var gCurrentTime = 0;
// Rebuild video2's source from all recorded chunks and resume playback at the
// position it had reached.
function playNew(){
    gCurrentTime = video2.currentTime;
    // FIX: revoke the previous object URL before replacing it. playNew() runs
    // on every 'ended' event, and blob URLs that are never revoked keep their
    // Blobs alive for the whole page lifetime (a steady memory leak).
    if (video2.src) {
        URL.revokeObjectURL(video2.src);
    }
    var thisBlob = new Blob(blobList,{type:"video/webm"});
    var url = URL.createObjectURL(thisBlob);
    video2.src = url;
    video2.currentTime = gCurrentTime;
    video2.play();
}
video2.onended = playNew;
var isFirst = true;
// Show the live stream in video1 and record 1-second chunks; the first chunk
// kicks off playback in video2.
function handleSuccess(stream) {
    video1.srcObject = stream;
    var mediaRecorder = new MediaRecorder(stream,{mimeType:"video/webm"});
    mediaRecorder.ondataavailable = function(e){
        blobList.push(e.data);
        if (isFirst){
            playNew();
            isFirst = false;
        }
    }
    mediaRecorder.start(1000);
}
function handleError(error) {
    console.error('Reeeejected!', error);
}
navigator.mediaDevices.getUserMedia(constraints).
then(handleSuccess).catch(handleError);
<!-- real1: live camera preview; real2: replayed recording built from chunks. -->
<video class="real1" autoplay controls></video>
<video class="real2" controls></video>
https://jsfiddle.net/4akkadht/1/
html 唯一的解决方案(第二个)会一次又一次地闪烁,并且有很大的延迟。服务器长推方案(第一个)不会闪烁,有5秒延迟
chrome 中的工作示例,但它在 firefox 中冻结
// Live MSE demo: record getUserMedia with MediaRecorder and append each chunk
// to a SourceBuffer. All chunk handling is serialised on the `tasks` promise
// chain so appends are strictly ordered and never overlap an in-flight
// SourceBuffer update. Relies on the local `async` generator driver below.
const main = async(function* main(){
    const logging = true;
    // Serial task queue: every dataavailable handler and the stop handler are
    // chained onto this promise so only one runs at a time.
    let tasks = Promise.resolve(void 0);
    const devices = yield navigator.mediaDevices.enumerateDevices();
    console.table(devices);
    const stream = yield navigator.mediaDevices.getUserMedia({video: true, audio: true});
    if(logging){
        // Trace MediaStream lifecycle events.
        stream.addEventListener("active", (ev)=>{ console.log(ev.type); });
        stream.addEventListener("inactive", (ev)=>{ console.log(ev.type); });
        stream.addEventListener("addtrack", (ev)=>{ console.log(ev.type); });
        stream.addEventListener("removetrack", (ev)=>{ console.log(ev.type); });
    }
    const rec = new MediaRecorder(stream, {mimeType: 'video/webm; codecs="opus,vp8"'});
    if(logging){
        // Trace MediaRecorder state changes.
        rec.addEventListener("dataavailable", (ev)=>{ console.log(ev.type); });
        rec.addEventListener("pause", (ev)=>{ console.log(ev.type); });
        rec.addEventListener("resume", (ev)=>{ console.log(ev.type); });
        rec.addEventListener("start", (ev)=>{ console.log(ev.type); });
        rec.addEventListener("stop", (ev)=>{ console.log(ev.type); });
        rec.addEventListener("error", (ev)=>{ console.error(ev.type, ev); });
    }
    const ms = new MediaSource();
    if(logging){
        // Trace MediaSource / SourceBufferList events.
        ms.addEventListener('sourceopen', (ev)=>{ console.log(ev.type); });
        ms.addEventListener('sourceended', (ev)=>{ console.log(ev.type); });
        ms.addEventListener('sourceclose', (ev)=>{ console.log(ev.type); });
        ms.sourceBuffers.addEventListener('addsourcebuffer', (ev)=>{ console.log(ev.type); });
        ms.sourceBuffers.addEventListener('removesourcebuffer', (ev)=>{ console.log(ev.type); });
    }
    const video = document.createElement("video");
    if(logging){
        // Trace media element events ('timeupdate' is noisy but useful here).
        video.addEventListener('loadstart', (ev)=>{ console.log(ev.type); });
        video.addEventListener('progress', (ev)=>{ console.log(ev.type); });
        video.addEventListener('loadedmetadata', (ev)=>{ console.log(ev.type); });
        video.addEventListener('loadeddata', (ev)=>{ console.log(ev.type); });
        video.addEventListener('canplay', (ev)=>{ console.log(ev.type); });
        video.addEventListener('canplaythrough', (ev)=>{ console.log(ev.type); });
        video.addEventListener('playing', (ev)=>{ console.log(ev.type); });
        video.addEventListener('waiting', (ev)=>{ console.log(ev.type); });
        video.addEventListener('seeking', (ev)=>{ console.log(ev.type); });
        video.addEventListener('seeked', (ev)=>{ console.log(ev.type); });
        video.addEventListener('ended', (ev)=>{ console.log(ev.type); });
        video.addEventListener('emptied', (ev)=>{ console.log(ev.type); });
        video.addEventListener('stalled', (ev)=>{ console.log(ev.type); });
        video.addEventListener('timeupdate', (ev)=>{ console.log(ev.type); }); // annoying
        video.addEventListener('durationchange', (ev)=>{ console.log(ev.type); });
        video.addEventListener('ratechange', (ev)=>{ console.log(ev.type); });
        video.addEventListener('play', (ev)=>{ console.log(ev.type); });
        video.addEventListener('pause', (ev)=>{ console.log(ev.type); });
        video.addEventListener('error', (ev)=>{ console.warn(ev.type, ev); });
    }
    //video.srcObject = ms;
    video.src = URL.createObjectURL(ms);
    video.volume = 0;
    video.controls = true;
    video.autoplay = true;
    document.body.appendChild(video);
    // Wait until the MediaSource is attached before creating the SourceBuffer.
    yield new Promise((resolve, reject)=>{
        ms.addEventListener('sourceopen', ()=> resolve(), {once: true});
    });
    const sb = ms.addSourceBuffer(rec.mimeType);
    if(logging){
        // Trace SourceBuffer activity.
        sb.addEventListener('updatestart', (ev)=>{ console.log(ev.type); }); // annoying
        sb.addEventListener('update', (ev)=>{ console.log(ev.type); }); // annoying
        sb.addEventListener('updateend', (ev)=>{ console.log(ev.type); }); // annoying
        sb.addEventListener('error', (ev)=>{ console.error(ev.type, ev); });
        sb.addEventListener('abort', (ev)=>{ console.log(ev.type); });
    }
    // Tear everything down in dependency order: buffer -> source -> recorder
    // -> tracks -> element.
    const stop = async(function* stop(){
        console.info("stopping");
        if(sb.updating){ sb.abort(); }
        if(ms.readyState === "open"){ ms.endOfStream(); }
        rec.stop();
        stream.getTracks().map((track)=>{ track.stop(); });
        yield video.pause();
        console.info("end");
    });
    const button = document.createElement("button");
    button.innerHTML = "stop";
    button.addEventListener("click", ()=>{
        document.body.removeChild(button);
        tasks = tasks.then(stop);
    }, {once: true});
    document.body.appendChild(button);
    let i = 0;
    rec.ondataavailable = ({data})=>{
        // Chain each chunk onto the serial queue; a failure stops everything.
        tasks = tasks.then(async(function*(){
            console.group(""+i);
            try{
                if(logging){ console.log("dataavailable", "size:", data.size); }
                if(data.size === 0){
                    console.warn("empty recorder data");
                    throw new Error("empty recorder data");
                }
                const buf = yield readAsArrayBuffer(data);
                sb.appendBuffer(buf);
                // Wait for this append to finish before the next task runs.
                yield new Promise((resolve, reject)=>{
                    sb.addEventListener('updateend', ()=> resolve(), {once: true});
                    // FIX: was `reject(ev)` -- `ev` is not defined in this
                    // scope, so an append error raised a ReferenceError
                    // instead of rejecting with the actual error event.
                    sb.addEventListener("error", (err)=> reject(err), {once: true});
                });
                if(logging){
                    console.log("timestampOffset", sb.timestampOffset);
                    console.log("appendWindowStart", sb.appendWindowStart);
                    console.log("appendWindowEnd", sb.appendWindowEnd);
                    for(let i=0; i<sb.buffered.length; i++){
                        console.log("buffered", i, sb.buffered.start(i), sb.buffered.end(i));
                    }
                    for(let i=0; i<video.seekable.length; i++){
                        console.log("seekable", i, video.seekable.start(i), video.seekable.end(i));
                    }
                    console.log("webkitAudioDecodedByteCount", video.webkitAudioDecodedByteCount);
                    console.log("webkitVideoDecodedByteCount", video.webkitVideoDecodedByteCount);
                    console.log("webkitDecodedFrameCount", video.webkitDecodedFrameCount);
                    console.log("webkitDroppedFrameCount", video.webkitDroppedFrameCount);
                }
                // A second buffered range means the appended chunks left a gap
                // on the timeline -- playback would stall there, so bail out.
                if (video.buffered.length > 1) {
                    console.warn("MSE buffered has a gap!");
                    throw new Error("MSE buffered has a gap!");
                }
            }catch(err){
                console.error(err);
                yield stop();
                console.groupEnd(""+i); i++;
                return Promise.reject(err);
            }
            console.groupEnd(""+i);
            i++;
        }));
    };
    rec.start(1000);
    console.info("start");
});
// Resolve with the given duration once `ms` milliseconds have elapsed.
function sleep(ms){
    return new Promise((resolve) => {
        setTimeout(() => { resolve(ms); }, ms);
    });
}
// Promise wrapper around FileReader.readAsArrayBuffer: resolves with the
// ArrayBuffer on completion, rejects with the reader's DOMException on error.
function readAsArrayBuffer(blob) {
    return new Promise((resolve, reject)=>{
        const reader = new FileReader();
        reader.addEventListener("loadend", ()=> resolve(reader.result), {once: true});
        // FIX: the "error" listener receives a plain Event with no `.error`
        // property, so the old `reject(err.error)` always rejected with
        // undefined. The failure reason is exposed as reader.error.
        reader.addEventListener("error", ()=> reject(reader.error), {once: true});
        reader.readAsArrayBuffer(blob);
    });
}
// Minimal generator driver: converts a generator function that yields
// Promises into a plain function returning a Promise for the generator's
// final return value (a hand-rolled stand-in for async/await).
function async(generatorFunc){
    return function (arg) {
        const generator = generatorFunc(arg);
        const step = (value) => {
            const result = generator.next(value);
            if (result.done) {
                return result.value;
            }
            // Awaited Promises are fed back into the generator; any other
            // yielded value short-circuits as a resolved Promise (preserving
            // the original driver's behavior).
            return result.value instanceof Promise
                ? result.value.then(step)
                : Promise.resolve(result.value);
        };
        return step(null);
    };
}
// Entry point: clear old console noise, then run the capture/playback demo
// and surface any failure from the generator pipeline.
console.clear();
main().catch(console.error);
根据我使用 MediaRecorder 和 MediaSource 的经验,大多数与视频冻结或返回错误相关的错误可能是由于接收的块不同步造成的。我相信 webm(也许还有其他媒体类型)需要按时间码的递增顺序接收这些块。记录、发送和接收块 Async 可能无法保留时间码的递增顺序。
所以,在分析了我自己使用 MediaRecorder/MediaSource 的视频冻结体验后,我更改了我的代码以同步发送录制的块,而不是异步发送。
此解决方案在 Firefox 中运行良好,不会卡顿。浏览器客户端需要 jQuery,服务器端需要支持 CGI 的 Python3。它还包含两个服务器端 Python3 程序,用于在数据生成的同时写入和读取网络摄像头数据。
浏览器客户端:
<!-- Browser client (v1). Records the webcam with MediaRecorder, shows a local
     MSE preview in #video, POSTs each chunk (base64) to post_data_webcam.py,
     and polls get_data_webcam.py every second to replay the server-side copy
     in #video2 through a second MediaSource. -->
<html>
<head>
<script type="text/javascript" src="js/jquery.min.js"></script>
</head>
<body>
<!-- #video: local preview fed directly from the recorder.
     #video2: playback of the chunks round-tripped through the server. -->
<video id="video" width="300" height="300" controls></video>
<video id="video2" width="300" height="300" controls></video>
<script>
// Index (line number in webcam_text.txt) of the next server chunk to fetch.
var offsetA = 0;
var res;
var pos;
var b = "base64," ;
var fr = new FileReader();
// Decode the base64 payload of a data URI back into a webm Blob.
function b64toBlob(dataURI) {
var byteString = atob(dataURI.split(',')[1]);
var ab = new ArrayBuffer(byteString.length);
var ia = new Uint8Array(ab);
for (var i = 0; i < byteString.length; i++) {
ia[i] = byteString.charCodeAt(i);
}
return new Blob([ab], { type: 'video/webm; codecs="vp8, opus"' });
}
// 1. Create a `MediaSource`
var mediaSource2 = new MediaSource();
// 2. Create an object URL from the `MediaSource`
var url = URL.createObjectURL(mediaSource2);
// 3. Set the video's `src` to the object URL
var video = document.getElementById("video2");
video.src = url;
// 4. On the `sourceopen` event, create a `SourceBuffer`
var sourceBuffer2 = null;
// --- Local preview pipeline (same structure as the original question) ---
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
var mediaSource = new MediaSource();
var constraints = {
"audio": true,
"video": {
"mandatory": {
"minWidth": 320, "maxWidth": 320,
"minHeight": 240, "maxHeight": 240
}, "optional": []
}
};
window.mediaSource = mediaSource;
var sourceBuffer;
// NOTE(review): `var video` is re-declared here, rebinding the variable from
// #video2 (above) to #video. mediaSource2's URL was already assigned to
// #video2 so playback still works, but the shadowing is confusing.
var video = document.querySelector('#video');
window.video = video;
video.src = window.URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', function (e) {
console.log("sourceopen");
sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8, opus"');
window.sourceBuffer = sourceBuffer;
}, false);
mediaSource.addEventListener('error', function (e) {
console.log("error", e)
}, false);
var stack = [];
video.play();
mediaSource2.addEventListener("sourceopen", function()
{
// NOTE: Browsers are VERY picky about the codec being EXACTLY
// right here. Make sure you know which codecs you're using!
sourceBuffer2 = mediaSource2.addSourceBuffer("video/webm; codecs=\"vp8, opus\"");
// 'sequence' mode lays appended chunks back-to-back regardless of their
// internal timestamps, tolerating timestamp gaps between chunks.
sourceBuffer2.mode = 'sequence';
// Make sure to only append one chunk at a time to the SourceBuffer
navigator.getUserMedia(constraints, function (stream)
{
console.log("stream", stream);
// NOTE(review): `mediaRecorder` is an implicit global (no var/let).
mediaRecorder = new MediaRecorder(stream);
mediaRecorder.ondataavailable = function (e)
{
// Upload path: read the chunk as a data URI, strip everything up to and
// including "base64,", and POST the bare base64 text to the server.
fr.onload = function(){
res = this.result;
pos = res.search(b);
pos = pos + b.length;
res = res.substring(pos);
$.ajax({
type: 'POST',
url: 'post_data_webcam.py',
dataType: "html",
data: { chunk: res },
success: function(data){
//alert(data + ' yes');
},
error: function(XMLHttpRequest, textStatus, errorThrown) {
alert('Status: ' + textStatus + ' ' + ' Error: ' + errorThrown);
}
});
}
fr.readAsDataURL(e.data);
// Local preview path: append the same chunk to the first MediaSource.
// NOTE(review): appendBuffer is called without checking
// sourceBuffer.updating, so overlapping appends can throw
// InvalidStateError.
var reader = new FileReader();
reader.addEventListener("loadend", function () {
var arr = new Uint8Array(reader.result);
sourceBuffer.appendBuffer(arr);
});
reader.readAsArrayBuffer(e.data);
};
mediaRecorder.start(1000);
}, function (e) {
console.log(e)
});
// Poll the server once per second for the next chunk and append it.
var i = setInterval(function()
{
if (mediaSource2.readyState === "open" && sourceBuffer2 && sourceBuffer2.updating === false )
{
// NOTE(review): SourceBuffer has no `duration` property, so both
// conditions below compare undefined > 2 (always false) and the intended
// trimming of old data never runs -- probably `mediaSource.duration` or
// `sourceBuffer.buffered` was meant.
if (sourceBuffer.duration > 2){
sourceBuffer.remove(0, sourceBuffer.duration - 2);
}
if (sourceBuffer2.duration > 2){
sourceBuffer2.remove(0, sourceBuffer2.duration - 2);
}
// async:false (deprecated sync XHR) keeps fetch/append strictly ordered.
$.ajax({
type: 'POST',
url: 'get_data_webcam.py',
dataType: "html",
async: false,
data: { offset: offsetA },
success: function(data){
data = data.trim();
// An empty payload ("base64,") means chunk `offsetA` isn't on the
// server yet; retry next tick without advancing the offset.
if (data != 'base64,') {
var reader = new FileReader();
reader.addEventListener("loadend", function () {
var arr = new Uint8Array(reader.result);
sourceBuffer2.appendBuffer(arr);
});
reader.readAsArrayBuffer( b64toBlob(data) );
offsetA = offsetA + 1;
}
},
error: function(XMLHttpRequest, textStatus, errorThrown) {
alert('Status: ' + textStatus + ' ' + ' Error: ' + errorThrown);
}
});
}
}, 1000);
});
</script>
</body>
</html>
Server-side Python3 网络摄像头视频作者:post_data_webcam.py
# post_data_webcam.py -- CGI endpoint: receives one base64-encoded
# MediaRecorder chunk per POST and appends it to the growing recording.
import os
import sys
import cgi
import cgitb
import base64
include_path = '/var/project_path/www'
cgitb.enable(display=0, logdir=f"""{include_path}/tmp_errors""") # include_path is OUTDIR
sys.path.insert(0, include_path)
def enc_print(string='', encoding='utf8'):
    # CGI responses must be written as bytes; encode and terminate the line.
    sys.stdout.buffer.write(string.encode(encoding) + b'\n')
from html import escape
args = cgi.FieldStorage()
# HTML-escape the POSTed chunk. Standard base64 characters (A-Z a-z 0-9 + / =)
# are unaffected by escape(), so the payload survives intact.
chunk = '' if not args.getvalue( "chunk" ) else escape( args.getvalue( "chunk" ) )
mp4 = 'webcam.mp4'
mp4_text = 'webcam_text.txt'
# Append the decoded bytes to the binary recording...
with open (mp4, 'ab') as f:
    f.write( base64.b64decode(chunk) )
# ...and log the chunk (length + base64 text) one per line so the reader CGI
# can serve chunk N by line offset.
with open (mp4_text, 'a') as f:
    f.write( str(len(chunk)) + ',' + chunk + '\n' )
html = 'success'
enc_print("Content-Type:text/html;charset=utf-8;")
enc_print()
enc_print(html)
Server-side Python3 网络摄像头视频 reader: get_data_webcam.py
# get_data_webcam.py -- CGI endpoint: serves recorded webcam chunk number
# `offset` (0-based line index into webcam_text.txt) as base64 text, with a
# "base64," prefix. An empty payload tells the client the chunk isn't
# available yet.
import os
import sys
import cgi
import cgitb
import base64
include_path = '/var/project_path/www'
cgitb.enable(display=0, logdir=f"""{include_path}/tmp_errors""") # include_path is OUTDIR
sys.path.insert(0, include_path)
def enc_print(string='', encoding='utf8'):
    # CGI responses must be written as bytes; encode and terminate the line.
    sys.stdout.buffer.write(string.encode(encoding) + b'\n')
from html import escape
args = cgi.FieldStorage()
offset = '' if not args.getvalue( "offset" ) else escape( args.getvalue( "offset" ) )
mp4_text = 'webcam_text.txt'
data = ''
# Best effort: if the chunk isn't recorded yet (or the offset is malformed)
# reply with an empty payload and let the client retry on its next poll.
# FIX: catch only the expected failures (missing/unreadable file, offset past
# the end, non-integer offset, malformed line) instead of a bare `except:`,
# which also swallowed SystemExit/KeyboardInterrupt and hid real bugs.
try:
    with open(mp4_text, 'r') as f:
        line = f.readlines()[int(offset)]
        data = line.split(',')[1].strip()
except (OSError, IndexError, ValueError):
    pass
enc_print("Content-Type:text/html;charset=utf-8;")
enc_print()
enc_print('base64,' + data)
更新!
这是我也创建的版本 2,它可以在 Firefox 和 Chrome 中运行,并且不会冻结。请注意,我使用相同的两个 server-side Python3 程序来写入和读取网络摄像头数据,因为数据是根据我之前的回答创建的。
浏览器客户端版本 2:
<!-- Browser client (v2). Works in Firefox and Chrome without freezing.
     Records webcam chunks, POSTs them (base64, synchronously) to
     post_data_webcam.py, and polls get_data_webcam.py once per second to
     replay the server-side copy in #video2 through a MediaSource. -->
<html>
<head>
<script type="text/javascript" src="js/jquery.min.js"></script>
</head>
<body>
<!-- video1: live camera preview; video2: MSE playback of server chunks. -->
<video id="video1" width="300" height="300" autoplay controls ></video>
<video id="video2" width="300" height="300" controls></video>
<script>
// Index (line number in webcam_text.txt) of the next server chunk to fetch.
var offsetA = 0;
// Decode the base64 payload of a data URI back into a webm Blob.
function b64toBlob(dataURI) {
var byteString = atob(dataURI.split(',')[1]);
var ab = new ArrayBuffer(byteString.length);
var ia = new Uint8Array(ab);
for (var i = 0; i < byteString.length; i++) {
ia[i] = byteString.charCodeAt(i);
}
return new Blob([ab], { type: 'video/webm; codecs=vp8;' });
}
// 1. Create a `MediaSource`
var mediaSource2 = new MediaSource();
// 2. Create an object URL from the `MediaSource`
var url = URL.createObjectURL(mediaSource2);
// 3. Set the video's `src` to the object URL
var video = document.getElementById("video2");
video.src = url;
// 4. On the `sourceopen` event, create a `SourceBuffer`
var sourceBuffer2 = null;
const constraints = {video: true};
const video1 = document.querySelector('#video1');
const video2 = document.querySelector('#video2');
//var blobList = [];
function handleSuccess(stream) {
video1.srcObject = stream;
// NOTE(review): the MediaRecorder option key is `mimeType`, not `type`, so
// this option object is silently ignored and the browser's default
// container/codec is used instead.
var mediaRecorder = new MediaRecorder(stream,{type:"video/webm; codecs=vp8;"});
mediaRecorder.ondataavailable = function(e){
//blobList.push(e.data);
var res;
var pos;
var b = "base64," ;
var fr = new FileReader();
// Strip everything up to and including "base64," and POST the bare base64
// text; async:false keeps chunks strictly in recording order.
fr.onload = function(){
res = this.result;
pos = res.search(b);
pos = pos + b.length;
res = res.substring(pos);
$.ajax({
type: 'POST',
url: 'post_data_webcam.py',
dataType: "html",
async:false,
data: { chunk: res },
success: function(data){
//alert(data + ' yes');
},
error: function(XMLHttpRequest, textStatus, errorThrown) {
alert('Status: ' + textStatus + ' ' + ' Error: ' + errorThrown);
}
});
}
fr.readAsDataURL(e.data);
}
mediaRecorder.start(1000);
// Poll the server once per second for the next chunk and append it.
var i = setInterval(function()
{
if (mediaSource2.readyState === "open" && sourceBuffer2 && sourceBuffer2.updating === false )
{
// NOTE(review): SourceBuffer has no `duration` property, so this compares
// undefined > 2 (always false) and old data is never trimmed -- probably
// mediaSource2.duration or sourceBuffer2.buffered was meant.
if (sourceBuffer2.duration > 2) {
sourceBuffer2.remove(0, sourceBuffer2.duration - 2);
}
$.ajax({
type: 'POST',
url: 'get_data_webcam.py',
dataType: "html",
async: false,
data: { offset: offsetA },
success: function(data){
data = data.trim();
// "base64," alone means chunk `offsetA` isn't on the server yet; retry
// next tick without advancing the offset.
if (data != 'base64,') {
var reader = new FileReader();
reader.addEventListener("loadend", function () {
sourceBuffer2.appendBuffer( reader.result );
});
reader.readAsArrayBuffer( b64toBlob(data) );
offsetA = offsetA + 1;
}
},
error: function(XMLHttpRequest, textStatus, errorThrown) {
alert('Status: ' + textStatus + ' ' + ' Error: ' + errorThrown);
}
});
}
}, 1000);
video.play();
}
function handleError(error) {
console.error('error', error);
}
mediaSource2.addEventListener("sourceopen", function()
{
// NOTE: Browsers are VERY picky about the codec being EXACTLY
// right here. Make sure you know which codecs you're using!
sourceBuffer2 = mediaSource2.addSourceBuffer("video/webm; codecs=vp8;");
// 'sequence' mode lays chunks back-to-back regardless of their internal
// timestamps, tolerating gaps between recorded chunks.
sourceBuffer2.mode = 'sequence';
// Make sure to only append one chunk at a time to the SourceBuffer
navigator.mediaDevices.getUserMedia(constraints).then(handleSuccess).catch(handleError);
});
</script>
</body>
</html>
我有这个简单的代码来获取视频流块并在 MediaSource 中播放它们。我看到视频,但有时它会停止。它可能会工作几秒钟或几分钟。但最后它在某个时刻停止了。 chrome://media-internals/ 显示没有错误。
这里有什么问题?
// Capture webcam audio/video with MediaRecorder and feed the chunks into an
// MSE SourceBuffer for live playback.
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
var mediaSource = new MediaSource();
var constraints = {
    "audio": true,
    "video": {
        "mandatory": {
            "minWidth": 320, "maxWidth": 320,
            "minHeight": 240, "maxHeight": 240
        }, "optional": []
    }
};
window.mediaSource = mediaSource;
var sourceBuffer;
var video = document.querySelector('#video');
window.video = video;
video.src = window.URL.createObjectURL(mediaSource);
// FIX: appendBuffer() throws InvalidStateError when the SourceBuffer is still
// processing a previous append (sourceBuffer.updating === true), which is the
// usual cause of this demo freezing. Queue incoming chunks and drain them one
// at a time on 'updateend' so appends never overlap and stay in order.
var pendingChunks = [];
function appendNextChunk() {
    if (!sourceBuffer || sourceBuffer.updating || pendingChunks.length === 0) {
        return;
    }
    sourceBuffer.appendBuffer(pendingChunks.shift());
}
mediaSource.addEventListener('sourceopen', function (e) {
    console.log("sourceopen");
    sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vorbis,vp8"');
    window.sourceBuffer = sourceBuffer;
    sourceBuffer.addEventListener('updateend', appendNextChunk);
    // Flush anything recorded before the MediaSource opened.
    appendNextChunk();
}, false);
mediaSource.addEventListener('error', function (e) {
    console.log("error", e)
}, false);
video.play();
navigator.getUserMedia(constraints, function (stream) {
    console.log("stream", stream);
    mediaRecorder = new MediaRecorder(stream);
    mediaRecorder.ondataavailable = function (e) {
        var reader = new FileReader();
        reader.addEventListener("loadend", function () {
            // Enqueue instead of appending directly so an in-flight append is
            // never interrupted. Chunks are pushed in dataavailable order.
            pendingChunks.push(new Uint8Array(reader.result));
            appendNextChunk();
        });
        reader.readAsArrayBuffer(e.data);
    };
    mediaRecorder.start(100);
}, function (e) {
    console.log(e)
});
这里是 JSFIDDLE,它将尝试这样做: https://jsfiddle.net/stivyakovenko/fkt89cLu/6/ 我正在使用 Chrome 作为我的主要目标。
看起来这是 Chrome 中的错误...
https://bugs.chromium.org/p/chromium/issues/detail?id=606000
我也在尝试这样做,但是我根本没有收到任何视频。你的 jsfiddle 在 chrome 或 firefox 上对我不起作用(在 ubuntu 14.04 和 windows 7 上测试)。
经过一些研究(主要是在录制后流回文件),我发现该文件没有正确地分段以供 MSE 播放。 @Steve:我很想知道您是如何使用 ffmpeg 进行分段的。
作为旁注,我在这里也有一个类似的问题:Display getUserMediaStream live video with media stream extensions (MSE),错误描述来自 chrome://media-internals.
mediarecorder 将在 ondataavailable 回调中为您提供整个 webm 文件的一部分。看起来这种东西不适用于 mediaSource。它在我的 chrome 66 中根本无法工作。
这里是一种类似于 "video chat" 或 "live stream" 的方法,使用没有 ffmpeg 的 MediaRecorder:
- 您可以使用 ajax 将数据部分地发送到您的服务器。
- 服务器可以在一个长时间保持打开的响应中,把“整个 webm 文件”持续返回给你的 Chrome 浏览器。服务器每从客户端收到一些新数据,就可以把这部分数据追加到该响应中继续返回。
而且这种解决方法也只适用于 html:
- 您可以使用 blob 列表来收集来自 ondataavailable 的所有 blob。
- 然后再设置video.src。
这是一个有效的 jsfiddle:
// HTML-only workaround: record the webcam and replay it in a second <video>
// by rebuilding a Blob from every chunk recorded so far each time playback
// reaches the end.
const constraints = {video: true};
const video1 = document.querySelector('.real1');
const video2 = document.querySelector('.real2');
var blobList = [];
var gCurrentTime = 0;
// Rebuild video2's source from all recorded chunks and resume playback at the
// position it had reached.
function playNew(){
    gCurrentTime = video2.currentTime;
    // FIX: revoke the previous object URL before replacing it. playNew() runs
    // on every 'ended' event, and blob URLs that are never revoked keep their
    // Blobs alive for the whole page lifetime (a steady memory leak).
    if (video2.src) {
        URL.revokeObjectURL(video2.src);
    }
    var thisBlob = new Blob(blobList,{type:"video/webm"});
    var url = URL.createObjectURL(thisBlob);
    video2.src = url;
    video2.currentTime = gCurrentTime;
    video2.play();
}
video2.onended = playNew;
var isFirst = true;
// Show the live stream in video1 and record 1-second chunks; the first chunk
// kicks off playback in video2.
function handleSuccess(stream) {
    video1.srcObject = stream;
    var mediaRecorder = new MediaRecorder(stream,{mimeType:"video/webm"});
    mediaRecorder.ondataavailable = function(e){
        blobList.push(e.data);
        if (isFirst){
            playNew();
            isFirst = false;
        }
    }
    mediaRecorder.start(1000);
}
function handleError(error) {
    console.error('Reeeejected!', error);
}
navigator.mediaDevices.getUserMedia(constraints).
then(handleSuccess).catch(handleError);
<!-- real1: live camera preview; real2: replayed recording built from chunks. -->
<video class="real1" autoplay controls></video>
<video class="real2" controls></video>
https://jsfiddle.net/4akkadht/1/
html 唯一的解决方案(第二个)会一次又一次地闪烁,并且有很大的延迟。服务器长推方案(第一个)不会闪烁,有5秒延迟
chrome 中的工作示例,但它在 firefox 中冻结
// Live MSE demo: record getUserMedia with MediaRecorder and append each chunk
// to a SourceBuffer. All chunk handling is serialised on the `tasks` promise
// chain so appends are strictly ordered and never overlap an in-flight
// SourceBuffer update. Relies on the local `async` generator driver below.
const main = async(function* main(){
    const logging = true;
    // Serial task queue: every dataavailable handler and the stop handler are
    // chained onto this promise so only one runs at a time.
    let tasks = Promise.resolve(void 0);
    const devices = yield navigator.mediaDevices.enumerateDevices();
    console.table(devices);
    const stream = yield navigator.mediaDevices.getUserMedia({video: true, audio: true});
    if(logging){
        // Trace MediaStream lifecycle events.
        stream.addEventListener("active", (ev)=>{ console.log(ev.type); });
        stream.addEventListener("inactive", (ev)=>{ console.log(ev.type); });
        stream.addEventListener("addtrack", (ev)=>{ console.log(ev.type); });
        stream.addEventListener("removetrack", (ev)=>{ console.log(ev.type); });
    }
    const rec = new MediaRecorder(stream, {mimeType: 'video/webm; codecs="opus,vp8"'});
    if(logging){
        // Trace MediaRecorder state changes.
        rec.addEventListener("dataavailable", (ev)=>{ console.log(ev.type); });
        rec.addEventListener("pause", (ev)=>{ console.log(ev.type); });
        rec.addEventListener("resume", (ev)=>{ console.log(ev.type); });
        rec.addEventListener("start", (ev)=>{ console.log(ev.type); });
        rec.addEventListener("stop", (ev)=>{ console.log(ev.type); });
        rec.addEventListener("error", (ev)=>{ console.error(ev.type, ev); });
    }
    const ms = new MediaSource();
    if(logging){
        // Trace MediaSource / SourceBufferList events.
        ms.addEventListener('sourceopen', (ev)=>{ console.log(ev.type); });
        ms.addEventListener('sourceended', (ev)=>{ console.log(ev.type); });
        ms.addEventListener('sourceclose', (ev)=>{ console.log(ev.type); });
        ms.sourceBuffers.addEventListener('addsourcebuffer', (ev)=>{ console.log(ev.type); });
        ms.sourceBuffers.addEventListener('removesourcebuffer', (ev)=>{ console.log(ev.type); });
    }
    const video = document.createElement("video");
    if(logging){
        // Trace media element events ('timeupdate' is noisy but useful here).
        video.addEventListener('loadstart', (ev)=>{ console.log(ev.type); });
        video.addEventListener('progress', (ev)=>{ console.log(ev.type); });
        video.addEventListener('loadedmetadata', (ev)=>{ console.log(ev.type); });
        video.addEventListener('loadeddata', (ev)=>{ console.log(ev.type); });
        video.addEventListener('canplay', (ev)=>{ console.log(ev.type); });
        video.addEventListener('canplaythrough', (ev)=>{ console.log(ev.type); });
        video.addEventListener('playing', (ev)=>{ console.log(ev.type); });
        video.addEventListener('waiting', (ev)=>{ console.log(ev.type); });
        video.addEventListener('seeking', (ev)=>{ console.log(ev.type); });
        video.addEventListener('seeked', (ev)=>{ console.log(ev.type); });
        video.addEventListener('ended', (ev)=>{ console.log(ev.type); });
        video.addEventListener('emptied', (ev)=>{ console.log(ev.type); });
        video.addEventListener('stalled', (ev)=>{ console.log(ev.type); });
        video.addEventListener('timeupdate', (ev)=>{ console.log(ev.type); }); // annoying
        video.addEventListener('durationchange', (ev)=>{ console.log(ev.type); });
        video.addEventListener('ratechange', (ev)=>{ console.log(ev.type); });
        video.addEventListener('play', (ev)=>{ console.log(ev.type); });
        video.addEventListener('pause', (ev)=>{ console.log(ev.type); });
        video.addEventListener('error', (ev)=>{ console.warn(ev.type, ev); });
    }
    //video.srcObject = ms;
    video.src = URL.createObjectURL(ms);
    video.volume = 0;
    video.controls = true;
    video.autoplay = true;
    document.body.appendChild(video);
    // Wait until the MediaSource is attached before creating the SourceBuffer.
    yield new Promise((resolve, reject)=>{
        ms.addEventListener('sourceopen', ()=> resolve(), {once: true});
    });
    const sb = ms.addSourceBuffer(rec.mimeType);
    if(logging){
        // Trace SourceBuffer activity.
        sb.addEventListener('updatestart', (ev)=>{ console.log(ev.type); }); // annoying
        sb.addEventListener('update', (ev)=>{ console.log(ev.type); }); // annoying
        sb.addEventListener('updateend', (ev)=>{ console.log(ev.type); }); // annoying
        sb.addEventListener('error', (ev)=>{ console.error(ev.type, ev); });
        sb.addEventListener('abort', (ev)=>{ console.log(ev.type); });
    }
    // Tear everything down in dependency order: buffer -> source -> recorder
    // -> tracks -> element.
    const stop = async(function* stop(){
        console.info("stopping");
        if(sb.updating){ sb.abort(); }
        if(ms.readyState === "open"){ ms.endOfStream(); }
        rec.stop();
        stream.getTracks().map((track)=>{ track.stop(); });
        yield video.pause();
        console.info("end");
    });
    const button = document.createElement("button");
    button.innerHTML = "stop";
    button.addEventListener("click", ()=>{
        document.body.removeChild(button);
        tasks = tasks.then(stop);
    }, {once: true});
    document.body.appendChild(button);
    let i = 0;
    rec.ondataavailable = ({data})=>{
        // Chain each chunk onto the serial queue; a failure stops everything.
        tasks = tasks.then(async(function*(){
            console.group(""+i);
            try{
                if(logging){ console.log("dataavailable", "size:", data.size); }
                if(data.size === 0){
                    console.warn("empty recorder data");
                    throw new Error("empty recorder data");
                }
                const buf = yield readAsArrayBuffer(data);
                sb.appendBuffer(buf);
                // Wait for this append to finish before the next task runs.
                yield new Promise((resolve, reject)=>{
                    sb.addEventListener('updateend', ()=> resolve(), {once: true});
                    // FIX: was `reject(ev)` -- `ev` is not defined in this
                    // scope, so an append error raised a ReferenceError
                    // instead of rejecting with the actual error event.
                    sb.addEventListener("error", (err)=> reject(err), {once: true});
                });
                if(logging){
                    console.log("timestampOffset", sb.timestampOffset);
                    console.log("appendWindowStart", sb.appendWindowStart);
                    console.log("appendWindowEnd", sb.appendWindowEnd);
                    for(let i=0; i<sb.buffered.length; i++){
                        console.log("buffered", i, sb.buffered.start(i), sb.buffered.end(i));
                    }
                    for(let i=0; i<video.seekable.length; i++){
                        console.log("seekable", i, video.seekable.start(i), video.seekable.end(i));
                    }
                    console.log("webkitAudioDecodedByteCount", video.webkitAudioDecodedByteCount);
                    console.log("webkitVideoDecodedByteCount", video.webkitVideoDecodedByteCount);
                    console.log("webkitDecodedFrameCount", video.webkitDecodedFrameCount);
                    console.log("webkitDroppedFrameCount", video.webkitDroppedFrameCount);
                }
                // A second buffered range means the appended chunks left a gap
                // on the timeline -- playback would stall there, so bail out.
                if (video.buffered.length > 1) {
                    console.warn("MSE buffered has a gap!");
                    throw new Error("MSE buffered has a gap!");
                }
            }catch(err){
                console.error(err);
                yield stop();
                console.groupEnd(""+i); i++;
                return Promise.reject(err);
            }
            console.groupEnd(""+i);
            i++;
        }));
    };
    rec.start(1000);
    console.info("start");
});
// Resolve with the given duration once `ms` milliseconds have elapsed.
function sleep(ms){
    return new Promise((resolve) => {
        setTimeout(() => { resolve(ms); }, ms);
    });
}
// Promise wrapper around FileReader.readAsArrayBuffer: resolves with the
// ArrayBuffer on completion, rejects with the reader's DOMException on error.
function readAsArrayBuffer(blob) {
    return new Promise((resolve, reject)=>{
        const reader = new FileReader();
        reader.addEventListener("loadend", ()=> resolve(reader.result), {once: true});
        // FIX: the "error" listener receives a plain Event with no `.error`
        // property, so the old `reject(err.error)` always rejected with
        // undefined. The failure reason is exposed as reader.error.
        reader.addEventListener("error", ()=> reject(reader.error), {once: true});
        reader.readAsArrayBuffer(blob);
    });
}
// Minimal generator driver: converts a generator function that yields
// Promises into a plain function returning a Promise for the generator's
// final return value (a hand-rolled stand-in for async/await).
function async(generatorFunc){
    return function (arg) {
        const generator = generatorFunc(arg);
        const step = (value) => {
            const result = generator.next(value);
            if (result.done) {
                return result.value;
            }
            // Awaited Promises are fed back into the generator; any other
            // yielded value short-circuits as a resolved Promise (preserving
            // the original driver's behavior).
            return result.value instanceof Promise
                ? result.value.then(step)
                : Promise.resolve(result.value);
        };
        return step(null);
    };
}
// Entry point: clear old console noise, then run the capture/playback demo
// and surface any failure from the generator pipeline.
console.clear();
main().catch(console.error);
根据我使用 MediaRecorder 和 MediaSource 的经验,大多数与视频冻结或返回错误相关的错误可能是由于接收的块不同步造成的。我相信 webm(也许还有其他媒体类型)需要按时间码的递增顺序接收这些块。记录、发送和接收块 Async 可能无法保留时间码的递增顺序。
所以,在分析了我自己使用 MediaRecorder/MediaSource 的视频冻结体验后,我更改了我的代码以同步发送录制的块,而不是异步发送。
此解决方案在 Firefox 中运行良好,不会卡顿。浏览器客户端需要 jQuery,服务器端需要支持 CGI 的 Python3。它还包含两个服务器端 Python3 程序,用于在数据生成的同时写入和读取网络摄像头数据。
浏览器客户端:
<!-- Browser client (v1). Records the webcam with MediaRecorder, shows a local
     MSE preview in #video, POSTs each chunk (base64) to post_data_webcam.py,
     and polls get_data_webcam.py every second to replay the server-side copy
     in #video2 through a second MediaSource. -->
<html>
<head>
<script type="text/javascript" src="js/jquery.min.js"></script>
</head>
<body>
<!-- #video: local preview fed directly from the recorder.
     #video2: playback of the chunks round-tripped through the server. -->
<video id="video" width="300" height="300" controls></video>
<video id="video2" width="300" height="300" controls></video>
<script>
// Index (line number in webcam_text.txt) of the next server chunk to fetch.
var offsetA = 0;
var res;
var pos;
var b = "base64," ;
var fr = new FileReader();
// Decode the base64 payload of a data URI back into a webm Blob.
function b64toBlob(dataURI) {
var byteString = atob(dataURI.split(',')[1]);
var ab = new ArrayBuffer(byteString.length);
var ia = new Uint8Array(ab);
for (var i = 0; i < byteString.length; i++) {
ia[i] = byteString.charCodeAt(i);
}
return new Blob([ab], { type: 'video/webm; codecs="vp8, opus"' });
}
// 1. Create a `MediaSource`
var mediaSource2 = new MediaSource();
// 2. Create an object URL from the `MediaSource`
var url = URL.createObjectURL(mediaSource2);
// 3. Set the video's `src` to the object URL
var video = document.getElementById("video2");
video.src = url;
// 4. On the `sourceopen` event, create a `SourceBuffer`
var sourceBuffer2 = null;
// --- Local preview pipeline (same structure as the original question) ---
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
var mediaSource = new MediaSource();
var constraints = {
"audio": true,
"video": {
"mandatory": {
"minWidth": 320, "maxWidth": 320,
"minHeight": 240, "maxHeight": 240
}, "optional": []
}
};
window.mediaSource = mediaSource;
var sourceBuffer;
// NOTE(review): `var video` is re-declared here, rebinding the variable from
// #video2 (above) to #video. mediaSource2's URL was already assigned to
// #video2 so playback still works, but the shadowing is confusing.
var video = document.querySelector('#video');
window.video = video;
video.src = window.URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', function (e) {
console.log("sourceopen");
sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8, opus"');
window.sourceBuffer = sourceBuffer;
}, false);
mediaSource.addEventListener('error', function (e) {
console.log("error", e)
}, false);
var stack = [];
video.play();
mediaSource2.addEventListener("sourceopen", function()
{
// NOTE: Browsers are VERY picky about the codec being EXACTLY
// right here. Make sure you know which codecs you're using!
sourceBuffer2 = mediaSource2.addSourceBuffer("video/webm; codecs=\"vp8, opus\"");
// 'sequence' mode lays appended chunks back-to-back regardless of their
// internal timestamps, tolerating timestamp gaps between chunks.
sourceBuffer2.mode = 'sequence';
// Make sure to only append one chunk at a time to the SourceBuffer
navigator.getUserMedia(constraints, function (stream)
{
console.log("stream", stream);
// NOTE(review): `mediaRecorder` is an implicit global (no var/let).
mediaRecorder = new MediaRecorder(stream);
mediaRecorder.ondataavailable = function (e)
{
// Upload path: read the chunk as a data URI, strip everything up to and
// including "base64,", and POST the bare base64 text to the server.
fr.onload = function(){
res = this.result;
pos = res.search(b);
pos = pos + b.length;
res = res.substring(pos);
$.ajax({
type: 'POST',
url: 'post_data_webcam.py',
dataType: "html",
data: { chunk: res },
success: function(data){
//alert(data + ' yes');
},
error: function(XMLHttpRequest, textStatus, errorThrown) {
alert('Status: ' + textStatus + ' ' + ' Error: ' + errorThrown);
}
});
}
fr.readAsDataURL(e.data);
// Local preview path: append the same chunk to the first MediaSource.
// NOTE(review): appendBuffer is called without checking
// sourceBuffer.updating, so overlapping appends can throw
// InvalidStateError.
var reader = new FileReader();
reader.addEventListener("loadend", function () {
var arr = new Uint8Array(reader.result);
sourceBuffer.appendBuffer(arr);
});
reader.readAsArrayBuffer(e.data);
};
mediaRecorder.start(1000);
}, function (e) {
console.log(e)
});
// Poll the server once per second for the next chunk and append it.
var i = setInterval(function()
{
if (mediaSource2.readyState === "open" && sourceBuffer2 && sourceBuffer2.updating === false )
{
// NOTE(review): SourceBuffer has no `duration` property, so both
// conditions below compare undefined > 2 (always false) and the intended
// trimming of old data never runs -- probably `mediaSource.duration` or
// `sourceBuffer.buffered` was meant.
if (sourceBuffer.duration > 2){
sourceBuffer.remove(0, sourceBuffer.duration - 2);
}
if (sourceBuffer2.duration > 2){
sourceBuffer2.remove(0, sourceBuffer2.duration - 2);
}
// async:false (deprecated sync XHR) keeps fetch/append strictly ordered.
$.ajax({
type: 'POST',
url: 'get_data_webcam.py',
dataType: "html",
async: false,
data: { offset: offsetA },
success: function(data){
data = data.trim();
// An empty payload ("base64,") means chunk `offsetA` isn't on the
// server yet; retry next tick without advancing the offset.
if (data != 'base64,') {
var reader = new FileReader();
reader.addEventListener("loadend", function () {
var arr = new Uint8Array(reader.result);
sourceBuffer2.appendBuffer(arr);
});
reader.readAsArrayBuffer( b64toBlob(data) );
offsetA = offsetA + 1;
}
},
error: function(XMLHttpRequest, textStatus, errorThrown) {
alert('Status: ' + textStatus + ' ' + ' Error: ' + errorThrown);
}
});
}
}, 1000);
});
</script>
</body>
</html>
Server-side Python3 网络摄像头视频写入器: post_data_webcam.py
# post_data_webcam.py — CGI endpoint: receives one base64-encoded recorded
# chunk per POST and appends it to a binary video file plus a text chunk log.
import os
import sys
import cgi
import cgitb
import base64
include_path = '/var/project_path/www'
cgitb.enable(display=0, logdir=f"""{include_path}/tmp_errors""") # include_path is OUTDIR
sys.path.insert(0, include_path)
def enc_print(string='', encoding='utf8'):
    # Write one response line as raw bytes in the given encoding.
    sys.stdout.buffer.write(string.encode(encoding) + b'\n')
from html import escape
args = cgi.FieldStorage()
# 'chunk' is the base64 tail of a data URL produced by FileReader.readAsDataURL.
chunk = '' if not args.getvalue( "chunk" ) else escape( args.getvalue( "chunk" ) )
# NOTE(review): despite the .mp4 name the client records WebM, so this file
# holds whatever container MediaRecorder produced — confirm before renaming.
mp4 = 'webcam.mp4'
mp4_text = 'webcam_text.txt'
# Append the decoded bytes to the ever-growing video file.
with open (mp4, 'ab') as f:
    f.write( base64.b64decode(chunk) )
# Also log one '<length>,<base64>' line so get_data_webcam.py can serve
# individual chunks back by line offset.
with open (mp4_text, 'a') as f:
    f.write( str(len(chunk)) + ',' + chunk + '\n' )
html = 'success'
enc_print("Content-Type:text/html;charset=utf-8;")
enc_print()
enc_print(html)
Server-side Python3 网络摄像头视频 reader: get_data_webcam.py
# get_data_webcam.py — CGI endpoint: returns the N-th stored chunk (one line
# of webcam_text.txt, format '<length>,<base64>') as 'base64,<payload>'.
# An empty payload ('base64,') tells the client no chunk exists at N yet.
import os
import sys
import cgi
import cgitb
import base64
from itertools import islice
include_path = '/var/project_path/www'
cgitb.enable(display=0, logdir=f"""{include_path}/tmp_errors""") # include_path is OUTDIR
sys.path.insert(0, include_path)
def enc_print(string='', encoding='utf8'):
    # Write one response line as raw bytes in the given encoding.
    sys.stdout.buffer.write(string.encode(encoding) + b'\n')
from html import escape
args = cgi.FieldStorage()
# 'offset' selects which stored chunk (0-based line index) to return.
offset = '' if not args.getvalue( "offset" ) else escape( args.getvalue( "offset" ) )
mp4_text = 'webcam_text.txt'
data = ''
try:
    with open(mp4_text, 'r') as f:
        # Seek to the requested line lazily with islice instead of
        # readlines(): the log grows forever, so loading the whole file
        # on every 1 Hz poll was accidental O(file-size) work per request.
        line = next(islice(f, int(offset), None), '')
    if line:
        data = line.split(',')[1].strip()
except (OSError, ValueError, IndexError):
    # Best-effort, as before: missing file, non-numeric offset, or a
    # malformed line all yield an empty payload. Narrowed from a bare
    # `except:` which also swallowed SystemExit/KeyboardInterrupt.
    pass
enc_print("Content-Type:text/html;charset=utf-8;")
enc_print()
enc_print('base64,' + data)
更新!这是我创建的版本 2,它可以在 Firefox 和 Chrome 中运行,并且不会冻结。请注意,我使用与之前回答中相同的两个 server-side Python3 程序来写入和读取网络摄像头数据。
浏览器客户端版本 2:
<html>
<head>
<script type="text/javascript" src="js/jquery.min.js"></script>
</head>
<body>
<video id="video1" width="300" height="300" autoplay controls ></video>
<video id="video2" width="300" height="300" controls></video>
<script>
// 0-based index of the next stored chunk to request from get_data_webcam.py;
// advanced only after a non-empty chunk is actually received.
var offsetA = 0;
/**
 * Decode the base64 payload of a data URI into a WebM Blob.
 * @param {string} dataURI - String of the form "<prefix>,<base64 payload>";
 *   everything before the first ',' is ignored.
 * @returns {Blob} Blob of the decoded bytes, typed 'video/webm; codecs=vp8;'.
 */
function b64toBlob(dataURI) {
  const decoded = atob(dataURI.split(',')[1]);
  const bytes = new Uint8Array(decoded.length);
  for (let i = 0; i < decoded.length; i++) {
    bytes[i] = decoded.charCodeAt(i);
  }
  return new Blob([bytes.buffer], { type: 'video/webm; codecs=vp8;' });
}
// 1. Create a `MediaSource`
var mediaSource2 = new MediaSource();
// 2. Create an object URL from the `MediaSource`
var url = URL.createObjectURL(mediaSource2);
// 3. Set the video's `src` to the object URL
var video = document.getElementById("video2");
video.src = url;
// 4. On the `sourceopen` event, create a `SourceBuffer`
var sourceBuffer2 = null;
// Video only — no audio track is captured in this version.
const constraints = {video: true};
const video1 = document.querySelector('#video1');
// NOTE(review): `video2` aliases the same element as `video` above; only
// `video` is actually used later.
const video2 = document.querySelector('#video2');
//var blobList = [];
/**
 * getUserMedia success callback: show the live stream in #video1, record it
 * with MediaRecorder, upload each 1 s chunk to the server as base64, and once
 * a second pull the next stored chunk back down and append it to the MSE
 * SourceBuffer behind the second <video>.
 * @param {MediaStream} stream - Camera stream delivered by getUserMedia.
 */
function handleSuccess(stream) {
  // Live local preview.
  video1.srcObject = stream;
  // FIX: the MediaRecorder option key is `mimeType`, not `type` — with
  // `{type: ...}` the option object was silently ignored and the browser
  // default container/codec was used. The trailing ';' is also dropped,
  // since an invalid MIME string makes the constructor throw.
  var mediaRecorder = new MediaRecorder(stream, { mimeType: "video/webm; codecs=vp8" });
  mediaRecorder.ondataavailable = function (e) {
    // e.data is a Blob with the next recorded chunk; ship its base64 data-URL
    // payload (everything after "base64,") to post_data_webcam.py.
    var res;
    var pos;
    var b = "base64,";
    var fr = new FileReader();
    fr.onload = function () {
      res = this.result;
      pos = res.search(b);
      pos = pos + b.length;
      res = res.substring(pos);
      // NOTE(review): async:false blocks the UI thread and is deprecated;
      // kept to preserve the original in-order upload behavior.
      $.ajax({
        type: 'POST',
        url: 'post_data_webcam.py',
        dataType: "html",
        async: false,
        data: { chunk: res },
        success: function (data) {
          //alert(data + ' yes');
        },
        error: function (XMLHttpRequest, textStatus, errorThrown) {
          alert('Status: ' + textStatus + ' ' + ' Error: ' + errorThrown);
        }
      });
    }
    fr.readAsDataURL(e.data);
  }
  // Emit a dataavailable event roughly every 1000 ms.
  mediaRecorder.start(1000);
  // Poll the server once a second for the next stored chunk.
  var i = setInterval(function () {
    if (mediaSource2.readyState === "open" && sourceBuffer2 && sourceBuffer2.updating === false) {
      // Keep only the last ~2 s buffered so playback stays near-live.
      if (sourceBuffer2.duration > 2) {
        sourceBuffer2.remove(0, sourceBuffer2.duration - 2);
      }
      $.ajax({
        type: 'POST',
        url: 'get_data_webcam.py',
        dataType: "html",
        async: false,
        data: { offset: offsetA },
        success: function (data) {
          data = data.trim();
          // 'base64,' alone means the server has no chunk at this offset
          // yet; retry next tick without advancing offsetA.
          if (data != 'base64,') {
            var reader = new FileReader();
            reader.addEventListener("loadend", function () {
              sourceBuffer2.appendBuffer(reader.result);
            });
            reader.readAsArrayBuffer(b64toBlob(data));
            offsetA = offsetA + 1;
          }
        },
        error: function (XMLHttpRequest, textStatus, errorThrown) {
          alert('Status: ' + textStatus + ' ' + ' Error: ' + errorThrown);
        }
      });
    }
  }, 1000);
  video.play();
}
/**
 * getUserMedia failure callback: report the rejection to the console.
 * @param {Error} err - Reason the camera could not be acquired.
 */
function handleError(err) {
  console.error('error', err);
}
// Create the SourceBuffer only once the MediaSource is attached and open,
// and only then start the camera — so no chunk can arrive before the buffer
// exists.
mediaSource2.addEventListener("sourceopen", function()
{
// NOTE: Browsers are VERY picky about the codec being EXACTLY
// right here. Make sure you know which codecs you're using!
sourceBuffer2 = mediaSource2.addSourceBuffer("video/webm; codecs=vp8;");
// 'sequence' mode plays appended chunks back-to-back in append order,
// ignoring the timestamps inside each chunk.
sourceBuffer2.mode = 'sequence';
// Make sure to only append one chunk at a time to the SourceBuffer
navigator.mediaDevices.getUserMedia(constraints).then(handleSuccess).catch(handleError);
});
</script>
</body>
</html>