在 javascript 内从麦克风向扬声器播放实时音频
Play realtime audio from mic to speaker in javascript
我需要从麦克风捕捉音频并实时播放。
我如何在 JavaScript 中实现它?
我不想先录制、保存之后再播放,而是想把声音实时输出到扬声器。
// Record microphone audio with MediaRecorder and expose start/stop controls.
// Resolves to { start, stop }; stop() resolves to { audioBlob, audioUrl, play }.
// NOTE(review): this records, then plays back a Blob — it is NOT realtime
// monitoring, despite the question asking for realtime output.
//
// Fixes vs. original: the `new Promise(async resolve => …)` constructor
// anti-pattern swallowed rejections (e.g. a denied mic permission never
// rejected the outer promise); a plain async function propagates them.
// The "stop" listener now uses { once: true } so repeated stop() calls
// do not stack handlers.
const recordAudio = async () => {
  const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
  const mediaRecorder = new MediaRecorder(stream);
  const audioChunks = [];

  mediaRecorder.addEventListener("dataavailable", (event) => {
    audioChunks.push(event.data);
  });

  const start = () => mediaRecorder.start();

  const stop = () =>
    new Promise((resolve) => {
      mediaRecorder.addEventListener(
        "stop",
        () => {
          const audioBlob = new Blob(audioChunks);
          const audioUrl = URL.createObjectURL(audioBlob);
          const audio = new Audio(audioUrl);
          const play = () => audio.play();
          resolve({ audioBlob, audioUrl, play });
        },
        { once: true }
      );
      mediaRecorder.stop();
    });

  return { start, stop };
};
// Resolve after `time` milliseconds.
function sleep(time) {
  return new Promise((resolve) => {
    setTimeout(resolve, time);
  });
}
// Demo: record 3 seconds of microphone audio, then play it back.
// Wrapped in try/catch — the original left the promise floating, so a
// denied mic permission became an unhandled promise rejection.
(async () => {
  try {
    const recorder = await recordAudio();
    recorder.start();
    await sleep(3000);
    const audio = await recorder.stop();
    audio.play();
  } catch (err) {
    console.error("Audio demo failed:", err);
  }
})();
这是经过大量尝试之后得到的可用代码。
// Request microphone-only access and hand the resulting stream to
// handleSuccess; any failure (permission denied, no device) goes to
// handleError.
if (navigator.mediaDevices) {
  const constraints = (window.constraints = { audio: true, video: false });
  navigator.mediaDevices
    .getUserMedia(constraints)
    .then(handleSuccess)
    .catch(handleError);
}
// Toggle live microphone monitoring. First call: route the stream into an
// autoplaying <audio> element (mic → speakers). Second call: tear it down.
// @param {MediaStream} stream - freshly acquired microphone stream.
function handleSuccess(stream) {
  if (window.stream) {
    // Toggle off: stop the previously active tracks…
    window.stream.getAudioTracks().forEach(track => track.stop());
    window.stream = null;
    // …and also release the NEW stream we were just handed. The original
    // abandoned it with live tracks, leaving the microphone hot (the
    // browser's recording indicator stayed on) — a resource leak.
    stream.getAudioTracks().forEach(track => track.stop());
  } else {
    const audio = document.createElement('audio');
    audio.controls = true;
    audio.autoplay = true;
    window.stream = stream;
    // srcObject + autoplay plays the live stream without recording it.
    audio.srcObject = stream;
    // NOTE(review): MediaStream `oninactive` is nonstandard/deprecated;
    // prefer listening for `ended` on individual tracks — confirm target
    // browsers before relying on it.
    stream.oninactive = function() {
      console.log('Stream ended');
    };
  }
}
// Report a getUserMedia failure to the console.
// @param {Error} e - rejection reason from getUserMedia.
function handleError(e) {
  const { message } = e;
  console.log("Alert", message);
}
我需要从麦克风捕捉音频并实时播放。我如何在 JavaScript 中实现它?我不想先录制、保存之后再播放,而是想把声音实时输出到扬声器。
// Record microphone audio with MediaRecorder and expose start/stop controls.
// Resolves to { start, stop }; stop() resolves to { audioBlob, audioUrl, play }.
// NOTE(review): this records, then plays back a Blob — it is NOT realtime
// monitoring, despite the question asking for realtime output.
//
// Fixes vs. original: the `new Promise(async resolve => …)` constructor
// anti-pattern swallowed rejections (e.g. a denied mic permission never
// rejected the outer promise); a plain async function propagates them.
// The "stop" listener now uses { once: true } so repeated stop() calls
// do not stack handlers.
const recordAudio = async () => {
  const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
  const mediaRecorder = new MediaRecorder(stream);
  const audioChunks = [];

  mediaRecorder.addEventListener("dataavailable", (event) => {
    audioChunks.push(event.data);
  });

  const start = () => mediaRecorder.start();

  const stop = () =>
    new Promise((resolve) => {
      mediaRecorder.addEventListener(
        "stop",
        () => {
          const audioBlob = new Blob(audioChunks);
          const audioUrl = URL.createObjectURL(audioBlob);
          const audio = new Audio(audioUrl);
          const play = () => audio.play();
          resolve({ audioBlob, audioUrl, play });
        },
        { once: true }
      );
      mediaRecorder.stop();
    });

  return { start, stop };
};
// Resolve after `time` milliseconds.
function sleep(time) {
  return new Promise((resolve) => {
    setTimeout(resolve, time);
  });
}
// Demo: record 3 seconds of microphone audio, then play it back.
// Wrapped in try/catch — the original left the promise floating, so a
// denied mic permission became an unhandled promise rejection.
(async () => {
  try {
    const recorder = await recordAudio();
    recorder.start();
    await sleep(3000);
    const audio = await recorder.stop();
    audio.play();
  } catch (err) {
    console.error("Audio demo failed:", err);
  }
})();
这是经过大量尝试之后得到的可用代码。
// Request microphone-only access and hand the resulting stream to
// handleSuccess; any failure (permission denied, no device) goes to
// handleError.
if (navigator.mediaDevices) {
  const constraints = (window.constraints = { audio: true, video: false });
  navigator.mediaDevices
    .getUserMedia(constraints)
    .then(handleSuccess)
    .catch(handleError);
}
// Toggle live microphone monitoring. First call: route the stream into an
// autoplaying <audio> element (mic → speakers). Second call: tear it down.
// @param {MediaStream} stream - freshly acquired microphone stream.
function handleSuccess(stream) {
  if (window.stream) {
    // Toggle off: stop the previously active tracks…
    window.stream.getAudioTracks().forEach(track => track.stop());
    window.stream = null;
    // …and also release the NEW stream we were just handed. The original
    // abandoned it with live tracks, leaving the microphone hot (the
    // browser's recording indicator stayed on) — a resource leak.
    stream.getAudioTracks().forEach(track => track.stop());
  } else {
    const audio = document.createElement('audio');
    audio.controls = true;
    audio.autoplay = true;
    window.stream = stream;
    // srcObject + autoplay plays the live stream without recording it.
    audio.srcObject = stream;
    // NOTE(review): MediaStream `oninactive` is nonstandard/deprecated;
    // prefer listening for `ended` on individual tracks — confirm target
    // browsers before relying on it.
    stream.oninactive = function() {
      console.log('Stream ended');
    };
  }
}
// Report a getUserMedia failure to the console.
// @param {Error} e - rejection reason from getUserMedia.
function handleError(e) {
  const { message } = e;
  console.log("Alert", message);
}