Send AudioBuffer to Speaker
Here is what I am trying to do:
- Send microphone audio to an AudioWorkletProcessor (works)
- Send the result from the AudioWorkletProcessor to the server over WebSockets (works)
- Receive the data back over WebSockets (works)
- Send the data to the computer's speakers (how?)
Everything works except that I don't know how to implement #4. Here is what I have, simplified a bit to focus on the problem:
// 1. Code that sets up the audio context and connects the microphone to the worklet
const audioContext = new AudioContext({ sampleRate: 8000 });
audioContext.audioWorklet.addModule('/common/recorderworkletprocess.js').then(
  function () {
    const recorder = new AudioWorkletNode(audioContext, 'recorder-worklet');
    let constraints = { audio: true };
    navigator.mediaDevices.getUserMedia(constraints).then(function (stream) {
      const microphone = audioContext.createMediaStreamSource(stream);
      microphone.connect(recorder);
      recorder.connect(audioContext.destination);
    });
  }
);
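(Not shown above: browsers typically create an AudioContext in the suspended state until there is a user gesture, so depending on how the page is wired up something like the following may be needed before any audio flows. The #start button is just a hypothetical element for illustration.)
document.querySelector('#start').addEventListener('click', function () {
  // resume() satisfies the browser's autoplay policy so the audio graph can run
  audioContext.resume();
});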
// 2. The AudioWorkletProcessor. It sends the audio to the WebSocket, which sends it to the server in binary form:
class RecorderWorkletProcessor extends AudioWorkletProcessor {
  constructor() {
    super();
  }
  process(inputs) {
    const inputChannel = inputs[0][0]; // inputChannel is a Float32Array(128)
    socket.send(inputChannel);         // sent as byte[512]
    return true;
  }
}
registerProcessor('recorder-worklet', RecorderWorkletProcessor);
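(One caveat with the simplified version above: WebSocket is not normally exposed inside the AudioWorkletGlobalScope, so in a real setup the samples usually have to be relayed to the main thread, which owns the socket. A minimal sketch of that relay pattern, not the exact code used here:)
// Worklet side: hand each 128-sample block to the main thread
class RelayWorkletProcessor extends AudioWorkletProcessor {
  process(inputs) {
    const inputChannel = inputs[0][0];
    // Copy the samples; the audio engine may reuse the underlying buffer
    this.port.postMessage(new Float32Array(inputChannel));
    return true;
  }
}
registerProcessor('relay-worklet', RelayWorkletProcessor);

// Main-thread side, after creating the AudioWorkletNode:
// recorder.port.onmessage = function (e) { socket.send(e.data); };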
// 3 and 4. Finally, the server sends the received data back unchanged. The WebSocket delivers it as an ArrayBuffer(512). This is where I want to send it to the computer's speakers as audio:
socket.messageReceived = function (evt) {
  // evt.data contains an ArrayBuffer with length of 512
  // I want this to be played on the computer's speakers. How to do this?
}
Any guidance would be greatly appreciated.
OK, I believe I can answer my own question. It is not very robust, but it gave me the information I needed.
socket.messageReceived = function (evt) {
  // evt.data contains an ArrayBuffer of 512 bytes (128 Float32 samples)
  let fArr = new Float32Array(evt.data);
  // Wrap the samples in a one-channel, 128-frame AudioBuffer at 8 kHz
  let buf = audioContext.createBuffer(1, 128, 8000);
  buf.copyToChannel(fArr, 0);
  // Play the chunk through the speakers right away
  let player = audioContext.createBufferSource();
  player.buffer = buf;
  player.connect(audioContext.destination);
  player.start(0);
}
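As noted, starting a new AudioBufferSourceNode the instant each packet arrives is fragile: any network jitter turns into clicks and gaps. A slightly sturdier variant (a sketch under the same 8 kHz, 128-samples-per-message assumptions, using the same socket.messageReceived handler) keeps a running playback cursor so consecutive chunks are scheduled back to back:
let playTime = 0; // audioContext time at which the next chunk should start

socket.messageReceived = function (evt) {
  const fArr = new Float32Array(evt.data);
  const buf = audioContext.createBuffer(1, fArr.length, 8000);
  buf.copyToChannel(fArr, 0);

  const player = audioContext.createBufferSource();
  player.buffer = buf;
  player.connect(audioContext.destination);

  // Schedule this chunk right after the previous one; if the cursor has
  // fallen behind the clock, restart it with a small (~50 ms) safety margin.
  playTime = Math.max(playTime, audioContext.currentTime + 0.05);
  player.start(playTime);
  playTime += buf.duration;
};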