How to lower mic input volume using navigator.mediaDevices.getUserMedia?

I'm building a recording app with navigator.mediaDevices.getUserMedia(), and it picks up every sound around me, even very quiet ones from 10 meters away. I don't play this audio back; I only visualize it based on its volume, so I only need sounds that are loud or close to the microphone, because there's too much background noise.

Also, if I enable playback to monitor my microphone input and make a quiet noise, such as tapping the table, I can't hear that sound in the playback, yet I still see it in the visualizer, and that's exactly what I don't want.

Here is my code:

const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
this.audioContext = new AudioContext();
this.sourceNode = this.audioContext.createMediaStreamSource(stream);
this.analyserNode = this.audioContext.createAnalyser();

this.sourceNode.connect(this.analyserNode);

const data = new Float32Array(this.analyserNode.fftSize);
this.analyserNode.getFloatTimeDomainData(data);

So how can I lower the microphone sensitivity or the mic input volume, or transform the analyser data, using the Web Audio API? I've read about AudioContext.createGain() and gain.value, but it's used for the output audio volume, not the input one.

"I've read about AudioContext.createGain() and gain.value, but it's used for output audio volume, not the input one"

It's not; it's used to control the volume of whatever audio passes through it.

You have to think of your AudioContext nodes as a chain; then you'll see that you can indeed use a GainNode to control the input volume of the next node it's connected to.

For example, if we declare something like

gainNode.gain.value = 0.5;
input.connect(gainNode);
gainNode.connect(analyserNode);
input.connect(audioContext.destination);

it can be represented as

Input [mic] ===>  GainNode  ===>  AnalyserNode
    100%   ||       50%                50%
           ||
            ===> AudioContext Output
                       100%

So here the gainNode does lower the volume fed into the AnalyserNode, but not the volume of the context's output.
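
Applied to the microphone input from your question, that chain takes only a few lines (a sketch reusing your getUserMedia setup; the 0.5 value is an arbitrary attenuation):

const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
const audioContext = new AudioContext();
const sourceNode = audioContext.createMediaStreamSource(stream);
const gainNode = audioContext.createGain();
gainNode.gain.value = 0.5; // attenuate everything the analyser will see
const analyserNode = audioContext.createAnalyser();
sourceNode.connect(gainNode);   // mic -> gain
gainNode.connect(analyserNode); // gain -> analyser (input already halved)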


And this is not really what you want either.

Indeed, the AnalyserNode API has minDecibels and maxDecibels properties that do exactly what you want: ignore sounds outside a given decibel range.

But these properties only make sense for the frequency data (getXXXFrequencyData), since the waveform doesn't take volume into account.
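
To see why, here is a sketch of how the byte frequency data relates to that range: getByteFrequencyData scales each bin so that minDecibels maps to 0 and maxDecibels maps to 255 (assuming an AudioContext ctx with a source already connected to the analyser; the -60/-10 values are just for illustration):

const analyser = ctx.createAnalyser();
analyser.minDecibels = -60; // bins quieter than -60dB read as 0
analyser.maxDecibels = -10; // bins louder than -10dB clip to 255
const bins = new Uint8Array(analyser.frequencyBinCount);
analyser.getByteFrequencyData(bins);
// a bin at 0 stayed below minDecibels; a bin at 255 reached maxDecibels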

However, we can still check whether that frequency data is within the bounds we want before deciding whether to draw the waveform.

polyfill();

(async() => {

  const ctx = new AudioContext();
  const input = await loadFileAsBufferNode(ctx);
  const analyser = ctx.createAnalyser();
  analyser.minDecibels = -90;
  analyser.maxDecibels = -10;
  analyser.fftSize = 512;
  input.connect(analyser);
  const gainNode = ctx.createGain();
  input.connect(gainNode);

  const bufferLength = analyser.frequencyBinCount;
  const freqArray = new Uint8Array(bufferLength);
  const waveArray = new Uint8Array(bufferLength);

  const canvasCtx = canvas.getContext('2d');
  const WIDTH = canvas.width;
  const HEIGHT = canvas.height;
  canvasCtx.lineWidth = 2;

  draw();
  // taken from https://developer.mozilla.org/en-US/docs/Web/API/AnalyserNode/maxDecibels#Example
  function draw() {
    requestAnimationFrame(draw);

    canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);
    analyser.getByteFrequencyData(freqArray);

    gainNode.gain.value = 1; // re-enable output; muted again below if out of bounds
    analyser.getByteTimeDomainData(waveArray);

    var barWidth = (WIDTH / bufferLength) * 2.5;
    var barHeight;
    var x = 0;

    for (var i = 0; i < bufferLength; i++) {
      barHeight = freqArray[i];

      canvasCtx.fillStyle = 'rgb(' + (barHeight + 100) + ',50,50)';
      canvasCtx.fillRect(x, HEIGHT - barHeight / 2, barWidth, barHeight / 2);

      x += barWidth + 1;
    }
    // here we check if the volume is in bounds
    if (freqArray.some(isTooHigh) || !freqArray.some(hasValue)) {
      canvasCtx.fillRect(0, HEIGHT / 2, WIDTH, 1);
      gainNode.gain.value = 0;
      return;
    }

    canvasCtx.beginPath();
    var sliceWidth = WIDTH * 1.0 / bufferLength;
    var x = 0;
    for (var i = 0; i < bufferLength; i++) {
      var v = waveArray[i] / 128.0;
      var y = v * HEIGHT / 2;
      if (i === 0) {
        canvasCtx.moveTo(x, y);
      } else {
        canvasCtx.lineTo(x, y);
      }
      x += sliceWidth;
    }

    canvasCtx.lineTo(canvas.width, canvas.height / 2);
    canvasCtx.stroke();

  };

  function isTooHigh(val) {
    return val === 255; // the bin clipped at maxDecibels (too loud)
  }

  function hasValue(val) {
    return val; // non-zero means the bin rose above minDecibels
  }
  // DOM
  maxDB.oninput = e => {
    const max = +maxDB.value;
    if (+minDB.value >= max) minDB.value = analyser.minDecibels = max - 1;
    analyser.maxDecibels = max;
  }
  minDB.oninput = e => {
    const min = +minDB.value;
    if (+maxDB.value <= min) maxDB.value = analyser.maxDecibels = min + 1;
    analyser.minDecibels = min;
  }
  out.onchange = e => {
    if (out.checked)
      gainNode.connect(ctx.destination);
    else
      gainNode.disconnect(ctx.destination);
  };

})();

function loadFileAsBufferNode(ctx, url = 'https://dl.dropboxusercontent.com/s/8c9m92u1euqnkaz/GershwinWhiteman-RhapsodyInBluePart1.mp3') {
  return fetch(url)
    .then(r => r.arrayBuffer())
    .then(buf => ctx.decodeAudioData(buf))
    .then(audioBuffer => {
      const source = ctx.createBufferSource();
      source.buffer = audioBuffer;
      source.loop = true; // AudioBufferSourceNode loops via 'loop'; 'repeat' doesn't exist
      source.start(0);
      return source;
    });
};

/* for Safari */
function polyfill() {
  window.AudioContext = window.AudioContext || window.webkitAudioContext;
  try {
    const prom = new AudioContext().decodeAudioData(new ArrayBuffer()).catch(e => {});
  } catch (e) {
    const prev = AudioContext.prototype.decodeAudioData;
    Object.defineProperty(AudioContext.prototype, 'decodeAudioData', {
      get: () => asPromise
    });

    function asPromise(audioBuffer, done, failed) {
      return new Promise((res, rej) => {
        prev.apply(this, [audioBuffer, onsuccess, onerror]);
        function onsuccess(buf) {
          if (typeof done === 'function') done(buf);
          res(buf);
        }
        function onerror(err) {
          if (typeof failed === 'function') failed(err);
          rej(err);
        }
      });
    }
  }
}
<label>min<input type="range" id="minDB" min="-100" max="-1" value="-90"></label>
<label>max<input type="range" id="maxDB" min="-99" max="0" value="-10"></label>
<label>output audio<input type="checkbox" id="out"></label>
<canvas id="canvas"></canvas>
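
To run this against your microphone instead of the demo file, swap loadFileAsBufferNode for a MediaStreamSource. A minimal sketch (loadMicAsSourceNode is a hypothetical helper name; getUserMedia requires a secure context and user permission):

async function loadMicAsSourceNode(ctx) {
  const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
  // a MediaStreamSource connects like any other node, so the
  // analyser + gain chain above works unchanged
  return ctx.createMediaStreamSource(stream);
}

With that source, the "output audio" checkbox plays back only the in-bounds sound, since the gain is set to 0 whenever the frequency data falls out of range.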