Canvas 使用 Web Audio API 进行可视化,适用于除 Safari 之外的主要浏览器
Canvas visualization with Web Audio API works on major browsers except Safari
我有一段通过 Web Audio API 和 HTML canvas 元素可视化音频的代码:
// Expose the demo's state on a single namespace object for easy console access.
window.MyNamespace = {}
// Browsers may refuse to create a Web Audio API context before a user gesture,
// so track whether we have seen the first one yet.
MyNamespace.firstUserGesture = true
// Audio-related state: the <audio> element and the visualization <canvas>.
MyNamespace.audio = {
  el: document.getElementById("aud"),
  canvasEl: document.getElementById("canvasEl"),
}
// Sizes the canvas, wires the media element's play/pause/ended events to the
// animation loop, and defines drawWave(), which renders the analyser's byte
// time-domain data as centered vertical bars.
// Must run after createAudio() so MyNamespace.audio.analyzer exists.
function setupCanvas() {
  // CSS (display) size of the canvas.
  MyNamespace.audio.canvasEl.style.width = "88%"
  MyNamespace.audio.canvasEl.style.height = "100px"
  // Backing-store size: 2x the CSS size for high-DPI rendering.
  // This also controls the apparent stroke thickness.
  MyNamespace.audio.canvasEl.width = 500 * 2
  MyNamespace.audio.canvasEl.height = 100 * 2
  // Get canvas 2D drawing context.
  MyNamespace.audio.canvasCtx = MyNamespace.audio.canvasEl.getContext("2d")
  // Set stroke color.
  MyNamespace.audio.canvasCtx.strokeStyle = "#4285F4"
  // Draw twice as thick lines due to high dpi scaling.
  MyNamespace.audio.canvasCtx.lineWidth = 2
  // requestAnimationFrame handle; 0 is a valid no-op argument for
  // cancelAnimationFrame (the previous `{}` was not a real handle).
  MyNamespace.audio.canvasAnimation = 0
  // Start/stop the animation loop together with media playback.
  MyNamespace.audio.el.addEventListener('play', () => drawWave())
  MyNamespace.audio.el.addEventListener('pause', () => cancelAnimationFrame(MyNamespace.audio.canvasAnimation))
  MyNamespace.audio.el.addEventListener('ended', () => cancelAnimationFrame(MyNamespace.audio.canvasAnimation))
  // Number of time-domain samples delivered per frame.
  const N = MyNamespace.audio.analyzer.fftSize
  MyNamespace.audio.data = new Uint8Array(N)
  // Renders one frame of the waveform and schedules the next one.
  function drawWave() {
    MyNamespace.audio.analyzer.getByteTimeDomainData(MyNamespace.audio.data)
    const WIDTH = MyNamespace.audio.canvasEl.width
    const HEIGHT = MyNamespace.audio.canvasEl.height
    const bufferLength = MyNamespace.audio.data.length
    const dataArray = MyNamespace.audio.data
    // https://github.com/mdn/voice-change-o-matic-float-data/blob/c745ba8b48d7a9b93661ac43da2886633c06f2a7/scripts/app.js#L190
    // Clear the previous frame to white.
    MyNamespace.audio.canvasCtx.fillStyle = 'rgb(255, 255, 255)'
    MyNamespace.audio.canvasCtx.fillRect(0, 0, WIDTH, HEIGHT)
    const barWidth = (WIDTH / bufferLength) * 2.5
    let x = 0
    for (let j = 0; j < bufferLength; j++) {
      // Byte time-domain data is centered at 128 (silence); the amplitude is
      // the distance from 128, doubled to make quiet signals more noticeable.
      // Clamp to HEIGHT so loud samples cannot overflow the canvas:
      // the unclamped maximum, 2 * 127 = 254, exceeds HEIGHT = 200.
      const barHeight = Math.min(Math.abs(dataArray[j] - 128) * 2, HEIGHT)
      MyNamespace.audio.canvasCtx.fillStyle = 'rgb(' + Math.floor(barHeight + 66) + ',133,244)'
      // Center each bar vertically; barHeight <= HEIGHT keeps the y offset >= 0.
      MyNamespace.audio.canvasCtx.fillRect(x, (HEIGHT - barHeight) / 2, barWidth, barHeight)
      x += barWidth + 1
    }
    // Keep animating while the media plays.
    MyNamespace.audio.canvasAnimation = requestAnimationFrame(drawWave)
  }
}
// Builds the Web Audio graph (once, on the first user gesture):
// media element -> analyser (for visualization) and media element -> destination
// (for audible output), then calls setupCanvas().
function createAudio() {
  if (MyNamespace.firstUserGesture) {
    // Note: As a consequence of calling createMediaElementSource(),
    // audio playback from the HTMLMediaElement will be re-routed into the processing graph of the AudioContext.
    // So playing/pausing the media can still be done through the media element API and the player controls.
    // Prefer the standard constructor; fall back to the WebKit-prefixed one (older Safari).
    if ('AudioContext' in window) {
      MyNamespace.ctx = new AudioContext()
      console.log('AudioContext() is available.')
    } else if ('webkitAudioContext' in window) {
      MyNamespace.ctx = new webkitAudioContext()
      console.log('webkitAudioContext() is available.')
    } else {
      // Bail out: without a context every call below would throw.
      // (Previously the code fell through and crashed on MyNamespace.ctx.)
      console.log('Web Audio API is not available.')
      return
    }
    MyNamespace.audio.source = MyNamespace.ctx.createMediaElementSource(MyNamespace.audio.el)
    MyNamespace.audio.analyzer = MyNamespace.ctx.createAnalyser()
    // Feed the analyser for visualization and the destination for audible output.
    MyNamespace.audio.source.connect(MyNamespace.audio.analyzer)
    MyNamespace.audio.source.connect(MyNamespace.ctx.destination)
    console.log('Web Audio API is all set')
    // We are sure Web Audio API context is ready, so the canvas can read analyzer.fftSize.
    setupCanvas()
  }
  MyNamespace.firstUserGesture = false
}
// Click handler for the play/pause button.
// The AudioContext is not allowed to start before a user gesture, so the
// audio graph is (lazily) created here before toggling playback.
function handleClick() {
  createAudio()
  const player = MyNamespace.audio.el
  // Toggle playback through the media element API.
  if (player.paused) {
    player.play()
  } else {
    player.pause()
  }
}
// Wire up the play/pause button; the click doubles as the unlocking user gesture.
MyNamespace.audio.btnEl = document.getElementById("btn")
MyNamespace.audio.btnEl.addEventListener("click", handleClick)
<div style="width: 100%;">
<button id="btn">Play/pause</button>
</div>
<div style="width: 100%;">
<audio id="aud" style="width: 50%;" preload="metadata" crossorigin="anonymous">
<source src="https://file-examples-com.github.io/uploads/2017/11/file_example_OOG_1MG.ogg" />
<source src="https://file-examples-com.github.io/uploads/2017/11/file_example_MP3_700KB.mp3" />
Your browser does not support the audio tag.
</audio>
</div>
<div style="width: 100%;">
<canvas id="canvasEl" style="width: 88%; height: 100px;"></canvas>
</div>
该代码在所有现代浏览器上都能正常工作。除了 macOS 上的 2018 Safari。
macOS上Opera浏览器的可视化示例是这样的:
日志表明 Safari 返回的所有数据都等于 128
:
let d = dataArray[j]
if (d != 128) {
console.log('data is NOT equal to 128')
}
只需添加 MP3 文件(而不是只有 OGG),问题中的代码片段现在在 Safari 上也能正常运行:
<source src="https://file-examples-com.github.io/uploads/2017/11/file_example_MP3_700KB.mp3" />
Safari 12.0.3 似乎不支持 OGG
文件类型。
我有一段通过 Web Audio API 和 HTML canvas 元素可视化音频的代码:
// Expose the demo's state on a single namespace object for easy console access.
window.MyNamespace = {}
// Browsers may refuse to create a Web Audio API context before a user gesture,
// so track whether we have seen the first one yet.
MyNamespace.firstUserGesture = true
// Audio-related state: the <audio> element and the visualization <canvas>.
MyNamespace.audio = {
  el: document.getElementById("aud"),
  canvasEl: document.getElementById("canvasEl"),
}
// Sizes the canvas, wires the media element's play/pause/ended events to the
// animation loop, and defines drawWave(), which renders the analyser's byte
// time-domain data as centered vertical bars.
// Must run after createAudio() so MyNamespace.audio.analyzer exists.
function setupCanvas() {
  // CSS (display) size of the canvas.
  MyNamespace.audio.canvasEl.style.width = "88%"
  MyNamespace.audio.canvasEl.style.height = "100px"
  // Backing-store size: 2x the CSS size for high-DPI rendering.
  // This also controls the apparent stroke thickness.
  MyNamespace.audio.canvasEl.width = 500 * 2
  MyNamespace.audio.canvasEl.height = 100 * 2
  // Get canvas 2D drawing context.
  MyNamespace.audio.canvasCtx = MyNamespace.audio.canvasEl.getContext("2d")
  // Set stroke color.
  MyNamespace.audio.canvasCtx.strokeStyle = "#4285F4"
  // Draw twice as thick lines due to high dpi scaling.
  MyNamespace.audio.canvasCtx.lineWidth = 2
  // requestAnimationFrame handle; 0 is a valid no-op argument for
  // cancelAnimationFrame (the previous `{}` was not a real handle).
  MyNamespace.audio.canvasAnimation = 0
  // Start/stop the animation loop together with media playback.
  MyNamespace.audio.el.addEventListener('play', () => drawWave())
  MyNamespace.audio.el.addEventListener('pause', () => cancelAnimationFrame(MyNamespace.audio.canvasAnimation))
  MyNamespace.audio.el.addEventListener('ended', () => cancelAnimationFrame(MyNamespace.audio.canvasAnimation))
  // Number of time-domain samples delivered per frame.
  const N = MyNamespace.audio.analyzer.fftSize
  MyNamespace.audio.data = new Uint8Array(N)
  // Renders one frame of the waveform and schedules the next one.
  function drawWave() {
    MyNamespace.audio.analyzer.getByteTimeDomainData(MyNamespace.audio.data)
    const WIDTH = MyNamespace.audio.canvasEl.width
    const HEIGHT = MyNamespace.audio.canvasEl.height
    const bufferLength = MyNamespace.audio.data.length
    const dataArray = MyNamespace.audio.data
    // https://github.com/mdn/voice-change-o-matic-float-data/blob/c745ba8b48d7a9b93661ac43da2886633c06f2a7/scripts/app.js#L190
    // Clear the previous frame to white.
    MyNamespace.audio.canvasCtx.fillStyle = 'rgb(255, 255, 255)'
    MyNamespace.audio.canvasCtx.fillRect(0, 0, WIDTH, HEIGHT)
    const barWidth = (WIDTH / bufferLength) * 2.5
    let x = 0
    for (let j = 0; j < bufferLength; j++) {
      // Byte time-domain data is centered at 128 (silence); the amplitude is
      // the distance from 128, doubled to make quiet signals more noticeable.
      // Clamp to HEIGHT so loud samples cannot overflow the canvas:
      // the unclamped maximum, 2 * 127 = 254, exceeds HEIGHT = 200.
      const barHeight = Math.min(Math.abs(dataArray[j] - 128) * 2, HEIGHT)
      MyNamespace.audio.canvasCtx.fillStyle = 'rgb(' + Math.floor(barHeight + 66) + ',133,244)'
      // Center each bar vertically; barHeight <= HEIGHT keeps the y offset >= 0.
      MyNamespace.audio.canvasCtx.fillRect(x, (HEIGHT - barHeight) / 2, barWidth, barHeight)
      x += barWidth + 1
    }
    // Keep animating while the media plays.
    MyNamespace.audio.canvasAnimation = requestAnimationFrame(drawWave)
  }
}
// Builds the Web Audio graph (once, on the first user gesture):
// media element -> analyser (for visualization) and media element -> destination
// (for audible output), then calls setupCanvas().
function createAudio() {
  if (MyNamespace.firstUserGesture) {
    // Note: As a consequence of calling createMediaElementSource(),
    // audio playback from the HTMLMediaElement will be re-routed into the processing graph of the AudioContext.
    // So playing/pausing the media can still be done through the media element API and the player controls.
    // Prefer the standard constructor; fall back to the WebKit-prefixed one (older Safari).
    if ('AudioContext' in window) {
      MyNamespace.ctx = new AudioContext()
      console.log('AudioContext() is available.')
    } else if ('webkitAudioContext' in window) {
      MyNamespace.ctx = new webkitAudioContext()
      console.log('webkitAudioContext() is available.')
    } else {
      // Bail out: without a context every call below would throw.
      // (Previously the code fell through and crashed on MyNamespace.ctx.)
      console.log('Web Audio API is not available.')
      return
    }
    MyNamespace.audio.source = MyNamespace.ctx.createMediaElementSource(MyNamespace.audio.el)
    MyNamespace.audio.analyzer = MyNamespace.ctx.createAnalyser()
    // Feed the analyser for visualization and the destination for audible output.
    MyNamespace.audio.source.connect(MyNamespace.audio.analyzer)
    MyNamespace.audio.source.connect(MyNamespace.ctx.destination)
    console.log('Web Audio API is all set')
    // We are sure Web Audio API context is ready, so the canvas can read analyzer.fftSize.
    setupCanvas()
  }
  MyNamespace.firstUserGesture = false
}
// Click handler for the play/pause button.
// The AudioContext is not allowed to start before a user gesture, so the
// audio graph is (lazily) created here before toggling playback.
function handleClick() {
  createAudio()
  const player = MyNamespace.audio.el
  // Toggle playback through the media element API.
  if (player.paused) {
    player.play()
  } else {
    player.pause()
  }
}
// Wire up the play/pause button; the click doubles as the unlocking user gesture.
MyNamespace.audio.btnEl = document.getElementById("btn")
MyNamespace.audio.btnEl.addEventListener("click", handleClick)
<div style="width: 100%;">
<button id="btn">Play/pause</button>
</div>
<div style="width: 100%;">
<audio id="aud" style="width: 50%;" preload="metadata" crossorigin="anonymous">
<source src="https://file-examples-com.github.io/uploads/2017/11/file_example_OOG_1MG.ogg" />
<source src="https://file-examples-com.github.io/uploads/2017/11/file_example_MP3_700KB.mp3" />
Your browser does not support the audio tag.
</audio>
</div>
<div style="width: 100%;">
<canvas id="canvasEl" style="width: 88%; height: 100px;"></canvas>
</div>
该代码在所有现代浏览器上都能正常工作。除了 macOS 上的 2018 Safari。
macOS上Opera浏览器的可视化示例是这样的:
日志表明 Safari 返回的所有数据都等于 128
:
let d = dataArray[j]
if (d != 128) {
console.log('data is NOT equal to 128')
}
只需添加 MP3 文件(而不是只有 OGG),问题中的代码片段现在在 Safari 上也能正常运行:
<source src="https://file-examples-com.github.io/uploads/2017/11/file_example_MP3_700KB.mp3" />
Safari 12.0.3 似乎不支持 OGG
文件类型。