How to add audio using ffmpeg when recording video from the browser and streaming to YouTube/Twitch?
I have a web app I'm working on that lets users stream video from their browser and livestream simultaneously to YouTube and Twitch using ffmpeg. The app works fine when I don't need to send any audio, but currently I run into errors whenever I try to record both video and audio. I'm new to ffmpeg, so any help would be greatly appreciated. Here is my repo if needed: https://github.com/toshvelaga/livestream
Here is my node.js server with ffmpeg:
const child_process = require('child_process') // To be used later for running FFmpeg
const express = require('express')
const http = require('http')
const WebSocketServer = require('ws').Server
const NodeMediaServer = require('node-media-server')
const app = express()
const cors = require('cors')
const path = require('path')
const logger = require('morgan')
require('dotenv').config()
app.use(logger('dev'))
app.use(cors())
app.use(express.json({ limit: '200mb', extended: true }))
app.use(
express.urlencoded({ limit: '200mb', extended: true, parameterLimit: 50000 })
)
var authRouter = require('./routes/auth')
var compareCodeRouter = require('./routes/compareCode')
app.use('/', authRouter)
app.use('/', compareCodeRouter)
if (process.env.NODE_ENV === 'production') {
// serve static content
// npm run build
app.use(express.static(path.join(__dirname, 'client/build')))
app.get('*', (req, res) => {
res.sendFile(path.join(__dirname, 'client/build', 'index.html'))
})
}
const PORT = process.env.PORT || 8080
app.listen(PORT, () => {
console.log(`Server is starting on port ${PORT}`)
})
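// Separate HTTP server on port 3000 that the WebSocket server attaches to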
const server = http.createServer(app).listen(3000, () => {
console.log('Listening on PORT 3000...')
})
const wss = new WebSocketServer({
server: server,
})
wss.on('connection', (ws, req) => {
const ffmpeg = child_process.spawn('ffmpeg', [
// works fine when I use this but when I need audio problems arise
// '-f',
// 'lavfi',
// '-i',
// 'anullsrc',
'-i',
'-',
'-f',
'flv',
'-c',
'copy',
`${process.env.TWITCH_STREAM_ADDRESS}`,
'-f',
'flv',
'-c',
'copy',
`${process.env.YOUTUBE_STREAM_ADDRESS}`,
// '-f',
// 'flv',
// '-c',
// 'copy',
// `${process.env.FACEBOOK_STREAM_ADDRESS}`,
])
ffmpeg.on('close', (code, signal) => {
console.log(
'FFmpeg child process closed, code ' + code + ', signal ' + signal
)
ws.terminate()
})
ffmpeg.stdin.on('error', (e) => {
console.log('FFmpeg STDIN Error', e)
})
ffmpeg.stderr.on('data', (data) => {
console.log('FFmpeg STDERR:', data.toString())
})
ws.on('message', (msg) => {
console.log('DATA', msg)
ffmpeg.stdin.write(msg)
})
ws.on('close', (e) => {
console.log('kill: SIGINT')
ffmpeg.kill('SIGINT')
})
})
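// Node-Media-Server config: local RTMP ingest on port 1935, HTTP playback on port 8000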
const config = {
rtmp: {
port: 1935,
chunk_size: 60000,
gop_cache: true,
ping: 30,
ping_timeout: 60,
},
http: {
port: 8000,
allow_origin: '*',
},
}
var nms = new NodeMediaServer(config)
nms.run()
Here is the frontend code that records the video/audio and sends it to the server:
import React, { useState, useEffect, useRef } from 'react'
import Navbar from '../../components/Navbar/Navbar'
import './Dashboard.css'
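// getUserMedia constraints: request both microphone audio and camera video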
const CAPTURE_OPTIONS = {
audio: true,
video: true,
}
function Dashboard() {
const [mute, setMute] = useState(false)
const videoRef = useRef()
const ws = useRef()
const mediaStream = useUserMedia(CAPTURE_OPTIONS)
let liveStream
let liveStreamRecorder
if (mediaStream && videoRef.current && !videoRef.current.srcObject) {
videoRef.current.srcObject = mediaStream
}
const handleCanPlay = () => {
videoRef.current.play()
}
useEffect(() => {
ws.current = new WebSocket(
window.location.protocol.replace('http', 'ws') +
'//' + // http: -> ws:, https: -> wss:
'localhost:3000'
)
ws.current.onopen = () => {
console.log('WebSocket Open')
}
return () => {
ws.current.close()
}
}, [])
const startStream = () => {
liveStream = videoRef.current.captureStream(30) // 30 FPS
liveStreamRecorder = new MediaRecorder(liveStream, {
mimeType: 'video/webm;codecs=h264',
videoBitsPerSecond: 3 * 1024 * 1024,
})
liveStreamRecorder.ondataavailable = (e) => {
ws.current.send(e.data)
console.log('send data', e.data)
}
// Start recording, and dump data every second
liveStreamRecorder.start(1000)
}
const stopStream = () => {
liveStreamRecorder.stop()
ws.current.close()
}
const toggleMute = () => {
setMute(!mute)
}
return (
<>
<Navbar />
<div style={{ marginTop: '5rem' }} className='main'>
<div id='container'>
<video
ref={videoRef}
onCanPlay={handleCanPlay}
autoPlay
playsInline
muted={mute}
/>
</div>
<div className='button-container'>
<button onClick={startStream}>Go Live</button>
<button onClick={stopStream}>Stop Recording</button>
<button>Share Screen</button>
<button onClick={toggleMute}>Mute</button>
</div>
</div>
</>
)
}
const useUserMedia = (requestedMedia) => {
const [mediaStream, setMediaStream] = useState(null)
useEffect(() => {
async function enableStream() {
try {
const stream = await navigator.mediaDevices.getUserMedia(requestedMedia)
setMediaStream(stream)
} catch (err) {
console.log(err)
}
}
if (!mediaStream) {
enableStream()
} else {
return function cleanup() {
mediaStream.getVideoTracks().forEach((track) => {
track.stop()
})
}
}
}, [mediaStream, requestedMedia])
return mediaStream
}
export default Dashboard
So after some trial and error with ffmpeg, I got the audio working. Not sure if this is the best way to do it, but it works for now.
Here is the full file as well: https://github.com/toshvelaga/livestream/blob/main/server/server.js
const ffmpeg = child_process.spawn('ffmpeg', [
'-i',
'-',
// video codec config: low latency, adaptive bitrate
'-c:v',
'libx264',
'-preset',
'veryfast',
'-tune',
'zerolatency',
// audio codec config: sampling frequency (11025, 22050, 44100), bitrate 64 kbits
'-c:a',
'aac',
'-strict',
'-2',
'-ar',
'44100',
'-b:a',
'64k',
//force to overwrite
'-y',
// used for audio sync
'-use_wallclock_as_timestamps',
'1',
'-async',
'1',
//'-filter_complex', 'aresample=44100', // resample audio to 44100Hz, needed if input is not 44100
//'-strict', 'experimental',
'-bufsize',
'1000',
'-f',
'flv',
`${process.env.TWITCH_STREAM_ADDRESS}`,
// video codec config: low latency, adaptive bitrate
'-c:v',
'libx264',
'-preset',
'veryfast',
'-tune',
'zerolatency',
// audio codec config: sampling frequency (11025, 22050, 44100), bitrate 64 kbits
'-c:a',
'aac',
'-strict',
'-2',
'-ar',
'44100',
'-b:a',
'64k',
//force to overwrite
'-y',
// used for audio sync
'-use_wallclock_as_timestamps',
'1',
'-async',
'1',
'-f',
'flv',
`${process.env.YOUTUBE_STREAM_ADDRESS}`,
])
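For what it's worth, the likely reason the original `-c copy` version broke once audio was involved: MediaRecorder produces a WebM container, and while the video track here is H.264, the audio track in Chrome is typically Opus, which the FLV container used by RTMP does not support. Re-encoding audio to AAC (and video through libx264) yields FLV-compatible streams, which is why the version above works.
If you want to sanity-check the pipeline without burning your stream keys, here is a minimal sketch of the same idea that writes to a local file instead (test-output.flv is just an example path):
const ffmpeg = child_process.spawn('ffmpeg', [
  '-i',
  '-', // read the WebM chunks from stdin
  '-c:v',
  'libx264', // FLV-compatible video
  '-preset',
  'veryfast',
  '-tune',
  'zerolatency',
  '-c:a',
  'aac', // FLV-compatible audio (Opus is not)
  '-ar',
  '44100',
  '-b:a',
  '64k',
  '-y', // overwrite the test file if it already exists
  '-f',
  'flv',
  'test-output.flv', // local output instead of an RTMP URL
])
You can then play test-output.flv with ffplay to verify that both tracks arrive and stay in sync before pointing the outputs back at the RTMP addresses.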