AVAudioEngine no sound
I've been trying for a while to work out how to stream live audio in iOS from the data supplied by a URLSessionDataTask.
I've declared a custom class to manage the player operations, and it looks like this:
import UIKit
import AVFoundation

class AudioDataPlayer: NSObject {

    // MARK: - Variables

    // MARK: Constants
    enum Status {
        case playing
        case notPlaying
    }

    let audioPlayerQueue = DispatchQueue(label: "audioPlayerQueue", qos: DispatchQoS.userInteractive)

    // MARK: Vars
    private(set) var currentStatus: Status = .notPlaying

    private var audioEngine: AVAudioEngine = AVAudioEngine()
    private var streamingAudioPlayerNode: AVAudioPlayerNode = AVAudioPlayerNode()
    private(set) var streamingAudioFormat: AVAudioFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32,
                                                                         sampleRate: 48000,
                                                                         channels: 2,
                                                                         interleaved: false)!

    // MARK: - Constructor
    override init() {
        super.init()
    }

    // MARK: - Private methods

    // MARK: - Public methods
    func processData(_ data: Data) throws {

        if currentStatus == .notPlaying {
            do {
                try AVAudioSession.sharedInstance().setCategory(.playAndRecord, mode: .default, options: [.allowAirPlay])
                try AVAudioSession.sharedInstance().setActive(true)

                if #available(iOS 11.0, *) {
                    try audioEngine.enableManualRenderingMode(.realtime, format: streamingAudioFormat, maximumFrameCount: 3072)
                }

                audioEngine.attach(streamingAudioPlayerNode)
                audioEngine.connect(streamingAudioPlayerNode, to: audioEngine.mainMixerNode, format: streamingAudioFormat)

                currentStatus = .playing
            }
            catch {
                print("\(logClassName) ERROR -> \(error.localizedDescription)")
            }
        }

        audioPlayerQueue.async {
            if let audioPCMBuffer = data.makePCMBuffer(format: self.streamingAudioFormat) {
                self.streamingAudioPlayerNode.scheduleBuffer(audioPCMBuffer, completionHandler: {
                    // TODO
                })

                if !self.audioEngine.isRunning {
                    try! self.audioEngine.start()
                    self.streamingAudioPlayerNode.play()
                }
            }
            else {
                print("\(self.logClassName) TEST -> Ignoring data to play ...")
            }
        }
    }

    func stop() {
        audioEngine.stop()
        audioEngine.detach(streamingAudioPlayerNode)
        currentStatus = .notPlaying
    }
}
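(Aside: makePCMBuffer(format:) is not a standard Data or AVFoundation API, so the code above presumably relies on an extension along these lines. What follows is a hypothetical reconstruction, shown only so the class is self-contained; note that it copies into a single audio buffer, which matches interleaved or mono data:)

import AVFoundation

extension Data {
    // Hypothetical sketch: wrap raw PCM bytes in an AVAudioPCMBuffer of
    // the given format. It copies into the first (and, for interleaved
    // formats, only) audio buffer, so it assumes interleaved or mono data.
    func makePCMBuffer(format: AVAudioFormat) -> AVAudioPCMBuffer? {
        let streamDescription = format.streamDescription.pointee
        let frameCapacity = UInt32(count) / streamDescription.mBytesPerFrame
        guard let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: frameCapacity) else {
            return nil
        }
        buffer.frameLength = buffer.frameCapacity
        let audioBuffer = buffer.audioBufferList.pointee.mBuffers
        withUnsafeBytes { (rawBuffer: UnsafeRawBufferPointer) in
            guard let baseAddress = rawBuffer.baseAddress else { return }
            audioBuffer.mData?.copyMemory(from: baseAddress, byteCount: Int(audioBuffer.mDataByteSize))
        }
        return buffer
    }
}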
The function that manages the incoming data is 'processData(_ data: Data)', and it is called from another class like this:
let processingQueue = DispatchQueue(label: "processingQueue", qos: DispatchQoS.userInteractive)
var audioDataPlayer: AudioDataPlayer = AudioDataPlayer()

func urlSession(_ session: URLSession, dataTask: URLSessionDataTask, didReceive data: Data) {
    processingQueue.async {
        try! self.audioDataPlayer.processData(data)
    }
}
I've taken the code from forums and Apple's documentation site. However, perhaps I still don't quite understand how it works, and no sound comes out of the device...
The audio data is in 48 kHz, 16-bit, 2-channel format.
Any ideas?
If your audio data is 16-bit (integers, presumably), you need to initialise the AVAudioFormat with pcmFormatInt16 instead of pcmFormatFloat32. Non-interleaved also seems a little odd for that format, so you may have to set interleaved to true.
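A minimal sketch of that suggestion, assuming the stream really is 48 kHz, 16-bit, interleaved stereo PCM as the question describes:

import AVFoundation

// Suggested format for a 48 kHz, 16-bit, stereo stream. Int16 samples
// for two channels normally arrive interleaved (L R L R ...), hence
// interleaved: true.
let streamingAudioFormat = AVAudioFormat(commonFormat: .pcmFormatInt16,
                                         sampleRate: 48000,
                                         channels: 2,
                                         interleaved: true)!

The same format would then be passed both to makePCMBuffer(format:) and to the player node connection in the class above.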