Why isn't my multichannel mapping working correctly?
I recently posted this question about using multiroute in iOS, and I thought I had solved it, but I've found it doesn't entirely work: AVAudioEngine Multichannel mapping
The problem I'm having is that multiroute only works for the first two output channels. I'm trying to make it work with a 4-channel audio interface.
I've managed to route audio to each output of the USB interface using AVAudioPlayer:
var avplayer = AVAudioPlayer()

@IBAction func avAudioPlayerPlay(_ sender: Any)
{
    let audioSession = AVAudioSession.sharedInstance()
    let route = audioSession.currentRoute

    // set the session category
    do
    {
        //try audioSession.setCategory(.multiRoute)
        try audioSession.setCategory(.multiRoute, options: .mixWithOthers)
    }
    catch
    {
        print("unable to set category", error)
        return
    }

    // activate the audio session - turns on multiroute I believe
    do
    {
        try audioSession.setActive(true)
        //try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
    }
    catch
    {
        print("unable to set active", error)
        return
    }

    //audio interface + headphone jack
    let outputs:[AVAudioSessionChannelDescription] = [
        route.outputs[0].channels![2], // 3rd channel on Audio Interface
        route.outputs[1].channels![1]  // Right Channel of Headphones
    ]

    guard let filePath: String = Bundle.main.path(forResource: "audio", ofType: "m4a") else { return }
    let fileURL: URL = URL(fileURLWithPath: filePath)

    do
    {
        avplayer = try AVAudioPlayer(contentsOf: fileURL)
    }
    catch
    {
        print("play error", error)
        return
    }

    avplayer.channelAssignments = outputs

    let result = avplayer.play()
    print(result)
}
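For reference, a small diagnostic sketch (just illustration, nothing here is specific to my setup) that prints every output port and channel name in the current route, which helps when picking hard-coded indices like route.outputs[0].channels![2]:

let route = AVAudioSession.sharedInstance().currentRoute
for (portIndex, port) in route.outputs.enumerated()
{
    print("port \(portIndex): \(port.portName) [\(port.portType.rawValue)]")
    for (channelIndex, channel) in (port.channels ?? []).enumerated()
    {
        print("  channel \(channelIndex): \(channel.channelName)")
    }
}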
But I can't get it to work using AVAudioEngine:
private func getOutputChannelMapIndices(_ names:[String?]) -> [Int]
{
    let session = AVAudioSession.sharedInstance()
    let route = session.currentRoute
    let outputPorts = route.outputs

    var channelMapIndices:[Int] = []

    for name in names
    {
        var chIndex = 0
        for outputPort in outputPorts
        {
            guard let channels = outputPort.channels else
            {
                continue
            }
            for channel in channels
            {
                print(channel.channelName)
                if channel.channelName == name
                {
                    if names.count > channelMapIndices.count
                    {
                        channelMapIndices.append(chIndex)
                    }
                }
                chIndex += 1
            }
        }
    }
    return channelMapIndices
}
@IBAction func nodesPlay(_ sender: Any)
{
    let channelNames = [
        "UMC204HD 192k 3",
        "Headphones Left",
        "Headphones Right",
        nil
    ]

    let audioSession = AVAudioSession.sharedInstance()

    // set the session category
    do
    {
        //try audioSession.setCategory(.multiRoute)
        try audioSession.setCategory(.multiRoute, options: .mixWithOthers)
    }
    catch
    {
        print("unable to set category", error)
        return
    }

    // activate the audio session - turns on multiroute I believe
    do
    {
        try audioSession.setActive(true)
        //try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
    }
    catch
    {
        print("unable to set active", error)
        return
    }

    let channelMapIndices = getOutputChannelMapIndices(channelNames)
    print("channelMapIndices: ", channelMapIndices)

    engine = AVAudioEngine()
    output = engine.outputNode
    mixer = engine.mainMixerNode

    player = AVAudioPlayerNode()
    engine.attach(player)

    guard let filePath: String = Bundle.main.path(forResource: "audio", ofType: "m4a") else { return }
    let fileURL: URL = URL(fileURLWithPath: filePath)

    let file = try! AVAudioFile(forReading: fileURL)

    let outputNumChannels = output.outputFormat(forBus: 0).channelCount
    print("outputNumChannels:" , outputNumChannels)

    var outputChannelMap:[Int] = Array(repeating: -1, count: Int(outputNumChannels))

    let numberOfSourceChannels = file.processingFormat.channelCount
    print("numberOfSourceChannels: ", numberOfSourceChannels)

    var sourceChIndex = 0
    for chIndex in channelMapIndices
    {
        if chIndex < outputNumChannels && sourceChIndex < numberOfSourceChannels
        {
            outputChannelMap[chIndex] = sourceChIndex
            sourceChIndex += 1
        }
    }
    print("outputChannelMap: ", outputChannelMap)

    if let au = output.audioUnit
    {
        let propSize = UInt32(MemoryLayout.size(ofValue: outputChannelMap))
        print("propSize:", propSize)
        let result = AudioUnitSetProperty(au, kAudioOutputUnitProperty_ChannelMap, kAudioUnitScope_Global, 0, &outputChannelMap, propSize)
        print("result: ", result)
    }

    let channelLayout = AVAudioChannelLayout(layoutTag: kAudioChannelLayoutTag_DiscreteInOrder | UInt32(numberOfSourceChannels))
    let format = AVAudioFormat(streamDescription: file.processingFormat.streamDescription, channelLayout: channelLayout)

    engine.connect(player, to: mixer, format:format)
    engine.connect(mixer, to: output, format:format)

    player.scheduleFile(file, at: nil, completionHandler: nil)

    do
    {
        try engine.start()
    }
    catch
    {
        print("can't start", error)
        return
    }

    player.play()
}
I'd appreciate it if anyone could explain why I can't seem to play any audio to outputs 3 or 4.
Note, much of this code was translated from here: https://forums.developer.apple.com/thread/15416
I think the problem is the line

let propSize = UInt32(MemoryLayout.size(ofValue: outputChannelMap))

This gives you the size of the array object, which is essentially the size of a pointer, not the size of the objects in the array. See the discussion in the Apple docs.

The size of the property should be the number of channels contained in the array multiplied by the size of Int32, since AudioUnitSetProperty is a C API and that would be the size of the corresponding C array:

let propSize = UInt32(MemoryLayout<Int32>.stride * outputChannelMap.count)

You should also declare outputChannelMap as an array of Int32, since that is the type kAudioOutputUnitProperty_ChannelMap expects:

var outputChannelMap:[Int32] = Array(repeating: -1, count: Int(outputNumChannels))
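Putting both changes together, the channel-map section of nodesPlay would look roughly like this (a sketch only; variable names are taken from the question's code):

var outputChannelMap = [Int32](repeating: -1, count: Int(outputNumChannels))

var sourceChIndex: Int32 = 0
for chIndex in channelMapIndices
{
    if chIndex < Int(outputNumChannels) && sourceChIndex < Int32(numberOfSourceChannels)
    {
        // route source channel sourceChIndex to hardware output channel chIndex
        outputChannelMap[chIndex] = sourceChIndex
        sourceChIndex += 1
    }
}

if let au = output.audioUnit
{
    // size of the equivalent C array: element count * size of Int32
    let propSize = UInt32(MemoryLayout<Int32>.stride * outputChannelMap.count)
    let result = AudioUnitSetProperty(au, kAudioOutputUnitProperty_ChannelMap, kAudioUnitScope_Global, 0, &outputChannelMap, propSize)
    print("AudioUnitSetProperty result: ", result)
}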