Unable to stream audio from Bluetooth device
The following is the code I use to stream the audio data.
func prepareStreamRecording() throws -> OSStatus {
    try AVAudioSession.sharedInstance().setPreferredIOBufferDuration(10)

    // Describe the RemoteIO unit
    var audioComponentDescription = AudioComponentDescription()
    audioComponentDescription.componentType = kAudioUnitType_Output
    audioComponentDescription.componentSubType = kAudioUnitSubType_RemoteIO
    audioComponentDescription.componentManufacturer = kAudioUnitManufacturer_Apple
    audioComponentDescription.componentFlags = 0
    audioComponentDescription.componentFlagsMask = 0

    // Get the RemoteIO unit
    let remoteIOComponent = AudioComponentFindNext(nil, &audioComponentDescription)
    var status = AudioComponentInstanceNew(remoteIOComponent!, &remoteIOUnit)
    if status != noErr {
        return status
    }

    let bus1: AudioUnitElement = 1
    var oneFlag: UInt32 = 1

    // Configure the RemoteIO unit for input
    status = AudioUnitSetProperty(remoteIOUnit!,
                                  kAudioOutputUnitProperty_EnableIO,
                                  kAudioUnitScope_Input,
                                  bus1,
                                  &oneFlag,
                                  UInt32(MemoryLayout<UInt32>.size))
    if status != noErr {
        return status
    }

    // Set format for mic input (bus 1) on RemoteIO's output scope
    var asbd = AudioStreamBasicDescription()
    asbd.mSampleRate = 16000
    asbd.mFormatID = kAudioFormatLinearPCM
    asbd.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked
    asbd.mBytesPerPacket = 2
    asbd.mFramesPerPacket = 1
    asbd.mBytesPerFrame = 2
    asbd.mChannelsPerFrame = 1
    asbd.mBitsPerChannel = 16
    status = AudioUnitSetProperty(remoteIOUnit!,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Output,
                                  bus1,
                                  &asbd,
                                  UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
    if status != noErr {
        return status
    }

    // Set the recording callback
    var callbackStruct = AURenderCallbackStruct()
    callbackStruct.inputProc = recordingCallback
    callbackStruct.inputProcRefCon = nil
    status = AudioUnitSetProperty(remoteIOUnit!,
                                  kAudioOutputUnitProperty_SetInputCallback,
                                  kAudioUnitScope_Global,
                                  bus1,
                                  &callbackStruct,
                                  UInt32(MemoryLayout<AURenderCallbackStruct>.size))
    if status != noErr {
        return status
    }

    // Initialize the RemoteIO unit
    return AudioUnitInitialize(remoteIOUnit!)
}
func startStreamRecording(handler: ((_ data: Data) -> Void)?) -> OSStatus {
    streamHandler = handler
    if remoteIOUnit == nil {
        return -1
    }
    return AudioOutputUnitStart(remoteIOUnit!)
}
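For reference, the call site looks roughly like this (a sketch; it assumes prepareStreamRecording() and startStreamRecording(handler:) live on the same AudioManager singleton that the recording callback below references):

func beginStreaming() {
    do {
        // Prepare the RemoteIO unit once, then start pulling buffers
        var status = try AudioManager.shared.prepareStreamRecording()
        guard status == noErr else { NSLog("prepare failed: \(status)"); return }
        status = AudioManager.shared.startStreamRecording { data in
            // data is one buffer of packed 16-bit mono PCM at 16 kHz
            NSLog("streamed \(data.count) bytes")
        }
        if status != noErr { NSLog("start failed: \(status)") }
    } catch {
        NSLog("audio session error: \(error)")
    }
}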
The audio data is received through this callback:
func recordingCallback(inRefCon: UnsafeMutableRawPointer,
                       ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>,
                       inTimeStamp: UnsafePointer<AudioTimeStamp>,
                       inBusNumber: UInt32,
                       inNumberFrames: UInt32,
                       ioData: UnsafeMutablePointer<AudioBufferList>?) -> OSStatus {
    var status = noErr

    // Prepare a single-buffer AudioBufferList; leaving mData nil lets
    // AudioUnitRender allocate the buffer itself
    let channelCount: UInt32 = 1
    var bufferList = AudioBufferList()
    bufferList.mNumberBuffers = channelCount
    let buffers = UnsafeMutableBufferPointer<AudioBuffer>(start: &bufferList.mBuffers,
                                                          count: Int(bufferList.mNumberBuffers))
    buffers[0].mNumberChannels = 1
    buffers[0].mDataByteSize = inNumberFrames * 2
    buffers[0].mData = nil

    // Get the recorded samples
    status = AudioUnitRender(AudioManager.shared.remoteIOUnit!,
                             ioActionFlags,
                             inTimeStamp,
                             inBusNumber,
                             inNumberFrames,
                             &bufferList)
    if status != noErr {
        return status
    }

    let data = Data(bytes: buffers[0].mData!, count: Int(buffers[0].mDataByteSize))
    NSLog("recorded data length is \(data.count)")
    NSLog("Recorded data part is \(data.subdata(in: 0..<50).hexadecimal())")
    AudioManager.shared.streamHandler?(data)
    return noErr
}
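As an aside, hexadecimal() is not a Foundation API but a small custom extension on Data; a minimal stand-in, if you want to run the snippet:

extension Data {
    // Minimal replacement for the custom helper used in the NSLog call above
    func hexadecimal() -> String {
        return map { String(format: "%02x", $0) }.joined()
    }
}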
The code works when recording with the phone's built-in microphone. However, when a Bluetooth microphone is connected, the recorded data is always 00000000000000000000000000....
Note that I did not write this code myself; I took it from a sample app Google made to demonstrate their Cloud Speech API.
I have solved this problem by changing the audio session's preferred IO buffer duration. I currently set it to 0.01:
try AVAudioSession.sharedInstance().setPreferredIOBufferDuration(0.01)
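For completeness, here is roughly how the session setup looks with the fix in place (a sketch: the category, mode, and .allowBluetooth option are my assumptions about the surrounding setup, since Bluetooth HFP input also has to be allowed on the session; only the buffer-duration line is the actual fix):

import AVFoundation

// Sketch of the session configuration before prepareStreamRecording().
// The category and options are assumed; the 0.01 buffer duration is the fix.
func configureAudioSession() throws {
    let session = AVAudioSession.sharedInstance()
    // .allowBluetooth enables HFP Bluetooth microphones as an input route
    try session.setCategory(.playAndRecord, mode: .default, options: [.allowBluetooth])
    try session.setPreferredIOBufferDuration(0.01)
    try session.setActive(true)
}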