I got a crash when recording: "required condition is false: format.sampleRate == hwFormat.sampleRate" after a WebRTC call
My recording works fine, but the problem is that after a WebRTC call it crashes with:
required condition is false: format.sampleRate == hwFormat.sampleRate
This is how I start recording and install the tap:
func startRecord() {
    self.filePath = nil
    print("last format: \(audioEngine.inputNode.inputFormat(forBus: 0).sampleRate)")
    let session = AVAudioSession.sharedInstance()
    do {
        try session.setCategory(.playAndRecord, options: .mixWithOthers)
    } catch {
        print("======== Error setting setCategory \(error.localizedDescription)")
    }
    do {
        try session.setPreferredSampleRate(44100.0)
    } catch {
        print("======== Error setting rate \(error.localizedDescription)")
    }
    do {
        try session.setPreferredIOBufferDuration(0.005)
    } catch {
        print("======== Error IOBufferDuration \(error.localizedDescription)")
    }
    do {
        try session.setActive(true, options: .notifyOthersOnDeactivation)
    } catch {
        print("========== Error starting session \(error.localizedDescription)")
    }
    let format = AVAudioFormat(commonFormat: AVAudioCommonFormat.pcmFormatInt16,
                               sampleRate: 44100.0,
                               // sampleRate: audioEngine.inputNode.inputFormat(forBus: 0).sampleRate,
                               channels: 1,
                               interleaved: true)
    audioEngine.connect(audioEngine.inputNode, to: mixer, format: format)
    audioEngine.connect(mixer, to: audioEngine.mainMixerNode, format: format)
    let dir = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first! as String
    filePath = dir.appending("/\(UUID.init().uuidString).wav")
    _ = ExtAudioFileCreateWithURL(URL(fileURLWithPath: filePath!) as CFURL,
                                  kAudioFileWAVEType, (format?.streamDescription)!, nil,
                                  AudioFileFlags.eraseFile.rawValue, &outref)
    mixer.installTap(onBus: 0, bufferSize: AVAudioFrameCount((format?.sampleRate)!), format: format, block: { (buffer: AVAudioPCMBuffer!, time: AVAudioTime!) -> Void in
        let audioBuffer: AVAudioBuffer = buffer
        _ = ExtAudioFileWrite(self.outref!, buffer.frameLength, audioBuffer.audioBufferList)
    })
    try! audioEngine.start()
    startMP3Rec(path: filePath!, rate: 128)
}
func stopRecord() {
    self.audioFilePlayer.stop()
    self.audioEngine.stop()
    self.mixer.removeTap(onBus: 0)
    self.stopMP3Rec()
    ExtAudioFileDispose(self.outref!)
    try? AVAudioSession.sharedInstance().setActive(false)
}
func startMP3Rec(path: String, rate: Int32) {
    self.isMP3Active = true
    var total = 0
    var read = 0
    var write: Int32 = 0
    let mp3path = path.replacingOccurrences(of: "wav", with: "mp3")
    var pcm: UnsafeMutablePointer<FILE> = fopen(path, "rb")
    fseek(pcm, 4*1024, SEEK_CUR)
    let mp3: UnsafeMutablePointer<FILE> = fopen(mp3path, "wb")
    let PCM_SIZE: Int = 8192
    let MP3_SIZE: Int32 = 8192
    let pcmbuffer = UnsafeMutablePointer<Int16>.allocate(capacity: Int(PCM_SIZE*2))
    let mp3buffer = UnsafeMutablePointer<UInt8>.allocate(capacity: Int(MP3_SIZE))
    let lame = lame_init()
    lame_set_num_channels(lame, 1)
    lame_set_mode(lame, MONO)
    lame_set_in_samplerate(lame, 44100)
    lame_set_brate(lame, rate)
    lame_set_VBR(lame, vbr_off)
    lame_init_params(lame)
    DispatchQueue.global(qos: .default).async {
        while true {
            pcm = fopen(path, "rb")
            fseek(pcm, 4*1024 + total, SEEK_CUR)
            read = fread(pcmbuffer, MemoryLayout<Int16>.size, PCM_SIZE, pcm)
            if read != 0 {
                write = lame_encode_buffer(lame, pcmbuffer, nil, Int32(read), mp3buffer, MP3_SIZE)
                fwrite(mp3buffer, Int(write), 1, mp3)
                total += read * MemoryLayout<Int16>.size
                fclose(pcm)
            } else if !self.isMP3Active {
                _ = lame_encode_flush(lame, mp3buffer, MP3_SIZE)
                _ = fwrite(mp3buffer, Int(write), 1, mp3)
                break
            } else {
                fclose(pcm)
                usleep(50)
            }
        }
        lame_close(lame)
        fclose(mp3)
        fclose(pcm)
        self.filePathMP3 = mp3path
    }
}
func stopMP3Rec() {
    self.isMP3Active = false
}
On the first run of the app, I log the last format with
print("last format: \(audioEngine.inputNode.inputFormat(forBus: 0).sampleRate)")
--> it returns 0 -> recording works fine
On the next run it returns 44100 -> recording works fine
But after a WebRTC call I get 48000, and then it crashes on this line:
self.audioEngine.connect(self.audioEngine.inputNode, to: self.mixer, format: format)
I spent 4 hours on Stack Overflow, but no solution worked for me.
I don't want the 48000 format, because when I set the sample rate to
sampleRate: audioEngine.inputNode.inputFormat(forBus: 0).sampleRate,
-> my output sounds awful, even though I can still recognize my own voice :(
So I think 44100 is best.
Can anyone give me some advice? Thanks.
The problem is this line:
let format = AVAudioFormat(commonFormat: AVAudioCommonFormat.pcmFormatInt16, ...
AVAudioCommonFormat.pcmFormatInt16 does not work here by default; you should use .pcmFormatFloat32.
The Xcode hint also points clearly at the crashing line:
self.audioEngine.connect(self.audioEngine.inputNode, to: self.mixer, format: format)
Print out mixer.inputFormat(forBus: 0) and you will see that the actual device sample rate is 48000. To get 44100, just use AVAudioConverter to downsample the audio buffers.
// Note: `buffer` and `inputCallback` come from the tap callback;
// the complete version is shown in the next snippet.
let input = engine.inputNode
let bus = 0
let inputFormat = input.outputFormat(forBus: bus)
guard let outputFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: 44100, channels: 1, interleaved: true),
      let converter = AVAudioConverter(from: inputFormat, to: outputFormat) else {
    return
}
if let convertedBuffer = AVAudioPCMBuffer(pcmFormat: outputFormat, frameCapacity: AVAudioFrameCount(outputFormat.sampleRate) * buffer.frameLength / AVAudioFrameCount(buffer.format.sampleRate)) {
    var error: NSError?
    let status = converter.convert(to: convertedBuffer, error: &error, withInputFrom: inputCallback)
    assert(status != .error)
    print(convertedBuffer.format)
}
Here is the downsampling part, closer to your case:
let bus = 0
let inputNode = audioEngine.inputNode
let inputFormat = inputNode.outputFormat(forBus: bus)
let outputFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: 44100, channels: 1, interleaved: true)!
let converter = AVAudioConverter(from: inputFormat, to: outputFormat)!
inputNode.installTap(onBus: bus, bufferSize: 1024, format: inputFormat) { (buffer: AVAudioPCMBuffer, when: AVAudioTime) in
    var newBufferAvailable = true
    let inputCallback: AVAudioConverterInputBlock = { inNumPackets, outStatus in
        if newBufferAvailable {
            outStatus.pointee = .haveData
            newBufferAvailable = false
            return buffer
        } else {
            outStatus.pointee = .noDataNow
            return nil
        }
    }
    let convertedBuffer = AVAudioPCMBuffer(pcmFormat: outputFormat, frameCapacity: AVAudioFrameCount(outputFormat.sampleRate) * buffer.frameLength / AVAudioFrameCount(buffer.format.sampleRate))!
    var error: NSError?
    let status = converter.convert(to: convertedBuffer, error: &error, withInputFrom: inputCallback)
    // 44100 Hz buffer
    print(convertedBuffer.format)
}
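If you also want to keep writing the recording to the WAV file from the question, you can hand the converted 44100 Hz buffer to ExtAudioFileWrite inside the tap. A minimal sketch, assuming `outref` was created with ExtAudioFileCreateWithURL using outputFormat.streamDescription so the file format matches the converted buffers; wiring this into `outref` is my assumption, not part of the answer above:

// Inside the tap above, after converter.convert(...):
// write the converted (44100 Hz) buffer instead of the raw 48000 Hz one.
if status != .error {
    _ = ExtAudioFileWrite(self.outref!,
                          convertedBuffer.frameLength,
                          convertedBuffer.audioBufferList)
}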
Seen only in the iOS Simulator:
I spent over an hour going down this rat hole. I had been using headphones (48K) for a Logic audio session, then went over to the iOS Simulator to work on my app's audio code and started getting this crash. Unplugged my headphones: still crashed. Restarted the Simulator, deleted the app from the Simulator, restarted Xcode and the machine: still crashed.
Finally I went into System Preferences on the Mac and selected:
Sound > Input, and plugged in my headphones so it showed "External Microphone".
I also went to the Simulator's I/O settings and set Audio Input to "Internal Microphone".
Now my app can launch in the Simulator without crashing while trying to create AKMicrophone()...
I tried the accepted answer, but it did not work for me.
I was able to fix it by declaring the audioEngine instance variable as an optional. Right before I need to listen or record sound, I assign it a new object of type AVAudioEngine.
After finishing the recording session, I call audioEngine!.stop() and then assign it nil to release the object.
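A minimal sketch of that pattern, assuming a recorder class that owns the engine (the class and method names here are illustrative, not from the original code):

import AVFoundation

final class Recorder {
    // Optional, so a fresh engine can be created per session and released afterwards.
    var audioEngine: AVAudioEngine?

    func beginListening() throws {
        let engine = AVAudioEngine()   // new engine right before listening/recording
        audioEngine = engine
        // ... connect nodes and install taps here ...
        try engine.start()
    }

    func endListening() {
        audioEngine?.stop()
        audioEngine = nil              // release the object
    }
}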