Generating a tone in iOS with 16 bit PCM, AudioEngine.connect() throws AUSetFormat: error -10868

I have the following code for generating audio of a given frequency and duration. It's loosely based on this answer for doing the same thing on Android (thanks: @Steve Pomeroy):

import Foundation
import CoreAudio
import AVFoundation
import Darwin

class AudioUtil {

    class func play(frequency: Int, durationMs: Int) -> Void {
        let sampleRateHz: Double = 8000.0
        let numberOfSamples = Int((Double(durationMs) / 1000 * sampleRateHz))
        let factor: Double = 2 * M_PI / (sampleRateHz/Double(frequency))

        // Generate an array of Doubles.
        var samples = [Double](count: numberOfSamples, repeatedValue: 0.0)

        for i in 1..<numberOfSamples {
            let sample = sin(factor * Double(i))
            samples[i] = sample
        }

        // Convert to a 16 bit PCM sound array.
        var index = 0
        var sound = [Byte](count: 2 * numberOfSamples, repeatedValue: 0)

        for doubleValue in samples {
            // Scale to maximum amplitude. Int16.max is 32,767.
            var value = Int16(doubleValue * Double(Int16.max))

            // In a 16 bit wav PCM, first byte is the low order byte.
            var firstByte = Int16(value & 0x00ff)
            var secondByteHighOrderBits = Int32(value) & 0xff00
            var secondByte = Int16(secondByteHighOrderBits >> 8) // Right shift.

            // println("\(doubleValue) -> \(value) -> \(firstByte), \(secondByte)")

            sound[index++] = Byte(firstByte)
            sound[index++] = Byte(secondByte)
        }

        let format = AVAudioFormat(commonFormat: AVAudioCommonFormat.PCMFormatInt16, sampleRate: sampleRateHz, channels:AVAudioChannelCount(1), interleaved: false)
        let buffer = AudioBuffer(mNumberChannels: 1, mDataByteSize: UInt32(sound.count), mData: &sound)
        let pcmBuffer = AVAudioPCMBuffer(PCMFormat: format, frameCapacity: AVAudioFrameCount(sound.count))
        let audioEngine = AVAudioEngine()
        let audioPlayer = AVAudioPlayerNode()

        audioEngine.attachNode(audioPlayer)
        // Runtime error occurs here:
        audioEngine.connect(audioPlayer, to: audioEngine.mainMixerNode, format: format)
        audioEngine.startAndReturnError(nil)

        audioPlayer.play()
        audioPlayer.scheduleBuffer(pcmBuffer, atTime: nil, options: nil, completionHandler: nil)
    }
}

The error I get at runtime when calling connect() on the AVAudioEngine is:

ERROR:     [0x3bfcb9dc] AVAudioNode.mm:521: AUSetFormat: error -10868
*** Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio', reason: 'error -10868'

Is what I'm generating not actually AVAudioCommonFormat.PCMFormatInt16?

[Edit]

Here's another, simpler attempt using only one buffer, as PCMFormatFloat32. There's no error, but no sound either.

import AVFoundation

class AudioManager:NSObject {

    let audioPlayer = AVAudioPlayerNode()

    lazy var audioEngine: AVAudioEngine = {
        let engine = AVAudioEngine()

        // Must happen only once.
        engine.attachNode(self.audioPlayer)

        return engine
    }()

    func play(frequency: Int, durationMs: Int, completionBlock:dispatch_block_t!) {
        var error: NSError?

        var mixer = audioEngine.mainMixerNode
        var sampleRateHz: Float = Float(mixer.outputFormatForBus(0).sampleRate)
        var numberOfSamples = AVAudioFrameCount((Float(durationMs) / 1000 * sampleRateHz))

        var format = AVAudioFormat(commonFormat: AVAudioCommonFormat.PCMFormatFloat32, sampleRate: Double(sampleRateHz), channels: AVAudioChannelCount(1), interleaved: false)

        var buffer = AVAudioPCMBuffer(PCMFormat: format, frameCapacity: numberOfSamples)
        buffer.frameLength = numberOfSamples

        // Generate sine wave
        for var i = 0; i < Int(buffer.frameLength); i++ {
            var val = sinf(Float(frequency) * Float(i) * 2 * Float(M_PI) / sampleRateHz)

            // log.debug("val: \(val)")

            buffer.floatChannelData.memory[i] = val * 0.5
        }

        // Audio engine
        audioEngine.connect(audioPlayer, to: mixer, format: format)

        log.debug("Sample rate: \(sampleRateHz), samples: \(numberOfSamples), format: \(format)")

        if !audioEngine.startAndReturnError(&error) {
            log.debug("Error: \(error)")
        }

        // Play player and buffer
        audioPlayer.play()
        audioPlayer.scheduleBuffer(buffer, atTime: nil, options: nil, completionHandler: completionBlock)
    }
}

Thanks: Thomas Royal (http://www.tmroyal.com/playing-sounds-in-swift-audioengine.html)

Try setting your session category to AVAudioSessionCategoryPlayAndRecord. I'm using both recording and playback, and I call it before recording, which seems to work fine. I'm guessing it has to be done before you start connecting nodes.

    var session = AVAudioSession.sharedInstance()
    session.setCategory(AVAudioSessionCategoryPlayAndRecord, error: &error)
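
A minimal sketch of that ordering, using the same Swift 1.x APIs as the question (the setActive call and the println error handling are assumptions added for illustration, not part of the original answer):

    var error: NSError?
    let session = AVAudioSession.sharedInstance()

    // Configure (and activate) the session before any nodes are connected.
    if !session.setCategory(AVAudioSessionCategoryPlayAndRecord, error: &error) {
        println("Failed to set category: \(error)")
    }
    if !session.setActive(true, error: &error) {
        println("Failed to activate session: \(error)")
    }

    // Only now attach and connect the nodes.
    audioEngine.attachNode(audioPlayer)
    audioEngine.connect(audioPlayer, to: audioEngine.mainMixerNode, format: format)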

On the problem of also not hearing any sound when using PCMFormatFloat32:

I struggled with the same problem for several days and finally found the (or at least one) problem: you need to set the PCM buffer's frameLength manually:

pcmBuffer.frameLength = AVAudioFrameCount(sound.count/2)

Dividing by two accounts for there being two bytes per frame (16-bit samples are encoded as two bytes each).
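
Put together with the question's first example, a sketch of filling and sizing the 16-bit buffer might look like the following. It only addresses the empty, zero-length buffer, not the connect() format error itself, and writing through int16ChannelData instead of the [Byte] array is an assumption on my part:

    let pcmBuffer = AVAudioPCMBuffer(PCMFormat: format, frameCapacity: AVAudioFrameCount(numberOfSamples))
    pcmBuffer.frameLength = AVAudioFrameCount(numberOfSamples)

    // Write the Int16 samples straight into the buffer's channel data
    // rather than round-tripping through a [Byte] array.
    for i in 0..<numberOfSamples {
        pcmBuffer.int16ChannelData.memory[i] = Int16(samples[i] * Double(Int16.max))
    }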

Beyond that, another change I made (I don't yet know whether it matters) was to make the AVAudioEngine and AVAudioPlayerNode members of the class, to keep them from being destroyed before playback finishes.

The problem was that the player was being cleaned up when the play() function exited, so it never finished (or barely started) playing. Here's a fairly clumsy workaround: before returning from play(), sleep for as long as the samples last.

I'll accept a better answer, if someone wants to post one, that avoids having to do this by not cleaning up the player.

import AVFoundation

class AudioManager: NSObject, AVAudioPlayerDelegate {

    let audioPlayerNode = AVAudioPlayerNode()

    var waveAudioPlayer: AVAudioPlayer?

    var playing: Bool! = false

    lazy var audioEngine: AVAudioEngine = {
        let engine = AVAudioEngine()

        // Must happen only once.
        engine.attachNode(self.audioPlayerNode)

        return engine
    }()

    func playWaveFromBundle(filename: String, durationInSeconds: NSTimeInterval) -> Void {
        var error: NSError?
        var sound = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource(filename, ofType: "wav")!)

        if error != nil {
            log.error("Error: \(error)")
            return
        }

        self.waveAudioPlayer = AVAudioPlayer(contentsOfURL: sound, error: &error)
        self.waveAudioPlayer!.delegate = self

        AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback, error: &error)

        if error != nil {
            log.error("Error: \(error)")
            return
        }

        log.verbose("Playing \(sound)")

        self.waveAudioPlayer!.prepareToPlay()

        playing = true

        if !self.waveAudioPlayer!.play() {
            log.error("Failed to play")
        }

        // If we don't block here, the player stops as soon as this function returns. While we'd prefer to wait for audioPlayerDidFinishPlaying() to be called here, it's never called if we block here. Instead, pass in the duration of the wave file and simply sleep for that long.
        /*
        while (playing!) {
            NSThread.sleepForTimeInterval(0.1) // seconds
        }
        */

        NSThread.sleepForTimeInterval(durationInSeconds)

        log.verbose("Done")
    }

    func play(frequency: Int, durationInMillis: Int, completionBlock:dispatch_block_t!) -> Void {
        var session = AVAudioSession.sharedInstance()
        var error: NSError?

        if !session.setCategory(AVAudioSessionCategoryPlayAndRecord, error: &error) {
            log.error("Error: \(error)")
            return
        }

        var mixer = audioEngine.mainMixerNode
        var sampleRateHz: Float = Float(mixer.outputFormatForBus(0).sampleRate)
        var numberOfSamples = AVAudioFrameCount((Float(durationInMillis) / 1000 * sampleRateHz))

        var format = AVAudioFormat(commonFormat: AVAudioCommonFormat.PCMFormatFloat32, sampleRate: Double(sampleRateHz), channels: AVAudioChannelCount(1), interleaved: false)

        var buffer = AVAudioPCMBuffer(PCMFormat: format, frameCapacity: numberOfSamples)
        buffer.frameLength = numberOfSamples

        // Generate sine wave
        for var i = 0; i < Int(buffer.frameLength); i++ {
            var val = sinf(Float(frequency) * Float(i) * 2 * Float(M_PI) / sampleRateHz)

            // log.debug("val: \(val)")

            buffer.floatChannelData.memory[i] = val * 0.5
        }

        AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback, error: &error)

        if error != nil {
            log.error("Error: \(error)")
            return
        }

        // Audio engine
        audioEngine.connect(audioPlayerNode, to: mixer, format: format)

        log.debug("Sample rate: \(sampleRateHz), samples: \(numberOfSamples), format: \(format)")

        if !audioEngine.startAndReturnError(&error) {
            log.error("Error: \(error)")
            return
        }

        // TODO: Check we're not in the background. Attempting to play audio while in the background throws:
        //   *** Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio', reason: 'error 561015905'

        // Play player and schedule buffer
        audioPlayerNode.play()
        audioPlayerNode.scheduleBuffer(buffer, atTime: nil, options: nil, completionHandler: completionBlock)

        // If we don't block here, the player stops as soon as this function returns.
        NSThread.sleepForTimeInterval(Double(durationInMillis) / 1000.0) // convert milliseconds to seconds
    }

    // MARK: AVAudioPlayerDelegate

    func audioPlayerDidFinishPlaying(player: AVAudioPlayer!, successfully flag: Bool) {
        log.verbose("Success: \(flag)")

        playing = false
    }

    func audioPlayerDecodeErrorDidOccur(player: AVAudioPlayer!, error: NSError!) {
        log.verbose("Error: \(error)")

        playing = false
    }

    // MARK: NSObject overrides

    deinit {
        log.verbose("deinit")
    }

}

For context, this AudioManager is a lazily-loaded property on my AppDelegate:

lazy var audioManager: AudioManager = {
    return AudioManager()
}()

I ran into the same behavior as you, meaning I was helping myself with NSThread.sleepForTimeInterval(). I've now found a solution that works for me. The key point is that the AVAudioEngine object must not be initialized inside the play() function. It has to be initialized at the class level, so the engine can keep working and play the sound even after the function exits (which it does immediately). After I moved the line that initializes the audio engine, the sound could be heard even without the waiting "helper". Hope this helps.
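
A minimal sketch of that structure (the TonePlayer name and the specific connect format are illustrative assumptions, not from the answer):

import AVFoundation

class TonePlayer {
    // Engine and player node live as long as the class instance,
    // so playback isn't cut off when play() returns.
    let engine = AVAudioEngine()
    let playerNode = AVAudioPlayerNode()

    init() {
        engine.attachNode(playerNode)
        engine.connect(playerNode, to: engine.mainMixerNode, format: engine.mainMixerNode.outputFormatForBus(0))
    }

    func play(buffer: AVAudioPCMBuffer) {
        var error: NSError?
        if !engine.startAndReturnError(&error) {
            println("Error starting engine: \(error)")
            return
        }
        playerNode.scheduleBuffer(buffer, atTime: nil, options: nil, completionHandler: nil)
        playerNode.play()
    }
}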

On getting the right number of samples (numberOfSamples): mixer.outputFormatForBus(0).sampleRate returns 44100.0; the multiplication by 1000 in the second example isn't needed.

To me, calling play() first and then calling scheduleBuffer on the player node seems backwards. I would reverse the order.
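
That ordering would look like this (a sketch only; the calls are the same ones used in the answer's code, just swapped):

    // Schedule the buffer first, then start the player node.
    audioPlayerNode.scheduleBuffer(buffer, atTime: nil, options: nil, completionHandler: completionBlock)
    audioPlayerNode.play()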