AudioKit 4.3: record audio, render it offline, then play it

I am trying to record audio, then save it offline with AudioKit.renderToFile, and then play the originally recorded audio file with AKPlayer.

import UIKit
import AudioKit


class ViewController: UIViewController {

private var recordUrl:URL!
private var isRecording:Bool = false

public var player:AKPlayer!
private let format = AVAudioFormat(commonFormat: .pcmFormatFloat64, sampleRate: 44100, channels: 2, interleaved: true)!

private var amplitudeTracker:AKAmplitudeTracker!
private var boostedMic:AKBooster!
private var mic:AKMicrophone!
private var micMixer:AKMixer!
private var silence:AKBooster!
public var recorder: AKNodeRecorder!

@IBOutlet weak var recordButton: UIButton!

override func viewDidLoad() {
    super.viewDidLoad()
    //self.recordUrl = Bundle.main.url(forResource: "sound", withExtension: "caf")
    //self.startAudioPlayback(url: self.recordUrl!)
    self.recordUrl = self.urlForDocument("record.caf")
}

override func didReceiveMemoryWarning() {
    super.didReceiveMemoryWarning()
    // Dispose of any resources that can be recreated.
}

func requestMic(completion: @escaping () -> Void) {
    AVAudioSession.sharedInstance().requestRecordPermission({ (granted: Bool) in
        
        if granted { completion()}
    })
}
public func switchToMicrophone() {
    stopEngine()
    do {
        try AKSettings.setSession(category: .playAndRecord, with: .allowBluetoothA2DP)
    } catch {
        AKLog("Could not set session category.")
    }
    mic = AKMicrophone()
    micMixer = AKMixer(mic)
    boostedMic = AKBooster(micMixer, gain: 5)
    amplitudeTracker = AKAmplitudeTracker(boostedMic)
    silence = AKBooster(amplitudeTracker, gain: 0)
    AudioKit.output = silence
    startEngine()
}

@IBAction func startStopRecording(_ sender: Any) {
    self.isRecording = !self.isRecording
    
    if self.isRecording {
        self.startRecording()
        self.recordButton.setTitle("Stop Recording", for: .normal)
    } else {
        self.stopRecording()
        self.recordButton.setTitle("Start Recording", for: .normal)
    }
}

func startRecording() {
    self.requestMic() {
        self.switchToMicrophone()
        if let url = self.recordUrl {
            do {
            let audioFile = try AKAudioFile(forWriting: url, settings: self.format.settings, commonFormat: .pcmFormatFloat64, interleaved: true)

            self.recorder = try AKNodeRecorder(node: self.micMixer, file: audioFile)

            try self.recorder.reset()
            try self.recorder.record()
            } catch {
                print("error setting up recording", error)
            }
        }
    }
}

func stopRecording() {
    recorder.stop()
    startAudioPlayback(url: self.recordUrl)
}

@IBAction func saveToDisk(_ sender: Any) {
    if let source = self.player, let saveUrl = self.urlForDocument("pitchAudio.caf") {
        do {
            source.stop()
            
            let audioFile = try AKAudioFile(forWriting: saveUrl, settings: self.format.settings, commonFormat: .pcmFormatFloat64, interleaved: true)
            try AudioKit.renderToFile(audioFile, duration: source.duration, prerender: {
                source.play()
            })
            print("audio file rendered")
            
        } catch {
            print("error rendering", error)
        }
        
        // PROBLEM STARTS HERE //
        
        self.startAudioPlayback(url: self.recordUrl)
        
    }
}

public func startAudioPlayback(url:URL) {
    print("loading playback audio", url)
    self.stopEngine()
    
    do {
        try AKSettings.setSession(category: .playback)
        player = AKPlayer.init()
        try player.load(url: url)
    }
    catch {
        print("error setting up audio playback", error)
        return
    }
    
    player.prepare()
    player.isLooping = true
    self.setPitch(pitch: self.getPitch(), saveValue: false)
    AudioKit.output = player
    
    startEngine()
    startPlayer()
}


public func startPlayer() {
    if AudioKit.engine.isRunning { self.player.play() }
    else { print("audio engine not running, can't play") }
}

public func startEngine() {
    if !AudioKit.engine.isRunning {
        print("starting engine")
        do { try AudioKit.start() }
        catch {
            print("error starting audio", error)
        }
    }
}

public func stopEngine(){
    
    if AudioKit.engine.isRunning {
        print("stopping engine")
        do {
            try AudioKit.stop()
        }
        catch {
            print("error stopping audio", error)
        }
    }
    
    //playback doesn't work without this?
    mic = nil
}

@IBAction func changePitch(_ sender: UISlider) {
    self.setPitch(pitch:Double(sender.value))
}

public func getPitch() -> Double {
    return UserDefaults.standard.double(forKey: "pitchFactor")
}

public func setPitch(pitch:Double, saveValue:Bool = true) {
    player.pitch = pitch * 1000.0
    if saveValue {
        UserDefaults.standard.set(pitch, forKey: "pitchFactor")
        UserDefaults.standard.synchronize()
    }
}

func urlForDocument(_ named:String) -> URL? {
    let path = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as String
    let url = NSURL(fileURLWithPath: path)
    if let pathComponent = url.appendingPathComponent(named) {
        return pathComponent
    }
    return nil
}

}

The call order is switchToMicrophone, startRecording, stopRecording, startAudioPlayback, saveToDisk, and then startAudioPlayback again.

See the GitHub repo for the full code in ViewController.swift.

After the renderToFile call, restarting AudioKit for the player produces the following errors:


[mcmx] 338: input bus 0 sample rate is 0

[avae] AVAEInternal.h:103:_AVAE_CheckNoErr: [AVAudioEngineGraph.mm:1265:Initialize: (err = AUGraphParser::InitializeActiveNodesInOutputChain(ThisGraph, kOutputChainOptimizedTraversal, *GetOutputNode(), isOutputChainActive)): error -10875

[avae] AVAudioEngine.mm:149:-[AVAudioEngine prepare]: Engine@0x1c4008ae0: could not initialize, error = -10875

[mcmx] 338: input bus 0 sample rate is 0

[avae] AVAEInternal.h:103:_AVAE_CheckNoErr: [AVAudioEngineGraph.mm:1265:Initialize: (err = AUGraphParser::InitializeActiveNodesInOutputChain(ThisGraph, kOutputChainOptimizedTraversal, *GetOutputNode(), isOutputChainActive)): error -10875

error starting audio Error Domain=com.apple.coreaudio.avfaudio Code=-10875 "(null)" UserInfo={failed call=err = AUGraphParser::InitializeActiveNodesInOutputChain(ThisGraph, kOutputChainOptimizedTraversal, *GetOutputNode(), isOutputChainActive)} ***

Everything works perfectly if I take out either the recording part or the offline render, but not when both are included.

The problem may be your order of execution. Try swapping startAudioPlayback and saveToDisk so that saveToDisk runs first, and then the file is read back and played with startAudioPlayback.
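For example, if the startAudioPlayback call at the end of saveToDisk is dropped, stopRecording could drive that order directly; this is only a minimal sketch reusing the question's own methods, untested:

func stopRecording() {
    recorder.stop()
    // save/render the file first...
    saveToDisk(self)
    // ...then read it back and play it
    startAudioPlayback(url: self.recordUrl)
}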

Edit: From experimenting so far, I believe I have pinned down the problem. After saving the file, the other temporary file that holds the recording disappears for some reason. I still need to narrow down why that happens.

Alternatively, send the whole saveToDisk method to a background thread so that it does not interrupt the file that is currently playing.
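Roughly like this; this is only a sketch, and it assumes AudioKit.renderToFile is safe to call from a background queue, which is worth verifying:

@IBAction func saveToDisk(_ sender: Any) {
    guard let source = self.player, let saveUrl = self.urlForDocument("pitchAudio.caf") else { return }
    DispatchQueue.global(qos: .userInitiated).async {
        do {
            source.stop()
            let audioFile = try AKAudioFile(forWriting: saveUrl, settings: self.format.settings, commonFormat: .pcmFormatFloat64, interleaved: true)
            try AudioKit.renderToFile(audioFile, duration: source.duration, prerender: {
                source.play()
            })
            // hop back to the main thread before restarting playback / touching the UI
            DispatchQueue.main.async {
                self.startAudioPlayback(url: self.recordUrl)
            }
        } catch {
            print("error rendering", error)
        }
    }
}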

I will tinker with it a bit when I have some free time and let you know.

Edit 2: Check this out. If you are getting nowhere, try posting your question at: https://github.com/audiokit/AudioKit/issues/

Also have a look at this tutorial: https://www.raywenderlich.com/145770/audiokit-tutorial-getting-started

It may also be worth messaging Aurelius Prochazka, since he is an AudioKit developer and may be able to help.

I was able to get it working by combining recording and playback into a single signal chain:

// microphone chain: mic -> mixer -> boost -> amplitude tracker -> silenced booster
mixer = AKMixer(mic)
boostedMic = AKBooster(mixer, gain: 5)
amplitudeTracker = AKAmplitudeTracker(boostedMic)
micBooster = AKBooster(amplitudeTracker, gain: 0)

// player for the recorded file, living in the same graph
player = AKPlayer()
try? player.load(url: self.recordUrl)
player.prepare()
player.gain = 2.0

// both the (muted) mic chain and the player feed a single output mixer
outputMixer = AKMixer(micBooster, player)
AudioKit.output = outputMixer
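For reference, the recorder can hang off the same chain so that recording, offline rendering, and playback all live in one graph. The continuation below is an assumption pieced together from the question's original code (recordUrl, format, and the recorder property), not the exact code from the repo:

do {
    // write the recording into the same file the player later loads
    let recordFile = try AKAudioFile(forWriting: recordUrl, settings: format.settings)
    recorder = try AKNodeRecorder(node: mixer, file: recordFile)
    try AudioKit.start()
    try recorder.record()
} catch {
    print("error setting up recording", error)
}

Because both the muted mic chain and the player feed outputMixer, switching between recording and playback no longer requires stopping the engine and rebuilding the graph, which appears to be what triggered the -10875 errors above.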