Swift:声音输出和麦克风输入 |使用 AudioKit |
Swift: Sound-Output & Microphone-Input | using AudioKit |
我正在使用 >Xcode 版本 9.2<
我正在使用 >AudioKit 版本 4.0.4<
我写了一些代码,您可以在下面找到它们应该能够
- 播放特定的声音
(frequency: 500.0HZ)
- "listen"到麦克风输入并实时计算频率
如果我调用 playSound()
或 receiveSound()
separated 一切看起来都很好并且确实如我预期的那样工作。但是之后调用 playSound()
和 receiveSound()
呢?正是在那里,我遇到了大问题。
这就是我想让代码正常工作的方式:
SystemClass.playSound() //play sound
DispatchQueue.main.asyncAfter(deadline: (DispatchTime.now() + 3.0)) {
SystemClass.receiveSound() //get microphone input 3 seconds later
}
let SystemClass: System = System()
class System {
public init() { }
func playSound() {
let sound = AKOscillator()
AudioKit.output = sound
AudioKit.start()
sound.frequency = 500.0
sound.amplitude = 0.5
sound.start()
DispatchQueue.main.asyncAfter(deadline: (DispatchTime.now() + 2.0)) {
sound.stop()
}
}
var tracker: AKFrequencyTracker!
func receiveSound() {
AudioKit.stop()
AKSettings.audioInputEnabled = true
let mic = AKMicrophone()
tracker = AKFrequencyTracker(mic)
let silence = AKBooster(tracker, gain: 0)
AudioKit.output = silence
AudioKit.start()
Timer.scheduledTimer( timeInterval: 0.1, target: self, selector: #selector(SystemClass.outputFrequency), userInfo: nil, repeats: true)
}
@objc func outputFrequency() {
print("Frequency: \(tracker.frequency)")
}
}
这些消息是我每次想 运行 代码(调用 playSound()
并在 3 秒后调用 receiveSound ()
时收到的一些编译器错误消息:
AVAEInternal.h:103:_AVAE_CheckNoErr: [AVAudioEngineGraph.mm:1266:Initialize: (err = AUGraphParser::InitializeActiveNodesInOutputChain(ThisGraph, kOutputChainOptimizedTraversal, *GetOutputNode(), isOutputChainActive)): error -10875
AVAudioEngine.mm:149:-[AVAudioEngine prepare]: Engine@0x1c401bff0: could not initialize, error = -10875
[MediaRemote] [AVOutputContext] WARNING: AVF context unavailable for sharedSystemAudioContext
[AVAudioEngineGraph.mm:1266:Initialize: (err = AUGraphParser::InitializeActiveNodesInOutputChain(ThisGraph, kOutputChainOptimizedTraversal, *GetOutputNode(), isOutputChainActive)): error -10875
Fatal error: AudioKit: Could not start engine. error: Error
Domain=com.apple.coreaudio.avfaudio Code=-10875 "(null)" UserInfo={failed call=err = AUGraphParser::InitializeActiveNodesInOutputChain(ThisGraph, kOutputChainOptimizedTraversal, *GetOutputNode(), isOutputChainActive)}.: file /Users/megastep/src/ak/AudioKit/AudioKit/Common/Internals/AudioKit.swift, line 243
我相信您的大部分问题是由于在使用它们的函数中局部声明了 AKNodes:
let sound = AKOscillator()
let mic = AKMicrophone()
let silence = AKBooster(tracker, gain: 0)
将它们声明为实例变量,如here所述。
我正在使用 >Xcode 版本 9.2<
我正在使用 >AudioKit 版本 4.0.4<
我写了一些代码,您可以在下面找到它们应该能够
- 播放特定的声音
(frequency: 500.0HZ)
- "listen"到麦克风输入并实时计算频率
如果我调用 playSound()
或 receiveSound()
separated 一切看起来都很好并且确实如我预期的那样工作。但是之后调用 playSound()
和 receiveSound()
呢?正是在那里,我遇到了大问题。
这就是我想让代码正常工作的方式:
SystemClass.playSound() //play sound
DispatchQueue.main.asyncAfter(deadline: (DispatchTime.now() + 3.0)) {
SystemClass.receiveSound() //get microphone input 3 seconds later
}
let SystemClass: System = System()
class System {
public init() { }
func playSound() {
let sound = AKOscillator()
AudioKit.output = sound
AudioKit.start()
sound.frequency = 500.0
sound.amplitude = 0.5
sound.start()
DispatchQueue.main.asyncAfter(deadline: (DispatchTime.now() + 2.0)) {
sound.stop()
}
}
var tracker: AKFrequencyTracker!
func receiveSound() {
AudioKit.stop()
AKSettings.audioInputEnabled = true
let mic = AKMicrophone()
tracker = AKFrequencyTracker(mic)
let silence = AKBooster(tracker, gain: 0)
AudioKit.output = silence
AudioKit.start()
Timer.scheduledTimer( timeInterval: 0.1, target: self, selector: #selector(SystemClass.outputFrequency), userInfo: nil, repeats: true)
}
@objc func outputFrequency() {
print("Frequency: \(tracker.frequency)")
}
}
这些消息是我每次想 运行 代码(调用 playSound()
并在 3 秒后调用 receiveSound ()
时收到的一些编译器错误消息:
AVAEInternal.h:103:_AVAE_CheckNoErr: [AVAudioEngineGraph.mm:1266:Initialize: (err = AUGraphParser::InitializeActiveNodesInOutputChain(ThisGraph, kOutputChainOptimizedTraversal, *GetOutputNode(), isOutputChainActive)): error -10875
AVAudioEngine.mm:149:-[AVAudioEngine prepare]: Engine@0x1c401bff0: could not initialize, error = -10875
[MediaRemote] [AVOutputContext] WARNING: AVF context unavailable for sharedSystemAudioContext
[AVAudioEngineGraph.mm:1266:Initialize: (err = AUGraphParser::InitializeActiveNodesInOutputChain(ThisGraph, kOutputChainOptimizedTraversal, *GetOutputNode(), isOutputChainActive)): error -10875
Fatal error: AudioKit: Could not start engine. error: Error
Domain=com.apple.coreaudio.avfaudio Code=-10875 "(null)" UserInfo={failed call=err = AUGraphParser::InitializeActiveNodesInOutputChain(ThisGraph, kOutputChainOptimizedTraversal, *GetOutputNode(), isOutputChainActive)}.: file /Users/megastep/src/ak/AudioKit/AudioKit/Common/Internals/AudioKit.swift, line 243
我相信您的大部分问题是由于在使用它们的函数中局部声明了 AKNodes:
let sound = AKOscillator()
let mic = AKMicrophone()
let silence = AKBooster(tracker, gain: 0)
将它们声明为实例变量,如here所述。