Audio Queue Services Player in Swift isn't calling callback

I've been studying Audio Queue Services for about a week now, and I've written a Swift version of the player from Apple's Audio Queue Services Programming Guide. I'm recording in linear PCM and saving to disk with this call:

AudioFileCreateWithURL(url, kAudioFileWAVEType, &format,
                       AudioFileFlags.dontPageAlignAudioData.union(.eraseFile),
                       &audioFileID)
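
For context, a constant bit rate (CBR) 16-bit linear PCM description for a WAV file like this generally looks something like the sketch below. The sample rate and channel count are placeholders rather than the exact values from my project; the point is that linear PCM is CBR, so mBytesPerPacket and mFramesPerPacket are non-zero.

import AudioToolbox

// Illustrative 16-bit mono linear PCM format (the concrete values are assumptions).
// Because linear PCM is CBR, the isFormatVBR branch further down never executes.
var format = AudioStreamBasicDescription(mSampleRate: 44_100,
                                         mFormatID: kAudioFormatLinearPCM,
                                         mFormatFlags: kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked,
                                         mBytesPerPacket: 2,
                                         mFramesPerPacket: 1,
                                         mBytesPerFrame: 2,
                                         mChannelsPerFrame: 1,
                                         mBitsPerChannel: 16,
                                         mReserved: 0)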

My AudioQueueOutputCallback is never invoked, even though I can verify that my bufferSize looks large enough and that real data is being passed in. I'm not getting any OSStatus errors, and everything appears to work. There are very few examples of Audio Queue Services written in Swift, so if I can get this working I'd be happy to open-source the rest of my code.

Any suggestions are welcome!
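
For reference, every Core Audio call in the code below goes through a small osStatus helper together with a MixerError type. Neither is shown in full, so the following is only a minimal, assumed sketch of their shape:

// Assumed sketch of the helpers used below, not the exact project code:
// osStatus runs a Core Audio call and throws when the returned status is non-zero.
enum MixerError: Error {
  case playerInputPath
  case playerOutputQueue
  case osStatus(OSStatus)
}

func osStatus(_ operation: () -> OSStatus) throws {
  let status = operation()
  guard status == 0 else { throw MixerError.osStatus(status) }
}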

class SVNPlayer: SVNPlayback {

  var state: PlayerState!

  private let callback: AudioQueueOutputCallback = { aqData, inAQ, inBuffer in

    guard let userData = aqData else { return }
    let audioPlayer = Unmanaged<SVNPlayer>.fromOpaque(userData).takeUnretainedValue()

    guard audioPlayer.state.isRunning,
      let queue = audioPlayer.state.mQueue else { return }

    var buffer = inBuffer.pointee // dereference pointers

    var numBytesReadFromFile: UInt32 = 0
    var numPackets = audioPlayer.state.mNumPacketsToRead
    var mPacketDescIsNil = audioPlayer.state.mPacketDesc == nil // determine whether the packet description is still nil

    if mPacketDescIsNil {
      audioPlayer.state.mPacketDesc = AudioStreamPacketDescription(mStartOffset: 0, mVariableFramesInPacket: 0, mDataByteSize: 0)
    }

    AudioFileReadPacketData(audioPlayer.state.mAudioFile, false, &numBytesReadFromFile, // read packets from the saved file
      &audioPlayer.state.mPacketDesc!, audioPlayer.state.mCurrentPacket,
      &numPackets, buffer.mAudioData)

    if numPackets > 0 {
      buffer.mAudioDataByteSize = numBytesReadFromFile
      AudioQueueEnqueueBuffer(queue, inBuffer, mPacketDescIsNil ? numPackets : 0,
                              &audioPlayer.state.mPacketDesc!)
      audioPlayer.state.mCurrentPacket += Int64(numPackets)
    } else {
      AudioQueueStop(queue, false)
      audioPlayer.state.isRunning = false
    }
  }

  init(inputPath: String, audioFormat: AudioStreamBasicDescription, numberOfBuffers: Int) throws {
    super.init()
    var format = audioFormat
    let pointer = UnsafeMutableRawPointer(Unmanaged.passUnretained(self).toOpaque()) // get an unmanaged reference to self

    guard let audioFileUrl = CFURLCreateFromFileSystemRepresentation(nil, inputPath,
                                                                     CFIndex(strlen(inputPath)), false) else {
      throw MixerError.playerInputPath
    }

    var audioFileID: AudioFileID?

    try osStatus { AudioFileOpenURL(audioFileUrl, AudioFilePermissions.readPermission, 0, &audioFileID) }

    guard audioFileID != nil else { throw MixerError.playerInputPath }

    state = PlayerState(mDataFormat: audioFormat, // setup the player state with mostly initial values
      mQueue: nil,
      mAudioFile: audioFileID!,
      bufferByteSize: 0,
      mCurrentPacket: 0,
      mNumPacketsToRead: 0,
      isRunning: false,
      mPacketDesc: nil,
      onError: nil)

    var dataFormatSize = UInt32(MemoryLayout<AudioStreamBasicDescription>.stride)

    try osStatus { AudioFileGetProperty(audioFileID!, kAudioFilePropertyDataFormat, &dataFormatSize, &state.mDataFormat) }

    var queue: AudioQueueRef?

    try osStatus { AudioQueueNewOutput(&format, callback, pointer, CFRunLoopGetCurrent(), CFRunLoopMode.commonModes.rawValue, 0, &queue) } // setup output queue

    guard queue != nil else { throw MixerError.playerOutputQueue }

    state.mQueue = queue // add to playerState

    var maxPacketSize = UInt32()
    var propertySize = UInt32(MemoryLayout<UInt32>.stride)

    try osStatus { AudioFileGetProperty(state.mAudioFile, kAudioFilePropertyPacketSizeUpperBound, &propertySize, &maxPacketSize) }

    deriveBufferSize(maxPacketSize: maxPacketSize, seconds: 0.5, outBufferSize: &state.bufferByteSize, outNumPacketsToRead: &state.mNumPacketsToRead)

    let isFormatVBR = state.mDataFormat.mBytesPerPacket == 0  || state.mDataFormat.mFramesPerPacket == 0

    if isFormatVBR { //Allocating Memory for a Packet Descriptions Array
      let size = UInt32(MemoryLayout<AudioStreamPacketDescription>.stride)
      state.mPacketDesc = AudioStreamPacketDescription(mStartOffset: 0,
                                                       mVariableFramesInPacket: state.mNumPacketsToRead,
                                                       mDataByteSize: size)
    } // if CBR it stays nil

    for _ in 0..<numberOfBuffers {  // Allocate and Prime Audio Queue Buffers
      let bufferRef = UnsafeMutablePointer<AudioQueueBufferRef?>.allocate(capacity: 1)
      let foo = state.mDataFormat.mBytesPerPacket * 1024 / UInt32(numberOfBuffers)
      try osStatus { AudioQueueAllocateBuffer(state.mQueue!, foo, bufferRef) } // allocate the buffer

      if let buffer = bufferRef.pointee {
        AudioQueueEnqueueBuffer(state.mQueue!, buffer, 0, nil)
      }
    }

    let gain: Float32 = 1.0  // Set an Audio Queue’s Playback Gain
    try osStatus { AudioQueueSetParameter(state.mQueue!, kAudioQueueParam_Volume, gain) }
  }

  func start() throws {
    state.isRunning = true // Start and Run an Audio Queue
    try osStatus { AudioQueueStart(state.mQueue!, nil) }
    while state.isRunning {
      CFRunLoopRunInMode(CFRunLoopMode.defaultMode, 0.25, false)
    }
    CFRunLoopRunInMode(CFRunLoopMode.defaultMode, 1.0, false)
    state.isRunning = false
  }

  func stop() throws {
    guard state.isRunning,
      let queue = state.mQueue else { return }
    try osStatus { AudioQueueStop(queue, true) }
    try osStatus { AudioQueueDispose(queue, true) }
    try osStatus { AudioFileClose(state.mAudioFile) }

    state.isRunning = false
  }


  private func deriveBufferSize(maxPacketSize: UInt32, seconds: Float64, outBufferSize: inout UInt32, outNumPacketsToRead: inout UInt32){
    let maxBufferSize = UInt32(0x50000)
    let minBufferSize = UInt32(0x4000)

    if state.mDataFormat.mFramesPerPacket != 0 {
      let numPacketsForTime: Float64 = state.mDataFormat.mSampleRate / Float64(state.mDataFormat.mFramesPerPacket) * seconds
      outBufferSize = UInt32(numPacketsForTime) * maxPacketSize
    } else {
      outBufferSize = maxBufferSize > maxPacketSize ? maxBufferSize : maxPacketSize
    }

    if outBufferSize > maxBufferSize && outBufferSize > maxPacketSize {
      outBufferSize = maxBufferSize

    } else if outBufferSize < minBufferSize {
      outBufferSize = minBufferSize
    }

    outNumPacketsToRead = outBufferSize / maxPacketSize
  }
}

My player state struct is:

struct PlayerState: PlaybackState {
  var mDataFormat: AudioStreamBasicDescription
  var mQueue: AudioQueueRef?
  var mAudioFile: AudioFileID
  var bufferByteSize: UInt32
  var mCurrentPacket: Int64
  var mNumPacketsToRead: UInt32
  var isRunning: Bool
  var mPacketDesc: AudioStreamPacketDescription?
  var onError: ((Error) -> Void)?
}

Rather than letting an empty buffer get enqueued, try calling your callback so it enqueues a (hopefully) full buffer. I'm not sure about the run loop stuff, but I'm confident you know what you're doing.
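
Concretely, the priming loop in init could invoke the callback for each freshly allocated buffer, the way Apple's C sample calls HandleOutputBuffer directly while priming. The sketch below is untested and makes two assumptions: that state.bufferByteSize (the value produced by deriveBufferSize) is the size you actually want to allocate, and that isRunning is set to true before priming so the guard at the top of your callback doesn't return early:

// Untested sketch of a priming loop that fills the buffers via the callback.
state.isRunning = true   // assumption: set this first, or the callback's guard bails out
state.mCurrentPacket = 0

for _ in 0..<numberOfBuffers {
  let bufferRef = UnsafeMutablePointer<AudioQueueBufferRef?>.allocate(capacity: 1)
  try osStatus { AudioQueueAllocateBuffer(state.mQueue!, state.bufferByteSize, bufferRef) }

  if let buffer = bufferRef.pointee {
    // The callback reads packets from the file and enqueues the filled buffer,
    // instead of enqueuing an empty buffer here.
    callback(pointer, state.mQueue!, buffer)
  }
}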