使用 AudioQueue 播放断断续续的音频
Choppy audio playback with AudioQueue
我有以下代码可以打开 AudioQueue 以播放 16 位 pcm @ 44,100hz。它有一个非常奇怪的怪癖,一旦初始缓冲区被填满,它就会非常快速地播放然后得到 "choppy" 因为它等待更多字节来自网络。
因此,要么我以某种方式弄乱了将数据子范围复制到缓冲区的代码,要么我告诉 AudioQueue 播放速度比数据通过网络传输的速度快。
有人有什么想法吗?我已经被困了几天了。
//
// Created by Benjamin St Pierre on 15-01-02.
// Copyright (c) 2015 Lightning Strike Solutions. All rights reserved.
//
#import <MacTypes.h>
#import "MediaPlayer.h"
@implementation MediaPlayer
@synthesize sampleQueue;
void OutputBufferCallback(void *inUserData, AudioQueueRef inAQ, AudioQueueBufferRef inBuffer) {
//Cast userData to MediaPlayer Objective-C class instance
MediaPlayer *mediaPlayer = (__bridge MediaPlayer *) inUserData;
// Fill buffer.
[mediaPlayer fillAudioBuffer:inBuffer];
// Re-enqueue buffer.
OSStatus err = AudioQueueEnqueueBuffer(inAQ, inBuffer, 0, NULL);
if (err != noErr)
NSLog(@"AudioQueueEnqueueBuffer() error %d", (int) err);
}
- (void)fillAudioBuffer:(AudioQueueBufferRef)inBuffer {
if (self.currentAudioPiece == nil || self.currentAudioPiece.duration >= self.currentAudioPieceIndex) {
//grab latest sample from sample queue
self.currentAudioPiece = sampleQueue.dequeue;
self.currentAudioPieceIndex = 0;
}
//Check for empty sample queue
if (self.currentAudioPiece == nil) {
NSLog(@"Empty sample queue");
memset(inBuffer->mAudioData, 0, kBufferByteSize);
return;
}
UInt32 bytesToRead = inBuffer->mAudioDataBytesCapacity;
while (bytesToRead > 0) {
UInt32 maxBytesFromCurrentPiece = self.currentAudioPiece.audioData.length - self.currentAudioPieceIndex;
//Take the min of what the current piece can provide OR what is needed to be read
UInt32 bytesToReadNow = MIN(maxBytesFromCurrentPiece, bytesToRead);
NSData *subRange = [self.currentAudioPiece.audioData subdataWithRange:NSMakeRange(self.currentAudioPieceIndex, bytesToReadNow)];
//Copy what you can before continuing loop
memcpy(inBuffer->mAudioData, subRange.bytes, subRange.length);
bytesToRead -= bytesToReadNow;
if (bytesToReadNow == maxBytesFromCurrentPiece) {
@synchronized (sampleQueue) {
self.currentAudioPiece = self.sampleQueue.dequeue;
self.currentAudioPieceIndex = 0;
}
} else {
self.currentAudioPieceIndex += bytesToReadNow;
}
}
inBuffer->mAudioDataByteSize = kBufferByteSize;
}
- (void)startMediaPlayer {
AudioStreamBasicDescription streamFormat;
streamFormat.mFormatID = kAudioFormatLinearPCM;
streamFormat.mSampleRate = 44100.0;
streamFormat.mChannelsPerFrame = 2;
streamFormat.mBytesPerFrame = 4;
streamFormat.mFramesPerPacket = 1;
streamFormat.mBytesPerPacket = 4;
streamFormat.mBitsPerChannel = 16;
streamFormat.mReserved = 0;
streamFormat.mFormatFlags = kAudioFormatFlagIsPacked | kAudioFormatFlagIsSignedInteger;
// New input queue
OSStatus err = AudioQueueNewOutput(&streamFormat, OutputBufferCallback, (__bridge void *) self, nil, nil, 0, &outputQueue);
if (err != noErr) {
NSLog(@"AudioQueueNewOutput() error: %d", (int) err);
}
int i;
// Enqueue buffers
AudioQueueBufferRef buffer;
for (i = 0; i < kNumberBuffers; i++) {
err = AudioQueueAllocateBuffer(outputQueue, kBufferByteSize, &buffer);
memset(buffer->mAudioData, 0, kBufferByteSize);
buffer->mAudioDataByteSize = kBufferByteSize;
if (err == noErr) {
err = AudioQueueEnqueueBuffer(outputQueue, buffer, 0, nil);
if (err != noErr) NSLog(@"AudioQueueEnqueueBuffer() error: %d", (int) err);
} else {
NSLog(@"AudioQueueAllocateBuffer() error: %d", (int) err);
return;
}
}
// Start queue
err = AudioQueueStart(outputQueue, nil);
if (err != noErr) NSLog(@"AudioQueueStart() error: %d", (int) err);
}
@end
我要在这里说一句,你的回放变得断断续续,因为你没有为你的数据推进写指针。我对 objective-C 的了解还不足以告诉您此语法是否正确,但我认为您需要添加以下内容:
while (bytesToRead > 0) {
....
memcpy(inBuffer->mAudioData, subRange.bytes, subRange.length);
bytesToRead -= bytesToReadNow;
inBuffer->mAudioData += bytesReadNow; // move the write pointer
...
}
我有以下代码可以打开 AudioQueue 以播放 16 位 pcm @ 44,100hz。它有一个非常奇怪的怪癖,一旦初始缓冲区被填满,它就会非常快速地播放然后得到 "choppy" 因为它等待更多字节来自网络。
因此,要么我以某种方式弄乱了将数据子范围复制到缓冲区的代码,要么我告诉 AudioQueue 播放速度比数据通过网络传输的速度快。
有人有什么想法吗?我已经被困了几天了。
//
// Created by Benjamin St Pierre on 15-01-02.
// Copyright (c) 2015 Lightning Strike Solutions. All rights reserved.
//
#import <MacTypes.h>
#import "MediaPlayer.h"
@implementation MediaPlayer
@synthesize sampleQueue;
void OutputBufferCallback(void *inUserData, AudioQueueRef inAQ, AudioQueueBufferRef inBuffer) {
//Cast userData to MediaPlayer Objective-C class instance
MediaPlayer *mediaPlayer = (__bridge MediaPlayer *) inUserData;
// Fill buffer.
[mediaPlayer fillAudioBuffer:inBuffer];
// Re-enqueue buffer.
OSStatus err = AudioQueueEnqueueBuffer(inAQ, inBuffer, 0, NULL);
if (err != noErr)
NSLog(@"AudioQueueEnqueueBuffer() error %d", (int) err);
}
- (void)fillAudioBuffer:(AudioQueueBufferRef)inBuffer {
if (self.currentAudioPiece == nil || self.currentAudioPiece.duration >= self.currentAudioPieceIndex) {
//grab latest sample from sample queue
self.currentAudioPiece = sampleQueue.dequeue;
self.currentAudioPieceIndex = 0;
}
//Check for empty sample queue
if (self.currentAudioPiece == nil) {
NSLog(@"Empty sample queue");
memset(inBuffer->mAudioData, 0, kBufferByteSize);
return;
}
UInt32 bytesToRead = inBuffer->mAudioDataBytesCapacity;
while (bytesToRead > 0) {
UInt32 maxBytesFromCurrentPiece = self.currentAudioPiece.audioData.length - self.currentAudioPieceIndex;
//Take the min of what the current piece can provide OR what is needed to be read
UInt32 bytesToReadNow = MIN(maxBytesFromCurrentPiece, bytesToRead);
NSData *subRange = [self.currentAudioPiece.audioData subdataWithRange:NSMakeRange(self.currentAudioPieceIndex, bytesToReadNow)];
//Copy what you can before continuing loop
memcpy(inBuffer->mAudioData, subRange.bytes, subRange.length);
bytesToRead -= bytesToReadNow;
if (bytesToReadNow == maxBytesFromCurrentPiece) {
@synchronized (sampleQueue) {
self.currentAudioPiece = self.sampleQueue.dequeue;
self.currentAudioPieceIndex = 0;
}
} else {
self.currentAudioPieceIndex += bytesToReadNow;
}
}
inBuffer->mAudioDataByteSize = kBufferByteSize;
}
- (void)startMediaPlayer {
AudioStreamBasicDescription streamFormat;
streamFormat.mFormatID = kAudioFormatLinearPCM;
streamFormat.mSampleRate = 44100.0;
streamFormat.mChannelsPerFrame = 2;
streamFormat.mBytesPerFrame = 4;
streamFormat.mFramesPerPacket = 1;
streamFormat.mBytesPerPacket = 4;
streamFormat.mBitsPerChannel = 16;
streamFormat.mReserved = 0;
streamFormat.mFormatFlags = kAudioFormatFlagIsPacked | kAudioFormatFlagIsSignedInteger;
// New input queue
OSStatus err = AudioQueueNewOutput(&streamFormat, OutputBufferCallback, (__bridge void *) self, nil, nil, 0, &outputQueue);
if (err != noErr) {
NSLog(@"AudioQueueNewOutput() error: %d", (int) err);
}
int i;
// Enqueue buffers
AudioQueueBufferRef buffer;
for (i = 0; i < kNumberBuffers; i++) {
err = AudioQueueAllocateBuffer(outputQueue, kBufferByteSize, &buffer);
memset(buffer->mAudioData, 0, kBufferByteSize);
buffer->mAudioDataByteSize = kBufferByteSize;
if (err == noErr) {
err = AudioQueueEnqueueBuffer(outputQueue, buffer, 0, nil);
if (err != noErr) NSLog(@"AudioQueueEnqueueBuffer() error: %d", (int) err);
} else {
NSLog(@"AudioQueueAllocateBuffer() error: %d", (int) err);
return;
}
}
// Start queue
err = AudioQueueStart(outputQueue, nil);
if (err != noErr) NSLog(@"AudioQueueStart() error: %d", (int) err);
}
@end
我要在这里说一句,你的回放变得断断续续,因为你没有为你的数据推进写指针。我对 objective-C 的了解还不足以告诉您此语法是否正确,但我认为您需要添加以下内容:
while (bytesToRead > 0) {
....
memcpy(inBuffer->mAudioData, subRange.bytes, subRange.length);
bytesToRead -= bytesToReadNow;
inBuffer->mAudioData += bytesReadNow; // move the write pointer
...
}