How to play a PCM audio buffer from a socket server using an Audio Unit and a circular buffer

I hope someone can help me. I am new to Objective-C and OS X, and I am trying to play audio data received over a socket through my audio queue. I found this link, which solved my circular buffer problem.

However, when I try to run my project it returns the error (OSStatus) -10865, i.e. kAudioUnitErr_PropertyNotWritable, which is why the code logs "Error enabling AudioUnit output bus" at this line:

status = AudioUnitSetProperty(_audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, kOutputBus, &one, sizeof(one));
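
For reference, a tiny logging helper along these lines (CheckStatus is an illustrative name, not an Apple API) makes it easier to see which call fails and with what code:

    static void CheckStatus(OSStatus status, const char *operation) {
        // Logs e.g. "AudioUnitSetProperty(EnableIO) failed: -10865"
        if (status != noErr) {
            NSLog(@"%s failed: %d", operation, (int)status);
        }
    }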

Here is my code:

Test.h

#import <Foundation/Foundation.h>
#import <AudioToolbox/AudioToolbox.h>
#import "TPCircularBuffer.h"
@interface Test : Communicator

@property (nonatomic) AudioComponentInstance audioUnit;
@property (nonatomic) TPCircularBuffer circularBuffer;
-(TPCircularBuffer *) outputShouldUseCircularBuffer;
-(void) start;

@end

Test.m

#import "Test.h"
#define kOutputBus 0
#define kInputBus 1
@implementation Test{
    BOOL stopped;
}

static OSStatus OutputRenderCallback(void                        *inRefCon,
                                     AudioUnitRenderActionFlags  *ioActionFlags,
                                     const AudioTimeStamp        *inTimeStamp,
                                     UInt32                      inBusNumber,
                                     UInt32                      inNumberFrames,
                                     AudioBufferList             *ioData){


    Test *output = (__bridge Test*)inRefCon;


    TPCircularBuffer *circularBuffer = [output outputShouldUseCircularBuffer];
    if( !circularBuffer ){
        // No data source yet: zeroed bytes are silence for 16-bit linear PCM
        memset(ioData->mBuffers[0].mData, 0, ioData->mBuffers[0].mDataByteSize);
        return noErr;
    }

    int32_t bytesToCopy = ioData->mBuffers[0].mDataByteSize;
    SInt16 *outputBuffer = ioData->mBuffers[0].mData;

    uint32_t availableBytes;
    SInt16 *sourceBuffer = TPCircularBufferTail(circularBuffer, &availableBytes);

    // Copy what the circular buffer holds and pad any underrun with silence,
    // so stale output memory is never played back as noise
    int32_t amount = MIN(bytesToCopy, availableBytes);
    memcpy(outputBuffer, sourceBuffer, amount);
    if (amount < bytesToCopy) {
        memset((uint8_t *)outputBuffer + amount, 0, bytesToCopy - amount);
    }

    TPCircularBufferConsume(circularBuffer, amount);

    return noErr;
}



-(void) start
{
    [self circularBuffer:&_circularBuffer withSize:24576*5];

    stopped = NO;

    [self setupAudioUnit];
   //  [super setup:@"http://localhost" port:5321];
}

-(void) setupAudioUnit
{
    AudioComponentDescription desc;
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_VoiceProcessingIO;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;

    AudioComponent comp = AudioComponentFindNext(NULL, &desc);

    OSStatus status;

    status = AudioComponentInstanceNew(comp, &_audioUnit);

    if(status != noErr)
    {
        NSLog(@"Error creating AudioUnit instance");
    }

    //  Enable output on the IO unit:
    //  output is enabled on the output scope of the output element (bus 0)

    UInt32 one = 1;

    status = AudioUnitSetProperty(_audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, kOutputBus, &one, sizeof(one));


    if(status != noErr)
    {
        NSLog(@"Error enableling AudioUnit output bus");
    }

    // Explicitly set the client stream format:
    // sample rate = 44100 Hz, num channels = 1, format = 16-bit signed integer

    AudioStreamBasicDescription audioFormat = [self getAudioDescription];

    status = AudioUnitSetProperty(_audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, kOutputBus, &audioFormat, sizeof(audioFormat));

    if(status != noErr)
    {
        NSLog(@"Error setting audio format");
    }

    AURenderCallbackStruct renderCallback;
    renderCallback.inputProc = OutputRenderCallback;
    renderCallback.inputProcRefCon = (__bridge void *)(self);

    status = AudioUnitSetProperty(_audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Global, kOutputBus, &renderCallback, sizeof(renderCallback));

    if(status != noErr)
    {
        NSLog(@"Error setting rendering callback");
    }

    // Initialize the IO unit instance
    status = AudioUnitInitialize(_audioUnit);

    if(status != noErr)
    {
        NSLog(@"Error initializing audio unit");
    }

    // Start the unit so the render callback begins to fire
    status = AudioOutputUnitStart(_audioUnit);

    if(status != noErr)
    {
        NSLog(@"Error starting audio unit");
    }
}

- (AudioStreamBasicDescription)getAudioDescription {
    AudioStreamBasicDescription audioDescription = {0};
    audioDescription.mFormatID          = kAudioFormatLinearPCM;
    audioDescription.mFormatFlags       = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked | kAudioFormatFlagsNativeEndian;
    audioDescription.mChannelsPerFrame  = 1;
    audioDescription.mBytesPerPacket    = sizeof(SInt16)*audioDescription.mChannelsPerFrame;
    audioDescription.mFramesPerPacket   = 1;
    audioDescription.mBytesPerFrame     = sizeof(SInt16)*audioDescription.mChannelsPerFrame;
    audioDescription.mBitsPerChannel    = 8 * sizeof(SInt16);
    audioDescription.mSampleRate        = 44100.0;
    return audioDescription;
}

-(void)circularBuffer:(TPCircularBuffer *)circularBuffer withSize:(int)size {
    TPCircularBufferInit(circularBuffer,size);
}

-(void)appendDataToCircularBuffer:(TPCircularBuffer*)circularBuffer
              fromAudioBufferList:(AudioBufferList*)audioBufferList {
    TPCircularBufferProduceBytes(circularBuffer,
                                 audioBufferList->mBuffers[0].mData,
                                 audioBufferList->mBuffers[0].mDataByteSize);
}

-(void)freeCircularBuffer:(TPCircularBuffer *)circularBuffer {
    TPCircularBufferClear(circularBuffer);
    TPCircularBufferCleanup(circularBuffer);
}
-(TPCircularBuffer *) outputShouldUseCircularBuffer
{
    return &_circularBuffer;
}

-(void) stop
{

    OSStatus status = AudioOutputUnitStop(_audioUnit);

    if(status != noErr)
    {
        NSLog(@"Error stopping audio unit");
    }

    TPCircularBufferClear(&_circularBuffer);
    // AudioComponentInstance is a Core Audio handle, not an Objective-C
    // object, so dispose of it explicitly instead of assigning nil
    AudioComponentInstanceDispose(_audioUnit);
    _audioUnit = NULL;
    stopped = YES;
}


-(void)stream:(NSStream *)stream handleEvent:(NSStreamEvent)event{

    switch (event) {

        case NSStreamEventOpenCompleted:
            NSLog(@"Stream opened");
            break;

        case NSStreamEventHasBytesAvailable:
            if (stream == [super inputStream]) {
                NSLog(@"NSStreamEventHasBytesAvailable");
                uint8_t buffer[1024];
                NSInteger len;

                while ([[super inputStream] hasBytesAvailable]) {
                    len = [[super inputStream] read:buffer maxLength:sizeof(buffer)];
                    if (len > 0) {
                        NSData *data0 = [[NSData alloc] initWithBytes:buffer length:len];
                        if (nil != data0) {
                            // Debug check: compute the energy of the received samples
                            SInt16 *byteData = (SInt16 *)malloc(len);
                            memcpy(byteData, [data0 bytes], len);

                            double sum = 0.0;
                            for (int i = 0; i < len / 2; i++) {
                                sum += byteData[i] * byteData[i];
                            }
                            free(byteData);

                            Byte *soundData = (Byte *)malloc(len);
                            memcpy(soundData, [data0 bytes], len);

                            if (soundData) {
                                AudioBufferList *theDataBuffer = (AudioBufferList *)malloc(sizeof(AudioBufferList));
                                theDataBuffer->mNumberBuffers = 1;
                                theDataBuffer->mBuffers[0].mDataByteSize = (UInt32)len;
                                theDataBuffer->mBuffers[0].mNumberChannels = 1;
                                theDataBuffer->mBuffers[0].mData = soundData;
                                NSLog(@"soundData here");
                                [self appendDataToCircularBuffer:&_circularBuffer fromAudioBufferList:theDataBuffer];
                                // TPCircularBufferProduceBytes copies the bytes,
                                // so the temporary buffers can be freed here
                                free(theDataBuffer);
                                free(soundData);
                            }
                        }
                    }
                }
            }
            break;

        case NSStreamEventErrorOccurred:
            NSLog(@"Can't connect to server");
            break;

        case NSStreamEventEndEncountered:
            [stream close];
            [stream removeFromRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
            break;

        default:
            NSLog(@"Unknown event");
    }
    [super stream:stream handleEvent:event];

}
@end 

I would appreciate it if anyone could provide an example of playing a buffer returned from a socket server through an audio queue, so that I can hear the sound coming from the socket server.

Thanks

Your code appears to be requesting the kAudioUnitSubType_VoiceProcessingIO audio unit, but kAudioUnitSubType_RemoteIO is the more suitable iOS audio unit for simply playing buffers of audio samples.
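
A minimal sketch of that change to the component description (assuming an iOS target; on macOS, kAudioUnitSubType_DefaultOutput plays the equivalent role):

    AudioComponentDescription desc = {0};
    desc.componentType         = kAudioUnitType_Output;
    // RemoteIO is the plain IO unit; unlike VoiceProcessingIO it adds no
    // echo-cancellation/AGC stage between the render callback and the hardware
    desc.componentSubType      = kAudioUnitSubType_RemoteIO;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    desc.componentFlags        = 0;
    desc.componentFlagsMask    = 0;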

Also, your code does not appear to first select an appropriate audio session category and activate it before playing audio. See Apple's documentation on doing this: https://developer.apple.com/library/content/documentation/Audio/Conceptual/AudioSessionProgrammingGuide/Introduction/Introduction.html
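
A minimal sketch of that setup, assuming iOS and AVFoundation (error handling abbreviated), run before initializing and starting the unit:

    #import <AVFoundation/AVFoundation.h>

    // Select a playback category and activate the session before any audio IO
    NSError *sessionError = nil;
    AVAudioSession *session = [AVAudioSession sharedInstance];
    [session setCategory:AVAudioSessionCategoryPlayback error:&sessionError];
    [session setActive:YES error:&sessionError];
    if (sessionError != nil) {
        NSLog(@"Audio session setup failed: %@", sessionError);
    }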