How to use IAudioClient3 (WASAPI) with the Real-Time Work Queue API

I am working on MIDI synthesizer software with the lowest possible latency. I am aware of ASIO and other alternatives, but since Windows 10 apparently brought significant improvements to the WASAPI stack (at least in shared mode), I would like to try it out. I first wrote a simple event-driven version of the program, but since that is not the recommended way of doing low-latency audio on Windows 10 (according to the docs), I am trying to migrate to the Real-Time Work Queue API.

The documentation on low-latency audio states that the Real-Time Work Queue API or MFCreateMFByteStreamOnStreamEx is recommended with WASAPI, so that the OS can manage work items in a way that avoids interference from non-audio subsystems. This seems like a good idea, but the latter option appears to require some managed code (demonstrated in the WindowsAudioSession example), which I know nothing about and would rather avoid (also, the header Robytestream.h, which has the defs for IRandomAccessStream, was not found on my system).

The RTWQ example included in the docs is incomplete (it does not compile as such), so I have made the necessary additions to get it compiling:

#include <windows.h>
#include <audioclient.h>  // IAudioClient3, IAudioRenderClient
#include <rtworkq.h>      // Real-Time Work Queue API (IRtwqAsyncCallback, Rtwq* functions)

// Globals shared between the setup code and the callback. ERROR_EXIT /
// ERROR_THROW are my own error-handling macros and update_buffer() is my own
// synth routine; their definitions are omitted here.
struct {
    IAudioClient3* audioclient;
    IAudioRenderClient* renderclient;
    UINT32 buffer_framecount;
    INT32 framesize_bytes;
} render_info;

volatile int running = 0;

class my_rtqueue : public IRtwqAsyncCallback {

public:
    IRtwqAsyncResult* pAsyncResult;
    RTWQWORKITEM_KEY workItemKey;
    DWORD WorkQueueId;

    // Tells the work-queue system which queue (and flags) to use for Invoke().
    STDMETHODIMP GetParameters(DWORD* pdwFlags, DWORD* pdwQueue)
    {
        HRESULT hr = S_OK;
        *pdwFlags = 0;
        *pdwQueue = WorkQueueId;
        return hr;
    }

    //-------------------------------------------------------
    // Runs on the work queue each time the audio event is signaled:
    // renders one buffer's worth of audio.
    STDMETHODIMP Invoke(IRtwqAsyncResult* pResult)
    {
        HRESULT hr = S_OK;
        BYTE* pData;

        hr = render_info.renderclient->GetBuffer(render_info.buffer_framecount, &pData);
        ERROR_EXIT(hr);
        update_buffer((unsigned short*)pData, render_info.framesize_bytes / (2 * sizeof(unsigned short))); // 2 channels, sizeof(unsigned short) == 2
        hr = render_info.renderclient->ReleaseBuffer(render_info.buffer_framecount, 0);
        ERROR_EXIT(hr);

        return S_OK;
    }

    // Minimal IUnknown stubs: the object's lifetime is managed manually in
    // this example, so no real reference counting is implemented.
    STDMETHODIMP QueryInterface(const IID& riid, void** ppvObject) {
        return E_NOINTERFACE;
    }

    STDMETHODIMP_(ULONG) AddRef() {
        return 1;
    }

    STDMETHODIMP_(ULONG) Release() {
        return 1;
    }

    // Arm a waiting work item: Invoke() fires when WASAPI signals `event`.
    HRESULT queue(HANDLE event) {
        HRESULT hr;
        hr = RtwqPutWaitingWorkItem(event, 1, this->pAsyncResult, &this->workItemKey);
        return hr;
    }

    my_rtqueue() : pAsyncResult(NULL), workItemKey(0), WorkQueueId(0) {
        HRESULT hr = S_OK;
        DWORD taskId = 0;

        // Join the shared MMCSS "Pro Audio" work queue.
        hr = RtwqLockSharedWorkQueue(L"Pro Audio", 0, &taskId, &WorkQueueId);
        ERROR_THROW(hr);

        hr = RtwqCreateAsyncResult(NULL, static_cast<IRtwqAsyncCallback*>(this), NULL, &pAsyncResult);
        ERROR_THROW(hr);
    }

    int stop() {
        HRESULT hr;
        if (pAsyncResult)
            pAsyncResult->Release();

        if (0xFFFFFFFF != this->WorkQueueId) {
            hr = RtwqUnlockWorkQueue(this->WorkQueueId);
            if (FAILED(hr)) {
                printf("Failed with RtwqUnlockWorkQueue 0x%x\n", hr);
                return 0;
            }
        }
        return 1;
    }

};

So, the actual WASAPI code (HRESULT error checking omitted for clarity):

#include <mmdeviceapi.h>  // IMMDeviceEnumerator / IMMDevice
#include <avrt.h>         // AvSetMmThreadCharacteristics (link with avrt.lib)

DWORD WINAPI thread_main(LPVOID param) {

    HRESULT hr;
    REFERENCE_TIME hnsRequestedDuration = 0;
    IMMDeviceEnumerator* pEnumerator = NULL;
    IMMDevice* pDevice = NULL;
    IAudioClient3* pAudioClient = NULL;
    IAudioRenderClient* pRenderClient = NULL;
    WAVEFORMATEX* pwfx = NULL;
    HANDLE hEvent = NULL;
    HANDLE hTask = NULL;
    UINT32 bufferFrameCount;
    BYTE* pData;
    DWORD flags = 0;

    hr = RtwqStartup();

    // also, hr is checked for errors every step of the way

    hr = CoInitialize(NULL);

    hr = CoCreateInstance(
        CLSID_MMDeviceEnumerator, NULL,
        CLSCTX_ALL, IID_IMMDeviceEnumerator,
        (void**)&pEnumerator);

    hr = pEnumerator->GetDefaultAudioEndpoint(
        eRender, eConsole, &pDevice);

    hr = pDevice->Activate(
        __uuidof(IAudioClient3), CLSCTX_ALL, // request IAudioClient3, not the base IAudioClient
        NULL, (void**)&pAudioClient);


    WAVEFORMATEX wave_format = {};
    wave_format.wFormatTag = WAVE_FORMAT_PCM;
    wave_format.nChannels = 2;
    wave_format.nSamplesPerSec = 48000;
    wave_format.nAvgBytesPerSec = 48000 * 2 * 16 / 8;
    wave_format.nBlockAlign = 2 * 16 / 8;
    wave_format.wBitsPerSample = 16;

    UINT32 DP, FP, MINP, MAXP;
    hr = pAudioClient->GetSharedModeEnginePeriod(&wave_format, &DP, &FP, &MINP, &MAXP);
    printf("DefaultPeriod: %u, Fundamental period: %u, min_period: %u, max_period: %u\n", DP, FP, MINP, MAXP);

    hr = pAudioClient->InitializeSharedAudioStream(AUDCLNT_STREAMFLAGS_EVENTCALLBACK, MINP, &wave_format, 0);

    my_rtqueue* workqueue = NULL;
    try {
        workqueue = new my_rtqueue();
    }
    catch (...) {
        hr = E_ABORT;
        ERROR_EXIT(hr);
    }

    hr = pAudioClient->GetBufferSize(&bufferFrameCount);

    PWAVEFORMATEX wf = &wave_format;
    UINT32 current_period;
    pAudioClient->GetCurrentSharedModeEnginePeriod(&wf, &current_period);

    INT32 FrameSize_bytes = bufferFrameCount * wave_format.nChannels * wave_format.wBitsPerSample / 8;
    printf("bufferFrameCount: %u, FrameSize_bytes: %d, current_period: %u\n", bufferFrameCount, FrameSize_bytes, current_period);

    hr = pAudioClient->GetService(
        IID_IAudioRenderClient,
        (void**)&pRenderClient);

    render_info.framesize_bytes = FrameSize_bytes;
    render_info.buffer_framecount = bufferFrameCount;
    render_info.renderclient = pRenderClient;

    hEvent = CreateEvent(nullptr, false, false, nullptr);
    if (hEvent == NULL) { ERROR_EXIT(E_FAIL); } // CreateEvent returns NULL on failure, not INVALID_HANDLE_VALUE

    hr = pAudioClient->SetEventHandle(hEvent);

    const size_t num_samples = FrameSize_bytes / sizeof(unsigned short);

    DWORD taskIndex = 0;
    hTask = AvSetMmThreadCharacteristics(TEXT("Pro Audio"), &taskIndex);

    if (hTask == NULL) {
        hr = E_FAIL;
    }

    hr = pAudioClient->Start();  // Start playing.

    running = 1;
    while (running) {
        workqueue->queue(hEvent);
    }

    workqueue->stop();
    hr = RtwqShutdown();

    delete workqueue;

    running = 0;

    return 1;
}

This kind of works (i.e. audio is being output), but on every other call to my_rtqueue::Invoke(), IAudioRenderClient::GetBuffer() returns an HRESULT of 0x88890006 (AUDCLNT_E_BUFFER_TOO_LARGE), and the actual audio output is definitely not what I intended.

What is wrong with my code? Is this the correct way of using RTWQ with WASAPI?

It turned out that my code had quite a few problems, none of which were really related to Rtwq. The biggest problem was my assumption that the shared-mode audio stream uses 16-bit integer samples, when in fact my audio was set up for 32-bit float format (WAVE_FORMAT_IEEE_FLOAT). The currently active shared-mode format, period, etc. should be fetched like this:

WAVEFORMATEX* wavefmt = NULL;
UINT32 current_period = 0;
hr = pAudioClient->GetCurrentSharedModeEnginePeriod(&wavefmt, &current_period);

wavefmt now contains the output format information of the current shared mode. If its wFormatTag field equals WAVE_FORMAT_EXTENSIBLE, the WAVEFORMATEX has to be cast to WAVEFORMATEXTENSIBLE to see the actual format.
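A minimal sketch of that check (assuming, as turned out to be the case on my system, that the engine format is 32-bit float; the KSDATAFORMAT_SUBTYPE_* GUIDs come from ksmedia.h):

if (wavefmt->wFormatTag == WAVE_FORMAT_EXTENSIBLE) {
    WAVEFORMATEXTENSIBLE* ext = (WAVEFORMATEXTENSIBLE*)wavefmt;
    if (IsEqualGUID(ext->SubFormat, KSDATAFORMAT_SUBTYPE_IEEE_FLOAT)) {
        // samples are 32-bit floats (WAVE_FORMAT_IEEE_FLOAT)
    } else if (IsEqualGUID(ext->SubFormat, KSDATAFORMAT_SUBTYPE_PCM)) {
        // samples are integer PCM
    }
}

After this, the minimum period supported by the current setup has to be fetched, like so: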

UINT32 DP, FP, MINP, MAXP;
hr = pAudioClient->GetSharedModeEnginePeriod(wavefmt, &DP, &FP, &MINP, &MAXP);
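(These period values are expressed in audio frames, so, for example, a minimum period of 144 frames at 48 kHz would amount to 144 / 48000 ≈ 3 ms.)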

Then the audio stream is initialized with the new InitializeSharedAudioStream function:

hr = pAudioClient->InitializeSharedAudioStream(AUDCLNT_STREAMFLAGS_EVENTCALLBACK, MINP, wavefmt, NULL);
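(The last argument of InitializeSharedAudioStream is the audio session GUID; passing NULL means the default session is used.)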

After that, the actual size of the allocated buffer is fetched:

hr = pAudioClient->GetBufferSize(&render_info.buffer_framecount);

and GetCurrentPadding is used in the Get/ReleaseBuffer logic:

UINT32 pad = 0;
hr = render_info.audioclient->GetCurrentPadding(&pad);

// Write only as many frames as are currently free in the shared buffer.
int actual_size = (render_info.buffer_framecount - pad);

hr = render_info.renderclient->GetBuffer(actual_size, &pData);
if (SUCCEEDED(hr)) {
    update_buffer((float*)pData, actual_size);
    hr = render_info.renderclient->ReleaseBuffer(actual_size, 0);
    ERROR_EXIT(hr);
}

The documentation for IAudioClient::Initialize states the following about shared-mode streams (and I assume it applies to the newer IAudioClient3 as well):

Each time the thread awakens, it should call IAudioClient::GetCurrentPadding to determine how much data to write to a rendering buffer or read from a capture buffer. In contrast to the two buffers that the Initialize method allocates for an exclusive-mode stream that uses event-driven buffering, a shared-mode stream requires a single buffer.

Using GetCurrentPadding fixed the AUDCLNT_E_BUFFER_TOO_LARGE issue, and filling the buffer with 32-bit float samples instead of 16-bit integers made the output sound right on my system (the effect was quite funky, though!).
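For reference, putting these pieces together, the render callback ended up shaped roughly like the sketch below (render_info and update_buffer are my globals and synth routine from the question; this shows the shape of the logic, not a drop-in implementation):

STDMETHODIMP my_rtqueue::Invoke(IRtwqAsyncResult* pResult)
{
    UINT32 pad = 0;
    HRESULT hr = render_info.audioclient->GetCurrentPadding(&pad);
    if (FAILED(hr)) return hr;

    // Render only into the portion of the shared buffer that is currently free.
    UINT32 actual_size = render_info.buffer_framecount - pad;

    BYTE* pData = NULL;
    hr = render_info.renderclient->GetBuffer(actual_size, &pData);
    if (SUCCEEDED(hr)) {
        update_buffer((float*)pData, actual_size); // 32-bit float samples now
        hr = render_info.renderclient->ReleaseBuffer(actual_size, 0);
    }
    return hr;
}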

If anyone comes up with a better/more correct way of using the Rtwq API, I'd love to hear about it.