用来自 AVPacket 的字节数据馈送 MediaCodec:输出缓冲区的问题

Feeding MediaCodec with byte data from AVPacket: problems with output buffers

我的任务描述: 我正在 Android (API >= 17) 上开发视频播放器。它必须同时适用于 HLS 和 multicast(组播)视频。此外,它必须支持多个音轨。

为什么我决定使用ffmpeg:

我的想法: 我在一个循环中使用 ffmpeg 解复用流。我使用 videoStream->codec->extradata 获得 CSD,然后正确配置 MediaFormat。在每次迭代中,当我有新视频 AVPacket 可用时,我会使用通过 av_bitstream_filter_init 初始化的 h264_mp4toannexb 过滤器来过滤它的缓冲区。然后我调用 java 方法 onNewVideoData,在其中我得到 AVPacket 字节数组。我清除可用的输入缓冲区,然后用新数据填充它。我也得到了 pts。由于我的流没有起点,我通过从所有后续的 pts 中减去第一个 AVPacket 的 pts 来计算新的 pts';第一个 pts 我赋值为 0。然后我调用 queueInputBuffer 将缓冲区发送到解码器。

我使用两个线程:一个用于获取数据并将其提交到输入缓冲区,另一个用于将数据发布到 Surface

完整的播放器 C 代码:

#include <jni.h>
#include <android/log.h>
#include <stddef.h>

#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/buffer.h>

#define TAG "ffmpegPlayer"

/*
 * Global playback state shared by every JNI entry point below.
 * NOTE(review): accessed from both the Java caller thread and the demuxer
 * thread without any synchronization — confirm this is intentional.
 */
struct
{
    const char* url;                 /* stream URL (never assigned in this file) */
    jint width;                      /* video width in pixels, set by initNative */
    jint height;                     /* video height in pixels, set by initNative */
    jfloat aspectRatio;              /* sample aspect ratio num/den, set by initNative */
    jint streamsCount;               /* total number of streams in the input */
    AVFormatContext* formatContext;  /* demuxer context, owned by initNative */
    AVStream* videoStream;           /* first video stream found by initNative */
} context;

/* Most recently demuxed packet; refilled by startNative's read loop. */
AVPacket packet;
/* h264_mp4toannexb bitstream filter, created once in initNative. */
AVBitStreamFilterContext* avBitStreamFilterContext;

/*
 * Returns the video stream's codec-specific data (H.264 SPS/PPS extradata)
 * as a Java byte[], intended for the MediaFormat "csd-0" buffer.
 * NOTE(review): assumes initNative succeeded first — videoStream, its codec
 * context and extradata are dereferenced without null checks.
 */
JNIEXPORT jbyteArray JNICALL Java_com_example_app_FfmpegPlayer_getCsdNative(JNIEnv* env, jobject x)
{
    jbyteArray arr = (*env)->NewByteArray(env, context.videoStream->codec->extradata_size);
    (*env)->SetByteArrayRegion(env, arr, 0, context.videoStream->codec->extradata_size, (jbyte*)context.videoStream->codec->extradata);

    return arr;
}

/* Returns the cached video width in pixels (valid after initNative). */
JNIEXPORT jint JNICALL Java_com_example_app_FfmpegPlayer_getWidthNative(JNIEnv* env, jobject x)
{
    return context.width;
}

/* Returns the cached video height in pixels (valid after initNative). */
JNIEXPORT jint JNICALL Java_com_example_app_FfmpegPlayer_getHeightNative(JNIEnv* env, jobject x)
{
    return context.height;
}

/* Returns the cached sample aspect ratio (valid after initNative). */
JNIEXPORT jfloat JNICALL Java_com_example_app_FfmpegPlayer_getAspectRatioNative(JNIEnv* env, jobject x)
{
    return context.aspectRatio;
}

/*
 * Returns the total number of streams in the input.
 * NOTE(review): the return type is jfloat but streamsCount is a jint —
 * this looks like a copy-paste slip from getAspectRatioNative. It must
 * match the Java-side native declaration (not shown in this file), so
 * verify there before changing the signature to jint.
 */
JNIEXPORT jfloat JNICALL Java_com_example_app_FfmpegPlayer_getStreamsCountNative(JNIEnv* env, jobject x)
{
    return context.streamsCount;
}

/*
 * Returns the current packet's presentation timestamp converted from the
 * stream's time_base units to microseconds (the unit MediaCodec expects).
 *
 * BUG FIX: packets without a timestamp carry AV_NOPTS_VALUE; multiplying
 * that sentinel produced a huge garbage value. Report 0 for such packets.
 */
JNIEXPORT jlong JNICALL Java_com_example_app_FfmpegPlayer_getPtsNative(JNIEnv* env, jobject obj)
{
    if (packet.pts == AV_NOPTS_VALUE) {
        return 0;
    }
    /* double arithmetic, truncated back to a 64-bit microsecond count */
    return (jlong)(packet.pts * av_q2d(context.videoStream->time_base) * 1000000);
}

/*
 * Opens the input URL, locates the first video stream and caches its
 * properties (dimensions, aspect ratio, stream count) in the global
 * context. Also creates the h264_mp4toannexb bitstream filter.
 * Returns JNI_TRUE on success, JNI_FALSE on any failure.
 */
JNIEXPORT jboolean JNICALL Java_com_example_app_FfmpegPlayer_initNative(JNIEnv* env, jobject obj, const jstring u)
{
    av_register_all();
    avBitStreamFilterContext = av_bitstream_filter_init("h264_mp4toannexb");
    if (avBitStreamFilterContext == NULL) {
        __android_log_print(ANDROID_LOG_ERROR, TAG, "Unable to init h264_mp4toannexb filter");
        return JNI_FALSE;
    }

    const char* url = (*env)->GetStringUTFChars(env, u, NULL);
    if (url == NULL) {
        /* JVM out of memory; an exception is already pending. */
        return JNI_FALSE;
    }
    __android_log_print(ANDROID_LOG_DEBUG, TAG, "Init: %s", url);

    AVFormatContext* formatContext = NULL;
    int openResult = avformat_open_input(&formatContext, url, NULL, NULL);
    /* BUG FIX: the UTF chars were never released before (JNI leak);
       release as soon as ffmpeg no longer needs the string. */
    (*env)->ReleaseStringUTFChars(env, u, url);
    if (openResult < 0) {
        __android_log_print(ANDROID_LOG_ERROR, TAG, "Unable to open input");
        return JNI_FALSE;
    }

    if (avformat_find_stream_info(formatContext, NULL) < 0) {
        __android_log_print(ANDROID_LOG_ERROR, TAG, "Unable to find stream info");
        avformat_close_input(&formatContext);  /* BUG FIX: was leaked here */
        return JNI_FALSE;
    }

    AVInputFormat* iformat = formatContext->iformat;
    __android_log_print(ANDROID_LOG_DEBUG, TAG, "format: %s", iformat->name);

    context.streamsCount = formatContext->nb_streams;
    __android_log_print(ANDROID_LOG_DEBUG, TAG, "Streams count: %d", formatContext->nb_streams);

    /* Pick the first video stream; log the language of each audio stream. */
    unsigned int i;
    AVStream* videoStream = NULL;
    for (i = 0; i < formatContext->nb_streams; i++) {
        int codecType = formatContext->streams[i]->codec->codec_type;
        if (videoStream == NULL && codecType == AVMEDIA_TYPE_VIDEO) {
            videoStream = formatContext->streams[i];
        }
        else if (codecType == AVMEDIA_TYPE_AUDIO) {
            AVDictionaryEntry* lang = av_dict_get(formatContext->streams[i]->metadata, "language", NULL, 0);
            if (lang != NULL) {
                __android_log_print(ANDROID_LOG_DEBUG, TAG, "Audio stream %d: %s", (int)i, lang->value);
            }
        }
    }
    if (videoStream == NULL) {
        __android_log_print(ANDROID_LOG_ERROR, TAG, "Unable to find video stream");
        avformat_close_input(&formatContext);  /* BUG FIX: was leaked here */
        return JNI_FALSE;
    }
    context.videoStream = videoStream;
    __android_log_print(ANDROID_LOG_DEBUG, TAG, "Video stream:  %d", videoStream->index);

    AVCodecContext* codecContext = videoStream->codec;

    __android_log_print(ANDROID_LOG_DEBUG, TAG, "width: %d, height: %d", codecContext->width, codecContext->height);
    context.width = codecContext->width;
    context.height = codecContext->height;

    AVRational aspectRatio = codecContext->sample_aspect_ratio;
    __android_log_print(ANDROID_LOG_DEBUG, TAG, "aspect ratio: %d/%d", aspectRatio.num, aspectRatio.den);
    /* BUG FIX: integer division truncated the ratio to 0 or 1 and could
       divide by zero when the ratio is unset; divide in float instead. */
    if (aspectRatio.num > 0 && aspectRatio.den > 0) {
        context.aspectRatio = (jfloat)aspectRatio.num / (jfloat)aspectRatio.den;
    } else {
        context.aspectRatio = 0.0f;  /* unknown aspect ratio */
    }

    context.formatContext = formatContext;

    return JNI_TRUE;
}

void filterPacket()
{
    av_bitstream_filter_filter(avBitStreamFilterContext, context.videoStream->codec, NULL, &packet.data, &packet.size, packet.data, packet.size, packet.flags);
}

/*
 * Demuxer loop: reads packets until EOF/error, filters video packets to
 * Annex-B and notifies the Java side via onNewVideoData() for each one.
 * Runs on the PlayerThread; blocks until the stream ends.
 */
JNIEXPORT void JNICALL Java_com_example_app_FfmpegPlayer_startNative(JNIEnv* env, jobject obj)
{
    jclass cl = (*env)->GetObjectClass(env, obj);
    jmethodID updateMethodId = (*env)->GetMethodID(env, cl, "onNewVideoData", "()V");
    if (updateMethodId == NULL) {
        /* Method lookup failed; a NoSuchMethodError is already pending. */
        return;
    }

    /* BUG FIX: the old loop checked formatContext for NULL *after*
       av_read_frame had already dereferenced it — check up front. */
    if (context.formatContext == NULL) {
        return;
    }

    while (av_read_frame(context.formatContext, &packet) >= 0) {
        if (packet.stream_index == context.videoStream->index) {
            filterPacket();
            (*env)->CallVoidMethod(env, obj, updateMethodId);
            if ((*env)->ExceptionCheck(env)) {
                /* Java callback threw; stop pumping instead of spinning. */
                break;
            }
        }
        else {
            /* BUG FIX: non-video packets were never released (leak). */
            av_free_packet(&packet);
        }
    }
}

/*
 * Returns the current (already filtered) packet payload as a Java byte[].
 *
 * BUG FIX: the old code read packet.buf — the ORIGINAL demuxed buffer —
 * but filterPacket() replaces packet.data/packet.size with the Annex-B
 * converted payload, so the decoder was being fed the wrong bytes.
 * Always read packet.data/packet.size.
 */
JNIEXPORT jbyteArray JNICALL Java_com_example_app_FfmpegPlayer_getVideoDataNative(JNIEnv* env, jobject obj)
{
    jbyteArray arr = (*env)->NewByteArray(env, packet.size);
    if (arr == NULL) {
        return NULL;  /* OOM; exception pending */
    }
    (*env)->SetByteArrayRegion(env, arr, 0, packet.size, (jbyte*)packet.data);

    return arr;
}

完整的Java-代码:

package com.example.app;


import android.media.MediaCodec;
import android.media.MediaFormat;
import android.view.Surface;

import java.nio.ByteBuffer;

public class FfmpegPlayer {

    static {
        System.loadLibrary("avutil-54");
        System.loadLibrary("swscale-3");
        System.loadLibrary("swresample-1");
        System.loadLibrary("avcodec-56");
        System.loadLibrary("avformat-56");
        System.loadLibrary("avfilter-5");
        System.loadLibrary("ffmpeg-player");
    }

    private native boolean initNative(String url);
    private native boolean startNative();
    private native int getWidthNative();
    private native int getHeightNative();
    private native float getAspectRatioNative();
    private native byte[] getVideoDataNative();
    private native long getPtsNative();
    private native byte[] getCsdNative();

    private String source;
    private PlayerThread playerThread;
    private int width;
    private int height;
    private MediaCodec decoder;
    private ByteBuffer[] inputBuffers;
    private Surface surface;
    private long firstPtsTime;

    public PlanetaPlayer(Surface surface) {
        this.surface = surface;
    }

    public void setDataSource(String source) {
        if (!initNative(source)) {
            return;
        }
        width = getWidthNative();
        height = getHeightNative();
        MediaFormat format = MediaFormat.createVideoFormat("video/avc", width, height);
        format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, width * height);
        format.setByteBuffer("csd-0", ByteBuffer.wrap(getCsdNative()));
        LogUtils.log("CSD: ");
        outputAsHex(getCsdNative());
        try {
            decoder = MediaCodec.createDecoderByType("video/avc");
            decoder.configure(format, surface, null, 0);
            decoder.start();

            playerThread = new PlayerThread();
            playerThread.start();

            new OutputThread().run();
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }

    public void onNewVideoData() {
        int index = decoder.dequeueInputBuffer(0);
        if (index >= 0) {
            byte[] data = getVideoDataNative();
            ByteBuffer byteBuffer = decoder.getInputBuffers()[index];
            byteBuffer.clear();
            byteBuffer.put(data);
            long pts = getPtsNative();

            LogUtils.log("Input AVPacket pts: " + pts);
            LogUtils.log("Input AVPacket data length: " + data.length);
            LogUtils.log("Input AVPacket data: ");
            outputAsHex(data);

            if (firstPtsTime == 0) {
                firstPtsTime = pts;
                pts = 0;
            }
            else {
                pts -= firstPtsTime;
            }
            decoder.queueInputBuffer(index, 0, data.length, pts, 0);
        }
    }

    private void outputAsHex(byte[] data) {
        String[] test = new String[data.length];
        for (int i = 0; i < data.length; i++) {
            test[i] = String.format("%02x", data[i]);
        }
        LogUtils.log(test);
    }

    private class PlayerThread extends Thread {
        @Override
        public void run() {
            super.run();

            startNative();
        }
    }

    private class OutputThread extends Thread {

        @Override
        public void run() {
            super.run();
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            while (true) {
                int index = decoder.dequeueOutputBuffer(info, 0);
                if (index >= 0) {
                    ByteBuffer buffer = decoder.getOutputBuffers()[index];
                    buffer.position(info.offset);
                    buffer.limit(info.offset + info.size);
                    byte[] test = new byte[info.size];
                    for (int i = 0; i < info.size; i++) {
                        test[i] = buffer.get(i);
                    }
                    LogUtils.log("Output info: size=" + info.size + ", presentationTimeUs=" + info.presentationTimeUs + ",offset=" + info.offset + ",flags=" + info.flags);
                    LogUtils.log("Output data: ");
                    outputAsHex(test);
                    decoder.releaseOutputBuffer(index, true);
                }
            }
        }
    }
}

问题: 对于测试,我使用了带有以下视频流的 TS 文件:

Codec: H264 - MPEG-4 AVC (part 10) (h264)
Resolution: 720x578
Frame rate: 25
Decoded format: Planar 4:2:0 YUV

CSD 如下:

[00, 00, 00, 01, 09, 10, 00, 00, 00, 01, 27, 4d, 40, 1e, 9a, 62, 01, 68, 48, b0, 44, 20, a0, a0, a8, 00, 00, 03, 00, 08, 00, 00, 03, 01, 94, a0, 00, 00, 00, 01, 28, ee, 3c, 80]

在不同的设备上我有不同的结果。但是我无法在 Surface 上显示视频。

输入:

Input AVPacket pts: 351519222
Input AVPacket data length: 54941
Input AVPacket data: [00, 00, 00, 01, 09, 10, 00, 00, 00, 01, 27, 4d, 40, 1e, 9a, 62, 01, 68, 48, b0, 44, 20, a0, a0, a8, 00, 00, 03, 00, 08, 00, 00, 03, 01, 94, a0, 00, 00, 00, 01,...]
------------------------------------
Input AVPacket pts: 351539222
Input AVPacket data length: 9605
Input AVPacket data: [00, 00, 00, 01, 09, 30, 00, 00, 00, 01, 06, 01, 01, 24, 80, 00, 00, 00, 01, 21, e3, bd, da, e4, 46, c5, 8b, 6b, 7d, 07, 59, 23, 6f, 92, e9, fb, 3b, b9, 4d, f9,...]
------------------------------------
Input AVPacket pts: 351439222
Input AVPacket data length: 1985
Input AVPacket data: [00, 00, 00, 01, 09, 50, 00, 00, 00, 01, 06, 01, 01, 14, 80, 00, 00, 00, 01, 21, a8, f2, 74, 69, 14, 54, 4d, c5, 8b, e8, 42, 52, ac, 80, 53, b4, 4d, 24, 1f, 6c,...]
------------------------------------
Input AVPacket pts: 351459222
Input AVPacket data length: 2121
Input AVPacket data: [00, 00, 00, 01, 09, 50, 00, 00, 00, 01, 06, 01, 01, 24, 80, 00, 00, 00, 01, 21, a8, f3, 74, e9, 0b, 8b, 17, e8, 43, f8, 10, 88, ca, 2b, 11, 53, c8, 31, f0, 0b,...]
... on and on

Asus Zenfone (Android 5.0.2) 输出线程(解码后,只有 8 字节数据的 25 个缓冲区的奇怪结果):

Output info: size=8, presentationTimeUs=-80001,offset=0,flags=0
Output data: 
[01, 00, 00, 00, 90, c5, 99, ac]
---------------------------
Output info: size=8, presentationTimeUs=0,offset=0,flags=1
Output data: 
[01, 00, 00, 00, 78, ea, 86, ac]
---------------------------
Output info: size=8, presentationTimeUs=720000,offset=0,flags=1
Output data: 
[01, 00, 00, 00, e8, 86, b6, ac]
---------------------------
Output info: size=8, presentationTimeUs=780000,offset=0,flags=1
Output data: 
[01, 00, 00, 00, c0, cb, 93, ac]
---------------------------
Output info: size=8, presentationTimeUs=840000,offset=0,flags=0
Output data: 
[01, 00, 00, 00, 80, 87, 93, ac]
---------------------------
Output info: size=8, presentationTimeUs=960000,offset=0,flags=1
Output data: 
[01, 00, 00, 00, e0, 3f, 8b, ac]
---------------------------
Output info: size=8, presentationTimeUs=1040000,offset=0,flags=1
Output data: 
[01, 00, 00, 00, f8, 76, 85, ac]
---------------------------
Output info: size=8, presentationTimeUs=1180000,offset=0,flags=1
Output data: 
[01, 00, 00, 00, e0, 87, 93, ac]
---------------------------
Output info: size=8, presentationTimeUs=1260000,offset=0,flags=1
Output data: 
[01, 00, 00, 00, e8, b5, d2, ac]
---------------------------
Output info: size=8, presentationTimeUs=1800000,offset=0,flags=0
Output data: 
[01, 00, 00, 00, 90, c5, 99, ac]
---------------------------
Output info: size=8, presentationTimeUs=1860000,offset=0,flags=1
Output data: 
[01, 00, 00, 00, e0, c0, 84, ac]
---------------------------
Output info: size=8, presentationTimeUs=2080000,offset=0,flags=1
Output data: 
[01, 00, 00, 00, c0, cb, 93, ac]
---------------------------
Output info: size=8, presentationTimeUs=3440000,offset=0,flags=1
Output data: 
[01, 00, 00, 00, 80, 87, 93, ac]
---------------------------
Output info: size=8, presentationTimeUs=3520000,offset=0,flags=0
Output data: 
[01, 00, 00, 00, 78, ea, 86, ac]
---------------------------
Output info: size=8, presentationTimeUs=4160000,offset=0,flags=1
Output data: 
[01, 00, 00, 00, e8, 86, b6, ac]
---------------------------
Output info: size=8, presentationTimeUs=4300000,offset=0,flags=1
Output data: 
[01, 00, 00, 00, e0, 3f, 8b, ac]
---------------------------
Output info: size=8, presentationTimeUs=4400000,offset=0,flags=1
Output data: 
[01, 00, 00, 00, 90, c5, 99, ac]
---------------------------
Output info: size=8, presentationTimeUs=4480000,offset=0,flags=1
Output data: 
[01, 00, 00, 00, f8, 76, 85, ac]
---------------------------
Output info: size=8, presentationTimeUs=4680000,offset=0,flags=0
Output data: 
[01, 00, 00, 00, c0, cb, 93, ac]
---------------------------
Output info: size=8, presentationTimeUs=4720000,offset=0,flags=1
Output data: 
[01, 00, 00, 00, e0, c0, 84, ac]
---------------------------
Output info: size=8, presentationTimeUs=4760000,offset=0,flags=1
Output data: 
[01, 00, 00, 00, e0, 87, 93, ac]
---------------------------
Output info: size=8, presentationTimeUs=4800000,offset=0,flags=0
Output data: 
[01, 00, 00, 00, 58, 54, 83, ac]
---------------------------
Output info: size=8, presentationTimeUs=5040000,offset=0,flags=0
Output data: 
[01, 00, 00, 00, e8, b5, d2, ac]
---------------------------
Output info: size=8, presentationTimeUs=5100000,offset=0,flags=1
Output data: 
[01, 00, 00, 00, 80, 87, 93, ac]
---------------------------
Output info: size=8, presentationTimeUs=5320000,offset=0,flags=0
Output data: 
[01, 00, 00, 00, 78, ea, 86, ac]
---------------------------
Output info: size=8, presentationTimeUs=5380000,offset=0,flags=1
Output data: 
[01, 00, 00, 00, e8, 86, b6, ac]

其他华硕 Zenfone 日志:

01-25 17:11:36.859 4851-4934/com.example.app I/OMXClient: Using client-side OMX mux.
01-25 17:11:36.865 317-1075/? I/OMX-VDEC-1080P: component_init: OMX.qcom.video.decoder.avc : fd=43
01-25 17:11:36.867 317-1075/? I/OMX-VDEC-1080P: Capabilities: driver_name = msm_vidc_driver, card = msm_vdec_8974, bus_info = , version = 1, capabilities = 4003000
01-25 17:11:36.881 317-1075/? I/OMX-VDEC-1080P: omx_vdec::component_init() success : fd=43
01-25 17:11:36.885 4851-4934/com.example.app I/ACodec: [OMX.qcom.video.decoder.avc] DRC Mode: Dynamic Buffer Mode
01-25 17:11:36.893 317-20612/? E/C2DColorConvert: unknown format passed for luma alignment number
01-25 17:11:36.933 317-12269/? E/C2DColorConvert: unknown format passed for luma alignment number
01-25 17:11:36.933 317-12269/? E/C2DColorConvert: unknown format passed for luma alignment number
01-25 17:11:36.935 317-5559/? E/C2DColorConvert: unknown format passed for luma alignment number
01-25 17:11:36.957 317-5559/? E/C2DColorConvert: unknown format passed for luma alignment number
01-25 17:11:36.957 4851-4934/com.example.app I/ExtendedCodec: Decoder will be in frame by frame mode
01-25 17:11:36.963 317-1075/? E/C2DColorConvert: unknown format passed for luma alignment number
01-25 17:11:36.963 317-1075/? E/C2DColorConvert: unknown format passed for luma alignment number
01-25 17:11:36.964 317-20612/? E/OMX-VDEC-1080P: Extension: OMX.google.android.index.describeColorFormat not implemented
01-25 17:11:37.072 317-20612/? E/OMX-VDEC-1080P: Extension: OMX.google.android.index.describeColorFormat not implemented
01-25 17:11:37.072 4851-4934/com.example.app W/ACodec: do not know color format 0x7fa30c04 = 2141391876

华硕 Nexus 7 (Android 6.0.1) 崩溃:

01-25 17:23:06.921 11602-11695/com.example.app I/OMXClient: Using client-side OMX mux.
01-25 17:23:06.952 11602-11694/com.example.app I/MediaCodec: [OMX.qcom.video.decoder.avc] setting surface generation to 11880449
01-25 17:23:06.954 194-194/? E/OMX-VDEC-1080P: Extension: OMX.google.android.index.storeANWBufferInMetadata not implemented
01-25 17:23:06.954 194-194/? E/OMX-VDEC-1080P: Extension: OMX.google.android.index.storeMetaDataInBuffers not implemented
01-25 17:23:06.954 194-194/? E/OMXNodeInstance: getExtensionIndex(45:qcom.decoder.avc, OMX.google.android.index.storeMetaDataInBuffers) ERROR: NotImplemented(0x80001006)
01-25 17:23:06.954 11602-11695/com.example.app E/ACodec: [OMX.qcom.video.decoder.avc] storeMetaDataInBuffers failed w/ err -2147483648
01-25 17:23:06.963 11602-11695/com.example.app D/SurfaceUtils: set up nativeWindow 0xa0b7a108 for 720x576, color 0x7fa30c03, rotation 0, usage 0x42002900
01-25 17:23:06.967 194-604/? E/OMX-VDEC-1080P: GET_MV_BUFFER_SIZE returned: Size: 122880 and alignment: 8192
01-25 17:23:07.203 11602-11695/com.example.app W/AHierarchicalStateMachine: Warning message AMessage(what = 'omxI') = {
                                                                         int32_t type = 0
                                                                         int32_t event = 2130706432
                                                                         int32_t data1 = 1
                                                                         int32_t data2 = 0
                                                                       } unhandled in root state.
01-25 17:23:07.232 11602-11695/com.example.app D/SurfaceUtils: set up nativeWindow 0xa0b7a108 for 720x576, color 0x7fa30c03, rotation 0, usage 0x42002900
01-25 17:23:07.241 194-194/? E/OMX-VDEC-1080P: GET_MV_BUFFER_SIZE returned: Size: 122880 and alignment: 8192
01-25 17:23:07.242 194-194/? E/OMX-VDEC-1080P: Insufficient sized buffer given for playback, expected 671744, got 663552
01-25 17:23:07.242 194-194/? E/OMXNodeInstance: useBuffer(45:qcom.decoder.avc, Output:1 671744@0xb60a0860) ERROR: BadParameter(0x80001005)
01-25 17:23:07.243 11602-11695/com.example.app E/ACodec: registering GraphicBuffer 0 with OMX IL component failed: -2147483648
01-25 17:23:07.243 11602-11695/com.example.app E/ACodec: Failed to allocate output port buffers after port reconfiguration: (-2147483648)
01-25 17:23:07.243 11602-11695/com.example.app E/ACodec: signalError(omxError 0x80001001, internalError -2147483648)
01-25 17:23:07.243 11602-11694/com.example.app E/MediaCodec: Codec reported err 0x80001001, actionCode 0, while in state 6
01-25 17:23:07.245 11602-11602/com.example.app W/System.err: java.lang.IllegalStateException
01-25 17:23:07.245 11602-11602/com.example.app W/System.err:     at android.media.MediaCodec.native_dequeueOutputBuffer(Native Method)
01-25 17:23:07.245 11602-11602/com.example.app W/System.err:     at android.media.MediaCodec.dequeueOutputBuffer(MediaCodec.java:2379)
01-25 17:23:07.245 11602-11602/com.example.app W/System.err:     at com.example.app.FfmpegPlayer$OutputThread.run(FfmpegPlayer.java:122)
01-25 17:23:07.245 11602-11602/com.example.app W/System.err:     at com.example.app.FfmpegPlayer.setDataSource(FfmpegPlayer.java:66)
01-25 17:23:07.245 11602-11602/com.example.app W/System.err:     at com.example.app.activities.TestActivity.surfaceCreated(TestActivity.java:151)
01-25 17:23:07.245 11602-11602/com.example.app W/System.err:     at android.view.SurfaceView.updateWindow(SurfaceView.java:583)
01-25 17:23:07.245 11602-11602/com.example.app W/System.err:     at android.view.SurfaceView.onPreDraw(SurfaceView.java:177)
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:     at android.view.ViewTreeObserver.dispatchOnPreDraw(ViewTreeObserver.java:944)
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:     at android.view.ViewRootImpl.performTraversals(ViewRootImpl.java:2055)
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:     at android.view.ViewRootImpl.doTraversal(ViewRootImpl.java:1107)
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:     at android.view.ViewRootImpl$TraversalRunnable.run(ViewRootImpl.java:6013)
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:     at android.view.Choreographer$CallbackRecord.run(Choreographer.java:858)
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:     at android.view.Choreographer.doCallbacks(Choreographer.java:670)
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:     at android.view.Choreographer.doFrame(Choreographer.java:606)
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:     at android.view.Choreographer$FrameDisplayEventReceiver.run(Choreographer.java:844)
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:     at android.os.Handler.handleCallback(Handler.java:739)
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:     at android.os.Handler.dispatchMessage(Handler.java:95)
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:     at android.os.Looper.loop(Looper.java:148)
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:     at android.app.ActivityThread.main(ActivityThread.java:5417)
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:     at java.lang.reflect.Method.invoke(Native Method)
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:     at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:726)
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:     at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:616)

另一个设备的输出缓冲区总是为空,尽管索引 >= 0。

我做错了什么?

两件事:

首先,对于视频流和播放你应该使用ExoPlayer

ExoPlayer 开箱即用地支持 HLS。 ExoPlayer 是高度模块化的,应该可以直接为多播视频创建自定义 SampleSource,或许可以重用 ffmpeg demux。

其次,看你的代码我明白了

01-25 17:23:07.242 194-194/? E/OMX-VDEC-1080P: Insufficient sized buffer given for playback, expected 671744, got 663552

可能 KEY_MAX_INPUT_SIZE 设置不正确。看看 ExoPlayer 是如何设置 KEY_MAX_INPUT_SIZE 的,即

// Round up width/height to an integer number of macroblocks.
maxPixels = ((maxWidth + 15) / 16) * ((maxHeight + 15) / 16) * 16 * 16;
minCompressionRatio = 2;
// ...
int maxInputSize = (maxPixels * 3) / (2 * minCompressionRatio);
format.setInteger(android.media.MediaFormat.KEY_MAX_INPUT_SIZE, maxInputSize);