Video played on GlSurfaceView hangs for 500ms every 16 or 17 frames

The MediaPlayer I am using in my application seems to have started hanging every 16 or 17 frames.

I am using a GLSurfaceView to render the frames played by MediaPlayer, and everything renders at a good frame rate. The application used to run fine, but for a few days now the video has been hanging for at least 500 ms every 16 or 17 frames.

The program looks like this (I am on an Xperia Z1). To make sure there was no regression in my code, I started again from the tutorial, but the lagging behaviour is still there.

Whether or not I use lock (C#'s equivalent of synchronized in Java), and whether or not I use Rendermode.WhenDirty, changes absolutely nothing in the playback.

The program is just an activity and a layout containing this custom view; no other code is involved. (By the way, the demo is not refactored to C# standards because it is a throwaway snippet, so please do not comment on refactoring.)

public class CustomVideoView : GLSurfaceView {

    VideoRender mRenderer;
    private MediaPlayer mMediaPlayer = null;
    private string filePath = "";
    private Uri uri = null;
    private Context _context;

    public CustomVideoView(Context context, IAttributeSet attrs) : base(context, attrs) { 
        _context = context;
        init ();
    }

    public CustomVideoView(Context context, IAttributeSet attrs, int defStyle) : base(context, attrs) {
        _context = context;
        init ();
    }

    public CustomVideoView(Context context) : base(context, null) {
        _context = context;
        init ();
    }


    public void init() {
        SetEGLContextClientVersion (2);

        Holder.SetFormat (Format.Translucent);
        SetEGLConfigChooser (8, 8, 8, 8, 16, 0);
        filePath = "/storage/sdcard1/download/cat3.mp4";
        mRenderer = new VideoRender (_context, mMediaPlayer, filePath, false, this);
        SetRenderer (mRenderer);
        //RenderMode = Rendermode.WhenDirty;
    }

    public override void OnResume() {
        base.OnResume ();
    }

    public override void OnPause() {
        base.OnPause ();
    }

    protected override void OnDetachedFromWindow() {
        // TODO Auto-generated method stub
        base.OnDetachedFromWindow ();

        if (mMediaPlayer != null) {
            mMediaPlayer.Stop ();
            mMediaPlayer.Release ();
        }
    }

    private class VideoRender : Java.Lang.Object, GLSurfaceView.IRenderer, SurfaceTexture.IOnFrameAvailableListener {

        private string TAG = "VideoRender";
        private const int FLOAT_SIZE_BYTES = 4;
        private const int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 3 * FLOAT_SIZE_BYTES;
        private const int TEXTURE_VERTICES_DATA_STRIDE_BYTES = 2 * FLOAT_SIZE_BYTES;
        private const int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
        private const int TRIANGLE_VERTICES_DATA_UV_OFFSET = 0;
        private float[] mTriangleVerticesData = {
            -1.0f, -1.0f, 0,
             1.0f, -1.0f, 0,
            -1.0f,  1.0f, 0,
             1.0f,  1.0f, 0,
        };

        private float[] mTextureVerticesData = { 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f };

        private FloatBuffer mTriangleVertices;

        // extra
        private FloatBuffer mTextureVertices;

        private string mVertexShader = "uniform mat4 uMVPMatrix;\n"
            + "uniform mat4 uSTMatrix;\n" + "attribute vec4 aPosition;\n"
            + "attribute vec4 aTextureCoord;\n"
            + "varying vec2 vTextureCoord;\n" + "void main() {\n"
            + "  gl_Position = uMVPMatrix * aPosition;\n"
            + "  vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" + "}\n";

        private string mFragmentShader = "#extension GL_OES_EGL_image_external : require\n"
            + "precision mediump float;\n"
            + "varying vec2 vTextureCoord;\n"
            + "uniform samplerExternalOES sTexture;\n"
            + "void main() {\n"
            + "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n"
            + "}\n";

        private float[] mMVPMatrix = new float[16];
        private float[] mSTMatrix = new float[16];
        private float[] projectionMatrix = new float[16];

        private int mProgram;
        private int mTextureID;
        private int muMVPMatrixHandle;
        private int muSTMatrixHandle;
        private int maPositionHandle;
        private int maTextureHandle;

        private SurfaceTexture mSurface;
        private bool updateSurface = false;
        private MediaPlayer mMediaPlayer;
        private string _filePath;
        private bool _isStreaming = false;
        private Context _context;
        private CustomVideoView _customVideoView;

        private int GL_TEXTURE_EXTERNAL_OES = 0x8D65;

        public VideoRender(Context context, MediaPlayer mediaPlayer, string filePath, bool isStreaming, CustomVideoView customVideoView) {
            _customVideoView = customVideoView;
            _filePath = filePath;
            _isStreaming = isStreaming;
            _context = context;
            mMediaPlayer = mediaPlayer;

            mTriangleVertices = ByteBuffer
                .AllocateDirect(
                    mTriangleVerticesData.Length * FLOAT_SIZE_BYTES)
                .Order(ByteOrder.NativeOrder()).AsFloatBuffer();
            mTriangleVertices.Put(mTriangleVerticesData).Position(0);

            // extra
            mTextureVertices = ByteBuffer
                .AllocateDirect(mTextureVerticesData.Length * FLOAT_SIZE_BYTES)
                .Order(ByteOrder.NativeOrder()).AsFloatBuffer();

            mTextureVertices.Put(mTextureVerticesData).Position(0);

            Android.Opengl.Matrix.SetIdentityM(mSTMatrix, 0);
        }

        public void OnDrawFrame(Javax.Microedition.Khronos.Opengles.IGL10 glUnused) {

            lock (syncLock) {
                if (updateSurface) {
                    mSurface.UpdateTexImage ();
                    mSurface.GetTransformMatrix (mSTMatrix);
                    updateSurface = false;
                }
            }

            GLES20.GlClearColor (1.0f, 1.0f, 1.0f, 1.0f); // color components are clamped to [0, 1]
            GLES20.GlClear (GLES20.GlDepthBufferBit
                | GLES20.GlColorBufferBit);

            GLES20.GlUseProgram (mProgram);
            checkGlError ("glUseProgram");

            GLES20.GlActiveTexture (GLES20.GlTexture0);
            GLES20.GlBindTexture (GL_TEXTURE_EXTERNAL_OES, mTextureID);

            mTriangleVertices.Position (TRIANGLE_VERTICES_DATA_POS_OFFSET);
            GLES20.GlVertexAttribPointer (maPositionHandle, 3, GLES20.GlFloat,
                false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES,
                mTriangleVertices);
            checkGlError ("glVertexAttribPointer maPosition");
            GLES20.GlEnableVertexAttribArray (maPositionHandle);
            checkGlError ("glEnableVertexAttribArray maPositionHandle");

            mTextureVertices.Position (TRIANGLE_VERTICES_DATA_UV_OFFSET);
            GLES20.GlVertexAttribPointer (maTextureHandle, 2, GLES20.GlFloat,
                false, TEXTURE_VERTICES_DATA_STRIDE_BYTES, mTextureVertices);

            checkGlError ("glVertexAttribPointer maTextureHandle");
            GLES20.GlEnableVertexAttribArray (maTextureHandle);
            checkGlError ("glEnableVertexAttribArray maTextureHandle");

            Android.Opengl.Matrix.SetIdentityM (mMVPMatrix, 0);

            GLES20.GlUniformMatrix4fv (muMVPMatrixHandle, 1, false, mMVPMatrix,
                0);
            GLES20.GlUniformMatrix4fv (muSTMatrixHandle, 1, false, mSTMatrix, 0);

            GLES20.GlDrawArrays (GLES20.GlTriangleStrip, 0, 4);
            checkGlError ("glDrawArrays");


            GLES20.GlFinish ();

        }

        public void OnSurfaceChanged(Javax.Microedition.Khronos.Opengles.IGL10 glUnused, int width, int height) {

            GLES20.GlViewport (0, 0, width, height);

            Android.Opengl.Matrix.FrustumM (projectionMatrix, 0, -1.0f, 1.0f, -1.0f, 1.0f,
                1.0f, 10.0f);

        }


        public void OnSurfaceCreated(Javax.Microedition.Khronos.Opengles.IGL10 gl,Javax.Microedition.Khronos.Egl.EGLConfig config) {

            mProgram = createProgram (mVertexShader, mFragmentShader);
            if (mProgram == 0) {
                return;
            }
            maPositionHandle = GLES20
                .GlGetAttribLocation (mProgram, "aPosition");
            checkGlError ("glGetAttribLocation aPosition");
            if (maPositionHandle == -1) {
                throw new RuntimeException (
                    "Could not get attrib location for aPosition");
            }
            maTextureHandle = GLES20.GlGetAttribLocation (mProgram,
                "aTextureCoord");
            checkGlError ("glGetAttribLocation aTextureCoord");
            if (maTextureHandle == -1) {
                throw new RuntimeException (
                    "Could not get attrib location for aTextureCoord");
            }

            muMVPMatrixHandle = GLES20.GlGetUniformLocation (mProgram,
                "uMVPMatrix");
            checkGlError ("glGetUniformLocation uMVPMatrix");
            if (muMVPMatrixHandle == -1) {
                throw new RuntimeException (
                    "Could not get attrib location for uMVPMatrix");
            }

            muSTMatrixHandle = GLES20.GlGetUniformLocation (mProgram,
                "uSTMatrix");
            checkGlError ("glGetUniformLocation uSTMatrix");
            if (muSTMatrixHandle == -1) {
                throw new RuntimeException (
                    "Could not get attrib location for uSTMatrix");
            }

            int[] textures = new int[1];
            GLES20.GlGenTextures (1, textures, 0);

            mTextureID = textures [0];
            GLES20.GlBindTexture (GL_TEXTURE_EXTERNAL_OES, mTextureID);
            checkGlError ("glBindTexture mTextureID");

            GLES20.GlTexParameterf (GL_TEXTURE_EXTERNAL_OES,
                GLES20.GlTextureMinFilter, GLES20.GlNearest);
            GLES20.GlTexParameterf (GL_TEXTURE_EXTERNAL_OES,
                GLES20.GlTextureMagFilter, GLES20.GlLinear);

            mSurface = new SurfaceTexture (mTextureID);
            // mSurface.SetOnFrameAvailableListener (this);
            mSurface.FrameAvailable += (object sender, SurfaceTexture.FrameAvailableEventArgs e) => {
                OnFrameAvailable(e.SurfaceTexture);
            };
            Surface surface = new Surface (mSurface);

            mMediaPlayer = new MediaPlayer ();

            if (System.IO.File.Exists(_filePath)) {
                try {
                    if (!_isStreaming) {
                        mMediaPlayer.SetDataSource (_filePath); 
                    } else {
                        throw new System.NotImplementedException();
                        //mMediaPlayer.SetDataSource (_context, new Uri.Builder().AppendPath(_filePath));
                    }

                } catch (IllegalArgumentException e) {
                    // TODO Auto-generated catch block
                    e.PrintStackTrace ();
                } catch (SecurityException e) {
                    // TODO Auto-generated catch block
                    e.PrintStackTrace ();
                } catch (IllegalStateException e) {
                    // TODO Auto-generated catch block
                    e.PrintStackTrace ();
                } catch (IOException e) {
                    // TODO Auto-generated catch block
                    e.PrintStackTrace ();
                }
            }

            mMediaPlayer.SetSurface (surface);
            surface.Release ();

            try {
                mMediaPlayer.Prepare ();
            } catch (IOException t) {
                Log.Error (TAG, "media player prepare failed");
            }

            lock (syncLock) {
                updateSurface = false;
            }

            mMediaPlayer.Start ();

        }

        private readonly object syncLock = new object ();

        public void OnFrameAvailable(SurfaceTexture surface) {
            lock (syncLock) {
                updateSurface = true;               
            }

            _customVideoView.RequestRender ();
        }

        private int loadShader(int shaderType, string source) {
            int shader = GLES20.GlCreateShader (shaderType);
            if (shader != 0) {
                GLES20.GlShaderSource (shader, source);
                GLES20.GlCompileShader (shader);
                int[] compiled = new int[1];
                GLES20.GlGetShaderiv (shader, GLES20.GlCompileStatus, compiled, 0);
                if (compiled [0] == 0) {
                    Log.Error (TAG, "Could not compile shader " + shaderType + ":");
                    Log.Error (TAG, GLES20.GlGetShaderInfoLog (shader));
                    GLES20.GlDeleteShader (shader);
                    shader = 0;
                }
            }
            return shader;
        }

        private int createProgram(string vertexSource, string fragmentSource) {
            int vertexShader = loadShader (GLES20.GlVertexShader, vertexSource);
            if (vertexShader == 0) {
                return 0;
            }
            int pixelShader = loadShader (GLES20.GlFragmentShader,
                fragmentSource);

            if (pixelShader == 0) {
                return 0;
            }

            int program = GLES20.GlCreateProgram ();
            if (program != 0) {
                GLES20.GlAttachShader (program, vertexShader);
                checkGlError ("glAttachShader");
                GLES20.GlAttachShader (program, pixelShader);
                checkGlError ("glAttachShader");
                GLES20.GlLinkProgram (program);
                int[] linkStatus = new int[1];
                GLES20.GlGetProgramiv (program, GLES20.GlLinkStatus,
                    linkStatus, 0);
                if (linkStatus [0] != GLES20.GlTrue) {
                    Log.Error (TAG, "Could not link program: ");
                    Log.Error (TAG, GLES20.GlGetProgramInfoLog (program));
                    GLES20.GlDeleteProgram (program);
                    program = 0;
                }
            }
            return program;
        }

        private void checkGlError(string op) {
            int error;
            while ((error = GLES20.GlGetError ()) != GLES20.GlNoError) {
                Log.Error (TAG, op + ": glError " + error);
                throw new RuntimeException (op + ": glError " + error);
            }
        }
    }
}

While debugging, here is the computed overall FPS, how long a frame should last at that frame rate, and how long it actually lasted. You can see the length of the stall, which ends up at around 320 ms (a sketch of how this timing can be measured follows the log).

[fps] 15.0627 - norm=66 - cur=44.712
[fps] 15.09347 - norm=66 - cur=45.017
[fps] 15.12472 - norm=66 - cur=44.437
[fps] 15.17346 - norm=65 - cur=32.413
[fps] 15.20476 - norm=65 - cur=44.01
[fps] 15.2337 - norm=65 - cur=45.506
[fps] 15.26154 - norm=65 - cur=46.177
[fps] 14.8815 - norm=67 - cur=334.503
[fps] 14.93206 - norm=66 - cur=29.971
[fps] 14.96286 - norm=66 - cur=44.071
[fps] 14.99153 - norm=66 - cur=45.445
[fps] 15.03538 - norm=66 - cur=34.213
[fps] 15.0695 - norm=66 - cur=41.142
[fps] 15.09754 - norm=66 - cur=44.468
[fps] 15.12501 - norm=66 - cur=45.628
[fps] 15.17139 - norm=65 - cur=31.558
[fps] 15.20057 - norm=65 - cur=44.01
[fps] 15.22785 - norm=65 - cur=45.231
[fps] 15.25471 - norm=65 - cur=45.384
[fps] 15.30203 - norm=65 - cur=30.093
[fps] 15.32664 - norm=65 - cur=46.636
[fps] 15.35203 - norm=65 - cur=45.933
[fps] 15.37996 - norm=65 - cur=44.041
[fps] 15.42686 - norm=64 - cur=29.3
[fps] 15.47278 - norm=64 - cur=30.001
[fps] 15.49799 - norm=64 - cur=45.384
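
The timing code that produced these numbers is not shown in the question; a minimal sketch of how such per-frame measurements could be collected from OnDrawFrame(), using System.Diagnostics.Stopwatch (the FrameTimer class and its field names are purely illustrative), might look like this:

using System.Diagnostics;
using Android.Util;

// Hypothetical helper: call Tick() once per OnDrawFrame() to log the overall
// FPS, the expected frame duration at that rate, and the measured duration
// of the current frame, in the same shape as the log above.
class FrameTimer {
    const string TAG = "fps";
    readonly Stopwatch _watch = Stopwatch.StartNew();
    double _lastMs;      // timestamp of the previous frame
    long _frameCount;    // frames rendered so far

    public void Tick() {
        double nowMs = _watch.Elapsed.TotalMilliseconds;
        if (_frameCount > 0) {
            double curMs = nowMs - _lastMs;               // actual duration of this frame
            double fps = _frameCount / (nowMs / 1000.0);  // overall average FPS
            double normMs = 1000.0 / fps;                 // expected duration at that FPS
            Log.Debug(TAG, $"{fps:F5} - norm={normMs:F0} - cur={curMs:F3}");
        }
        _lastMs = nowMs;
        _frameCount++;
    }
}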

[edit]

Restarting the phone solved the problem. So either the phone itself was short on RAM or storage, or I did not write the sample properly and the CPU was being overworked.

It may be easier to use a plain SurfaceView and handle the EGL setup and the thread management yourself. If all you are doing is blitting video frames, there is not much value in having a dedicated render thread. (See Grafika for examples.)
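
A rough sketch of what that EGL setup could look like in C#, assuming the Xamarin Android.Opengl.EGL14 bindings (the binding member names below are assumptions based on the usual Xamarin naming, and EglCore itself is just an illustrative name; Grafika's Java sources remain the authoritative reference):

using Android.Opengl;
using Android.Views;

// Minimal EGL setup for drawing onto a plain SurfaceView from your own
// thread, instead of letting GLSurfaceView manage it. Error handling omitted.
class EglCore {
    EGLDisplay _display;
    EGLContext _context;
    EGLSurface _surface;

    public void Setup(ISurfaceHolder holder) {
        _display = EGL14.EglGetDisplay(EGL14.EglDefaultDisplay);
        int[] version = new int[2];
        EGL14.EglInitialize(_display, version, 0, version, 1);

        // 8-8-8-8 config with an ES 2.0 context, matching the GLSurfaceView code above.
        int[] configAttribs = {
            EGL14.EglRedSize, 8, EGL14.EglGreenSize, 8,
            EGL14.EglBlueSize, 8, EGL14.EglAlphaSize, 8,
            EGL14.EglRenderableType, EGL14.EglOpenglEs2Bit,
            EGL14.EglNone
        };
        EGLConfig[] configs = new EGLConfig[1];
        int[] numConfigs = new int[1];
        EGL14.EglChooseConfig(_display, configAttribs, 0, configs, 0, 1, numConfigs, 0);

        int[] contextAttribs = { EGL14.EglContextClientVersion, 2, EGL14.EglNone };
        _context = EGL14.EglCreateContext(_display, configs[0], EGL14.EglNoContext, contextAttribs, 0);

        int[] surfaceAttribs = { EGL14.EglNone };
        _surface = EGL14.EglCreateWindowSurface(_display, configs[0], holder.Surface, surfaceAttribs, 0);
        EGL14.EglMakeCurrent(_display, _surface, _surface, _context);
    }

    // Call after each frame is drawn; this replaces the swap that
    // GLSurfaceView would otherwise perform for you.
    public void Swap() {
        EGL14.EglSwapBuffers(_display, _surface);
    }
}

With this, the frame-available callback from the SurfaceTexture can simply signal your render thread, which calls UpdateTexImage(), draws the quad, and then Swap().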

If you stick with GLSurfaceView, you don't want or need the glFinish() call at the end of onDrawFrame(). It is a synchronous call that stalls your thread until GLES has finished drawing. GLSurfaceView calls eglSwapBuffers() for you after onDrawFrame() returns.
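
For illustration, the end of OnDrawFrame() in the code above would then simply be:

GLES20.GlDrawArrays (GLES20.GlTriangleStrip, 0, 4);
checkGlError ("glDrawArrays");
// No GLES20.GlFinish() here: GLSurfaceView calls eglSwapBuffers()
// once OnDrawFrame() returns, which provides the synchronization.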

In any event, a 300 ms stall is unlikely to be caused by GLES. Either MediaPlayer is stalling, or something else in the system is waking up and eating all the available CPU. You can try systrace if you want to debug it further.