如何将 Buffer[] 绘制到 Android 上的 TextureView?

How to draw Buffer[] to a TextureView on Android?

我正在使用 JavaCV 的 FFmpegFrameGrabber 从视频文件中读取帧。FFmpegFrameGrabber 返回一个 Frame 对象,其中主要包含一个 Buffer[],用来保存视频帧的图像像素。

因为性能是我的首要任务,所以我想用OpenGL ES直接显示这个Buffer[]而不是把它转换成Bitmap

要显示的视图只占屏幕的不到一半。根据 OpenGL ES 文档:

Developers who want to incorporate OpenGL ES graphics in a small portion of their layouts should take a look at TextureView.

所以我想 TextureView 是这个任务的正确选择。但是我还没有找到太多关于这个的资源(其中大部分是相机预览示例)。

请问如何将Buffer[]画成TextureView?如果这不是最有效的方法,我愿意尝试您的替代方法。


更新: 所以目前我的设置是这样的:

在我的 VideoActivity 中,我重复提取包含 ByteBuffer 的视频 Frame,然后将其发送到我的 MyGLRenderer2 以转换为 OpenGLES 的纹理:

...
// Obtain the GLSurfaceView from the layout and request an OpenGL ES 2.0 context.
mGLSurfaceView = (GLSurfaceView)findViewById(R.id.gl_surface_view);
mGLSurfaceView.setEGLContextClientVersion(2);
// Install the custom renderer that uploads each grabbed frame as a GL texture.
mRenderer = new MyGLRenderer2(this);
mGLSurfaceView.setRenderer(mRenderer);
// Continuous mode: onDrawFrame() fires on every vsync, not only when requestRender() is called.
mGLSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
...

private void grabCurrentFrame(final long currentPosition){
    if(mCanSeek){
        new AsyncTask(){

            @Override
            protected void onPreExecute() {
                super.onPreExecute();
                mCanSeek = false;
            }

            @Override
            protected Object doInBackground(Object[] params) {
                try {
                    Frame frame = mGrabber.grabImage();
                    setCurrentFrame((ByteBuffer)frame.image[0]);
                }
                catch (Exception e) {
                    e.printStackTrace();
                }
                return null;
            }

            @Override
            protected void onPostExecute(Object o) {
                super.onPostExecute(o);
                mCanSeek = true;
                }
            }
        }.execute();
    }
}

// Forwards the grabbed frame's pixel buffer to the GL renderer.
// NOTE(review): this runs on the AsyncTask background thread while the
// renderer reads the buffer on the GL thread — there is no synchronization
// on that handoff; confirm whether a torn read of the buffer is possible.
private void setCurrentFrame(ByteBuffer buffer){
    mRenderer.setTexture(buffer);
}

MyGLRenderer2 看起来像这样:

/**
 * GLSurfaceView renderer that uploads raw frame pixels (handed over via
 * {@link #setTexture}) into a single reusable OpenGL ES 2.0 texture and
 * draws it full-frame.
 *
 * <p>Fixes over the original version:
 * <ul>
 *   <li>The texture object is generated ONCE and its storage allocated once;
 *       each frame is uploaded with {@code glTexSubImage2D}. The original
 *       called {@code glGenTextures} on every {@code onDrawFrame} without
 *       ever deleting the previous texture, leaking GPU memory until the
 *       app crashed.</li>
 *   <li>{@code onDrawFrame} skips drawing until a frame has actually
 *       arrived, instead of passing a null buffer to GL.</li>
 *   <li>{@code FullFrameTexture} is created in {@code onSurfaceCreated}
 *       rather than being re-created on every surface-size change.</li>
 * </ul>
 *
 * <p>NOTE(review): the frame is uploaded as GL_RGB. JavaCV/FFmpeg frames are
 * frequently BGR-ordered, which would explain the "wrong colors" symptom —
 * confirm the grabber's pixel format (or swizzle in the fragment shader).
 */
public class MyGLRenderer2 implements GLSurfaceView.Renderer {
    private static final String TAG = "MyGLRenderer2";

    // Source video dimensions. NOTE(review): hard-coded in the original;
    // confirm the grabber really produces 1280x720 frames, ideally pass
    // these in instead.
    private static final int FRAME_WIDTH = 1280;
    private static final int FRAME_HEIGHT = 720;

    private FullFrameTexture mFullFrameTexture;

    // Latest frame pixels handed over from the worker thread; volatile so the
    // GL thread observes the most recent reference.
    private volatile ByteBuffer mCurrentBuffer;

    private final int[] textureHandles = new int[1];
    private int textureHandle;
    // True once the texture object exists and its storage has been allocated.
    private boolean mTextureAllocated = false;

    public MyGLRenderer2(Context context){
        super();
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        // GL context is (re)created here, so all GL objects must be rebuilt.
        GLES20.glClearColor(0, 0, 0, 1);
        mFullFrameTexture = new FullFrameTexture();
        mTextureAllocated = false; // any previous texture died with the old context
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        ByteBuffer buffer = mCurrentBuffer;
        if (buffer == null) {
            // No frame has been delivered yet — nothing to draw.
            return;
        }
        // Does not need power-of-2 dimensions since wrap mode is GL_CLAMP_TO_EDGE.
        createFrameTexture(buffer, FRAME_WIDTH, FRAME_HEIGHT, GLES20.GL_RGB);
        mFullFrameTexture.draw(textureHandle);
    }

    /**
     * Publishes a new frame for the GL thread to upload on the next draw.
     * May be called from any thread; duplicate() gives this renderer an
     * independent position/limit so the caller can keep using its buffer.
     */
    public void setTexture(ByteBuffer buffer){
        ByteBuffer dup = buffer.duplicate();
        dup.position(0);
        mCurrentBuffer = dup;
    }

    /**
     * Uploads {@code data} into the (single, reused) frame texture.
     * The texture object and its storage are created lazily on first use;
     * subsequent calls only overwrite the pixels via glTexSubImage2D.
     *
     * <p>NOTE(review): assumes width/height/format never change between
     * frames; if they can, the storage must be reallocated.
     *
     * @param data   pixel data, read from position 0
     * @param width  frame width in pixels
     * @param height frame height in pixels
     * @param format GL pixel format (e.g. GLES20.GL_RGB)
     */
    public void createFrameTexture(ByteBuffer data, int width, int height, int format) {
        if (!mTextureAllocated) {
            GLES20.glGenTextures(1, textureHandles, 0);
            textureHandle = textureHandles[0];
            GlUtil.checkGlError("glGenTextures");

            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle);

            // Linear filtering for scaling; clamp so non-POT sizes are legal.
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
            GlUtil.checkGlError("glTexParameteri");

            // Allocate storage once (null data); frames are streamed in below.
            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, /*level*/ 0, format,
                    width, height, /*border*/ 0, format, GLES20.GL_UNSIGNED_BYTE, null);
            GlUtil.checkGlError("glTexImage2D");

            mTextureAllocated = true;
        } else {
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle);
        }

        data.position(0);
        // Overwrite the existing storage — no per-frame allocation, no leak.
        GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, /*level*/ 0, /*xoffset*/ 0, /*yoffset*/ 0,
                width, height, format, GLES20.GL_UNSIGNED_BYTE, data);
        GlUtil.checkGlError("glTexSubImage2D");
    }

}

FullFrameTexture 看起来像这样:

/**
 * Draws a 2D texture as a full-viewport quad with a simple pass-through
 * shader pair. The vertex shader derives texture coordinates from clip-space
 * positions, optionally rotated/transformed by two uniform matrices.
 *
 * <p>Fix over the original: the two matrix uniforms were looked up with
 * {@code getAttributeLocation}, which queries vertex-attribute slots, not
 * uniform locations — so {@code glUniformMatrix4fv} never targeted the real
 * uniforms and the matrices were never set. They are now fetched with the
 * uniform-location query.
 *
 * <p>NOTE(review): assumes {@code ShaderProgram} exposes
 * {@code getUniformLocation} (wrapping glGetUniformLocation) — confirm.
 */
public class FullFrameTexture {
    private static final String VERTEXT_SHADER =
        "uniform mat4 uOrientationM;\n" +
            "uniform mat4 uTransformM;\n" +
            "attribute vec2 aPosition;\n" +
            "varying vec2 vTextureCoord;\n" +
            "void main() {\n" +
            // Map clip space [-1,1] to texture space [0,1] after orientation,
            // then apply the caller-supplied transform.
            "gl_Position = vec4(aPosition, 0.0, 1.0);\n" +
            "vTextureCoord = (uTransformM * ((uOrientationM * gl_Position + 1.0) * 0.5)).xy;" +
            "}";

    private static final String FRAGMENT_SHADER =
        "precision mediump float;\n" +
            "uniform sampler2D sTexture;\n" +
            "varying vec2 vTextureCoord;\n" +
            "void main() {\n" +
            "gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
            "}";

    // Triangle-strip covering the whole viewport: TL, BL, TR, BR.
    private final byte[] FULL_QUAD_COORDINATES = {-1, 1, -1, -1, 1, 1, 1, -1};

    private ShaderProgram shader;

    private ByteBuffer fullQuadVertices;

    private final float[] orientationMatrix = new float[16];
    private final float[] transformMatrix = new float[16];

    public FullFrameTexture() {
        // Must be called on a thread with a current GL context.
        shader = new ShaderProgram(EglUtil.getInstance());
        shader.create(VERTEXT_SHADER, FRAGMENT_SHADER);

        // 4 vertices x 2 byte-sized components.
        fullQuadVertices = ByteBuffer.allocateDirect(4 * 2);
        fullQuadVertices.put(FULL_QUAD_COORDINATES).position(0);

        // 0-degree rotation about the Z axis == identity orientation.
        Matrix.setRotateM(orientationMatrix, 0, 0, 0f, 0f, 1f);
        Matrix.setIdentityM(transformMatrix, 0);
    }

    /**
     * Drops references so this object can be GC'd.
     * NOTE(review): this does not delete the GL program — if ShaderProgram
     * owns one, it should expose a delete/release that is called here.
     */
    public void release() {
        shader = null;
        fullQuadVertices = null;
    }

    /**
     * Renders {@code textureId} as a full-screen quad.
     * Caller must invoke this on the GL thread with a valid context.
     */
    public void draw(int textureId) {
        shader.use();

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);

        // Uniforms must be fetched with the uniform-location query, not the
        // attribute one (the original bug: matrices were never uploaded).
        int uOrientationM = shader.getUniformLocation("uOrientationM");
        int uTransformM = shader.getUniformLocation("uTransformM");

        GLES20.glUniformMatrix4fv(uOrientationM, 1, false, orientationMatrix, 0);
        GLES20.glUniformMatrix4fv(uTransformM, 1, false, transformMatrix, 0);

        // Trigger actual rendering.
        renderQuad(shader.getAttributeLocation("aPosition"));

        shader.unUse();
    }

    // Feeds the quad's byte coordinates to aPosition and issues the draw call.
    private void renderQuad(int aPosition) {
        GLES20.glVertexAttribPointer(aPosition, 2, GLES20.GL_BYTE, false, 0, fullQuadVertices);
        GLES20.glEnableVertexAttribArray(aPosition);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }

}

现在我可以在应用程序崩溃前的很短时间内显示出一些帧(而且颜色也不正确)。

实现您的需求的最有效方法,是把像素数据转换为一个 OpenGL ES 纹理,然后在 TextureView 上渲染它。要使用的函数是 glTexImage2D()。

你可以在Grafika中找到一些示例,它使用函数上传一些生成的纹理。看看createImageTexture()。如果您的应用中还没有 GLES 代码,Grafika 的 gles 包可能会有用。

FWIW,将视频帧直接解码到从 TextureView 的 SurfaceTexture 创建的 Surface 会更有效,但我不知道 JavaCV 是否支持。

编辑: 如果您不介意使用 NDK,另一种方法是使用 ANativeWindow。从 TextureView 的 SurfaceTexture 创建一个 Surface,把它传给本机代码,然后调用 ANativeWindow_fromSurface() 获取 ANativeWindow。使用 ANativeWindow_setBuffersGeometry() 设置大小和颜色格式。接着锁定缓冲区、复制像素、再解锁缓冲区以提交(post)它。我不认为这在内部需要额外的数据拷贝,并且它可能比 glTexImage2D() 方法有一些优势。