GLSurfaceView 停止调用 onDrawFrame

GLSurfaceView stops calling onDrawFrame

我正在使用 GLSurfaceView 来显示 Camera 提要,因为当我在设备上使用外部摄像头(通过 USB 插入)时,预览是镜像的,我只能借助 GLSurfaceView 来取消镜像("unmirror")。它工作得很好,但一段时间后(可能是 2 分钟或 10 分钟)视图不再显示任何更新,只是冻结。我在 public void onDrawFrame(GL10 gl) 方法中加了日志,可以看到它一直被频繁调用,直到冻结为止,之后就再也没有被调用。下面是它冻结时我得到的仅有的日志:

11-29 11:01:39.761 2431-2506/com.xxx D/MyGLSurfaceView: #onDrawFrame
11-29 11:01:58.154 2431-2446/com.xxx I/art: Background partial concurrent mark sweep GC freed 9(288B) AllocSpace objects, 3(3MB) LOS objects, 39% free, 6MB/10MB, paused 6.019ms total 25.478ms
11-29 11:02:05.265 2431-2446/com.xxx I/art: Background partial concurrent mark sweep GC freed 6(192B) AllocSpace objects, 5(6MB) LOS objects, 39% free, 7MB/12MB, paused 5.228ms total 49.221ms
11-29 11:02:36.496 2431-2446/com.xxx I/art: Background partial concurrent mark sweep GC freed 11(336B) AllocSpace objects, 3(3MB) LOS objects, 39% free, 6MB/10MB, paused 7.752ms total 28.387ms
11-29 11:03:10.488 2431-2446/com.xxx I/art: Background partial concurrent mark sweep GC freed 242(12KB) AllocSpace objects, 3(3MB) LOS objects, 39% free, 7MB/12MB, paused 12.263ms total 41.651ms
11-29 11:03:24.016 2431-2446/com.xxx I/art: Background partial concurrent mark sweep GC freed 18(720B) AllocSpace objects, 5(6MB) LOS objects, 39% free, 6MB/10MB, paused 7.029ms total 26.553ms
11-29 11:03:58.696 2431-2446/com.xxx I/art: Background partial concurrent mark sweep GC freed 18(640B) AllocSpace objects, 3(3MB) LOS objects, 39% free, 6MB/10MB, paused 6.227ms total 27.462ms
11-29 11:04:44.049 2431-2446/com.xxx I/art: Background sticky concurrent mark sweep GC freed 234(12KB) AllocSpace objects, 3(3MB) LOS objects, 29% free, 8MB/12MB, paused 8.683ms total 17.247ms

我在某处获取了一些示例代码来使用 GLSurfaceView,因为我不知道它是如何工作的,代码是这样的:

import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.util.Log;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

/**
 * A GLSurfaceView that renders the camera preview onto a full-screen quad via an
 * external OES texture, allowing the image to be flipped/un-mirrored in the shader.
 *
 * <p>Threading: {@link #onFrameAvailable(SurfaceTexture)} is invoked on the camera's
 * thread while {@link #onDrawFrame(GL10)} runs on the GL thread; the
 * {@code mUpdateTexture} flag is therefore always accessed under {@code synchronized (this)}.
 */
class MyGLSurfaceView extends GLSurfaceView implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener, Camera.PreviewCallback {
  private static final String TAG = MyGLSurfaceView.class.getSimpleName();

  // Vertex shader: passes the quad position straight through and forwards the
  // texture coordinate to the fragment stage.
  private static final String vss = "attribute vec2 vPosition;\n" +
    "attribute vec2 vTexCoord;\n" +
    "varying vec2 texCoord;\n" +
    "void main() {\n" +
    "  texCoord = vTexCoord;\n" +
    "  gl_Position = vec4 ( vPosition.x, vPosition.y, 0.0, 1.0 );\n" +
    "}";

  // Fragment shader: samples the camera frame from the external OES texture.
  private static final String fss = "#extension GL_OES_EGL_image_external : require\n" +
    "precision mediump float;\n" +
    "uniform samplerExternalOES sTexture;\n" +
    "varying vec2 texCoord;\n" +
    "void main() {\n" +
    "  gl_FragColor = texture2D(sTexture,texCoord);\n" +
    "}";

  private SurfaceTexture mSurfaceTexture;
  // Guarded by "this": written on the camera thread, consumed on the GL thread.
  private boolean mUpdateTexture = false;
  private FloatBuffer mVertex;
  private FloatBuffer mTextCoordinates;
  private int[] mTexture;
  private int hProgram;

  // Shader handles, resolved once after linking instead of once per frame.
  private int mTextureUniform;
  private int mPositionAttrib;
  private int mTexCoordAttrib;

  private Camera mCamera;

  public MyGLSurfaceView(Context context) {
    this(context, null);
  }

  public MyGLSurfaceView(Context context, AttributeSet attrs) {
    super(context, attrs);
    this.init();
  }

  /**
   * Configures the GL context and pre-builds the static quad geometry.
   * RENDERMODE_WHEN_DIRTY means frames are only drawn when {@link #requestRender()}
   * is called from {@link #onFrameAvailable(SurfaceTexture)}.
   */
  private void init() {
    setEGLContextClientVersion(2);
    setRenderer(this);
    setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);

    // Full-screen quad as a triangle strip: BL, BR, TL, TR.
    float[] vtmp = {-1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f};
    this.mVertex = ByteBuffer.allocateDirect(8 * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
    this.mVertex.put(vtmp);
    this.mVertex.position(0);

    // Texture coordinates chosen to mirror the preview horizontally.
    float[] ttmp = {1.0f, 1.0f, 0.0f, 1.0f, 1.0f, 0.0f, 0.0f, 0.0f};
    this.mTextCoordinates = ByteBuffer.allocateDirect(8 * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
    this.mTextCoordinates.put(ttmp);
    this.mTextCoordinates.position(0);
  }

  @Override
  public void onResume() {
    Log.d(TAG, "#onResume");
    super.onResume();
  }

  @Override
  public void onPause() {
    Log.d(TAG, "#onPause");
    super.onPause();
    this.releaseCamera();
  }

  /**
   * Called on the camera's thread when a new preview frame is queued.
   * FIX: the flag is now set under the same lock {@link #onDrawFrame(GL10)} reads it
   * under — previously it was written unsynchronized, a data race with no visibility
   * guarantee across threads.
   */
  @Override
  public void onFrameAvailable(SurfaceTexture surfaceTexture) {
    synchronized (this) {
      this.mUpdateTexture = true;
    }
    this.requestRender();
  }

  @Override
  public void onPreviewFrame(byte[] data, Camera camera) {
    // process bytes here
    // NOTE(review): this runs on the thread Camera.open() was called from (here: the
    // GL thread). Heavy work here stalls rendering — move the camera and its
    // callbacks to a background HandlerThread.
  }

  @Override
  public void onSurfaceCreated(GL10 gl, EGLConfig config) {
    Log.d(TAG, "#onSurfaceCreated");

    // FIX: onSurfaceCreated can run again after an EGL context loss; release the
    // previous SurfaceTexture instead of leaking it.
    if (this.mSurfaceTexture != null) {
      this.mSurfaceTexture.setOnFrameAvailableListener(null);
      this.mSurfaceTexture.release();
    }

    this.initTexture();
    this.mSurfaceTexture = new SurfaceTexture(mTexture[0]);
    this.mSurfaceTexture.setOnFrameAvailableListener(this);
    // (removed a dead getTransformMatrix(new float[16]) call whose result was discarded)

    try {
      this.openCamera();
    }
    catch (IOException e) {
      Log.e(TAG, "Failed to open camera", e);
      this.releaseCamera();
    }

    GLES20.glClearColor(1.0f, 1.0f, 0.0f, 1.0f);
  }

  @Override
  public void onSurfaceChanged(GL10 gl, int width, int height) {
    Log.d(TAG, "#onSurfaceChanged");

    GLES20.glViewport(0, 0, width, height);

    // FIX: guard against NPE — mCamera is null when openCamera() failed.
    if (this.mCamera != null) {
      this.mCamera.startPreview();
      this.mCamera.setPreviewCallback(this);
    }

    this.hProgram = loadShader();
    // Resolve shader handles once here rather than on every frame.
    this.mTextureUniform = GLES20.glGetUniformLocation(this.hProgram, "sTexture");
    this.mPositionAttrib = GLES20.glGetAttribLocation(this.hProgram, "vPosition");
    this.mTexCoordAttrib = GLES20.glGetAttribLocation(this.hProgram, "vTexCoord");
  }

  @Override
  public void onDrawFrame(GL10 gl) {
    Log.d(TAG, "#onDrawFrame");

    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

    // Latch the newest camera frame into the OES texture; updateTexImage must be
    // called on the GL thread, which onDrawFrame guarantees.
    synchronized (this) {
      if (this.mUpdateTexture) {
        this.mSurfaceTexture.updateTexImage();
        this.mUpdateTexture = false;
      }
    }

    GLES20.glUseProgram(this.hProgram);

    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, this.mTexture[0]);
    GLES20.glUniform1i(this.mTextureUniform, 0);

    // Stride is 2 floats (x, y) per vertex = 8 bytes.
    GLES20.glVertexAttribPointer(this.mPositionAttrib, 2, GLES20.GL_FLOAT, false, 4 * 2, this.mVertex);
    GLES20.glEnableVertexAttribArray(this.mPositionAttrib);

    GLES20.glVertexAttribPointer(this.mTexCoordAttrib, 2, GLES20.GL_FLOAT, false, 4 * 2, this.mTextCoordinates);
    GLES20.glEnableVertexAttribArray(this.mTexCoordAttrib);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

    GLES20.glFlush();
  }

  /** Creates the external OES texture the camera preview is streamed into. */
  private void initTexture() {
    this.mTexture = new int[1];
    GLES20.glGenTextures(1, this.mTexture, 0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, this.mTexture[0]);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
  }

  /**
   * Compiles both shaders and links the program.
   *
   * @return the linked program handle (0-linked programs are logged, not thrown,
   *         matching the original best-effort behavior)
   */
  private static int loadShader() {
    int vshader = compileShader(GLES20.GL_VERTEX_SHADER, vss, "vshader");
    int fshader = compileShader(GLES20.GL_FRAGMENT_SHADER, fss, "fshader");

    int program = GLES20.glCreateProgram();
    if (vshader != 0) {
      GLES20.glAttachShader(program, vshader);
    }
    if (fshader != 0) {
      GLES20.glAttachShader(program, fshader);
    }
    GLES20.glLinkProgram(program);

    // FIX: the original never checked link status, so a broken program failed silently.
    int[] linked = new int[1];
    GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linked, 0);
    if (linked[0] == 0) {
      Log.e("Shader", "Could not link program: " + GLES20.glGetProgramInfoLog(program));
    }

    // FIX: shader objects are no longer needed once linked; deleting them avoids a
    // GL object leak (they are only flagged for deletion while still attached).
    if (vshader != 0) {
      GLES20.glDeleteShader(vshader);
    }
    if (fshader != 0) {
      GLES20.glDeleteShader(fshader);
    }

    return program;
  }

  /**
   * Compiles a single shader, logging the info log on failure.
   *
   * @return the shader handle, or 0 if compilation failed
   */
  private static int compileShader(int type, String source, String label) {
    int shader = GLES20.glCreateShader(type);
    GLES20.glShaderSource(shader, source);
    GLES20.glCompileShader(shader);
    int[] compiled = new int[1];
    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
    if (compiled[0] == 0) {
      Log.e("Shader", "Could not compile " + label);
      Log.v("Shader", "Could not compile " + label + ":" + GLES20.glGetShaderInfoLog(shader));
      GLES20.glDeleteShader(shader);
      return 0;
    }
    return shader;
  }

  /**
   * Opens the default camera and routes its preview into {@code mSurfaceTexture}.
   *
   * @throws IOException if the preview texture cannot be attached
   */
  private void openCamera() throws IOException {
    this.mCamera = Camera.open();

    final Camera.Parameters parameters = this.mCamera.getParameters();
    parameters.setPreviewSize(1280, 720);
    this.mCamera.setParameters(parameters);

    this.mCamera.setPreviewTexture(this.mSurfaceTexture);
  }

  /** Stops and releases the camera; safe to call repeatedly or when already released. */
  private void releaseCamera() {
    synchronized (this) {
      try {
        if (this.mCamera != null) {
          this.mCamera.stopPreview();
          this.mCamera.setPreviewCallback(null);
          this.mCamera.release();
          this.mCamera = null;
        }
      }
      catch (Exception e) {
        // FIX: e.getMessage() can be null, which makes Log.e itself throw;
        // log the throwable instead.
        Log.e(TAG, "Failed to release camera", e);
      }
    }
  }
}

所以,看起来核心问题是图像处理(从 onPreviewFrame() 回调启动)导致主线程拥塞。

在主线程上运行摄像头是一种不好的做法,并且会在不同的设备上造成许多奇怪的问题。新的 camera2 API 可以正确处理这个问题;但如果出于任何原因你必须使用已弃用(deprecated)的旧相机 API,请将 Camera.open() 以及所有回调都移动到一个后台 HandlerThread 上。

另一个优化:切换到 setPreviewCallbackWithBuffer() 以减少相机缓冲区分配和垃圾收集成本。