Render 3D objects into the camera view

I'm trying to develop a mobile Cardboard app that renders 3D objects into the camera view (a sort of AR).

I used this project and tried to render a simple cube on top of the camera preview: https://github.com/Sveder/CardboardPassthrough/

I couldn't get it to work; the background is always black, or the app crashes.

I'd appreciate any help or suggestions.

Thanks

This is what I have.

Original CardboardPassthrough

This is the working code, with the cube:

import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.os.Bundle;
import android.os.Vibrator;
import android.util.Log;
import android.view.KeyEvent;
import com.google.vrtoolkit.cardboard.*;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

public class Card extends CardboardActivity implements CardboardView.StereoRenderer, SurfaceTexture.OnFrameAvailableListener {

private static final float CAMERA_Z = 0.01f;
private static final float TIME_DELTA = 0.3f;

private static final float YAW_LIMIT = 0.12f;
private static final float PITCH_LIMIT = 0.12f;

//---------------------------------------------------
private int intCurrentI = -1;
private int intCurrentI1 = -1;
//---------------------------------------------------

// We keep the light positioned just above the user at all times.
private final float[] mLightPosInWorldSpace = new float[]{0.0f, 2.0f, 0.0f, 1.0f};
private final float[] mLightPosInEyeSpace = new float[4];

private static final int COORDS_PER_VERTEX = 3;

private final WorldLayoutData DATA = new WorldLayoutData();

private FloatBuffer mCubeVertices;
private FloatBuffer mCubeColors;
private FloatBuffer mCubeFoundColors;
private FloatBuffer mCubeNormals;

private int mGlProgram;
private int mPositionParam;
private int mNormalParam;
private int mColorParam;
private int mModelViewProjectionParam;
private int mLightPosParam;
private int mModelViewParam;
private int mModelParam;
private int mIsFloorParam;

private float[] mModelCube;
private float[] mCamera;
private float[] mView;
private float[] mHeadView;
private float[] mModelViewProjection;
private float[] mModelView;

private float[] mModelCube2;

private float[] mModelFloor;

private float mObjectDistance = 12f;
private float mFloorDepth = 20f;

private Vibrator mVibrator;

private CardboardOverlayView mOverlayView;

private SurfaceTexture surface;
private Camera camera;
private static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
private final String vertexShaderCode =
        "attribute vec4 position;" +
                "attribute vec2 inputTextureCoordinate;" +
                "varying vec2 textureCoordinate;" +
                "void main()" +
                "{" +
                "gl_Position = position;" +
                "textureCoordinate = inputTextureCoordinate;" +
                "}";

private final String fragmentShaderCode =
        "#extension GL_OES_EGL_image_external : require\n" +
                "precision mediump float;" +
                "varying vec2 textureCoordinate;                            \n" +
                "uniform samplerExternalOES s_texture;               \n" +
                "void main(void) {" +
                "  gl_FragColor = texture2D( s_texture, textureCoordinate );\n" +
                //"  gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);\n" +
                "}";
private int texture;
private CardboardView cardboardView;

/**
 * Converts a raw text file, saved as a resource, into an OpenGL ES shader
 *
 * @param type  The type of shader we will be creating.
 * @param resId The resource ID of the raw text file about to be turned into a shader.
 * @return The handle of the compiled shader.
 */
private int loadGLShader(int type, int resId) {
    String code = readRawTextFile(resId);
    int shader = GLES20.glCreateShader(type);
    GLES20.glShaderSource(shader, code);
    GLES20.glCompileShader(shader);

    // Get the compilation status.
    final int[] compileStatus = new int[1];
    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);

    // If the compilation failed, delete the shader.
    if (compileStatus[0] == 0) {
        GLES20.glDeleteShader(shader);
        shader = 0;
    }

    if (shader == 0) {
        throw new RuntimeException("Error creating shader.");
    }

    return shader;
}

/**
 * Checks if we've had an error inside of OpenGL ES, and if so what that error is.
 *
 * @param func The name of the GL call to report if an error is found.
 */
private static void checkGLError(String func) {
    int error;
    while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
        throw new RuntimeException(func + ": glError " + error);
    }
}

/**
 * Sets the view to our CardboardView and initializes the transformation matrices we will use
 * to render our scene.
 *
 * @param savedInstanceState
 */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.common_ui);


    cardboardView = (CardboardView) findViewById(R.id.cardboard_view);
    /*********************/
    cardboardView.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
    /*********************/
    cardboardView.setRenderer(this);

    setCardboardView(cardboardView);

    /* 2014-10-16 */
    mModelCube2 = new float[16];
    /* 2014-10-16 */

    mModelCube = new float[16];
    mCamera = new float[16];
    mView = new float[16];
    mModelViewProjection = new float[16];
    mModelView = new float[16];
    mModelFloor = new float[16];
    mHeadView = new float[16];
    mVibrator = (Vibrator) getSystemService(Context.VIBRATOR_SERVICE);


    mOverlayView = (CardboardOverlayView) findViewById(R.id.overlay);
    mOverlayView.show3DToast("VR-Test");
}

@Override
public void onRendererShutdown() {
}

@Override
public void onSurfaceChanged(int width, int height) {
}

/**
 * Creates the buffers we use to store information about the 3D world. OpenGL doesn't use Java
 * arrays, but rather needs data in a format it can understand. Hence we use ByteBuffers.
 *
 * @param config The EGL configuration used when creating the surface.
 */
@Override
public void onSurfaceCreated(EGLConfig config) {
    GLES20.glClearColor(0.1f, 0.1f, 0.1f, 0.5f); // Dark background so text shows up well

    ByteBuffer bbVertices = ByteBuffer.allocateDirect(DATA.CUBE_COORDS.length * 4);
    bbVertices.order(ByteOrder.nativeOrder());
    mCubeVertices = bbVertices.asFloatBuffer();
    mCubeVertices.put(DATA.CUBE_COORDS);
    mCubeVertices.position(0);

    ByteBuffer bbColors = ByteBuffer.allocateDirect(DATA.CUBE_COLORS.length * 4);
    bbColors.order(ByteOrder.nativeOrder());
    mCubeColors = bbColors.asFloatBuffer();
    mCubeColors.put(DATA.CUBE_COLORS);
    mCubeColors.position(0);

    ByteBuffer bbFoundColors = ByteBuffer.allocateDirect(DATA.CUBE_FOUND_COLORS.length * 4);
    bbFoundColors.order(ByteOrder.nativeOrder());
    mCubeFoundColors = bbFoundColors.asFloatBuffer();
    mCubeFoundColors.put(DATA.CUBE_FOUND_COLORS);
    mCubeFoundColors.position(0);

    ByteBuffer bbNormals = ByteBuffer.allocateDirect(DATA.CUBE_NORMALS.length * 4);
    bbNormals.order(ByteOrder.nativeOrder());
    mCubeNormals = bbNormals.asFloatBuffer();
    mCubeNormals.put(DATA.CUBE_NORMALS);
    mCubeNormals.position(0);

    int vertexShader = loadGLShader(GLES20.GL_VERTEX_SHADER, R.raw.light_vertex);
    int gridShader = loadGLShader(GLES20.GL_FRAGMENT_SHADER, R.raw.grid_fragment);

    mGlProgram = GLES20.glCreateProgram();
    GLES20.glAttachShader(mGlProgram, vertexShader);
    GLES20.glAttachShader(mGlProgram, gridShader);
    GLES20.glLinkProgram(mGlProgram);

    texture = createTexture();
    startCamera(texture);

    GLES20.glEnable(GLES20.GL_DEPTH_TEST);

    // Object first appears directly in front of user
    Matrix.setIdentityM(mModelCube, 0);
    Matrix.translateM(mModelCube, 0, 0, 0, -mObjectDistance);

    Matrix.setIdentityM(mModelCube2, 0);
    Matrix.translateM(mModelCube2, 0, -10.0f, -10.0f, -mObjectDistance - 12.0f);

    Matrix.setIdentityM(mModelFloor, 0);
    Matrix.translateM(mModelFloor, 0, 0, -mFloorDepth, 0); // Floor appears below user

    checkGLError("onSurfaceCreated");
}

/**
 * Converts a raw text file into a string.
 *
 * @param resId The resource ID of the raw text file to read.
 * @return The contents of the file as a single string.
 */
private String readRawTextFile(int resId) {
    InputStream inputStream = getResources().openRawResource(resId);
    try {
        BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
        StringBuilder sb = new StringBuilder();
        String line;
        while ((line = reader.readLine()) != null) {
            sb.append(line).append("\n");
        }
        reader.close();
        return sb.toString();
    } catch (IOException e) {
        e.printStackTrace();
    }
    return "";
}

/**
 * Prepares OpenGL ES before we draw a frame.
 *
 * @param headTransform The head transformation in the new frame.
 */
@Override
public void onNewFrame(HeadTransform headTransform) {
    GLES20.glUseProgram(mGlProgram);

    mModelViewProjectionParam = GLES20.glGetUniformLocation(mGlProgram, "u_MVP");
    mLightPosParam = GLES20.glGetUniformLocation(mGlProgram, "u_LightPos");
    mModelViewParam = GLES20.glGetUniformLocation(mGlProgram, "u_MVMatrix");
    mModelParam = GLES20.glGetUniformLocation(mGlProgram, "u_Model");
    mIsFloorParam = GLES20.glGetUniformLocation(mGlProgram, "u_IsFloor");

    // Build the Model part of the ModelView matrix.
    Matrix.rotateM(mModelCube, 0, TIME_DELTA, 0.5f, 0.5f, 1.0f);

    Matrix.rotateM(mModelCube2, 0, TIME_DELTA, 0.5f, 0.5f, 1.0f);
    //--------------------------------------


    // Build the camera matrix and apply it to the ModelView.
    Matrix.setLookAtM(mCamera, 0, 0.0f, 0.0f, CAMERA_Z, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f);

    headTransform.getHeadView(mHeadView, 0);

    checkGLError("onReadyToDraw");
}

/**
 * Draws a frame for an eye. The transformation for that eye (from the camera) is passed in as
 * a parameter.
 *
 * @param transform The transformations to apply to render this eye.
 */
@Override
public void onDrawEye(EyeTransform transform) {
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

    mPositionParam = GLES20.glGetAttribLocation(mGlProgram, "a_Position");
    mNormalParam = GLES20.glGetAttribLocation(mGlProgram, "a_Normal");
    mColorParam = GLES20.glGetAttribLocation(mGlProgram, "a_Color");

    GLES20.glEnableVertexAttribArray(mPositionParam);
    GLES20.glEnableVertexAttribArray(mNormalParam);
    GLES20.glEnableVertexAttribArray(mColorParam);
    checkGLError("mColorParam");

    // Apply the eye transformation to the camera.
    Matrix.multiplyMM(mView, 0, transform.getEyeView(), 0, mCamera, 0);

    // Set the position of the light
    Matrix.multiplyMV(mLightPosInEyeSpace, 0, mView, 0, mLightPosInWorldSpace, 0);
    GLES20.glUniform3f(mLightPosParam, mLightPosInEyeSpace[0], mLightPosInEyeSpace[1],
            mLightPosInEyeSpace[2]);

    // Build the ModelView and ModelViewProjection matrices
    // for calculating cube position and light.
    Matrix.multiplyMM(mModelView, 0, mView, 0, mModelCube, 0);
    Matrix.multiplyMM(mModelViewProjection, 0, transform.getPerspective(), 0,
            mModelView, 0);
    drawCube(1);

    //--------------------------------------
    Matrix.multiplyMM(mModelView, 0, mView, 0, mModelCube2, 0);
    Matrix.multiplyMM(mModelViewProjection, 0, transform.getPerspective(), 0,
            mModelView, 0);
    drawCube(0);
    //--------------------------------------
}

@Override
public void onFinishFrame(Viewport viewport) {
}

public void drawCube(int i1) {
    // This is not the floor!
    GLES20.glUniform1f(mIsFloorParam, 0f);

    // Set the Model in the shader, used to calculate lighting
    if (i1 == 1) {
        GLES20.glUniformMatrix4fv(mModelParam, 1, false, mModelCube, 0);
    } else if (i1 == 0) {
        //--2014-10-16 ??--------------------------------
        GLES20.glUniformMatrix4fv(mModelParam, 1, false, mModelCube2, 0);
        //-------------------------------------------------
    }
    // Set the ModelView in the shader, used to calculate lighting
    GLES20.glUniformMatrix4fv(mModelViewParam, 1, false, mModelView, 0);

    // Set the position of the cube
    GLES20.glVertexAttribPointer(mPositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
            false, 0, mCubeVertices);

    // Set the ModelViewProjection matrix in the shader.
    GLES20.glUniformMatrix4fv(mModelViewProjectionParam, 1, false, mModelViewProjection, 0);

    // Set the normal positions of the cube, again for shading
    GLES20.glVertexAttribPointer(mNormalParam, 3, GLES20.GL_FLOAT,
            false, 0, mCubeNormals);

    if (isLookingAtObject(i1)) {
        GLES20.glVertexAttribPointer(mColorParam, 4, GLES20.GL_FLOAT, false,
                0, mCubeFoundColors);
        if (i1 == 1)
            intCurrentI1 = i1;
        else
            intCurrentI1 = -1;

        intCurrentI = i1;

        System.out.println("drawCube->intCurrentI2:" + intCurrentI);
    } else {
        GLES20.glVertexAttribPointer(mColorParam, 4, GLES20.GL_FLOAT, false,
                0, mCubeColors);
        intCurrentI = -1;
    }
    GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 36);
    checkGLError("Drawing cube");

    if (intCurrentI1 != -1)
        intCurrentI = intCurrentI1;

    System.out.println("drawCube_out_if->intCurrentI4:" + intCurrentI);
}

private boolean isLookingAtObject(int i1) {
    float[] initVec = {0, 0, 0, 1.0f};
    float[] objPositionVec = new float[4];


    System.out.println("isLookingAtObject1->i1:" + i1);
    // Convert object space to camera space. Use the headView from onNewFrame.
    if (i1 == 1) {
        Matrix.multiplyMM(mModelView, 0, mHeadView, 0, mModelCube, 0);
        Matrix.multiplyMV(objPositionVec, 0, mModelView, 0, initVec, 0);
        intCurrentI = i1;
    } else if (i1 == 0) {
        Matrix.multiplyMM(mModelView, 0, mHeadView, 0, mModelCube2, 0);
        Matrix.multiplyMV(objPositionVec, 0, mModelView, 0, initVec, 0);
        intCurrentI = i1;
    }
    float pitch = (float) Math.atan2(objPositionVec[1], -objPositionVec[2]);
    float yaw = (float) Math.atan2(objPositionVec[0], -objPositionVec[2]);

    boolean bool1 = (Math.abs(pitch) < PITCH_LIMIT) && (Math.abs(yaw) < YAW_LIMIT);
    return bool1;
}

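/**
 * Opens the device camera and streams its preview into the external OES texture.
 */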
public void startCamera(int texture) {
    surface = new SurfaceTexture(texture);
    surface.setOnFrameAvailableListener(this);

    camera = Camera.open();

    try {
        camera.setPreviewTexture(surface);
        camera.startPreview();
    } catch (IOException ioe) {
        Log.w("MainActivity", "CAM LAUNCH FAILED");
    }
}

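/**
 * Creates an external OES texture for the camera preview and sets its filtering
 * and wrapping parameters.
 */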
static private int createTexture() {
    int[] texture = new int[1];

    GLES20.glGenTextures(1, texture, 0);
    GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture[0]);
    GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
            GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
    GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
            GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
    GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
            GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
            GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);

    return texture[0];
}

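/**
 * Called when the camera delivers a new preview frame; requests a new render pass.
 */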
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
    this.cardboardView.requestRender();
}
}

Edit (July 11, 2016)

I cut the vertices in half to see what might be rendered behind them, but I think the problem is the vertex and fragment shaders.

The cube only appears when I use this vertex and fragment shader:

simple_fragment.shader

precision mediump float;
varying vec4 v_Color;

void main() {
  gl_FragColor = v_Color;
}

light_vertex.shader

uniform mat4 u_MVP;
uniform mat4 u_MVMatrix;
uniform mat4 u_Model;
uniform vec3 u_LightPos;
uniform float u_IsFloor;
attribute vec4 a_Position;
attribute vec4 a_Color;
attribute vec3 a_Normal;
varying vec4 v_Color;
varying vec3 v_Grid;
varying float v_isFloor;

void main()
{
   vec3 modelVertex = vec3(u_Model * a_Position);
   v_Grid = modelVertex;

   vec3 modelViewVertex = vec3(u_MVMatrix * a_Position);
   vec3 modelViewNormal = vec3(u_MVMatrix * vec4(a_Normal, 0.0));
   float distance = length(u_LightPos - modelViewVertex);
   vec3 lightVector = normalize(u_LightPos - modelViewVertex);
   float diffuse = max(dot(modelViewNormal, lightVector), 0.5   );
   diffuse = diffuse * (1.0 / (1.0 + (0.00001 * distance * distance)));
   v_Color = a_Color * diffuse;
   gl_Position = u_MVP * a_Position;

   v_isFloor = u_IsFloor;
}

And the camera only appears when I use these shaders:

vertex.shader

attribute vec4 position;
attribute vec2 inputTextureCoordinate;
varying vec2 textureCoordinate;
void main()
{
     gl_Position = position;
     textureCoordinate = inputTextureCoordinate;
}

fragment.shader

#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 textureCoordinate;
varying vec4 v_Color;
uniform samplerExternalOES s_texture;
void main(void) {
    gl_FragColor = texture2D( s_texture, textureCoordinate );
}

I don't know how to fix the shaders.

Just an open suggestion. A few months ago I developed an AR project for a university assignment. In my case I used a tool called Vuforia and integrated it with Unity to get it working on mobile devices. You can get your app running on both Android and iOS devices. The latest versions of Unity and Vuforia both make AR development easier, since AR is currently going through a hype phase.

Depending on what you need your AR project to do and on your experience with Unity, the learning curve grows. In my case, I added the construction of a roof onto a Neolithic site. I also used third-party software called MakeHuman, together with Blender, to create a walking human. In my whole project I never had to touch a single line of code :)

Hope this helps.

I suggest disabling glEnable(GL_DEPTH_TEST) in order to render the background object in the foreground, and then switching between the shaders with:

GLES20.glUseProgram();

For your example, that could be:

@Override
public void onSurfaceCreated(EGLConfig config) {
Log.i(TAG, "onSurfaceCreated");
GLES20.glClearColor(0.1f, 0.1f, 0.1f, 0.5f); // Dark background so text shows up well

ByteBuffer bb = ByteBuffer.allocateDirect(squareVertices.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(squareVertices);
vertexBuffer.position(0);

ByteBuffer dlb = ByteBuffer.allocateDirect(drawOrder.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);


ByteBuffer bb2 = ByteBuffer.allocateDirect(textureVertices.length * 4);
bb2.order(ByteOrder.nativeOrder());
textureVerticesBuffer = bb2.asFloatBuffer();
textureVerticesBuffer.put(textureVertices);
textureVerticesBuffer.position(0);

//Cube
ByteBuffer bbVertices = ByteBuffer.allocateDirect(DATA.CUBE_COORDS.length * 4);
bbVertices.order(ByteOrder.nativeOrder());
mCubeVertices = bbVertices.asFloatBuffer();
mCubeVertices.put(DATA.CUBE_COORDS);
mCubeVertices.position(0);

ByteBuffer bbColors = ByteBuffer.allocateDirect(DATA.CUBE_COLORS.length * 4);
bbColors.order(ByteOrder.nativeOrder());
mCubeColors = bbColors.asFloatBuffer();
mCubeColors.put(DATA.CUBE_COLORS);
mCubeColors.position(0);

ByteBuffer bbFoundColors = ByteBuffer.allocateDirect(DATA.CUBE_FOUND_COLORS.length * 4);
bbFoundColors.order(ByteOrder.nativeOrder());
mCubeFoundColors = bbFoundColors.asFloatBuffer();
mCubeFoundColors.put(DATA.CUBE_FOUND_COLORS);
mCubeFoundColors.position(0);

ByteBuffer bbNormals = ByteBuffer.allocateDirect(DATA.CUBE_NORMALS.length * 4);
bbNormals.order(ByteOrder.nativeOrder());
mCubeNormals = bbNormals.asFloatBuffer();
mCubeNormals.put(DATA.CUBE_NORMALS);
mCubeNormals.position(0);

int vertexShader = loadGLShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = loadGLShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);

mCameraProgram = GLES20.glCreateProgram();             // create empty OpenGL ES Program
GLES20.glAttachShader(mCameraProgram, vertexShader);   // add the vertex shader to program
GLES20.glAttachShader(mCameraProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mCameraProgram);

vertexShader = loadGLShader(GLES20.GL_VERTEX_SHADER, R.raw.light_vertex);
fragmentShader = loadGLShader(GLES20.GL_FRAGMENT_SHADER, R.raw.grid_fragment);

mCubeProgram = GLES20.glCreateProgram();             // create empty OpenGL ES Program
GLES20.glAttachShader(mCubeProgram, vertexShader);   // add the vertex shader to program
GLES20.glAttachShader(mCubeProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mCubeProgram);

texture = createTexture();
startCamera(texture);

Matrix.setIdentityM(mModelCube, 0);
Matrix.translateM(mModelCube, 0, 0, 0, -mObjectDistance);

checkGLError("onSurfaceCreated");
}
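
//---------------------------------------------------
// NOTE (assumption, not shown in the original answer): this code relies on fields that
// come from the CardboardPassthrough sample it builds on (TAG, squareVertices, drawOrder,
// textureVertices, vertexBuffer, drawListBuffer, textureVerticesBuffer, vertexStride,
// mCameraProgram, mCubeProgram, mPositionHandle, mTextureCoordHandle, mColorHandle),
// plus a loadGLShader overload that compiles a shader from a String instead of a raw
// resource id. A minimal sketch of what that assumed overload could look like:
private int loadGLShader(int type, String code) {
    int shader = GLES20.glCreateShader(type);
    GLES20.glShaderSource(shader, code);
    GLES20.glCompileShader(shader);

    // Check the compile status and fail loudly if compilation did not succeed.
    final int[] compileStatus = new int[1];
    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
    if (compileStatus[0] == 0) {
        GLES20.glDeleteShader(shader);
        throw new RuntimeException("Error compiling shader.");
    }
    return shader;
}
//---------------------------------------------------
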
@Override
public void onNewFrame(HeadTransform headTransform) {
    GLES20.glUseProgram(mCubeProgram);

mModelViewProjectionParam = GLES20.glGetUniformLocation(mCubeProgram, "u_MVP");
mLightPosParam = GLES20.glGetUniformLocation(mCubeProgram, "u_LightPos");
mModelViewParam = GLES20.glGetUniformLocation(mCubeProgram, "u_MVMatrix");
mModelParam = GLES20.glGetUniformLocation(mCubeProgram, "u_Model");
mIsFloorParam = GLES20.glGetUniformLocation(mCubeProgram, "u_IsFloor");

// Build the Model part of the ModelView matrix.
Matrix.rotateM(mModelCube, 0, TIME_DELTA, 0.5f, 0.5f, 1.0f);

// Build the camera matrix and apply it to the ModelView.
Matrix.setLookAtM(mCamera, 0, 0.0f, 0.0f, CAMERA_Z, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f);

headTransform.getHeadView(mHeadView, 0);

GLES20.glUseProgram(mCameraProgram);
float[] mtx = new float[16];
//GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
surface.updateTexImage();
surface.getTransformMatrix(mtx);
}

@Override
public void onDrawEye(EyeTransform transform) {
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

//Camera
GLES20.glUseProgram(mCameraProgram);

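// Note: glActiveTexture normally expects a GL_TEXTUREn unit; passing GL_TEXTURE_EXTERNAL_OES
// here is carried over from the CardboardPassthrough sample and leaves the default texture
// unit 0 active.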
GLES20.glActiveTexture(GL_TEXTURE_EXTERNAL_OES);
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture);


mPositionHandle = GLES20.glGetAttribLocation(mCameraProgram, "position");
GLES20.glEnableVertexAttribArray(mPositionHandle);
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
        false, vertexStride, vertexBuffer);


mTextureCoordHandle = GLES20.glGetAttribLocation(mCameraProgram, "inputTextureCoordinate");
GLES20.glEnableVertexAttribArray(mTextureCoordHandle);
GLES20.glVertexAttribPointer(mTextureCoordHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
        false, vertexStride, textureVerticesBuffer);

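// Note: s_texture is a uniform in the fragment shader, so glGetAttribLocation returns -1 here.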
mColorHandle = GLES20.glGetAttribLocation(mCameraProgram, "s_texture");


GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length,
        GLES20.GL_UNSIGNED_SHORT, drawListBuffer);


// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
GLES20.glDisableVertexAttribArray(mTextureCoordHandle);

//cube
GLES20.glUseProgram(mCubeProgram);

mPositionParam = GLES20.glGetAttribLocation(mCubeProgram, "a_Position");
mNormalParam = GLES20.glGetAttribLocation(mCubeProgram, "a_Normal");
mColorParam = GLES20.glGetAttribLocation(mCubeProgram, "a_Color");

GLES20.glEnableVertexAttribArray(mPositionParam);
GLES20.glEnableVertexAttribArray(mNormalParam);
GLES20.glEnableVertexAttribArray(mColorParam);

// Set the position of the light
Matrix.multiplyMV(mLightPosInEyeSpace, 0, mView, 0, mLightPosInWorldSpace, 0);
GLES20.glUniform3f(mLightPosParam, mLightPosInEyeSpace[0], mLightPosInEyeSpace[1],
        mLightPosInEyeSpace[2]);

Matrix.multiplyMM(mModelView, 0, mView, 0, mModelCube, 0);
Matrix.multiplyMM(mModelViewProjection, 0, transform.getPerspective(), 0,
        mModelView, 0);
drawCube(1);

Matrix.multiplyMM(mView, 0, transform.getEyeView(), 0, mCamera, 0);