Android OpenGL ES textured half sphere
I have to develop an equirectangular image viewer, like the Ricoh Theta app.
I'm using OpenGL ES on Android (1.0, but I can switch to 2.0 if needed).
For now, I've managed to create the half sphere (based on this answer), with the following code:
import android.graphics.Bitmap;
import android.opengl.GLUtils;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.ArrayList;
import java.util.List;

import javax.microedition.khronos.opengles.GL10;

public class HalfSphere {

    // ---------------------------------------------------------------------------------------------
    // region Attributes
    private final int[] mTextures = new int[1];

    float[][] mVertices;
    int mNbStrips;
    int mNbVerticesPerStrips;

    private final List<FloatBuffer> mVerticesBuffer = new ArrayList<>();
    private final List<ByteBuffer> mIndicesBuffer = new ArrayList<>();
    private final List<FloatBuffer> mTextureBuffer = new ArrayList<>();
    // endregion
    // ---------------------------------------------------------------------------------------------

    // ---------------------------------------------------------------------------------------------
    // region Constructor
    public HalfSphere(int nbStrips, int nbVerticesPerStrips, float radius) {
        // Generate the vertices:
        mNbStrips = nbStrips;
        mNbVerticesPerStrips = nbVerticesPerStrips;
        mVertices = new float[mNbStrips * mNbVerticesPerStrips][3];
        for (int i = 0; i < mNbStrips; i++) {
            for (int j = 0; j < mNbVerticesPerStrips; j++) {
                mVertices[i * mNbVerticesPerStrips + j][0] = (float) (radius * Math.cos(j * 2 * Math.PI / mNbVerticesPerStrips) * Math.cos(i * Math.PI / mNbStrips));
                mVertices[i * mNbVerticesPerStrips + j][1] = (float) (radius * Math.sin(i * Math.PI / mNbStrips));
                mVertices[i * mNbVerticesPerStrips + j][2] = (float) (radius * Math.sin(j * 2 * Math.PI / mNbVerticesPerStrips) * Math.cos(i * Math.PI / mNbStrips));
            }
        }

        // Populate the buffers:
        for (int i = 0; i < mNbStrips - 1; i++) {
            for (int j = 0; j < mNbVerticesPerStrips; j++) {
                byte[] indices = {
                        0, 1, 2, // first triangle (bottom left - top left - top right)
                        0, 2, 3  // second triangle (bottom left - top right - bottom right)
                };
                float[] p1 = mVertices[i * mNbVerticesPerStrips + j];
                float[] p2 = mVertices[i * mNbVerticesPerStrips + (j + 1) % mNbVerticesPerStrips];
                float[] p3 = mVertices[(i + 1) * mNbVerticesPerStrips + (j + 1) % mNbVerticesPerStrips];
                float[] p4 = mVertices[(i + 1) * mNbVerticesPerStrips + j];
                float[] quad = {
                        p1[0], p1[1], p1[2],
                        p2[0], p2[1], p2[2],
                        p3[0], p3[1], p3[2],
                        p4[0], p4[1], p4[2]
                };
                mVerticesBuffer.add(floatArrayToFloatBuffer(quad));
                mTextureBuffer.add(floatArrayToFloatBuffer(quad)); // texture coords: currently just the vertex positions
                mIndicesBuffer.add(byteArrayToByteBuffer(indices));
            }
        }
    }
    // endregion
    // ---------------------------------------------------------------------------------------------
    // ---------------------------------------------------------------------------------------------
    // region Draw
    public void draw(final GL10 gl) {
        // Bind the previously generated texture.
        gl.glBindTexture(GL10.GL_TEXTURE_2D, this.mTextures[0]);

        // Point to our buffers.
        gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
        gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);

        // Set the face rotation, clockwise in this case.
        gl.glFrontFace(GL10.GL_CW);

        for (int i = 0; i < mVerticesBuffer.size(); i++) {
            gl.glVertexPointer(3, GL10.GL_FLOAT, 0, mVerticesBuffer.get(i));
            gl.glTexCoordPointer(3, GL10.GL_FLOAT, 0, mTextureBuffer.get(i));
            gl.glDrawElements(GL10.GL_TRIANGLE_STRIP, 6, GL10.GL_UNSIGNED_BYTE, mIndicesBuffer.get(i)); // GL_TRIANGLE_STRIP / GL_LINE_LOOP
        }

        // Disable the client state before leaving.
        gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
        gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
    }
    // endregion
    // ---------------------------------------------------------------------------------------------
    // ---------------------------------------------------------------------------------------------
    // region Utils
    public void loadGLTexture(GL10 gl, Bitmap texture) {
        // Generate one texture pointer, and bind it to the texture array.
        gl.glGenTextures(1, this.mTextures, 0);
        gl.glBindTexture(GL10.GL_TEXTURE_2D, this.mTextures[0]);

        // Set the filtering: nearest when minifying, linear when magnifying.
        gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
        gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);

        // Use Android GLUtils to specify a two-dimensional texture image from our bitmap.
        GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, texture, 0);
        texture.recycle();
    }

    public FloatBuffer floatArrayToFloatBuffer(float[] array) {
        ByteBuffer vbb = ByteBuffer.allocateDirect(array.length * 4); // 4 bytes per float
        vbb.order(ByteOrder.nativeOrder());   // use the device hardware's native byte order
        FloatBuffer fb = vbb.asFloatBuffer(); // create a floating point buffer from the ByteBuffer
        fb.put(array);                        // add the coordinates to the FloatBuffer
        fb.position(0);                       // set the buffer to read the first coordinate
        return fb;
    }

    public ByteBuffer byteArrayToByteBuffer(byte[] array) {
        ByteBuffer vbb = ByteBuffer.allocateDirect(array.length); // 1 byte per index
        vbb.order(ByteOrder.nativeOrder()); // use the device hardware's native byte order
        vbb.put(array);                     // add the indices to the ByteBuffer
        vbb.position(0);                    // set the buffer to read the first index
        return vbb;
    }
    // endregion
    // ---------------------------------------------------------------------------------------------
}
Of course, the texture is not applied correctly, because I'm simply reusing the vertex coordinates as texture coordinates. Does anybody see how to do this properly? I also need to be able to "move" the texture when the user pans.
EDIT: as suggested by codetiger, I'm now computing lat/180 and lon/360 and normalizing them to [0..1]. Now I'm trying to add panning. It works when panning in longitude (horizontally):
But not when panning in latitude (vertically):
I'm simply adding a value between 0 and 1 to the coordinates when the user pans. I tried the formula given here, but with no luck. Any idea?
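Roughly, this is what I do for each quad when the user pans (a simplified sketch; panU and panV are just my names for the accumulated pan offsets in [0..1]):

    // Simplified sketch of my panning: shift every (u, v) pair by the current pan offsets.
    private float[] applyPan(float[] baseUV, float panU, float panV) {
        float[] uv = new float[baseUV.length];
        for (int k = 0; k < baseUV.length; k += 2) {
            uv[k]     = (baseUV[k]     + panU) % 1.0f; // longitude (u): behaves as expected
            uv[k + 1] = (baseUV[k + 1] + panV) % 1.0f; // latitude (v): this is where it goes wrong
        }
        return uv;
    }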
If it helps, this is what I want to achieve (captured with the Ricoh Theta app):
To make the sphere a full 360-degree sphere, you can replace the vertex-generation lines with the ones below.
mVertices[i * mNbVerticesPerStrips + j][0] = (float) (radius * Math.cos(j * 2 * Math.PI / mNbVerticesPerStrips) * Math.cos(2 * i * Math.PI / mNbStrips));
mVertices[i * mNbVerticesPerStrips + j][1] = (float) (radius * Math.sin(2 * i * Math.PI / mNbStrips));
mVertices[i * mNbVerticesPerStrips + j][2] = (float) (radius * Math.sin(j * 2 * Math.PI / mNbVerticesPerStrips) * Math.cos(2 * i * Math.PI / mNbStrips));
The only change is using 2 * Math.PI / mNbStrips as the second angle instead of Math.PI / mNbStrips.
To rotate the image, you can rotate the sphere with:
gl.glRotatef(angle, 1.0f, 0.0f, 0.0f);
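For example, in the GLSurfaceView.Renderer's onDrawFrame (a minimal sketch; mAngle and mHalfSphere are placeholder names for your rotation angle and sphere instance):

    @Override
    public void onDrawFrame(GL10 gl) {
        gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
        gl.glLoadIdentity();
        gl.glRotatef(mAngle, 1.0f, 0.0f, 0.0f); // tilt the sphere around the x axis
        mHalfSphere.draw(gl);
    }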
Update:
To get the correct texture coordinates on the sphere for a standard distorted (equirectangular) sphere texture, you can use (lat/180, lon/360) and normalize the values to [0..1], as described here.
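For example, in the constructor the texture buffer can be filled from the loop indices instead of the vertex positions (just a sketch assuming the image spans the half sphere exactly; depending on the image orientation, v may need to be flipped to 1 - v):

    // Sketch: build u/v from the strip/slice indices, normalized to [0..1].
    float u1 = (float) j / mNbVerticesPerStrips;       // longitude of p1/p4
    float u2 = (float) (j + 1) / mNbVerticesPerStrips; // longitude of p2/p3
    float v1 = (float) i / mNbStrips;                  // latitude of p1/p2
    float v2 = (float) (i + 1) / mNbStrips;            // latitude of p3/p4
    float[] uv = {
            u1, v1, // p1
            u2, v1, // p2
            u2, v2, // p3
            u1, v2  // p4
    };
    mTextureBuffer.add(floatArrayToFloatBuffer(uv));

The glTexCoordPointer call in draw() then takes 2 components per coordinate instead of 3: gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, mTextureBuffer.get(i));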