Unable to visualize a simple head mesh with a smooth surface in OpenGL (C++) like any other 3D graphics software?
I am using modern OpenGL to visualize a head mesh imported from a .obj file, with normal vectors created in Blender. When I load the model in any 3D object viewer, everything looks fine.
But when I try to visualize it myself with OpenGL, the surface does not look smooth (it appears bumpy), and the light shows up on the wrong side of the head. To test my lighting code I applied it to a box, and it worked fine there, but not on the face mesh (the position of the light source is visualized by the small white box behind the head mesh).
For the box I created three instances of each vertex, each with a different normal corresponding to one of the adjoining faces. I first applied the same approach to the head mesh, setting up multiple instances of each vertex with different normals for the intended quads. When the mesh came out bumpy, I suspected that associating different normals with the same vertex might not be a good idea, especially on a smooth surface like a head mesh, because it can cause abrupt changes in the lighting direction. So I loaded the .obj with Assimp, hoping it would rearrange the vertices so that each vertex has a unique specification (texture coordinates, normal, position, etc.), and uploaded the corresponding rearranged indices into my index buffer. But nothing changed. I am not sure whether the mistake is in how I create the vertex buffers or in my shaders, but I took both directly from the code I wrote to visualize the cube, shown below.
struct Vertex {
glm::vec3 position;
glm::vec2 texCoords;
glm::vec3 normal;
};
struct Mesh {
vector<Vertex> vertices;
vector<unsigned int> indices;
};
unsigned int createMeshVertexArray() {
unsigned int posComponents = 3; // xyz
unsigned int uvComponents = 2; // uv
unsigned int normComponents = 3; // ijk
unsigned int vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
unsigned int vbo;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, _meshes[0].vertices.size() * sizeof(Vertex), &_meshes[0].vertices[0], GL_STATIC_DRAW); // perhaps gl_dynamic_draw is better
unsigned int posAttribId = 0;
unsigned int uvAttribId = 1;
unsigned int normAttribId = 2;
int posOffset = 0 * sizeof(float);
int uvOffset = (posOffset + posComponents) * sizeof(float);
int normOffset = (uvOffset + uvComponents) * sizeof(float);
size_t stride = (posComponents + uvComponents + normComponents) * sizeof(float);
glEnableVertexAttribArray(posAttribId);
glEnableVertexAttribArray(uvAttribId);
glEnableVertexAttribArray(normAttribId);
glVertexAttribPointer(posAttribId, posComponents, GL_FLOAT, GL_FALSE, stride, (const void*)posOffset);
glVertexAttribPointer(uvAttribId, uvComponents, GL_FLOAT, GL_FALSE, stride, (const void*)uvOffset);
glVertexAttribPointer(normAttribId, normComponents, GL_FLOAT, GL_FALSE, stride, (const void*)normOffset);
return vao;
}
unsigned int createMeshIndexBuffer() {
unsigned int ibo;
glGenBuffers(1, &ibo);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, _meshes[0].indices.size() * sizeof(unsigned int), &_meshes[0].indices[0], GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0); // unbind to make sure nothing else is accidentally added to this index buffer later
return ibo;
}
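For context, the loadModel call further down relies on Assimp to fill the Vertex / Mesh structures above. A minimal sketch of what such a loader could look like (this is an assumption about the loader, not the exact code I used; the _meshes member and the post-processing flags are placeholders taken from the listings):
#include <assimp/Importer.hpp>
#include <assimp/scene.h>
#include <assimp/postprocess.h>
// Sketch only: assumes a member "std::vector<Mesh> _meshes;" like the one
// indexed as _meshes[0] in createMeshVertexArray(). JoinIdenticalVertices
// merges duplicated vertices so each one carries a single position/uv/normal.
void loadModel(const std::string& path) {
    Assimp::Importer importer;
    const aiScene* scene = importer.ReadFile(path, aiProcess_JoinIdenticalVertices);
    if (!scene || !scene->mRootNode) return; // real code should report the error
    for (unsigned int m = 0; m < scene->mNumMeshes; ++m) {
        const aiMesh* src = scene->mMeshes[m];
        Mesh mesh;
        mesh.vertices.reserve(src->mNumVertices);
        for (unsigned int v = 0; v < src->mNumVertices; ++v) {
            Vertex vert;
            vert.position = glm::vec3(src->mVertices[v].x, src->mVertices[v].y, src->mVertices[v].z);
            vert.texCoords = src->HasTextureCoords(0)
                ? glm::vec2(src->mTextureCoords[0][v].x, src->mTextureCoords[0][v].y)
                : glm::vec2(0.0f);
            vert.normal = src->HasNormals()
                ? glm::vec3(src->mNormals[v].x, src->mNormals[v].y, src->mNormals[v].z)
                : glm::vec3(0.0f);
            mesh.vertices.push_back(vert);
        }
        for (unsigned int f = 0; f < src->mNumFaces; ++f)      // one face = one quad here
            for (unsigned int i = 0; i < src->mFaces[f].mNumIndices; ++i)
                mesh.indices.push_back(src->mFaces[f].mIndices[i]);
        _meshes.push_back(mesh);
    }
}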
Main function and render loop.
int main(void) {
//=============================================================================
//------------------ light source and camera position
//=============================================================================
float cameraOffsetY = 1.5;
float radius = 8.0;
glm::vec3 lightPos(0.0, 3.0, 1.0);
glm::vec3 cameraPos(0.0, cameraOffsetY, radius);
glm::vec3 cameraFront(0.0, cameraOffsetY, 0.0);
glm::vec3 cameraUp(0.0, 1.0, 0.0);
//=============================================================================
//------------------ projection matrix
//=============================================================================
float f = windowWidth; // focal distance
float fov = 2 * atan(windowWidth / (float)(2 * f)); // in radian
float aspectRatio = windowWidth / (float)windowHeight;
glm::mat4 proj = glm::perspective(fov, aspectRatio, 0.1f, 100.0f);
//=============================================================================
//------------------ head mesh
//=============================================================================
glm::mat4 meshModel = glm::translate(glm::mat4(1.0), glm::vec3(2.0, 0.0, -2.0));
meshModel = glm::rotate(meshModel, glm::radians(-30.0f), glm::vec3(0.0, 1.0, 0.0));
float faceSc = 1.0;
meshModel = glm::scale(meshModel, glm::vec3(faceSc, faceSc, faceSc));
glVisual.loadModel("../files/pose_17_with_normals.obj");
unsigned int meshVao = glVisual.createMeshVertexArray(); // vertex array
unsigned int meshIbo = glVisual.createMeshIndexBuffer(); // index buffer
unsigned int meshSbo = glVisual.handleShaders("../files/faceMesh.glsl"); // shader
unsigned int meshLoc_mvp = glGetUniformLocation(meshSbo, "mvp");
unsigned int meshModelLoc = glGetUniformLocation(meshSbo, "model");
unsigned int meshNormMatLoc = glGetUniformLocation(meshSbo, "nMat");
unsigned int meshColorLoc = glGetUniformLocation(meshSbo, "objectColor");
unsigned int meshLightPosLoc = glGetUniformLocation(meshSbo, "lightPos");
unsigned int meshCamPosLoc = glGetUniformLocation(meshSbo, "cameraPos");
glm::mat3 meshNormMat = glm::mat3(glm::transpose(glm::inverse(meshModel)));
glUniformMatrix4fv(meshModelLoc, 1, GL_FALSE, glm::value_ptr(meshModel));
glUniformMatrix3fv(meshNormMatLoc, 1, GL_FALSE, glm::value_ptr(meshNormMat));
glUniform3f(meshColorLoc, 0.6, 0.6, 0.8);
glUniform3f(meshLightPosLoc, lightPos[0], lightPos[1], lightPos[2]);
size_t numOfMeshVisualVerts = NUM_OF_FACES * 4;
while (true) {
glm::mat4 view = glm::lookAt(cameraPos, cameraFront, cameraUp);
glBindVertexArray(meshVao);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, meshIbo);
glUseProgram(meshSbo);
glm::mat4 mvp = proj * view * meshModel;
glUniform3f(meshCamPosLoc, cameraPos[0], cameraPos[1], cameraPos[2]);
glUniformMatrix4fv(meshLoc_mvp, 1, GL_FALSE, glm::value_ptr(mvp));
glDrawArrays(GL_QUADS, 0, numOfMeshVisualVerts);
glfwSwapBuffers(window);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // clear buffer
glfwPollEvents(); // Poll for and process events
if (glfwWindowShouldClose(window))
break;
}
glfwTerminate();
return 0;
}
Vertex and fragment shaders.
#shader vertex
#version 330 core
layout(location = 0) in vec3 n_position;
layout(location = 1) in vec2 n_uv;
layout(location = 2) in vec3 n_normal;
out vec3 faceNormal;
out vec3 fragPos; // fragment position
uniform mat4 mvp; // mvp matrix
uniform mat4 model; // model matrix
uniform mat3 nMat; // normal matrix
void main() {
gl_Position = mvp * vec4(n_position, 1.0);
fragPos = vec3(model * vec4(n_position, 1.0));
faceNormal = nMat * n_normal;
};
#shader fragment
#version 330 core
layout(location = 0) out vec4 color;
in vec3 faceNormal;
in vec3 fragPos; // fragment position in world coordinates
uniform vec3 objectColor;
uniform vec3 lightPos;
uniform vec3 cameraPos;
void main() {
vec3 lightColor = vec3(1.0, 1.0, 1.0);
float ambientCoeff = 0.3;
vec3 normVec = normalize(faceNormal);
vec3 lightDir = normalize(lightPos - fragPos);
float diff = max(dot(normVec, lightDir), 0.0);
vec3 diffuseCoeff = diff * lightColor;
float specularStrength = 0.1; // depends on the material
int shininess = 4; // how much to spread on the surface
vec3 viewDir = normalize(cameraPos - fragPos);
vec3 reflectDir = reflect(-lightDir, normVec);
float reflectAmount = pow(max(dot(viewDir, reflectDir), 0.0), shininess);
vec3 specularCoeff = specularStrength * reflectAmount * lightColor;
color = vec4((ambientCoeff + diffuseCoeff + specularCoeff) * objectColor, 1.0);
// color = vec4(objectColor, 1.0);
};
As @Spektre suggested in the comments, the problem was in the way I set up the layout of my vertex buffer. Specifically, I computed the offset of my normal attribute incorrectly, as shown below.
unsigned int posComponents = 3; // xyz
unsigned int uvComponents = 2; // uv
unsigned int normComponents = 3; // ijk
int posOffset = 0 * sizeof(float);
int uvOffset = (posOffset + posComponents) * sizeof(float);
int normOffset = (uvOffset + uvComponents) * sizeof(float);
glVertexAttribPointer(posAttribId, posComponents, GL_FLOAT, GL_FALSE, stride, (const void*)posOffset);
glVertexAttribPointer(uvAttribId, uvComponents, GL_FLOAT, GL_FALSE, stride, (const void*)uvOffset);
glVertexAttribPointer(normAttribId, normComponents, GL_FLOAT, GL_FALSE, stride, (const void*)normOffset);
whereas it should have been
int normOffset = (posComponents + uvComponents) * sizeof(float);
It was a silly mistake, but I am glad it is fixed now.
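As a side note (just a suggestion, not part of the fix above), this kind of byte/element mix-up can be avoided by taking the byte offsets straight from the Vertex struct with offsetof:
#include <cstddef> // offsetof
GLsizei stride = sizeof(Vertex);
glVertexAttribPointer(posAttribId,  3, GL_FLOAT, GL_FALSE, stride, (const void*)offsetof(Vertex, position));
glVertexAttribPointer(uvAttribId,   2, GL_FLOAT, GL_FALSE, stride, (const void*)offsetof(Vertex, texCoords));
glVertexAttribPointer(normAttribId, 3, GL_FLOAT, GL_FALSE, stride, (const void*)offsetof(Vertex, normal));
This way the offsets stay correct even if the layout of Vertex changes later.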