GLSL 从我的 uniform 变量中取回奇怪的值,而且它似乎也被设置成了错误的值

GLSL getting odd values back from my uniform and it seems to be set with the wrong value too

我在顶点着色器中使用 uniform 时遇到问题

代码如下

// gcc main.c -o main `pkg-config --libs --cflags glfw3` -lGL -lm


// GL_GLEXT_PROTOTYPES must be defined before the GL headers (pulled in by
// glfw3.h) so that post-GL-1.1 functions (glGenVertexArrays, glUniform1f, ...)
// get real prototypes. Without them the compiler implicitly declares the
// functions and passes every argument as int, mangling the float given to
// glUniform1f — the exact "garbage uniform" symptom described below.
#define GL_GLEXT_PROTOTYPES
#include <GLFW/glfw3.h>
#include <math.h>
#include <stdio.h>
#include <stdlib.h>

void gluErrorString(const char* why,GLenum errorCode);
void checkShader(GLuint status, GLuint shader, const char* which);


float verts[] = {
    -0.5f,   0.5f,   0.0f,
     0.5f,   0.5f,   0.0f,
     0.5f,  -0.5f,   0.0f,
    -0.5f,  -0.5f,   0.0f
};

const char* vertex_shader =
    "#version 330\n"
    "in vec3 vp;\n"
    "uniform float u_time;\n"
    "\n"
    "void main () {\n"
    "  vec4 p = vec4(vp, 1.0);\n"
    "  p.x = p.x + u_time;\n"
    "  gl_Position = p;\n"
    "}";

const char* fragment_shader =
    "#version 330\n"
    "out vec4 frag_colour;\n"
    "void main () {\n"
    "  frag_colour = vec4 (0.5, 0.0, 0.5, 1.0);\n"
    "}";




int main () {

    if (!glfwInit ()) {
        fprintf (stderr, "ERROR: could not start GLFW3\n");
        return 1;
    }

    glfwWindowHint (GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint (GLFW_CONTEXT_VERSION_MINOR, 2);
    //glfwWindowHint (GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
    glfwWindowHint (GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

    GLFWwindow* window = glfwCreateWindow (640, 480, "Hello Triangle", NULL, NULL);
    if (!window) {
        fprintf (stderr, "ERROR: could not open window with GLFW3\n");
        glfwTerminate();
        return 1;
    }

    glfwMakeContextCurrent (window);

    // vert arrays group vert buffers together unlike GLES2 (no vert arrays)
    // we *must* have one of these even if we only need 1 vert buffer
    GLuint vao = 0;
    glGenVertexArrays (1, &vao);
    glBindVertexArray (vao);

    GLuint vbo = 0;
    glGenBuffers (1, &vbo);
    glBindBuffer (GL_ARRAY_BUFFER, vbo);
    // each vert takes 3 float * 4 verts in the fan = 12 floats
    glBufferData (GL_ARRAY_BUFFER, 12 * sizeof (float), verts, GL_STATIC_DRAW);
    gluErrorString("buffer data",glGetError());

    glEnableVertexAttribArray (0);
    glBindBuffer (GL_ARRAY_BUFFER, vbo);
    // 3 components per vert
    glVertexAttribPointer (0, 3, GL_FLOAT, GL_FALSE, 0, NULL);
    gluErrorString("attrib pointer",glGetError());


    GLuint vs = glCreateShader (GL_VERTEX_SHADER);
    glShaderSource (vs, 1, &vertex_shader, NULL);
    glCompileShader (vs);
    GLint success = 0;
    glGetShaderiv(vs, GL_COMPILE_STATUS, &success);
    checkShader(success, vs, "Vert Shader");

    GLuint fs = glCreateShader (GL_FRAGMENT_SHADER);
    glShaderSource (fs, 1, &fragment_shader, NULL);
    glCompileShader (fs);
    glGetShaderiv(fs, GL_COMPILE_STATUS, &success);
    checkShader(success, fs, "Frag Shader"); 

    GLuint shader_program = glCreateProgram ();
    glAttachShader (shader_program, fs);
    glAttachShader (shader_program, vs);
    glLinkProgram (shader_program);
    gluErrorString("Link prog",glGetError());

    glUseProgram (shader_program);
    gluErrorString("use prog",glGetError());


    GLuint uniT = glGetUniformLocation(shader_program,"u_time"); // ask gl to assign uniform id
    gluErrorString("get uniform location",glGetError());    

    printf("uniT=%i\n",uniT);

    glEnable (GL_DEPTH_TEST);
    glDepthFunc (GL_LESS);
    float t=0;



    while (!glfwWindowShouldClose (window)) {

        glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        gluErrorString("clear",glGetError());

        glUseProgram (shader_program);
        gluErrorString("use prog",glGetError());

        t=t+0.01f;
        glUniform1f( uniT, (GLfloat)sin(t));
        gluErrorString("set uniform",glGetError());

        float val;
        glGetUniformfv(shader_program,  uniT,  &val);
        gluErrorString("get uniform",glGetError());
        printf("val=%f ",val);

        glBindVertexArray (vao);
        gluErrorString("bind array",glGetError());

        glDrawArrays (GL_TRIANGLE_FAN, 0, 4);
        gluErrorString("draw arrays",glGetError());

        glfwPollEvents ();
        glfwSwapBuffers (window);
        gluErrorString("swap buffers",glGetError());
    }

    glfwTerminate();
    return 0;
}


void checkShader(GLuint status, GLuint shader, const char* which) {
    if (status==GL_TRUE) return;
    int length;
    char buffer[1024];
    glGetShaderInfoLog(shader, sizeof(buffer), &length, buffer);
    fprintf (stderr,"%s Error: %s\n", which,buffer);
    glfwTerminate();
    exit(-1);
}

struct token_string
{
    GLuint Token;
    const char *String;
};

static const struct token_string Errors[] = {
    { GL_NO_ERROR, "no error" },
    { GL_INVALID_ENUM, "invalid enumerant" },
    { GL_INVALID_VALUE, "invalid value" },
    { GL_INVALID_OPERATION, "invalid operation" },
    { GL_STACK_OVERFLOW, "stack overflow" },
    { GL_STACK_UNDERFLOW, "stack underflow" },
    { GL_OUT_OF_MEMORY, "out of memory" },
    { GL_TABLE_TOO_LARGE, "table too large" },
#ifdef GL_EXT_framebuffer_object
    { GL_INVALID_FRAMEBUFFER_OPERATION_EXT, "invalid framebuffer operation" },
#endif

    { ~0, NULL } /* end of list indicator */
};

void gluErrorString(const char* why,GLenum errorCode)
{
    if (errorCode== GL_NO_ERROR) return;
    int i;
    for (i = 0; Errors[i].String; i++) {
        if (Errors[i].Token == errorCode) { 
            fprintf (stderr,"error: %s - %s\n",why,Errors[i].String);
            glfwTerminate();
            exit(-1);            
        }
    }

}

当代码运行时,四边形不停闪烁,就好像 uniform 拿到的是垃圾值;同时读取该 uniform 的值也显示出一些奇怪的数值,比如 36893488147419103232.000000,而它本应只是一个简单的正弦值

您的代码问题仅与 GL 间接相关 - 您的 GL 代码没有问题。

但是,您在使用现代 OpenGL 函数时,没有先把这些函数指针作为扩展加载。这在某些平台上可行,在其他平台上则不行。MacOS 确实保证这些函数由系统的 GL 库导出。在 Windows 上,微软的 opengl32.dll 从不包含 GL 1.1 之后的功能——您的代码在那里根本无法链接。在 Linux 上则介于两者之间:只有那份很旧的 Linux OpenGL ABI 文档保证 OpenGL 1.2 的函数必须由库导出。实际上,Linux 上大多数 GL 实现的库会导出所有函数(但函数存在并不意味着它受支持)。不过,您永远不应该直接链接这些函数,因为没有人对此作出任何保证。

然而,故事并没有就此结束:您显然是在一个会导出这些符号的实现上运行的。但是,您没有包含正确的头文件,而且编译器的警告设置太宽松。在 C 中,调用此前未声明的函数是合法的(但风格很差):编译器会假定它返回 int,并且所有参数都是 int。实际上,您确实在调用这些函数,只是编译器会把参数转换成 int 来传递。

您会注意到,如果您将编译器设置为产生一些警告,例如 gcc 上的 -Wall

a.c: In function ‘main’:
a.c:74: warning: implicit declaration of function ‘glGenVertexArrays’
a.c:75: warning: implicit declaration of function ‘glBindVertexArray’
[...]

但是,代码编译和 links,我可以重现你描述的结果(我在这里使用 Linux/Nvidia)。

要解决此问题,您应该使用 OpenGL 加载库(OpenGL Loader Library)。例如,我用 GLEW 就让您的代码正常工作了。我所要做的只是在文件的最顶部添加

#define GLEW_NO_GLU // because you re-implemented some glu-like functions with a different interface
#include <glew.h>

然后(在创建上下文之后)调用

glewExperimental=GL_TRUE;
if (glewInit() != GLEW_OK) {
    fprintf (stderr, "ERROR: failed to initialize GLEW\n");
    glfwTerminate();
    return 1;
}
glGetError(); // read away error generated by GLEW, it is broken in core profiles...

GLEW headers 包含所有函数的声明,因此不再发生隐式类型转换。 GLEW 可能不是核心配置文件的最佳选择,但我只是使用它,因为这是我最熟悉的加载器。