1
votes

I was writing a shader in GLSL, when I ran into an issue where a feature I needed isn't available in the version I am using. I ran glxinfo | grep 'OpenGL' and got this output:

OpenGL vendor string: Intel Open Source Technology Center
OpenGL renderer string: Mesa DRI Intel(R) Sandybridge Mobile 
OpenGL core profile version string: 3.3 (Core Profile) Mesa 18.3.6
OpenGL core profile shading language version string: 3.30
OpenGL core profile context flags: (none)
OpenGL core profile profile mask: core profile
OpenGL core profile extensions:
OpenGL version string: 3.0 Mesa 18.3.6
OpenGL shading language version string: 1.30
OpenGL context flags: (none)
OpenGL extensions:
OpenGL ES profile version string: OpenGL ES 3.0 Mesa 18.3.6
OpenGL ES profile shading language version string: OpenGL ES GLSL ES 3.00

According to the output, I have access to OpenGL core 3.3 and GLSL core 3.30, as shown here:

OpenGL core profile version string: 3.3 (Core Profile) Mesa 18.3.6
OpenGL core profile shading language version string: 3.30

The only problem is, whenever I try using GLSL core 330, I get an error that says: GLSL 3.30 is not supported. Supported versions are: 1.10, 1.20, 1.30, 1.00 ES, and 3.00 ES.

I am creating an OpenGL context with GLFW. The line that creates the context is: glfwMakeContextCurrent(window);

Does anyone know a way to allow me to access OpenGL core or somehow update the drivers to give me access to later versions of OpenGL?

____________________________________________________________________________

Extra Information:

Format: Code : output

glGetString(GL_VERSION) : 3.0 Mesa 18.3.6
glGetString (GL_SHADING_LANGUAGE_VERSION) : 1.30

Shaders:

    vertex shader:
    #version 330 core
    
    layout(location = 0) in vec4 position;
    
    // Pass-through vertex stage: forward the attribute unchanged.
    // NOTE: the stray ';' after the closing brace was removed — an empty
    // declaration at global scope is invalid in GLSL before 4.20 and some
    // compilers reject it.
    void main(){
       gl_Position = position;
    }
    


    fragment shader:
    #version 330 core
    
    layout(location = 0) out vec4 color;
    
    // Write a constant opaque red to the single color output.
    // NOTE: the stray ';' after the closing brace was removed — an empty
    // declaration at global scope is invalid in GLSL before 4.20 and some
    // compilers reject it.
    void main(){
       color = vec4(1.0, 0.0, 0.0, 1.0);
    }

This is the code I am trying to run:

#include <GL/glew.h>    // must be included before any other GL header
#include <GLFW/glfw3.h>

#include <iostream>
#include <string>  // std::string parameters of the shader helpers
#include <vector>  // std::vector<char> info-log buffer in CompileShader

// Compiles one shader stage.
//
// type   - GL_VERTEX_SHADER or GL_FRAGMENT_SHADER.
// source - complete GLSL source text for that stage.
//
// Returns the GL shader object id on success, or 0 on failure (the
// failed object is deleted and its info log printed to stdout).
static unsigned int CompileShader(unsigned int type, const std::string& source){
    unsigned int id = glCreateShader(type);
    const char* src = source.c_str();
    glShaderSource(id, 1, &src, nullptr);
    glCompileShader(id);
    
    int result;
    glGetShaderiv(id, GL_COMPILE_STATUS, &result);
    if(result == GL_FALSE){
        int length;
        glGetShaderiv(id, GL_INFO_LOG_LENGTH, &length);
        // Heap-backed buffer instead of alloca(): alloca is non-standard
        // and a driver-sized log could overflow the stack. Size >= 1 so
        // data() is always a valid, NUL-terminated C string.
        std::vector<char> message(length > 0 ? length : 1, '\0');
        glGetShaderInfoLog(id, length, &length, message.data());
        std::cout << "Failed to compile " << (type == GL_VERTEX_SHADER ? "vertex" : "fragment") << " shader!" << std::endl;
        std::cout << message.data() << std::endl;
        glDeleteShader(id);
        return 0;
    }
    
    return id;
}

// Links a vertex and a fragment shader into a new program object.
//
// vertexshader   - GLSL source for the vertex stage.
// fragmentshader - GLSL source for the fragment stage.
//
// Returns the id of the linked (and validated) program.
static unsigned int CreateShader(const std::string& vertexshader, const std::string& fragmentshader){
    // Compile both stages first; CompileShader reports its own errors.
    const unsigned int vertexId   = CompileShader(GL_VERTEX_SHADER, vertexshader);
    const unsigned int fragmentId = CompileShader(GL_FRAGMENT_SHADER, fragmentshader);
    
    const unsigned int programId = glCreateProgram();
    glAttachShader(programId, vertexId);
    glAttachShader(programId, fragmentId);
    glLinkProgram(programId);
    glValidateProgram(programId);
    
    // The program keeps the linked binary, so the intermediate stage
    // objects can be released immediately.
    glDeleteShader(vertexId);
    glDeleteShader(fragmentId);
    
    return programId;
}

int main(void)
{
    GLFWwindow* window;

    /* Initialize the library */
    if (!glfwInit())
        return -1;
        
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

    /* Create a windowed mode window and its OpenGL context */
    window = glfwCreateWindow(640, 480, "Hello World", NULL, NULL);
    if (!window)
    {
        glfwTerminate();
        return -1;
    }

    /* Make the window's context  current */
    glfwMakeContextCurrent(window);
    
    if(glewInit() != GLEW_OK){
        std::cout << "REEEEEEEEEEE" << std::endl;
    }
    
    std::cout << glGetString(GL_VERSION) << std::endl;
    std::cout << glGetString (GL_SHADING_LANGUAGE_VERSION) << std::endl;

    
    float vertices[6] = {
        -0.5f, -0.5f,
         0.0f,  0.5f,
        -0.5f, -0.5f
    };
    
    unsigned int buf;
    glGenBuffers(1, &buf);
    glBindBuffer(GL_ARRAY_BUFFER, buf);
    glBufferData(GL_ARRAY_BUFFER, 6 * sizeof(float), vertices, GL_STATIC_DRAW);
    
    glEnableVertexAttribArray(buf); // try replacing buf with 0
    glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 2, 0);
    
    std::string vertexshader = 
    "#version 330 core\n"
    "\n"
    "layout(location = 0) in vec4 position;\n"
    "\n"
    "void main(){\n"
    "   gl_Position = position;\n"
    "}\n";
    
    std::string fragmentshader = 
    "#version 330 core\n"
    "\n"
    "out vec4 color;\n"
    "\n"
    "void main(){\n"
    "   color = vec4(1.0, 0.0, 0.0, 1.0);\n"
    "}\n";
    
    unsigned int shader = CreateShader(vertexshader, fragmentshader);
    glUseProgram(shader);
    
    /* Loop until the user closes the window */
    while (!glfwWindowShouldClose(window))
    {
        /* Render here */
        glClear(GL_COLOR_BUFFER_BIT);
        
        glDrawArrays(GL_TRIANGLES, 0, 3);

        /* Swap front and back buffers */
        glfwSwapBuffers(window);

        /* Poll for and process events */
        glfwPollEvents();
    }

    glfwTerminate();
    return 0;
}

OS: LINUX DEBIAN

1
How do you create your window & GL context? The library you use for it has to have a way to opt-in for the core profile.HolyBlackCat
@HolyBlackCat I've added the information to the question.User-92
The line that creates the context is: glfwMakeContextCurrent(window); - no. This just makes the (already created context) current for the calling thread. Could you not just simply show the whole GLFW window creation code?Kai Burjack

1 Answer

0
votes

By adding

glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

before the window creation, I was able to get it working. If you don't know what MAJOR and MINOR mean, see here: What does major and minor mean in OpenGL with GLFW?

TLDR: "Major" and "minor" are two components of a single version number, separated by a dot.

So with the major and minor version both set to three, I was able to request OpenGL 3.3 Core Profile.

Now when I run:

std::cout << glGetString(GL_VERSION) << std::endl;
std::cout << glGetString (GL_SHADING_LANGUAGE_VERSION) << std::endl;

I get:

3.3 (Core Profile) Mesa 18.3.6
3.30