0 votes

I have a rather weird issue in my code. I added a vec2 named "coord" to hold my texture coordinates; it is supposed to be passed into my vertex shader, passed through the geometry shader, and finally into the fragment shader, where it is used for texture mapping. But for some reason the vec2 input seems to be optimized away: using glVertexAttribPointer to assign to it has no effect, and double-checking its location with glGetUniformLocation(gShaderProgram, "coord"); returns -1, which would indicate that it has been optimized out by the GLSL compiler. Why is that? To my knowledge the compiler only optimizes away values that do not contribute to the shader output, but coord is forwarded as an output in every shader stage. This is a uni assignment, so most of this is new to me.
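
(For reference, one way to see which attributes actually survived linking would be to enumerate the program's active attributes after glLinkProgram; this is just a diagnostic sketch, not part of the code below.)

GLint count = 0;
glGetProgramiv(gShaderProgram, GL_ACTIVE_ATTRIBUTES, &count);
for (GLint i = 0; i < count; i++)
{
    GLchar name[64];
    GLint size = 0;
    GLenum type = 0;
    // glGetActiveAttrib only lists attributes that were not optimized out
    glGetActiveAttrib(gShaderProgram, i, sizeof(name), nullptr, &size, &type, name);
    cout << "active attribute: " << name << endl;
}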

Vertex shader

#version 440

layout(location = 0) in vec3 vertex_position;
layout(location = 1) in vec3 vertex_color;
layout(location = 2) in vec2 coord;

layout(location = 20) uniform vec4 test;

layout(binding = 3, std140) uniform uniformBlock
{
    float v1;
    float v2;
    float v3;
    float v4;
};

uniform mat4 m;
uniform mat4 p;
uniform mat4 v;

out vData
{
    mat4 m;
    mat4 p;
    mat4 v;
    vec4 test;
    vec4 color;
    vec2 tex_coord;
}vertices;

void main() {
    vertices.m = m;
    vertices.p = p;
    vertices.v = v;
    vertices.test = test;
    vertices.tex_coord = coord;
    vertices.color = vec4(vertex_color, 1.0);
    gl_Position = vec4(vertex_position, 1.0);
}

Geometry shader

#version 400
layout (triangles) in;
layout (triangle_strip, max_vertices = 6) out;

in vData
{
    mat4 m;
    mat4 p;
    mat4 v;
    vec4 test;
    vec4 color;
    vec2 tex_coord;
} vertices[];

out fData
{
    vec3 normal;
    vec4 color;
    mat4 v;
    vec2 tex_coord;
} frag;

void main()
{
    vec3 A = gl_in[2].gl_Position.xyz - gl_in[0].gl_Position.xyz;
    vec3 B = gl_in[1].gl_Position.xyz - gl_in[0].gl_Position.xyz;
    vec3 normal = normalize(cross(A,B));
    int i;
    for(i = 0;i < gl_in.length();i++)
    {
        gl_Position = (vertices[i].p * vertices[i].v * vertices[i].m) * gl_in[i].gl_Position;
        frag.normal = normalize(vec3(vertices[i].m * vec4(normal,0)));
        frag.color = vertices[i].color;
        frag.v = vertices[i].v;
        frag.tex_coord = vertices[i].tex_coord;
        EmitVertex();
    }
    EndPrimitive();

    for(i = 0;i < gl_in.length();i++)
    {
        gl_Position = vertices[i].p * vertices[i].v * vertices[i].m * (gl_in[i].gl_Position + vec4(normal,0));
        frag.normal = normalize(vec3(vertices[i].m * vec4(normal,0)));
        frag.color = vertices[i].color;
        frag.v = vertices[i].v;
        frag.tex_coord = vertices[i].tex_coord;
        EmitVertex();
    }
    EndPrimitive();
}

Fragment shader

#version 400

out vec4 fragment_color;
const float PI = 3.14159265;

uniform sampler2D texture0;

in fData
{
    vec3 normal;
    vec4 color;
    mat4 v;
    vec2 tex_coord;
} frag;



void main () {
    vec3 n = normalize(frag.normal);
    float intensity = min(max(dot(n, vec3(0,0,-1)), 0.0), 1.0);

    //fragment_color = frag.color * intensity * frag.tex_coord.s;
    fragment_color = texture2D(texture0, frag.tex_coord.st);
}

main.cpp

#include <vector>
#include <windows.h>
#include <iostream>
#include <string>
#include <fstream>
#include <streambuf>
#include <chrono>
#include <gl/glew.h>
#include <gl/GL.h>
# define M_PI           3.14159265358979323846
#include "glm\glm.hpp"
#include "glm\gtc\matrix_transform.hpp"
#include "bth_image.h"
#pragma comment(lib, "opengl32.lib")
#pragma comment(lib, "glew32.lib")

using namespace std;
using namespace glm;
HWND InitWindow(HINSTANCE hInstance);
LRESULT CALLBACK WndProc(HWND, UINT, WPARAM, LPARAM);
HGLRC CreateOpenGLContext(HWND wndHandle);

GLuint gVertexBuffer = 0;
GLuint gVertexAttribute = 0;
GLuint gShaderProgram = 0;
GLuint textures[1];
mat4x4 view;
mat4x4 world;
mat4x4 projection;
float DT;

struct CPUvalues
{
    float v1;
    float v2;
    float v3;
    float v4;
};

CPUvalues Gv = { 0.5, 0, 0, 0 };

GLuint gu = 0;

#define BUFFER_OFFSET(i) ((char *)nullptr + (i))

void CreateShaders()
{
    GLuint vs = glCreateShader(GL_VERTEX_SHADER);
    ifstream shaderFile("VertexShader.glsl");
    std::string shaderText((std::istreambuf_iterator<char>(shaderFile)), std::istreambuf_iterator<char>());
    shaderFile.close();
    const char* shaderTextPtr = shaderText.c_str();
    glShaderSource(vs, 1, &shaderTextPtr, nullptr);
    glCompileShader(vs);

    //create fragment shader | same process.
    GLuint fs = glCreateShader(GL_FRAGMENT_SHADER);
    shaderFile.open("Fragment.glsl");
    shaderText.assign((std::istreambuf_iterator<char>(shaderFile)), std::istreambuf_iterator<char>());
    shaderFile.close();
    shaderTextPtr = shaderText.c_str();
    glShaderSource(fs, 1, &shaderTextPtr, nullptr);
    glCompileShader(fs);

    GLuint gs = glCreateShader(GL_GEOMETRY_SHADER);
    shaderFile.open("GMshader.glsl");
    shaderText.assign((std::istreambuf_iterator<char>(shaderFile)), std::istreambuf_iterator<char>());
    shaderFile.close();
    shaderTextPtr = shaderText.c_str();
    glShaderSource(gs, 1, &shaderTextPtr, nullptr);
    glCompileShader(gs);


    GLint success = 0;
    glGetShaderiv(gs, GL_COMPILE_STATUS, &success);
    if (success == GL_FALSE)
    {
        GLint logSize = 0;
        glGetShaderiv(gs, GL_INFO_LOG_LENGTH, &logSize);
        std::vector<GLchar> errorLog(logSize);
        glGetShaderInfoLog(gs, logSize, &logSize, &errorLog[0]);
        for (int i = 0; i < errorLog.size(); i++)
        {
            cout << errorLog.at(i);
        }
    }
    //link shader program (connect vs, gs and fs)
    gShaderProgram = glCreateProgram();
    glAttachShader(gShaderProgram, fs);
    glAttachShader(gShaderProgram, gs);
    glAttachShader(gShaderProgram, vs);

    glBindAttribLocation(gShaderProgram, 1, "vertices");

    glEnable(GL_TEXTURE_2D);
    glGenTextures(1, textures);
    glBindTexture(GL_TEXTURE_2D, textures[0]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, BTH_IMAGE_WIDTH, BTH_IMAGE_HEIGHT, 0, GL_RGBA, GL_UNSIGNED_BYTE, (GLvoid*)BTH_IMAGE_DATA);


    glLinkProgram(gShaderProgram);



    GLint isLinked = 0;
    glGetProgramiv(gShaderProgram, GL_LINK_STATUS, &isLinked);
    if (isLinked == GL_FALSE)
    {
        GLint maxLength = 0;
        glGetProgramiv(gShaderProgram, GL_INFO_LOG_LENGTH, &maxLength);
        std::vector<GLchar> infoLog(maxLength);
        glGetProgramInfoLog(gShaderProgram, maxLength, &maxLength, &infoLog[0]);
        for (GLint i = 0; i < maxLength; i++)
        {
            cout << infoLog.at(i);
        }
    }
}

void CreateTriangleData()
{
    // this is how we will structure the input data for the vertex shader
    // every eight floats is one vertex.

    struct TriangleVertex
    {
        float x, y, z;
        float r, g, b;
        float s, t;
    };
    // create the actual data in plane Z = 0
    TriangleVertex triangleVertices[6] = 
    {
        // position, color and texture coordinates for each vertex
        { -0.5f, 0.5f, 0.0f,    1.0f, 0.0f, 0.0f,   1.0f, 0.0f },
        { 0.5f, -0.5f, 0.0f,    0.0f, 1.0f, 0.0f,   1.0f, 0.0f },
        { -0.5f, -0.5f, 0.0f,   0.0f, 0.0f, 1.0f,   1.0f, 0.0f },

        { 0.5f, 0.5f, 0.0f,     0.0f, 0.0f, 1.0f,   1.0f, 0.0f },
        { 0.5f, -0.5f, 0.0f,    0.0f, 1.0f, 0.0f,   1.0f, 0.0f },
        { -0.5f, 0.5f, 0.0f,    1.0f, 0.0f, 0.0f,   1.0f, 0.0f }


    };
    // Vertex Array Object (VAO) 
    glGenVertexArrays(1, &gVertexAttribute);
    // bind == enable
    glBindVertexArray(gVertexAttribute);
    // this activates the first and second attributes of this VAO
    glEnableVertexAttribArray(0); 
    glEnableVertexAttribArray(1);

    // create a vertex buffer object (VBO) id
    glGenBuffers(1, &gVertexBuffer);
    // Bind the buffer ID as an ARRAY_BUFFER
    glBindBuffer(GL_ARRAY_BUFFER, gVertexBuffer);
    // This "could" imply copying to the GPU, depending on what the driver wants to do...
    glBufferData(GL_ARRAY_BUFFER, sizeof(triangleVertices), triangleVertices, GL_STATIC_DRAW);

    // query which slot corresponds to the input vertex_position in the Vertex Shader
    GLint vertexPos = glGetAttribLocation(gShaderProgram, "vertex_position");
    // specify that: the vertex attribute "vertexPos", of 3 elements of type FLOAT, not normalized, with STRIDE != 0,
    //               starts at offset 0 of the gVertexBuffer (it is implicitly bound!)
    glVertexAttribPointer(vertexPos, 3,    GL_FLOAT, GL_FALSE,     sizeof(TriangleVertex), BUFFER_OFFSET(0));

    // query which slot corresponds to the input vertex_color in the Vertex Shader
    GLint vertexColor = glGetAttribLocation(gShaderProgram, "vertex_color");
    cout << "vertex pos " << vertexPos << endl;
    cout << "vertex color " << vertexColor << endl;
    // specify that: the vertex attribute "vertex_color", of 3 elements of type FLOAT, not normalized, with STRIDE != 0,
    //               starts at offset (12 bytes) of the gVertexBuffer 
    glVertexAttribPointer(vertexColor, 3,    GL_FLOAT, GL_FALSE,     sizeof(TriangleVertex), BUFFER_OFFSET(sizeof(float)*3));
    cout << glGetError() << endl;



    GLint tex_coord = glGetUniformLocation(gShaderProgram, "coord");
    cout << "coord " << tex_coord << endl;
    glVertexAttribPointer(tex_coord, 2, GL_FLOAT, GL_FALSE, sizeof(TriangleVertex), BUFFER_OFFSET(sizeof(float) * 6));
    cout << glGetError() << endl;
}

void SetViewport()
{
    glViewport(0, 0, 640, 480);
}

void Render()
{
    // set the color TO BE used
    glClearColor(0, 0, 0, 1);
    // use the color to clear the color buffer
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);


    glUseProgram(gShaderProgram);
    glBindVertexArray(gVertexAttribute);

    glGenBuffers(1, &gu);
    glBindBuffer(GL_UNIFORM_BUFFER, gu);



    glBufferSubData(GL_UNIFORM_BUFFER, 0, sizeof(CPUvalues), &Gv);

    GLint OG_loc = glGetUniformLocation(gShaderProgram, "test");

    GLint _p = glGetUniformLocation(gShaderProgram, "p");
    GLint _m = glGetUniformLocation(gShaderProgram, "m");
    GLint _v = glGetUniformLocation(gShaderProgram, "v");


    GLuint unit = 0;
    GLint texture0 = glGetUniformLocation(gShaderProgram, "texture0");
    //cout << glGetUniformLocation(gShaderProgram, "coord") << endl;;
    glActiveTexture(GL_TEXTURE0 + unit);
    glBindTexture(GL_TEXTURE_2D, textures[0]);
    glUniform1i(texture0, unit);
    glUniformMatrix4fv(_p, 1, GL_FALSE, &projection[0][0]);
    glUniformMatrix4fv(_m, 1, GL_FALSE, &world[0][0]);
    glUniformMatrix4fv(_v, 1, GL_FALSE, &view[0][0]);

    glUniform4f(OG_loc, Gv.v1, Gv.v2, Gv.v3, Gv.v4);
    // draw 6 vertices starting from index 0 in the vertex array currently bound (VAO), with the current in-use shader
    glEnable(GL_DEPTH_TEST);
    glEnable(GL_CULL_FACE);

    glFrontFace(GL_CW);
    glCullFace(GL_BACK);
    glDrawArrays(GL_TRIANGLES, 0, 6);

}

int WINAPI wWinMain( HINSTANCE hInstance, HINSTANCE hPrevInstance, LPWSTR lpCmdLine, int nCmdShow )
{
    AllocConsole();
    freopen("CONOUT$", "w", stdout);
    DT = 0.016;
    MSG msg = { 0 };
    HWND wndHandle = InitWindow(hInstance); //1. Create the window
    if (wndHandle)
    {
        HDC hDC = GetDC(wndHandle);

        HGLRC hRC = CreateOpenGLContext(wndHandle); //2. Create and attach the OpenGL context

        glewInit(); //3. Initialize the OpenGL Extension Wrangler Library (GLEW)

        SetViewport(); //4. Set the viewport

        CreateShaders(); //5. Create the vertex and fragment shaders

        CreateTriangleData(); //6. Define triangle vertices, 7. Create vertex buffer object (VBO), 8. Create vertex array object (VAO)

        ShowWindow(wndHandle, nCmdShow);
        view = lookAt(vec3(0, 0, -2), vec3(0, 0, 0), vec3(0, 1, 0));
        mat4x4 sm;
        sm = scale(sm, vec3(1, 1, 1));
        mat4x4 tm;
        tm = translate(tm, vec3(0, 0, 0));
        mat4x4 rm;
        rm = mat4x4(1);
        projection = perspective<float>(M_PI*0.45, 640 / 480, 0.1, 20);


        while (WM_QUIT != msg.message)
        {
            auto start_time = chrono::high_resolution_clock::now();
            //Gv.v1 += 0.05 * DT;

            Gv.v1 += 1 * DT;
            world = tm * rotate(rm, Gv.v1, vec3(0, 1, 0)) * sm;


            if (PeekMessage(&msg, nullptr, 0, 0, PM_REMOVE))
            {
                TranslateMessage(&msg);
                DispatchMessage(&msg);
            }
            else
            {
                Render(); //9. Render
                SwapBuffers(hDC); //10. Swap the front and back buffers
            }
            auto final_time = chrono::high_resolution_clock::now() - start_time;
            DT = chrono::duration_cast<std::chrono::milliseconds>(final_time).count() / (double)1000;
        }

        wglMakeCurrent(NULL, NULL);
        ReleaseDC(wndHandle, hDC);
        wglDeleteContext(hRC);
        DestroyWindow(wndHandle);
    }

    return (int) msg.wParam;
}

HWND InitWindow(HINSTANCE hInstance)
{
    WNDCLASSEX wcex = { 0 };
    wcex.cbSize = sizeof(WNDCLASSEX); 
    wcex.style          = CS_HREDRAW | CS_VREDRAW;
    wcex.lpfnWndProc    = WndProc;
    wcex.hInstance      = hInstance;
    wcex.lpszClassName = L"BTH_GL_DEMO";
    if( !RegisterClassEx(&wcex) )
        return false;

    RECT rc = { 0, 0, 640, 480 };
    AdjustWindowRect( &rc, WS_OVERLAPPEDWINDOW, FALSE );

    HWND handle = CreateWindow(
        L"BTH_GL_DEMO",
        L"BTH OpenGL Demo",
        WS_OVERLAPPEDWINDOW,
        CW_USEDEFAULT,
        CW_USEDEFAULT,
        rc.right - rc.left,
        rc.bottom - rc.top,
        nullptr,
        nullptr,
        hInstance,
        nullptr);

    return handle;
}

LRESULT CALLBACK WndProc( HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam )
{
    switch (message) 
    {
    case WM_DESTROY:
        PostQuitMessage(0);
        break;      
    }

    return DefWindowProc(hWnd, message, wParam, lParam);
}

HGLRC CreateOpenGLContext(HWND wndHandle)
{
    //get handle to a device context (DC) for the client area
    //of a specified window or for the entire screen
    HDC hDC = GetDC(wndHandle);

    //details: http://msdn.microsoft.com/en-us/library/windows/desktop/dd318286(v=vs.85).aspx
    static  PIXELFORMATDESCRIPTOR pixelFormatDesc =
    {
        sizeof(PIXELFORMATDESCRIPTOR),    // size of this pfd  
        1,                                // version number  
        PFD_DRAW_TO_WINDOW |              // support window  
        PFD_SUPPORT_OPENGL |              // support OpenGL  
        PFD_DOUBLEBUFFER,                 // double buffered        
        PFD_TYPE_RGBA,                    // RGBA type  
        32,                               // 32-bit color depth  
        0, 0, 0, 0, 0, 0,                 // color bits ignored  
        0,                                // no alpha buffer  
        0,                                // shift bit ignored  
        0,                                // no accumulation buffer  
        0, 0, 0, 0,                       // accum bits ignored  
        0,                                // 0-bits for depth buffer <-- modified by Stefan      
        0,                                // no stencil buffer  
        0,                                // no auxiliary buffer  
        PFD_MAIN_PLANE,                   // main layer  
        0,                                // reserved  
        0, 0, 0                           // layer masks ignored  
    };

    //attempt to match an appropriate pixel format supported by a
    //device context to a given pixel format specification.
    int pixelFormat = ChoosePixelFormat(hDC, &pixelFormatDesc);

    //set the pixel format of the specified device context
    //to the format specified by the iPixelFormat index.
    SetPixelFormat(hDC, pixelFormat, &pixelFormatDesc);

    //create a new OpenGL rendering context, which is suitable for drawing
    //on the device referenced by hdc. The rendering context has the same
    //pixel format as the device context.
    HGLRC hRC = wglCreateContext(hDC);

    //makes a specified OpenGL rendering context the calling thread's current
    //rendering context. All subsequent OpenGL calls made by the thread are
    //drawn on the device identified by hdc. 
    wglMakeCurrent(hDC, hRC);

    return hRC;
}
Comment: Note that coord is not a uniform, thus glGetUniformLocation(gShaderProgram, "coord") will always return -1 -- even if coord is used. – G.M.

1 Answer

1 vote

You are trying to query an attribute location with glGetUniformLocation, which only looks up uniforms; attributes live in a separate namespace, so it returns -1 for them regardless of whether coord is active. Use glGetAttribLocation instead.
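
A minimal sketch of the corrected setup in CreateTriangleData (assuming the same TriangleVertex layout and that the VAO is still bound) could look like this; note that the attribute index also has to be enabled, which the current code only does for indices 0 and 1:

GLint tex_coord = glGetAttribLocation(gShaderProgram, "coord"); // attribute, not uniform
cout << "coord " << tex_coord << endl; // should now print 2, the explicit layout location
if (tex_coord != -1)
{
    // enable the attribute array for this index on the currently bound VAO
    glEnableVertexAttribArray(tex_coord);
    // two floats per vertex, starting 6 floats (24 bytes) into each TriangleVertex
    glVertexAttribPointer(tex_coord, 2, GL_FLOAT, GL_FALSE,
                          sizeof(TriangleVertex), BUFFER_OFFSET(sizeof(float) * 6));
}

With that in place the texture coordinates should reach the vertex shader and flow through the vData/fData blocks as already written.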