
I'm trying to draw a triangle with VBOs, but no pixels appear in the window, and no GL_ERROR is reported either.

Here is the code I am trying to run:

#include <stdio.h>
#include <GL/glew.h>
#include <GL/freeglut.h>
#include "ogldev_math_3d.h"

GLuint VBO;
int idx = 0;

static void RenderSceneCB()
{
    glClear(GL_COLOR_BUFFER_BIT);
    printf("Error(%d): %s\n", idx, gluErrorString(glGetError())); idx++;

    glEnableVertexAttribArray(0);
    printf("Error(%d): %s\n", idx, gluErrorString(glGetError())); idx++;

    glBindBuffer(GL_ARRAY_BUFFER, VBO);
    printf("Error(%d): %s\n", idx, gluErrorString(glGetError())); idx++;

    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);
    printf("Error(%d): %s\n", idx, gluErrorString(glGetError())); idx++;

    // Read the first 9 floats (36 bytes) back from the bound VBO to verify its contents.
    GLfloat data[10];
    glGetBufferSubData(GL_ARRAY_BUFFER, 0, 36, data);
    printf("Error(%d): %s\n", idx, gluErrorString(glGetError())); idx++;
    for (int i = 0; i < 9; i++) {
        fprintf(stdout, "data[%d]:%f\n", i, data[i]);
    }

    glDrawArrays(GL_TRIANGLES, 0, 3);
    printf("Error(%d): %s\n", idx, gluErrorString(glGetError())); idx++;

    glDisableVertexAttribArray(0);
    printf("Error(%d): %s\n", idx, gluErrorString(glGetError())); idx++;

    glutSwapBuffers();
    printf("Error(%d): %s\n", idx, gluErrorString(glGetError())); idx++;
}


static void InitializeGlutCallbacks()
{
    glutDisplayFunc(RenderSceneCB);
}

static void CreateVertexBuffer()
{
    Vector3f Vertices[3];
    Vertices[0] = Vector3f(-1.0f, -1.0f, 0.0f);
    Vertices[1] = Vector3f(1.0f, -1.0f, 0.0f);
    Vertices[2] = Vector3f(0.0f, 1.0f, 0.0f);

    glGenBuffers(1, &VBO);
    printf("Error(%d): %s\n", idx, gluErrorString(glGetError())); idx++;
    glBindBuffer(GL_ARRAY_BUFFER, VBO);
    printf("Error(%d): %s\n", idx, gluErrorString(glGetError())); idx++;
    glBufferData(GL_ARRAY_BUFFER, sizeof(Vertices), Vertices, GL_STATIC_DRAW);
    printf("Error(%d): %s\n", idx, gluErrorString(glGetError())); idx++;
}


int main(int argc, char** argv)
{
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH);
    glutInitWindowSize(1024, 768);
    glutInitWindowPosition(100, 100);
    glutCreateWindow("Tutorial 03");

    InitializeGlutCallbacks();

    // Must be done after glut is initialized!
    GLenum res = glewInit();
    if (res != GLEW_OK) {
      fprintf(stderr, "Error: '%s'\n", glewGetErrorString(res));
      return 1;
    }
    const GLubyte* renderer = glGetString(GL_RENDERER); // get renderer string
    const GLubyte* version = glGetString(GL_VERSION);   // version as a string
    printf("Renderer: %s\n", renderer);
    printf("OpenGL version supported %s\n", version);
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    printf("Error(%d): %s\n", idx, gluErrorString(glGetError())); idx++;

    CreateVertexBuffer();

    glutMainLoop();

    return 0;
}

Since every error check reports GL_NO_ERROR, I am not sure what is wrong with this code. A simple non-VBO draw call works; for example, the following snippet:

GLfloat myvertices[9] =
{
    -1.0f, -1.0f, 0.0f,
     1.0f, -1.0f, 0.0f,
     0.0f,  1.0f, 0.0f
};

glEnableClientState(GL_VERTEX_ARRAY);
glVertexPointer(3, GL_FLOAT, 0, myvertices);
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableClientState(GL_VERTEX_ARRAY);

Help me spot the mistake in my code. The OpenGL version reported is 2.1 and the renderer is a software rasterizer. The code is compiled for 64-bit Linux.


1 Answer


When not using shaders (as I assume you aren't), you cannot use the glEnableVertexAttribArray/glVertexAttribPointer functions.
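If you did want to keep those calls, you would first have to compile, link, and bind an actual shader program. As a rough sketch of what that could look like on your GL 2.1 context (GLSL 1.20; compile/link error checking is omitted for brevity, and the BuildMinimalProgram/Position names are just illustrations), called once after glewInit():

static GLuint BuildMinimalProgram()
{
    // Pass the vertex position straight through; paint every fragment white.
    const char* vsSrc =
        "#version 120\n"
        "attribute vec3 Position;\n"
        "void main() { gl_Position = vec4(Position, 1.0); }\n";
    const char* fsSrc =
        "#version 120\n"
        "void main() { gl_FragColor = vec4(1.0); }\n";

    GLuint vs = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(vs, 1, &vsSrc, NULL);
    glCompileShader(vs);

    GLuint fs = glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(fs, 1, &fsSrc, NULL);
    glCompileShader(fs);

    GLuint prog = glCreateProgram();
    glAttachShader(prog, vs);
    glAttachShader(prog, fs);
    // Tie attribute index 0 (the index you pass to glVertexAttribPointer) to Position.
    glBindAttribLocation(prog, 0, "Position");
    glLinkProgram(prog);
    glUseProgram(prog);
    return prog;
}

With a program like that bound, attribute index 0 feeds the Position input and your original render code should start producing pixels.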

What you can do instead is use glVertexPointer, which reads from the VBO when one is bound. In that case the last parameter specifies the offset into the buffer, which is 0 in your case. (reference)

Something like this should do the trick:

glClear(GL_COLOR_BUFFER_BIT);
glEnableClientState(GL_VERTEX_ARRAY);  // fixed-function vertex arrays
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glVertexPointer(3, GL_FLOAT, 0, 0);    // last argument is the byte offset into the bound VBO
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableClientState(GL_VERTEX_ARRAY);
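Note that this replaces the body of your RenderSceneCB, so keep the glutSwapBuffers() call at the end; without it the drawn frame is never presented.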