3
votes

I've created a shape in Blender (I've made sure to triangulate faces and add normals) which I've exported in .obj format to use in an openframeworks project. I've also wrote a small class to parse this .obj file. The shape is drawing perfectly, but I can't seem to get the normals to apply correctly.

The export only contains vertices, normals, and faces. As dictated by the .obj format: I add vertices to a mesh and use the supplied indices to draw faces. The normals, and indices for normals, are supplied in a similar fashion. Indicated by the image below, I'm not applying them correctly.

enter image description here

Here's my model loader script:

modelLoader.h

#ifndef _WAVEFRONTLOADER
#define _WAVEFRONTLOADER

#include "ofMain.h"

class waveFrontLoader {

public:

ofMesh mesh;

waveFrontLoader();
~waveFrontLoader();

void loadFile(char *fileName);
ofMesh generateMesh();

private:

typedef struct
{
    ofIndexType v1,v2,v3;
    ofIndexType vn1,vn2,vn3;
}
Index;

std::vector<ofVec3f> vertices;
std::vector<ofVec3f> normals;
std::vector<Index> indices;

void parseLine(char *line);
void parseVertex(char *line);
void parseNormal(char *line);
void parseFace(char *line);
};

#endif

modelLoader.cpp

#include "waveFrontLoader.h"

waveFrontLoader::waveFrontLoader()
{

}

void waveFrontLoader::loadFile(char *fileName)
{
ifstream file;
char line[255];

//open file in openframeworks data folder
file.open(ofToDataPath(fileName).c_str());

if (file.is_open())
{
    while (file.getline(line,255))
    {
        parseLine(line);
    }
}
}

void waveFrontLoader::parseLine(char *line)
{
//If empty, don't do anything with it
if(!strlen(line))
{
    return;
}

//get line type identifier from char string
char *lineType = strtok(strdup(line), " ");

//parse line depending on type
if (!strcmp(lineType, "v")) // Vertices
{
    parseVertex(line);
}
else if (!strcmp(lineType, "vn")) // Normals
{
    parseNormal(line);
}
else if (!strcmp(lineType, "f")) // Indices (Faces)
{
    parseFace(line);
}
}

void waveFrontLoader::parseVertex(char *line)
{
float x;
float y;
float z;

vertices.push_back(ofVec3f(x,y,z));

//get coordinates from vertex line and assign
sscanf(line, "v %f %f %f", &vertices.back().x, &vertices.back().y, &vertices.back().z);
}

void waveFrontLoader::parseNormal(char *line)
{
float x;
float y;
float z;

normals.push_back(ofVec3f(x,y,z));

//get coordinates from normal line and assign
sscanf(line, "vn %f %f %f", &normals.back().x, &normals.back().y, &normals.back().z);
}

void waveFrontLoader::parseFace(char *line)
{
indices.push_back(Index());

//get vertex and normal indices
sscanf(line, "f %d//%d %d//%d %d//%d",
       &indices.back().v1,
       &indices.back().vn1,
       &indices.back().v2,
       &indices.back().vn2,
       &indices.back().v3,
       &indices.back().vn3);
}

ofMesh waveFrontLoader::generateMesh()
{
//add vertices to mesh
for (std::vector<ofVec3f>::iterator i = vertices.begin(); i != vertices.end(); ++i)
{
    mesh.addVertex(*i);
}

//add indices to mesh
for (std::vector<Index>::iterator i = indices.begin(); i != indices.end(); ++i)
{
    // -1 to count from 0
    mesh.addIndex((i->v1) - 1);
    mesh.addIndex((i->v2) - 1);
    mesh.addIndex((i->v3) - 1);

    mesh.addNormal(normals[(i->vn1) - 1]);
    mesh.addNormal(normals[(i->vn2) - 1]);
    mesh.addNormal(normals[(i->vn3) - 1]);

}

return mesh;
}

waveFrontLoader::~waveFrontLoader()
{

}

I've tried adding normals like this as well (makes sense since it's one normal per face):

mesh.addNormal(normals[(i->vn1) - 1]);

I've also tried adding normals only once per two triangles being drawn, and tried adding indices and normals before the indices. Neither of those worked either.

testApp.h

#pragma once

#include "ofMain.h"
#include "waveFrontLoader.h"

class testApp : public ofBaseApp{

public:
    void setup();
    void update();
    void draw();
    void exit();

    waveFrontLoader *objectLoader;
    ofMesh mesh;
    ofEasyCam camera;
    ofLight light;
};

testApp.cpp

#include "testApp.h"

//--------------------------------------------------------------
void testApp::setup()
{
glEnable(GL_DEPTH_TEST);
glEnable(GL_LIGHTING);

ofBackground(10, 10, 10);

camera.setDistance(10);
light.setPosition(10,30,-25);

objectLoader = new waveFrontLoader();
objectLoader->loadFile("test.obj");
mesh = objectLoader->generateMesh();
}

//--------------------------------------------------------------
void testApp::update()
{

}

//--------------------------------------------------------------
void testApp::draw()
{
camera.begin();
light.enable();
mesh.draw();
light.disable();
camera.end();
}

void testApp::exit()
{
delete objectLoader;
}
2
Post your OpenGL related code that draws the model.kbirk
Added main drawing classanthony
Can you post the code from ofMesh::draw? I want to look at the format you are submitting your vertex data.kbirk
ofMesh is a built in openframeworks class. The source is available here: github.com/openframeworks/openFrameworks/blob/master/libs/…anthony

2 Answers

1
votes

I think this may be an indexing error due to the indices of your position and normal arrays not corresponding. Typically when creating vertex buffers with OpenGL, all vertex attribute (position, normal, texcoord, etc) arrays must be of the same length. Therefore when a triangle is defined as indices [0, 1, 2], it will use the positions [v0, v1, v2] and normals [n0, n1, n2].

Let's use a cube as an example to look over whats happening in your code.

The cube .obj file would contain:

- 8 positions, lets call them v0 to v7
- 6 normals, lets call them n0 to n5
- 12 faces/triangles of format v//n v//n v//n, called f0 to f11

In your generateMesh() code you would submit a vertex array of:

[ v0, v1, v2, v3, v4, v5, v6, v7 ] // length of 8

an index array of:

[ f0.a, f0.b, f0.c, .... f11.a, f11.b, f11.c ] // length of 36

and a normal array of

[ n0, n0, n0, n0, n0, n0, n1, n1, n1, ... n5, n5, n5 ] // length of 36.

In this example, triangle index values would range [0 to 7] for positions and [0 to 5] for normals. This works out for your vertices submitted, but your submitted normals range from [ 0 to 31 ].

Try generating your ofMesh using the following code which assembles unified vertex arrays with corresponding vertex and normal indices:

ofMesh waveFrontLoader::generateMesh()
{
    int indexCount = 0;
    for (std::vector<Index>::iterator i = indices.begin(); i != indices.end(); ++i)
    {
        // add face of positions, -1 to count from 0
        mesh.addVertex(vertices[(i->v1) - 1]);
        mesh.addVertex(vertices[(i->v2) - 1]);
        mesh.addVertex(vertices[(i->v3) - 1]);

        // add face of normals, -1 to count from 0
        mesh.addNormal(normals[(i->vn1) - 1]);
        mesh.addNormal(normals[(i->vn2) - 1]);
        mesh.addNormal(normals[(i->vn3) - 1]);

        // in this code we are defining our vertex arrays 
        // according to the indices, so they will always
        // be [0 to n]
        mesh.addIndex( indexCount++ );
        mesh.addIndex( indexCount++ );
        mesh.addIndex( indexCount++ );   
    }
}

Now obviously this function does not result in arrays of minimal size (in the cube example there are 32 vertices in the array, but only 24 of those are of unique position/normal pairings), but will allow a quick test to see if it is causing the issue.

A more sophisticated approach would be to use an std::map or std::set to check if the position+normal+etc combination already exists, and use those existing indices rather than adding redundant data to the array. In the cube example this would result in the first two faces being indices [0, 1, 2, 1, 2, 3] using 4 vertices rather than indices [0, 1, 2, 3, 4, 5] using 6 vertices,

0
votes

With the way you read the file, you really don't need any indices at all. You're replicating all vertices and normals for each face (triangle) you load. So there's no need for indices, you can just use the vertices in sequence for drawing. If you used OpenGL directly, you could use glDrawArrays(GL_TRIANGLES, ...) instead of glDrawElements(GL_TRIANGLES, ...). There's probably an equivalent option with the framework you are using.

To render the model efficiently, you will want to actually share vertices, and use index buffers. You need to create an OpenGL vertex for each unique combination of position and normal. Here are two of my earlier answers on the topic that illustrate in more detail how this works:

OpenGL - Index buffers difficulties

Why is my OBJ parser rendering meshes like this?