
I'm trying to write a ray tracer for objects formed of triangular meshes. I'm using an external library to load a cube from .ply format and then trace rays against it. So far I've implemented most of the tracer, and now I'm trying to test it with a single cube, but for some reason all I get on the screen is a red line. I've tried several ways to fix it, but I simply can't figure it out anymore. For this initial test I'm only creating primary rays; if one hits my cube, I color that pixel with the cube's diffuse color and return. To check ray-object intersections, I go through all the triangles that form the object and return the distance to the closest one. It would be great if you could have a look at the code and tell me what could have gone wrong and where. I would greatly appreciate it.

Ray-Triangle intersection:

bool intersectTri(const Vec3D& ray_origin, const Vec3D& ray_direction, const Vec3D& v0, const Vec3D& v1, const Vec3D& v2, double &t, double &u, double &v) const
{
    // Moeller-Trumbore ray/triangle intersection.
    Vec3D edge1 = v1 - v0;
    Vec3D edge2 = v2 - v0;
    Vec3D pvec = ray_direction.cross(edge2);
    double det = edge1.dot(pvec);
    // Ray is (nearly) parallel to the triangle plane.
    if (det > -THRESHOLD && det < THRESHOLD)
        return false;
    double invDet = 1 / det;
    Vec3D tvec = ray_origin - v0;
    // Barycentric coordinate u.
    u = tvec.dot(pvec) * invDet;
    if (u < 0 || u > 1)
        return false;
    Vec3D qvec = tvec.cross(edge1);
    // Barycentric coordinate v.
    v = ray_direction.dot(qvec) * invDet;
    if (v < 0 || u + v > 1)
        return false;
    // Distance along the ray; reject hits behind the origin.
    t = edge2.dot(qvec) * invDet;
    if (t < 0)
        return false;
    return true;
}
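
As a quick sanity check (this snippet is not from the original post), a ray fired straight down the -Z axis at a unit triangle in the z = 0 plane should report a hit at t ~ 1 with u = v = 0.25. Vec3D's (x, y, z) constructor and access to intersectTri through an Object instance such as the cube1 created in main() below are assumptions here:

double t, u, v;
// Ray from (0.25, 0.25, 1) pointing down -Z, unit triangle in the z = 0 plane.
bool hit = cube1.intersectTri(Vec3D(0.25, 0.25, 1.0), Vec3D(0.0, 0.0, -1.0),
                              Vec3D(0, 0, 0), Vec3D(1, 0, 0), Vec3D(0, 1, 0),
                              t, u, v);
// Expected: hit == true, t ~ 1.0, u ~ 0.25, v ~ 0.25.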

//Object intersection
bool intersect(const Vec3D& ray_origin, const Vec3D& ray_direction, IntersectionData& idata, bool enforce_max) const
{
    // When enforce_max is set, only hits closer than the caller-supplied
    // idata.t are accepted (useful e.g. for shadow rays); otherwise any hit
    // closer than TMAX counts.
    double tClosest = enforce_max ? idata.t : TMAX;
    bool found = false;

    // Test every triangle of the mesh and keep the closest hit.
    for (unsigned int i = 0 ; i < indices.size() ; i++)
    {
        const Vec3D v0 = vertices[indices[i][0]];
        const Vec3D v1 = vertices[indices[i][1]];
        const Vec3D v2 = vertices[indices[i][2]];
        double t, u, v;
        if (intersectTri(ray_origin, ray_direction, v0, v1, v2, t, u, v))
        {
            if (t < tClosest)
            {
                tClosest = t;
                idata.t = t;
                idata.u = u;
                idata.v = v;
                idata.index = i;
                found = true;
            }
        }
    }
    return found;
}
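
For reference (this call is not in the original post), the enforce_max flag lets a caller pre-load idata.t with an upper bound so that only hits closer than that distance count; the variable names below are placeholders:

IntersectionData bounded;
bounded.t = 10.0;   // accept hits only within 10 units of the ray origin
bool blocked = cube1.intersect(ray_origin, ray_direction, bounded, true);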

Vec3D trace(World world, Vec3D &ray_origin, Vec3D &ray_direction)
{
    double tClosest = TMAX;
    Object *hitObject = NULL;
    IntersectionData idata;

    // Find the closest object hit by the primary ray.
    for (unsigned int i = 0 ; i < world.objs.size() ; i++)
    {
        IntersectionData idata_curr;
        if (world.objs[i].intersect(ray_origin, ray_direction, idata_curr, false))
        {
            if (idata_curr.t < tClosest && idata_curr.t > 0)
            {
                idata = idata_curr;
                tClosest = idata_curr.t;
                hitObject = &(world.objs[i]);
            }
        }
    }

    // No hit: return the background; otherwise flat-shade with the diffuse color.
    if (hitObject == NULL)
    {
        return world.background_color;
    }
    return hitObject->getDiffuse();
}

int main(int argc, char** argv)
{
    parse("cube.ply");
    Vec3D diffusion1(1, 0, 0);
    Vec3D specular1(1, 1, 1);
    Object cube1(coordinates, connected_vertices, diffusion1, specular1, 0, 0);
    World wrld;
    // Add objects to the world
    wrld.objs.push_back(cube1);
    Vec3D background(0, 0, 0);
    wrld.background_color = background;
    // Set light color
    Vec3D light_clr(1, 1, 1);
    wrld.light_colors.push_back(light_clr);
    // Set light position
    Vec3D light(0, 64, -10);
    wrld.light_positions.push_back(light);

    int width = 128;
    int height = 128;
    Vec3D *image = new Vec3D[width*height];
    Vec3D *pixel = image;

    // Trace rays
    for (int y = -height/2 ; y < height/2 ; ++y)
    {
        for (int x = -width/2 ; x < width/2 ; ++x, ++pixel)
        {
            Vec3D ray_dir(x+0.5, y+0.5, -1.0);
            ray_dir.normalize();
            Vec3D ray_orig(0.5*width, 0.5*height, 0.0);
            *pixel = trace(wrld, ray_orig, ray_dir);
        }
    }

    savePPM("./test.ppm", image, width, height);
    delete[] image;
    return 0;
}
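
savePPM isn't shown in the post; in case it helps, a minimal binary PPM writer along these lines would match the call above. The exact signature, the clamping, and the getX/getY/getZ accessors returning values in [0, 1] are my assumptions:

#include <fstream>
#include <algorithm>

// Minimal sketch of a PPM writer matching the call savePPM("./test.ppm", image, width, height).
void savePPM(const char* path, const Vec3D* image, int width, int height)
{
    std::ofstream out(path, std::ios::binary);
    out << "P6\n" << width << " " << height << "\n255\n";
    for (int i = 0; i < width * height; ++i)
    {
        unsigned char rgb[3] = {
            (unsigned char)(std::min(1.0, std::max(0.0, image[i].getX())) * 255),
            (unsigned char)(std::min(1.0, std::max(0.0, image[i].getY())) * 255),
            (unsigned char)(std::min(1.0, std::max(0.0, image[i].getZ())) * 255)
        };
        out.write((const char*)rgb, 3);
    }
}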

I've just run a test case and I got this:

for a unit cube centered at (0, 0, -1.5) and scaled on the X and Y axes by 100. It seems that there is something wrong with the projection, but I can't really tell exactly what from the result. Also, since the cube is centered at (0, 0), shouldn't the final object appear in the middle of the picture? FIX: I fixed the centering problem by doing ray_dir = ray_dir - ray_orig before normalizing and calling the trace function. Still, the perspective seems to be plain wrong.
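
For comparison (this is not from the original code), a conventional pinhole camera keeps the eye at the origin and fires rays through pixel centers on an image plane at z = -1, scaled by a field-of-view angle. A sketch, assuming a 60-degree FOV, <cmath> for tan, and the width, height, image and trace() from main() above:

const double PI = 3.14159265358979;
double fov = 60.0 * PI / 180.0;              // assumed field of view
double aspect = (double)width / height;
double scale = tan(fov * 0.5);

Vec3D ray_orig(0.0, 0.0, 0.0);               // eye at the origin
Vec3D *pixel = image;
for (int y = 0; y < height; ++y)
{
    for (int x = 0; x < width; ++x, ++pixel)
    {
        // Map pixel centers to [-1, 1] on the image plane at z = -1 (row 0 at the top).
        double px = (2.0 * (x + 0.5) / width - 1.0) * scale * aspect;
        double py = (1.0 - 2.0 * (y + 0.5) / height) * scale;
        Vec3D ray_dir(px, py, -1.0);
        ray_dir.normalize();
        *pixel = trace(wrld, ray_orig, ray_dir);
    }
}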

Why is your ray origin at (width/2, height/2), when your ray directions go from -(width/2, height/2) to (width/2, height/2)? You need to either move the origin to, well, the origin, or move your ray directions so that the lowest-valued corner is (0, 0, -1). I think this explains the result image, as it looks like a really wide field of view. – MikeMx7f
Thanks a lot! I moved the ray origin to (0,0), and now I get this: s10.postimage.org/i33s9eih5/test.png. Should I implement rotations for the cube in order to get a 3D aspect? Because this way only the front face is seen. I also tried translating the cube on (x, y), but again all I get is the front face of it. – franciscb

1 Answer


I continued working on this and have now started implementing diffuse reflection according to the Phong model.

Vec3D trace(World world, Vec3D &ray_origin, Vec3D &ray_direction)
{
    Vec3D objColor = Vec3D(0);
    IntersectionData idata;
    double coeff = 1.0;
    int depth = 0;
    do
    {
        // Find the closest object hit by the ray.
        double tClosest = TMAX;
        Object *hitObject = NULL;
        for (unsigned int i = 0 ; i < world.objs.size() ; i++)
        {
            IntersectionData idata_curr;
            if (world.objs[i].intersect(ray_origin, ray_direction, idata_curr, false))
            {
                if (idata_curr.t < tClosest && idata_curr.t > 0)
                {
                    idata = idata_curr;
                    tClosest = idata_curr.t;
                    hitObject = &(world.objs[i]);
                }
            }
        }
        if (hitObject == NULL)
        {
            return world.background_color;
        }

        // Intersection point.
        Vec3D newStart = ray_origin + ray_direction*idata.t;

        // Compute normal at intersection by interpolating vertex normals (Phong idea)
        Vec3D v0 = hitObject->getVertices()[hitObject->getIndices()[idata.index][0]];
        Vec3D v1 = hitObject->getVertices()[hitObject->getIndices()[idata.index][1]];
        Vec3D v2 = hitObject->getVertices()[hitObject->getIndices()[idata.index][2]];

        Vec3D n1 = hitObject->getNormals()[hitObject->getIndices()[idata.index][0]];
        Vec3D n2 = hitObject->getNormals()[hitObject->getIndices()[idata.index][1]];
        Vec3D n3 = hitObject->getNormals()[hitObject->getIndices()[idata.index][2]];

    //  Vec3D N = n1 + (n2 - n1)*idata.u + (n3 - n1)*idata.v;
        // For now use the face normal, flipped to face the incoming ray.
        Vec3D N = v0.computeFaceNrm(v1, v2);
        if (ray_direction.dot(N) > 0)
        {
            N = N*(-1);
        }
        N.normalize();

        // Accumulate the Lambertian diffuse term from every light.
        for (unsigned int itr = 0 ; itr < world.light_positions.size() ; itr++)
        {
            Vec3D lightray_dir = world.light_positions[itr] - newStart;
            lightray_dir.normalize();

            double cos_theta = max(N.dot(lightray_dir), 0.0);
            objColor.setX(objColor.getX() + hitObject->getDiffuse().getX()*hitObject->getDiffuseReflection()*cos_theta);
            objColor.setY(objColor.getY() + hitObject->getDiffuse().getY()*hitObject->getDiffuseReflection()*cos_theta);
            objColor.setZ(objColor.getZ() + hitObject->getDiffuse().getZ()*hitObject->getDiffuseReflection()*cos_theta);
        }

        // Secondary rays / reflections are not implemented yet, so stop after
        // shading the first hit (coeff and depth are placeholders for that).
        return objColor;

    } while (coeff > 0 && depth < MAX_RAY_DEPTH);
    return objColor;
}

When the primary ray hits an object, I send another ray toward the light source positioned at (0, 0, 0) and return the color according to the Phong illumination model for diffuse reflection, but the result is not what I expected: http://s15.postimage.org/vc6uyyssr/test.png. The cube is a unit cube centered at (0, 0, 0) and then translated by (1.5, -1.5, -1.5). From my point of view, the left side of the cube should get more light, and it actually does. What do you think of it?
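
One thing worth noting: the loop above computes the direction to each light but never checks whether the light is actually visible from the hit point. If you want real shadow rays, the intersect() method's enforce_max parameter can be reused for an occlusion query inside the light loop before adding the diffuse term. A sketch, assuming a Vec3D::length() helper and a small epsilon offset to avoid self-intersection (both are my assumptions, not from the post):

// Inside the light loop of trace(): build the shadow ray toward the light.
Vec3D to_light = world.light_positions[itr] - newStart;
double light_dist = to_light.length();      // assumed Vec3D helper
Vec3D lightray_dir = to_light * (1.0 / light_dist);
Vec3D lightray_orig = newStart + N * 1e-4;  // nudge off the surface

// Occlusion query: accept only hits strictly closer than the light.
bool in_shadow = false;
for (unsigned int j = 0; j < world.objs.size(); j++)
{
    IntersectionData shadow_idata;
    shadow_idata.t = light_dist;
    if (world.objs[j].intersect(lightray_orig, lightray_dir, shadow_idata, true))
    {
        in_shadow = true;
        break;
    }
}
if (!in_shadow)
{
    double cos_theta = max(N.dot(lightray_dir), 0.0);
    objColor = objColor + hitObject->getDiffuse() * (hitObject->getDiffuseReflection() * cos_theta);
}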