2
votes

I have some problems regarding sphere tracing/ray marching in GLSL using SDF functions:

My main program (C++, using Vulkan) generates a screen quad and supplies the vertex shader with a per-vertex inPosition. The vertex shader has access to the window resolution, projection matrix and view matrix. The projection matrix is generated with glm::perspective(45.0, 1920/1080, 0.1, 100.0);.

In the vertex shader, I try to calculate a ray (position and direction using homogeneous coordinates) coming from the origin at vec4(0.0, 0.0, 0.0, 1.0) through the image plane. I am confused where to place the image plane and chose vec4(inPosition.xy, -5.0, 1.0) for now to look along the negative z-axis.

The following code represents my vertex shader:

#version 450
#extension GL_ARB_separate_shader_objects : enable

struct Ray
{
  vec4 pos;
  vec4 dir;
};

layout(binding = 0) uniform UniformBufferObject {
  vec3 res;
  mat4 projection;
  mat4 view;
} ubo;

layout(location = 0) in vec3 inPosition;

layout(location = 0) out vec3 iResolution;
layout(location = 1) out Ray iRay;

out gl_PerVertex {
  vec4 gl_Position;
};

void main() {
  fragCoord = vec2(
    ((inPosition.x+1)/2) * (ubo.res.x-1),
    ((inPosition.y+1)/2) * (ubo.res.y-1)
  );
  iResolution = ubo.res;
  gl_Position = vec4(inPosition, 1.0);
  vec4 direction = inverse(ubo.projection) * vec4(inPosition.xy, -5.0, 1.0);
  iRay.dir = direction;
  iRay.pos = vec4(direction.xy, 0.0, 1.0);
}

I used the projection matrix to transform the directions to world space and distort the unit cube to the window resolution. However, in my fragment shader the SDF functions and intersections do not work properly. I can only see a sphere if I set the same values for the distance and the radius. See the fragment shader:

#version 450
#extension GL_ARB_separate_shader_objects : enable

struct Ray
{
  vec4 pos;
  vec4 dir;
};

layout(location = 0) in vec3 iResolution;
layout(location = 1) in Ray iRay;

layout(location = 0) out vec4 outColor;

float sdfSphere(vec3 p, float r) 
{
  return length(p) - r;
}

bool intersect(Ray ray)
{
  for(int i = 0; i < 100; i++) {
    float hit = sdfSphere((ray.pos.xyz + vec3(0.0, 0.0, -11.0)), 11.0);
    ray.pos += hit * ray.dir;
    if (hit < 0.001) {
      return true;
    }
  }
  return false;
}

void main() 
{
  bool result = intersect(iRay);
  if(result == false) {
    outColor = vec4(0.0, 0.0, 0.0, 1.0);
  } else {
    outColor = vec4(1.0, 0.0, 0.0, 1.0);
  }
}

My question is: How to I have to apply the projection matrix properly? And if it is already applied properly, why am I not able to set a different position/radius for the SDF sphere?

2
If you use Vulkan you should tag it that way instead of using the OpenGL Tag. - BDL

2 Answers

2
votes

Here is my code to compute a ray in world-space from fragment's coordinates. It uses a set of uniform variables that mimic the old fixed functionality pipeline (GLUP uniform variables) in the following code. The tricky part is to properly apply the viewport transform, and taking into account that some variables are in [-1,1] and other ones in [0,1] (made me bang my head on the wall).

struct Ray {
    vec3 O; // Origin
    vec3 V; // Direction vector
};

// Notes: GLUP.viewport = [x0,y0,width,height]
// clip-space coordinates are in [-1,1] (not [0,1]) !

// Computes the ray that passes through the current fragment
// The ray is in world space.
Ray glup_primary_ray() {
    vec4 near = vec4(
    2.0 * ( (gl_FragCoord.x - GLUP.viewport[0]) / GLUP.viewport[2] - 0.5),
    2.0 * ( (gl_FragCoord.y - GLUP.viewport[1]) / GLUP.viewport[3] - 0.5),
        0.0,
        1.0
    );
    near = GLUP.inverse_modelviewprojection_matrix * near ;
    vec4 far = near + GLUP.inverse_modelviewprojection_matrix[2] ;
    near.xyz /= near.w ;
    far.xyz /= far.w ;
    return Ray(near.xyz, far.xyz-near.xyz) ;
}

// Updates fragment depth from a point in world space
void glup_update_depth(in vec3 M_world_space) {
    vec4 M_clip_space = GLUP.modelviewprojection_matrix * vec4(M_world_space,1.0);
    float z = 0.5*(1.0 + M_clip_space.z/M_clip_space.w);
    glup_FragDepth = (1.0-z)*gl_DepthRange.near + z*gl_DepthRange.far;
}

An example fragment shader that draws raytraced spheres using glup_primary_ray():

in vec3 C; // center in world space;
in float r;

void main(void) {
  Ray R = glup_primary_ray();
    vec3 M,N;

    if(
    glupIsEnabled(GLUP_CLIPPING) &&
    GLUP.clipping_mode == GLUP_CLIP_SLICE_CELLS
    ) {
    N = GLUP.world_clip_plane.xyz;
    float w = GLUP.world_clip_plane.w;
    float t = -(w + dot(N,R.O)) / dot(N,R.V);
    M = R.O + t*R.V;
    if(dot(M-C,M-C) > r*r) {
        discard;
    }
    } else {
    vec3 D = R.O-C;    
    float a = dot(R.V,R.V);
    float b = 2.0*dot(R.V,D);
    float c = dot(D,D)-r*r;
    float delta = b*b-4.0*a*c;

    if(delta < 0.0) {
        discard;
    }
    float t = (-b-sqrt(delta))/(2.0*a);
    M = R.O + t*R.V;
    N = M-C;
    //insert here code to compute the shading with N

    //update the depth buffer
    glup_update_depth(M);
    }   
}

The complete code is available in my GEOGRAM library: http://alice.loria.fr/software/geogram/doc/html/index.html (src/lib/geogram_gfx/GLUP/shaders).

-1
votes

I spent several days for solving this problem, because I was required to get an exact soluton to use raymarching in VR with synchronized views for both eyes. The resulted worked solution is to invert normalized device coordinates, which appears in vertex shader, using Model View Projection matrix: Computing ray origin and direction from Model View Projection matrices for raymarching