I suspect this is a misunderstanding on my part of the implications of binding, but I'm at a loss for what I'm doing wrong here. I've put relevant code at the end of the post.
I am working on implementing a numerical algorithm on the GPU. Specifically, I am following this example from Nvidia. In particular, part of this algorithm uses an iterative technique in which the output of one iteration is used as the input to the next. I'm specifically looking at Section 38.3.1 when making decisions here.
Now, a lot of this is learning WebGL/OpenGL/GLSL for me, so I'm not diving straight in. At this point I'm simply trying to directly simulate one "iteration" by rendering an image in one shader (just a uniform color) to a framebuffer, copying that output to another texture, and passing that second texture as a uniform input to the second shader. The second shader should then add a gaussian spot to the image and again render to the same framebuffer as the first (Overwriting the original). The output of this second shader is then again copied to the secondary texture, which is passed to a final shader as a uniform input which renders everything to the screen.
My Problem: If I run only the first or second shader, I see the expected output from just that stage on screen. This tells me that the shaders are running and I'm copying things as I expect (Since the final render to screen can see the copied texture). However, if I run both steps I only get the output of the second shader alone on screen (As if the input from the first was just 0), which tells me that for some reason the uniform input in the second shader isn't getting what I expect from the first.
All fragment shaders use the same vertex shader, which just takes in the boundaries of the solution domain and passes out a varying vec2 of the position.
The relevant code is below. For the sake of brevity I haven't included the shader setup, but basically I create the programs and then get the uniform/attribute positions and store them in the similarly-named properties, and activate the attributes. I also check that the programs successfully linked.
As a final note: I see no warnings or errors in the console while this runs.
Framebuffer setup:
// Create the offscreen framebuffer and the two ping-pong textures:
// frontTex (texture unit 1) is the framebuffer's color attachment that the
// solver passes render into; backTex (texture unit 2) holds the previous
// step's result and is what the shaders sample from.
function initFramebuffers() {
    console.log("Creating framebuffer for flow field");
    flowFramebuffer = gl.createFramebuffer();
    gl.bindFramebuffer(gl.FRAMEBUFFER, flowFramebuffer);

    // Front texture: the live render target, kept bound on unit 1.
    frontTex = gl.createTexture();
    gl.activeTexture(gl.TEXTURE1);
    gl.bindTexture(gl.TEXTURE_2D, frontTex);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, nx, ny, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
    gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, frontTex, 0);

    // Fail loudly if the attachment combination is unsupported: a silently
    // incomplete framebuffer turns every later draw into a no-op, which is
    // very hard to debug from the blank output alone.
    var status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
    if (status !== gl.FRAMEBUFFER_COMPLETE) {
        console.error("Flow framebuffer incomplete, status: 0x" + status.toString(16));
    }

    // Start by zeroing out the flow field
    gl.clearColor(0.0, 0.0, 0.0, 0.0);
    gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);

    // Back texture (unit 2): same size/format, seeded with a copy of the
    // cleared framebuffer so the first iteration reads all zeros.
    backTex = gl.createTexture();
    gl.activeTexture(gl.TEXTURE2);
    gl.bindTexture(gl.TEXTURE_2D, backTex);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, nx, ny, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
    gl.copyTexImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 0, 0, nx, ny, 0); // Copy the blank flow field in to our back texture
    gl.bindFramebuffer(gl.FRAMEBUFFER, null);
}
Stage 1:
// Stage 1: run the first solver pass into the offscreen framebuffer, then
// snapshot the result into backTex (texture unit 2) for the next stage.
function stage1() {
    console.log("Stage 1...");
    // BUG FIX: the original read `gl.useProgram(1Program)` — identifiers
    // cannot start with a digit, so that is a syntax error. Every other
    // reference in this function uses `stage1program`.
    gl.useProgram(stage1program);
    gl.bindFramebuffer(gl.FRAMEBUFFER, flowFramebuffer);
    // NOTE: no activeTexture() call is needed before drawing — rendering
    // goes to the framebuffer's color attachment (frontTex), not to
    // whichever texture unit happens to be active.
    gl.viewport(0, 0, nx, ny);
    gl.bindBuffer(gl.ARRAY_BUFFER, solutionGrid);
    gl.vertexAttribPointer(stage1program.vertexPositionAttribute, solutionGrid.itemSize, gl.FLOAT, false, 0, 0);
    // Sample both inputs from texture unit 2 (backTex): unit 1 holds the
    // texture we are rendering into, and sampling a texture while it is the
    // current framebuffer attachment is undefined.
    gl.uniform1i(stage1program.flowVelUniform, 2);
    gl.uniform1i(stage1program.inFieldUniform, 2); // We're self-advecting, so use the same texture here.
    gl.uniform1f(stage1program.dtUniform, 1.0/60.0); // 60 frames per second
    gl.drawArrays(gl.TRIANGLES, 0, solutionGrid.numItems); // Solve
    // copyTexImage2D reads from the bound framebuffer and writes into the
    // texture bound on the ACTIVE unit — backTex lives on unit 2.
    gl.activeTexture(gl.TEXTURE2);
    gl.copyTexImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 0, 0, nx, ny, 0);
    gl.bindFramebuffer(gl.FRAMEBUFFER, null);
}
Shader for stage1:
precision mediump float;

uniform float dt;            // Timestep (unused in this debug stand-in)
uniform sampler2D toAdvect;  // Dye field: 0 (no dye) .. 1 (filled with dye)
uniform sampler2D flowField; // Flow field from the flow calculation step

varying vec2 vertexOut;      // Vertex position in simulation space [-1,1]

// Map simulation-space coordinates [-1,1] to texture coordinates [0,1].
vec2 simToTextureSpace(vec2 vertex) {
    return (vertex + 1.0) * 0.5;
}

void main() {
    // Debug placeholder: fill the render target with a constant teal color
    // so the downstream copy/sample path can be verified visually.
    gl_FragColor = vec4(0.0, 0.4, 0.6, 1.0);
}
Stage 2:
// Stage 2: add a Gaussian spot on top of the stage-1 result. Samples the
// previous output from backTex (unit 2), renders into the shared framebuffer
// (overwriting frontTex), then snapshots the new result back into backTex.
function stage2() {
    gl.useProgram(stage2program);
    gl.bindFramebuffer(gl.FRAMEBUFFER, flowFramebuffer);
    // NOTE: the original called gl.activeTexture(gl.TEXTURE1) here with a
    // comment saying the draw targets unit 1 — that is a misconception and
    // the call is a no-op for rendering: draws go to the framebuffer's
    // color attachment regardless of the active texture unit, so it has
    // been removed.
    gl.viewport(0, 0, nx, ny);
    gl.bindBuffer(gl.ARRAY_BUFFER, solutionGrid);
    gl.vertexAttribPointer(stage2program.vertexPositionAttribute, solutionGrid.itemSize, gl.FLOAT, false, 0, 0);
    // Unit 2 holds backTex, the snapshot of the previous stage's output;
    // unit 1 is the texture currently attached to the framebuffer.
    gl.uniform1i(stage2program.flowVelUniform, 2);
    gl.uniform1f(stage2program.dtUniform, 1.0/60.0); // 60 frames per second
    gl.drawArrays(gl.TRIANGLES, 0, solutionGrid.numItems); // Solve
    // Copy the framebuffer contents into the texture on the active unit (backTex).
    gl.activeTexture(gl.TEXTURE2);
    gl.copyTexImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 0, 0, nx, ny, 0);
    gl.bindFramebuffer(gl.FRAMEBUFFER, null);
}
Stage 2 Shader:
precision mediump float;

uniform float dt;            // Timestep in seconds
uniform sampler2D flowField; // Flow field from the previous calculation step

varying vec2 vertexOut;      // Vertex position in simulation space [-1,1]

// Map simulation-space coordinates [-1,1] to texture coordinates [0,1].
vec2 simToTextureSpace(vec2 vertex) {
    return 0.5*(vertex+1.0);
}

void main() {
    vec2 texPosition = simToTextureSpace(vertexOut);
    // Previous field value at this fragment.
    vec4 last = texture2D(flowField, texPosition);
    // Squared distance from the domain center. BUG FIX: the original used
    // pow(vertexOut.x, 2.0) + pow(vertexOut.y, 2.0), but GLSL ES pow() has
    // undefined results for negative bases and vertexOut spans [-1,1];
    // dot(v, v) computes x*x + y*y and is well-defined everywhere.
    float radius2 = dot(vertexOut, vertexOut);
    // Accumulate a narrow red Gaussian spot at the center onto the input field.
    gl_FragColor = last + vec4(60.0*dt*exp(-radius2/0.001), 0.0, 0.0, 1.0);
}
Render to Screen:
// Final pass: draw the simulation texture (backTex on unit 2) to the
// default framebuffer so the result appears on screen.
function drawScene() {
    gl.useProgram(visualProgram);
    gl.activeTexture(gl.TEXTURE0);
    gl.viewport(0, 0, gl.viewportWidth, gl.viewportHeight);
    // A blue background makes it obvious if the quad fails to cover the canvas.
    gl.clearColor(0.0, 0.0, 1.0, 1.0);
    gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
    // Reuse the simulation-domain geometry as the fullscreen quad.
    gl.bindBuffer(gl.ARRAY_BUFFER, solutionGrid);
    gl.vertexAttribPointer(visualProgram.vertexPositionAttribute, solutionGrid.itemSize, gl.FLOAT, false, 0, 0);
    gl.uniform1i(visualProgram.inFieldUniform, 2); // sample backTex on unit 2
    gl.drawArrays(gl.TRIANGLES, 0, solutionGrid.numItems);
}
Render to Screen Shader:
precision mediump float;

uniform sampler2D inField; // Field to display on screen

varying vec2 vertexOut;    // Vertex position in simulation space [-1,1]

// Map simulation-space coordinates [-1,1] to texture coordinates [0,1].
vec2 simToTextureSpace(vec2 vertex) {
    return (vertex + 1.0) * 0.5;
}

void main() {
    // Pass the sampled field straight through to the framebuffer.
    gl_FragColor = texture2D(inField, simToTextureSpace(vertexOut));
}
Sequence of calls:
// Setup shaders, initFramebuffers(), etc
//console.log("Drawing...");
stage1();
stage2();
drawScene();
Note from a commenter (gman): the `gl.activeTexture(gl.TEXTURE1); // We're going to draw this result to texture unit 1` calls in stage1 and stage2 are not needed — rendering goes to the framebuffer's color attachment regardless of which texture unit is active.