I created a composite renderer that simply alpha-blends two textures, one on top of the other. The blending is just not working correctly.
Here is the code for the renderer, just rendering the background:
RenderFunction layer_render = [&]() {
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(layer_shader_.get_program_id());
// ----- Fetch shader uniform locations
layer_shader_.SetUniform("tex", 0);
layer_shader_.SetUniform("create_alpha_mask", false);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glBlendEquation(GL_FUNC_ADD);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, background_texture_id_);
layer_shader_.SetUniform("model", background_model_);
background_mesh_.Bind();
glDrawElements(GL_TRIANGLES, background_mesh_.GetIndicesCount(), GL_UNSIGNED_INT, (void*)0);
background_mesh_.Unbind();
};
layer_framebuffer_.RenderToTexture(layer_render);
The background texture i'm passing is a full rgba -> {1.0, 0.0, 0.0, 0.5}. What comes out is -> {0.5, 0.0, 0.0, 0.5}. The alpha blending is not properly accounting for the source alpha when computing the blending for some reason that i'm failing to see.
Some pseudo code of what i was expecting:
source_alpha = 0.5
dest_alpha = 0.0 * (1.0 - source_alpha) = 0.0
output_alpha = source_alpha + dest_alpha = 0.5
out_r = (source_r * source_alpha + dest_r * dest_alpha) / output_alpha = (1.0 * 0.5 + 0.0 * 0.0) / 0.5 = 1.0
out_g = (source_g * source_alpha + dest_g * dest_alpha) / output_alpha = (0.0 * 0.5 + 0.0 * 0.0) / 0.5 = 0.0
out_b = (source_b * source_alpha + dest_b * dest_alpha) / output_alpha = (0.0 * 0.5 + 0.0 * 0.0) / 0.5 = 0.0
out_a = output_alpha = 0.5