I'm using WebGL 2. I set up my projection matrix as follows:
/**
 * Build a right-handed perspective projection matrix.
 *
 * The 16 elements are laid out column-major, which is what
 * gl.uniformMatrix4fv expects when its transpose argument is false.
 *
 * @param {number} width  - viewport width in pixels
 * @param {number} height - viewport height in pixels
 * @param {number} near   - near clip plane distance (must be > 0)
 * @param {number} far    - far clip plane distance (must be > near)
 * @param {number} fov    - vertical field of view, in radians
 * @returns {number[]} a flat 16-element column-major 4x4 matrix
 */
function ProjMatrix(width, height, near, far, fov)
{
    var aspect = width / height;
    var f = 1.0 / Math.tan(fov / 2.0);   // cotangent of half the FOV
    var depth = far - near;

    // Start from all zeros and set only the non-zero entries.
    var m = new Array(16).fill(0.0);
    m[0]  = f / aspect;                    // x scale (column 0)
    m[5]  = f;                             // y scale (column 1)
    m[10] = -(far + near) / depth;         // z remap into clip space
    m[11] = -1.0;                          // w receives -z (perspective divide)
    m[14] = -(2.0 * far * near) / depth;   // z translation term
    return m;
}
I initialise it and pass it to the shader as follows:
// Set up the projection / view / model matrices and upload them as uniforms.
var fovRadians = 60.0 * Math.PI / 180.0;
var matProj = ProjMatrix(canvas.width * 1.0, canvas.height * 1.0, 0.1, 100.0, fovRadians);
dumpMatrix(matProj);
// Small helper so each 4x4 upload reads the same way (transpose = false:
// the arrays are already column-major).
var uploadMat4 = function (location, matrix) {
    gl.uniformMatrix4fv(location, false, new Float32Array(matrix));
};
uploadMat4(program.uProj, matProj);
uploadMat4(program.uView, IdentityMatrix());
uploadMat4(program.uModel, IdentityMatrix());
The model is as follows:
// One triangle, interleaved per vertex: x, y, z, w then r, g, b, a.
// Written as one row per vertex, then flattened into a single array.
var modelData = [
    // position (x, y, z, w)          color (r, g, b, a)
    [-1.0, -1.0, 1.0, 1.0,   /* red   */ 1.0, 0.0, 0.0, 1.0],
    [ 1.0, -1.0, 1.0, 1.0,   /* green */ 0.0, 1.0, 0.0, 1.0],
    [ 0.0,  1.0, 1.0, 1.0,   /* blue  */ 0.0, 0.0, 1.0, 1.0],
].reduce(function (flat, vertex) { return flat.concat(vertex); }, []);
Where the first 4 numbers on each row are the position, and the other 4 are the color.
When I use this model with this projection matrix, nothing at all is drawn. HOWEVER, if I use an identity matrix instead, with the vertices at z=0 and x and y between -0.5 and 0.5, the triangle DOES get drawn — so the problem must be with the matrix.
Can anyone suggest what could be wrong here?

The vertex shader computes the position as: `gl_Position = matProj * matView * matModel * attrVertex;` — user2894959