I'm trying to implement diffuse lighting with OpenGL. I've debugged the fragment shader for a cube and found that the normal vector is always (0,0,0), even though I specify normals in my vertex data and enable the vertex attribute.
Relevant OpenGL code:
GLfloat vertices[] = {
// each vertex: position (x, y, z) followed by normal (nx, ny, nz)
// back face, normal (0, 0, -1)
-0.5f, -0.5f, -0.5f, 0.0f, 0.0f, -1.0f,
0.5f, -0.5f, -0.5f, 0.0f, 0.0f, -1.0f,
0.5f, 0.5f, -0.5f, 0.0f, 0.0f, -1.0f,
0.5f, 0.5f, -0.5f, 0.0f, 0.0f, -1.0f,
-0.5f, 0.5f, -0.5f, 0.0f, 0.0f, -1.0f,
-0.5f, -0.5f, -0.5f, 0.0f, 0.0f, -1.0f,
// front face, normal (0, 0, 1)
-0.5f, -0.5f, 0.5f, 0.0f, 0.0f, 1.0f,
0.5f, -0.5f, 0.5f, 0.0f, 0.0f, 1.0f,
0.5f, 0.5f, 0.5f, 0.0f, 0.0f, 1.0f,
0.5f, 0.5f, 0.5f, 0.0f, 0.0f, 1.0f,
-0.5f, 0.5f, 0.5f, 0.0f, 0.0f, 1.0f,
-0.5f, -0.5f, 0.5f, 0.0f, 0.0f, 1.0f,
// left face, normal (-1, 0, 0)
-0.5f, 0.5f, 0.5f, -1.0f, 0.0f, 0.0f,
-0.5f, 0.5f, -0.5f, -1.0f, 0.0f, 0.0f,
-0.5f, -0.5f, -0.5f, -1.0f, 0.0f, 0.0f,
-0.5f, -0.5f, -0.5f, -1.0f, 0.0f, 0.0f,
-0.5f, -0.5f, 0.5f, -1.0f, 0.0f, 0.0f,
-0.5f, 0.5f, 0.5f, -1.0f, 0.0f, 0.0f,
// right face, normal (1, 0, 0)
0.5f, 0.5f, 0.5f, 1.0f, 0.0f, 0.0f,
0.5f, 0.5f, -0.5f, 1.0f, 0.0f, 0.0f,
0.5f, -0.5f, -0.5f, 1.0f, 0.0f, 0.0f,
0.5f, -0.5f, -0.5f, 1.0f, 0.0f, 0.0f,
0.5f, -0.5f, 0.5f, 1.0f, 0.0f, 0.0f,
0.5f, 0.5f, 0.5f, 1.0f, 0.0f, 0.0f,
// bottom face, normal (0, -1, 0)
-0.5f, -0.5f, -0.5f, 0.0f, -1.0f, 0.0f,
0.5f, -0.5f, -0.5f, 0.0f, -1.0f, 0.0f,
0.5f, -0.5f, 0.5f, 0.0f, -1.0f, 0.0f,
0.5f, -0.5f, 0.5f, 0.0f, -1.0f, 0.0f,
-0.5f, -0.5f, 0.5f, 0.0f, -1.0f, 0.0f,
-0.5f, -0.5f, -0.5f, 0.0f, -1.0f, 0.0f,
// top face, normal (0, 1, 0)
-0.5f, 0.5f, -0.5f, 0.0f, 1.0f, 0.0f,
0.5f, 0.5f, -0.5f, 0.0f, 1.0f, 0.0f,
0.5f, 0.5f, 0.5f, 0.0f, 1.0f, 0.0f,
0.5f, 0.5f, 0.5f, 0.0f, 1.0f, 0.0f,
-0.5f, 0.5f, 0.5f, 0.0f, 1.0f, 0.0f,
-0.5f, 0.5f, -0.5f, 0.0f, 1.0f, 0.0f
};
GLuint VBO, boxVAO;
glGenVertexArrays(1, &boxVAO);
glGenBuffers(1, &VBO);
glBindVertexArray(boxVAO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
// attribute 0: position (3 floats, stride of 6 floats per vertex)
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(GLfloat), (GLvoid*)0);
glEnableVertexAttribArray(0);
// attribute 1: normal (3 floats, starting 3 floats into each vertex)
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(GLfloat), (GLvoid*)(3 * sizeof(GLfloat)));
glEnableVertexAttribArray(1);
glBindVertexArray(0);
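For context, the intended per-frame draw code looks roughly like this. It's a sketch, not my exact code: lightingShader is a placeholder name for the linked program containing the shaders below, and I'm assuming GLM for the matrix uploads (view and proj are handled the same way as model):
glUseProgram(lightingShader);
GLint modelLoc = glGetUniformLocation(lightingShader, "model");
glUniformMatrix4fv(modelLoc, 1, GL_FALSE, glm::value_ptr(model));
// view and proj are uploaded the same way
glBindVertexArray(boxVAO); // the VAO bound here is what turned out to matter (see the end of the post)
glDrawArrays(GL_TRIANGLES, 0, 36); // 6 faces * 2 triangles * 3 vertices
glBindVertexArray(0);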
Vertex Shader:
#version 330 core
layout (location = 0) in vec3 position;
layout (location = 1) in vec3 normal;
out vec3 FragPos;
out vec3 outNormal;
uniform mat4 model;
uniform mat4 view;
uniform mat4 proj;
void main()
{
gl_Position = proj * view * model * vec4(position, 1.0);
FragPos = vec3(model * vec4(position, 1.0f));
outNormal = vec3(normal);
}
Fragment Shader:
#version 330 core
in vec3 FragPos;
in vec3 outNormal;
out vec4 color;
uniform vec3 objectColor;
uniform vec3 lightColor;
uniform vec3 lightPos;
void main()
{
float ambientIntensity = 0.5f;
vec3 ambientColor = ambientIntensity * lightColor;
vec3 norm = normalize(outNormal);
vec3 lightDir = normalize(lightPos - FragPos);
float diffuse = max(dot(norm, lightDir), 0.0);
vec3 diffuseColor = diffuse * lightColor;
vec3 resultColor = (ambientColor + diffuseColor) * objectColor;
color = vec4(resultColor, 1.0f);
}
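For completeness, the lighting uniforms are set from the C++ side along these lines (a sketch with placeholder values, with lightingShader bound via glUseProgram first):
glUniform3f(glGetUniformLocation(lightingShader, "objectColor"), 1.0f, 0.5f, 0.31f); // placeholder cube color
glUniform3f(glGetUniformLocation(lightingShader, "lightColor"), 1.0f, 1.0f, 1.0f);   // white light
glUniform3f(glGetUniformLocation(lightingShader, "lightPos"), 1.2f, 1.0f, 2.0f);     // placeholder light position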
Output: only the ambient lighting shows up, no diffuse.
Also, not sure if this helps at all, but doing something weird like setting the normal to the fragment's position yields this result:
So it seems like the normals should be producing diffuse light, but are somehow not getting loaded into the shader correctly. Any ideas?
This is the code I used to debug the shader; it colors a fragment red when the z component of its normal is negative and green otherwise, so with correct normals the back face (whose normal is (0, 0, -1)) should come out red:
// debug testing: red if normal.z < 0, green otherwise
vec3 test = vec3(outNormal.xyz);
bvec3 ln = lessThan(test, vec3(0, 0, 0));
if (ln[2]) {
    color = vec4(1.0, 0.0, 0.0, 1.0); // normal.z is negative
} else {
    color = vec4(0.0, 1.0, 0.0, 1.0); // normal.z is zero or positive
}
Oops! It turns out that when I created the light's VAO, I used the same vertices but only set up the first three values of each vertex as the position attribute (which is correct for the light). But I accidentally bound that VAO when drawing the cube instead of the VAO shown in the code above. Can't believe I didn't catch that. Anyway, the code here is correct.
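In case anyone hits the same symptom, the light VAO was set up roughly like this (lightVAO is a hypothetical name for this sketch). With that VAO bound, attribute 1 is a disabled array, so the shader reads the default generic attribute value of (0, 0, 0, 1), which is exactly the constant (0, 0, 0) normal I was seeing:
// Roughly how the light's VAO was created:
GLuint lightVAO;
glGenVertexArrays(1, &lightVAO);
glBindVertexArray(lightVAO);
glBindBuffer(GL_ARRAY_BUFFER, VBO); // reuses the cube's vertex data
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(GLfloat), (GLvoid*)0);
glEnableVertexAttribArray(0); // only the position attribute, no normals
glBindVertexArray(0);

// The fix: bind the cube's VAO, not the light's, before drawing the cube.
glBindVertexArray(boxVAO); // was accidentally lightVAO
glDrawArrays(GL_TRIANGLES, 0, 36);
glBindVertexArray(0);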
Peace.