Search code examples
Tags: c++, opengl, glsl, glm-math

Issue with passing integer vertex attributes with "in" keyword


I'm working on bone animation. I have a vertex struct that basically looks like

// Per-vertex data uploaded verbatim to the GPU vertex buffer.
// NOTE: field order and packing are load-bearing — the offsetof()
// calls in Mesh::genFormat() describe this exact layout to OpenGL.
struct MeshVertex
{
    glm::vec3 pos;        // object-space position (attribute 0)
    glm::vec3 normal;     // object-space normal (attribute 1)
    glm::vec2 tex;        // UV coordinates (attribute 2)
    glm::vec3 tangent;    // tangent-space basis (attribute 3)
    glm::vec3 bitangent;  // tangent-space basis (attribute 4)
    uint32_t ids[4] = {}; // indices of up to 4 influencing bones (attribute 5, integer)
    float weights[4] = {};// matching bone weights, expected to sum to 1 (attribute 6)

    void print() const;
};

The mesh is a basic cube with one bone. Therefore ids = {0,0,0,0} and weights = {1.0f,0.0f,0.0f,0.0f} for every single vertex. In my mesh class I have a static function Mesh::genFormat() that handles attributes. vao is a static int in the mesh class and for_i is just a convenient macro I use to do for loops. Note that I correctly use glVertexArrayAttribIFormat.

// Builds GPU-side vertex and index buffers for this mesh using DSA
// (direct state access) calls, so no VAO/buffer binding is required here.
// @param vertices  per-vertex attributes, copied into m_vbo
// @param indices   triangle indices, copied into m_ibo
// @param textures  material textures, retained for drawing
Mesh::Mesh(const std::vector<MeshVertex>& vertices, const std::vector<uint>& indices, const std::vector<Texture>& textures)
{
    m_textures = textures;
    m_num_indices = indices.size();

    // create vertex and index buffers
    glCreateBuffers(1, &m_vbo);
    glCreateBuffers(1, &m_ibo);
    // Use .data() rather than &v[0]: operator[] on an empty vector is
    // undefined behavior, while data() is well-defined (may return null,
    // which glNamedBufferData accepts together with a size of 0).
    glNamedBufferData(m_vbo, sizeof(MeshVertex) * vertices.size(), vertices.data(), GL_STATIC_DRAW);
    glNamedBufferData(m_ibo, sizeof(uint) * indices.size(), indices.data(), GL_STATIC_DRAW);
}

void Mesh::genFormat()
{
    glCreateVertexArrays(1, &vao);
    for_i(7) { glEnableVertexArrayAttrib(vao, i); }
    glVertexArrayAttribFormat(vao, 0, 3, GL_FLOAT, false, offsetof(MeshVertex, pos)));
    glVertexArrayAttribFormat(vao, 1, 3, GL_FLOAT, false, offsetof(MeshVertex, normal));
    glVertexArrayAttribFormat(vao, 2, 2, GL_FLOAT, false, offsetof(MeshVertex, tex));
    glVertexArrayAttribFormat(vao, 3, 3, GL_FLOAT, false, offsetof(MeshVertex, tangent));
    glVertexArrayAttribFormat(vao, 4, 3, GL_FLOAT, false, offsetof(MeshVertex, bitangent));
        
    glVertexArrayAttribIFormat(vao, 5, 4, GL_UNSIGNED_INT, offsetof(MeshVertex, ids)));
    glVertexArrayAttribFormat(vao, 6, 4, GL_FLOAT, false, offsetof(MeshVertex, weights)));

    for_i(7) { glVertexArrayAttribBinding(vao, i, 0); }

    glBindVertexArray(0);
}

The following GLSL won't render anything.

#version 460 core

// Attribute locations must match the indices used in Mesh::genFormat().
layout(location = 0) in vec3 Pos;
layout(location = 1) in vec3 Normal;
layout(location = 2) in vec2 Tex;
layout(location = 3) in vec3 Tan;
layout(location = 4) in vec3 BiTan;
// uvec4 requires the attribute to be specified with
// glVertexArrayAttribIFormat on the C++ side (integer pipeline).
layout(location = 5) in uvec4 BoneIds;
layout(location = 6) in vec4 Weights;

out vec3 normal;
out vec2 tex;

// Camera UBO bound at binding point 2 (std140 layout).
layout(binding = 2, std140) uniform Camera
{
    mat4 VP;   // combined view-projection matrix
    vec4 cpos; // camera position (w unused)
};

uniform mat4 node;                                // mesh node transform
uniform mat4 bones_inverse_bind_mesh_parent[50];  // per-bone skinning matrices

void main()
{
    tex = Tex;

    // Blend up to four bone matrices by their weights (linear-blend
    // skinning). A zero first weight marks an unskinned vertex, which
    // falls back to the plain node transform.
    mat4 W = mat4(0.0f);
    if (Weights[0] != 0.0f)
    {
        for (uint i = 0; i < 4; i++)
            W = W + (Weights[i] * bones_inverse_bind_mesh_parent[BoneIds[i]]);
            
        W = node * W;
    }
    else
        W = node;
    
    gl_Position = VP * W * vec4(Pos, 1.0);
}

Since BoneIds[i] is always zero, if I replace

W = W + (Weights[i] * bones_inverse_bind_mesh_parent[BoneIds[i]]);

with

W = W + (Weights[i] * bones_inverse_bind_mesh_parent[0]);

the result should be unchanged. My matrix transforms are currently a bit off (something to fix later), but now the cube renders fine. So there is something wrong with BoneIds. After bashing my head against the wall on this for a while, I instead replaced

layout(location = 5) in uvec4 BoneIds;

with

layout(location = 5) varying uvec4 BoneIds;

after seeing some old GLSL online, and now everything works. What I don't understand is why. I've seen plenty of GLSL code on the internet work with integer attributes using the in keyword.

UPDATE :

If I replace glVertexArrayAttribIFormat in Mesh::genFormat() with

glVertexArrayAttribFormat(vao, 5, 4, GL_UNSIGNED_INT, false, offsetof(MeshVertex, ids));

in C++ and

layout(location = 5) in vec4 BoneIds;

in GLSL and cast bone ids from float to int in the glsl code, the code also works.


Solution

  • Okay, I solved the issue, even though I don't quite understand how this fixes the problem. My preferred graphics processor was set to "auto"; when I forced the system to use the NVIDIA GPU instead of my integrated graphics, everything worked fine (see the attached image of the setting).

    Update :

    I think it is as simple as this: my Intel integrated graphics only supports OpenGL 4.4, while glVertexArrayAttribIFormat was introduced in OpenGL 4.5.