GLuint buffer;
glCreateBuffers(1, &buffer);
glNamedBufferStorage(buffer, 1024, NULL, GL_MAP_WRITE_BIT | GL_MAP_READ_BIT | GL_DYNAMIC_STORAGE_BIT);
unsigned int x = 0x01010101;
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glClearBufferData(GL_ARRAY_BUFFER, GL_R32UI, GL_RED, GL_UNSIGNED_INT, &x);
int* p = (int*)glMapBuffer(GL_ARRAY_BUFFER, GL_READ_WRITE);
for (int i = 0; i < 4; ++i)
cout << hex << p[i] << " ";
glUnmapBuffer(GL_ARRAY_BUFFER);
However, the observed output is '1010102' for every element, rather than the expected '1010101'.
glClearBufferData(GL_ARRAY_BUFFER, GL_R32UI, GL_RED_INTEGER, GL_UNSIGNED_INT, &x);

Because GL_R32UI is an integer internal format, the proper format parameter is GL_RED_INTEGER, not GL_RED.