Tags: python, glsl, compute-shader

Compute shader won't write to texture


I'm porting an application from Windows to Ubuntu 20.04, and none of my compute shaders will write to a texture. I've reproduced the problem in the program below: I initialize the texture to zeros, and after the shader runs the texture is still filled with zeros.

import numpy
import moderngl
import OpenGL.GL

compute_shader_source = '''
#version 450 core
#extension GL_ARB_uniform_buffer_object : enable

layout(local_size_x=1, local_size_y=1, local_size_z=1) in;

layout(r8ui) uniform uimage3D to_image;

void main() {
  imageStore(to_image, ivec3(gl_GlobalInvocationID.x, gl_GlobalInvocationID.y, gl_GlobalInvocationID.z), uvec4(255, 255, 255, 255));
  return;
}
'''.strip()

context = moderngl.create_context(require=450, standalone=True)

# Compile and link the compute shader using raw PyOpenGL calls.
compute_program = OpenGL.GL.glCreateProgram()
shader = OpenGL.GL.glCreateShader(OpenGL.GL.GL_COMPUTE_SHADER)
OpenGL.GL.glShaderSource(shader, [compute_shader_source])
OpenGL.GL.glCompileShader(shader)
compiled = OpenGL.GL.glGetShaderiv(shader, OpenGL.GL.GL_COMPILE_STATUS, None)
if not compiled:
  log = OpenGL.GL.glGetShaderInfoLog(shader).decode()
  raise RuntimeError("Couldn't compile shader:\n" + log)

OpenGL.GL.glAttachShader(compute_program, shader)
OpenGL.GL.glLinkProgram(compute_program)

linked = OpenGL.GL.glGetProgramiv(compute_program, OpenGL.GL.GL_LINK_STATUS, None)
if not linked:
  log = OpenGL.GL.glGetProgramInfoLog(compute_program).decode()
  raise RuntimeError("Couldn't link shader:\n" + log)
OpenGL.GL.glUseProgram(compute_program)

# Point the to_image uniform at image unit 0.
OpenGL.GL.glActiveTexture(OpenGL.GL.GL_TEXTURE0)
to_image_location = OpenGL.GL.glGetUniformLocation(compute_program, 'to_image')
OpenGL.GL.glUniform1i(to_image_location, 0)

# Allocate a 32x32x32 texture initialized to zeros.
textures = OpenGL.GL.glGenTextures(1)
zeros = numpy.zeros((32, 32, 32), dtype=numpy.uint8)
OpenGL.GL.glBindTexture(OpenGL.GL.GL_TEXTURE_3D, textures)
OpenGL.GL.glTexImage3D(OpenGL.GL.GL_TEXTURE_3D, 0, OpenGL.GL.GL_RED,
                           32, 32, 32, 0,
                           OpenGL.GL.GL_RED, OpenGL.GL.GL_UNSIGNED_BYTE, zeros)
OpenGL.GL.glTexParameteri(OpenGL.GL.GL_TEXTURE_3D, OpenGL.GL.GL_TEXTURE_MIN_FILTER, OpenGL.GL.GL_NEAREST)
OpenGL.GL.glTexParameteri(OpenGL.GL.GL_TEXTURE_3D, OpenGL.GL.GL_TEXTURE_MAG_FILTER, OpenGL.GL.GL_NEAREST)
OpenGL.GL.glBindTexture(OpenGL.GL.GL_TEXTURE_3D, 0)

# Bind the whole texture (layered) to image unit 0 with the GL_R8UI image format.
OpenGL.GL.glBindImageTexture(0, textures, 0, True, 0, OpenGL.GL.GL_READ_WRITE, OpenGL.GL.GL_R8UI)


# Launch one work group per texel, then wait for the GPU to finish.
OpenGL.GL.glDispatchCompute(32, 32, 32)
OpenGL.GL.glFinish()

# Read the texture back; the sum is still zero, so nothing was written.
OpenGL.GL.glBindTexture(OpenGL.GL.GL_TEXTURE_3D, textures)
data = OpenGL.GL.glGetTexImage(OpenGL.GL.GL_TEXTURE_3D, 0, OpenGL.GL.GL_RED, OpenGL.GL.GL_UNSIGNED_BYTE)
print('data', sum(data))

Solution

  • My question is pretty similar to this one: OpenGL Compute Shader - glDispatchCompue() does not run. At first I changed only the internalformat argument of glTexImage3D to GL_R8UI, which raised an invalid operation error. Changing internalformat to GL_R8UI and format to GL_RED_INTEGER together fixed it: the texture's integer format now matches the layout(r8ui) uimage3D declaration in the shader, and integer internal formats require GL_RED_INTEGER as the pixel transfer format. See the sketch below.
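
For reference, here is a minimal sketch of the corrected texture allocation, assuming the rest of the program above is unchanged (only the glTexImage3D call differs):

# Use the integer internal format GL_R8UI so the texture matches the
# layout(r8ui) uimage3D declaration, and GL_RED_INTEGER as the pixel
# transfer format, which integer internal formats require.
OpenGL.GL.glTexImage3D(OpenGL.GL.GL_TEXTURE_3D, 0, OpenGL.GL.GL_R8UI,
                       32, 32, 32, 0,
                       OpenGL.GL.GL_RED_INTEGER, OpenGL.GL.GL_UNSIGNED_BYTE, zeros)

With this change the readback at the end of the program should report non-zero values, since every texel is written with 255.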