I am using com.jogamp.opengl.GL2 to render a 3D texture of shorts offscreen and then read the results back through glReadPixels. When the 3D texture's values are all positive, the results are what I expect, but when it contains negative values I cannot figure out how to configure OpenGL to give correct results. I have tried glPixelStorei(GL2.GL_PACK_SWAP_BYTES, 1) and glPixelStorei(GL2.GL_UNPACK_SWAP_BYTES, 1); in fact, all four combinations of GL_PACK_SWAP_BYTES/GL_UNPACK_SWAP_BYTES give incorrect results.
The data spans roughly -1024 to +1024, so as a workaround I shift it into the positive range before upload and shift it back after readback (+4096 before, then -4096 after, for good measure). That gives the correct results, but it feels really hacky. Is there a correct way to render and resolve signed 16-bit data through Java?
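The workaround looks roughly like this (a sketch; the helper name and constant are mine, and the +4096 offset is the one described above):
private static final short OFFSET = 4096; // shifts [-1024, +1024] well into the positive range

private void shiftValues(short[] data, int offset)
{
    for (int i = 0; i < data.length; i++) {
        data[i] = (short)(data[i] + offset);
    }
}

// shiftValues(volume, OFFSET) before upload, shiftValues(result, -OFFSET) after readback.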
Here is the basic code:
private int upload3DTexture(GL2 gl2, ShortBuffer data)
{
    int texName[] = new int[1];
    gl2.glPixelStorei(GL2.GL_PACK_ALIGNMENT, 2);
    gl2.glPixelStorei(GL2.GL_UNPACK_ALIGNMENT, 2);
    gl2.glPixelStorei(GL2.GL_PACK_SWAP_BYTES, 1);
    gl2.glPixelStorei(GL2.GL_UNPACK_SWAP_BYTES, 1);
    gl2.glGenTextures(1, texName, 0);
    gl2.glEnable(GL2.GL_TEXTURE_3D);
    gl2.glBindTexture(GL2.GL_TEXTURE_3D, texName[0]);
    gl2.glTexParameteri(GL2.GL_TEXTURE_3D, GL2.GL_TEXTURE_WRAP_S, GL2.GL_CLAMP_TO_BORDER);
    gl2.glTexParameteri(GL2.GL_TEXTURE_3D, GL2.GL_TEXTURE_WRAP_T, GL2.GL_CLAMP_TO_BORDER);
    gl2.glTexParameteri(GL2.GL_TEXTURE_3D, GL2.GL_TEXTURE_WRAP_R, GL2.GL_CLAMP_TO_BORDER);
    gl2.glTexParameteri(GL2.GL_TEXTURE_3D, GL2.GL_TEXTURE_MAG_FILTER, GL.GL_LINEAR);
    gl2.glTexParameteri(GL2.GL_TEXTURE_3D, GL2.GL_TEXTURE_MIN_FILTER, GL.GL_LINEAR);
    gl2.glTexEnvi(GL2.GL_TEXTURE_ENV, GL2.GL_TEXTURE_ENV_MODE, GL2.GL_MODULATE);
    ((java.nio.Buffer)data).rewind();
    gl2.glTexImage3D(GL2.GL_TEXTURE_3D, 0, GL2.GL_R16, nCols, nRows, nSlices, 0, GL2.GL_RED, GL.GL_SHORT, data);
    return texName[0];
}
private short[] renderAndResolve() {
    short[] volume = new short[64];
    short index = 0;
    for (int z = 0; z < 4; z++) {
        for (int y = 0; y < 4; y++) {
            for (int x = 0; x < 4; x++) {
                volume[index] = (short)(-100*z + y);
                index++;
            }
        }
    }
    ShortBuffer shortBuffer = ShortBuffer.wrap(volume);
    texID = upload3DTexture(gl2, shortBuffer);
    display();
    ShortBuffer resultBuffer = ShortBuffer.allocate(nSlices * nRows);
    resolveDisplay(resultBuffer, 4, 4);
    return resultBuffer.array();
}
private void resolveDisplay(ShortBuffer slice, int w, int h)
{
    GL2 gl2 = drawable.getGL().getGL2(); // The object that contains all the OpenGL methods.
    fbo.bind(gl2);
    gl2.glReadBuffer(GL2.GL_COLOR_ATTACHMENT0);
    ((java.nio.Buffer)slice).rewind();
    int xd = (frameWidth - w)/2;
    int yd = (frameHeight - h)/2;
    gl2.glReadPixels(xd, yd, w, h, GL2.GL_RED, GL2.GL_SHORT, slice);
    fbo.unbind(gl2);
    ((java.nio.Buffer)slice).rewind();
}
The issue is caused by the internal format of the texture:
gl2.glTexImage3D(GL2.GL_TEXTURE_3D, 0, GL2.GL_R16, nCols, nRows, nSlices, 0, GL2.GL_RED, GL.GL_SHORT, data);
The internal format GL_R16 for the data store of a texture image is not a signed format; it is an unsigned normalized 16-bit format, so negative input values are clamped to zero during pixel transfer.
I don't know which OpenGL version you are using. Desktop OpenGL provides the internal format GL_R16_SNORM, which is a 16-bit signed normalized format - see glTexImage3D. GL_R16_SNORM is exposed in the interface GL2GL3:
gl2.glTexImage3D(GL2.GL_TEXTURE_3D, 0, GL2GL3.GL_R16_SNORM,
nCols, nRows, nSlices, 0, GL2.GL_RED, GL.GL_SHORT, data);
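With GL_R16_SNORM, a stored short value s is sampled as clamp(s / 32767.0, -1.0, 1.0), and glReadPixels with GL_SHORT applies the inverse conversion, so a value such as -1024 survives the round trip unchanged (assuming the fragment writes the sampled value through unmodified).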
OpenGL ES (3.0) provides a signed normalized 8-bit format, GL_R8_SNORM. GL_R8_SNORM is exposed in the interface GL2ES3:
gl2.glTexImage3D(GL2.GL_TEXTURE_3D, 0, GL2ES3.GL_R8_SNORM,
nCols, nRows, nSlices, 0, GL2.GL_RED, GL.GL_SHORT, data);
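Note that an 8-bit internal format requantizes the 16-bit source values during pixel transfer, so some precision is lost.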
As an alternative, both desktop OpenGL and OpenGL ES provide a 16-bit floating-point format, e.g.:
gl2.glTexImage3D(GL2.GL_TEXTURE_3D, 0, GL2.GL_R16F,
nCols, nRows, nSlices, 0, GL2.GL_RED, GL.GL_SHORT, data);
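A floating-point color attachment also lets you read the result back without any signed-normalized conversion; a minimal sketch, assuming the FBO color attachment is GL_R16F (the java.nio.FloatBuffer name is hypothetical):
FloatBuffer floatSlice = FloatBuffer.allocate(w * h); // one float per pixel of the slice
gl2.glReadPixels(xd, yd, w, h, GL2.GL_RED, GL.GL_FLOAT, floatSlice);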