Search code examples
Tags: image-processing, png, webgl, 16-bit, webgl2

How can I fix the display of this 16-bit RGBA PNG using WebGL2?


I am trying to work with 16-bit per channel RGBA data (and later RGB data) in WebGL2. I am having trouble properly displaying one of the reference images from PngSuite and I'd be eternally grateful if someone could take a look.

I am loading a 3x16 bits rgb color + 16 bit alpha-channel PNG file using pngtoy.js or UPNG.js (both give the same values which I believe are correct). Here is what I am seeing:

[screenshot: the 16-bit test PNG renders incorrectly in the canvas]

My WebGL2 code was based on gman's past answers which have been incredibly helpful. I don't know where to focus to investigate where I went wrong. I have spent an entire day looking at this so any advice or pointers where to look is greatly appreciated!!!

https://jsfiddle.net/mortac8/yq2tfe97/13/
(apologies for the messy jsfiddle with inline resources at the top)

// https://stackoverflow.com/a/57704283/1469613
//
// Uploads `img` as a 16-bit-per-channel unsigned-integer RGBA texture
// (RGBA16UI) and draws it to the current framebuffer with a full-screen
// two-triangle quad, normalizing the integer samples back to [0,1] in the
// fragment shader.
//
// Params:
//   canvas - unused here (kept for call-site compatibility)
//   gl     - a WebGL2RenderingContext
//   img    - typed array of UNSIGNED_SHORT RGBA pixel data (w * h * 4 entries)
//   w, h   - texture width and height in pixels
//
// NOTE(review): correct on-screen alpha additionally requires the context to
// be created with { premultipliedAlpha: false } — presumably that is the
// root cause of the reported artifact; verify at the getContext call site.
function addWebgl(canvas, gl, img, w, h) {
    var program = gl.createProgram();

    // texture: integer (non-normalized) format — must be sampled with a
    // usampler2D in the fragment shader.
    var tex = gl.createTexture(); // create empty texture
    gl.bindTexture(gl.TEXTURE_2D, tex);
    // NEAREST filtering is mandatory: integer textures are not filterable.
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
    gl.texImage2D(
        gl.TEXTURE_2D,     // target
        0,                 // mip level
        gl.RGBA16UI,       // internal format: unsigned 16-bit integer RGBA
        w, h,              // width and height
        0,                 // border (must be 0)
        gl.RGBA_INTEGER,   // format (fixed comment typo: was "gm.RGBA_INTEGER")
        gl.UNSIGNED_SHORT, // type
        img                // texture data
    );

    // buffer: two triangles covering clip space [-1,1] x [-1,1]
    var buffer = gl.createBuffer();
    var bufferData =  new Float32Array([
        -1, -1,
         1, -1,
         1,  1,
         1,  1,
        -1,  1,
        -1, -1
    ]);
    gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
    gl.bufferData(gl.ARRAY_BUFFER, bufferData, gl.STATIC_DRAW);

    // shaders
    program.vs = gl.createShader(gl.VERTEX_SHADER);
    gl.shaderSource(program.vs, `#version 300 es
    in vec4 vertex; // clip-space quad corner
    out vec2 pixelCoordinate; // variable used to pass position to fragment shader
    void main(){
        gl_Position = vertex;  // pass position straight through
        pixelCoordinate = vertex.xy*0.5+0.5; // map [-1,1] -> [0,1] texture coords
        pixelCoordinate.y = 1.0 - pixelCoordinate.y; //flip
    }
    `);

    program.fs = gl.createShader(gl.FRAGMENT_SHADER);
    gl.shaderSource(program.fs, `#version 300 es
    precision highp float;
    uniform highp usampler2D tex; // unsigned-integer sampler for RGBA16UI
    in vec2 pixelCoordinate; // receive pixel position from vertex shader
    out vec4 fooColor;
    void main() {
        uvec4 unsignedIntValues = texture(tex, pixelCoordinate);
        vec4 floatValues0To65535 = vec4(unsignedIntValues);
        vec4 colorValues0To1 = floatValues0To65535 / 65535.0;
        fooColor = colorValues0To1;
    }
    `);

    gl.compileShader(program.vs);
    checkCompileError(program.vs);
    gl.compileShader(program.fs);
    checkCompileError(program.fs);

    function checkCompileError(s) {
        if (!gl.getShaderParameter(s, gl.COMPILE_STATUS)) {
            console.error(gl.getShaderInfoLog(s));
        }
    }

    gl.attachShader(program, program.vs);
    gl.attachShader(program, program.fs);

    // Safe to flag for deletion now: shaders stay alive while attached.
    gl.deleteShader(program.vs);
    gl.deleteShader(program.fs);

    // program — bindAttribLocation must precede linkProgram to take effect.
    gl.bindAttribLocation(program, 0, "vertex");
    gl.linkProgram(program);
    // FIX: link failures were previously silent; surface them like compile errors.
    if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
        console.error(gl.getProgramInfoLog(program));
    }
    gl.useProgram(program);
    gl.enableVertexAttribArray(0);
    gl.vertexAttribPointer(0, 2, gl.FLOAT, false, 0, 0);
    gl.clear(gl.COLOR_BUFFER_BIT);
    gl.drawArrays(gl.TRIANGLES, 0, 6); // execute program
}

Solution

  • By default the WebGL context uses premultiplied alpha; disabling it fixes your issue.

    var myCtx = myCv.getContext('webgl2', { premultipliedAlpha: false });