Tags: javascript, glsl, shader, webgl, twgl.js

Why does my vertex shader ignore Uint8Array data but not Float32Array data?


My vertex shader has the following attribute:

attribute float a_color;

I have an ArrayBuffer (WebAssembly's memory) that consists only of 0s and 1s, and I create a view of it with a Uint8Array. However, my vertex shader ignores it (no errors, but it seems to treat every value as 0). I am using twgl. Here's my code:

Vertex shader:

attribute vec2 a_position;
attribute float a_color;
uniform vec2 u_resolution;
uniform float u_point_width;
varying vec4 v_color;

vec2 normalize_coords(vec2 position) {
   float x = position[0];
   float y = position[1];
   float resx = u_resolution[0];
   float resy = u_resolution[1];
   return vec2(2.0 * x / resx - 1.0, 2.0 * y / resy - 1.0);
}

void main() {
   gl_PointSize = u_point_width;
   vec2 coords = normalize_coords(a_position);
   gl_Position = vec4(coords * vec2(1, -1), 0, 1);
   v_color = vec4(0, a_color, 0, 1);
}

Fragment shader:

precision highp float;

varying vec4 v_color;
void main() {
  gl_FragColor = v_color;
}

JavaScript:

  const canvas = document.getElementById("c");

  const CELL_SIZE = 10;

  const gl = canvas.getContext("webgl");
  const programInfo = twgl.createProgramInfo(gl, [
    "vertex-shader",
    "fragment-shader"
  ]);

  twgl.setDefaults({ attribPrefix: "a_" });

  twgl.resizeCanvasToDisplaySize(gl.canvas, window.devicePixelRatio);
  gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);

  const universe = Universe.new(
    Math.ceil(gl.canvas.width / CELL_SIZE / 2),
    Math.ceil(gl.canvas.height / CELL_SIZE / 2)
  );
  const width = universe.width();
  const height = universe.height();

  let points = [];

  for (let i = 0; i < width; i++) {
    for (let j = 0; j < height; j++) {
      points.push([i * CELL_SIZE * 2, j * CELL_SIZE * 2]);
    }
  }

  const cellsPtr = universe.cells();
  // Webassembly memory (only 0's and 1's)
  const cells = new Uint8Array(memory.buffer, cellsPtr, width * height);

  const arrays = {
    position: {
      data: new Float32Array(points.flat()),
      size: 2
    },
    color: {
      data: cells,
      size: 1,
      type: gl.UNSIGNED_BYTE
    }
  };

  const bufferInfo = twgl.createBufferInfoFromArrays(gl, arrays);

  const uniforms = {
    u_resolution: [gl.canvas.width, gl.canvas.height],
    u_point_width: CELL_SIZE
  };

  gl.clearColor(0, 0, 0, 1);
  gl.clear(gl.COLOR_BUFFER_BIT);
  gl.useProgram(programInfo.program);

  twgl.setUniforms(programInfo, uniforms);

  twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);

  twgl.drawBufferInfo(gl, bufferInfo, gl.POINTS);

  function renderLoop(time) {
    twgl.resizeCanvasToDisplaySize(gl.canvas, window.devicePixelRatio);
    gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);

    universe.tick();
    const cellsPtr = universe.cells();
    const cells = new Uint8Array(
      memory.buffer,
      cellsPtr,
      width * height
    );

    const uniforms = {
      u_resolution: [gl.canvas.width, gl.canvas.height],
      u_point_width: CELL_SIZE
    };

    gl.clearColor(0, 0, 0, 1);
    gl.clear(gl.COLOR_BUFFER_BIT);

    gl.useProgram(programInfo.program);
    twgl.setUniforms(programInfo, uniforms);

    twgl.setAttribInfoBufferFromArray(gl, bufferInfo.attribs.a_color, {
      data: cells,
      size: 1,
      type: gl.UNSIGNED_BYTE
    }); // Dynamically update buffer with new colors

    twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);

    twgl.drawBufferInfo(gl, bufferInfo, gl.POINTS);
    requestAnimationFrame(renderLoop);
  }

  requestAnimationFrame(renderLoop);

I have no problems if I manually convert cells to Float32Array. Am I doing something wrong?
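
The conversion that does work is roughly this (a sketch of what I mean by converting to Float32Array):

  // Roughly what "manually convert cells to Float32Array" means:
  const floatCells = new Float32Array(cells); // copies each 0/1 byte as a float

  const arrays = {
    position: { data: new Float32Array(points.flat()), size: 2 },
    color: { data: floatCells, size: 1 } // this version shows the green points
  };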

Here's a live simplified version of the code above (there should be green points on the screen but there aren't):

https://codesandbox.io/s/silly-jackson-ut3wm


Solution

  • If you actually are using twgl then, in the same way that it guesses size = 3 for positions, 4 for colors, and 2 for texcoords, it also guesses normalize: true for Uint8Array data, so you need to tell it normalize: false. With normalization on, your byte values of 0 and 1 reach the shader as 0.0 and 1/255 (about 0.004), which is why everything looks like 0.

    From the docs

    normalize boolean --- normalize for vertexAttribPointer. Default is true if type is Int8Array or Uint8Array otherwise false.

    So,

      const arrays = {
        position: {
          data: new Float32Array(points.flat()),
          size: 2
        },
        color: {
          data: cells,
          size: 1,
          type: gl.UNSIGNED_BYTE,
          normalize: false,       // added <------------------------
        }
      };
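
    For context, here is a rough sketch of the plain-WebGL call this option maps to (illustrative only; someColorBuffer is a hypothetical WebGLBuffer holding cells, and twgl creates and binds this for you). The normalized flag is what decides whether your bytes are rescaled:

      // Rough equivalent of what twgl sets up for the a_color attribute.
      const colorLoc = gl.getAttribLocation(programInfo.program, "a_color");
      gl.bindBuffer(gl.ARRAY_BUFFER, someColorBuffer); // hypothetical buffer with `cells`
      gl.enableVertexAttribArray(colorLoc);
      gl.vertexAttribPointer(
        colorLoc,
        1,                // size: one component per point
        gl.UNSIGNED_BYTE, // type matches the Uint8Array view
        false,            // normalized: false keeps 0/1 as 0.0/1.0
        0,                // stride
        0                 // offset
      );
      // With normalized = true the bytes are divided by 255,
      // so a stored 1 becomes ~0.0039 and the points look black.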
    

    BTW: if cells is a Uint8Array then you don't need type: gl.UNSIGNED_BYTE; twgl will infer gl.UNSIGNED_BYTE from the fact that the data is a Uint8Array.
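
    So a minimal version of the fix (a sketch using the same cells as above) can drop type entirely:

      const arrays = {
        position: {
          data: new Float32Array(points.flat()),
          size: 2
        },
        color: {
          data: cells,       // Uint8Array, so twgl infers gl.UNSIGNED_BYTE
          size: 1,
          normalize: false   // still needed: Uint8Array defaults to true
        }
      };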