Search code examples
Tags: compiler-errors, macros, glsl, shader, webgl2

GLSL WebGL2 Error Multi-line Macros Safari 12 and ios12, WebGL: INVALID_VALUE: shaderSource: string not ASCII


I'm attempting to compile a WebGL2 300 es shader that uses multi-line preprocessor macros, such as this toy example:

#define FOO() \
    do { } while (false);

the same goes for

#define FOO \
    do { } while (false);

or

    #define FOO() \
        do { } while (false); \

This works on the latest version of Chrome and Firefox, but Safari returns the following error when I call gl.shaderSource:

WebGL: INVALID_VALUE: shaderSource: string not ASCII

I used the following loop to check whether any of the character codes in the input source string have values greater than 127:

for (let i = 0; i < src.length; i += 1) {
    if (src.charCodeAt(i) > 127) {
       console.error(src.charAt(i), src.charCodeAt(i));
    }
}

The loop prints no errors. Also, I see no extraneous hidden characters when I enable invisible characters in my text editor.

Is Safari's GLSL compiler simply unable to process the backslash character?

Here is a minimal example fragment shader that fails due to the backslash:

#version 300 es
precision highp float;

out vec4 fragColor;

    #define FOO() \
        true

void main() {
    fragColor = vec4(1.0);
}

Solution

  • Safari shipped WebGL2 in Safari 15, released in September 2021

    Update (September 2020): Safari appears to be finally getting WebGL2. As of Safari Technology Preview 113, WebGL2 is supported.

    Original answer (October 2019): Safari does not support WebGL2 as of October 2019. It's an experimental feature on desktop Safari only and it doesn't pass even 20% of the WebGL2 conformance tests. See the source. Search for "NOT IMPLEMENTED" and you'll see over 80 WebGL2 API functions are not implemented, as just one example of how much work is not done.

    void WebGL2RenderingContext::uniform1ui(WebGLUniformLocation*, GC3Duint)
    {
        LOG(WebGL, "[[ NOT IMPLEMENTED ]] uniform1ui()");
    }
    
    void WebGL2RenderingContext::uniform2ui(WebGLUniformLocation*, GC3Duint, GC3Duint)
    {
        LOG(WebGL, "[[ NOT IMPLEMENTED ]] uniform2ui()");
    }
    
    void WebGL2RenderingContext::uniform3ui(WebGLUniformLocation*, GC3Duint, GC3Duint, GC3Duint)
    {
        LOG(WebGL, "[[ NOT IMPLEMENTED ]] uniform3ui()");
    }
    
    void WebGL2RenderingContext::uniform4ui(WebGLUniformLocation*, GC3Duint, GC3Duint, GC3Duint, GC3Duint)
    {
        LOG(WebGL, "[[ NOT IMPLEMENTED ]] uniform4ui()");
    }
    
    void WebGL2RenderingContext::uniform1uiv(WebGLUniformLocation*, Uint32List&&, GC3Duint, GC3Duint)
    {
        LOG(WebGL, "[[ NOT IMPLEMENTED ]] uniform1uiv()");
    }
    
    void WebGL2RenderingContext::uniform2uiv(WebGLUniformLocation*, Uint32List&&, GC3Duint, GC3Duint)
    {
        LOG(WebGL, "[[ NOT IMPLEMENTED ]] uniform2uiv()");
    }
    
    void WebGL2RenderingContext::uniform3uiv(WebGLUniformLocation*, Uint32List&&, GC3Duint, GC3Duint)
    {
        LOG(WebGL, "[[ NOT IMPLEMENTED ]] uniform3uiv()");
    }
    
    void WebGL2RenderingContext::uniform4uiv(WebGLUniformLocation*, Uint32List&&, GC3Duint, GC3Duint)
    {
        LOG(WebGL, "[[ NOT IMPLEMENTED ]] uniform4uiv()");
    }
    
    void WebGL2RenderingContext::uniformMatrix2x3fv(WebGLUniformLocation*, GC3Dboolean, Float32List&&, GC3Duint, GC3Duint)
    {
        LOG(WebGL, "[[ NOT IMPLEMENTED ]] uniformMatrix2x3fv()");
    }
    
    void WebGL2RenderingContext::uniformMatrix3x2fv(WebGLUniformLocation*, GC3Dboolean, Float32List&&, GC3Duint, GC3Duint)
    {
        LOG(WebGL, "[[ NOT IMPLEMENTED ]] uniformMatrix3x2fv()");
    }
    
    void WebGL2RenderingContext::uniformMatrix2x4fv(WebGLUniformLocation*, GC3Dboolean, Float32List&&, GC3Duint, GC3Duint)
    {
        LOG(WebGL, "[[ NOT IMPLEMENTED ]] uniformMatrix2x4fv()");
    }
    
    void WebGL2RenderingContext::uniformMatrix4x2fv(WebGLUniformLocation*, GC3Dboolean, Float32List&&, GC3Duint, GC3Duint)
    {
        LOG(WebGL, "[[ NOT IMPLEMENTED ]] uniformMatrix4x2fv()");
    }
    
    void WebGL2RenderingContext::uniformMatrix3x4fv(WebGLUniformLocation*, GC3Dboolean, Float32List&&, GC3Duint, GC3Duint)
    {
        LOG(WebGL, "[[ NOT IMPLEMENTED ]] uniformMatrix3x4fv()");
    }
    
    void WebGL2RenderingContext::uniformMatrix4x3fv(WebGLUniformLocation*, GC3Dboolean, Float32List&&, GC3Duint, GC3Duint)
    {
        LOG(WebGL, "[[ NOT IMPLEMENTED ]] uniformMatrix4x3fv()");
    }
    
    void WebGL2RenderingContext::vertexAttribI4i(GC3Duint, GC3Dint, GC3Dint, GC3Dint, GC3Dint)
    {
        LOG(WebGL, "[[ NOT IMPLEMENTED ]] vertexAttribI4i()");
    }
    
    void WebGL2RenderingContext::vertexAttribI4iv(GC3Duint, Int32List&&)
    {
        LOG(WebGL, "[[ NOT IMPLEMENTED ]] vertexAttribI4iv()");
    }
    
    void WebGL2RenderingContext::vertexAttribI4ui(GC3Duint, GC3Duint, GC3Duint, GC3Duint, GC3Duint)
    {
        LOG(WebGL, "[[ NOT IMPLEMENTED ]] vertexAttribI4ui()");
    }
    

    Otherwise in WebGL1 it's working for me. Tested on a 2014 MacBook Pro with macOS 10.14.6 and an iPhone X with iOS 13.1. (Note: in a JavaScript template literal, a backslash immediately before a newline is a line continuation and is removed from the resulting string, so the GLSL compiler never actually receives a backslash in the snippet below; write `\\` if you want the shader source to contain a literal backslash.)

    const fs = `
    precision highp float;
    
        #define FOO() \
            true
    
    void main() {
        gl_FragColor = vec4(1.0);
    }
    `;
    
    const gl = document.createElement('canvas').getContext('webgl');
    const sh = gl.createShader(gl.FRAGMENT_SHADER);
    gl.shaderSource(sh, fs);
    gl.compileShader(sh);
    const success = gl.getShaderParameter(sh, gl.COMPILE_STATUS);
    if (success) {
      console.log('pass: shader compiled successfully');
    } else {
      console.error('fail: shader failed to compile');
    }

    I suggest changing your test to check that character codes are >= 32 and <= 127, or use a hex editor or hexdump to look at your file.

    As for #define itself, there really isn't much point in using #define from JavaScript, since JavaScript, unlike C/C++, has pretty good string manipulation.

    const subs = {
      width: 100,
      height: 200,
    }
    
    const fs = `
    void main() {
      vec2 thingSize = vec2(${subs.width}, ${subs.height});
    }
    `;
    
    console.log(fs);

    or even

    function preprocess(s, subs) {
      const re = new RegExp(Object.keys(subs).join('|'), 'g');
      return s.replace(re, m => subs[m] || '');
    }
    
    const subs = { WIDTH: 123, HEIGHT: 456 };
    const fs = preprocess(`
    void main() {
      vec2 thingSize = vec2(WIDTH, HEIGHT);
    }
    `, subs);
    
    console.log(fs);

    I'm not saying you shouldn't use #define — just pointing out that there are lots of easy ways to manipulate strings in JavaScript, so the GLSL preprocessor isn't quite as useful as it is in C (OpenGL is a C-based API).