Search code examples
webglframebuffer

Rendering a fullscreen quad using WebGL


I have a framebuffer to which I rendered my scene and now I want to render this to a "fullscreen" quad. How can I set my camera, and what should I put in my vertex shader, in order to render the framebuffer's texture to the whole screen?

I've tried creating a fullscreen quad like this

var gl = this.gl;

// Two triangles covering the whole NDC range [-1,+1] on x and y.
var quad_vertex_buffer = gl.createBuffer();
var quad_vertex_buffer_data = new Float32Array([ 
    -1.0, -1.0, 0.0,
     1.0, -1.0, 0.0,
    -1.0,  1.0, 0.0,
    -1.0,  1.0, 0.0,
     1.0, -1.0, 0.0,
     1.0,  1.0, 0.0]);
// FIX: the buffer must be bound to gl.ARRAY_BUFFER *before* calling
// bufferData, and bufferData's first argument is the binding target
// (a GLenum such as gl.ARRAY_BUFFER), not the buffer object itself.
gl.bindBuffer(gl.ARRAY_BUFFER, quad_vertex_buffer);
gl.bufferData(gl.ARRAY_BUFFER, quad_vertex_buffer_data, gl.STATIC_DRAW);
// FIX: the attribute array must be enabled, otherwise the draw call
// reads no vertex data and the screen stays black.
gl.enableVertexAttribArray(this.shaderProgram.vertexPositionAttribute);
gl.vertexAttribPointer(this.shaderProgram.vertexPositionAttribute, 3, gl.FLOAT, false, 0, 0);
// Draw the two triangles (6 vertices).
gl.drawArrays(gl.TRIANGLES,0, 6);

but it still renders everything black. Any ideas, or examples/tutorials I can follow?


Solution

  • It is really not a big deal, once you get how to use vertex buffers and shaders. Then you can easily write a utility function to do it. Here is one I normally use, if you are looking for a reference:

    drawFullScreenQuad : function(shaderProgram) {
    
        if (!shaderProgram)
        {
            utils.warning("Missing the shader program!");
            return;
        }
    
        // Only created once
        if (this.screenQuadVBO == null)
        {
            var verts = [
                // First triangle:
                 1.0,  1.0,
                -1.0,  1.0,
                -1.0, -1.0,
                // Second triangle:
                -1.0, -1.0,
                 1.0, -1.0,
                 1.0,  1.0
            ];
            this.screenQuadVBO = this.gl.createBuffer();
            this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.screenQuadVBO);
            this.gl.bufferData(this.gl.ARRAY_BUFFER, new Float32Array(verts), this.gl.STATIC_DRAW);
        }
    
        // Bind:
        this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.screenQuadVBO);
        this.gl.enableVertexAttribArray(shaderProgram.vertexAttributes.vertexPositionNDC);
        this.gl.vertexAttribPointer(shaderProgram.vertexAttributes.vertexPositionNDC, 2, this.gl.FLOAT, false, 0, 0);
    
        // Draw 6 vertexes => 2 triangles:
        this.gl.drawArrays(this.gl.TRIANGLES, 0, 6);
    
        // Cleanup:
        this.gl.bindBuffer(this.gl.ARRAY_BUFFER, null);
    },
    

    Then you can go fancy like I did and compute the texture coordinates on-the-fly in the vertex shader:

    Vertex Shader:

    precision lowp float;
    
    // Incoming vertex position, already expressed in normalized
    // device coordinates ([-1,+1] on both axes).
    attribute vec2 vertexPositionNDC;
    
    // Texture coordinate interpolated for the fragment shader.
    varying vec2 vTexCoords;
    
    void main()
    {
        // Remap NDC [-1,+1] to the texture-space range [0,1].
        vTexCoords  = vertexPositionNDC * vec2(0.5) + vec2(0.5);
    
        // Positions are already in clip space: forward them directly,
        // no projection matrix needed.
        gl_Position = vec4(vertexPositionNDC, 0.0, 1.0);
    }
    
    

    Fragment Shader:

    precision mediump float;
    
    // Texture containing the previously rendered scene.
    uniform sampler2D colorMap;
    
    // Texture coordinate computed in the vertex shader.
    varying vec2 vTexCoords;
    
    void main()
    {
        // Copy the sampled texel straight to the output.
        vec4 sceneColor = texture2D(colorMap, vTexCoords);
        gl_FragColor = sceneColor;
    }
    
    

    The important point to note is that the vertices are in Normalized Device Coordinates (NDC), so you just pass the vertices in the [-1,1] range and forward them directly to gl_Position, without the need to multiply by a projection matrix.