Tags: c++, opengl, glsl, non-ascii-characters

Why is fgetc() reading a non-ASCII character? (Trying to load GLSL shaders)


I expect my program to draw a simple red triangle. The vertex and fragment shaders are supposed to be loaded from external files via my loadShader() function, but for some strange reason loadShader() reads non-ASCII characters, which causes shader compile errors.

I tried converting both of my shader files to an ASCII encoding following the instructions provided here (using Notepad++), but the outcome is the same: the shader compiler complains about non-ASCII characters (see screenshot below), and a white triangle is drawn instead of the expected red one (because the shaders fail to compile).

Further Troubleshooting Attempts:

(Note: I additionally uploaded my source code to Pastebin for easy line number referencing.)

The critical code is lines 14 to 44 of the Pastebin paste -- my loadShader() function. The "tell file size" section starting at line 22 works properly, as the screenshot below shows: my debug output (line 25) reports the same byte count as the file size shown by Windows Explorer. Furthermore, the buffer allocated in line 28 matches the shader file size exactly, as evidenced by the debug output in line 41 (see screenshot). Lastly, the syntax of both shaders is correct, since I previously hard-coded them and got the expected red triangle.

Screenshot:

[Screenshot: shader compiler error about non-ASCII characters, plus debug output]

Source Code:

// Expected result: Draws a simple red colored triangle to the screen
// Problem to debug: Why does my loadShader function read non-ASCII characters?

#include <glad/glad.h>
#define GLFW_DLL
#include <GLFW/glfw3.h>
#include <cstdio>
#include <cstdlib>  // malloc
#include <cstring>  // strlen
#include <iostream>

// TODO: Debug
/* Loads shader text files from a given file name (extension required) 
 * and returns the shader code as a null terminated string from that file. 
 */
const char * loadShader(const char * shaderFileName) {
    FILE * shaderFile{};
    fopen_s(&shaderFile, shaderFileName, "r");
    if (!shaderFile) {
        std::cerr << "ERROR: Cannot open file" << std::endl;
        return "\0";
    }
    // Tell file size
    fseek(shaderFile, 0L, SEEK_END);
    unsigned long shaderFileSize{};
    shaderFileSize = ftell(shaderFile);
    std::cout << "DEBUG: shaderFileSize: " << shaderFileSize << std::endl; // Debug output
    rewind(shaderFile);
    // Read from file
    char * buffer = (char *)malloc(sizeof(char)*(shaderFileSize+1UL));
    if (!buffer) {
        std::cerr << "ERROR: Failed to allocate memory" << std::endl;
        return "\0";
    }
    int c{};
    int i = 0;
    while ((c = fgetc(shaderFile)) != EOF) {
        buffer[i++] = c;
    }
    // Put '\0' at the end of the buffer (required for OpenGL)
    buffer[shaderFileSize] = '\0';
    std::cout << "DEBUG: buffer: " << buffer << std::endl; // Debug output
    std::cout << "DEBUG: strlen: " << strlen(buffer) << std::endl; // Debug output
    fclose(shaderFile);
    return buffer;
} // end of loadShader() 

int main() {
    // Initialize GLFW
    if (!glfwInit()) {
        std::cerr << "ERROR: Failed to initialize GLFW3" << std::endl;
        return -1;
    }
    // Create window
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
    GLFWwindow* window = glfwCreateWindow(640, 480, "OpenGL Game", nullptr, nullptr);
    if (!window) {
        std::cerr << "ERROR: Failed to create window with GLFW3" << std::endl;
        glfwTerminate();
        return -1;
    }
    glfwMakeContextCurrent(window);
    // Load all OpenGL function pointers.
    if (!gladLoadGLLoader((GLADloadproc)glfwGetProcAddress)) {
        std::cerr << "ERROR: Failed to initialize GLAD" << std::endl;
        return -1;
    }
    // Get info from renderer
    const GLubyte* rendererName = glGetString(GL_RENDERER);
    const GLubyte* OpenGLVersionSupported = glGetString(GL_VERSION);
    std::cout << rendererName << std::endl << OpenGLVersionSupported << std::endl;
    // Enable depth
    glEnable(GL_DEPTH_TEST);
    glDepthFunc(GL_LESS);
    // Define triangle
    GLfloat points[] = { 0.0f, 0.5f, 0.0f,
                        0.5f, -0.5f, 0.0f,
                        -0.5f, -0.5f, 0.0f };
    // Create buffer object
    GLuint vertexBufferObject = 0;
    glGenBuffers(1, &vertexBufferObject);
    glBindBuffer(GL_ARRAY_BUFFER, vertexBufferObject);
    glBufferData(GL_ARRAY_BUFFER, sizeof(points), points, GL_STATIC_DRAW);
    // Create vertex attribute object
    GLuint vertexAttributeObject = 0;
    glGenVertexArrays(1, &vertexAttributeObject);
    glBindVertexArray(vertexAttributeObject);
    glEnableVertexAttribArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, vertexBufferObject);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, nullptr);
    // Load shaders
    const char * vertexShaderCode = loadShader("VertexShader.glsl");
    const char * fragmentShaderCode = loadShader("FragmentShader.glsl");
    // Compile shaders
    GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(vertexShader, 1, &vertexShaderCode, nullptr);
    glCompileShader(vertexShader);
    // Check vertex shader for compile errors
    int success = 0;
    char message[512] = "";
    glGetShaderiv(vertexShader, GL_COMPILE_STATUS, &success);
    if (!success) {
        glGetShaderInfoLog(vertexShader, 512, nullptr, message);
        std::cerr << "ERROR: Failed to compile vertex shader" << std::endl << message;
    }
    GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(fragmentShader, 1, &fragmentShaderCode, nullptr);
    glCompileShader(fragmentShader);
    // Check fragment shader for compile errors
    success = 0;
    glGetShaderiv(fragmentShader, GL_COMPILE_STATUS, &success);
    if (!success) {
        glGetShaderInfoLog(fragmentShader, 512, nullptr, message);
        // TODO: Specify error type in message
        std::cerr << "ERROR: Failed to compile fragment shader" << std::endl << message;
    }
    // Create shader program and link it
    GLuint shaderProgram = glCreateProgram();
    glAttachShader(shaderProgram, vertexShader);
    glAttachShader(shaderProgram, fragmentShader);
    glLinkProgram(shaderProgram);
    // Check for linking errors
    glGetProgramiv(shaderProgram, GL_LINK_STATUS, &success);
    if (!success) {
        glGetProgramInfoLog(shaderProgram, 512, nullptr, message);
        // TODO: Specify error type in message
        std::cerr << "ERROR: Failed to link shaders" << std::endl << message;
    }
    // Render loop
    while (!glfwWindowShouldClose(window)) {
        // Wipe the drawing surface clear
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        // Use shader program and vertex attribute object
        glUseProgram(shaderProgram);
        glBindVertexArray(vertexAttributeObject);
        // Draw from the currently bound vertex attribute object
        glDrawArrays(GL_TRIANGLES, 0, 3);
        glfwPollEvents();
        glfwSwapBuffers(window);
    }
    // Exit program 
    glfwTerminate();
    return 0;
} // end of main()

Solution

  • 0xCD is the value the MSVC debug CRT uses to fill uninitialized memory. What happens is that your file uses \r\n line endings, but you open it in text mode, so the CRT converts them to \n. As a result you read fewer bytes into buffer than the size returned by ftell: the final value of i ends up less than shaderFileSize, leaving uninitialized bytes between the last byte written to buffer and the null terminator.
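
    You can see this for yourself by comparing the byte count ftell reports against the number of characters fgetc actually delivers in text mode. (This is a hypothetical diagnostic fragment; the variable names are mine, not part of the original code.)

    // Hypothetical diagnostic: count the bytes fgetc() actually delivers
    // in text mode and compare against ftell(). On Windows, a file with
    // \r\n line endings yields fewer bytes than ftell() reports, because
    // the CRT collapses each \r\n pair into a single \n.
    FILE * f{};
    fopen_s(&f, "VertexShader.glsl", "r");  // text mode, like the original code
    fseek(f, 0L, SEEK_END);
    long reportedSize = ftell(f);           // size of the file on disk
    rewind(f);
    long actuallyRead = 0;
    while (fgetc(f) != EOF) ++actuallyRead; // bytes after \r\n -> \n translation
    fclose(f);
    std::cout << "ftell: " << reportedSize
              << ", fgetc delivered: " << actuallyRead << std::endl;
    // The difference equals the number of \r\n line endings in the file.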

    Instead, replace your code with:

    FILE * shaderFile{};
    fopen_s(&shaderFile, shaderFileName, "rb"); // <-------- HERE !!!! ("b" = binary mode)
    fseek(shaderFile, 0L, SEEK_END);
    unsigned long shaderFileSize = ftell(shaderFile);
    rewind(shaderFile);
    char * buffer = (char *)malloc(shaderFileSize + 1);
    fread(buffer, shaderFileSize, 1, shaderFile); // now reads exactly shaderFileSize bytes
    buffer[shaderFileSize] = '\0';
    fclose(shaderFile);
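
    In binary mode the \r\n pairs reach the buffer unchanged, so the byte count matches ftell exactly, and GLSL compilers accept \r\n line endings without complaint. If you want to be defensive, you can also check fread's return value before terminating the buffer -- a sketch using the same variables as above (only itemsRead is new):

    size_t itemsRead = fread(buffer, shaderFileSize, 1, shaderFile); // one item of shaderFileSize bytes
    if (itemsRead != 1) { // fread returns the number of complete items read
        std::cerr << "ERROR: Failed to read " << shaderFileName << std::endl;
        free(buffer);
        fclose(shaderFile);
        return "\0"; // matches the function's existing error handling
    }
    buffer[shaderFileSize] = '\0';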