Search code examples
c++winapiopenglwgl

Why does this OpenGL + WIN32 code produce Vertical lines?


To be clear, I've extensively tested this code and found the issue is somewhere in the code written prior to the WGL code. It's precisely in the WIN32 code. Now I think it could partially be caused by calling gluOrtho2D, but even then, that shouldn't be the primary cause as far as I understand. I may figure this out myself just by messing with stuff, but considering this issue (which occurred in a much larger project) has taken up a lot of my time, I thought it was worth posting this in case anyone else runs into the same problem. I'm hoping for an explanation as well as a fix/correction.

#include <GL/glew.h>
#include <glfw3.h>
#define GLFW_EXPOSE_NATIVE_WGL
#define GLFW_EXPOSE_NATIVE_WIN32
#include <glfw3native.h>

// Fixed client-area size of the window, in pixels.
constexpr int width = 1000, height = 1000;

// Main render-loop flag; the loop at the bottom of main() spins while true.
// NOTE(review): nothing in the visible code ever sets this to false.
bool running = true;

/*
 * Window procedure for the main window.
 *
 * Fix: the original forwarded every message to DefWindowProcA, so closing
 * the window never cleared the global `running` flag and the render loop
 * could not terminate.  Handle WM_CLOSE/WM_DESTROY explicitly; everything
 * else keeps the default behavior.
 */
LRESULT CALLBACK WIN32_processMessage(HWND hwnd, UINT msg,
    WPARAM wparam, LPARAM lparam)
{
    switch (msg)
    {
    case WM_CLOSE:
        DestroyWindow(hwnd);
        return 0;
    case WM_DESTROY:
        running = false;      // stop the main render loop
        PostQuitMessage(0);
        return 0;
    default:
        return DefWindowProcA(hwnd, msg, wparam, lparam);
    }
}

int main()
{
    HINSTANCE hinst = GetModuleHandleA(NULL);

    HICON icon = LoadIcon(hinst, IDI_APPLICATION);
    
    WNDCLASSA* wc = (WNDCLASSA*)memset(malloc(sizeof(WNDCLASSA)), 0, sizeof(WNDCLASSA));
    if (wc == NULL)
    {
        return -1;
    }

    wc->style = CS_DBLCLKS;
    wc->lpfnWndProc = WIN32_processMessage;
    wc->cbClsExtra = 0;
    wc->cbWndExtra = 0;
    wc->hInstance = hinst;
    wc->hIcon = icon;
    wc->hCursor = LoadCursor(NULL, IDC_ARROW);
    wc->hbrBackground = NULL;
    wc->lpszClassName = "toast_window_class";

    if (!RegisterClassA(wc))
    {
        return 1;
    }

    UINT32 windowX = 100;
    UINT32 windowY = 100;
    UINT32 windowWidth = width;
    UINT32 windowHeight = height;

    UINT32 windowStyle = WS_OVERLAPPED | WS_SYSMENU | WS_CAPTION;
    UINT32 windowExStyle = WS_EX_APPWINDOW;

    windowStyle |= WS_MAXIMIZEBOX;
    windowStyle |= WS_MINIMIZEBOX;
    windowStyle |= WS_THICKFRAME;

    RECT borderRect = { 0,0,0,0 };
    AdjustWindowRectEx(&borderRect, windowStyle, 0, windowExStyle);

    windowX += borderRect.left;
    windowY += borderRect.top;

    windowWidth += borderRect.right - borderRect.left;
    windowWidth += borderRect.bottom - borderRect.top;

    HWND window = CreateWindowExA(windowExStyle, "toast_window_class", (LPCSTR)"test",
        windowStyle, windowX, windowY, windowWidth, windowHeight, 0, 0, hinst, 0);

    if (window == 0)
    {
        return 2;
    }

    bool shouldActivate = true;
    const int showWindowCommandFlags = shouldActivate ? SW_SHOW : SW_SHOWNOACTIVATE;

    ShowWindow(window, showWindowCommandFlags);

    // WGL -----------------------------------------------------------
    HDC device = GetDC(window);

    PIXELFORMATDESCRIPTOR pfd;
    pfd.nSize = sizeof(PIXELFORMATDESCRIPTOR);
    pfd.nVersion = 1;
    pfd.dwFlags = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER;
    pfd.iPixelType = PFD_TYPE_RGBA;
    pfd.cColorBits = 32;
    pfd.cAlphaBits = 0;
    pfd.cAccumBits = 0;
    pfd.cDepthBits = 0;
    pfd.cStencilBits = 0;
    pfd.cAuxBuffers = 0;
    pfd.iLayerType = PFD_MAIN_PLANE;

    SetPixelFormat(device, ChoosePixelFormat(device, &pfd), &pfd);

    HGLRC render = wglCreateContext(device);

    wglMakeCurrent(device, render);

    // GLEW ---------------------------------------------------------

    const GLint res = glewInit();

    if (GLEW_OK != res)
    {
        return 1;
    }

    // setup OpenGL --------------------------------------------------
    gluOrtho2D(0, width, 0, height);

    // main loop -----------------------------------------------------
    while (running)
    {
        glClear(GL_COLOR_BUFFER_BIT);

        // rendering
        glBegin(GL_POINTS);
        for (int x = 10; x < width - 10; ++x)
        {
            for (int y = 10; y < height - 10; ++y)
            {
                glVertex2f(x, y);
            }
        }
        glEnd();

        SwapBuffers(device);
    }

    return 0;
}

Solution

  • Don't use GL_POINTS for drawing an image pixel-by-pixel.

    For one, it's probably the most inefficient way (on any system, in any configuration) to go about it, due to the way OpenGL, its implementations, and modern GPUs work. Instead – if using OpenGL – create a 2D array of the desired size, fill in the pixel values to your desire, then load the picture into a texture and draw a textured quad, or better yet a rectangle-covering triangle which you crop using glScissor.

    Back to your point-by-point drawing problems. The issue you're running into is essentially rounding errors and the way OpenGL transforms between modelview space, over projection space into NDC, and finally the viewport. BTW, you're not calling glViewport, so any adjustments in window size are not taken into account.

    The call to gluOrtho2D will set up the transformation matrix (in your case the modelview, since you didn't bother to switch to the projection matrix) so that coordinates in the range [0; width]×[0; height] will map to NDC coordinates [-1; 1]×[-1; 1]. These ranges are closed, i.e. they do include their limiting values. However, pixels in a pixel grid are addressed in a range over the integer numbers with the end of the range being open (i.e. ℤ²∩[0; width)×[0; height)). And that means that if you use the viewport extents for an orthogonal projection without adjustment, there'll be an ever-so-slight difference between pixel indices and OpenGL coordinates. I'll leave it to you, as an exercise, to figure out the math for what kind of adjustment to make (hint: you'll have to subtract a certain fractional value of the viewport extents from some of the glOrtho parameters).

    Now for certain window sizes you won't notice this problem, because for all "pixel" locations you put into OpenGL the round off will happen to coerce them toward your desired pixel locations. But for other window sizes, for certain coordinate values, the round off goes the other way, and you're left with columns and/or rows untouched. Which leads to the lines you're seeing sometimes.