I am writing a proof-of-concept program to get more familiar with textures, but it's acting strangely in that it works for 1x1 textures but fails for all others. I scanned the program many times and looked over the arguments of each function, but can't understand why it works correctly for 1x1 textures (or the first pixel of larger textures), but shows a white square otherwise.
Here's a picture of the same program running with glTexImage2D's width and height arguments (1, 1) and (2,2) respectively.
This is the code that shows a white square (whereas it is expected to draw a 2x2 red, green, blue, yellow row-major texture).
note: the
in(Comment, Arg)
macro is equivalent to Arg
; it's only used in this particular program because it's tricky to remember the arguments of glTexImage2D off the top of your head.
#include <GL/glut.h>
// | reminder decorator macro.
// | reminder decorator macro: expands to Arg, the Comment is dropped.
#define in(Comment, Arg) Arg
// 2x2 RGBA8 texture data, row major: red, green, blue, yellow texels.
GLubyte bm[16] = {
0xff, 0x00, 0x00, 0xff,
0x00, 0xff, 0x00, 0xff,
0x00, 0x00, 0xff, 0xff,
0xff, 0xff, 0x00, 0xff
};
// Texture object name, filled in by glGenTextures() in main().
GLuint tex;
// Draws a textured quad centered on the origin (fixed-function pipeline).
void display()
{
glClear(GL_COLOR_BUFFER_BIT);
// Ensure the texture created in main() is the one sampled while drawing.
glBindTexture(GL_TEXTURE_2D, tex);
glLoadIdentity();
// | make the square smaller to contrast our target from the background.
glScalef(.5f, .5f, 1.f);
// Each vertex pairs a texture coordinate with one corner of the square.
glBegin(GL_TRIANGLE_FAN);
glTexCoord2f(0.f, 0.f);
glVertex2f(-1.f, 1.f);
glTexCoord2f(0.f, 1.f);
glVertex2f(1.f, 1.f);
glTexCoord2f(1.f, 1.f);
glVertex2f(1.f, -1.f);
glTexCoord2f(1.f, 0.f);
glVertex2f(-1.f, -1.f);
glEnd();
// Single-buffered mode (GLUT_SINGLE), so flush rather than swap buffers.
glFlush();
}
int main(int argc, char **argv)
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_SINGLE);
// A GL context exists only after the window is created.
glutCreateWindow("");
glEnable(GL_TEXTURE_2D);
glGenTextures(1, &tex);
glBindTexture(GL_TEXTURE_2D, tex);
// Upload the 2x2 RGBA texture as mip level 0 only (no other levels).
glTexImage2D (
in(target, GL_TEXTURE_2D),
in(level, 0),
in(internalFormat, GL_RGBA),
in(width, 2),
in(height, 2),
in(border, 0),
in(format, GL_RGBA),
in(type, GL_UNSIGNED_BYTE),
in(data, bm)
);
// BUG: glTexParameteri expects the binding target (GL_TEXTURE_2D) as its
// first argument, not the texture name. Passing tex generates
// GL_INVALID_ENUM and has no other effect, so the minification filter
// stays at its mipmapping default; with only level 0 uploaded the texture
// is mipmap-incomplete and samples as white.
glTexParameteri(tex, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(tex, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glutDisplayFunc(display);
glutMainLoop();
return 0;
}
if I change in(width, 2)
to in(width, 1)
and in(height, 2)
to in(height, 1)
, it shows a red square, and if I alter the first three values of the bm
array, it maps that color onto the square as expected.
#include <GL/glut.h>
// | reminder decorator macro.
// | reminder decorator macro: expands to Arg, the Comment is dropped.
#define in(Comment, Arg) Arg
// RGBA8 texel data; only the first texel (red) is used in this 1x1 variant.
GLubyte bm[16] = {
0xff, 0x00, 0x00, 0xff,
0x00, 0xff, 0x00, 0xff,
0x00, 0x00, 0xff, 0xff,
0xff, 0xff, 0x00, 0xff
};
// Texture object name, filled in by glGenTextures() in main().
GLuint tex;
// Draws a textured quad centered on the origin (fixed-function pipeline).
void display()
{
glClear(GL_COLOR_BUFFER_BIT);
// Ensure the texture created in main() is the one sampled while drawing.
glBindTexture(GL_TEXTURE_2D, tex);
glLoadIdentity();
// | make the square smaller to contrast our target from the background.
glScalef(.5f, .5f, 1.f);
// Each vertex pairs a texture coordinate with one corner of the square.
glBegin(GL_TRIANGLE_FAN);
glTexCoord2f(0.f, 0.f);
glVertex2f(-1.f, 1.f);
glTexCoord2f(0.f, 1.f);
glVertex2f(1.f, 1.f);
glTexCoord2f(1.f, 1.f);
glVertex2f(1.f, -1.f);
glTexCoord2f(1.f, 0.f);
glVertex2f(-1.f, -1.f);
glEnd();
// Single-buffered mode (GLUT_SINGLE), so flush rather than swap buffers.
glFlush();
}
int main(int argc, char **argv)
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_SINGLE);
// A GL context exists only after the window is created.
glutCreateWindow("");
glEnable(GL_TEXTURE_2D);
glGenTextures(1, &tex);
glBindTexture(GL_TEXTURE_2D, tex);
// Upload a 1x1 RGBA texture (just the first texel of bm) as mip level 0.
glTexImage2D (
in(target, GL_TEXTURE_2D),
in(level, 0),
in(internalFormat, GL_RGBA),
in(width, 1),
in(height, 1),
in(border, 0),
in(format, GL_RGBA),
in(type, GL_UNSIGNED_BYTE),
in(data, bm)
);
// Same bug as the 2x2 version: the first argument should be GL_TEXTURE_2D,
// not the texture name, so these calls fail with GL_INVALID_ENUM. It goes
// unnoticed here because a 1x1 level-0 image is already a full mipmap
// chain, so the texture is complete even with the default min filter.
glTexParameteri(tex, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(tex, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glutDisplayFunc(display);
glutMainLoop();
return 0;
}
Can somebody explain what is happening and how I can solve this issue?
Solution: I had passed tex to glTexParameteri instead of GL_TEXTURE_2D (side note: also fixed a typo, 1.1 to 1.f, in glVertex2f).
#include <GL/glut.h>
// | reminder decorator macro.
// | reminder decorator macro: expands to Arg, the Comment is dropped.
#define in(Comment, Arg) Arg
// 2x2 RGBA8 texture data, row major: red, green, blue, yellow texels.
GLubyte bm[16] = {
0xff, 0x00, 0x00, 0xff,
0x00, 0xff, 0x00, 0xff,
0x00, 0x00, 0xff, 0xff,
0xff, 0xff, 0x00, 0xff
};
// Texture object name, filled in by glGenTextures() in main().
GLuint tex;
// Draws a textured quad centered on the origin (fixed-function pipeline).
void display()
{
glClear(GL_COLOR_BUFFER_BIT);
// Ensure the texture created in main() is the one sampled while drawing.
glBindTexture(GL_TEXTURE_2D, tex);
glLoadIdentity();
// | make the square smaller to contrast our target from the background.
glScalef(.5f, .5f, 1.f);
// Each vertex pairs a texture coordinate with one corner of the square;
// the s/t ordering here differs from the question's listings so texels
// map in the intended orientation.
glBegin(GL_TRIANGLE_FAN);
glTexCoord2f(0.f, 0.f);
glVertex2f(-1.f, 1.f);
glTexCoord2f(1.f, 0.f);
glVertex2f(1.f, 1.f);
glTexCoord2f(1.f, 1.f);
glVertex2f(1.f, -1.f);
glTexCoord2f(0.f, 1.f);
glVertex2f(-1.f, -1.f);
glEnd();
// Single-buffered mode (GLUT_SINGLE), so flush rather than swap buffers.
glFlush();
}
int main(int argc, char **argv)
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_SINGLE);
// A GL context exists only after the window is created.
glutCreateWindow("");
glEnable(GL_TEXTURE_2D);
glGenTextures(1, &tex);
glBindTexture(GL_TEXTURE_2D, tex);
// Upload the 2x2 RGBA texture as mip level 0 only (no other levels).
glTexImage2D (
in(target, GL_TEXTURE_2D),
in(level, 0),
in(internalFormat, GL_RGBA),
in(width, 2),
in(height, 2),
in(border, 0),
in(format, GL_RGBA),
in(type, GL_UNSIGNED_BYTE),
in(data, bm)
);
// FIX: pass the binding target GL_TEXTURE_2D (not the texture name).
// GL_NEAREST as the min filter does not use mipmaps, so a single
// level-0 image is sufficient for the texture to be complete.
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glutDisplayFunc(display);
glutMainLoop();
return 0;
}
When you specify a 2x2 texture, your texture is not mipmap-complete, but you use a mip-mapping minification filter. The result you get is the correct result, as per the specification.
Your mistake lies in your attempt to set the non-mipmapping GL_NEAREST
filter:
glTexParameteri(tex, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameter
expects the texture binding point, which would be GL_TEXTURE_2D
, not the texture name. As a result, this call will generate a GL_INVALID_ENUM
error and have no other effect.