
2D image texturing in OpenGL does not work as expected


I have a homework assignment on textures and I am trying to load a picture onto my cube. However, when I compile and run the program I only see my cube colored grey, and I don't understand why.

This is my Java code, where the cube coordinates and the UV coordinates are passed to the shaders:

import static org.lwjgl.opengl.GL11.GL_FLOAT;
import static org.lwjgl.opengl.GL11.GL_TRIANGLES;
import static org.lwjgl.opengl.GL11.glDrawArrays;
import static org.lwjgl.opengl.GL15.GL_ARRAY_BUFFER;
import static org.lwjgl.opengl.GL15.GL_STATIC_DRAW;
import static org.lwjgl.opengl.GL15.glBindBuffer;
import static org.lwjgl.opengl.GL15.glBufferData;
import static org.lwjgl.opengl.GL15.glGenBuffers;
import static org.lwjgl.opengl.GL20.glEnableVertexAttribArray;
import static org.lwjgl.opengl.GL20.glVertexAttribPointer;
import static org.lwjgl.opengl.GL30.*;

import lenz.opengl.AbstractOpenGLBase;
import lenz.opengl.ShaderProgram;
import lenz.opengl.Texture;

public class Aufgabe3undFolgende extends AbstractOpenGLBase {
public Matrix4 matrix = new Matrix4();
public Matrix4 projMatrix = new Matrix4(0.8f,500f);
//Coordinates for cube, which is in the center of the screen
    float[] cube = {
        -1.0f,-1.0f,-1.0f, // triangle 1 : begin
        -1.0f,-1.0f, 1.0f,
        -1.0f, 1.0f, 1.0f, // triangle 1 : end

         1.0f, 1.0f,-1.0f, // triangle 2 : begin
        -1.0f,-1.0f,-1.0f,
        -1.0f, 1.0f,-1.0f, // triangle 2 : end

         1.0f,-1.0f, 1.0f,
        -1.0f,-1.0f,-1.0f,
         1.0f,-1.0f,-1.0f,

         1.0f, 1.0f,-1.0f,
         1.0f,-1.0f,-1.0f,
        -1.0f,-1.0f,-1.0f,

        -1.0f,-1.0f,-1.0f,
        -1.0f, 1.0f, 1.0f,
        -1.0f, 1.0f,-1.0f,

         1.0f,-1.0f, 1.0f,
        -1.0f,-1.0f, 1.0f,
        -1.0f,-1.0f,-1.0f,

       -1.0f, 1.0f, 1.0f,
       -1.0f,-1.0f, 1.0f,
        1.0f,-1.0f, 1.0f,

        1.0f, 1.0f, 1.0f,
        1.0f,-1.0f,-1.0f,
        1.0f, 1.0f,-1.0f,

        1.0f,-1.0f,-1.0f,
        1.0f, 1.0f, 1.0f,
        1.0f,-1.0f, 1.0f,

         1.0f, 1.0f, 1.0f,
         1.0f, 1.0f,-1.0f,
        -1.0f, 1.0f,-1.0f,

         1.0f, 1.0f, 1.0f,
        -1.0f, 1.0f,-1.0f,
        -1.0f, 1.0f, 1.0f,

        1.0f, 1.0f, 1.0f,
       -1.0f, 1.0f, 1.0f,
        1.0f,-1.0f, 1.0f
    };

// UV coordinates for texturing
float[] uvKoord = {
    0.0f, 0.0f,  // lower-left corner
    1.0f, 0.0f,  // lower-right corner
    0.5f, 1.0f   // top-center corner
};

private ShaderProgram shaderProgram;


public static void main(String[] args) {
    new Aufgabe3undFolgende().start("CG Aufgabe 3", 700, 700);
}

@Override
protected void init() {

    shaderProgram = new ShaderProgram("aufgabe3");
    glUseProgram(shaderProgram.getId());
    // build the VAO
    int vaold = glGenVertexArrays();
    glBindVertexArray(vaold);
    makeVBOS(0, 3, cube);
    makeVBOS(1, 2, uvKoord);
    glEnable(GL_DEPTH_TEST); // activate the z-buffer (depth test)
    Texture texture = new Texture("pikatcu.png");
    glBindTexture(GL_TEXTURE_2D, texture.getId());
}


// build the VBOs (each VBO is one object)
public void makeVBOS(int index, int size, float[] name) {
    int vbold = glGenBuffers();
    glBindBuffer(GL_ARRAY_BUFFER, vbold);
    glBufferData(GL_ARRAY_BUFFER, name, GL_STATIC_DRAW);
    glVertexAttribPointer(index, size, GL_FLOAT, false, 0, 0);
    glEnableVertexAttribArray(index);
}

@Override
public void update() {
    matrix = new Matrix4();
    matrix.translate(0, 0, -3);
    matrix.rotateZ(0.1f);

  }

@Override
protected void render() {
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    int loc = glGetUniformLocation(shaderProgram.getId(), "viewMatrix");
    glUniformMatrix4fv(loc, false, matrix.getValuesAsArray());
    int loc2 = glGetUniformLocation(shaderProgram.getId(), "projMatrix");
    glUniformMatrix4fv(loc2, false, projMatrix.getValuesAsArray());

    glDrawArrays(GL_TRIANGLES, 0, 36); // 12*3 vertices starting at 0 -> 12 triangles -> 6 squares
}


@Override
public void destroy() {
}
   }

This is my vertex shader:

#version 330

layout(location = 0) in vec3 vertex;
layout(location = 1) in vec2 uvKoord;

uniform mat4 viewMatrix;
uniform mat4 projMatrix;

out vec3 vertexColor;
out vec2 uv;

void main() {
    vertexColor = vec3(1.0, 0.0, 0.0);
    uv = uvKoord;
    gl_Position = projMatrix * viewMatrix * vec4(vertex, 1.0);
}

And this is my fragment shader:

#version 330
in vec3 vertexColor;
in vec2 uv;
out vec3 pixelColor;
uniform sampler2D smplr;
void main() {
    vec4 texel = texture(smplr, uv);
    pixelColor = texel.rgb;
}

Could someone help me understand what I am doing wrong? I would really appreciate an explanation and not just code, as I would like to understand my mistake rather than just copy a solution. Thanks in advance.

PS: The Texture class was given to us by my lecturer. I did not include it because I thought the post would get too long, but I can provide it if needed. The picture I am loading is this one: https://mytoys.scene7.com/is/image/myToys/ext/7980544-01.jpg?$rtf_mt_prod-main_xl$ and I have resized it to 256x256.


Solution

  • The vertex coordinates and their attributes form one set of data. This means one texture coordinate (u, v) is needed per vertex coordinate.
    Since you have 36 vertex coordinate tuples (x, y, z), you need 36 texture coordinate tuples (u, v), too (a short sketch after the listings below shows how this repeated pattern can also be built programmatically):

    float[] uvKoord = {
    //  left bottom  left top     right top   left bottom  right top    right bottom
        0.0f, 0.0f,  0.0f, 1.0f,   1.0f, 1.0f, 0.0f, 0.0f,  1.0f, 1.0f,  1.0f, 0.0f, 
        0.0f, 0.0f,  0.0f, 1.0f,   1.0f, 1.0f, 0.0f, 0.0f,  1.0f, 1.0f,  1.0f, 0.0f, 
        0.0f, 0.0f,  0.0f, 1.0f,   1.0f, 1.0f, 0.0f, 0.0f,  1.0f, 1.0f,  1.0f, 0.0f, 
        0.0f, 0.0f,  0.0f, 1.0f,   1.0f, 1.0f, 0.0f, 0.0f,  1.0f, 1.0f,  1.0f, 0.0f, 
        0.0f, 0.0f,  0.0f, 1.0f,   1.0f, 1.0f, 0.0f, 0.0f,  1.0f, 1.0f,  1.0f, 0.0f, 
        0.0f, 0.0f,  0.0f, 1.0f,   1.0f, 1.0f, 0.0f, 0.0f,  1.0f, 1.0f,  1.0f, 0.0f 
    };
    

    Note that the order of the texture coordinates has to match the order of the vertex coordinates:

    float[] cube = {
    
        // left
        -1.0f,  1.0f, -1.0f,
        -1.0f, -1.0f, -1.0f,
        -1.0f, -1.0f,  1.0f,
    
        -1.0f,  1.0f, -1.0f,
        -1.0f, -1.0f,  1.0f,
        -1.0f,  1.0f,  1.0f,
    
         // right
         1.0f, -1.0f, -1.0f,
         1.0f,  1.0f, -1.0f,
         1.0f,  1.0f,  1.0f,
    
         1.0f, -1.0f, -1.0f,
         1.0f,  1.0f,  1.0f,
         1.0f, -1.0f,  1.0f,
    
         // front
         1.0f, -1.0f, -1.0f,
        -1.0f, -1.0f, -1.0f,
        -1.0f, -1.0f,  1.0f,
    
         1.0f, -1.0f, -1.0f,
        -1.0f, -1.0f,  1.0f,
         1.0f, -1.0f,  1.0f,
    
         // back
         -1.0f, 1.0f, -1.0f,
          1.0f, 1.0f, -1.0f,
          1.0f, 1.0f,  1.0f,
    
         -1.0f, 1.0f, -1.0f,
          1.0f, 1.0f,  1.0f,
         -1.0f, 1.0f,  1.0f,
    
          // bottom
         1.0f, -1.0f, -1.0f,
        -1.0f, -1.0f, -1.0f,
        -1.0f,  1.0f, -1.0f,
    
         1.0f, -1.0f, -1.0f,
        -1.0f,  1.0f, -1.0f,
         1.0f,  1.0f, -1.0f,
    
         // top
         -1.0f, -1.0f, 1.0f,
          1.0f, -1.0f, 1.0f,
          1.0f,  1.0f, 1.0f,
    
         -1.0f, -1.0f, 1.0f,
          1.0f,  1.0f, 1.0f,
         -1.0f,  1.0f, 1.0f,
    };
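
    Since the same per-face pattern is just repeated six times, the full UV array can also be built in a loop. The following is a minimal sketch (the helper name buildCubeUVs is made up for illustration); the check at the end simply restates the rule above: one (u, v) pair per (x, y, z) tuple.

    // Hypothetical helper: repeats the per-face UV pattern once per cube face.
    private static float[] buildCubeUVs() {
        float[] faceUV = {
            0.0f, 0.0f,  0.0f, 1.0f,  1.0f, 1.0f,  // first triangle of a face
            0.0f, 0.0f,  1.0f, 1.0f,  1.0f, 0.0f   // second triangle of a face
        };
        float[] uv = new float[6 * faceUV.length]; // 6 faces
        for (int face = 0; face < 6; face++) {
            System.arraycopy(faceUV, 0, uv, face * faceUV.length, faceUV.length);
        }
        return uv;
    }

    // Usage, e.g. before the makeVBOS(1, 2, uvKoord) call:
    float[] uvKoord = buildCubeUVs();
    assert uvKoord.length / 2 == cube.length / 3; // one (u, v) per vertex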