Vertex Attribute Arrays aren't working

I think I’ve narrowed it down to a few methods, but my model render won’t work. Immediate mode works with the shader bound and everything, but somethings wrong with the model/vertex attrib code. It just gives me a blank screen. So its not the shaders, or the shader loader. Also, I doubt its the vertex. But it might be the vertex attrib wrapper.

(Model.class)

/**
 * Links the model's vertex data with the vertex attribute objects
 * 
 * @param positions The vertex positions vertex attribute ID
 * @param textures The vertex texture coordinate vertex attribute ID
 * @param colors The vertex color vertex attribute ID
 * @param normals The vertex normal vertex attribute ID
 */
public void link(int positions, int textures, int colors, int normals){
	float[] vertexPos = new float[model.length * 4];
	float[] textureUV = new float[model.length * 2];
	float[] color = new float[model.length * 4];
	float[] normal = new float[model.length * 3];
	
	for(int i = 0; i < model.length; i++){
		vertexPos[4*i+0] = model[i].position.x;
		vertexPos[4*i+1] = model[i].position.y;
		vertexPos[4*i+2] = model[i].position.z;
		vertexPos[4*i+3] = model[i].position.w;
		
		textureUV[2*i+0] = model[i].textureUV.x;
		textureUV[2*i+1] = model[i].textureUV.x;
		
		color[4*i+0] = model[i].color.getRed();
		color[4*i+1] = model[i].color.getGreen();
		color[4*i+2] = model[i].color.getBlue();
		color[4*i+3] = model[i].color.getAlpha();
		
		normal[3*i+0] = model[i].normal.x;
		normal[3*i+1] = model[i].normal.y;
		normal[3*i+2] = model[i].normal.z;
	}
	
	FloatBuffer indicesBuffer = BufferUtils.createFloatBuffer(indices.length);
	indicesBuffer.put(indices);
	indicesBuffer.flip();
	
	va = GL15.glGenBuffers();
	GL30.glBindVertexArray(va);
		this.positions = new VertexAttrib(positions, 4, vertexPos);
		this.textureUVs = new VertexAttrib(textures, 2, textureUV);
		this.colors = new VertexAttrib(colors, 4, color);
		this.normals = new VertexAttrib(normals, 3, normal);
	GL30.glBindVertexArray(0);
	
	vi = GL15.glGenBuffers();
	GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, vi);
	GL15.glBufferData(GL15.GL_ELEMENT_ARRAY_BUFFER, indicesBuffer, GL15.GL_STATIC_DRAW);
	GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, 0);
}
/**
 * Draws the vertex attribute object
 * @param mode The mode to draw
 */
public void render(int mode){
	GL30.glBindVertexArray(va);
		positions.bind();
		textureUVs.bind();
		colors.bind();
		normals.bind();
			GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, vi);
			GL11.glDrawElements(mode, indices.length, GL11.GL_UNSIGNED_BYTE, 0);
			GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, 0);
		positions.unbind();
		textureUVs.unbind();
		colors.unbind();
		normals.unbind();
	GL30.glBindVertexArray(0);
}

My render method:

ambient = new Shader();
ambient.attachShaderSource(new File("./shaders/ambient.fs"), GL20.GL_FRAGMENT_SHADER);
ambient.attachShaderSource(new File("./shaders/ambient.vs"), GL20.GL_VERTEX_SHADER);

ambient.bindAttribute(0, "in_Position");
ambient.bindAttribute(1, "in_TextureUV");
ambient.bindAttribute(2, "in_Color");
ambient.bindAttribute(3, "in_Normal");
ambient.link();

model = new Model(4);
model.add(0, new Vertex(new Vector4f(0, 1, 0, 1), new Color(1, 0, 0, 1)));
model.add(1, new Vertex(new Vector4f(0, 0, 0, 1), new Color(0, 0, 1, 1)));
model.add(2, new Vertex(new Vector4f(1, 0, 0, 1), new Color(0, 1, 0, 1)));
model.add(3, new Vertex(new Vector4f(1, 1, 0, 1), new Color(1, 1, 1, 1)));
model.setDrawingOrder(0, 1, 2, 2, 3, 0);
model.link(0, 1, 2, 3);

while(!Window.isClosing()){
	GL11.glClearColor(0.5f, 0.5f, 0.5f, 1);
	GL11.glClear(GL11.GL_COLOR_BUFFER_BIT);
	
	ambient.bind();
	model.render(GL11.GL_TRIANGLES);
	Shader.unbind();
		
	Window.update();
}

Full “VertexAttrib.class”: http://pastebin.com/iBy23Akb
Full “Model.class”: http://pastebin.com/XFCxV9gk
Full “Vertex.class”: http://pastebin.com/B0X5kryZ
Full "ambient.* ": http://pastebin.com/CbsanRWB
Full “Shader.class”: http://pastebin.com/uCWpyi3Q

Screenie:

This is where you are having problem.


GL30.glBindVertexArray(va);
      positions.bind();
      textureUVs.bind();
      colors.bind();
      normals.bind();
         GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, vi);
         GL11.glDrawElements(mode, indices.length, GL11.GL_UNSIGNED_BYTE, 0);
         GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, 0);
      positions.unbind();
      textureUVs.unbind();
      colors.unbind();
      normals.unbind();
   GL30.glBindVertexArray(0);

That’s not how you are supposed to bind the buffers. Do like this in the order.


positions.bind();
glVertexAttribPointer(0, 4, GL_FLOAT, false, 0, 0);

textureUVs.bind();
glVertexAttribPointer(1, 2, GL_FLOAT, false, 0, 0);

colors.bind();
glVertexAttribPointer(2, 4, GL_FLOAT, false, 0, 0);

normals.bind();
glVertexAttribPointer(3, 3, GL_FLOAT, true, 0, 0);

glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vi);
glDrawElements(mode, indices.length, GL_UNSIGNED_BYTE, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);

glBindBuffer(GL_ARRAY_BUFFER, 0);

The problem is you are binding your VBOs but not setting the pointers to them. Hope this helps.

VertexAttrib.class constructor:

/**
 * Initializes the vertex attribute object with some data
 * to put into its buffer
 * 
 * @param floats The data to add
 */
public VertexAttrib(int index, int size, boolean normalize, float ... floats) {
	data = BufferUtils.createFloatBuffer(floats.length);
	data.put(floats);
	data.flip();
	
	id = GL15.glGenBuffers();
	GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, id);
	GL15.glBufferData(GL15.GL_ARRAY_BUFFER, data, GL15.GL_STATIC_DRAW);
	GL20.glVertexAttribPointer(index, size, GL11.GL_FLOAT, normalize, 0, 0);
	
	this.index = index;
}

I’m using the pointers inside the ‘vertexattrib’ class. The pointer methods are being called. Thanks for the tip about normalized… normals!

There’s a problem too.


public void bind(){
@@    GL20.glEnableVertexAttribArray(index);
    System.out.println("enabled vaa: " + index);
}

You are just enabling the location but not making a pointer to it.

EDIT:

Found that you are setting the pointer in the constructor. Looking for other sources of the error…

You are creating the indices buffer as a FloatBuffer, but you are using GL_UNSIGNED_BYTE in the call to glDrawElements. Maybe changing that to GL_UNSIGNED_FLOAT should work.

There is no [icode]GL_UNSIGNED_FLOAT[/icode]. Also, I have a working example of a VBO, and that’s what it says to do :-/

Oops, my bad. I thought of saying GL_UNSIGNED_SHORT and make it a ShortBuffer.

Who ho! It works! Turns out, the indices were supposed to be integers. So you where right, the buffer was the wrong type. I bookmarked the page to give you another medal later. Thanks!

Screenie:

Glad that you got it working. Is that green quad your model?

Yeah, I didn’t do the matrices/camera yet. So thats why it still has the default 0-1 coordinate system. Its green because of the shaders, I just have to change a name for that. Thanks!