Camera rotate problem

First, sorry for my bad English.

I am developing my final project do college, a volumetric viewer for Android, and i have de a problem.
The first visualization is fine, but when i rotate the camera, the old visualization is drawed, and the new, with the new angle of camera, too.

Here is the link os the pictures demonstrating it:
https://dl.dropboxusercontent.com/u/77565492/Captura%20de%20tela%20de%202013-11-08%2019%3A29%3A18.png
https://dl.dropboxusercontent.com/u/77565492/Captura%20de%20tela%20de%202013-11-08%2019%3A29%3A27.png

and here is the code:



public class ViewerRenderer implements Renderer {

	private Square square;
	private Camera camera;
	
	private float vertices[] = {
			-1.0f, -1.0f,  0.0f,        // V1 - bottom left
			-1.0f,  1.0f,  0.0f,        // V2 - top left
			1.0f, -1.0f,  0.0f,        // V3 - bottom right
			1.0f,  1.0f,  0.0f         // V4 - top right
		};
	
	private FloatBuffer textureBuffer;	// buffer holding the texture coordinates
	private float texture[] = {    		
		// Mapping coordinates for the vertices
		0.0f, 1.0f,		// top left		(V2)
		0.0f, 0.0f,		// bottom left	(V1)
		1.0f, 1.0f,		// top right	(V4)
		1.0f, 0.0f		// bottom right	(V3)
	};
	
	private FloatBuffer vertexBuffer;
	
	private boolean cameraChanged = true;
	
	public ViewerRenderer(Square square, Camera camera) {
		this.square = square;

		this.camera = camera;
				
		ByteBuffer byteBuffer = ByteBuffer.allocateDirect(vertices.length * 4); 
		byteBuffer.order(ByteOrder.nativeOrder());
		vertexBuffer = byteBuffer.asFloatBuffer();
		vertexBuffer.put(vertices);
		vertexBuffer.position(0);

		byteBuffer = ByteBuffer.allocateDirect(texture.length * 4);
		byteBuffer.order(ByteOrder.nativeOrder());
		textureBuffer = byteBuffer.asFloatBuffer();
		textureBuffer.put(texture);
		textureBuffer.position(0);
	}
	
	public void onDrawFrame(GL10 gl) {
		//gl.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
		gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
		
		// Reset the Modelview Matrix
		gl.glLoadIdentity();
		
		if(cameraChanged) {
			//square.drawBackground(gl);
			
			GLU.gluLookAt(gl, camera.getEyeX(), camera.getEyeY(), camera.getEyeZ(), 
					camera.getCenterX(), camera.getCenterY(), camera.getCenterZ(),
					camera.getUpX(), camera.getUpY(), camera.getUpZ());
			
			//cameraChanged = false;
		}
		
		square.draw(gl);
	}
	
	public void onSurfaceChanged(GL10 gl, int width, int height) {
		if (height == 0) {
			height = 1;
		}

		gl.glViewport(0, 0, width, height); // Reset The Current Viewport
		gl.glMatrixMode(GL10.GL_PROJECTION); // Select The Projection Matrix
		gl.glLoadIdentity(); // Reset The Projection Matrix
		
		GLU.gluPerspective(gl, 45.0f, (float) width / (float) height, 0.1f,
				100.0f);
		
		gl.glMatrixMode(GL10.GL_MODELVIEW); // Select The Modelview Matrix
		gl.glLoadIdentity();
	}

	public void onSurfaceCreated(GL10 gl, EGLConfig config) {
		square.loadGLTextures(gl);
		
		gl.glEnable(GL10.GL_TEXTURE_2D); // Enable Texture Mapping ( NEW )
		gl.glShadeModel(GL10.GL_SMOOTH); // Enable Smooth Shading
		gl.glClearColor(0.0f, 0.0f, 0.0f, 0.5f); // Black Background
		gl.glClearDepthf(1.0f); // Depth Buffer Setup
		gl.glEnable(GL10.GL_DEPTH_TEST); // Enables Depth Testing
		gl.glDepthFunc(GL10.GL_LEQUAL); // The Type Of Depth Testing To Do

		// Really Nice Perspective Calculations
		gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_NICEST);
	}

	public Camera getCamera() {
		return camera;
	}

	public void setCamera(Camera camera) {
		this.camera = camera;
		cameraChanged = true;
	}
}

public class Square {

	private FloatBuffer vertexBuffer;
	//private int[] textures = new int[1];
	private int[] textures;
	
	private List<Bitmap> bitmaps;

	private float vertices[] = {
			-1.0f, -1.0f,  0.0f,        // V1 - bottom left
			-1.0f,  1.0f,  0.0f,        // V2 - top left
			1.0f, -1.0f,  0.0f,        // V3 - bottom right
			1.0f,  1.0f,  0.0f         // V4 - top right
		};
	
	private byte indices[] = {
			//Faces definition
    		0,1,3, 0,3,2, 			//Face front
    		4,5,7, 4,7,6, 			//Face right
    		8,9,11, 8,11,10, 		//... 
    		12,13,15, 12,15,14, 	
    		16,17,19, 16,19,18, 	
    		20,21,23, 20,23,22, 	
								};
	
	private FloatBuffer textureBuffer;	// buffer holding the texture coordinates
	private float texture[] = {    		
		// Mapping coordinates for the vertices
		0.0f, 1.0f,		// top left		(V2)
		0.0f, 0.0f,		// bottom left	(V1)
		1.0f, 1.0f,		// top right	(V4)
		1.0f, 0.0f		// bottom right	(V3)
	};
	private ByteBuffer indexBuffer;
	
	public Square(List<Bitmap> bitmaps) {		
		textures = new int[bitmaps.size()];
		this.bitmaps = new ArrayList<Bitmap>();
		//usado para pintar da ultima imagem para a primeira
		for (int i = 0; i < bitmaps.size(); i++) {
			this.bitmaps.add(bitmaps.get(bitmaps.size()-(i+1)));
		}
		
		ByteBuffer byteBuffer = ByteBuffer.allocateDirect(vertices.length * 4); 
		byteBuffer.order(ByteOrder.nativeOrder());
		vertexBuffer = byteBuffer.asFloatBuffer();
		vertexBuffer.put(vertices);
		vertexBuffer.position(0);

		byteBuffer = ByteBuffer.allocateDirect(texture.length * 4);
		byteBuffer.order(ByteOrder.nativeOrder());
		textureBuffer = byteBuffer.asFloatBuffer();
		textureBuffer.put(texture);
		textureBuffer.position(0);
		
		indexBuffer = ByteBuffer.allocateDirect(indices.length);
		indexBuffer.put(indices);
		indexBuffer.position(0);
	}

	public void draw(GL10 gl) {
		gl.glMatrixMode(GL10.GL_MODELVIEW);
		
		/*gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
		gl.glColor4f(0.0f, 1.0f, 0.0f, 0.5f);
		gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
		gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, vertices.length / 3);
		gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);*/
		
		float z = 0.0078125f;
		
		gl.glEnable(GL10.GL_TEXTURE_2D);
		
		gl.glDepthMask(false);
		gl.glEnable(GL10.GL_BLEND);
		gl.glBlendFunc(GL10.GL_SRC_ALPHA, GL10.GL_ONE_MINUS_SRC_ALPHA);
		
		gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
		gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
		
		for (int i = 0; i < textures.length; i++) {
			gl.glTranslatef(0.0f, 0.0f, z);
			
			// bind the previously generated texture
			gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[i]);

			// Point to our buffers
//			gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
//			gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);

			// Set the face rotation
			gl.glFrontFace(GL10.GL_CW);

			// Point to our vertex buffer
			gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
			gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureBuffer);

			// Draw the vertices as triangle strip
			gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, vertices.length / 3);
			
			//y += inc;
		}
		
		gl.glDisable(GL10.GL_BLEND);
		gl.glDepthMask(true);
		
		gl.glDisable(GL10.GL_TEXTURE_2D);

		// Disable the client state before leaving
		gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
		gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
	}
	
	public void loadGLTextures(GL10 gl) {
		Bitmap bitmap = null;
		gl.glGenTextures(textures.length, textures, 0);
		
		for (int i = 0; i < textures.length; i++) {
			// generate one texture pointer
			
			// ...and bind it to our array
			gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[i]);
			
			// create nearest filtered texture
			gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
			gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
			
			gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_REPEAT);
			gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_REPEAT);
			
			// Use Android GLUtils to specify a two-dimensional texture image from our bitmap
			bitmap = bitmaps.get(i);
			GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
			gl.glBindTexture(GL10.GL_TEXTURE_2D, 0);
		}
		
		// Clean up
		//bitmap.recycle();
	}
	
}


What i doing wrong?

maybe something like
glClearColor(1.0f, 1.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
in the start of your draw function?

Only here:


public void onDrawFrame(GL10 gl) {
		//gl.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
		gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);

ups,didn’t see that.
Your still not using glClearColor(…), you commemnted it out.
It may be needed to overwrite the previous screen.
But ofcourse … you must have tried it by now…

I’ve tested it with this snippet of code uncommented, and continued working there.

Judged by the photos you link to, the frameDraw event will be called at the time you release the mouse-pointer from the ruler. That is … the screen has been drawn twice only? I’m thinking on performance from this perspective: Is the type ‘GL10’ a rendercontext or eqvivalent mayor composit? Wouldn’t it be copied if you don’t involve pointers & address-of operators, that is: ‘sending’ the argument by reference.
It’s a silly question that I’m far too inexperienced in addressing, but if the whold context has to be copied you probably should expect some sort of unexpected thing to happen.
… am I way off?

I moved the code of square#draw method to viewer_renderer#onDrawFrame, and the problem persists.

Re: “What i doing wrong?”
Don’t know yet:

But: how many textures are you loading? i.e is textures.length > 1? could be the problem?
You have the rendering of the geometry (unnecessarily) dependent on the number of textures you have loaded. It could be a problem later(if not already)Either way, good idea to change it:
Suppose you have two textures: you will have glTranslatef(0.0f, 0.0f, z) called after glLoadIdentity(); on the first glDrawArrays.
Then glTranslatef(0.0f, 0.0f, z) after glTranslatef(0.0f, 0.0f, z) on the subsequent glDrawArrays calls in that loop you have.

I’m loading, in my test, 5 textures, but it’s dynamic.
It’s not the best way to do this with many textures?

hi jonatasdaniel,
I noticed that you are writing java-code … it has another ‘by-ref’ than I used to. The way you do it is probably ok.
I never (yet, but comming soon) did any drawing with textures. Running through your code it surprises me that you’ve got two calls in
public void draw:
… gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, vertices.length / 3);
and again further down iterating through your textures:
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, vertices.length / 3);

would that be the right procedure?

This part:


/*gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
		gl.glColor4f(0.0f, 1.0f, 0.0f, 0.5f);
		gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
		gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, vertices.length / 3);
		gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);*/

it’s commented!

[QUOTE=jonatasdaniel;1256236]I’m loading, in my test, 5 textures, but it’s dynamic.
It’s not the best way to do this with many textures?[/QUOTE]
Not sure what you mean by dynamic: do you mean the user can select the number of textures at runtime?
After looking at your code a bit more, I think I may have given poor/irrelavant advice in my earlier post, sorry ‘bout that: that part of the code I referred does seem to be doing what you
intend it to do.
Here, I’ll have another go: here’s what I would try(brute force!): The problem might lie in the code you haven’t shown: the camera and the ui interaction code.
You could try isolating it by rotating the "camera’ automatically in DrawFrame() to see what happens. e.g

public void onDrawFrame(GL10 gl) {
 //gl.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
 gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
 
// Reset the Modelview Matrix
 gl.glLoadIdentity();
 
GLU.gluLookAt(gl, 0, 0, -10, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
long time = SystemClock.uptimeMillis() % 4000L;// or some other incrementing value
float angle = 0.090f * ((int) time);
gl.glRotatef(angle, 0, 0, 1.0f);
square.draw(gl);
 }

Hopefully something like that should continuously rotate the quads.
As long as this code doesn’t introduce more bugs, and the first (unwanted) visualisation is eliminated, it may indicate the problem lies in camera/ ui code.
Let us know how you go.

Nope, the number os textures is the number of files readed in SDCARD.

I made the try, look:
https://dl.dropboxusercontent.com/u/77565492/Captura%20de%20tela%20de%202013-11-15%2013%3A57%3A37.png

public class ViewerRenderer implements Renderer

tell us what Renderer is?

I presume its a GLSurfaceView.Renderer

from:
[SIZE=2]import android.opengl.GLSurfaceView.Renderer;[/SIZE]

well it appears that the problem persists. And we cannot see all your code. Thus these are the options I would consider next:

(1) fiddle with the code in various places to see what can be done (as we have done), while waiting for help here for a possible prob/solution.
(2) new project: whip up another basic opengl framework or grab one from the many examples that can be found that are known to work, then drop your specific volume rendering code in place of its existing rendering code.

If you have a deadline (as we all do?), and your main goal is to learn about volume rendering rather than opengl programming specifically, all you need is a working opengl rendering frame work to start with. You may never know how long (1) will take if at all. I would go with (2).
(And keep this old project around to debug in you spare time.)

Pretty drastic “solution” hey?

Hi jonatasdaniel,

just a shot in the dark:
http://stackoverflow.com/questions/18161928/glfw-glew-unresolved-external-symbol-with-older-functions
I noticed that this guy had trouble making glClearColor(… ) work…
After having used freeglut he didn’t pay attention to the fact that he now had to link to the opengl library himself.
Have a look at it.

[QUOTE=Steven Katic;1256314]

public class ViewerRenderer implements Renderer

tell us what Renderer is?

I presume its a GLSurfaceView.Renderer

from:
[SIZE=2]import android.opengl.GLSurfaceView.Renderer;[/SIZE][/QUOTE]

It’s GLSurfaceView.Renderer!

See the code!


package br.furb.rma.activities;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import android.opengl.GLSurfaceView.Renderer;
import android.opengl.GLU;
import br.furb.rma.models.Camera;
import br.furb.rma.view.Square;

public class ViewerRenderer implements Renderer {

	private Square square;
	private Camera camera;
	
	private float vertices[] = {
			-1.0f, -1.0f,  0.0f,        // V1 - bottom left
			-1.0f,  1.0f,  0.0f,        // V2 - top left
			1.0f, -1.0f,  0.0f,        // V3 - bottom right
			1.0f,  1.0f,  0.0f         // V4 - top right
		};
	
	private FloatBuffer textureBuffer;	// buffer holding the texture coordinates
	private float texture[] = {    		
		// Mapping coordinates for the vertices
		0.0f, 1.0f,		// top left		(V2)
		0.0f, 0.0f,		// bottom left	(V1)
		1.0f, 1.0f,		// top right	(V4)
		1.0f, 0.0f		// bottom right	(V3)
	};
	
	private FloatBuffer vertexBuffer;
	
	private boolean cameraChanged = true;
	
	public ViewerRenderer(Square square, Camera camera) {
		this.square = square;

		this.camera = cam
era;
				
		ByteBuffer byteBuffer = ByteBuffer.allocateDirect(vertices.length * 4); 
		byteBuffer.order(ByteOrder.nativeOrder());
		vertexBuffer = byteBuffer.asFloatBuffer();
		vertexBuffer.put(vertices);
		vertexBuffer.position(0);

		byteBuffer = ByteBuffer.allocateDirect(texture.length * 4);
		byteBuffer.order(ByteOrder.nativeOrder());
		textureBuffer = byteBuffer.asFloatBuffer();
		textureBuffer.put(texture);
		textureBuffer.position(0);
	}
	
	public void onDrawFrame(GL10 gl) {
		//gl.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
		gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
		
		// Reset the Modelview Matrix
		gl.glLoadIdentity();
		
		if(cameraChanged) {
			//square.drawBackground(gl);
			
			GLU.gluLookAt(gl, camera.getEyeX(), camera.getEyeY(), camera.getEyeZ(), 
					camera.getCenterX(), camera.getCenterY(), camera.getCenterZ(),
					camera.getUpX(), camera.getUpY(), camera.getUpZ());
			
			//cameraChanged = false;
		}
		
		square.draw(gl);
	}
	
	public void onSurfaceChanged(GL10 gl, int width, int height) {
		if (height == 0) {
			height = 1;
		}

		gl.glViewport(0, 0, width, height); // Reset The Current Viewport
		gl.glMatrixMode(GL10.GL_PROJECTION); // Select The Projection Matrix
		gl.glLoadIdentity(); // Reset The Projection Matrix
		
		GLU.gluPerspective(gl, 45.0f, (float) width / (float) height, 0.1f,
				100.0f);
		
		gl.glMatrixMode(GL10.GL_MODELVIEW); // Select The Modelview Matrix
		gl.glLoadIdentity();
	}

	public void onSurfaceCreated(GL10 gl, EGLConfig config) {
		square.loadGLTextures(gl);
		
		gl.glEnable(GL10.GL_TEXTURE_2D); // Enable Texture Mapping ( NEW )
		gl.glShadeModel(GL10.GL_SMOOTH); // Enable Smooth Shading
		gl.glClearColor(0.0f, 0.0f, 0.0f, 0.5f); // Black Background
		gl.glClearDepthf(1.0f); // Depth Buffer Setup
		gl.glEnable(GL10.GL_DEPTH_TEST); // Enables Depth Testing
		gl.glDepthFunc(GL10.GL_LEQUAL); // The Type Of Depth Testing To Do

		// Really Nice Perspective Calculations
		gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_NICEST);
	}

	public Camera getCamera() {
		return camera;
	}

	public void setCamera(Camera camera) {
		this.camera = camera;
		cameraChanged = true;
	}
}



package br.furb.rma.view;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.ArrayList;
import java.util.List;

import javax.microedition.khronos.opengles.GL10;

import android.graphics.Bitmap;
import android.opengl.GLUtils;

public class Square {

	private FloatBuffer vertexBuffer;
	//private int[] textures = new int[1];
	private int[] textures;
	
	private List<Bitmap> bitmaps;

	private float vertices[] = {
			-1.0f, -1.0f,  0.0f,        // V1 - bottom left
			-1.0f,  1.0f,  0.0f,        // V2 - top left
			1.0f, -1.0f,  0.0f,        // V3 - bottom right
			1.0f,  1.0f,  0.0f         // V4 - top right
		};
	
	private byte indices[] = {
			//Faces definition
    		0,1,3, 0,3,2, 			//Face front
    		4,5,7, 4,7,6, 			//Face right
    		8,9,11, 8,11,10, 		//... 
    		12,13,15, 12,15,14, 	
    		16,17,19, 16,19,18, 	
    		20,21,23, 20,23,22, 	
								};
	
	private FloatBuffer textureBuffer;	// buffer holding the texture coordinates
	private float texture[] = {    		
		// Mapping coordinates for the vertices
		0.0f, 1.0f,		// top left		(V2)
		0.0f, 0.0f,		// bottom left	(V1)
		1.0f, 1.0f,		// top right	(V4)
		1.0f, 0.0f		// bottom right	(V3)
	};
	private ByteBuffer indexBuffer;
	
	public Square(List<Bitmap> bitmaps) {		
		textures = new int[bitmaps.size()];
		this.bitmaps = new ArrayList<Bitmap>();
		//usado para pintar da ultima imagem para a primeira
		for (int i = 0; i < bitmaps.size(); i++) {
			this.bitmaps.add(bitmaps.get(bitmaps.size()-(i+1)));
		}
		
		ByteBuffer byteBuffer = ByteBuffer.allocateDirect(vertices.length * 4); 
		byteBuffer.order(ByteOrder.nativeOrder());
		vertexBuffer = byteBuffer.asFloatBuffer();
		vertexBuffer.put(vertices);
		vertexBuffer.position(0);

		byteBuffer = ByteBuffer.allocateDirect(texture.length * 4);
		byteBuffer.order(ByteOrder.nativeOrder());
		textureBuffer = byteBuffer.asFloatBuffer();
		textureBuffer.put(texture);
		textureBuffer.position(0);
		
		indexBuffer = ByteBuffer.allocateDirect(indices.length);
		indexBuffer.put(indices);
		indexBuffer.position(0);
	}

	public void draw(GL10 gl) {
		gl.glMatrixMode(GL10.GL_MODELVIEW);
		
		/*gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
		gl.glColor4f(0.0f, 1.0f, 0.0f, 0.5f);
		gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
		gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, vertices.length / 3);
		gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);*/
		
		float z = 0.0078125f;
		
		gl.glEnable(GL10.GL_TEXTURE_2D);
		
		gl.glDepthMask(false);
		gl.glEnable(GL10.GL_BLEND);
		gl.glBlendFunc(GL10.GL_SRC_ALPHA, GL10.GL_ONE_MINUS_SRC_ALPHA);
		
		gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
		gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
		
		for (int i = 0; i < textures.length; i++) {
			gl.glTranslatef(0.0f, 0.0f, z);
			
			// bind the previously generated texture
			gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[i]);

			// Point to our buffers
//			gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
//			gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);

			// Set the face rotation
			gl.glFrontFace(GL10.GL_CW);

			// Point to our vertex buffer
			gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
			gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureBuffer);

			// Draw the vertices as triangle strip
			gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, vertices.length / 3);
			
			//y += inc;
		}
		
		gl.glDisable(GL10.GL_BLEND);
		gl.glDepthMask(true);
		
		gl.glDisable(GL10.GL_TEXTURE_2D);

		// Disable the client state before leaving
		gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
		gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
	}
	
	public void loadGLTextures(GL10 gl) {
		Bitmap bitmap = null;
		gl.glGenTextures(textures.length, textures, 0);
		
		for (int i = 0; i < textures.length; i++) {
			// generate one texture pointer
			
			// ...and bind it to our array
			gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[i]);
			
			// create nearest filtered texture
			gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
			gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
			
			gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_REPEAT);
			gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_REPEAT);
			
			// Use Android GLUtils to specify a two-dimensional texture image from our bitmap
			bitmap = bitmaps.get(i);
			GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
			gl.glBindTexture(GL10.GL_TEXTURE_2D, 0);
		}
		
		// Clean up
		//bitmap.recycle();
	}

	public int[] getTextures() {
		return textures;
	}
	
}

Any help will be very welcome, I’m desperate and I have no previous experience with OpenGL =/

what about your Activity and your GLSurfaceView inherited classes. Showing & describing their arrangement may help.

ViewerActivity


package br.furb.rma.activities;

import java.io.File;
import java.util.ArrayList;
import java.util.List;

import android.app.Activity;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.PixelFormat;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.os.Environment;
import android.view.View;
import android.widget.Button;
import android.widget.SeekBar;
import android.widget.TextView;
import br.furb.rma.R;
import br.furb.rma.models.Camera;
import br.furb.rma.models.Dicom;
import br.furb.rma.models.DicomImage;
import br.furb.rma.reader.DicomReader;
import br.furb.rma.view.Square;

public class ViewerActivity extends Activity {

	private final static int FLAT_VIEWER = 0;
	
	private GLSurfaceView surfaceView;
	private Button btn2D;
	private TextView tvAngle;
	private SeekBar seekBar;
	
	private float angle = 120;
	private float radius = 3;
	
	private Camera camera;
	
	private Dicom dicom;
	private ViewerRenderer renderer;
	
	@Override
	protected void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		setContentView(R.layout.viewer_activity);
		
		camera = new Camera();
		camera.setEyeX(retornaX(angle, radius));
		camera.setEyeZ(retornaZ(angle, radius));
		
		//String dirName = getIntent().getExtras().getString("dir") + "/DICOMDIR";
		String dirName = Environment.getExternalStorageDirectory().getAbsolutePath() + "/joelho_dalton/DICOMDIR";
		//String dirName = Environment.getExternalStorageDirectory().getAbsolutePath() + "/joelho_jonny/dicomdir";
		final DicomReader reader = new DicomReader(new File(dirName));
		
//		reader.setListener(new DicomReaderListener() {
//			
//			@Override
//			public void onChange(String status) {
//				Message msg = new Message();
//				msg.what = 1;
//				msg.obj = status;
//				handler.sendMessage(msg);
//			}
//		});
		
		try {
			dicom = reader.maxImages(15).read();
		} catch(Exception e) {
			e.printStackTrace();
		}
		
		btn2D = (Button) findViewById(R.viewer.btn_2d);
		btn2D.setOnClickListener(new View.OnClickListener() {
			
			@Override
			public void onClick(View v) {
				flatViewerClick(v);
			}
		});
		
		tvAngle = (TextView) findViewById(R.viewer.angle);
		tvAngle.setText(angle + "º");
		
		seekBar = (SeekBar) findViewById(R.viewer.seekbar);
		seekBar.setMax(360);
		seekBar.setProgress((int) angle);
		seekBar.setOnSeekBarChangeListener(seekBarListener);
		
		surfaceView = (GLSurfaceView) findViewById(R.viewer.gl_surface_view);
		surfaceView.setZOrderOnTop(true);
		surfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
		surfaceView.getHolder().setFormat(PixelFormat.RGB_888);
		
		List<Bitmap> bitmaps = new ArrayList<Bitmap>();
		for (DicomImage image : dicom.getImages()) {
			bitmaps.add(image.getBitmap());
		}
		
		Square square = new Square(bitmaps);
		
		renderer = new ViewerRenderer(square, camera);
		surfaceView.setRenderer(renderer);
	}

	public void flatViewerClick(View view) {
		Intent intent = new Intent(this, FlatViewerActivity.class);
		startActivityForResult(intent, FLAT_VIEWER);
	}
	
	public float retornaX(float angle, float radius) {
		return (float) (radius * Math.cos(Math.PI * angle / 180.0));
	}

	public float retornaZ(float angle, float radius) {
		return (float) (radius * Math.sin(Math.PI * angle / 180.0));
	}
	
	private SeekBar.OnSeekBarChangeListener seekBarListener = new SeekBar.OnSeekBarChangeListener() {
		
		@Override
		public void onStopTrackingTouch(SeekBar seekBar) {
			
		}
		
		@Override
		public void onStartTrackingTouch(SeekBar seekBar) {
			
		}
		
		@Override
		public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
			Camera camera = renderer.getCamera();
			angle = progress;
			
			camera.setEyeX(retornaX(angle, radius));
			camera.setEyeZ(retornaZ(angle, radius));
			renderer.setCamera(camera);
			tvAngle.setText(angle + "º");
		}
	};
	
	protected void onActivityResult(int requestCode, int resultCode, Intent data) {
		if(requestCode == FLAT_VIEWER) {
			btn2D.setEnabled(true);
		}
	};

}

GLSurfaceView is an Android component defined in my layout xml file.