Texture Projection Issues

I’m developing an extension to the World Wind Java open source project involving projective texture mapping. World Wind is similar to Google Earth and other 3-D geographic rendering applications, and uses JOGL… You can find more information here: World Wind Central

I’m having a problem that suggests I might not be understanding how coordinate systems work in either OpenGL or WW or both. I’m hoping someone here can look at what I’m doing and point out what’s wrong.

My goal is to place an image on the globe using projective texture mapping. My current code constructs a quad on the globe surface and projects a sample texture onto that so I can see what’s happening. The problem is that no matter how I set up the texture matrix, I’m not getting the correct result. I’ve attached part of the code from my rendering method to illustrate what I’m doing. In this code, I use the modelview and projection matrices that WW calculates for its own display, using dc.getView() to get them. Using this approach, my geometry comes out fine. No problems there. But the texture is another matter.

As I understand it, WW uses a model coordinate system with its origin at the center of the globe, the Y-axis through the north pole, and Z-axis through lat/lon = (0,0). Units are in meters.

I’ve noticed that if I place the center point of the quad I’m rendering at lat/lon = (0,0) I can see the texture properly centered on my geometry. However, the texture is projected as if the camera/projector is located at the center of the globe, pointing back toward the quad, instead of above the center of the quad and pointing into the globe. I know this because the texture is much too large, and if I do the math to work out where the camera is, I get the globe’s center. This also results in the texture being flipped because the camera’s y and z axes are opposite what they are in the globe’s cartesian coordinate system.

If I place the quad elsewhere on the globe’s surface, the situation gets worse as it seems like no matter what I do the camera acts like it’s pointed down the globe’s z-axis!

In the code, most everything is self-explanatory. All coordinates are in the globe’s cartesian system, and I obtain them using dc.getGlobe().computePointFromPosition() which converts lat/lon to cartesian coordinates. The camera position (cameraX, cameraY, cameraX) is 200000 meters above the center point of the quad and pointing down.

I know folks here aren’t necessarily WW savvy, but I’d appreciate it if you’d look at my OpenGL calls and make any suggestions as to what I might be doing wrong here. Thanks!

try
{
	double[] matrixArray = new double[16];

	// Define the modelview transformation for rendering
	gl.glMatrixMode(GL.GL_MODELVIEW);
	gl.glPushMatrix();
	gl.glLoadIdentity();
	Matrix modelview = dc.getView().getModelviewMatrix();
                
	// Load the modelview matrix
      	modelview.toArray(matrixArray, 0, false);
      	gl.glLoadMatrixd(matrixArray, 0);

      	// Set up the texture transformation
	gl.glMatrixMode(GL.GL_TEXTURE);
      	gl.glActiveTexture(GL.GL_TEXTURE0);
	gl.glPushMatrix();
      	gl.glLoadIdentity();
	if (this.bind(dc))
	{
      		// Set parameters telling OpenGL how to handle the texture
		GL gl = dc.getGL();
      	  	gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MIN_FILTER, GL.GL_LINEAR);
		gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MAG_FILTER, GL.GL_LINEAR);
		gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_WRAP_S, GL.GL_CLAMP_TO_EDGE);
      	  	gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_WRAP_T, GL.GL_CLAMP_TO_EDGE);

		float[] sCoeffArray = {1f, 0f, 0f, 0f};
		float[] tCoeffArray = {0f, 1f, 0f, 0f};
		gl.glTexGenfv(GL.GL_S, GL.GL_EYE_PLANE, sCoeffArray, 0);
		gl.glTexGenfv(GL.GL_T, GL.GL_EYE_PLANE, tCoeffArray, 0);
		gl.glEnable(GL.GL_TEXTURE_GEN_S);
		gl.glEnable(GL.GL_TEXTURE_GEN_T);
		gl.glEnable(GL.GL_TEXTURE_2D);
                	
		double hScale = .5; 
		double vScale = .5;
		double hShift = .5;
		double vShift = .5;
                    
      	    	// Translate and scale to map NDC coordinates into texture coordinates (s,t).
		gl.glTranslated(hShift, vShift, 0.0);
		gl.glScaled(hScale, vScale, 1.0);
    
      	    	// Projective transform to map from eye coordinates to NDC coordinates 
		glu.gluPerspective(45.0, 1.0, 100000, 300000);

		// Modelview transform to place the coordinate system at the eye position
      	    	// and with the negative z axis through the model origin.
		glu.gluLookAt(cameraX, cameraY, cameraZ, 
			centerX, centerY, centerZ, 
			0, 1, 0);
	}
            
	// Now render the geometry
	gl.glMatrixMode(GL.GL_PROJECTION);
	gl.glPushMatrix();
	gl.glLoadIdentity();
	Matrix projection = dc.getView().getProjectionMatrix();
	projection.toArray(matrixArray, 0, false);
	gl.glLoadMatrixd(matrixArray, 0);
                
	// Plane tanget to the surface
	gl.glMatrixMode(GL.GL_MODELVIEW);
	gl.glActiveTexture(GL.GL_TEXTURE0);
	gl.glEnableClientState(GL.GL_VERTEX_ARRAY);
	gl.glVertexPointer(3, GL.GL_DOUBLE, 0, verts.rewind());
	gl.glDrawArrays(GL.GL_QUADS, 0, 4);

	// Line from camera to plane center point
	gl.glLineWidth(2.0f);
	gl.glColor3f(1f, 1f, 1f);
	gl.glBegin(GL.GL_LINES);
	gl.glVertex3d(cameraX, cameraY, cameraZ); // origin of the line
	gl.glVertex3d(centerX, centerY, centerZ); // ending point of the line
	gl.glEnd();
}
finally
{
    	gl.glDisable(GL.GL_TEXTURE_GEN_S);
       	gl.glDisable(GL.GL_TEXTURE_GEN_T);
	gl.glMatrixMode(GL.GL_TEXTURE);
	gl.glPopMatrix();
	gl.glMatrixMode(GL.GL_PROJECTION);
	gl.glPopMatrix();
	gl.glMatrixMode(GL.GL_MODELVIEW);
	gl.glPopMatrix();
	gl.glFlush();
}