At present I have built a map of road center-lines and property boundaries, which you can zoom and pan around in. There are also 6 points of interest (drawn as green points in the image below). This could eventually hold up to 6000 points.
[ATTACH=CONFIG]491[/ATTACH] [ATTACH=CONFIG]492[/ATTACH]
What I am trying to accomplish is to change the points to circular textures drawn on squares. I would like the positions to move with the zoom; however, I want the scaling to behave differently. I do not want the textured squares to scale at the same rate as the rest of the drawing.
In fact, what I was thinking was to set the image to something like 5pt at 54000m and above, and scale slowly to 25pt until it gets to 10000m, anything below that remains at 25pt. Just using pt as an example unit.
[ATTACH=CONFIG]493[/ATTACH] [ATTACH=CONFIG]494[/ATTACH]
I already have the center of each point loaded into a FloatBuffer; that is how I drew the initial points.
I add a square into the constructor, with position data and texture coord data, color is already in the texture png.
I have started to create the vertex and fragment shaders in getPointVertexShader() and getPointFragmentShader(); however, I am not sure how to finish them off, so I could use a little help there.
The other pieces that are confusing me are setting the position and scale of the squares, and piecing it all together in the draw.
Below is the renderer code so far. Any help will be greatly appreciated!
/**
 * OpenGL ES 2.0 renderer that draws a pannable/zoomable map of batched line
 * geometry (road center-lines, property boundaries) plus textured squares
 * representing points of interest.
 */
public class vboCustomGLRenderer implements GLSurfaceView.Renderer {
/** Context used to load the point texture resource (see onSurfaceCreated/loadTexture). */
private final Context mActivityContext;
/**
 * Store the model matrix. This matrix is used to move models from object space (where each model can be thought
 * of being located at the center of the universe) to world space.
 */
private float[] mModelMatrix = new float[16];
/**
 * Store the view matrix. This can be thought of as our camera. This matrix transforms world space to eye space;
 * it positions things relative to our eye.
 */
private float[] mViewMatrix = new float[16];
/** Store the projection matrix. This is used to project the scene onto a 2D viewport. */
private float[] mProjectionMatrix = new float[16];
/** Allocate storage for the final combined matrix. This will be passed into the shader program. */
private float[] mMVPMatrix = new float[16];
/** Handle for the u_MVPMatrix uniform of the line program. */
private int mMVPMatrixHandle;
/** Handle for the line program's a_Position attribute. */
private int mLinePositionHandle;
/** Handle for the line program's u_Color uniform (one color per draw call, not per vertex). */
private int mLineColorUniformLocation;
/** Handle for the points program's a_Position attribute. */
private int mPointPositionHandle;
/** How many bytes per float. */
private final int mBytesPerFloat = 4;
/** Offset of the position data. */
private final int mPositionOffset = 0;
/** Size of the position data in elements (x, y, z). */
private final int mPositionDataSize = 3;
/** Stride in bytes between consecutive vertices (3 floats * 4 bytes). */
private final int mPositionFloatStrideBytes = mPositionDataSize * mBytesPerFloat;
/** This is a handle to our per-vertex line shading program. */
private int mPerVertexLinesProgramHandle;
/** This is a handle to our points program. */
private int mPointsProgramHandle;
/** Unit-square vertex positions for the textured point marker (set up in the constructor). */
private final FloatBuffer mSquarePositions;
/** Texture coordinates matching mSquarePositions. */
private final FloatBuffer mSquareTextureCoordinates;
/** Handle for the points program's a_TexCoordinate attribute. */
private int mTextureCoordinateHandle;
/** Size of the texture coordinate data in elements (s, t). */
private final int mTextureCoordinateDataSize = 2;
/** Handle for the points program's u_Texture sampler uniform. */
private int mTextureUniformHandle;
/** This is a handle to our texture data. */
private int mTextureDataHandle;
// Camera position; eye and look share x/y so the camera looks straight down.
public double eyeX = 0;
public double eyeY = 0;
public float eyeZ = 1.5f;
// We are looking toward the distance
public double lookX = eyeX;
public double lookY = eyeY;
public float lookZ = 0.0f;
// Set our up vector. This is where our head would be pointing were we holding the camera.
public float upX = 0.0f;
public float upY = 1.0f;
public float upZ = 0.0f;
// Offsets that re-center the model on the midpoint of the map's bounding rectangle
// (extents come from the project-global default_settings).
public double modelOffsetX = -(default_settings.mbrMinX + ((default_settings.mbrMaxX - default_settings.mbrMinX)/2));
public double modelOffsetY = -(default_settings.mbrMinY + ((default_settings.mbrMaxY - default_settings.mbrMinY)/2));
// Accumulated zoom factor (updated multiplicatively in setScaleFactor).
public double mScaleFactor = 1;
// NOTE(review): the shift fields below are never read or written in this file —
// presumably used by callers; verify before removing.
public double modelXShift = 0;
public double modelYShift = 0;
public double viewXShift = 0;
public double viewYShift = 0;
/**
 * Builds the unit-square marker geometry (two counter-clockwise triangles at
 * z = 1) and its texture coordinates, storing both in native-order direct
 * FloatBuffers ready to hand to OpenGL.
 *
 * @param activityContext context later used to decode the point texture bitmap.
 */
public vboCustomGLRenderer(final Context activityContext) {
mActivityContext = activityContext;
// X, Y, Z corners of the square, split into two triangles.
// Counter-clockwise winding is the OpenGL default "front" face, so
// back-face culling (when enabled) keeps these triangles visible.
final float[] quadCorners =
{
-1.0f, 1.0f, 1.0f,
-1.0f, -1.0f, 1.0f,
1.0f, 1.0f, 1.0f,
-1.0f, -1.0f, 1.0f,
1.0f, -1.0f, 1.0f,
1.0f, 1.0f, 1.0f
};
// S, T coordinates matching the corners above. The T axis is flipped
// relative to the quad's Y because image rows grow downward while the
// OpenGL Y axis grows upward. Color comes entirely from the texture PNG.
final float[] quadTexCoords =
{
0.0f, 0.0f,
0.0f, 1.0f,
1.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f,
1.0f, 0.0f
};
mSquarePositions = toDirectFloatBuffer(quadCorners);
mSquareTextureCoordinates = toDirectFloatBuffer(quadTexCoords);
}

/** Copies {@code data} into a native-order direct FloatBuffer rewound to position 0. */
private static FloatBuffer toDirectFloatBuffer(final float[] data) {
final FloatBuffer buffer = ByteBuffer.allocateDirect(data.length * 4)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
buffer.put(data);
buffer.position(0);
return buffer;
}
/** True once the map data has finished loading; onDrawFrame skips drawing until then. */
boolean loadComplete = false;

/**
 * Enables (or disables) frame drawing; call with {@code true} once the map
 * layers have been loaded.
 */
public void setDraw(final boolean isLoaded) {
loadComplete = isLoaded;
}
/**
 * Pans the camera by a screen-space delta, converting pixels into map units
 * via the current screen-to-map ratios, then rebuilds the view matrix.
 *
 * @param x horizontal pan in pixels (moves the eye in the opposite direction).
 * @param y vertical pan in pixels (screen Y grows downward, hence the sign flip).
 */
public void setEye(double x, double y){
final double mapDx = x / screen_vs_map_horz_ratio;
final double mapDy = y / screen_vs_map_vert_ratio;
eyeX -= mapDx;
eyeY += mapDy;
// Keep the look-at point directly below the eye so the camera stays top-down.
lookX = eyeX;
lookY = eyeY;
// Set the camera position (View matrix).
Matrix.setLookAtM(mViewMatrix, 0, (float)eyeX, (float)eyeY, eyeZ, (float)lookX, (float)lookY, lookZ, upX, upY, upZ);
}
/**
 * Applies a pinch-zoom step: shrinks/grows the frustum extents by
 * {@code scaleFactor}, shifts the eye so the gesture focal point
 * ({@code gdx}, {@code gdy}, in pixels) stays fixed on screen, and rebuilds
 * the projection matrix. Mutates the shared static extents
 * (mLeft/mRight/mTop/mBottom) in place; statement order matters because the
 * screen-to-map ratios are recomputed from the new extents before the eye shift.
 */
public void setScaleFactor(float scaleFactor, float gdx, float gdy){
// Don't let the object get too small or too large.
//mScaleFactor = Math.max(0.1f, Math.min(mScaleFactor, 10000.0f));
mScaleFactor *= scaleFactor;
// Dividing the half-extents by the factor zooms in for factor > 1.
mRight = mRight / scaleFactor;
mLeft = -mRight;
mTop = mTop / scaleFactor;
mBottom = -mTop;
// Eye shift in pixels: distance of the gesture point from screen center,
// minus where that distance lands after scaling. Converted to map units below.
double eyeXShift = (((mWidth / 2) - gdx) - (((mWidth / 2) - gdx) / scaleFactor));
double eyeYShift = (((mHeight / 2) - gdy) - (((mHeight / 2) - gdy) / scaleFactor));
// Recompute pixel-per-map-unit ratios from the NEW extents before using them.
screen_vs_map_horz_ratio = (mWidth/(mRight-mLeft));
screen_vs_map_vert_ratio = (mHeight/(mTop-mBottom));
eyeX -= (eyeXShift / screen_vs_map_horz_ratio);
lookX = eyeX;
eyeY += (eyeYShift / screen_vs_map_vert_ratio);
lookY = eyeY;
Matrix.frustumM(mProjectionMatrix, 0, (float)mLeft, (float)mRight, (float)mBottom, (float)mTop, near, far);
}
protected String getLineVertexShader()
{
// TO DO: Explain why we normalize the vectors, explain some of the vector math behind it all. Explain what is eye space.
final String lineVertexShader =
"uniform mat4 u_MVPMatrix;
" // A constant representing the combined model/view/projection matrix.
+ "attribute vec4 a_Position;
" // Per-vertex position information we will pass in.
+ "attribute vec4 a_Color;
" // Per-vertex color information we will pass in.
+ "varying vec4 v_Color;
" // This will be passed into the fragment shader.
+ "void main()
" // The entry point for our vertex shader.
+ "{
"
+ " v_Color = a_Color;
" // Pass the color through to the fragment shader.
// It will be interpolated across the triangle.
+ " gl_Position = u_MVPMatrix
" // gl_Position is a special variable used to store the final position.
+ " * a_Position;
" // Multiply the vertex by the matrix to get the final point in
+ " gl_PointSize = 5.0;
"
+ "}
"; // normalized screen coordinates.
return lineVertexShader;
}
protected String getLineFragmentShader()
{
final String lineFragmentShader =
"precision mediump float;
" // Set the default precision to medium. We don't need as high of a
+ "uniform vec4 u_Color;
" // This is the color from the vertex shader interpolated across the
// triangle per fragment.
+ "void main()
" // The entry point for our fragment shader.
+ "{
"
+ " gl_FragColor = u_Color;
" // Pass the color directly through the pipeline.
+ "}
";
return lineFragmentShader;
}
protected String getPointVertexShader()
{
// Define a simple shader program for our points.
final String pointVertexShader =
"uniform mat4 u_MVPMatrix;
"
+ "attribute vec4 a_Position;
"
+ "attribute vec2 a_TexCoordinate;
" // Per-vertex texture coordinate information we will pass in.
+ "varying vec2 v_TexCoordinate;
" // This will be passed into the fragment shader.
+ "void main()
"
+ "{
"
+ " v_TexCoordinate = a_TexCoordinate;
" // Pass through the texture coordinate.
+ " gl_Position = u_MVPMatrix * a_Position;
" // gl_Position is a special variable used to store the final position.
+ "}
";
return pointVertexShader;
}
protected String getPointFragmentShader()
{
final String pointFragmentShader =
"precision mediump float;
" // Set the default precision to medium. We don't need as high of a precision in the fragment shader.
+ "uniform sampler2D u_Texture;
" // The input texture.
+ "varying vec2 v_TexCoordinate;
" // Interpolated texture coordinate per fragment.
+ "void main()
" // The entry point for our fragment shader.
+ "{
"
+ " gl_FragColor = (texture2D(u_Texture, v_TexCoordinate));
" // Pass the color directly through the pipeline.
+ "}
";
return pointFragmentShader;
}
/**
 * Helper function to compile a shader.
 *
 * @param shader A human-readable name for the shader, used in error logs and exceptions.
 * @param shaderType The shader type (GLES20.GL_VERTEX_SHADER or GLES20.GL_FRAGMENT_SHADER).
 * @param shaderSource The shader source code.
 * @return An OpenGL handle to the compiled shader.
 * @throws RuntimeException if the shader object could not be created or failed to compile.
 */
private int compileShader(String shader, final int shaderType, final String shaderSource)
{
int shaderHandle = GLES20.glCreateShader(shaderType);
if (shaderHandle != 0)
{
// Pass in the shader source.
GLES20.glShaderSource(shaderHandle, shaderSource);
// Compile the shader.
GLES20.glCompileShader(shaderHandle);
// Get the compilation status.
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(shaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
// If the compilation failed, log the driver's info log and delete the shader object.
if (compileStatus[0] == 0)
{
Log.e("vboCustomGLRenderer: compileShader", "Error compiling shader: " + shader + " " + GLES20.glGetShaderInfoLog(shaderHandle));
GLES20.glDeleteShader(shaderHandle);
shaderHandle = 0;
}
}
if (shaderHandle == 0)
{
// Include the shader name so the failing stage is identifiable from the exception alone.
throw new RuntimeException("Error creating shader: " + shader);
}
return shaderHandle;
}
/**
 * Helper function to compile and link a program.
 *
 * @param vertexShaderHandle An OpenGL handle to an already-compiled vertex shader.
 * @param fragmentShaderHandle An OpenGL handle to an already-compiled fragment shader.
 * @param attributes Attributes that need to be bound to the program, assigned
 *        locations 0..n-1 in array order; may be null.
 * @return An OpenGL handle to the linked program.
 * @throws RuntimeException if the program could not be created or failed to link.
 */
private int createAndLinkProgram(final int vertexShaderHandle, final int fragmentShaderHandle, final String[] attributes)
{
int programHandle = GLES20.glCreateProgram();
if (programHandle != 0)
{
// Bind the vertex shader to the program.
GLES20.glAttachShader(programHandle, vertexShaderHandle);
// Bind the fragment shader to the program.
GLES20.glAttachShader(programHandle, fragmentShaderHandle);
// Bind attribute locations before linking so they take effect.
if (attributes != null)
{
final int size = attributes.length;
for (int i = 0; i < size; i++)
{
GLES20.glBindAttribLocation(programHandle, i, attributes[i]);
}
}
// Link the two shaders together into a program.
GLES20.glLinkProgram(programHandle);
// Get the link status.
final int[] linkStatus = new int[1];
GLES20.glGetProgramiv(programHandle, GLES20.GL_LINK_STATUS, linkStatus, 0);
// If the link failed, delete the program. (Message corrected: this is a
// link failure, not a compile failure — compilation is handled in compileShader.)
if (linkStatus[0] == 0)
{
Log.e("vboCustomGLRenderer: createAndLinkProgram", "Error linking program: " + GLES20.glGetProgramInfoLog(programHandle));
GLES20.glDeleteProgram(programHandle);
programHandle = 0;
}
}
if (programHandle == 0)
{
throw new RuntimeException("Error creating program.");
}
return programHandle;
}
/**
 * Decodes a drawable resource into a bitmap and uploads it as a GL_TEXTURE_2D
 * with nearest-neighbor filtering. Must be called on the GL thread.
 *
 * @param context context whose resources contain the drawable.
 * @param resourceId drawable resource id of the texture image.
 * @return the OpenGL texture handle.
 * @throws RuntimeException if the texture handle could not be generated or the
 *         resource could not be decoded.
 */
public static int loadTexture(final Context context, final int resourceId)
{
final int[] textureHandle = new int[1];
GLES20.glGenTextures(1, textureHandle, 0);
if (textureHandle[0] != 0)
{
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inScaled = false; // No pre-scaling
// Read in the resource. decodeResource returns null on failure, so check
// before use instead of crashing with an NPE and leaking the GL handle.
final Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), resourceId, options);
if (bitmap == null)
{
GLES20.glDeleteTextures(1, textureHandle, 0);
throw new RuntimeException("Error decoding texture resource: " + resourceId);
}
// Bind to the texture in OpenGL
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);
// Set filtering
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
// Load the bitmap into the bound texture.
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
// Recycle the bitmap, since its data has been loaded into OpenGL.
bitmap.recycle();
}
if (textureHandle[0] == 0)
{
throw new RuntimeException("Error loading texture.");
}
return textureHandle[0];
}
@Override
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
// Set the background frame color to white.
GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
// Set the view matrix. This matrix can be said to represent the camera position.
// NOTE: In OpenGL 1, a ModelView matrix is used, which is a combination of a model and
// view matrix. In OpenGL 2, we can keep track of these matrices separately if we choose.
Matrix.setLookAtM(mViewMatrix, 0, (float)eyeX, (float)eyeY, eyeZ, (float)lookX, (float)lookY, lookZ, upX, upY, upZ);
// Build the line program and cache its uniform/attribute locations.
final String lineVertexShader = getLineVertexShader();
final String lineFragmentShader = getLineFragmentShader();
final int lineVertexShaderHandle = compileShader("lineVertexShader", GLES20.GL_VERTEX_SHADER, lineVertexShader);
final int lineFragmentShaderHandle = compileShader("lineFragmentShader", GLES20.GL_FRAGMENT_SHADER, lineFragmentShader);
mPerVertexLinesProgramHandle = createAndLinkProgram(lineVertexShaderHandle, lineFragmentShaderHandle, new String[] {"a_Position", "a_Color"});
mMVPMatrixHandle = GLES20.glGetUniformLocation(mPerVertexLinesProgramHandle, "u_MVPMatrix");
mLinePositionHandle = GLES20.glGetAttribLocation(mPerVertexLinesProgramHandle, "a_Position");
mLineColorUniformLocation = GLES20.glGetUniformLocation(mPerVertexLinesProgramHandle, "u_Color");
// The line program is made current here and never switched afterwards —
// all later draw calls (including the textured points) run under it.
GLES20.glUseProgram(mPerVertexLinesProgramHandle);
// Build the points program and cache its locations.
final String pointsVertexShader = getPointVertexShader();
final String pointsFragmentShader = getPointFragmentShader();
final int pointVertexShaderHandle = compileShader("pointsVertexShader", GLES20.GL_VERTEX_SHADER, pointsVertexShader);
final int pointFragmentShaderHandle = compileShader("pointsFragmentShader", GLES20.GL_FRAGMENT_SHADER, pointsFragmentShader);
mPointsProgramHandle = createAndLinkProgram(pointVertexShaderHandle, pointFragmentShaderHandle, new String[] {"a_Position", "a_TexCoordinate"});
// Load the texture used for the point-of-interest squares.
mTextureDataHandle = loadTexture(mActivityContext, com.ANDRRA1.R.drawable.andrra_point);
// Unfinished work-in-progress: a separate MVP handle for the points program.
//mPointMVPMatrixHandle = GLES20.glGetUniformLocation(mPointsProgramHandle, "u_MVPMatrix");
mTextureUniformHandle = GLES20.glGetUniformLocation(mPointsProgramHandle, "u_Texture");
mPointPositionHandle = GLES20.glGetAttribLocation(mPointsProgramHandle, "a_Position");
mTextureCoordinateHandle = GLES20.glGetAttribLocation(mPointsProgramHandle, "a_TexCoordinate");
}
// Viewport size in pixels and current frustum extents in map units.
// NOTE(review): these are static, so they are shared across all renderer
// instances — confirm that only one renderer ever exists.
static double mWidth = 0;
static double mHeight = 0;
static double mLeft = 0;
static double mRight = 0;
static double mTop = 0;
static double mBottom = 0;
// Aspect ratio at last initialization; 0 means "not yet initialized" (see onSurfaceChanged).
double mRatio = 0;
double screen_width_height_ratio;
double screen_height_width_ratio;
// Near/far clip planes; eyeZ (1.5) sits exactly on the near plane distance.
final float near = 1.5f;
final float far = 10.0f;
// Pixels per map unit, horizontally and vertically; used to convert gesture deltas.
double screen_vs_map_horz_ratio = 0;
double screen_vs_map_vert_ratio = 0;
@Override
public void onSurfaceChanged(GL10 unused, int width, int height) {
// Adjust the viewport based on geometry changes, such as screen rotation.
// Set the OpenGL viewport to the same size as the surface.
GLES20.glViewport(0, 0, width, height);
//Log.d("","onSurfaceChanged");
screen_width_height_ratio = (double) width / height;
screen_height_width_ratio = (double) height / width;
// First call (mRatio == 0): fit the map's bounding rectangle into the screen,
// letterboxing along whichever axis the screen is proportionally wider.
if (mRatio == 0){
mWidth = (double) width;
mHeight = (double) height;
// Map extents from the project-global bounding rectangle.
double map_extents_width = default_settings.mbrMaxX - default_settings.mbrMinX;
double map_extents_height = default_settings.mbrMaxY - default_settings.mbrMinY;
double map_width_height_ratio = map_extents_width/map_extents_height;
//float map_height_width_ratio = map_extents_height/map_extents_width;
if (screen_width_height_ratio > map_width_height_ratio){
// Screen is wider than the map: fit height, pad width.
mRight = (screen_width_height_ratio * map_extents_height)/2;
mLeft = -mRight;
mTop = map_extents_height/2;
mBottom = -mTop;
}
else{
// Screen is taller than the map: fit width, pad height.
mRight = map_extents_width/2;
mLeft = -mRight;
mTop = (screen_height_width_ratio * map_extents_width)/2;
mBottom = -mTop;
}
mRatio = screen_width_height_ratio;
}
// Subsequent aspect changes (e.g. rotation): grow/shrink the existing extents
// symmetrically so the current zoom level is preserved.
if (screen_width_height_ratio != mRatio){
final double wRatio = width/mWidth;
final double oldWidth = mRight - mLeft;
final double newWidth = wRatio * oldWidth;
final double widthDiff = (newWidth - oldWidth)/2;
mLeft = mLeft - widthDiff;
mRight = mRight + widthDiff;
final double hRatio = height/mHeight;
final double oldHeight = mTop - mBottom;
final double newHeight = hRatio * oldHeight;
final double heightDiff = (newHeight - oldHeight)/2;
mBottom = mBottom - heightDiff;
mTop = mTop + heightDiff;
mWidth = (double) width;
mHeight = (double) height;
mRatio = screen_width_height_ratio;
}
// Refresh the pixel-per-map-unit ratios and the projection matrix.
screen_vs_map_horz_ratio = (mWidth/(mRight-mLeft));
screen_vs_map_vert_ratio = (mHeight/(mTop-mBottom));
Matrix.frustumM(mProjectionMatrix, 0, (float)mLeft, (float)mRight, (float)mBottom, (float)mTop, near, far);
}
// Scratch iterators/fields reused every frame by onDrawFrame to walk the
// project-global layer lists (avoids per-frame allocation in the draw loop).
ListIterator<mapLayer> orgNonAssetCatLayersList_it;
ListIterator<FloatBuffer> mapLayerObjectList_it;
ListIterator<Byte> mapLayerObjectTypeList_it;
mapLayer MapLayer;
@Override
public void onDrawFrame(GL10 unused) {
//Log.d("","onDrawFrame");
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
// Draw nothing until setDraw(true) signals the map data is loaded.
if (loadComplete){
// Upload the MVP matrix and bind the point texture for this frame.
drawPreset();
// Draw every non-asset layer's batched line and polygon geometry.
orgNonAssetCatLayersList_it = default_settings.orgNonAssetCatMappableLayers.listIterator();
while (orgNonAssetCatLayersList_it.hasNext()) {
MapLayer = orgNonAssetCatLayersList_it.next();
if (MapLayer.BatchedPointVBO != null){
// Intentionally empty: plain points for these layers are not drawn yet.
}
if (MapLayer.BatchedLineVBO != null){
drawLineString(MapLayer.BatchedLineVBO, MapLayer.lineStringObjColor);
}
if (MapLayer.BatchedPolygonVBO != null){
drawPolygon(MapLayer.BatchedPolygonVBO, MapLayer.polygonObjColor);
}
}
// The asset layer's points are drawn as textured squares.
// NOTE(review): mPointsProgramHandle is never made current — the line
// program from onSurfaceCreated is still active here, so the points
// program's cached attribute/uniform handles target the wrong program.
// A GLES20.glUseProgram(mPointsProgramHandle) call is likely needed first.
MapLayer = default_settings.orgAssetCatNDRRALayer;
if (MapLayer.BatchedPointVBO != null){
drawTexturedPoint(MapLayer.BatchedPointVBO);
}
if (MapLayer.BatchedLineVBO != null){
// Intentionally empty: the asset layer has no line rendering yet.
}
if (MapLayer.BatchedPolygonVBO != null){
// Intentionally empty: the asset layer has no polygon rendering yet.
}
}
}
/**
 * Per-frame setup: builds the MVP matrix (identity model * view * projection),
 * uploads it, and binds the point texture to texture unit 0.
 * NOTE(review): mMVPMatrixHandle and mTextureUniformHandle belong to two
 * different programs (lines and points respectively), but only one program
 * can be current when this runs — confirm which program these uniforms
 * actually reach.
 */
private void drawPreset()
{
Matrix.setIdentityM(mModelMatrix, 0);
// This multiplies the view matrix by the model matrix, and stores the result in the MVP matrix
// (which currently contains model * view).
Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
// This multiplies the modelview matrix by the projection matrix, and stores the result in the MVP matrix
// (which now contains model * view * projection).
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
// Set the active texture unit to texture unit 0.
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
// Bind the texture to this unit.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);
// Tell the texture uniform sampler to use this texture in the shader by binding to texture unit 0.
GLES20.glUniform1i(mTextureUniformHandle, 0);
}
/**
 * Draws the textured unit square used to represent a point of interest.
 *
 * @param geometryBuffer per-point center positions. Currently unused — only
 *        the square's own vertex buffer is drawn, at its model-space position;
 *        translating/scaling the square per point is still TODO.
 */
private void drawTexturedPoint(final FloatBuffer geometryBuffer)
{
// Pass in the position information.
mSquarePositions.position(0);
GLES20.glVertexAttribPointer(mPointPositionHandle, mPositionDataSize, GLES20.GL_FLOAT, false, 0, mSquarePositions);
GLES20.glEnableVertexAttribArray(mPointPositionHandle);
// Pass in the texture coordinate information.
mSquareTextureCoordinates.position(0);
GLES20.glVertexAttribPointer(mTextureCoordinateHandle, mTextureCoordinateDataSize, GLES20.GL_FLOAT, false, 0, mSquareTextureCoordinates);
GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
// Draw the square's two triangles. The buffer holds 6 vertices (18 floats);
// the previous hard-coded count of 36 was left over from cube sample code
// and told GL to read 30 vertices past the end of the buffer.
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, mSquarePositions.capacity() / mPositionDataSize);
}
/**
 * Draws batched line-segment geometry (GL_LINES) in one solid, fully opaque
 * color using the line program's attribute and uniform handles.
 *
 * @param geometryBuffer tightly packed x,y,z vertices, two per segment.
 * @param colorArray RGB components of the line color (alpha forced to 1).
 */
private void drawLineString(final FloatBuffer geometryBuffer, final float[] colorArray)
{
final int vertexCount = geometryBuffer.capacity() / mPositionDataSize;
// Point the position attribute at the start of the vertex data.
geometryBuffer.position(mPositionOffset);
GLES20.glVertexAttribPointer(mLinePositionHandle, mPositionDataSize, GLES20.GL_FLOAT, false, mPositionFloatStrideBytes, geometryBuffer);
GLES20.glEnableVertexAttribArray(mLinePositionHandle);
// One color for the entire batch, alpha pinned to opaque.
GLES20.glUniform4f(mLineColorUniformLocation, colorArray[0], colorArray[1], colorArray[2], 1f);
GLES20.glLineWidth(1.0f);
GLES20.glDrawArrays(GLES20.GL_LINES, 0, vertexCount);
}
/**
 * Draws batched polygon outlines. The polygon VBO stores edges as independent
 * GL_LINES segments, making the GL call sequence byte-for-byte identical to
 * {@link #drawLineString}; delegate to it instead of duplicating the code.
 *
 * @param geometryBuffer tightly packed x,y,z vertices, two per edge segment.
 * @param colorArray RGB components of the outline color (alpha forced to 1).
 */
private void drawPolygon(final FloatBuffer geometryBuffer, final float[] colorArray)
{
drawLineString(geometryBuffer, colorArray);
}
}