GL_POINTS test, help me understand this.



pradyumna
11-13-2013, 01:18 AM
I am on OS X
Using Xcode

Problem: I can see 3 points on the screen even though there are only 2 vertices in the array.

math_3d.h

struct Vector3f {
    float x;
    float y;
    float z;

    Vector3f() {
    }

    Vector3f(float _x, float _y, float _z) {
        x = _x;
        y = _y;
        z = _z;
    }
};


main.cpp


#include <iostream>
#include <cstdio>   // fprintf
#include <glew.h>
#include <OpenGL/gl.h>
#include <OpenGL/glu.h>
#include <GLUT/glut.h>
#include "math_3d.h"

GLuint VBO;

static Vector3f Vertices[2];

static void RenderSceneCB()
{
    glClear(GL_COLOR_BUFFER_BIT);
    glEnableVertexAttribArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, VBO);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);

    glDrawArrays(GL_POINTS, 0, sizeof(Vertices));

    glDisableVertexAttribArray(0);
    glutSwapBuffers();
}

static void InitializeGlutCallbacks()
{
    glutDisplayFunc(RenderSceneCB);
}

static void CreateVertexBuffer()
{
    Vertices[0] = Vector3f(0.4f, 0.4f, 0.0f);
    Vertices[1] = Vector3f(-0.5f, -0.5f, 0.0f);

    glGenBuffers(1, &VBO);
    glBindBuffer(GL_ARRAY_BUFFER, VBO);
    glBufferData(GL_ARRAY_BUFFER, sizeof(Vertices), Vertices, GL_STATIC_DRAW);
}

int main(int argc, char * argv[])
{
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA);
    glutInitWindowSize(1024, 768);
    glutInitWindowPosition(100, 100);
    glutCreateWindow("Points");

    InitializeGlutCallbacks();
    GLenum glewRes = glewInit();
    if (glewRes != GLEW_OK) {
        fprintf(stderr, "Error: '%s'\n", glewGetErrorString(glewRes));
        return 1;
    }

    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    CreateVertexBuffer();
    glutMainLoop();

    return 0;
}

Dark Photon
11-13-2013, 05:14 AM
sizeof() returns a size in bytes. glDrawArrays, on the other hand, takes a vertex count (the number of elements to draw, not bytes) as its 3rd argument.

I believe you want to use ARRAY_SIZE( Vertices ) instead, where ARRAY_SIZE is defined by:


#define ARRAY_SIZE(a) (sizeof(a)/sizeof((a)[0]))


This will evaluate to 2 in your case, instead of the 24 you're passing now (2 vertices * 12 bytes each). With a count of 24, the GL fetches 22 vertices from past the end of your buffer; those values are undefined, and on many drivers they come back as a constant (often zeros), so the extra points pile up on top of each other and show up as a single stray third point.
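
For example, the draw call in RenderSceneCB would become something like this (a minimal sketch, assuming the ARRAY_SIZE macro is defined near the top of your main.cpp and VBO/Vertices are as in your post):


#define ARRAY_SIZE(a) (sizeof(a)/sizeof((a)[0]))

static void RenderSceneCB()
{
    glClear(GL_COLOR_BUFFER_BIT);
    glEnableVertexAttribArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, VBO);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);

    // ARRAY_SIZE(Vertices) == 2 elements, whereas sizeof(Vertices) == 24 bytes
    glDrawArrays(GL_POINTS, 0, ARRAY_SIZE(Vertices));

    glDisableVertexAttribArray(0);
    glutSwapBuffers();
}


glDrawArrays then processes exactly the two vertices you uploaded, so only two points are drawn.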