Since you are using GLUT, you can use glutGet(GLUT_ELAPSED_TIME) to measure time in a cross-platform way. One thing to be aware of when using any timer function: they are not guaranteed to be precise beyond 10-100 milliseconds. You can get around that by averaging over multiple frames spanning a second (10x the precision of the timer).
If you just want to control the speed of the display loop you can simply put a blocking condition in the display loop. I have attached a piece of minimal code that measures FPS with averaging and a blocking condition in the display loop to throttle the speed of the display (to turn on the throttling loop change in the idle() function “#undef REGULATE_FPS” to “#define REGULATE_FPS”). When REGULATE_FPS is undef’ed then rendering loop will go as fast as hardware allows and FPS will be >>60 (assuming vsync off on my machine I measure FPS ~8000) and when REGULATE_FPS is define’ed then the FPS will be fixed to 1 frame per 35milliseconds = ~29FPS which FPS() will measure. Note that it takes approximately 1 second after startup for the FPS() to accurately begin displaying the measured frames per second in the printf call.
ps for advanced users: if you want to go non-cross-platform to avoid an imprecise clock measure, please try to avoid QueryPerformanceCounter in Windows, since the performance counter value may unexpectedly leap forward. One approach I have found is to use the CPU RDTSC assembly op-code on Intel/AMD processors – see the wiki at Time Stamp Counter and for working code find cycle.h at the bottom. I like the RDTSC opcode since it works in Linux as well as Windows. But note, in my applications I have been perfectly happy to live with averaging over multiple frames to deal with the imprecise clock functions. end of comment for advanced users.
//this shows one way to get FPS using an imprecise clock
//or to throttle the drawing rate to a fixed FPS
//g++ glut_fps_demo.cpp -lGL -lglut
#include <cstdio>   // printf -- was previously pulled in only transitively via <iostream>
#include <cstdlib>
#include <iostream>
//linux openGL headers
#include <GL/gl.h>
#include <GL/glut.h>
GLint gFramesPerSecond = 0;
void FPS(void) {
static GLint Frames = 0; // frames averaged over 1000mS
static GLuint Clock; // [milliSeconds]
static GLuint PreviousClock = 0; // [milliSeconds]
static GLuint NextClock = 0; // [milliSeconds]
++Frames;
Clock = glutGet(GLUT_ELAPSED_TIME); //has limited resolution, so average over 1000mS
if ( Clock < NextClock ) return;
gFramesPerSecond = Frames/1; // store the averaged number of frames per second
PreviousClock = Clock;
NextClock = Clock+1000; // 1000mS=1S in the future
Frames=0;
}
void idle() {
#define REGULATE_FPS
#ifdef REGULATE_FPS
static GLuint PreviousClock=glutGet(GLUT_ELAPSED_TIME);
static GLuint Clock=glutGet(GLUT_ELAPSED_TIME);
static GLfloat deltaT;
Clock = glutGet(GLUT_ELAPSED_TIME);
deltaT=Clock-PreviousClock;
if (deltaT < 35) {return;} else {PreviousClock=Clock;}
#endif
//put your specific idle code here
//... this code will run at FPS
printf(".");
//end your specific idle code here
FPS(); //only call once per frame loop
glutPostRedisplay();
}
void display() {
glClearColor(0.0, 0.0, 0.0, 0.0);
glClear(GL_COLOR_BUFFER_BIT);
// Set the drawing color (RGB: WHITE)
printf("FPS %d
",gFramesPerSecond);
glColor3f(1.0,1.0,1.0);
glBegin(GL_LINE_STRIP); {
glVertex3f(0.25,0.25,0.0);
glVertex3f(0.75,0.25,0.0);
glVertex3f(0.75,0.75,0.0);
glVertex3f(0.25,0.75,0.0);
glVertex3f(0.25,0.25,0.0);
}
glEnd();
glutSwapBuffers();
}
/* One-time GL state setup: install an orthographic projection mapping
 * x,y in [0,1] (z in [-1,1]) onto the window.  Must run after a GL
 * context exists, i.e. after glutCreateWindow(). */
void init() {
glMatrixMode(GL_PROJECTION); // subsequent matrix ops target the projection matrix
glLoadIdentity();            // reset before applying the ortho transform
glOrtho(0.0,1.0,0.0,1.0,-1.0,1.0);
}
/* GLUT keyboard callback: terminates the program when ESC (ASCII 27) is
 * pressed; every other key is ignored.  x and y (mouse position) are unused. */
void keyboard(unsigned char key, int x, int y)
{
  if (key == 27) { // escape key
    exit(0);
  }
}
/* Program entry point: create the GLUT window, register the callbacks,
 * set up the projection, then hand control to the GLUT event loop. */
int main(int argc, char** argv) {
  glutInit(&argc, argv);
  glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB); // double-buffered RGB framebuffer
  glutCreateWindow("FPS test");
  // Callback registration -- order among these three is irrelevant.
  glutDisplayFunc(display);
  glutKeyboardFunc(keyboard);
  glutIdleFunc(idle);
  init(); // safe here: a GL context exists once the window is created
  glutMainLoop();
  return 0; // unreachable: glutMainLoop() never returns
}