I was curious if there is any way to keep a program from leaking memory while minimized.
I have tried with SFML and SDL, and both do the same thing.
sample program:
#include <SFML/Graphics.hpp>
#include <GL/gl.h>
void InitOpenGL(){
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glViewport(0, 0, 640, 480);
glEnable(GL_TEXTURE_2D);
glDisable(GL_DEPTH_TEST);
glEnable(GL_BLEND);
glEnable(GL_ALPHA_TEST) ;
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glMatrixMode( GL_PROJECTION );
glLoadIdentity();
glOrtho(0.0f, 640.0f, 480.0f, 0.0f, -1.0f, 1.0f);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
}
// Draws `texture` as an axis-aligned quad with its top-left corner at
// (x, y) and size w x h, in opaque white so the texture's own colors
// are shown unmodified.
void DrawImage(GLuint texture, GLfloat x, GLfloat y, GLfloat w, GLfloat h){
    const GLfloat right  = x + w;
    const GLfloat bottom = y + h;

    glBindTexture(GL_TEXTURE_2D, texture);
    glBegin(GL_QUADS);
    // The current color persists across vertices, so a single call
    // covers all four corners (same result as setting it per vertex).
    glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
    glTexCoord2f(0.0f, 0.0f); glVertex2f(x,     y);
    glTexCoord2f(1.0f, 0.0f); glVertex2f(right, y);
    glTexCoord2f(1.0f, 1.0f); glVertex2f(right, bottom);
    glTexCoord2f(0.0f, 1.0f); glVertex2f(x,     bottom);
    glEnd();
}
// Loads the image file at `image_path` into a new OpenGL texture with
// linear min/mag filtering.
// Returns the new texture name, or 0 (never a valid texture name) if
// the file could not be loaded. Takes the path by const reference to
// avoid a copy; call sites are unaffected.
GLuint CreateTexture(const std::string& image_path){
    sf::Image temp_image;
    // The original ignored this return value and uploaded uninitialized
    // pixel data when the file was missing or unreadable.
    if (!temp_image.LoadFromFile(image_path))
        return 0;

    GLuint texture = 0;
    glGenTextures(1, &texture);
    glBindTexture(GL_TEXTURE_2D, texture);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA,
                 temp_image.GetWidth(), temp_image.GetHeight(),
                 0, GL_RGBA, GL_UNSIGNED_BYTE, temp_image.GetPixelsPtr());
    return texture;
}
int main (){
sf::RenderWindow app;
bool is_running = true;
sf::Event event;
app.Create(sf::VideoMode(640, 480, 32), "Test App");
InitOpenGL();
GLuint test1, test2;
test1 = CreateTexture("test1.png");
test2 = CreateTexture("test2.png");
while (is_running){
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
app.GetEvent(event);
if (event.Type == sf::Event::Closed)
is_running = false;
DrawImage(test1, 0, 0, 64, 64);
DrawImage(test2, 320, 0, 64, 64);
app.Display();
}
return 0;
}
The program doesn't seem to have any problem when drawing only one image, but when drawing two or more, its memory usage increases by about 100 KB roughly every second — but ONLY WHEN MINIMIZED to the taskbar :-/ It is fine while the window is viewable…
the specs on machine im building on are…
Renderer: Intel 845G
Vendor: Intel
Memory: 64 MB
Version: 1.3.0 - Build 4.14.10.4342
Shading language version: N/A
(Taken from a program called OpenGL Viewer.)
I know this is an old machine…
Would I just have to use a different API for window management and such?
Would using the Windows API directly even make any difference?