Copy texture pixels to another texture.

I’m using OpenGL with Python, but I know it works pretty much the same way as in other languages. I’m trying to port my game, which previously ran on plain pygame, to OpenGL. I need to draw textures onto other textures to do the same thing as the pygame blit method. So far I have:

	def blit(self,surface,offset,area=None, special_flags = 0):
		#Create textures if not done already
		if self.texture == None:
			create_texture(self)
		if surface.texture == None:
			create_texture(surface)
		#Render child to parent
		

The texture variable for the classes contains the texture IDs. Given the offset from the parent (self), how can I draw the child texture (surface) onto it? The process also needs to be fast. The create_texture function simply creates the texture from any image or colour, ready to be placed onto other textures or to have other textures placed on it.

I’ve looked at pixel buffers, but I don’t know how to use them.

Thank you.

I have this at the end now:

#Render child to parent
		frame_buffer = glGenFramebuffersEXT(1)
		glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, frame_buffer)
		render_buffer = glGenRenderbuffersEXT(1)
		glBindRenderbufferEXT(GL_RENDERBUFFER_EXT, render_buffer);
		glRenderbufferStorageEXT(GL_RENDERBUFFER_EXT, GL_DEPTH_COMPONENT,self.surface_size[0],self.surface_size[1])
		glFramebufferRenderbufferEXT(GL_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_RENDERBUFFER_EXT, render_buffer)
		glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, self.texture, 0)
		print glCheckFramebufferStatusEXT(GL_FRAMEBUFFER_EXT)
		glPushAttrib(GL_VIEWPORT_BIT)
		glViewport(0,0,self.surface_size[0],self.surface_size[1])
		glMatrixMode(GL_MODELVIEW)
		glLoadIdentity() #Loads model matrix
		glBindTexture(GL_TEXTURE_2D, surface.texture)
		glBegin(GL_QUADS)
		glVertex2i(0, 0) #Top Left
		glVertex2i(0,surface.surface_size[1]) #Bottom Left
		glVertex2i(*surface.surface_size) #Bottom Right
		glVertex2i(surface.surface_size[0], 0) #Top Right
		glEnd()
		glPopAttrib()
		glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0)
		glDeleteFramebuffersEXT(1, frame_buffer)
		glDeleteRenderbuffersEXT(1,render_buffer)

I get an “invalid framebuffer operation” error.

glCheckFramebufferStatusEXT(GL_FRAMEBUFFER_EXT) outputs 36054

No idea what that means. I get nothing through Google. I’m clueless. I knew this wouldn’t work, but at least I gave it my best shot.

If anyone can fix this I will be most happy.

You’re so close. Why would you just give up?

First of all, 36054 == 0x8cd6 == GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT. See:

“Not all framebuffer attachment points are framebuffer attachment complete. This means that at least one attachment point with a renderbuffer or texture attached has its attached object no longer in existence or has an attached image with a width or height of zero, or the color attachment point has a non-color-renderable image attached, or the depth attachment point has a non-depth-renderable image attached, or the stencil attachment point has a non-stencil-renderable image attached.”

Perhaps you haven’t allocated the texels for the texture you’re trying to render to? Use glTexImage2D and allocate the base map.
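In PyOpenGL you can pass None as the final argument of glTexImage2D, which allocates the storage without uploading any pixels — a minimal sketch, where texture_id, width and height stand in for your own values:

#Allocate texels for an empty render target; None becomes a NULL
#pointer, so GL reserves storage without an upload
glBindTexture(GL_TEXTURE_2D, texture_id)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR)
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, None)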

Ah, thank you. So you can’t just use an empty texture. I understand now. I’ll try to make some changes and come back with the results.

I need to fill the texture with a blank colour. I haven’t done that yet. I’ll see what I can find.

This doesn’t go well; unichr doesn’t like it.

def create_texture(surface):
	surface.texture =  glGenTextures(1)
	glMatrixMode(GL_MODELVIEW)
	glLoadIdentity() #Loads model matrix
	glBindTexture(GL_TEXTURE_2D, surface.texture) #Binds the current 2D texture to the texture to be drawn
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR) #Required to be set for mapping the pixel data
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR) #Same as above
	if surface.data == None:
		print (surface.colour[0] << 24) |  (surface.colour[1] << 16) | (surface.colour[2] << 8) | (surface.colour[3])
		data = unichr((surface.colour[0] << 24) |  (surface.colour[1] << 16) | (surface.colour[2] << 8) | (surface.colour[3])) * (surface.surface_size[0] * surface.surface_size[1])
	glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, surface.surface_size[0], surface.surface_size[1], 0, GL_RGBA,GL_UNSIGNED_BYTE, surface.data) #Put surface pixel data into texture

I have a new error:

    glDeleteFramebuffersEXT(1, frame_buffer)
  File "/Library/Frameworks/Python.framework/Versions/2.5/lib/python2.5/site-packages/OpenGL/lazywrapper.py", line 19, in __call__
    return wrapper( baseFunction, *args, **named )
  File "/Library/Frameworks/Python.framework/Versions/2.5/lib/python2.5/site-packages/OpenGL/GL/EXT/framebuffer_object.py", line 32, in glDeleteFramebuffersEXT
    return baseOperation( n, framebuffers )
ctypes.ArgumentError: argument 2: <type 'exceptions.TypeError'>: No array-type handler for type <type 'numpy.uint32'> (value: 1) registered

I seem to be using glDeleteFramebuffersEXT wrong.

I fixed my last error with a rather bad method:

def create_texture(surface):
	surface.texture =  glGenTextures(1)
	glMatrixMode(GL_MODELVIEW)
	glLoadIdentity() #Loads model matrix
	glBindTexture(GL_TEXTURE_2D, surface.texture) #Binds the current 2D texture to the texture to be drawn
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR) #Required to be set for mapping the pixel data
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR) #Same as above
	if surface.data == None:
		surf = pygame.Surface((1,1),SRCALPHA)
		surf.fill(surface.colour)
		surface.data = pygame.image.tostring(surf, "RGBA") * (surface.surface_size[0] * surface.surface_size[1]) #Stored on the surface so glTexImage2D below actually uploads it
	glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, surface.surface_size[0], surface.surface_size[1], 0, GL_RGBA,GL_UNSIGNED_BYTE, surface.data) #Put surface pixel data into texture
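For what it’s worth, the same solid-colour buffer can be built without a throwaway pygame surface — a sketch assuming surface.colour holds four 0–255 components:

import struct

def solid_colour_data(colour, size):
	#Pack one RGBA pixel and repeat it for every texel
	pixel = struct.pack("4B", *colour)
	return pixel * (size[0] * size[1])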

print glCheckFramebufferStatusEXT(GL_FRAMEBUFFER_EXT) gives 36053.

How do I find out what the numbers correspond to?

I’ve managed to get rid of the error using a bizarre method:

def blit(self,surface,offset,area=None, special_flags = 0):
		#Create textures if not done already
		if self.texture == None:
			create_texture(self)
		if surface.texture == None:
			create_texture(surface)
		#Render child to parent
		frame_buffer =  glGenFramebuffersEXT(1)
		glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, frame_buffer)
		render_buffer = glGenRenderbuffersEXT(1)
		glBindRenderbufferEXT(GL_RENDERBUFFER_EXT, render_buffer);
		glRenderbufferStorageEXT(GL_RENDERBUFFER_EXT, GL_DEPTH_COMPONENT,self.surface_size[0],self.surface_size[1])
		glFramebufferRenderbufferEXT(GL_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_RENDERBUFFER_EXT, render_buffer)
		glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, self.texture, 0)
		print glCheckFramebufferStatusEXT(GL_FRAMEBUFFER_EXT)
		glPushAttrib(GL_VIEWPORT_BIT)
		glViewport(0,0,self.surface_size[0],self.surface_size[1])
		glMatrixMode(GL_MODELVIEW)
		glLoadIdentity() #Loads model matrix
		glBindTexture(GL_TEXTURE_2D, surface.texture)
		glBegin(GL_QUADS)
		glVertex2i(*offset) #Top Left
		glVertex2i(offset[0],offset[1] + surface.surface_size[1]) #Bottom Left
		glVertex2i(offset[0] + surface.surface_size[0],offset[1] + surface.surface_size[1]) #Bottom Right
		glVertex2i(offset[0] + surface.surface_size[0],offset[1]) #Top Right
		glEnd()
		glPopAttrib()
		glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0)
		glDeleteFramebuffersEXT(1, [int(frame_buffer)])
		glDeleteRenderbuffersEXT(1,[int(render_buffer)])

I had to wrap the C-type uint in a list, with the value converted to a Python int. At least, that’s how it appears.
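For reference, PyOpenGL seems to accept any sequence for the second argument, so the int() conversion is probably optional — a sketch:

#A list converts cleanly; a bare numpy.uint32 scalar does not
glDeleteFramebuffersEXT(1, [frame_buffer])
glDeleteRenderbuffersEXT(1, [render_buffer])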

Now glFramebufferTexture2DEXT raises an “invalid operation” error. I thought that only happened between glBegin and glEnd? Can it happen at any other time? It doesn’t raise the error for most of the textures, and I see nothing different that would break it. I’ll take a closer look myself, but if anyone knows why the error might appear, it would help me a lot.

Thank you. It may seem as if I’ll never get this working, but I’ve got a lot working in the past; I’m quite persistent. OpenGL is important, or the game will be a lot worse than people expect it to be, mainly due to speed.
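One way to narrow down which call fails is to poll glGetError between the suspect calls — a minimal sketch (PyOpenGL normally raises GLError on its own, as the traceback above shows, so this mainly helps when its automatic error checking is turned off):

from OpenGL.GL import glGetError, GL_NO_ERROR

def check_gl_error(label):
	#Drain the GL error queue; label is any tag you choose for the call site
	err = glGetError()
	while err != GL_NO_ERROR:
		print "GL error 0x%x after %s" % (err, label)
		err = glGetError()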

Well, the “hard” way is to print the number in hex (printf: %x), which yields 0x8cd5, then grep (search) for it in glext.h:

> grep -i 0x8cd5 /usr/include/GL/glext.h
#define GL_FRAMEBUFFER_COMPLETE 0x8CD5
#define GL_FRAMEBUFFER_COMPLETE_EXT 0x8CD5

which is what you want.

However, this mechanical stuff is best left to your CPU.

The “easy” way is to just write a function to translate the number to a string for you:

void checkFBOStatus ()
{
  GLenum status = glCheckFramebufferStatus( GL_FRAMEBUFFER );

  const char *err_str = 0;
  char buf[80];

  if ( status != GL_FRAMEBUFFER_COMPLETE )
  {
    switch ( status )
    {
      case GL_FRAMEBUFFER_UNSUPPORTED:
        err_str = "UNSUPPORTED";
        break;
      case GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT:
        err_str = "INCOMPLETE ATTACHMENT";
        break;
      case GL_FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER:
        err_str = "INCOMPLETE DRAW BUFFER";
        break;
      case GL_FRAMEBUFFER_INCOMPLETE_READ_BUFFER:
        err_str = "INCOMPLETE READ BUFFER";
        break;
      case GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT:
        err_str = "INCOMPLETE MISSING ATTACHMENT";
        break;
      case GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE:
        err_str = "INCOMPLETE MULTISAMPLE";
        break;
      case GL_FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS:
        err_str = "INCOMPLETE LAYER TARGETS";
        break;
        
      // Removed in version #117 of the EXT_framebuffer_object spec
      //case GL_FRAMEBUFFER_INCOMPLETE_DUPLICATE_ATTACHMENT:

      default:
        sprintf( buf, "0x%x", status );
        err_str = buf ;
        break;
    }

    printf( "ERROR: glCheckFramebufferStatus() returned '%s'", err_str );
  }
}

Note that depending on how old your glext.h is, you might need to slap an _EXT on some of these defines to get this to compile, or even comment some out if they’re just not there.
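A rough PyOpenGL equivalent for the Python side of this thread — a sketch, with the same caveat that some of these names may or may not carry the _EXT suffix in your PyOpenGL version:

from OpenGL.GL.EXT.framebuffer_object import *

#Map the status enum back to a readable name
FBO_STATUS_NAMES = {
	int(GL_FRAMEBUFFER_COMPLETE_EXT): "COMPLETE",
	int(GL_FRAMEBUFFER_UNSUPPORTED_EXT): "UNSUPPORTED",
	int(GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT_EXT): "INCOMPLETE ATTACHMENT",
	int(GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT_EXT): "INCOMPLETE MISSING ATTACHMENT",
	int(GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS_EXT): "INCOMPLETE DIMENSIONS",
	int(GL_FRAMEBUFFER_INCOMPLETE_FORMATS_EXT): "INCOMPLETE FORMATS",
	int(GL_FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER_EXT): "INCOMPLETE DRAW BUFFER",
	int(GL_FRAMEBUFFER_INCOMPLETE_READ_BUFFER_EXT): "INCOMPLETE READ BUFFER",
}

def check_fbo_status():
	status = glCheckFramebufferStatusEXT(GL_FRAMEBUFFER_EXT)
	print FBO_STATUS_NAMES.get(int(status), hex(status))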

See the EXT_framebuffer_object extension spec for more details on what these return values mean.

Thank you. I’ll look for that file in the future.

I’ll be looking at the texture that is causing this bizarre invalid operation problem.

Okay, I’ve determined that the error occurs the first time this method is called on the second frame; in other words, the second time a texture is drawn onto another texture. Could it be that once a texture has been drawn, drawing it again on a later frame is a problem? The textures drawn to the screen texture need to be redrawn every frame, as the screen is cleared after each one.

Using a texture for the entire screen is stupid, I know. I will change that once I’ve got this error sorted out.
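As an aside, generating and deleting a framebuffer object on every blit is costly. A common pattern is to create one FBO up front and just re-point its colour attachment each time — a sketch under that assumption (the placement and helper names are hypothetical):

#Created once, e.g. at startup:
fbo = glGenFramebuffersEXT(1)

def begin_render_to(texture):
	#Re-bind the shared FBO and swap in the target texture
	glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, fbo)
	glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, texture, 0)

def end_render_to():
	glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0) #Back to the window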

“Okay, I’ve determined that the error occurs the first time this method is called on the second frame.”

Each of the above incomplete codes corresponds to a specific kind of problem. Which code are you getting?

Here’s the complete output of the error:

OpenGL.error.GLError: GLError(
	err = 1282,
	description = 'invalid operation',
	baseOperation = glFramebufferTexture2DEXT,
	cArguments = (
		GL_FRAMEBUFFER_EXT,
		GL_COLOR_ATTACHMENT0_EXT,
		GL_TEXTURE_2D,
		6L,
		0,
	)
)

I’m now using direct rendering to the screen. That solves it for rendering to the screen, but it remains a problem when a texture inside another texture needs to change.

Thank you.

I’m working on some code to copy lines of text drawn on the screen into a texture, then render that texture as a single quad (as opposed to rendering each character in the text as an individual quad).

Although I have managed to copy the texture successfully, the text becomes noticeably thinner when copied with an alpha channel.

The code I use is as follows:


// For saving all text on screen as a texture
void font_capture()
{
   glBindTexture(GL_TEXTURE_2D, fullscreen_texture);
   glCopyTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 0, 0, 512, 512, 0);
}

// For drawing the saved texture
void font_draw(float x, float y, float width)
{
   glPushMatrix();
   glLoadIdentity();
   glTranslatef(x, y, 0);
   glDisable(GL_COLOR_MATERIAL);
   glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
   glEnable(GL_TEXTURE_2D);
   glDisable(GL_CULL_FACE);
   glDisable(GL_LIGHTING);
   glEnable(GL_BLEND);
   glDisable(GL_DEPTH_TEST);
   glDepthMask(false);

   glColor3f(1, 1, 1);
   glBindTexture(GL_TEXTURE_2D, fullscreen_texture);
   glBegin(GL_QUADS);
   glTexCoord2f(0, 0);
   glVertex3f(0, width, 0.0f);
   glTexCoord2f(0, 1);
   glVertex3f(0.0f, 0.0f, 0.0f);
   glTexCoord2f(1, 1);
   glVertex3f(width, 0, 0.0f);
   glTexCoord2f(1, 0);
   glVertex3f(width, width, 0.0f);
   glEnd();

   glPopMatrix();

   glDisable(GL_TEXTURE_2D);
   glEnable(GL_LIGHTING);
   glEnable(GL_CULL_FACE);
   glEnable(GL_DEPTH_TEST);
   glDepthMask(true);
}

When saving the texture, I have tried setting GL_RGBA2, GL_RGBA4, GL_RGBA8, GL_RGBA12 and GL_RGBA16 as alternative internal formats; 2 and 4 resulted in white streaks appearing in the quad, while 8 onwards made no visible difference to either the quad or the text.

Saving the texture using GL_RGB results in an exact copy of the text and quad, but the texture hides all objects rendered behind it.

As I wish to draw objects behind the text, is there any way to preserve the alpha layer, yet allow the text in the copied texture to look the same as when it is directly drawn to the screen?

Thank you.

Just realized that it may be inappropriate for me to post a new question in this thread (especially when Matthew’s problem hasn’t been solved yet)…

If anyone is offended by it, just say so and I will shift my question to a new thread.

regfurby: Offended, no, but it is true this is bad practice… As for your question: this effect is normal, as you end up doing double blending, which cannot look the same as single blending. Tricks such as taking premultiplied alpha into account can work, with a slightly different blendfunc. Please continue in your own thread.
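To illustrate the premultiplied-alpha idea: if the captured texture’s RGB has already been multiplied by its alpha, you composite it with GL_ONE as the source factor instead of GL_SRC_ALPHA — a sketch (shown in Python like the rest of the thread; the calls map one-to-one to C):

#Draw the captured texture assuming its colour channels are
#already multiplied by alpha (avoids the double-blend thinning)
glEnable(GL_BLEND)
glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA)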

Heh, well back to my problem.

Testing the rendering of the textures to the screen shows the rectangle primitives are being drawn correctly; I set the colour to half alpha so I can see the rectangles no matter what.

The problem is clearly that the textures aren’t being rendered correctly to other textures with the frame buffer method.

I’ll show some more of my relevant code:

Edit: It seems that when I render an image straight to the screen, without it being rendered to another texture first, it still fails.
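One thing worth checking in the code below: the blit path pushes the viewport for the FBO pass but never swaps the projection matrix, so the window’s gluOrtho2D(0,1280,720,0) mapping gets reused for render targets of any size. A sketch of giving the FBO pass its own orthographic projection, wrapped in push/pop so the window projection survives:

glMatrixMode(GL_PROJECTION)
glPushMatrix()
glLoadIdentity()
gluOrtho2D(0, self.surface_size[0], self.surface_size[1], 0) #Match the target texture
glMatrixMode(GL_MODELVIEW)
#...draw the child quad here...
glMatrixMode(GL_PROJECTION)
glPopMatrix()
glMatrixMode(GL_MODELVIEW)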


def create_texture(surface):
	surface.texture =  glGenTextures(1)
	glMatrixMode(GL_MODELVIEW)
	glLoadIdentity() #Loads model matrix
	glBindTexture(GL_TEXTURE_2D, surface.texture) #Binds the current 2D texture to the texture to be drawn
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR) #Required to be set for mapping the pixel data
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR) #Same as above
	if surface.data == None:
		surf = pygame.Surface((1,1),SRCALPHA)
		surf.fill(surface.colour)
		surface.data = pygame.image.tostring(surf, "RGBA") * (surface.surface_size[0] * surface.surface_size[1]) #Stored on the surface so glTexImage2D below actually uploads it
	glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, surface.surface_size[0], surface.surface_size[1], 0, GL_RGBA,GL_UNSIGNED_BYTE, surface.data) #Put surface pixel data into texture
def draw_texture(texture,offset,size,c):
	glMatrixMode(GL_MODELVIEW)
	glLoadIdentity() #Loads model matrix
	glColor4fv(c)
	glBindTexture(GL_TEXTURE_2D, texture)
	glBegin(GL_QUADS)
	glTexCoord2f(0.0, 0.0)
	glVertex2i(*offset) #Top Left
	glTexCoord2f(0.0, 1.0)
	glVertex2i(offset[0],offset[1] + size[1]) #Bottom Left
	glTexCoord2f(1.0, 1.0)
	glVertex2i(offset[0] + size[0],offset[1] + size[1]) #Bottom, Right
	glTexCoord2f(1.0, 0.0)
	glVertex2i(offset[0] + size[0],offset[1]) #Top, Right
	glEnd()
class Surface():
	def __init__(self,size,extra = None):
		self.__offset = (0,0)
		self.children = []
		self.blitted = False
		self.last_offset = [0,0]
		self.surface_size = [size[0],size[1]]
		self.colour = [0,0,0,255]
		self.data = None
		self.rounded = 0
		self.parent = None
		self.parent_offset = (0,0)
		self.texture = None
	def blit(self,surface,offset,area=None, special_flags = 0):
		#Create textures if not done already
		if self.texture == None:
			create_texture(self)
		if surface.texture == None:
			create_texture(surface)
		#Render child to parent
		frame_buffer =  glGenFramebuffersEXT(1)
		glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, frame_buffer)
		render_buffer = glGenRenderbuffersEXT(1)
		glBindRenderbufferEXT(GL_RENDERBUFFER_EXT, render_buffer);
		glRenderbufferStorageEXT(GL_RENDERBUFFER_EXT, GL_DEPTH_COMPONENT,self.surface_size[0],self.surface_size[1])
		glFramebufferRenderbufferEXT(GL_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_RENDERBUFFER_EXT, render_buffer)
		glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, self.texture, 0)
		glPushAttrib(GL_VIEWPORT_BIT)
		glViewport(0,0,self.surface_size[0],self.surface_size[1])
		draw_texture(surface.texture,offset,surface.surface_size,[float(c)/255.0 for c in surface.colour])
		glPopAttrib()
		glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0)
		glDeleteFramebuffersEXT(1, [int(frame_buffer)])
		glDeleteRenderbuffersEXT(1,[int(render_buffer)])
		try:
			offset_before_last = surface.last_offset
			surface.last_offset = [offset[0] + self.__offset[0],offset[1] + self.__offset[1]]
			self.children.append([0,surface]) #0 states it is a surface
			surface.parent = self
			surface.parent_offset = offset
			if surface.get_offset() != surface.last_offset or not surface.blitted:
				surface.__set_offset(surface.last_offset)
				self.__recursive_offset_add(surface,offset_before_last,surface.last_offset) #Add to the children's offsets
			surface.blitted = True
		except AttributeError:
			pass
	def __recursive_offset_add(self,surface,offset_before_last,last_offset):
		for kind,child in surface.children: #Entries are [type, surface] pairs
			try:
				child.__set_offset((child.get_offset()[0] - offset_before_last[0] + last_offset[0],child.get_offset()[1] - offset_before_last[1] + last_offset[1]))
				self.__recursive_offset_add(child,offset_before_last,last_offset)
			except AttributeError:
				pass
	def get_offset(self):
		return self.__offset
	def __set_offset(self,offset):
		self.__offset = offset
	def fill(self,colour):
		colour = list(colour)
		if len(colour) < 4:
			colour.append(255)
		self.children = []
		self.textures = []
		self.colour = colour
	def get_size(self):
		return self.surface_size
	def get_width(self):
		return self.surface_size[0]
	def get_height(self):
		return self.surface_size[1]
class ScaledScreen(Surface):
	game_size = None
	first_screen = None
	screen = None
	fs = False #Fullscreen false to start
	clock = None
	resize = True
	game_gap = None
	game_scaled = None
	title = None
	fps = -1
	enter_fullscreen = False
	exit_fullscreen = False
	scale_to_screen = False
	iconify = False
	on_focus_fullscreen = False
	f_key = False
	def __init__(self,title,game_size,on_exit):
		pygame.init()
		self.title = title
		self.game_size = game_size
		screen_info = pygame.display.Info() #Required to set a good resolution for the game screen
		self.first_screen = (screen_info.current_w, screen_info.current_h - 120) #Take 120 pixels from the height because the menu bar, window bar and dock take up space
		pygame.display.set_caption(self.title)
		self.clock = pygame.time.Clock()
		self.game_gap = (0,0)
		self.on_exit = on_exit
		self.mod_key = 1024 if sys.platform == "darwin" else 64
		pygame.display.set_mode(self.first_screen,RESIZABLE|DOUBLEBUF|OPENGL)
		#OpenGL Parts
		Surface.__init__(self,game_size)
		self.textures = []
		self.screen_change = True
	def update(self,events):
		#Updates screen properly
		win_size_done = False #Changes to True if the window size is got by the VIDEORESIZE event below
		for event in events:
			if event.type == QUIT:
				self.on_exit()
			if event.type == VIDEORESIZE:
				ss = [event.w,event.h]
				self.resize = True
				win_size_done = True
		keys = pygame.key.get_pressed() #Get the pressed keys
		if pygame.key.get_mods() & self.mod_key:
			if(keys[K_q] or keys[K_w]):
				self.on_exit()
			if keys[K_f]:
				if self.f_key == False:
					self.f_key = True
					if self.fs == False:
						self.enter_fullscreen = True
					else:
						self.exit_fullscreen = True
			else:
				self.f_key = False
		if self.on_focus_fullscreen and pygame.display.get_active():
			self.on_focus_fullscreen = False
			self.enter_fullscreen = True
		if self.enter_fullscreen:
			self.screen_change = True
			pygame.mouse.set_visible(False)
			self.screen = pygame.display.set_mode((self.first_screen[0],self.first_screen[1]+ 120))
			if self.scale_to_screen:
				self.game_scaled = (self.screen.get_width(),self.screen.get_height())
			else:
				self.game_scaled = get_resolution(self.screen,(self.screen.get_width(),self.screen.get_height()),self.game_size)
			self.game_gap = [(self.screen.get_width() - self.game_scaled[0])/2,(self.screen.get_height() - self.game_scaled[1])/2]
			pygame.display.set_mode((0,0), FULLSCREEN|HWSURFACE|DOUBLEBUF|OPENGL)
			self.fs = True
			self.enter_fullscreen = False
			self.resize = False
		elif self.exit_fullscreen:
			self.screen_change = True
			pygame.mouse.set_visible(True)
			pygame.display.set_mode(self.first_screen,RESIZABLE|DOUBLEBUF|OPENGL)
			self.fs = False
			self.resize = True
			self.game_gap = (0,0)
			self.exit_fullscreen = False
			if self.iconify:
				self.on_focus_fullscreen = True
		#Scale game to screen resolution, keeping aspect ratio
		if self.resize:
			self.screen_change = True
			if(win_size_done == False): #Sizes not gotten by resize event
				ss = [self.screen.get_width(),self.screen.get_height()]
			self.game_scaled = get_resolution(self.screen,ss,self.game_size)
			pygame.display.set_mode(self.game_scaled,RESIZABLE|DOUBLEBUF|OPENGL)
			self.resize = False #Next time do not scale unless resize or fullscreen events occur
		if self.iconify:
			pygame.display.iconify() #Minimise
			self.iconify = False
		#Open GL Screen setup
		if self.screen_change:
			glViewport(0,0,self.game_scaled[0],self.game_scaled[1]) #Creates the viewport which is mapped to the window
			glEnable(GL_LINE_SMOOTH) #Create antialiased lines
			glHint(GL_LINE_SMOOTH_HINT,GL_FASTEST) #glEnable takes a single flag; the quality hint goes in glHint
			glEnable(GL_BLEND) #Enable alpha blending
			glEnable(GL_TEXTURE_2D) #Enable 2D Textures
			glDisable(GL_DEPTH_TEST) #Disable depth
			glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA) #The recommended blending functions
			glMatrixMode(GL_PROJECTION)
			glLoadIdentity() #Load the projection matrix
			gluOrtho2D(0,1280,720,0) #Set an orthographic view
		self.screen_change = False
		pygame.display.flip() #Flip buffer
		self.textures = []
		self.children = []
		self.clock.tick(60)
		self.fps = self.clock.get_fps()
		pygame.display.set_caption(self.title + " - " + str(int(self.fps)) + "fps")
		glClear(GL_COLOR_BUFFER_BIT)
	def blit(self,surface,offset,area=None, special_flags = 0):
		if surface.texture == None:
			create_texture(surface)
		print surface.colour
		draw_texture(surface.texture,offset,surface.surface_size,[float(c)/255.0 for c in surface.colour])

I removed quite a bit of irrelevant code.

Oddly, this in the update method shows the picture:

#Open GL Screen setup
		if self.screen_change:
			self.opengl_window_setup()
		self.screen_change = False
		image = open_image(os.path.dirname(sys.argv[0]) + "/images/menu_images/1.png")
		self.blit(image,(20,20))
		pygame.display.flip() #Flip buffer
		self.textures = []
		self.children = []
		self.clock.tick(60)
		self.fps = self.clock.get_fps()
		pygame.display.set_caption(self.title + " - " + str(int(self.fps)) + "fps")
		glClear(GL_COLOR_BUFFER_BIT)

It’s done after the OpenGL setup part, but I’ve added the setup code to the __init__ method as well, which is called before everything else in the game.