depth test problem in OpenGL 3.x

hi

I'm working with OpenGL 3.2 and I can't get the depth test to work.

I have enabled depth testing:

glEnable(GL_DEPTH_TEST);
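(For reference, depth testing also needs a depth buffer in the pixel format and a depth clear every frame. This is only a generic sketch of that setup, not the exact code from my renderer:)

glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LESS);   // default comparison, listed here for completeness
glClearDepth(1.0);      // clear to the far plane

// every frame, before drawing:
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);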

and this is my context-creation routine:


#if DENGINE_PLATFORM == DENGINE_PLATFORM_WIN32
m_hdc = (HDC)(win_handle);
HGLRC m_hrc;
m_hrc = wglCreateContext(m_hdc);
wglMakeCurrent(m_hdc, m_hrc);

PFNWGLCREATECONTEXTATTRIBSARBPROC wglCreateContextAttribsARB =
    (PFNWGLCREATECONTEXTATTRIBSARBPROC)wglGetProcAddress("wglCreateContextAttribsARB");
if (!wglCreateContextAttribsARB)
{
    return m_hrc;
}
else
{
    GLint major, minor;
    glGetIntegerv(GL_MAJOR_VERSION, &major);
    glGetIntegerv(GL_MINOR_VERSION, &minor);
    int attribs[] = {
        WGL_CONTEXT_MAJOR_VERSION_ARB, major,
        WGL_CONTEXT_MINOR_VERSION_ARB, minor,
        WGL_CONTEXT_FLAGS_ARB, WGL_CONTEXT_FORWARD_COMPATIBLE_BIT_ARB,
        0
    };

    HGLRC gl3Ctx = wglCreateContextAttribsARB(m_hdc, 0, attribs);
    if (gl3Ctx)
    {
        wglDeleteContext(m_hrc);
        m_hrc = gl3Ctx;
        wglMakeCurrent(m_hdc, m_hrc);
        DLogger.Log("OpenGL 3.x context created", io::ELML_SYS_MESSAGE);
    }
    else
    {
        DLogger.Log("can not initialize OpenGL 3.x context", io::ELML_WARNING);
    }
}

return m_hrc;

#elif DENGINE_PLATFORM == DENGINE_PLATFORM_LINUX
#endif
return NULL;

Did you check with glGetIntegerv(GL_DEPTH_BITS, &depthBits)?

Also, it depends on the pixel format you chose (on Windows: SetPixelFormat, DescribePixelFormat, etc.).
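Something like this (just a sketch; hdc stands for whatever device context you set the pixel format on) would show how many depth bits GDI actually gave you:

int pf = GetPixelFormat(hdc);   // pixel format currently set on the DC
PIXELFORMATDESCRIPTOR pfd;
memset(&pfd, 0, sizeof(pfd));
DescribePixelFormat(hdc, pf, sizeof(PIXELFORMATDESCRIPTOR), &pfd);
printf("depth bits in pixel format: %d\n", (int)pfd.cDepthBits);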

Yes, I set the depth bits.

My code for creating the window:


m_uHeight = height;
m_uWidth = width;
int pf; // pixel format
WNDCLASS wc;

// descriptor of pixel format
PIXELFORMATDESCRIPTOR pfd;
// hInstance doubles as a flag so the window class is only registered once
static HINSTANCE hInstance = 0;

if (!hInstance)
{
    hInstance = GetModuleHandle(NULL);
    wc.style         = CS_OWNDC;
    wc.lpfnWndProc   = (WNDPROC)WindowProc;
    wc.cbClsExtra    = 0;
    wc.cbWndExtra    = 0;
    wc.hInstance     = hInstance;
    wc.hIcon         = LoadIcon(NULL, IDI_WINLOGO);
    wc.hCursor       = LoadCursor(NULL, IDC_ARROW);
    wc.hbrBackground = NULL;
    wc.lpszMenuName  = NULL;
#ifdef _UNICODE
    wc.lpszClassName = L"DEngine";
#else
    wc.lpszClassName = "DEngine";
#endif

    if (!RegisterClass(&wc))
    {
        DLogger.Log("Err: window class can not be registered");
        App.exit();
    }
}

// creating window
#ifdef _UNICODE
m_hwnd = CreateWindowW(L"DEngine", title.getUnicode(),
                       WS_OVERLAPPEDWINDOW | WS_CLIPSIBLINGS | WS_CLIPCHILDREN,
                       100, 100, width, height, NULL, NULL, hInstance, NULL);
#else
m_hwnd = CreateWindowA("DEngine", title.getANSI(),
                       WS_OVERLAPPEDWINDOW | WS_CLIPSIBLINGS | WS_CLIPCHILDREN,
                       100, 100, width, height, NULL, NULL, hInstance, NULL);
#endif

if (m_hwnd == NULL)
{
    App.exit();
}

// getting device context
m_hdc = GetDC(m_hwnd);

/* there is no guarantee that the contents of the stack that become
   the pfd are zeroed, therefore _make sure_ to clear these bits. */
memset(&pfd, 0, sizeof(pfd));
pfd.nSize        = sizeof(pfd);
pfd.nVersion     = 1;
pfd.dwFlags      = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER;
pfd.iPixelType   = PFD_TYPE_RGBA;
pfd.cColorBits   = 32;
pfd.cDepthBits   = 24;

pf = ChoosePixelFormat(m_hdc, &pfd);
if (pf == 0)
{
    DLogger.Log("Err: can not choose pixel format");
    App.exit();
}

if (SetPixelFormat(m_hdc, pf, &pfd) == FALSE)
{
    DLogger.Log("Err: can not set pixel format");
    App.exit();
}

DescribePixelFormat(m_hdc, pf, sizeof(PIXELFORMATDESCRIPTOR), &pfd);

ReleaseDC(m_hwnd, m_hdc);

DRenderer.createContext((void*)(m_hdc));

ShowWindow(m_hwnd, SW_SHOWNORMAL);

glGetIntegerv(GL_DEPTH_BITS, &depthBits) doesn't return a sensible number (I get something like -89342384).

When I create the OpenGL 3.x context, the depth buffer doesn't seem to be created with it, but before that point glGetIntegerv(GL_DEPTH_BITS, &depthBits) returns 32.
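(For what it's worth, if glGetIntegerv raises an error it leaves the output variable untouched, so a value like -89342384 is probably just uninitialized memory. A rough check, sketched here, would make that visible:)

GLint depthBits = -1;   // sentinel so a failed query is obvious
glGetIntegerv(GL_DEPTH_BITS, &depthBits);
GLenum err = glGetError();   // GL_INVALID_ENUM would mean the query itself failed
if (err != GL_NO_ERROR)
    printf("GL_DEPTH_BITS query failed, error 0x%x\n", err);
else
    printf("GL_DEPTH_BITS = %d\n", depthBits);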

If you think the bug is in your code, upload your code somewhere. I'm on Windows and have VC++, so make sure there is a project file with it, such as a .dsw or .sln.
If you have some other type of project, then perhaps someone else can test it.

// Edit: actually I just tested, and if I call glGetIntegerv(GL_DEPTH_BITS, &DepthBits); I get GL_INVALID_ENUM with a forward-compatible context.
If I use

int attribs[] =
{
    WGL_CONTEXT_MAJOR_VERSION_ARB, 3,
    WGL_CONTEXT_MINOR_VERSION_ARB, 0,
    WGL_CONTEXT_FLAGS_ARB, 0,
    0
};

then glGetIntegerv(GL_DEPTH_BITS, &DepthBits) always returns 24.
This is possibly a driver bug.
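In a core or forward-compatible context the *_BITS queries are gone, so the replacement I'd try is the framebuffer attachment query (rough sketch, assuming the default framebuffer is bound):

GLint depthBits = 0;
// GL 3.x way of asking how many depth bits the default framebuffer has
glGetFramebufferAttachmentParameteriv(GL_FRAMEBUFFER, GL_DEPTH,
                                      GL_FRAMEBUFFER_ATTACHMENT_DEPTH_SIZE,
                                      &depthBits);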

Thanks, I think it works correctly (I need to test more), but glGetIntegerv(GL_DEPTH_BITS, &DepthBits) returns GL_INVALID_ENUM.

The problem is resolved with an OpenGL 3.0 context but not with OpenGL 3.2?!
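(Based on the replies above, my guess is that GL_DEPTH_BITS only survives in a compatibility-style context, so for 3.2 I would probably have to leave the forward-compatible bit off and request the compatibility profile explicitly, roughly like this:)

int attribs[] =
{
    WGL_CONTEXT_MAJOR_VERSION_ARB, 3,
    WGL_CONTEXT_MINOR_VERSION_ARB, 2,
    WGL_CONTEXT_FLAGS_ARB, 0,   // no forward-compatible bit
    WGL_CONTEXT_PROFILE_MASK_ARB, WGL_CONTEXT_COMPATIBILITY_PROFILE_BIT_ARB,
    0
};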