I’m using strips and i’m implementing VAO into my rasteriser.
Unfortunately, my vertices look like this: ;))
/* Interleaved vertex layout stored in the VAO buffer.
   Fixed 16-byte ($10) header (position, normal, color), followed by
   0-4 optional float UV pairs, so the per-vertex stride ranges from
   $10 to $30 bytes depending on how many texture channels are used. */
typedef struct SVR_VERTEX
{
VR_SHORT x; // position, signed 16-bit              $0
VR_SHORT y; //                                      $2
VR_SHORT z; //                                      $4
VR_SHORT rienz; // "rien" — presumably padding/unused $6
VR_CHAR nx; //non transformed normal (-128,127)     $8
VR_CHAR ny; //                                      $9
VR_CHAR nz; //                                      $0a
VR_CHAR rien; // presumably padding/unused          $0b
VR_BYTE r; //Fantastic colors..                     $0c
VR_BYTE g; //                                       $0d
VR_BYTE b; //                                       $0e
VR_BYTE a; //                                       $0f
union {
VR_UV texCoord[4]; // up to 4 UV channels           $10
struct{
VR_FLOAT u0,v0; // $10,$14
VR_FLOAT u1,v1; // $18,$1c
VR_FLOAT u2,v2; // $20,$24
VR_FLOAT u3,v3; // $28,$2c  (original comment said $3c; v3 sits 4 bytes after u3)
};
};
}VR_VERTEX;
As you can see, I'm only using floating-point values for my texCoords…
and note that primitive sizes can vary depending on the texture channels used… (minPrimSize $10, maxPrimSize $30)
Below the code needed to store prims onboard… (simply using GL_STATIC_ATI)
/* Total upload size = per-vertex stride (varies with UV channel count) * vertex count. */
size = sizePrim * pInfos->nb;
/* Upload the whole list into a GL_ATI_vertex_array_object buffer.
   GL_STATIC_ATI: data is written once here and only read for drawing afterwards.
   NOTE(review): the handle is stored in pInfos->object, but the render path
   below reads pInfos->plistCache — verify these refer to the same buffer. */
pInfos->object = glNewObjectBufferATI(size, pList, GL_STATIC_ATI);
and the rendering stuff…
/*
 * Draws a stored vertex list through the GL_ATI_vertex_array_object path.
 *
 * handle - index into pListInfos[] selecting the list to draw
 * type   - primitive type (index into primtypes[], e.g. triangle strips)
 * start  - first vertex to draw
 * nb     - number of vertices to draw
 *
 * Binds the position array (always), plus color / normal / texcoord arrays
 * according to the per-list flags, issues glDrawArrays, then disables
 * every client state it enabled.
 */
static VR_VOID displayVertexListGlATI(VR_LONG handle,VR_PRIM_TYPES type,VR_DWORD start,VR_DWORD nb)
{
VR_VERTEX *pList;
VR_LIST_INFOS *pInfos;
VR_DWORD sizePrim,offset,maxChannels,channel;
VR_DWORD object;

pInfos = &pListInfos[handle];
pList = (VR_VERTEX*) pInfos->pList;
if (pList == NULL) return;

/* Initialize so the disable loop at the end is safe even if the
   texcoord-enable branch is skipped. */
maxChannels = 0;

/* Per-vertex stride in bytes ($10..$30 depending on UV channel count). */
sizePrim = vrRasterPrimSize[pInfos->bits.uvChannels];

/* NOTE(review): buffer creation stored the VAO handle in pInfos->object
   (glNewObjectBufferATI), but this reads pInfos->plistCache. If plistCache
   is not an alias of object, every glArrayObjectATI call below points at
   the wrong buffer and nothing is drawn — confirm against the setup code. */
object = (VR_DWORD) pInfos->plistCache;

glEnableClientState(GL_VERTEX_ARRAY);
glArrayObjectATI(GL_VERTEX_ARRAY, 3, GL_SHORT, sizePrim , object, 0);
if (pInfos->bits.useColors){
glEnableClientState(GL_COLOR_ARRAY_EXT);
glArrayObjectATI(GL_COLOR_ARRAY_EXT, 4, GL_UNSIGNED_BYTE, sizePrim, object, 0x0c);
}
if (pInfos->bits.useNormals){
glEnableClientState(GL_NORMAL_ARRAY_EXT);
glArrayObjectATI(GL_NORMAL_ARRAY_EXT, 3, GL_BYTE, sizePrim, object, 0x8);
}
if (pInfos->bits.useTexCoords){
/* Clamp the stored channel count to what the hardware exposes. */
if (pInfos->bits.uvChannels > rasterInfos.caps.nbTextureUnits)
maxChannels = rasterInfos.caps.nbTextureUnits;
else
maxChannels = pInfos->bits.uvChannels;
offset = 0x10; /* first UV pair follows the fixed 16-byte header */
for (channel = 0;channel < maxChannels;channel++){
glClientActiveTextureARB(GL_TEXTURE0_ARB+channel);
glMatrixMode(GL_TEXTURE);
/* FIX: source had "¤tMatrix" — an HTML-entity mangling
   ("&curren;" = ¤) of "&currentMatrix". Restored the address-of. */
glLoadMatrixf((float*)&currentMatrix[channel]);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glArrayObjectATI(GL_TEXTURE_COORD_ARRAY, 2, GL_FLOAT, sizePrim, object, offset);
offset += 8; /* two VR_FLOATs (u,v) per channel */
}
/* FIX: restore matrix mode so texture-matrix mode doesn't leak into
   subsequent matrix calls made by the caller. */
glMatrixMode(GL_MODELVIEW);
}
glDrawArrays(primtypes[type],start,nb);

/* Tear down everything we enabled above. */
glDisableClientState(GL_VERTEX_ARRAY);
if (pInfos->bits.useColors)
glDisableClientState(GL_COLOR_ARRAY_EXT);
if (pInfos->bits.useNormals)
glDisableClientState(GL_NORMAL_ARRAY_EXT);
if (pInfos->bits.useTexCoords){
for (channel = 0;channel < maxChannels;channel++){
glClientActiveTextureARB(GL_TEXTURE0_ARB+channel);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
}
}
}
Ok, nothing terrific! I'm using glDrawArrays because my prims are triangle strips, and I think I'm using
all the glArrayObjectATI stuff correctly…
So what? Well, nothing is displayed! What I suspect is that the VAO mechanism has problems with my vertex data structure —
I mean, when the structure is not using FLOAT values (the most common approach in sample code etc.), it doesn't work…
Anyway, i have also modified the simpleVAO sample from ATI to use SHORT values for coords and it seems that the
same problem occurs. :)=
Anybody has noticed this previously? i’m using latest beta drivers from ATIDevrel…
thx!
ad!:=) cool ‘gallery’ to look at for coffee &| cigarette time -> http://www.orkysquad.org/main.php