lcdavis13
05-06-2014, 11:23 AM
Hi folks,
I'm trying to write a matrix class for use in a matrix stack that I will apply to vertices before sending them to a VBO. But I'm a little confused by row-major versus column-major order. This is my matrix class so far, but it doesn't seem to be applying any translations to the vertices I send it.
Please help me find my bug (it's probably a silly mistake on my part), and any friendly advice about implementing a matrix stack for GL is much appreciated. Thanks new friends!
class Matrix
{
public:
//---Construction
Matrix()
{
identify();
}
Matrix(float m[4][4])
{
for (int y = 0; y < 4; y++)
for (int x = 0; x < 4; x++)
matrix[y][x] = m[y][x];
}
//---Arithmetic
const Matrix operator * (const Matrix& m) const
{
float result[4][4];
for (int y = 0; y < 4; y++)
for (int x = 0; x < 4; x++)
{
result[y][x] = 0.f;
for (int z = 0; z < 4; z++)
result[y][x] += matrix[y][z]*m.matrix[z][x];
}
return Matrix(result);
}
//---Transformations
Matrix& identify()
{
memset(matrix, 0.f, 4*4*sizeof(float));
for (int i = 0; i < 4; i++)
matrix[i][i] = 1.f;
return *this;
}
Matrix& translate(float tx, float ty, float tz)
{
Matrix other;
other.matrix[0][4] = tx;
other.matrix[1][4] = ty;
other.matrix[2][4] = tz;
*this = (*this)*other;
return *this;
}
//---Apply to vector
Matrix& transform(float vector[4])
{
float result[4];
//Calculate result
for (int y = 0; y < 4; y++)
{
result[y] = 0.f;
for (int z = 0; z < 4; z++)
result[y] += matrix[y][z]*vector[z];
}
//Copy result into vector
for (int y = 0; y < 4; y++)
vector[y] = result[y];
return *this;
}
private:
float matrix[4][4];
};
The test I'm running is this:
float vector[4] = {0.f, 0.f, 0.f, 1.f};
matrix.identify().translate(0.f, 1.f, 0.f).transform(vector);
printf("%f, %f, %f, %f\n", vector[0], vector[1], vector[2], vector[3]);
I'm trying to write a matrix class for use in a matrix stack that I will apply to vertices before sending them to a VBO. But I'm a little confused by row-major versus column-major order. This is my matrix class so far, but it doesn't seem to be applying any translations to the vertices I send it.
Please help me find my bug (it's probably a silly mistake on my part), and any friendly advice about implementing a matrix stack for GL is much appreciated. Thanks new friends!
class Matrix
{
public:
//---Construction
Matrix()
{
identify();
}
Matrix(float m[4][4])
{
for (int y = 0; y < 4; y++)
for (int x = 0; x < 4; x++)
matrix[y][x] = m[y][x];
}
//---Arithmetic
const Matrix operator * (const Matrix& m) const
{
float result[4][4];
for (int y = 0; y < 4; y++)
for (int x = 0; x < 4; x++)
{
result[y][x] = 0.f;
for (int z = 0; z < 4; z++)
result[y][x] += matrix[y][z]*m.matrix[z][x];
}
return Matrix(result);
}
//---Transformations
Matrix& identify()
{
memset(matrix, 0.f, 4*4*sizeof(float));
for (int i = 0; i < 4; i++)
matrix[i][i] = 1.f;
return *this;
}
Matrix& translate(float tx, float ty, float tz)
{
Matrix other;
other.matrix[0][4] = tx;
other.matrix[1][4] = ty;
other.matrix[2][4] = tz;
*this = (*this)*other;
return *this;
}
//---Apply to vector
Matrix& transform(float vector[4])
{
float result[4];
//Calculate result
for (int y = 0; y < 4; y++)
{
result[y] = 0.f;
for (int z = 0; z < 4; z++)
result[y] += matrix[y][z]*vector[z];
}
//Copy result into vector
for (int y = 0; y < 4; y++)
vector[y] = result[y];
return *this;
}
private:
float matrix[4][4];
};
The test I'm running is this:
float vector[4] = {0.f, 0.f, 0.f, 1.f};
matrix.identify().translate(0.f, 1.f, 0.f).transform(vector);
printf("%f, %f, %f, %f\n", vector[0], vector[1], vector[2], vector[3]);