Problem - DXT1 textures not displaying

[RESOLVED]

Forgot to generate a sampler… Guess I could have waited a couple of hours before posting this…

[STRIKE]*Problem:
Model with DXT1 compressed texture appears black.

*Background info:
I made two programs: one converts a Wavefront OBJ file, its MTL file and the associated DDS texture into a single file of my own specification.
The second one is meant to be used as a viewer for those “myfiles”. This is the program with which I have the problem.

*Full code of the “viewer”(it was meant as a simple quickie to display one model so it’s all-in-one and not-so-pretty):
-Program

#include <GL/glew.h>
#include <SFML/Window.hpp>
#include <fstream>
#include <iostream>
#include <string>
#include <vector>

int main(int argc,char** argv)
{
    if(argc!=2) return -1;
    //Create window, context, initialize glew.
    sf::Window screen(sf::VideoMode(500,500),"DigiViewer",sf::Style::Titlebar|sf::Style::Close);
    glewInit();

    //Helpful variables.
    bool running(true);
    sf::Event event;
    std::string linebuffer("");
    sf::Clock timer;
    sf::Time frameduration;

    //OpenGL stuff setup.
    glClearColor(0.15f,0.15f,0.15f,1.0f);
    glEnable(GL_DEPTH_TEST);

    //Load and compile vertex shader.
    std::string vshadercode("");
    std::ifstream vshaderfile("v.glsl");
    while(getline(vshaderfile,linebuffer)) vshadercode+=linebuffer+"
";
    vshaderfile.close();
    const char* vshadercode0=vshadercode.c_str();
    unsigned int vshader=glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(vshader,1,&vshadercode0,NULL);
    glCompileShader(vshader);

    //Load and compile fragment shader.
    std::string fshadercode("");
    std::ifstream fshaderfile("f.glsl");
    while(getline(fshaderfile,linebuffer)) fshadercode+=linebuffer+"
";
    fshaderfile.close();
    const char* fshadercode0=fshadercode.c_str();
    unsigned int fshader=glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(fshader,1,&fshadercode0,NULL);
    glCompileShader(fshader);

    //Link program.
    unsigned int program=glCreateProgram();
    glAttachShader(program,vshader);
    glAttachShader(program,fshader);
    glLinkProgram(program);
    glDeleteShader(fshader);
    glDeleteShader(vshader);

    //VAO creation, binding. Setting tex. unit 0 as active unit.
    unsigned int vertexarray(0);
    glGenVertexArrays(1,&vertexarray);
    glBindVertexArray(vertexarray);
    glActiveTexture(GL_TEXTURE0);

    /*Load myfile. Spec:
    6 bytes - magic number MYFILE(don't bother checking it)
    2 bytes unsigned short - number of vertices
    2 bytes unsigned short - number of indices
    2 bytes unsigned short - texture width=height in texels(texture is square)
    4 bytes unsigned int - texture linear size
    number of vertices * 5(xyzuv) *4(float) bytes - table of vertices
    number of indices * 2(short) bytes - table of indices
    size bytes - texture data*/

    unsigned short verticescount(0);
    unsigned short indicescount(0);
    unsigned short texturedimensions(0);
    unsigned int linearsize(0);

    std::ifstream model(argv[1],std::ios_base::in|std::ios_base::binary);
    model.seekg(6,std::ios::beg);
    model.read((char*)&verticescount,2);
    model.read((char*)&indicescount,2);
    model.read((char*)&texturedimensions,2);
    model.read((char*)&linearsize,4);
    float* vertices=new float[verticescount*5];
    model.read((char*)vertices,verticescount*20);
    unsigned short* indices=new unsigned short[indicescount];
    model.read((char*)indices,indicescount*2);
    unsigned char* texturedata=new unsigned char[linearsize];
    model.read((char*)texturedata,linearsize);
    model.close();

    /////////////////////REMOVE////////////////////
    std::ofstream test0("test.txt");
    test0.close();
    std::ofstream test("test.txt",std::ios_base::out|std::ios_base::app);
    test<<"Vertices count: "<<verticescount<<"
";
    test<<"Indices count: "<<indicescount<<" ("<<indicescount/3<<" faces)
";
    test<<"Texture dimensions: "<<texturedimensions<<"x"<<texturedimensions<<"
";
    test<<"
Vertices:
";
    for(unsigned short i(0);i<verticescount;i++) test<<i<<" x:"<<vertices[(i*5)]<<" y:"<<vertices[(i*5)+1]<<" z:"<<vertices[(i*5)+2]<<" u:"<<vertices[(i*5)+3]<<" v:"<<vertices[(i*5)+4]<<"
";
    test<<"
Indices:
";
    for(unsigned short i(0);i<indicescount;i++)
    {
        if(i%3==0) test<<"f: ";
        test<<indices[i]<<" ";
        if(i%3==2) test<<"
";
    }
    test.close();
    ////////////////////////////////////////////////

    //VBO creation, binding, buffering in data for vertices(x,y,z,u,v in floats, positions to 0, texcoords to 1)
    unsigned int vertexbuffer(0);
    glGenBuffers(1,&vertexbuffer);
    glBindBuffer(GL_ARRAY_BUFFER,vertexbuffer);
    glBufferData(GL_ARRAY_BUFFER,verticescount*20,vertices,GL_STATIC_DRAW);
    glVertexAttribPointer(0,3,GL_FLOAT,GL_FALSE,20,(const void*)0);
    glVertexAttribPointer(1,2,GL_FLOAT,GL_FALSE,20,(const void*)12);

    //VBO creation, binding, buffering data for indices.
    unsigned int indexbuffer(0);
    glGenBuffers(1,&indexbuffer);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER,indexbuffer);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER,indicescount*2,indices,GL_STATIC_DRAW);

    //Texture creation, binding, buffering image data.
    //Only width=height and size are variables, other attributes of image(mipmap,compression) are const across any potential loaded images as I make them myself.
    unsigned int texture(0);
    glGenTextures(1,&texture);
    glBindTexture(GL_TEXTURE_2D,texture);
    glCompressedTexImage2D(GL_TEXTURE_2D,0,GL_COMPRESSED_RGB_S3TC_DXT1_EXT,texturedimensions,texturedimensions,0,linearsize,texturedata);

    //Creating uniform for the texture at unit 0.
    unsigned int mysampler=glGetUniformLocation(program,"mysampler");
    glUniform1i(mysampler,0);

    //Enable and use everything.
    glUseProgram(program);
    glEnableVertexAttribArray(0);
    glEnableVertexAttribArray(1);

    do
    {
        //Some stuff to do each frame.
        frameduration=timer.restart();
        while(screen.pollEvent(event)) if(event.type==sf::Event::Closed) running=false;
        glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);

        //Draw the current element buffer and show it.
        glDrawElements(GL_TRIANGLES,indicescount,GL_UNSIGNED_SHORT,0);
        screen.display();
    }
    while(running);

    //Get rid of OGL objects.
    glDisableVertexAttribArray(0);
    glDisableVertexAttribArray(1);
    glDeleteTextures(1,&texture);
    glDeleteBuffers(1,&indexbuffer);
    glDeleteBuffers(1,&vertexbuffer);
    glDeleteVertexArrays(1,&vertexarray);

    //Bye bye.
    screen.close();

    return 0;
}

-V. shader

#version 330 core

//Per-vertex position (x,y,z) read from attribute slot 0.
layout(location=0) in vec3 vertexposition;
//Per-vertex texture coordinate (u,v) read from attribute slot 1.
layout(location=1) in vec2 vertexuv;
//Texture coordinate interpolated across the triangle for the fragment shader.
out vec2 fragmentuv;

void main()
{
    //Forward the UV untouched; the fragment shader does the sampling.
    fragmentuv = vertexuv;
    //Positions are already in clip space, so w=1 and no transform is applied.
    gl_Position = vec4(vertexposition, 1.0);
}

-F. shader

#version 330 core

//Our sampler from loaded image (bound to texture unit 0 by the application).
uniform sampler2D mysampler;
//Interpolated texture coordinate from the vertex shader.
in vec2 fragmentuv;
//Final fragment color.
out vec3 color;

void main()
{
    //FIX: texture2D() was removed from GLSL core profiles as of #version 330 core;
    //the overloaded texture() builtin must be used instead.
    color=texture(mysampler,fragmentuv).rgb;
}

[HR][/HR]
So this is what I do texturewise:
I’m setting texture unit 0 as an active one

glActiveTexture(GL_TEXTURE0);

I’m creating and binding a texture

unsigned int texture(0);
glGenTextures(1,&texture);
glBindTexture(GL_TEXTURE_2D,texture);

I’m sending all the texture data: texturedimensions is the width/height, linearsize is the linear size of the compressed data, and texturedata is the raw image data — all taken from the DDS file.

glCompressedTexImage2D(GL_TEXTURE_2D,0,GL_COMPRESSED_RGB_S3TC_DXT1_EXT,texturedimensions,texturedimensions,0,linearsize,texturedata);

I’m getting uniform id and binding it to tex. unit 0 - the currently bound texture

unsigned int mysampler=glGetUniformLocation(program,"mysampler");
glUniform1i(mysampler,0);

In my fragment shader I get the sampler uniform

uniform sampler2D mysampler;

And use it to sample colors for the fragments

color=texture2D(mysampler,fragmentuv).rgb;

At this point I’m pretty sure that’s everything I need. But the texture does not appear, the model is black. I checked and both shaders compile without problem, the program links OK too, there are no errors after glCompressedTexImage2D, my graphics card supports EXT_texture_compression_s3tc so I don’t know why it doesn’t work (let’s word it better - I don’t see it yet, but I spent 48 hours looking at this code). I also thought that the first program, the converting one, might be saving the raw DXT1 image data wrongly, but I really doubt it (I can provide its code as well if needed).

I would be really grateful if someone could enlighten me and point out the problem. Thank you in advance.[/STRIKE]