0
votes

I am currently trying to render textured objects in OpenGL. Everything worked fine until I wanted to render a texture with transparency. Instead of showing the object as transparent, it just rendered totally black.

The method for loading the texture file is this:

//  structures for reading and information variables
char magic[4];
unsigned char header[124];
unsigned int width, height, linearSize, mipMapCount, fourCC;
unsigned char* dataBuffer;
unsigned int bufferSize;

fstream file(path, ios::in|ios::binary);

//  read magic and header
if (!file.read((char*)magic, sizeof(magic))){
    cerr<< "File " << path << " not found!"<<endl;
    return false;
}

if (magic[0]!='D' || magic[1]!='D' || magic[2]!='S' || magic[3]!=' '){
    cerr<< "File does not comply with dds file format!"<<endl;
    return false;
}

if (!file.read((char*)header, sizeof(header))){
    cerr<< "Not able to read file information!"<<endl;
    return false;
}

//  derive information from header
height = *(int*)&(header[8]);
width = *(int*)&(header[12]);
linearSize = *(int*)&(header[16]);
mipMapCount = *(int*)&(header[24]);
fourCC = *(int*)&(header[80]);

//  determine dataBuffer size
bufferSize = mipMapCount > 1 ? linearSize * 2 : linearSize;
dataBuffer = new unsigned char [bufferSize*2];

//  read data and close file
if (file.read((char*)dataBuffer, bufferSize/1.5))
    cout<<"Loading texture "<<path<<" successful"<<endl;
else{
    cerr<<"Data of file "<<path<<" corrupted"<<endl;
    return false;
}

file.close();

//  check pixel format
unsigned int format;

switch(fourCC){
case FOURCC_DXT1:
    format = GL_COMPRESSED_RGBA_S3TC_DXT1_EXT;
    break;
case FOURCC_DXT3:
    format = GL_COMPRESSED_RGBA_S3TC_DXT3_EXT;
    break;
case FOURCC_DXT5:
    format = GL_COMPRESSED_RGBA_S3TC_DXT5_EXT;
    break;
default:
    cerr << "Compression type not supported or corrupted!" << endl;
    return false;
}

glGenTextures(1, &ID);


glBindTexture(GL_TEXTURE_2D, ID);
glPixelStorei(GL_UNPACK_ALIGNMENT,1);

unsigned int blockSize = (format == GL_COMPRESSED_RGBA_S3TC_DXT1_EXT) ? 8 : 16;
unsigned int offset = 0;

/* load the mipmaps */
for (unsigned int level = 0; level < mipMapCount && (width || height); ++level) {
    unsigned int size = ((width+3)/4)*((height+3)/4)*blockSize;
    glCompressedTexImage2D(GL_TEXTURE_2D, level, format, width, height,
                        0, size, dataBuffer + offset);

    offset += size;
    width  /= 2;
    height /= 2;
}

textureType = DDS_TEXTURE;

return true;

In the fragment shader I just set the gl_FragColor = texture2D( myTextureSampler, UVcoords )

I hope that there is an easy explanation, such as some missing code. In the OpenGL initialization I enabled GL_BLEND and set a blend function.

Does anyone have an idea of what I did wrong?

1

1 Answer

2
votes
  1. Make sure the blend function is the correct function for what you are trying to accomplish. For what you've described that should be glBlendFunc(GL_SRC_ALPHA,GL_ONE_MINUS_SRC_ALPHA);

  2. You probably shouldn't set the blend function in your openGL initialization function but should wrap it around your draw calls like:

    glEnable(GL_BLEND)
    glBlendFunc(GL_SRC_ALPHA,GL_ONE_MINUS_SRC_ALPHA);
    
    //gl draw functions (glDrawArrays,glDrawElements,etc..)
    
    glDisable(GL_BLEND)
    
  3. Are you clearing the 2D texture binding before you swap buffers? i.e ...

    glBindTexture(GL_TEXTURE_2D, 0);