I know this thread is more than three months old, but I have some code that can benefit anyone who comes by: converting a BMP (RGB/BGR) into (RGBA/BGRA) using SDL_SetColorKey and standard OpenGL calls. (Tip: if you use Photoshop or any other editor, make sure to turn off anti-aliasing on tools like fill and magic wand, otherwise the edge pixels won't exactly match the color key and won't become transparent.)
First, make sure blending is enabled and the blend function is set in your GL initialization:
Code: Select all
glEnable (GL_BLEND);
glBlendFunc (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
Next is my image header (change the #includes to fit your OS/compiler):
Code: Select all
#ifndef IMAGE_H
#define IMAGE_H

#include <GLUT/glut.h>
#include <SDL/SDL.h>
#include <string>

using std::string;

class GLImage
{
public:
    GLImage(void);
    ~GLImage(void);

    GLuint texture;
    GLuint width, height;
    string file;
};

bool GL_LoadImage(const char* file, GLImage& img, bool alpha = false);

#endif
and the .cpp:
Code: Select all
#include "Image.h"
GLImage::GLImage(void)
:texture(0),
width(0),
height(0)
{}
GLImage::~GLImage(void)
{
glDeleteTextures( 1, &texture );
}
bool GL_LoadImage(const char* file, GLImage& img, bool alpha)
{
SDL_Surface *tmp = NULL, *newImage = NULL;
GLenum texture_format=0;
tmp = SDL_LoadBMP(file);
img.width = tmp->w;
img.height = tmp->h;
img.file = file;
if (!tmp) {
fprintf(stderr, "Error: '%s' could not be opened: %s\n", file, SDL_GetError());
return false;
}
//get number of channels in the SDL surface, needed for OpenGL
int nofcolors=tmp->format->BytesPerPixel;
//contains an alpha channel
/*if(nofcolors==4)
{
if(tmp->format->Rmask==0x000000ff)
texture_format=GL_RGBA;
else
texture_format=GL_BGRA;
}
else*/ if(nofcolors==3) //no alpha channel
{
if(tmp->format->Rmask==0x000000ff)
{
texture_format=GL_RGB;
}
else
{
texture_format=GL_BGR;
}
}
else
{
fprintf(stderr,"Error: '%s' could not be opened %d\n", file,nofcolors);
SDL_FreeSurface(tmp);
return false;
}
if (alpha)
{
// set the color key, works on BGR or RGB
if(SDL_SetColorKey(tmp, SDL_SRCCOLORKEY | SDL_RLEACCEL, SDL_MapRGB(tmp->format, 255, 0, 255)) == -1)
fprintf(stderr, "Warning: colorkey will not be used, reason: %s\n", SDL_GetError());
// create new image with alpha
newImage = SDL_DisplayFormatAlpha(tmp);
// make sure it was loaded correctly
if(!newImage)
{
fprintf(stderr, "Warning: Couldn't add alpha layer to %s, reason: %s\n", file, SDL_GetError());
}
}
// Have OpenGL generate a texture object handle for us
glGenTextures( 1, &img.texture );
// Bind the texture
glBindTexture( GL_TEXTURE_2D, img.texture );
// Set the texture's stretching properties
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
// if not alpha or previous attempt at alpha failed
if(!alpha || !newImage)
{
// load 24 bit image data
glTexImage2D( GL_TEXTURE_2D, 0, 3, tmp->w, tmp->h, 0,
texture_format, GL_UNSIGNED_BYTE, tmp->pixels );
}
else
{
// load 32 bit image data
glTexImage2D( GL_TEXTURE_2D, 0, 4, newImage->w, newImage->h, 0,
++texture_format, GL_UNSIGNED_BYTE, newImage->pixels );
}
SDL_FreeSurface(newImage);
SDL_FreeSurface(tmp);
return true;
}
With this, you can put the following in your main:
Code: Select all
GLImage img;
GL_LoadImage("Image.bmp",img,true);
I have tested this code, but if you have any improvements, go ahead and tell me. I'm using it to load 2D sprite maps and other images in my game.
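For anyone wondering how to put a loaded image on screen, here's a minimal sketch of drawing it as a blended quad in immediate mode. The function name GL_DrawImage is just my example (it isn't part of the loader above), and it assumes you've already called glEnable(GL_TEXTURE_2D) and set up an orthographic 2D projection.
Code: Select all
// Draw a loaded GLImage at (x, y) as a textured quad.
// Color-keyed pixels have alpha = 0, so with GL_BLEND enabled they come out transparent.
void GL_DrawImage(const GLImage& img, float x, float y)
{
    glBindTexture(GL_TEXTURE_2D, img.texture);
    glBegin(GL_QUADS);
        glTexCoord2f(0.0f, 0.0f); glVertex2f(x,             y);
        glTexCoord2f(1.0f, 0.0f); glVertex2f(x + img.width, y);
        glTexCoord2f(1.0f, 1.0f); glVertex2f(x + img.width, y + img.height);
        glTexCoord2f(0.0f, 1.0f); glVertex2f(x,             y + img.height);
    glEnd();
}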
OpenGL: 1.2
SDL: 1.2.14
You can take away my Windows, I'll be my own FreeBSD.