openGL Texturing Only Fragments of Texture When using VBOs

I am currently trying to use hardware acceleration to draw a textured quad. When I draw a quad using glBegin(GL_QUADS) ... glEnd() my quad draws fine. So I believe that I have loaded the texture properly, it just isn't texturing predictably when using VBOs. The texture I am testing with is called "ball.png," it is a blue circle that should be the first search result when googling "ball.png."

When the following program is compiled and run only small fragments of the ball is drawn. Often it will draw blue colors only on the corners right by the vertices which isn't even where blue in the texture is.

To me it seams that the blend function or some other sort of GL setting I have applied is making "ball.png," draw unpredictably. Else, it could be something to do with how openGL is calculating normals. In either case I am not very familiar with messing with those settings. If anyone could advise me which part to play with to make this code work, it would be greatly appreciated.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
#include <GL/glew.h>
#include <GL/glut.h>

#include "SDL/SDL.h"
#include "SDL/SDL_image.h"

//Screen attributes
const int SCREEN_WIDTH = 640;
const int SCREEN_HEIGHT = 480;
const int SCREEN_BPP = 32;
const char WINDOW_TITLE[] = "VBOs Example";

//The frame rate
const int FRAMES_PER_SECOND = 60;

//Window
SDL_Surface *window;
SDL_Event event;

//IDs to reference array data stored by GL.
GLuint textureDataName, textureUVname, positionName;

static const GLsizei VertexCount = 4;

static const GLsizeiptr PositionSize = VertexCount * 3 * sizeof(GLfloat);
static const GLfloat PositionData[] =
{
  -1.0f,-1.0f, 0.0f,
  1.0f,-1.0f, 0.0f,
  1.0f, 1.0f, 0.0f,
  -1.0f, 1.0f, 0.0f
};

static const GLsizeiptr TexSize = VertexCount * 2 * sizeof(GLubyte);
static const GLint TexData[] =
{
  0,0,
  1,0,
  1,1,
  0,1
};

bool init_GL()
{
  glEnable( GL_TEXTURE_2D );

  //Tell GL to apply alpha channels when texturing.
  glEnable( GL_BLEND );
  glBlendFunc( GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA );

  //Arrays are needed for hardware acceleration.
  glEnableClientState(GL_VERTEX_ARRAY);
  glEnableClientState(GL_TEXTURE_COORD_ARRAY);

  glClearColor(0.0f, 0.0f, 0.0f, 1.0f);

  //I don't think this effects the current scene but is useful later.
  glShadeModel(GL_SMOOTH);

  glMatrixMode(GL_PROJECTION);
  glLoadIdentity();
  //set camera position.
  gluPerspective(45.0f, ((float)SCREEN_WIDTH)/((float)SCREEN_HEIGHT), 0.1f, 100.0f);

  glMatrixMode(GL_MODELVIEW);
  glLoadIdentity();
  //move camera back a bit so scene origin is in view.
  glTranslatef(0.0f, 0.0f,-4.0f);

  //Error check gl settings
  if( glGetError() != GL_NO_ERROR )
    return false;

  return true;
}

bool init()
{
  //Initialize SDL
  if( SDL_Init( SDL_INIT_EVERYTHING ) != 0 )
    return false;

  //Pop open window
  SDL_WM_SetCaption( WINDOW_TITLE, NULL );
  window = SDL_SetVideoMode( SCREEN_WIDTH, SCREEN_HEIGHT,
			     SCREEN_BPP, SDL_OPENGL | SDL_DOUBLEBUF);

  //glew will be needed for hardware acceleration.
  if( glewInit()  != GLEW_OK)
    return false;
  if( init_GL() == false ) //apply GL settings
    return false;

  //when all initialization is done, Error check.
  if( window == NULL )
    return false;

  return true;
}

void initScene()
{
  //generate texture.
  glGenTextures( 1, &textureDataName );
  glBindTexture(GL_TEXTURE_2D, textureDataName);

  //texture storage preferences.
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
  glPixelStorei(GL_UNPACK_ALIGNMENT, 1);

  SDL_Surface *s = IMG_Load("ball.png");
  glTexImage2D(GL_TEXTURE_2D, 0, 4, s->w, s->h, 0, GL_RGBA, GL_UNSIGNED_BYTE, s->pixels);
  SDL_FreeSurface(s);//surface not needed. glTexImage2D stored it in RAM.

  //generate and store quad into RAM.
  glGenBuffers(1, &positionName);
  glGenBuffers(1, &textureUVname);

  glBindBuffer(GL_ARRAY_BUFFER, positionName);
  glBufferData(GL_ARRAY_BUFFER, PositionSize, PositionData, GL_STREAM_DRAW);
  glVertexPointer(3, GL_FLOAT, 0, 0);

  glBindBuffer(GL_ARRAY_BUFFER, textureUVname);
  glBufferData(GL_ARRAY_BUFFER, TexSize, TexData, GL_STREAM_DRAW);
  glTexCoordPointer(2, GL_INT, 0, 0);
}

int main(int argc, char* argv[])
{
  if ( init() == false )
    return 1;

  //main loop setup
  bool quit = false;
  int frameTicks;
  initScene();

  //main loop
  while( quit == false )
    {
      //Start the frame timer
      frameTicks = SDL_GetTicks();

      //User Input:
      while( SDL_PollEvent( &event ) )
	{
	  if ( event.type == SDL_QUIT )
	    quit = true;
	  if ( event.type == SDL_KEYUP && event.key.keysym.sym == SDLK_ESCAPE )
	    quit = false;
	}

      //TODO:physics.

      //draw
      glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
      glDrawArrays(GL_QUADS, 0, VertexCount);
      SDL_GL_SwapBuffers();

      //Cap the frame rate
      frameTicks = SDL_GetTicks() - frameTicks;
      if( frameTicks < 1000 / FRAMES_PER_SECOND )
        {
	  SDL_Delay( ( 1000 / FRAMES_PER_SECOND ) - frameTicks );
        }
    }

  //de-allocate memory
  glDeleteBuffers(1, &positionName);
  glDeleteBuffers(1, &textureUVname);
  glDeleteTextures(1, &textureDataName);
  SDL_FreeSurface( window );
  SDL_Quit();

  return 0;
}
Last edited on
As I said in the last post if I replace the line "glDrawArrays(GL_QUADS, 0, VertexCount);" with:

1
2
3
4
5
6
7
8
9
10
11
12
13
14
  glBegin( GL_QUADS );
  //top left
  glTexCoord2d( 0, 0 );
  glVertex3f(  -1.0f,-1.0f, 0.0f );
  //top right
  glTexCoord2d( 1, 0 );
  glVertex3f( 1.0f,-1.0f, 0.0f );
  //bottom right
  glTexCoord2d( 1, 1 );
  glVertex3f( 1.0f, 1.0f, 0.0f );
  //bottom left
  glTexCoord2d( 0, 1 );
  glVertex3f( -1.0f, 1.0f, 0.0f );
  glEnd();


The quad draws as I would like it to. This doesn't satisfy me as I am trying to use hardware acceleration. Note, the vertices used here are the same value and order as the vertices in the vertex array.
Last edited on
I am still having troubles with this. I haven't a clue what part of this clue is going wrong. Here I've added a colour array to further demonstrate that there is a quad in view even with a glDrawArrays function. If you comment out the glEnable(GL_TEXTURE_2D); you will see a rather psychedelic quad. BUT IT SILL WONT TEXTURE!! I am wondering if the reason I haven't gotten any replies could be because this forum isn't very graphic based. I've helped some people on the beginner section and Ive noticed people really jump on mathimaticall questions then disregard graphical questions.

If you know of a forum with more SDL and openGL adept users I would really like to know where to look to get some answers to this problem.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
#include <GL/glew.h> // Include the GLEW header file
#include <GL/glut.h>

#include "SDL/SDL.h"
#include "SDL/SDL_image.h"

//Screen attributes
const int SCREEN_WIDTH = 640;
const int SCREEN_HEIGHT = 480;
const int SCREEN_BPP = 32;
const char WINDOW_TITLE[] = "VBO example";

//The frame rate
const int FRAMES_PER_SECOND = 60;

SDL_Surface *window;
SDL_Event event;

//IDs to reference array data stored by GL.
GLuint textureDataName, textureUVname, positionName, colorName;

static const GLsizei VertexCount = 6;

static const GLsizeiptr PositionSize = VertexCount * 3 * sizeof(GLfloat);
static const GLfloat PositionData[] =
{
  -1.0f,-1.0f, 0.0f,
   1.0f,-1.0f, 0.0f,
   1.0f, 1.0f, 0.0f,
   1.0f, 1.0f, 0.0f,
  -1.0f, 1.0f, 0.0f,
  -1.0f,-1.0f, 0.0f
};

static const GLsizeiptr TexSize = VertexCount * 2 * sizeof(GLubyte);
static const GLint TexData[] =
{
  0,0,
  1,0,
  1,1,
  1,1,
  0,1,
  0,0
};

static const GLsizeiptr ColorSize = VertexCount * 3 * sizeof(GLubyte);
static const GLubyte ColorData[] =
{
	255,   0,   0,
	255, 255,   0,
	  0, 255,   0,
	  0, 255,   0,
	  0,   0, 255,
	255,   0,   0
};

int size = 256*256*4;
unsigned char *image = (unsigned char *) malloc(sizeof(unsigned char)*size);

bool init_GL()
{
  glEnable( GL_TEXTURE_2D );
  //glEnable( GL_DEPTH_TEST );
  //glEnable( GL_NORMALIZE );

  //Tell GL to apply alpha channels when texturing.
  glEnable( GL_BLEND );
  glBlendFunc( GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA );

  //Arrays are needed for hardware acceleration.
  glEnableClientState(GL_VERTEX_ARRAY);
  glEnableClientState(GL_TEXTURE_COORD_ARRAY);
  glEnableClientState(GL_COLOR_ARRAY);

  glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
  glColor4f(1.0f, 1.0f, 1.0f, 1.0f);

  //I don't think this effects the current scene but is useful later.
  glShadeModel(GL_SMOOTH);

  glMatrixMode(GL_PROJECTION);
  glLoadIdentity();
  float screen_ratio;
  screen_ratio = ((float)SCREEN_WIDTH)/((float)SCREEN_HEIGHT);
  //set camera position.
  gluPerspective(45.0f, screen_ratio, 0.1f, 100.0f);

  glMatrixMode(GL_MODELVIEW);
  glLoadIdentity();
  //move camera back a bit so scene origin is in view.
  glTranslatef(0.0f, 0.0f,-4.0f);

  //Error check gl settings
  if( glGetError() != GL_NO_ERROR )
    return false;

  return true;
}

bool init()
{
  //Initialize SDL
  if( SDL_Init( SDL_INIT_EVERYTHING ) != 0 )
    return false;

  //Pop open window
  SDL_WM_SetCaption( WINDOW_TITLE, NULL );
  window = SDL_SetVideoMode( SCREEN_WIDTH, SCREEN_HEIGHT,
			     SCREEN_BPP, SDL_OPENGL | SDL_DOUBLEBUF);

  //glew will be needed for hardware acceleration.
  if( glewInit()  != GLEW_OK)
    return false;
  if( init_GL() == false ) //apply GL settings
    return false;

  //when all initialization is done, Error check.
  if( window == NULL )
    return false;

  return true;
}

void packTexture(GLsizei W, GLsizei H, GLuint Name, GLvoid *data)
{
	  //generate texture.
  glBindTexture(GL_TEXTURE_2D, textureDataName);

  //texture storage preferences.
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);

  glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
  glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, W, H,
			   0, GL_RGBA, GL_UNSIGNED_BYTE, data);
}

void initScene()
{
  glGenTextures( 1, &textureDataName );
  //for (int i=0; i<size; i=i+1)
	//image[i] = (i/256);  //create the texture
  //packTexture(256, 256, textureDataName, image);
  SDL_Surface *s = IMG_Load("ball.png");
  packTexture(s->w, s->h, textureDataName, s->pixels);
  SDL_FreeSurface(s);//surface not needed. glTexImage2D stored it in RAM.

  //generate and store quad into RAM.
  glGenBuffers(1, &positionName);
  glGenBuffers(1, &textureUVname);
  glGenBuffers(1, &colorName);

  glBindBuffer(GL_ARRAY_BUFFER, colorName);
  glBufferData(GL_ARRAY_BUFFER, ColorSize, ColorData, GL_STREAM_DRAW);
  glColorPointer(3, GL_UNSIGNED_BYTE, 0, 0);

  glBindBuffer(GL_ARRAY_BUFFER, positionName);
  glBufferData(GL_ARRAY_BUFFER, PositionSize, PositionData, GL_STREAM_DRAW);
  glVertexPointer(3, GL_FLOAT, 0, 0);

  glBindBuffer(GL_ARRAY_BUFFER, textureUVname);
  glBufferData(GL_ARRAY_BUFFER, TexSize, TexData, GL_STREAM_DRAW);
  glTexCoordPointer(2, GL_INT, 0, 0);
}

int main(int argc, char* argv[])
{
  if ( init() == false )
    return 1;

  //main loop setup
  bool quit = false;
  int frameTicks;
  initScene();
	  //double x1 = -1, x2 = 1;
  //double y1 = -1, y2 = 1;

  //main loop
  while( quit == false )
    {
      //Start the frame timer
      frameTicks = SDL_GetTicks();

      //User Input:
      while( SDL_PollEvent( &event ) )
	    {
	      if ( event.type == SDL_QUIT )
	        quit = true;
	      if ( event.type == SDL_KEYUP &&
		    event.key.keysym.sym == SDLK_ESCAPE )
		  quit = false;
	    }

    //TODO:physics.
	//glRotatef(1,0,0,1);

      //draw
      glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
      glDrawArrays(GL_TRIANGLES, 0, 6);
      SDL_GL_SwapBuffers();

      //Cap the frame rate
      frameTicks = SDL_GetTicks() - frameTicks;
      if( frameTicks < 1000 / FRAMES_PER_SECOND )
        {
	  SDL_Delay( ( 1000 / FRAMES_PER_SECOND ) - frameTicks );
        }
    }

  //de-allocate memory
  glDeleteBuffers(1, &positionName);
  glDeleteBuffers(1, &textureUVname);
  glDeleteTextures(1, &textureDataName);
  glDeleteTextures(1, &colorName);
  SDL_FreeSurface( window );
  SDL_Quit();

  return 0;
}
This: static const GLsizeiptr TexSize = VertexCount * 2 * sizeof(GLubyte);

is wrong. It should be:

static const GLsizeiptr TexSize = VertexCount * 2 * sizeof(GLint);

You may also want to try GLfloat for your texture coordinate type.

If you know of a forum with more SDL and openGL adept users I would really like to know where to look to get some answers to this problem.

I'm a graphics programmer so I try to answer any graphics question that I can/notice here. You can also try OpenGL.org or Gamedev.net.
Last edited on
Thank you so much! That did the trick, I am humbled. Also, I will take your recommendation and use floats as I have seen it widely used in tutorials and I'm assuming openGL will convert my ints to floats anyway. Thanks again.
Topic archived. No new replies allowed.