I was going along just fine working on an OpenGL ES 2.0 application until I tested it on an older phone that doesn't support VAOs, and now I seem to have fallen into a marsh.
I started using OpenGL after VAOs were more or less standard and everywhere, so I never had to render without one. Now that I have to write code that supports that path, I am having some trouble.
vertex shader
attribute vec3 position;
attribute vec4 icolor;
varying vec4 fcolor;
void main()
{
gl_Position = vec4(position, 1.0);
fcolor = icolor;
}
fragment shader
precision mediump float;
varying vec4 fcolor;
void main (void)
{
gl_FragColor = fcolor;
}
application side of things
init code:
glGenBuffers(1, &verticesBuffer);
glBindBuffer(GL_ARRAY_BUFFER, verticesBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(rend2d->vertices), rend2d->vertices, GL_STATIC_DRAW);
glGenBuffers(1, &indicesBuffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indicesBuffer);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(rend2d->indices), rend2d->indices, GL_STATIC_DRAW);
glGenBuffers(1, &colorBuffer);
glBindBuffer(GL_ARRAY_BUFFER, colorBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(rend2d->colors), rend2d->colors, GL_STATIC_DRAW);
rendering code:
glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(sp);
GLint posLoc = glGetAttribLocation(sp, "position");
GLint colLoc = glGetAttribLocation(sp, "icolor");
glBindBuffer(GL_ARRAY_BUFFER, verticesBuffer);
glVertexAttribPointer(posLoc, 3, GL_FLOAT, GL_FALSE, 0, 0);
glBindBuffer(GL_ARRAY_BUFFER, colorBuffer);
glVertexAttribPointer(colLoc, 4, GL_FLOAT, GL_FALSE, 0, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indicesBuffer);
glDrawElements(GL_TRIANGLES, rend2d->vertexCount, GL_UNSIGNED_INT, 0);
My error might be glaringly obvious, but I just don't see which part I'm currently not doing correctly, and I'm hoping to get some help with semi-modern OpenGL. This is mainly to provide support for devices that run OpenGL ES 2.0 but do not support the GL_OES_vertex_array_object extension.
I wanted to post the answer myself because it was many little things that were wrong. First I'll post the data structure that I was using to hold my GL data.
typedef struct
{
GLuint vertexCount;
GLfloat vertices[12];
GLfloat colors[16];
GLuint indices[6];
GLfloat texCoords[8];
} renderable2d;
The first problem was here. As derhass pointed out on the IRC channel, OpenGL ES 2.0 doesn't support 32-bit indices, so the first step was to change that GLuint above to GLushort:
typedef struct
{
GLushort vertexCount; // I made this a short as well
GLfloat vertices[12];
GLfloat colors[16];
GLushort indices[6]; // a short instead of an int
GLfloat texCoords[8];
} renderable2d;
Once that part was fixed, I had to generate my buffers, bind them, put the data in them, and then unbind:
//bind n setup vertices
glGenBuffers(1, &verticesBuffer);
glBindBuffer(GL_ARRAY_BUFFER, verticesBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(rend2d->vertices), rend2d->vertices, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
//bind n setup colors
glGenBuffers(1, &colorBuffer);
glBindBuffer(GL_ARRAY_BUFFER, colorBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(rend2d->colors), rend2d->colors, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
//bind n setup indices
glGenBuffers(1, &indicesBuffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indicesBuffer);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(rend2d->indices), rend2d->indices, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0); // unbind the element buffer target here, not GL_ARRAY_BUFFER
And finally, on to the rendering code:
glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(sp);
glBindBuffer(GL_ARRAY_BUFFER, verticesBuffer);
glEnableVertexAttribArray(posLoc);
glVertexAttribPointer(posLoc, 3, GL_FLOAT, GL_FALSE, 0, 0);
glBindBuffer(GL_ARRAY_BUFFER, colorBuffer);
glEnableVertexAttribArray(colLoc);
glVertexAttribPointer(colLoc, 4, GL_FLOAT, GL_FALSE, 0, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indicesBuffer);
glDrawElements(GL_TRIANGLES, rend2d->vertexCount, GL_UNSIGNED_SHORT, 0);
After doing all that, I got things sorted out on both devices. Just for clarity: rend2d is just a textured quad, so rend2d->vertexCount = 6 (the number of indices drawn). With more complex models you'll get that count from wherever you load the model.
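For reference, here is a sketch of how that quad's data might be filled in; the values below are illustrative and not from my actual code:
renderable2d quad = {
6, // vertexCount: the number of indices handed to glDrawElements
{ -0.5f,-0.5f,0.0f, 0.5f,-0.5f,0.0f, 0.5f,0.5f,0.0f, -0.5f,0.5f,0.0f }, // vertices: 4 corners
{ 1,0,0,1, 0,1,0,1, 0,0,1,1, 1,1,1,1 }, // colors: one RGBA per corner
{ 0,1,2, 0,2,3 }, // indices: two triangles
{ 0,0, 1,0, 1,1, 0,1 } // texCoords
};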
1) GL_UNSIGNED_INT is not supported as an index type in core ES 2.0.
Your index type must be either GL_UNSIGNED_BYTE or GL_UNSIGNED_SHORT.
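(If you genuinely need 32-bit indices, the GL_OES_element_index_uint extension restores them on devices that expose it. A minimal runtime check, assuming <string.h> is included, might look like this:)
const char *exts = (const char *)glGetString(GL_EXTENSIONS);
if (exts && strstr(exts, "GL_OES_element_index_uint")) {
// only on such devices is GL_UNSIGNED_INT a legal index type for glDrawElements
}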
2) Treat these three calls as inseparable friends when you use a VBO without a VAO:
glBindBuffer();
glVertexAttribPointer();
glEnableVertexAttribArray(); // this is the one you never call
In init()
GLint posLoc = glGetAttribLocation(sp, "position");
GLint colLoc = glGetAttribLocation(sp, "icolor");
glGenBuffers(1, &verticesBuffer);
glBindBuffer(GL_ARRAY_BUFFER, verticesBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(rend2d->vertices), rend2d->vertices, GL_STATIC_DRAW);
glEnableVertexAttribArray(posLoc);
glVertexAttribPointer(posLoc, 3, GL_FLOAT, GL_FALSE, 0, 0);
glGenBuffers(1, &colorBuffer);
glBindBuffer(GL_ARRAY_BUFFER, colorBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(rend2d->colors), rend2d->colors, GL_STATIC_DRAW);
glEnableVertexAttribArray(colLoc);
glVertexAttribPointer(colLoc, 4, GL_FLOAT, GL_FALSE, 0, 0);
In rendering()
glBindBuffer(GL_ARRAY_BUFFER, verticesBuffer);
glEnableVertexAttribArray(posLoc);
glVertexAttribPointer(posLoc, 3, GL_FLOAT, GL_FALSE, 0, 0);
glBindBuffer(GL_ARRAY_BUFFER, colorBuffer);
glEnableVertexAttribArray(colLoc);
glVertexAttribPointer(colLoc, 4, GL_FLOAT, GL_FALSE, 0, 0);
3) Just don't use VAOs in ES 2.0; they are not part of the core spec. iOS is an exception in that it exposes them through the GL_OES_vertex_array_object extension, but many other devices do not.
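If you still want VAOs where they are available, remember the ES 2.0 entry points come from that extension, not from core GL. A sketch of loading them at runtime on Android; this assumes <EGL/egl.h>, <GLES2/gl2ext.h> and <string.h>, and is only an outline of the approach:
#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <string.h>
static PFNGLGENVERTEXARRAYSOESPROC glGenVertexArraysOES_fn;
static PFNGLBINDVERTEXARRAYOESPROC glBindVertexArrayOES_fn;
static int loadVAOExtension(void)
{
const char *exts = (const char *)glGetString(GL_EXTENSIONS);
if (!exts || !strstr(exts, "GL_OES_vertex_array_object"))
return 0; // not available: fall back to the plain VBO path above
glGenVertexArraysOES_fn = (PFNGLGENVERTEXARRAYSOESPROC)eglGetProcAddress("glGenVertexArraysOES");
glBindVertexArrayOES_fn = (PFNGLBINDVERTEXARRAYOESPROC)eglGetProcAddress("glBindVertexArrayOES");
return glGenVertexArraysOES_fn != NULL && glBindVertexArrayOES_fn != NULL;
}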
Is there any way to draw a mesh dynamically,
e.g., on a button press, send a new float array to an OpenGL ES 3.0 VBO?
Currently I draw both the rectangle and the triangle statically in native C++ and they are visible, but when I pass the triangle array from Java using JNI, I get this error:
call to OpenGL ES API with no current context (logged once per thread)
Native C++:
GLuint fvao, bvao;
GLuint VBO[2];
void setupBuffers() {
glGenVertexArrays(1,&bvao);
glBindVertexArray(bvao);
glGenBuffers(1, &VBO[0]);
glBindBuffer(GL_ARRAY_BUFFER, VBO[0]);
glBufferData(GL_ARRAY_BUFFER, sizeof(rect), rect, GL_STATIC_DRAW);
.
.
.
// Triangle --------------------------------------
glGenVertexArrays(1,&fvao);
glBindVertexArray(fvao);
glGenBuffers(1, &VBO[1]);
glBindBuffer(GL_ARRAY_BUFFER, VBO[1]);
glBufferData(GL_ARRAY_BUFFER, 0, nullptr, GL_DYNAMIC_DRAW);
glEnableVertexAttribArray(0); // pos
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 8 * sizeof(GLfloat), (GLvoid*)(0 * sizeof(GLfloat)));
glEnableVertexAttribArray(1); // color
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 8 * sizeof(GLfloat), (GLvoid*)(3 * sizeof(GLfloat)));
glEnableVertexAttribArray(2); // texture
glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, 8 * sizeof(GLfloat), (GLvoid*)(6 * sizeof(GLfloat)));
glBindVertexArray(0); //Unbind VAO
}
// UPDATE -----------------------
void updateBuffer(float triangle[]) {
glBindBuffer(GL_ARRAY_BUFFER, VBO[1]); //Bind array for OpenGL to use
glBufferData(GL_ARRAY_BUFFER, sizeof(triangle), triangle, GL_DYNAMIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
// update Buffer JNI
extern "C" JNIEXPORT void JNICALL
Java_com_test_nativetest_GL3JNILib_update(JNIEnv * env, jclass obj, jfloatArray triangle)
{
updateBuffer(triangle);
}
If anyone can tell me what I am missing or doing wrong, any help would be appreciated.
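Two hedged observations on the JNI side, since the question ends here: the "no current context" message usually means the GL call arrived on a thread that doesn't own the EGL context (e.g. a UI-thread button handler instead of GLSurfaceView.queueEvent() or the renderer's onDrawFrame()), and a jfloatArray is not a float*, so the elements must be copied out before native code can use them. A minimal sketch, assuming updateBuffer is changed to take an explicit byte size (sizeof(triangle) on an array parameter only measures a pointer):
extern "C" JNIEXPORT void JNICALL
Java_com_test_nativetest_GL3JNILib_update(JNIEnv *env, jclass obj, jfloatArray triangle)
{
jsize count = env->GetArrayLength(triangle);
jfloat *data = env->GetFloatArrayElements(triangle, nullptr);
// hypothetical signature: void updateBuffer(const float *data, size_t bytes);
updateBuffer(data, (size_t)count * sizeof(jfloat));
env->ReleaseFloatArrayElements(triangle, data, JNI_ABORT); // JNI_ABORT: we only read the data
}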
I'm building an Android app to render a 3D (Wavefront .obj) model. By using tinyobjloader, I can successfully load the model.
Code:
std::vector<glm::vec3> vertices;
std::vector<glm::vec2> uvs;
std::vector<glm::vec3> normals;
tinyobj::attrib_t attrib;
std::vector<tinyobj::shape_t> shapes;
std::vector<tinyobj::material_t> materials;
for(size_t s =0; s < shapes.size(); s++)
{
size_t index_offset = 0;
for(size_t f = 0; f < shapes[s].mesh.num_face_vertices.size(); f++)
{
int fv = shapes[s].mesh.num_face_vertices[f];
for(size_t v = 0; v < fv; v++)
{
tinyobj::index_t idx = shapes[s].mesh.indices[index_offset + v];
tinyobj::real_t vx = attrib.vertices[3*idx.vertex_index+0];
tinyobj::real_t vy = attrib.vertices[3*idx.vertex_index+1];
tinyobj::real_t vz = attrib.vertices[3*idx.vertex_index+2];
tinyobj::real_t nx = attrib.normals[3*idx.normal_index+0];
tinyobj::real_t ny = attrib.normals[3*idx.normal_index+1];
tinyobj::real_t nz = attrib.normals[3*idx.normal_index+2];
tinyobj::real_t ux = attrib.texcoords[2*idx.texcoord_index+0];
tinyobj::real_t uy = attrib.texcoords[2*idx.texcoord_index+1];
vertices.push_back(glm::vec3(vx,vy,vz));
normals.push_back(glm::vec3(nx,ny,nz));
uvs.push_back(glm::vec2(ux,uy));
}
index_offset += fv;
}
}
Because the original .obj file has mixed face formats, e.g.:
f 1/2/3 3/2/1 3/2/3
f 1/2/3 1/3/4 1/4/5 6/7/2
I used Blender's Triangulate with the 'Beauty' option to convert quads to triangles. But the rendered result is weird.
I have built two functions: initOpenGL() (run once) and render().
initOpenGL code:
glGenBuffers(1, &VBO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(glm::vec3), &vertices[0], GL_STATIC_DRAW);
glGenBuffers(1, &UVBO);
glBindBuffer(GL_ARRAY_BUFFER, UVBO);
glBufferData(GL_ARRAY_BUFFER, uvs.size() * sizeof(glm::vec2), &uvs[0], GL_STATIC_DRAW);
//Linking Vertex Attribute
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
//bind texture
glEnableVertexAttribArray(1);
glBindBuffer(GL_ARRAY_BUFFER, UVBO);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, (void*)0);
//load texture
//texture1
glGenTextures(1, &texture1);
glBindTexture(GL_TEXTURE_2D, texture1);
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE );
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
if(patternSrc1)
{
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, patWidth1, patHeight1, 0, GL_RGBA, GL_UNSIGNED_BYTE,
patternSrc1);
//glGenerateMipmap(GL_TEXTURE_2D);
}
render() code:
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(shaderProgram);
camera.ProcessOneFinger(moveStpX, moveStpY);
camera.ProcessTwoFinger(move2X, move2Y);
projection = glm::perspective(camera.GetZoom(), (GLfloat)600/(GLfloat)1024, nearPlane, farPlane);
view = camera.GetViewMatrix();
glUniformMatrix4fv(glGetUniformLocation(shaderProgram, "projection"), 1, GL_FALSE, glm::value_ptr(projection));
glUniformMatrix4fv(glGetUniformLocation(shaderProgram, "view"), 1, GL_FALSE, glm::value_ptr(view));
glm::mat4 model;
model = glm::translate(model, glm::vec3(0.0f, 0.0f, 0.0f));
GLfloat angle = 20.0f;
model = glm::rotate(model, angle, glm::vec3( 1.0f, 0.3f, 0.5f));
glUniformMatrix4fv(glGetUniformLocation(shaderProgram, "model"), 1, GL_FALSE, glm::value_ptr( model ) );
glDrawArrays(GL_TRIANGLES, 0, vertices.size());
Model detail
Texture: (image omitted)
Model .obj file after triangulation:
https://pastebin.com/vUjHv8Fr
Thank you!!!
This looks to me like your texture is upside down. There are basically two potential errors that could each lead to this happening. Most likely, the texture image itself is upside down, e.g., because the image data passed to glTexImage2D() is in the wrong order. Unlike most other APIs, OpenGL (by default) expects pixel data in row-wise order starting from the bottom row. Check the way you load the texture data to make sure it's in the right order.
If this is not the problem, then it might be that your texture coordinates are for a left-handed texture coordinate system. OpenGL, however, uses right-handed texture coordinates where the origin is the lower-left corner of the texture image rather than the upper left corner. I'm not a Blender guy, but there's probably an export setting for this…
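If the coordinates do turn out to be the issue, a common workaround, sketched here against the tinyobjloader loop from the question rather than as a definitive fix, is to flip the V coordinate at load time:
// flip V while reading texcoords: many image pipelines treat (0,0) as the
// top-left corner, while OpenGL samples with the origin at the bottom-left
tinyobj::real_t ux = attrib.texcoords[2*idx.texcoord_index+0];
tinyobj::real_t uy = 1.0f - attrib.texcoords[2*idx.texcoord_index+1];
uvs.push_back(glm::vec2(ux, uy));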
New to OpenGL and currently working with the Android NDK. I am currently getting this error and can't figure it out. I feel like it's something basic, but I could be wrong. appRender is run every frame by the Java renderer side.
Error:
E/emuglGLESv2_enc: glDrawArrays: a vertex attribute array is enabled with no data bound
app.cpp:
void appRender(long tick, int width, int height){
const float vertices[] =
{
0.0f, 0.5f, 0.0f,
-0.5f, -0.5f, 0.0f,
0.5f, -0.5f, 0.0f
};
glClear(GL_COLOR_BUFFER_BIT);
GLuint vertexBuffer;
glGenBuffers(1, &vertexBuffer);
glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
// Setup colors for each vertex
GLfloat colors[3*4];
for (int v = 0; v < 3; v++){
colors[4*v+0] = 0;
colors[4*v+1] = 1;
colors[4*v+2] = 0;
colors[4*v+3] = 1;
}
// Setup color buffer
GLuint colorBuffer;
glGenBuffers(1, &colorBuffer);
glBindBuffer(GL_ARRAY_BUFFER, colorBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(colors), colors, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
glVertexAttribPointer(
1,
3,
GL_FLOAT,
GL_FALSE,
0,
(void*)0
);
glEnableVertexAttribArray(1);
glBindBuffer(GL_ARRAY_BUFFER, colorBuffer);
glVertexAttribPointer(
1,
4,
GL_FLOAT,
GL_FALSE,
0,
(void*)0
);
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
glBindTexture(GL_TEXTURE_2D, 0);
//textRenderer.RenderTexts(0.5f);
//boxRenderer.RenderBoxes(0.5f);
}
So I found it, and yes, I'm bad.
glVertexAttribPointer(1,3,...) -> glVertexAttribPointer(0,3,...)
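For completeness, the corrected attribute setup, with each glVertexAttribPointer using the same index that gets enabled:
glEnableVertexAttribArray(0); // position -> attribute 0
glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
glEnableVertexAttribArray(1); // color -> attribute 1
glBindBuffer(GL_ARRAY_BUFFER, colorBuffer);
glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, 0, (void*)0);
glDrawArrays(GL_TRIANGLES, 0, 3);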
I have a 2D game project that I'm porting to Android that utilizes OpenGL ES 2.0. I am having trouble getting anything drawn on the screen (except for a solid color from clearing the screen). Everything renders just fine when running in my Windows environment, but of course the environment is set up differently for the different version of OpenGL.
I followed the native-activity sample and took advice from several other OpenGL ES 2.0 resources to compose what I currently have.
I have checked everything I know how to with no anomalous results. As mentioned, glClear works, and displays the color set by glClearColor. I also know that every frame is being rendered, as changing glClearColor frame-by-frame displays the different colors. Of course, the application properly compiles. My textures are loaded from the proper location in the app's cache. glGetError is returning GL_NO_ERROR at every step in the process, so what I am doing appears to be accepted by OpenGL. My shaders are loaded without error. I have also tested this on both a few emulators and my physical android device, so it isn't localized to a specific device configuration.
I speculate that it must be some mistake in how I initialize and set up OpenGL. I am hoping someone more versed in OpenGL ES than I am will be able to help root out my problem. I am pasting the different relevant sections of my code below. engine is a global struct I am presently using out of laziness.
Initializing the display
static int AND_InitDisplay() {
// Desired display attributes
const EGLint attribs[] = {
EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL_BLUE_SIZE, 8,
EGL_GREEN_SIZE, 8,
EGL_RED_SIZE, 8,
EGL_ALPHA_SIZE, 8,
EGL_DEPTH_SIZE, 16,
EGL_NONE
};
EGLint w, h, dummy, format;
EGLint numConfigs;
EGLConfig config;
EGLSurface surface;
EGLContext context;
EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
eglInitialize(display, 0, 0);
eglChooseConfig(display, attribs, &config, 1, &numConfigs);
eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID, &format);
ANativeWindow_setBuffersGeometry(engine->app->window, 0, 0, format);
surface = eglCreateWindowSurface(display, config, engine->app->window, NULL);
EGLint const attrib_list[3] = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE};
context = eglCreateContext(display, config, NULL, attrib_list);
if (eglMakeCurrent(display, surface, surface, context) == EGL_FALSE) {
LOGW("Unable to eglMakeCurrent");
return -1;
}
eglQuerySurface(display, surface, EGL_WIDTH, &w);
eglQuerySurface(display, surface, EGL_HEIGHT, &h);
engine->display = display;
engine->context = context;
engine->surface = surface;
engine->width = w;
engine->height = h;
// Initialize GL state.
glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);
return 0;
}
Drawing a frame
static void AND_drawFrame() {
if (engine->display == NULL) {
LOGW("DB E: DISPLAY IS NULL");
// No display.
return;
}
// Clearing with red color. This displays properly.
glClearColor(1.f, 0.f, 0.f, 1.f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// eglSwapBuffers results in no visible change
eglSwapBuffers(engine->display, engine->surface);
}
Example of preparing VBO data
I understand many wouldn't like the idea of using multiple VBOs for the same geometry. I would love to hear if this code isn't orthodox or is incorrect, but I am not focused on this unless it is the root of my problem.
GLfloat charPosVerts[] = {
p0.x, p0.y, 0.f,
p1.x, p0.y, 0.f,
p1.x, p1.y, 0.f,
p0.x, p0.y, 0.f,
p1.x, p1.y, 0.f,
p0.x, p1.y, 0.f
};
GLfloat charTexVerts[] = {
0.0, 0.0,
textures[texid].w, 0.0,
textures[texid].w, textures[texid].h,
0.0, 0.0,
textures[texid].w, textures[texid].h,
0.0, textures[texid].h
};
GLfloat charColorVerts[] = {
e->color.r, e->color.g, e->color.b, e->color.a,
e->color.r, e->color.g, e->color.b, e->color.a,
e->color.r, e->color.g, e->color.b, e->color.a,
e->color.r, e->color.g, e->color.b, e->color.a,
e->color.r, e->color.g, e->color.b, e->color.a,
e->color.r, e->color.g, e->color.b, e->color.a
};
glGenBuffers(1, &(e->vboPos));
glGenBuffers(1, &(e->vboTex));
glGenBuffers(1, &(e->vboColor));
glBindBuffer(GL_ARRAY_BUFFER, e->vboPos);
glBufferData(GL_ARRAY_BUFFER, sizeof(charPosVerts), charPosVerts, GL_DYNAMIC_DRAW);
glVertexAttribPointer(shaderIDs.attribPosition, 3, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(shaderIDs.attribPosition);
glBindBuffer(GL_ARRAY_BUFFER, e->vboTex);
glBufferData(GL_ARRAY_BUFFER, sizeof(charTexVerts), charTexVerts, GL_DYNAMIC_DRAW);
glVertexAttribPointer(shaderIDs.attribTexCoord, 2, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(shaderIDs.attribTexCoord);
glBindBuffer(GL_ARRAY_BUFFER, e->vboColor);
glBufferData(GL_ARRAY_BUFFER, sizeof(charColorVerts), charColorVerts, GL_DYNAMIC_DRAW);
glVertexAttribPointer(shaderIDs.attribColors, 4, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(shaderIDs.attribColors);
Example of drawing VBO
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, CORE_GetBmpOpenGLTex(texix));
glUniform1i(shaderIDs.uniTexture, 0);
// Draw the sprite
glBindBuffer(GL_ARRAY_BUFFER, e->vboPos);
glVertexAttribPointer(shaderIDs.attribPosition, 3, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(shaderIDs.attribPosition);
glBindBuffer(GL_ARRAY_BUFFER, e->vboTex);
glVertexAttribPointer(shaderIDs.attribTexCoord, 2, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(shaderIDs.attribTexCoord);
glBindBuffer(GL_ARRAY_BUFFER, e->vboColor);
glVertexAttribPointer(shaderIDs.attribColors, 4, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(shaderIDs.attribColors);
glDrawArrays(GL_TRIANGLES, 0, 18);
Vertex Shader
The shaders are very simple.
attribute vec3 position;
attribute vec2 texCoord;
attribute vec4 colors;
varying vec2 texCoordVar;
varying vec4 colorsVar;
void main() {
gl_Position = vec4(position, 1.0);
texCoordVar = texCoord;
colorsVar = colors;
}
Fragment Shader
uniform sampler2D texture;
varying vec2 texCoordVar;
varying vec4 colorsVar;
void main()
{
gl_FragColor = texture2D(texture, texCoordVar) * colorsVar;
}
Thanks for looking at this long post. Help is very much appreciated.
The posted code is not drawing anything. From the AND_drawFrame() function:
// Clearing with red color. This displays properly.
glClearColor(1.f, 0.f, 0.f, 1.f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// eglSwapBuffers results in no visible change
eglSwapBuffers(engine->display, engine->surface);
Based on this, the draw code is either never invoked, or the window is cleared after drawing, which would wipe out everything that was drawn before.
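To illustrate the expected ordering, a sketch of a frame where the drawing actually lands between the clear and the swap; AND_drawSprites() is a hypothetical name for whatever code issues the glDrawArrays calls shown in the question:
static void AND_drawFrame() {
glClearColor(1.f, 0.f, 0.f, 1.f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
AND_drawSprites(); // every draw call must happen here, each frame, before the swap
eglSwapBuffers(engine->display, engine->surface);
}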
I have an Android application that renders a 3D model loaded from an OBJ file. At first I'm trying to render only the vertices (without normals and texture-coordinate info).
After loading the OBJ file's vertices into a vector of triangles, I try to create the VBO:
struct obj_model_t {
GLuint vertex_buf;
GLuint tex_coord_buf;
GLuint normals_buf;
unsigned int count;
};
...
obj_model_t out_model;
// loading triangles to std::vector<glm::vec3> out_vertices;
glGenBuffers(1, &out_model.vertex_buf);
glBindBuffer(GL_ARRAY_BUFFER, out_model.vertex_buf);
glBufferData(GL_ARRAY_BUFFER, sizeof(glm::vec3) * out_vertices.size(), &out_vertices[0], GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
Then, in the render function, I try to bind this VBO and draw triangles from it:
void engine_t::render(double elapsedTime) {
// clear buffer and other preparations for render
mat4 mvp = camera_.projection_matrix() * camera_.view_matrix();
glUseProgram(gProgram);
checkGlError("glUseProgram");
glUniformMatrix4fv(g_uMVPMatrix_Handle, 1, GL_FALSE, &mvp[0][0]);
glBindBuffer(GL_ARRAY_BUFFER, model_.vertex_buf);
checkGlError("glBindBuffer");
glEnableVertexAttribArray(g_vPosition_Handle);
checkGlError("glEnableVertexAttribArray");
glVertexAttribPointer(g_vPosition_Handle, 3, GL_FLOAT, GL_FALSE, 0, (void *)0);
checkGlError("glVertexAttribPointer");
glDrawArrays(GL_TRIANGLES, 0, model_.count); // E/Adreno200-ES20(27772): gl_draw_error_checks:418>: GL_INVALID_OPERATION
checkGlError("glDrawArrays"); // after glDrawArrays glError (0x502)
glDisableVertexAttribArray(g_vPosition_Handle);
checkGlError("glDisableVertexAttribArray");
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
But if I move out_vertices from the loading function into global scope and use:
glVertexAttribPointer(g_vPosition_Handle, 3, GL_FLOAT, GL_FALSE, 0, (void *)&out_vertices[0]);
without binding model_.vertex_buf (i.e., without calling glBindBuffer(GL_ARRAY_BUFFER, model_.vertex_buf);), my model renders normally.
How can I fix this problem and use a VBO to draw my vertices?