Related
I just started programming OpenGL ES 2.0 and I'm currently struggling to find an issue concerned with setting the color of a Wavefront object I'm drawing (https://pastebin.com/cEvpj8rt). The drawing works just fine until I start to manipulate the color, at which point I'm confronted with OpenGL error 1281, and I'm unable to pinpoint the cause in my code. I've broken the shader code down to what I believe is the bare minimum required for the fragment shader to work:
void main() {
    // Emit a constant opaque green; no varyings or uniforms are read,
    // which rules the fragment stage out as the source of the GL error.
    const vec4 kGreen = vec4(0.0, 1.0, 0.0, 1.0);
    gl_FragColor = kGreen;
}
To eliminate any additional source of error I am setting the color with a constant value as can be seen above. I doubt the error lies with the simple code above but is concerned with the code in my adapted renderer implementation. (it is based on the renderer that came with a sample from the ar-core github repo. The full code of the initial renderer can be found here: https://github.com/google-ar/arcore-android-sdk/blob/master/samples/java_arcore_hello_ar/app/src/main/java/com/google/ar/core/examples/java/helloar/rendering/ObjectRenderer.java while the adapted version can be seen here: https://pastebin.com/9cmKVnLV) Below you can find an excerpt of the code responsible for setting up and drawing the object. I reckoned the issue to be connected to the texturing which is why I removed the code.
I know it's a bit much to ask for help given my lack of understanding of the matter at hand, but I'd be glad for any hint/advice at this point. The error occurs after the first draw in the following method:
// Draws the OBJ mesh using the supplied camera view and projection matrices.
// Assumes createOnGlThread() already uploaded the buffers and linked mProgram.
// NOTE(review): if an attribute was optimized out of the simplified shaders,
// glGetAttribLocation returns -1, and glVertexAttribPointer /
// glEnableVertexAttribArray with index -1 raise GL_INVALID_VALUE (1281) —
// matching the error reported here. Guard locations with `if (loc >= 0)` to confirm.
public void draw(float[] cameraView, float[] cameraPerspective) {
// modelView = view * model; mvp = projection * modelView
multiplyMM(mModelViewMatrix, 0, cameraView, 0, mModelMatrix, 0);
multiplyMM(mModelViewProjectionMatrix, 0, cameraPerspective, 0, mModelViewMatrix, 0);
glUseProgram(mProgram);
// Attributes read from one shared VBO; the last pointer argument is the
// byte offset of each section within that buffer.
glBindBuffer(GL_ARRAY_BUFFER, mVertexBufferId);
glVertexAttribPointer(mPositionAttribute, COORDS_PER_VERTEX,
GL_FLOAT, false, 0, mVerticesBaseAddress);
glVertexAttribPointer(mNormalAttribute, 3,
GL_FLOAT, false, 0, mNormalsBaseAddress);
glBindBuffer(GL_ARRAY_BUFFER, 0);
// Set the ModelViewProjection matrix in the shader.
glUniformMatrix4fv(mModelViewUniform, 1,
false, mModelViewMatrix, 0);
glUniformMatrix4fv(mModelViewProjectionUniform, 1,
false, mModelViewProjectionMatrix, 0);
glEnableVertexAttribArray(mPositionAttribute);
glEnableVertexAttribArray(mNormalAttribute);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, mIndexBufferId);
glDrawElements(GL_TRIANGLES, mIndexCount, GL_UNSIGNED_SHORT, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
glDisableVertexAttribArray(mPositionAttribute);
glDisableVertexAttribArray(mNormalAttribute);
// this is where the error is detected
OpenGlHelper.checkGLError(TAG, "After draw");
}
Here the method which is used for initialization:
// Loads the OBJ asset, packs positions/texcoords/normals into one VBO (as
// consecutive sections), uploads 16-bit indices, compiles/links the shader
// program, and caches its uniform/attribute locations.
// NOTE(review): objInputStream is opened but never closed in this method.
public void createOnGlThread(Context context) throws IOException {
InputStream objInputStream = context.getAssets()
.open(OBJ_ASSET_NAME);
Obj obj = ObjReader.read(objInputStream);
obj = ObjUtils.convertToRenderable(obj);
IntBuffer wideIndices = ObjData.getFaceVertexIndices(obj, 3);
FloatBuffer vertices = ObjData.getVertices(obj);
FloatBuffer texCoords = ObjData.getTexCoords(obj, 2);
FloatBuffer normals = ObjData.getNormals(obj);
// ES 2.0 draws with GL_UNSIGNED_SHORT, so narrow the 32-bit indices to 16-bit.
ShortBuffer indices = ByteBuffer.allocateDirect(2 * wideIndices.limit())
.order(ByteOrder.nativeOrder()).asShortBuffer();
while (wideIndices.hasRemaining()) {
indices.put((short) wideIndices.get());
}
indices.rewind();
int[] buffers = new int[2];
glGenBuffers(2, buffers, 0);
mVertexBufferId = buffers[0];
mIndexBufferId = buffers[1];
// Load vertex buffer
// Byte offsets of each attribute section inside the single VBO
// (4 bytes per float).
mVerticesBaseAddress = 0;
mTexCoordsBaseAddress = mVerticesBaseAddress + 4 * vertices.limit();
mNormalsBaseAddress = mTexCoordsBaseAddress + 4 * texCoords.limit();
final int totalBytes = mNormalsBaseAddress + 4 * normals.limit();
glBindBuffer(GL_ARRAY_BUFFER, mVertexBufferId);
glBufferData(GL_ARRAY_BUFFER, totalBytes, null, GL_STATIC_DRAW);
glBufferSubData(GL_ARRAY_BUFFER, mVerticesBaseAddress,
4 * vertices.limit(), vertices);
glBufferSubData(GL_ARRAY_BUFFER, mTexCoordsBaseAddress,
4 * texCoords.limit(), texCoords);
glBufferSubData(GL_ARRAY_BUFFER, mNormalsBaseAddress,
4 * normals.limit(), normals);
glBindBuffer(GL_ARRAY_BUFFER, 0);
// Load index buffer
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, mIndexBufferId);
mIndexCount = indices.limit();
glBufferData(GL_ELEMENT_ARRAY_BUFFER, 2 * mIndexCount,
indices, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
OpenGlHelper.checkGLError(TAG, "OBJ buffer load");
mProgram = glCreateProgram();
glAttachShader(mProgram, OpenGlHelper.loadGLShader(TAG, context,
GL_VERTEX_SHADER, R.raw.sphere_vertex));
glAttachShader(mProgram, OpenGlHelper.loadGLShader(TAG, context,
GL_FRAGMENT_SHADER, R.raw.sphere_fragment));
glLinkProgram(mProgram);
glUseProgram(mProgram);
OpenGlHelper.checkGLError(TAG, "Program creation");
// NOTE(review): any of these can be -1 if the linker deems the variable
// inactive (e.g. after stripping the fragment shader) — verify before use.
mModelViewUniform = glGetUniformLocation(mProgram, "u_ModelView");
mModelViewProjectionUniform =
glGetUniformLocation(mProgram, "u_ModelViewProjection");
mPositionAttribute = glGetAttribLocation(mProgram, "a_Position");
mNormalAttribute = glGetAttribLocation(mProgram, "a_Normal");
OpenGlHelper.checkGLError(TAG, "Program parameters");
setIdentityM(mModelMatrix, 0);
}
I was going along just fine working on an opengles 2.0 application until I tested it on an older phone that doesn't support VAO and now I seem to have fallen into a marsh.
I started using opengl after VAO's were sorta standard and everywhere so I never had to render without using one. Now that I have to write code that supports it I am having some trouble.
vertex shader
// Minimal pass-through vertex shader: positions are used as clip-space
// coordinates directly; the per-vertex color is interpolated to the
// fragment stage via `fcolor`.
attribute vec3 position;
attribute vec4 icolor;
varying vec4 fcolor;
void main()
{
gl_Position = vec4(position, 1.0);
fcolor = icolor;
}
fragment shader
// Outputs the interpolated per-vertex color unchanged.
// The default float precision is mandatory in ES 2.0 fragment shaders.
precision mediump float;
varying vec4 fcolor;
void main (void)
{
gl_FragColor = fcolor;
}
application side of things
init code:
// Creates and fills one VBO each for positions, indices, and colors.
// NOTE(review): sizeof(rend2d->vertices) yields the full array size only
// because the arrays are embedded in the struct (see renderable2d below);
// with pointer members it would be the pointer size.
glGenBuffers(1, &verticesBuffer);
glBindBuffer(GL_ARRAY_BUFFER, verticesBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(rend2d->vertices), rend2d->vertices, GL_STATIC_DRAW);
glGenBuffers(1, &indicesBuffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indicesBuffer);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(rend2d->indices), rend2d->indices, GL_STATIC_DRAW);
glGenBuffers(1, &colorBuffer);
glBindBuffer(GL_ARRAY_BUFFER, colorBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(rend2d->colors), rend2d->colors, GL_STATIC_DRAW);
rendering code:
// Per-frame draw as originally posted. Two defects are visible here, both
// called out in the answers that follow:
//  - neither attribute array is ever enabled via glEnableVertexAttribArray;
//  - GL_UNSIGNED_INT is not a legal glDrawElements index type in unextended
//    OpenGL ES 2.0 (only GL_UNSIGNED_BYTE / GL_UNSIGNED_SHORT are).
glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(sp);
// Attribute locations could be queried once at init rather than per frame.
GLint posLoc = glGetAttribLocation(sp, "position");
GLint colLoc = glGetAttribLocation(sp, "icolor");
glBindBuffer(GL_ARRAY_BUFFER, verticesBuffer);
glVertexAttribPointer(posLoc, 3, GL_FLOAT, GL_FALSE, 0, 0);
glBindBuffer(GL_ARRAY_BUFFER, colorBuffer);
glVertexAttribPointer(colLoc, 4, GL_FLOAT, GL_FALSE, 0, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indicesBuffer);
glDrawElements(GL_TRIANGLES, rend2d->vertexCount, GL_UNSIGNED_INT, 0);
my error might be glaringly obvious but I just don't see what part I am currently not doing correctly and hoping to get some help with semi-modern opengl. This is mainly to provide support for apps that are opengles 2.0 but do not support the
GL_OES_vertex_array_object extension.
I wanted to post the answer because there were many little things wrong. First I'll post the data structure that I was using to hold my GL data.
// CPU-side data for one 2D renderable (a textured quad: 4 vertices, 6 indices).
typedef struct
{
GLuint vertexCount;
GLfloat vertices[12]; // 4 vertices * xyz
GLfloat colors[16]; // 4 vertices * rgba
GLuint indices[6]; // NOTE: 32-bit — not a legal ES 2.0 index type (fixed in the next version)
GLfloat texCoords[8]; // 4 vertices * uv
} renderable2d;
The first problem was here. As @derhass pointed out on the IRC channel, OpenGL ES 2.0 doesn't support 32-bit indices. So the first step was to change that GLuint above to GLushort.
// ES 2.0-compatible version: indices stored as 16-bit values so they can be
// drawn with GL_UNSIGNED_SHORT (unextended ES 2.0 has no 32-bit index type).
typedef struct
{
GLushort vertexCount; //I made this a short as well
GLfloat vertices[12];
GLfloat colors[16];
GLushort indices[6]; //make this a short instead of an int
GLfloat texCoords[8];
} renderable2d; // FIX: the typedef name was missing after the closing brace
once that part was fixed, then I had to generate my buffers, bind them and put the data in them, then unbind.
// One-time buffer setup: generate, bind, upload, then unbind each buffer so
// later calls start from a clean binding state.
//bind n setup vertices
glGenBuffers(1, &verticesBuffer);
glBindBuffer(GL_ARRAY_BUFFER, verticesBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(rend2d->vertices), rend2d->vertices, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
//bind n setup colors
glGenBuffers(1, &colorBuffer);
glBindBuffer(GL_ARRAY_BUFFER, colorBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(rend2d->colors), rend2d->colors, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
//bind n setup indices
glGenBuffers(1, &indicesBuffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indicesBuffer);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(rend2d->indices), rend2d->indices, GL_STATIC_DRAW);
// FIX: the index data lives on GL_ELEMENT_ARRAY_BUFFER, so unbind that
// target (the original unbound GL_ARRAY_BUFFER here, leaving the element
// array buffer bound).
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
and finally on to the rendering code
// Corrected per-frame draw (no VAO): each VBO is bound, its attribute array
// enabled, and the pointer respecified before drawing; indices are drawn as
// GL_UNSIGNED_SHORT, which is valid in ES 2.0.
glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(sp);
glBindBuffer(GL_ARRAY_BUFFER, verticesBuffer);
glEnableVertexAttribArray(posLoc);
glVertexAttribPointer(posLoc, 3, GL_FLOAT, GL_FALSE, 0, 0);
glBindBuffer(GL_ARRAY_BUFFER, colorBuffer);
glEnableVertexAttribArray(colLoc);
glVertexAttribPointer(colLoc, 4, GL_FLOAT, GL_FALSE, 0, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indicesBuffer);
// vertexCount (6 for the quad) is used as the element/index count here.
glDrawElements(GL_TRIANGLES, rend2d->vertexCount, GL_UNSIGNED_SHORT, 0);
after doing all that I got things sorted out on both devices. Just for clarity rend2d is just a textured quad so rend2d->vertexCount = 6; With more complex models well you'll get that info somewhere else.
1) GL_UNSIGNED_INT is not officially supported in ES 2.0 as an index type
your GL_UNSIGNED_INT must be either one GL_UNSIGNED_BYTE or GL_UNSIGNED_SHORT
2) Assume they are friends when you use VBO without VAO.
glBindBuffer();
glVertexAttribPointer();
glEnableVertexAttribArray(); // you don't call it
In init()
// Init-time setup: cache attribute locations, then create/fill each VBO and
// configure its attribute while it is bound.
GLint posLoc = glGetAttribLocation(sp, "position");
GLint colLoc = glGetAttribLocation(sp, "icolor");
glGenBuffers(1, &verticesBuffer);
glBindBuffer(GL_ARRAY_BUFFER, verticesBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(rend2d->vertices), rend2d->vertices, GL_STATIC_DRAW);
glEnableVertexAttribArray(posLoc);
// FIX: glVertexAttribPointer takes (index, size, type, normalized, stride,
// pointer) — the original snippet dropped the size and normalized arguments.
glVertexAttribPointer(posLoc, 3, GL_FLOAT, GL_FALSE, 0, 0);
glGenBuffers(1, &colorBuffer);
glBindBuffer(GL_ARRAY_BUFFER, colorBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(rend2d->colors), rend2d->colors, GL_STATIC_DRAW);
glEnableVertexAttribArray(colLoc); // FIX: missing semicolon
glVertexAttribPointer(colLoc, 4, GL_FLOAT, GL_FALSE, 0, 0);
In rendering()
// Per-frame setup without a VAO: rebind each VBO, enable its attribute
// array, and respecify the pointer before drawing.
glBindBuffer(GL_ARRAY_BUFFER, verticesBuffer);
glEnableVertexAttribArray(posLoc);
// FIX: supply the full glVertexAttribPointer argument list
// (index, size, type, normalized, stride, pointer).
glVertexAttribPointer(posLoc, 3, GL_FLOAT, GL_FALSE, 0, 0);
glBindBuffer(GL_ARRAY_BUFFER, colorBuffer);
glEnableVertexAttribArray(colLoc); // FIX: missing semicolon
glVertexAttribPointer(colLoc, 4, GL_FLOAT, GL_FALSE, 0, 0);
3) Just don't use VAOs in ES 2.0 in which, VAO is not officially supported. However, IOS does as an exception.
I'm trying to render into an FBO with Qt 5.2. On the PC everything works fine, but the mobile screen stays black. By the way, it's an Android phone.
Here's the code for the initialisation:
// Widget construction: fixes the widget size, seeds the RNG, and caches the
// scene plus its image. NOTE(review): the `renderPasses` parameter is unused,
// and makeCurrent() runs before initializeGL() has set up the GL context —
// confirm this is intentional (the revised version below moves it).
GLANN::GLANN(unsigned int width, unsigned int height, unsigned int renderPasses, Scene *renderScene,
QWidget *parent, QGLWidget *shareWidget)
: QGLWidget(parent, shareWidget)
{
makeCurrent();
setFixedWidth(width);
setFixedHeight(height);
// Parameters shadow the members of the same name; copy them explicitly.
this->width = width;
this->height = height;
qsrand((uint)QTime::currentTime().msec());
mScene = renderScene;
SceneImage = renderScene->getSceneImage();
numObjects = SceneImage->width();
renderedImage = new Playground(texSize,texSize);
}
// One-time GL setup: shaders, textures, a full-screen quad VBO (4 vertices)
// and a 5-entry GL_TRIANGLE_STRIP index buffer, then the offscreen FBO and a
// zero-interval repaint timer.
void GLANN::initializeGL(){
setAutoBufferSwap(true);
initializeGLFunctions();
// NOTE(review): with culling enabled, the strip's winding must match the
// default front face or the quad is discarded — worth verifying on device.
glEnable(GL_CULL_FACE);
glClearColor(0.0, 0.0, 0.0, 0.0);
initShader();
initTextures();
// Generate 2 VBOs
glGenBuffers(1, &vboId0);
glGenBuffers(1, &vboId1);
VertexData vertices[] = {
// Vertex data for face 0
{QVector3D(-1.0, -1.0, 1.0), QVector2D(0.0, 0.0)}, // v0
{QVector3D( 1.0, -1.0, 1.0), QVector2D(1.0, 0.0)}, // v1
{QVector3D(-1.0, 1.0, 1.0), QVector2D(0.0, 1.0)}, // v2
{QVector3D( 1.0, 1.0, 1.0), QVector2D(1.0, 1.0)}, // v3
};
// Transfer vertex data to VBO 0
glBindBuffer(GL_ARRAY_BUFFER, vboId0);
glBufferData(GL_ARRAY_BUFFER, 4 * sizeof(VertexData), vertices, GL_STATIC_DRAW);
// The trailing duplicate index 3 only adds a degenerate triangle.
GLushort indices[] = {
0, 1, 2, 3, 3,
};
// Transfer index data to VBO 1
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vboId1);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, 5 * sizeof(GLushort), indices, GL_STATIC_DRAW);
//Init the Framebuffer
initFbo();
// Use QBasicTimer because its faster than QTimer
timer.start(0, this);
}
// Allocates the offscreen render target (texSize x texSize, default format).
// NOTE(review): always returns true, and a previously allocated fbo is not
// deleted — calling this twice would leak the old framebuffer object.
bool GLANN::initFbo(){
fbo = new QOpenGLFramebufferObject(texSize, texSize);
return true;
}
And here for the Rendering:
// Render excerpt (first posted revision): pass 1 draws the quad into the FBO
// with per-pass random seeds, pass 2 draws the FBO texture to the screen.
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
// Set random seed
program.setUniformValue("seedX", ((float)qrand()/RAND_MAX));
program.setUniformValue("seedY", ((float)qrand()/RAND_MAX));
//Set number of already rendered passes
program.setUniformValue("numRenderPass",mRenderPasses);
//Set program to fbo render mode
program.setUniformValue("fbo",true);
//Bind last rendered Image
//pixelsRenderedImage = bindTexture(*renderedImage);
//Load Identity
//glLoadIdentity();
//Move to rendering point
//glTranslatef( -1.0, -1.0, 0.0f );
// Draw geometry
// Tell OpenGL which VBOs to use
// Render to our framebuffer
fbo->bind();
glViewport(0,0,texSize,texSize);
// Tell OpenGL which VBOs to use
glBindBuffer(GL_ARRAY_BUFFER, vboId0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vboId1);
// Offset for position
int offset = 0;
// Tell OpenGL programmable pipeline how to locate vertex position data
int vertexLocation = program.attributeLocation("a_position");
program.enableAttributeArray(vertexLocation);
glVertexAttribPointer(vertexLocation, 3, GL_FLOAT, GL_FALSE, sizeof(VertexData), (const void *)offset);
// Offset for texture coordinate
offset += sizeof(QVector3D);
// Tell OpenGL programmable pipeline how to locate vertex texture coordinate data
int texcoordLocation = program.attributeLocation("a_texcoord");
program.enableAttributeArray(texcoordLocation);
glVertexAttribPointer(texcoordLocation, 2, GL_FLOAT, GL_FALSE, sizeof(VertexData), (const void *)offset);
// NOTE(review): glEnable(GL_TEXTURE_2D) is fixed-function (ES 1.x) state and
// is not valid under ES 2.0 — it raises a GL error on strict drivers.
glEnable(GL_TEXTURE_2D);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, pixelsRenderedImage);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, pixelsScene);
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, pixelsRandom);
// Draw cube geometry using indices from VBO 1
glDrawElements(GL_TRIANGLE_STRIP, 5, GL_UNSIGNED_SHORT, 0);
fbo->release();
pixelsRenderedImage = fbo->takeTexture();
//Set Program to screen rendering
program.setUniformValue("fbo",false);
//Set Viewport back to default
glViewport(0,0,width,height);
//Render To Screen
glEnable(GL_TEXTURE_2D);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, pixelsRenderedImage);
// Draw quad geometry using indices from VBO 1
glDrawElements(GL_TRIANGLE_STRIP, 5, GL_UNSIGNED_SHORT, 0);
Hope it's clear what I'm trying to do here...
EDIT : REMOVED glEnable(TEXTURE_2D).
Still getting a black screen, but no errors from glGetError().
// Revised constructor: identical to the first version except makeCurrent()
// was moved into initializeGL(), where a GL context is guaranteed.
GLANN::GLANN(unsigned int width, unsigned int height, unsigned int renderPasses, Scene *renderScene,
QWidget *parent, QGLWidget *shareWidget)
: QGLWidget(parent, shareWidget)
{
setFixedWidth(width);
setFixedHeight(height);
this->width = width;
this->height = height;
qsrand((uint)QTime::currentTime().msec());
mScene = renderScene;
SceneImage = renderScene->getSceneImage();
numObjects = SceneImage->width();
renderedImage = new Playground(texSize,texSize);
}
// Revised GL setup: same as before but with glGetError() logged after each
// step to localize where an error first appears, and culling disabled.
void GLANN::initializeGL(){
//setAutoBufferSwap(true);
makeCurrent();
initializeGLFunctions();
qDebug() << glGetError() << "Line 28";
//glEnable(GL_CULL_FACE);
glClearColor(0.0, 0.0, 0.0, 0.0);
initShader();
qDebug() << glGetError() << "Line 36";
initTextures();
qDebug() << glGetError() << "Line 40";
// Generate 2 VBOs
glGenBuffers(1, &vboId0);
glGenBuffers(1, &vboId1);
VertexData vertices[] = {
// Vertex data for face 0
{QVector3D(-1.0, -1.0, 1.0), QVector2D(0.0, 0.0)}, // v0
{QVector3D( 1.0, -1.0, 1.0), QVector2D(1.0, 0.0)}, // v1
{QVector3D(-1.0, 1.0, 1.0), QVector2D(0.0, 1.0)}, // v2
{QVector3D( 1.0, 1.0, 1.0), QVector2D(1.0, 1.0)}, // v3
};
// Transfer vertex data to VBO 0
glBindBuffer(GL_ARRAY_BUFFER, vboId0);
glBufferData(GL_ARRAY_BUFFER, 4 * sizeof(VertexData), vertices, GL_STATIC_DRAW);
GLushort indices[] = {
0, 1, 2, 3, 3,
};
// Transfer index data to VBO 1
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vboId1);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, 5 * sizeof(GLushort), indices, GL_STATIC_DRAW);
qDebug() << glGetError() << "Line 57";
//Init the Framebuffer
initFbo();
qDebug() << glGetError() << "Line 62";
// Use QBasicTimer because its faster than QTimer
timer.start(0, this);
}
// Allocates the offscreen render target (texSize x texSize).
// NOTE(review): unconditionally returns true; repeated calls leak the old fbo.
bool GLANN::initFbo(){
fbo = new QOpenGLFramebufferObject(texSize, texSize);
return true;
}
// Keep the GL viewport in sync with the widget size.
void GLANN::resizeGL(int w, int h){
glViewport(0,0,w,h);
}
// While the left button is dragged, appends a line segment from the last
// recorded point to the current cursor position, refreshes the scene
// texture, and resets the accumulation state so rendering restarts.
void GLANN::mouseMoveEvent(QMouseEvent* event){
if(event->buttons() == Qt::LeftButton){
// FIX: the y coordinate was normalized by `width`; use `height` so it
// matches the 1.0f-1.0f*y/height convention used for xTemp/yTemp below
// and in mousePressEvent.
LineObject newPoly(xTemp,yTemp, 1.0f*event->pos().x()/width, 1.0f-1.0f*event->pos().y()/height,qRgba(255,255,255,255),0.4,0.5,0.3,0.0);
mScene->addObject(newPoly);
SceneImage = mScene->getSceneImage();
numObjects = SceneImage->width();
//Bind WeightmapTexture
pixelsScene = QGLWidget::bindTexture(*SceneImage);
qDebug() << xTemp << yTemp << numObjects;
//Bind last rendered Image
renderedImage->fill(qRgba(0,0,0,255));
pixelsRenderedImage = bindTexture(*renderedImage);
mRenderPasses = 0;
// Set number of Objects
program.setUniformValue("numObjects",numObjects);
// Remember this point as the start of the next segment.
xTemp = 1.0f*event->pos().x()/width;
yTemp = 1.0f-1.0f*event->pos().y()/height;
}
}
void GLANN::mousePressEvent(QMouseEvent* event){
    // Record the click position, normalized to [0,1] with y flipped so the
    // origin is the bottom-left, as the start point for the next segment.
    if(event->button() == Qt::LeftButton){
        const float normX = 1.0f*event->pos().x()/width;
        const float normY = 1.0f-1.0f*event->pos().y()/height;
        xTemp = normX;
        yTemp = normY;
    }
}
void GLANN::paintGL(){
    // One accumulation pass per repaint: draw, then bump the pass counter.
    render();
    //getFeedbackTexture();
    ++mRenderPasses;
}
// Two-pass draw: pass 1 renders the quad into the FBO with fresh random
// seeds; pass 2 draws the FBO's texture to the screen. glGetError() is
// logged at the checkpoints that were failing on device.
// NOTE(review): program.setUniformValue is called without a visible
// program.bind() in this excerpt — confirm the program is bound elsewhere.
void GLANN::render(){
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
// Set random seed
program.setUniformValue("seedX", ((float)qrand()/RAND_MAX));
program.setUniformValue("seedY", ((float)qrand()/RAND_MAX));
//Set number of already rendered passes
program.setUniformValue("numRenderPass",mRenderPasses);
//Set program to fbo render mode
program.setUniformValue("fbo",true);
//Bind last rendered Image
//pixelsRenderedImage = bindTexture(*renderedImage);
//Load Identity
//glLoadIdentity();
//Move to rendering point
//glTranslatef( -1.0, -1.0, 0.0f );
// Draw geometry
// Tell OpenGL which VBOs to use
// Render to our framebuffer
fbo->bind();
glViewport(0,0,texSize,texSize);
// Tell OpenGL which VBOs to use
glBindBuffer(GL_ARRAY_BUFFER, vboId0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vboId1);
// Offset for position
int offset = 0;
// Tell OpenGL programmable pipeline how to locate vertex position data
int vertexLocation = program.attributeLocation("a_position");
program.enableAttributeArray(vertexLocation);
glVertexAttribPointer(vertexLocation, 3, GL_FLOAT, GL_FALSE, sizeof(VertexData), (const void *)offset);
// Offset for texture coordinate
offset += sizeof(QVector3D);
// Tell OpenGL programmable pipeline how to locate vertex texture coordinate data
int texcoordLocation = program.attributeLocation("a_texcoord");
program.enableAttributeArray(texcoordLocation);
glVertexAttribPointer(texcoordLocation, 2, GL_FLOAT, GL_FALSE, sizeof(VertexData), (const void *)offset);
qDebug() << glGetError() << "Line 167";
//glEnable(GL_TEXTURE_2D);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, pixelsRenderedImage);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, pixelsScene);
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, pixelsRandom);
// Draw cube geometry using indices from VBO 1
// NOTE(review): pixelsRenderedImage here is the texture the FBO rendered to
// on the previous frame — sampling while also rendering the same chain can
// cause feedback issues; verify the ping-pong logic.
glDrawElements(GL_TRIANGLE_STRIP, 5, GL_UNSIGNED_SHORT, 0);
qDebug() << glGetError() << "Line 183";
fbo->release();
pixelsRenderedImage = fbo->texture();
//Set Program to screen rendering
program.setUniformValue("fbo",false);
//Set Viewport back to default
glViewport(0,0,width,height);
//Render To Screen
//glEnable(GL_TEXTURE_2D);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, pixelsRenderedImage);
// Draw quad geometry using indices from VBO 1
glDrawElements(GL_TRIANGLE_STRIP, 5, GL_UNSIGNED_SHORT, 0);
qDebug() << glGetError();
}
Problem is probably caused by this:
Qt Android. Get device screen resolution
And this:
Errors after changed OpenGL code from ES 1.0 to ES 2.0
I have a 2D game project that I'm porting to Android that utilizes OpenGL ES 2.0. I am having trouble getting anything drawn on the screen (except for a solid color from clearing the screen). Everything renders just fine when running in my Windows environment, but of course the environment is set up differently for the different version of OpenGL.
I followed the native-activity sample and took advice from several other OpenGL ES 2.0 resources to compose what I currently have.
I have checked everything I know how to with no anomalous results. As mentioned, glClear works, and displays the color set by glClearColor. I also know that every frame is being rendered, as changing glClearColor frame-by-frame displays the different colors. Of course, the application properly compiles. My textures are loaded from the proper location in the app's cache. glGetError is returning GL_NO_ERROR at every step in the process, so what I am doing appears to be accepted by OpenGL. My shaders are loaded without error. I have also tested this on both a few emulators and my physical android device, so it isn't localized to a specific device configuration.
I speculate that it must be some mistake in how I initialize and set up OpenGL. I am hoping someone more versed in OpenGL ES than I am will be able to help root out my problem. I am pasting the different relevant sections of my code below. engine is a global struct I am presently using out of laziness.
Initializing the display
// Creates the EGL display/surface/ES 2.0 context for the native window and
// makes it current. Returns 0 on success, -1 if eglMakeCurrent fails.
static int AND_InitDisplay() {
// Desired display attributes
const EGLint attribs[] = {
EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL_BLUE_SIZE, 8,
EGL_GREEN_SIZE, 8,
EGL_RED_SIZE, 8,
EGL_ALPHA_SIZE, 8,
EGL_DEPTH_SIZE, 16,
EGL_NONE
};
EGLint w, h, dummy, format; // `dummy` is never used
EGLint numConfigs;
EGLConfig config;
EGLSurface surface;
EGLContext context;
EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
eglInitialize(display, 0, 0);
// NOTE(review): the results of eglChooseConfig/eglCreateWindowSurface are
// not checked — a zero numConfigs or EGL_NO_SURFACE would go unnoticed.
eglChooseConfig(display, attribs, &config, 1, &numConfigs);
eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID, &format);
ANativeWindow_setBuffersGeometry(engine->app->window, 0, 0, format);
surface = eglCreateWindowSurface(display, config, engine->app->window, NULL);
// Request an ES 2.0 client context explicitly.
EGLint const attrib_list[3] = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE};
context = eglCreateContext(display, config, NULL, attrib_list);
if (eglMakeCurrent(display, surface, surface, context) == EGL_FALSE) {
LOGW("Unable to eglMakeCurrent");
return -1;
}
eglQuerySurface(display, surface, EGL_WIDTH, &w);
eglQuerySurface(display, surface, EGL_HEIGHT, &h);
engine->display = display;
engine->context = context;
engine->surface = surface;
engine->width = w;
engine->height = h;
// Initialize GL state.
// NOTE(review): GL_PERSPECTIVE_CORRECTION_HINT is a fixed-function (ES 1.x)
// hint — under an ES 2.0 context this likely raises GL_INVALID_ENUM; confirm.
glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);
return 0;
}
Drawing a frame
// Per-frame entry point. As posted, this only clears to red and swaps —
// no draw call is issued between glClear and eglSwapBuffers (the answer
// below identifies this as why nothing but the clear color appears).
static void AND_drawFrame() {
if (engine->display == NULL) {
LOGW("DB E: DISPLAY IS NULL");
// No display.
return;
}
// Clearing with red color. This displays properly.
glClearColor(1.f, 0.f, 0.f, 1.f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// eglSwapBuffers results in no visible change
eglSwapBuffers(engine->display, engine->surface);
}
Example of preparing VBO data
I understand many wouldn't like the idea of using multiple VBOs for the same geometry. I would love to hear if this code isn't orthodox or is incorrect, but I am not focused on this unless it the root of my problem.
// Builds a sprite quad as two triangles (6 vertices) and uploads position,
// texcoord, and color data into three separate VBOs, configuring each
// attribute while its buffer is bound.
GLfloat charPosVerts[] = {
p0.x, p0.y, 0.f,
p1.x, p0.y, 0.f,
p1.x, p1.y, 0.f,
p0.x, p0.y, 0.f,
p1.x, p1.y, 0.f,
p0.x, p1.y, 0.f
};
GLfloat charTexVerts[] = {
0.0, 0.0,
textures[texid].w, 0.0,
textures[texid].w, textures[texid].h,
0.0, 0.0,
textures[texid].w, textures[texid].h,
0.0, textures[texid].h
};
// Same color replicated for all 6 vertices.
GLfloat charColorVerts[] = {
e->color.r, e->color.g, e->color.b, e->color.a,
e->color.r, e->color.g, e->color.b, e->color.a,
e->color.r, e->color.g, e->color.b, e->color.a,
e->color.r, e->color.g, e->color.b, e->color.a,
e->color.r, e->color.g, e->color.b, e->color.a,
e->color.r, e->color.g, e->color.b, e->color.a
};
glGenBuffers(1, &(e->vboPos));
glGenBuffers(1, &(e->vboTex));
glGenBuffers(1, &(e->vboColor));
glBindBuffer(GL_ARRAY_BUFFER, e->vboPos);
glBufferData(GL_ARRAY_BUFFER, sizeof(charPosVerts), charPosVerts, GL_DYNAMIC_DRAW);
glVertexAttribPointer(shaderIDs.attribPosition, 3, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(shaderIDs.attribPosition);
glBindBuffer(GL_ARRAY_BUFFER, e->vboTex);
glBufferData(GL_ARRAY_BUFFER, sizeof(charTexVerts), charTexVerts, GL_DYNAMIC_DRAW);
glVertexAttribPointer(shaderIDs.attribTexCoord, 2, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(shaderIDs.attribTexCoord);
glBindBuffer(GL_ARRAY_BUFFER, e->vboColor);
glBufferData(GL_ARRAY_BUFFER, sizeof(charColorVerts), charColorVerts, GL_DYNAMIC_DRAW);
glVertexAttribPointer(shaderIDs.attribColors, 4, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(shaderIDs.attribColors);
Example of drawing VBO
// Binds the sprite texture and the three per-attribute VBOs, then draws the
// two triangles that make up the sprite quad.
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, CORE_GetBmpOpenGLTex(texix));
glUniform1i(shaderIDs.uniTexture, 0);
// Draw the sprite
glBindBuffer(GL_ARRAY_BUFFER, e->vboPos);
glVertexAttribPointer(shaderIDs.attribPosition, 3, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(shaderIDs.attribPosition);
glBindBuffer(GL_ARRAY_BUFFER, e->vboTex);
glVertexAttribPointer(shaderIDs.attribTexCoord, 2, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(shaderIDs.attribTexCoord);
glBindBuffer(GL_ARRAY_BUFFER, e->vboColor);
glVertexAttribPointer(shaderIDs.attribColors, 4, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(shaderIDs.attribColors);
// FIX: glDrawArrays counts vertices, not floats — the position VBO holds
// 6 vertices (18 floats), so drawing 18 vertices read past the buffers.
glDrawArrays(GL_TRIANGLES, 0, 6);
Vertex Shader
The shaders are very simple.
// Pass-through vertex shader: positions are used as clip-space coordinates;
// texcoord and color are interpolated to the fragment stage.
attribute vec3 position;
attribute vec2 texCoord;
attribute vec4 colors;
varying vec2 texCoordVar;
varying vec4 colorsVar;
void main() {
gl_Position = vec4(position, 1.0);
texCoordVar = texCoord;
colorsVar = colors;
}
Fragment Shader
// Modulates the sampled texture color by the interpolated per-vertex color.
// FIX: GLSL ES 1.00 fragment shaders have no default float precision — a
// default must be declared or compilation may fail on conforming drivers
// (the other fragment shader in this thread declares it too).
precision mediump float;
uniform sampler2D texture;
varying vec2 texCoordVar;
varying vec4 colorsVar;
void main()
{
gl_FragColor = texture2D(texture, texCoordVar) * colorsVar;
}
Thanks for looking at this long post. Help is very much appreciated.
The posted code is not drawing anything. From the AND_drawFrame() function:
// Clearing with red color. This displays properly.
glClearColor(1.f, 0.f, 0.f, 1.f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// eglSwapBuffers results in no visible change
eglSwapBuffers(engine->display, engine->surface);
Based on this, the draw code is either never invoked, or the window is cleared after drawing, which would wipe out everything that was drawn before.
I have an Android application, that render some 3D-model, loaded from OBJ-file. At first I try to render only vertices (without normals and texture-coord info).
After loading OBJ-file vertices to vector of triangles I try to create VBO:
// GL buffer handles for one loaded OBJ model plus its vertex count.
struct obj_model_t {
GLuint vertex_buf; // VBO holding packed vec3 positions
GLuint tex_coord_buf; // not filled in this excerpt
GLuint normals_buf; // not filled in this excerpt
unsigned int count; // vertex count passed to glDrawArrays
};
...
obj_model_t out_model;
// loading triangles to std::vector<glm::vec3> out_vertices;
// Upload the flattened triangle list into a fresh VBO, then unbind so
// later client-pointer draws are unaffected.
// NOTE(review): out_model.count is never assigned in this excerpt —
// confirm it is set to out_vertices.size() elsewhere.
glGenBuffers(1, &out_model.vertex_buf);
glBindBuffer(GL_ARRAY_BUFFER, out_model.vertex_buf);
glBufferData(GL_ARRAY_BUFFER, sizeof(glm::vec3) * out_vertices.size(), &out_vertices[0], GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
And then, in render function I try to bind this VBO and draw triangles from that:
// Draws the loaded model from its VBO. The inline comments show the failing
// configuration: glDrawArrays raises GL_INVALID_OPERATION (0x502) when
// sourcing from the VBO, while the client-side-pointer path works.
// NOTE(review): that symptom typically means the buffer name is not valid in
// the rendering context — confirm the VBO was created on the same thread/EGL
// context that runs render(), and that model_.count is a vertex count.
void engine_t::render(double elapsedTime) {
// clear buffer and other preparations for render
mat4 mvp = camera_.projection_matrix() * camera_.view_matrix();
glUseProgram(gProgram);
checkGlError("glUseProgram");
glUniformMatrix4fv(g_uMVPMatrix_Handle, 1, GL_FALSE, &mvp[0][0]);
glBindBuffer(GL_ARRAY_BUFFER, model_.vertex_buf);
checkGlError("glBindBuffer");
glEnableVertexAttribArray(g_vPosition_Handle);
checkGlError("glEnableVertexAttribArray");
glVertexAttribPointer(g_vPosition_Handle, 3, GL_FLOAT, GL_FALSE, 0, (void *)0);
checkGlError("glVertexAttribPointer");
glDrawArrays(GL_TRIANGLES, 0, model_.count); // E/Adreno200-ES20(27772): gl_draw_error_checks:418>: GL_INVALID_OPERATION
checkGlError("glDrawArrays"); // after glDrawArrays glError (0x502)
glDisableVertexAttribArray(g_vPosition_Handle);
checkGlError("glDisableVertexAttribArray");
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
But if I replace out_vertices from loading function to global space and use:
glVertexAttribPointer(g_vPosition_Handle, 3, GL_FLOAT, GL_FALSE, 0, (void *)&out_vertices[0]);
without binding model_.vertex_buf (without call glBindBuffer(GL_ARRAY_BUFFER, model_.vertex_buf);) my model renders normally.
How can I fix this problem and use a VBO to draw my vertices?