Render to Framebuffer in OpenGL ES on Android with Qt

I'm trying to render into an FBO with Qt 5.2. On the PC everything works fine, but on the mobile device the screen stays black (it's an Android phone).
Here's the code for the initialisation:
GLANN::GLANN(unsigned int width, unsigned int height, unsigned int renderPasses, Scene *renderScene,
QWidget *parent, QGLWidget *shareWidget)
: QGLWidget(parent, shareWidget)
{
makeCurrent();
setFixedWidth(width);
setFixedHeight(height);
this->width = width;
this->height = height;
qsrand((uint)QTime::currentTime().msec());
mScene = renderScene;
SceneImage = renderScene->getSceneImage();
numObjects = SceneImage->width();
renderedImage = new Playground(texSize,texSize);
}
void GLANN::initializeGL(){
setAutoBufferSwap(true);
initializeGLFunctions();
glEnable(GL_CULL_FACE);
glClearColor(0.0, 0.0, 0.0, 0.0);
initShader();
initTextures();
// Generate 2 VBOs
glGenBuffers(1, &vboId0);
glGenBuffers(1, &vboId1);
VertexData vertices[] = {
// Vertex data for face 0
{QVector3D(-1.0, -1.0, 1.0), QVector2D(0.0, 0.0)}, // v0
{QVector3D( 1.0, -1.0, 1.0), QVector2D(1.0, 0.0)}, // v1
{QVector3D(-1.0, 1.0, 1.0), QVector2D(0.0, 1.0)}, // v2
{QVector3D( 1.0, 1.0, 1.0), QVector2D(1.0, 1.0)}, // v3
};
// Transfer vertex data to VBO 0
glBindBuffer(GL_ARRAY_BUFFER, vboId0);
glBufferData(GL_ARRAY_BUFFER, 4 * sizeof(VertexData), vertices, GL_STATIC_DRAW);
GLushort indices[] = {
0, 1, 2, 3, 3,
};
// Transfer index data to VBO 1
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vboId1);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, 5 * sizeof(GLushort), indices, GL_STATIC_DRAW);
//Init the Framebuffer
initFbo();
// Use QBasicTimer because it's faster than QTimer
timer.start(0, this);
}
bool GLANN::initFbo(){
fbo = new QOpenGLFramebufferObject(texSize, texSize);
return true;
}
And here is the code for the rendering:
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
// Set random seed
program.setUniformValue("seedX", ((float)qrand()/RAND_MAX));
program.setUniformValue("seedY", ((float)qrand()/RAND_MAX));
//Set number of already rendered passes
program.setUniformValue("numRenderPass",mRenderPasses);
//Set program to fbo render mode
program.setUniformValue("fbo",true);
//Bind last rendered Image
//pixelsRenderedImage = bindTexture(*renderedImage);
//Load Identity
//glLoadIdentity();
//Move to rendering point
//glTranslatef( -1.0, -1.0, 0.0f );
// Draw geometry
// Tell OpenGL which VBOs to use
// Render to our framebuffer
fbo->bind();
glViewport(0,0,texSize,texSize);
// Tell OpenGL which VBOs to use
glBindBuffer(GL_ARRAY_BUFFER, vboId0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vboId1);
// Offset for position
int offset = 0;
// Tell OpenGL programmable pipeline how to locate vertex position data
int vertexLocation = program.attributeLocation("a_position");
program.enableAttributeArray(vertexLocation);
glVertexAttribPointer(vertexLocation, 3, GL_FLOAT, GL_FALSE, sizeof(VertexData), (const void *)offset);
// Offset for texture coordinate
offset += sizeof(QVector3D);
// Tell OpenGL programmable pipeline how to locate vertex texture coordinate data
int texcoordLocation = program.attributeLocation("a_texcoord");
program.enableAttributeArray(texcoordLocation);
glVertexAttribPointer(texcoordLocation, 2, GL_FLOAT, GL_FALSE, sizeof(VertexData), (const void *)offset);
glEnable(GL_TEXTURE_2D);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, pixelsRenderedImage);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, pixelsScene);
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, pixelsRandom);
// Draw cube geometry using indices from VBO 1
glDrawElements(GL_TRIANGLE_STRIP, 5, GL_UNSIGNED_SHORT, 0);
fbo->release();
pixelsRenderedImage = fbo->takeTexture();
//Set program to screen rendering
program.setUniformValue("fbo",false);
//Set Viewport back to default
glViewport(0,0,width,height);
//Render To Screen
glEnable(GL_TEXTURE_2D);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, pixelsRenderedImage);
// Draw quad geometry using indices from VBO 1
glDrawElements(GL_TRIANGLE_STRIP, 5, GL_UNSIGNED_SHORT, 0);
Hope it's clear what I'm trying to do here...
EDIT: Removed glEnable(GL_TEXTURE_2D).
I'm still getting a black screen, but no errors from glGetError():
GLANN::GLANN(unsigned int width, unsigned int height, unsigned int renderPasses, Scene *renderScene,
QWidget *parent, QGLWidget *shareWidget)
: QGLWidget(parent, shareWidget)
{
setFixedWidth(width);
setFixedHeight(height);
this->width = width;
this->height = height;
qsrand((uint)QTime::currentTime().msec());
mScene = renderScene;
SceneImage = renderScene->getSceneImage();
numObjects = SceneImage->width();
renderedImage = new Playground(texSize,texSize);
}
void GLANN::initializeGL(){
//setAutoBufferSwap(true);
makeCurrent();
initializeGLFunctions();
qDebug() << glGetError() << "Line 28";
//glEnable(GL_CULL_FACE);
glClearColor(0.0, 0.0, 0.0, 0.0);
initShader();
qDebug() << glGetError() << "Line 36";
initTextures();
qDebug() << glGetError() << "Line 40";
// Generate 2 VBOs
glGenBuffers(1, &vboId0);
glGenBuffers(1, &vboId1);
VertexData vertices[] = {
// Vertex data for face 0
{QVector3D(-1.0, -1.0, 1.0), QVector2D(0.0, 0.0)}, // v0
{QVector3D( 1.0, -1.0, 1.0), QVector2D(1.0, 0.0)}, // v1
{QVector3D(-1.0, 1.0, 1.0), QVector2D(0.0, 1.0)}, // v2
{QVector3D( 1.0, 1.0, 1.0), QVector2D(1.0, 1.0)}, // v3
};
// Transfer vertex data to VBO 0
glBindBuffer(GL_ARRAY_BUFFER, vboId0);
glBufferData(GL_ARRAY_BUFFER, 4 * sizeof(VertexData), vertices, GL_STATIC_DRAW);
GLushort indices[] = {
0, 1, 2, 3, 3,
};
// Transfer index data to VBO 1
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vboId1);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, 5 * sizeof(GLushort), indices, GL_STATIC_DRAW);
qDebug() << glGetError() << "Line 57";
//Init the Framebuffer
initFbo();
qDebug() << glGetError() << "Line 62";
// Use QBasicTimer because it's faster than QTimer
timer.start(0, this);
}
bool GLANN::initFbo(){
fbo = new QOpenGLFramebufferObject(texSize, texSize);
return true;
}
void GLANN::resizeGL(int w, int h){
glViewport(0,0,w,h);
}
void GLANN::mouseMoveEvent(QMouseEvent* event){
if(event->buttons() == Qt::LeftButton){
LineObject newPoly(xTemp,yTemp, 1.0f*event->pos().x()/width, 1.0f-1.0f*event->pos().y()/width,qRgba(255,255,255,255),0.4,0.5,0.3,0.0);
mScene->addObject(newPoly);
SceneImage = mScene->getSceneImage();
numObjects = SceneImage->width();
//Bind WeightmapTexture
pixelsScene = QGLWidget::bindTexture(*SceneImage);
qDebug() << xTemp << yTemp << numObjects;
//Bind last rendered Image
renderedImage->fill(qRgba(0,0,0,255));
pixelsRenderedImage = bindTexture(*renderedImage);
mRenderPasses = 0;
// Set number of Objects
program.setUniformValue("numObjects",numObjects);
xTemp = 1.0f*event->pos().x()/width;
yTemp = 1.0f-1.0f*event->pos().y()/height;
}
}
void GLANN::mousePressEvent(QMouseEvent* event){
if(event->button() == Qt::LeftButton){
xTemp = 1.0f*event->pos().x()/width;
yTemp = 1.0f-1.0f*event->pos().y()/height;
}
}
void GLANN::paintGL(){
render();
//getFeedbackTexture();
//increment number of rendered passes
mRenderPasses++;
}
void GLANN::render(){
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
// Set random seed
program.setUniformValue("seedX", ((float)qrand()/RAND_MAX));
program.setUniformValue("seedY", ((float)qrand()/RAND_MAX));
//Set number of already rendered passes
program.setUniformValue("numRenderPass",mRenderPasses);
//Set program to fbo render mode
program.setUniformValue("fbo",true);
//Bind last rendered Image
//pixelsRenderedImage = bindTexture(*renderedImage);
//Load Identity
//glLoadIdentity();
//Move to rendering point
//glTranslatef( -1.0, -1.0, 0.0f );
// Draw geometry
// Tell OpenGL which VBOs to use
// Render to our framebuffer
fbo->bind();
glViewport(0,0,texSize,texSize);
// Tell OpenGL which VBOs to use
glBindBuffer(GL_ARRAY_BUFFER, vboId0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vboId1);
// Offset for position
int offset = 0;
// Tell OpenGL programmable pipeline how to locate vertex position data
int vertexLocation = program.attributeLocation("a_position");
program.enableAttributeArray(vertexLocation);
glVertexAttribPointer(vertexLocation, 3, GL_FLOAT, GL_FALSE, sizeof(VertexData), (const void *)offset);
// Offset for texture coordinate
offset += sizeof(QVector3D);
// Tell OpenGL programmable pipeline how to locate vertex texture coordinate data
int texcoordLocation = program.attributeLocation("a_texcoord");
program.enableAttributeArray(texcoordLocation);
glVertexAttribPointer(texcoordLocation, 2, GL_FLOAT, GL_FALSE, sizeof(VertexData), (const void *)offset);
qDebug() << glGetError() << "Line 167";
//glEnable(GL_TEXTURE_2D);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, pixelsRenderedImage);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, pixelsScene);
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, pixelsRandom);
// Draw cube geometry using indices from VBO 1
glDrawElements(GL_TRIANGLE_STRIP, 5, GL_UNSIGNED_SHORT, 0);
qDebug() << glGetError() << "Line 183";
fbo->release();
pixelsRenderedImage = fbo->texture();
//Set program to screen rendering
program.setUniformValue("fbo",false);
//Set Viewport back to default
glViewport(0,0,width,height);
//Render To Screen
//glEnable(GL_TEXTURE_2D);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, pixelsRenderedImage);
// Draw quad geometry using indices from VBO 1
glDrawElements(GL_TRIANGLE_STRIP, 5, GL_UNSIGNED_SHORT, 0);
qDebug() << glGetError();
}

The problem is probably caused by this:
Qt Android. Get device screen resolution
And this:
Errors after changed OpenGL code from ES 1.0 to ES 2.0
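The first link suggests the widget may not actually get the fixed size requested in the constructor on Android, so the final glViewport(0, 0, width, height) could end up with the wrong dimensions. A minimal sketch, reusing the member names from the code above, of caching the size Qt actually reports instead of the constructor values (the assignment lines are an addition, not part of the original resizeGL):
void GLANN::resizeGL(int w, int h)
{
    // On Android the surface size is decided by the system, so take the
    // values Qt reports here rather than setFixedWidth()/setFixedHeight().
    width = w;
    height = h;
    glViewport(0, 0, w, h);
}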

Related

Black rectangle on top of android camera preview

I am working on an Android app that uses the NDK camera2 API with OpenGL.
When I launch the application on the device, a black rectangle appears at the top, although the application should run in full screen.
On the Java side, the application's architecture uses the navigation graph.
For fullscreen mode, I use this:
class MainActivity : AppCompatActivity() {
...
...
companion object {
/** Combination of all flags required to put activity into immersive mode */
const val FLAGS_FULLSCREEN=
View.SYSTEM_UI_FLAG_LOW_PROFILE or
View.SYSTEM_UI_FLAG_FULLSCREEN or
View.SYSTEM_UI_FLAG_LAYOUT_STABLE or
View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY
/** Milliseconds used for UI animations */
const val ANIMATION_FAST_MILLIS = 50L
const val ANIMATION_SLOW_MILLIS = 100L
private const val IMMERSIVE_FLAG_TIMEOUT = 100L
}
On the Android side I create the texture that is used from C++:
GLES30.glGenTextures(1, textures, 0)
GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0])
surfaceTexture = SurfaceTexture(textures[0])
Shaders:
static const char* vertex_shader_src = R"(
attribute vec3 vertexPosition;
attribute vec2 uvs;
uniform mat4 texMatrix; // this from surfaceTexture getTransformMatrix
varying vec2 varUvs;
void main()
{
varUvs = (texMatrix * vec4(uvs.x, uvs.y, 0, 1.0)).xy;
gl_Position = vec4(vertexPosition, 1.0);
}
)";
static const char* fragment_shader_src = R"(
#extension GL_OES_EGL_image_external : require
precision mediump float;
uniform samplerExternalOES texSampler;
varying vec2 varUvs;
void main()
{
gl_FragColor = texture2D(texSampler, varUvs);
}
)";
Vertex and index data:
static float vertices[] {
// x, y, z, u, v
-1, -1, 0, 0, 0,
-1, 1, 0, 0, 1,
1, 1, 0, 1, 1,
1, -1, 0, 1, 0
};
static GLuint indices[] { 2, 1, 0, 0, 3, 2 };
This is the render code:
void ogl::draw_frame(const float texMat[]) {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
glClearColor(0,0,0,1);
glUseProgram(program);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture_id);
glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glUniform1i(tex_sampler, 0);
glUniformMatrix4fv(tex_matrix, 1, false, texMat);
glBindBuffer(GL_ARRAY_BUFFER, buffers[0]);
glEnableVertexAttribArray(vertex_position);
glVertexAttribPointer(vertex_position, 3, GL_FLOAT, GL_FALSE, sizeof(float) * 5, 0);
glEnableVertexAttribArray(uvs);
glVertexAttribPointer(uvs, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 5, (void *)(3 * sizeof(float)));
glViewport(0, 0, width, height);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, buffers[1]);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
}
Closed. The problem was that I used the dimensions the camera used (for example, the camera resolution is 480x640, while the actual window size is 480x752, a difference of 112 pixels).
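In other words, the glViewport call in draw_frame() has to use the size of the surface being rendered to, not the camera resolution. A minimal sketch, assuming the ogl class has access to the ANativeWindow (that part of the code is not shown above, and update_surface_size is a made-up name for the example):
#include <android/native_window.h>
#include <GLES2/gl2.h>

void ogl::update_surface_size(ANativeWindow* window) {
    // Query the real window size (e.g. 480x752) instead of reusing the
    // camera resolution (480x640), and size the viewport from it.
    width  = ANativeWindow_getWidth(window);
    height = ANativeWindow_getHeight(window);
    glViewport(0, 0, width, height);
}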

Android OpenGL UV mapping error after triangulating

I'm building an Android app to render a 3D (Wavefront .obj) model. Using tinyobjloader, I can successfully load the model.
Code:
std::vector<glm::vec3> vertices;
std::vector<glm::vec2> uvs;
std::vector<glm::vec3> normals;
tinyobj::attrib_t attrib;
std::vector<tinyobj::shape_t> shapes;
std::vector<tinyobj::material_t> materials;
for(size_t s =0; s < shapes.size(); s++)
{
size_t index_offset = 0;
for(size_t f = 0; f < shapes[s].mesh.num_face_vertices.size(); f++)
{
int fv = shapes[s].mesh.num_face_vertices[f];
for(size_t v = 0; v < fv; v++)
{
tinyobj::index_t idx = shapes[s].mesh.indices[index_offset + v];
tinyobj::real_t vx = attrib.vertices[3*idx.vertex_index+0];
tinyobj::real_t vy = attrib.vertices[3*idx.vertex_index+1];
tinyobj::real_t vz = attrib.vertices[3*idx.vertex_index+2];
tinyobj::real_t nx = attrib.normals[3*idx.normal_index+0];
tinyobj::real_t ny = attrib.normals[3*idx.normal_index+1];
tinyobj::real_t nz = attrib.normals[3*idx.normal_index+2];
tinyobj::real_t ux = attrib.texcoords[2*idx.texcoord_index+0];
tinyobj::real_t uy = attrib.texcoords[2*idx.texcoord_index+1];
vertices.push_back(glm::vec3(vx,vy,vz));
normals.push_back(glm::vec3(nx,ny,nz));
uvs.push_back(glm::vec2(ux,uy));
}
index_offset += fv;
}
}
Because the original .obj file has faces in multiple formats, e.g.:
f 1/2/3 3/2/1 3/2/3
f 1/2/3 1/3/4 1/4/5 6/7/2
I used Blender's Triangulate with the 'Beauty' option to convert quads to triangles. But the rendered result is weird.
I have built two functions: initOpenGL (run once) and render().
initOpenGL code:
glGenBuffers(1, &VBO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(glm::vec3), &vertices[0], GL_STATIC_DRAW);
glGenBuffers(1, &UVBO);
glBindBuffer(GL_ARRAY_BUFFER, UVBO);
glBufferData(GL_ARRAY_BUFFER, uvs.size() * sizeof(glm::vec2), &uvs[0], GL_STATIC_DRAW);
//Linking Vertex Attribute
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
//bind texture
glEnableVertexAttribArray(1);
glBindBuffer(GL_ARRAY_BUFFER, UVBO);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, (void*)0);
//load texture
//texture1
glGenTextures(1, &texture1);
glBindTexture(GL_TEXTURE_2D, texture1);
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE );
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
if(patternSrc1)
{
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, patWidth1, patHeight1, 0, GL_RGBA, GL_UNSIGNED_BYTE,
patternSrc1);
//glGenerateMipmap(GL_TEXTURE_2D);
}
render() code:
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(shaderProgram);
camera.ProcessOneFinger(moveStpX, moveStpY);
camera.ProcessTwoFinger(move2X, move2Y);
projection = glm::perspective(camera.GetZoom(), (GLfloat)600/(GLfloat)1024, nearPlane, farPlane);
view = camera.GetViewMatrix();
glUniformMatrix4fv(glGetUniformLocation(shaderProgram, "projection"), 1, GL_FALSE, glm::value_ptr(projection));
glUniformMatrix4fv(glGetUniformLocation(shaderProgram, "view"), 1, GL_FALSE, glm::value_ptr(view));
glm::mat4 model;
model = glm::translate(model, glm::vec3(0.0f, 0.0f, 0.0f));
GLfloat angle = 20.0f;
model = glm::rotate(model, angle, glm::vec3( 1.0f, 0.3f, 0.5f));
glUniformMatrix4fv(glGetUniformLocation(shaderProgram, "model"), 1, GL_FALSE, glm::value_ptr( model ) );
glDrawArrays(GL_TRIANGLES, 0, vertices.size());
Model detail
Texture:
Model .obj file after triangulation:
https://pastebin.com/vUjHv8Fr
Thank you!!!
This looks to me like your texture is upside down. There are basically two potential errors that could each lead to this. Most likely, the texture image itself is upside down, e.g. because the image data passed to glTexImage2D() is in the wrong order. Unlike most other APIs, OpenGL (by default) expects pixel data in row-wise order starting from the bottom row. Check the way you load the texture data to make sure it's in the right order.
If this is not the problem, then it might be that your texture coordinates are for a left-handed texture coordinate system. OpenGL, however, uses right-handed texture coordinates, where the origin is the lower-left corner of the texture image rather than the upper-left corner. I'm not a Blender guy, but there's probably an export setting for this…
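A minimal sketch of the two possible fixes, assuming an 8-bit RGBA image as in the glTexImage2D call above (the flipRowsRGBA helper is hypothetical, not part of the question's code):
#include <algorithm>
#include <vector>

// Flip the image rows in place before glTexImage2D, in case the loader
// delivers them top-to-bottom while OpenGL expects bottom-to-top.
void flipRowsRGBA(unsigned char* pixels, int width, int height) {
    const int rowBytes = width * 4;  // 4 bytes per RGBA pixel
    std::vector<unsigned char> tmp(rowBytes);
    for (int y = 0; y < height / 2; ++y) {
        unsigned char* top    = pixels + y * rowBytes;
        unsigned char* bottom = pixels + (height - 1 - y) * rowBytes;
        std::copy(top, top + rowBytes, tmp.begin());
        std::copy(bottom, bottom + rowBytes, top);
        std::copy(tmp.begin(), tmp.end(), bottom);
    }
}

// Alternatively, flip only the V coordinate while filling the UV buffer:
//   uvs.push_back(glm::vec2(ux, 1.0f - uy));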

OpenGL error 1281 after setting gl_FragColor

I just started programming OpenGL ES 2.0 and I'm currently struggling to track down an issue with setting the color of a Wavefront object I'm drawing (https://pastebin.com/cEvpj8rt). The drawing works just fine until I start to manipulate the color, at which point I'm confronted with OpenGL error 1281, and I'm unable to pinpoint the cause in my code. I've broken the shader code down to what I believe is the bare minimum required for the fragment shader to work:
void main() {
gl_FragColor = vec4(0.0, 1.0, 0.0, 1.0);
}
To eliminate any additional source of error, I am setting the color with a constant value, as can be seen above. I doubt the error lies with the simple code above; it is probably connected to the code in my adapted renderer implementation. (It is based on the renderer that came with a sample from the ARCore GitHub repo. The full code of the initial renderer can be found here: https://github.com/google-ar/arcore-android-sdk/blob/master/samples/java_arcore_hello_ar/app/src/main/java/com/google/ar/core/examples/java/helloar/rendering/ObjectRenderer.java while the adapted version can be seen here: https://pastebin.com/9cmKVnLV) Below you can find an excerpt of the code responsible for setting up and drawing the object. I suspected the issue was connected to the texturing, which is why I removed that code.
I know it's a bit much to ask for help given my lack of understanding of the matter at hand, but I'd be glad for any hint or advice at this point. The error occurs after the first draw in the following method:
public void draw(float[] cameraView, float[] cameraPerspective) {
multiplyMM(mModelViewMatrix, 0, cameraView, 0, mModelMatrix, 0);
multiplyMM(mModelViewProjectionMatrix, 0, cameraPerspective, 0, mModelViewMatrix, 0);
glUseProgram(mProgram);
glBindBuffer(GL_ARRAY_BUFFER, mVertexBufferId);
glVertexAttribPointer(mPositionAttribute, COORDS_PER_VERTEX,
GL_FLOAT, false, 0, mVerticesBaseAddress);
glVertexAttribPointer(mNormalAttribute, 3,
GL_FLOAT, false, 0, mNormalsBaseAddress);
glBindBuffer(GL_ARRAY_BUFFER, 0);
// Set the ModelViewProjection matrix in the shader.
glUniformMatrix4fv(mModelViewUniform, 1,
false, mModelViewMatrix, 0);
glUniformMatrix4fv(mModelViewProjectionUniform, 1,
false, mModelViewProjectionMatrix, 0);
glEnableVertexAttribArray(mPositionAttribute);
glEnableVertexAttribArray(mNormalAttribute);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, mIndexBufferId);
glDrawElements(GL_TRIANGLES, mIndexCount, GL_UNSIGNED_SHORT, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
glDisableVertexAttribArray(mPositionAttribute);
glDisableVertexAttribArray(mNormalAttribute);
// this is where the error is detected
OpenGlHelper.checkGLError(TAG, "After draw");
}
Here is the method used for initialization:
public void createOnGlThread(Context context) throws IOException {
InputStream objInputStream = context.getAssets()
.open(OBJ_ASSET_NAME);
Obj obj = ObjReader.read(objInputStream);
obj = ObjUtils.convertToRenderable(obj);
IntBuffer wideIndices = ObjData.getFaceVertexIndices(obj, 3);
FloatBuffer vertices = ObjData.getVertices(obj);
FloatBuffer texCoords = ObjData.getTexCoords(obj, 2);
FloatBuffer normals = ObjData.getNormals(obj);
ShortBuffer indices = ByteBuffer.allocateDirect(2 * wideIndices.limit())
.order(ByteOrder.nativeOrder()).asShortBuffer();
while (wideIndices.hasRemaining()) {
indices.put((short) wideIndices.get());
}
indices.rewind();
int[] buffers = new int[2];
glGenBuffers(2, buffers, 0);
mVertexBufferId = buffers[0];
mIndexBufferId = buffers[1];
// Load vertex buffer
mVerticesBaseAddress = 0;
mTexCoordsBaseAddress = mVerticesBaseAddress + 4 * vertices.limit();
mNormalsBaseAddress = mTexCoordsBaseAddress + 4 * texCoords.limit();
final int totalBytes = mNormalsBaseAddress + 4 * normals.limit();
glBindBuffer(GL_ARRAY_BUFFER, mVertexBufferId);
glBufferData(GL_ARRAY_BUFFER, totalBytes, null, GL_STATIC_DRAW);
glBufferSubData(GL_ARRAY_BUFFER, mVerticesBaseAddress,
4 * vertices.limit(), vertices);
glBufferSubData(GL_ARRAY_BUFFER, mTexCoordsBaseAddress,
4 * texCoords.limit(), texCoords);
glBufferSubData(GL_ARRAY_BUFFER, mNormalsBaseAddress,
4 * normals.limit(), normals);
glBindBuffer(GL_ARRAY_BUFFER, 0);
// Load index buffer
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, mIndexBufferId);
mIndexCount = indices.limit();
glBufferData(GL_ELEMENT_ARRAY_BUFFER, 2 * mIndexCount,
indices, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
OpenGlHelper.checkGLError(TAG, "OBJ buffer load");
mProgram = glCreateProgram();
glAttachShader(mProgram, OpenGlHelper.loadGLShader(TAG, context,
GL_VERTEX_SHADER, R.raw.sphere_vertex));
glAttachShader(mProgram, OpenGlHelper.loadGLShader(TAG, context,
GL_FRAGMENT_SHADER, R.raw.sphere_fragment));
glLinkProgram(mProgram);
glUseProgram(mProgram);
OpenGlHelper.checkGLError(TAG, "Program creation");
mModelViewUniform = glGetUniformLocation(mProgram, "u_ModelView");
mModelViewProjectionUniform =
glGetUniformLocation(mProgram, "u_ModelViewProjection");
mPositionAttribute = glGetAttribLocation(mProgram, "a_Position");
mNormalAttribute = glGetAttribLocation(mProgram, "a_Normal");
OpenGlHelper.checkGLError(TAG, "Program parameters");
setIdentityM(mModelMatrix, 0);
}

YUV to RGB conversion and display using OpenGL ES 2.0 from the Android NDK using shaders

I am currently working on an RTSP player on Android, using FFmpeg to connect to and decode the video stream. I would like to use OpenGL ES 2.0 to convert the YUV frames to RGB and display them, but I am stuck (it's the first time I've used OpenGL).
I will try to explain my problem clearly.
From the Android NDK I initialize an OpenGL context (from the thread I want to use to display images) using this method:
//
EGLint attribs[] = {
EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL_BLUE_SIZE, 8,
EGL_GREEN_SIZE, 8,
EGL_RED_SIZE, 8,
EGL_ALPHA_SIZE, 8,
EGL_NONE
};
EGLint contextAttrs[] = {
EGL_CONTEXT_CLIENT_VERSION, 2,
EGL_NONE
};
LOGI("Initializing context");
if((display = eglGetDisplay(EGL_DEFAULT_DISPLAY)) == EGL_NO_DISPLAY)
{
closeContext();
return;
}
if(!eglInitialize(display, 0, 0))
{
closeContext();
return;
}
if(!eglChooseConfig(display, attribs, &config, 1, &numConfigs))
{
closeContext();
return;
}
if(!eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID, &format))
{
closeContext();
return;
}
ANativeWindow_setBuffersGeometry(window, 0, 0, format);
if(!(surface = eglCreateWindowSurface(display, config, window, 0)))
{
closeContext();
return;
}
if(!(context = eglCreateContext(display, config, 0, contextAttrs)))
{
closeContext();
return;
}
if(!eglMakeCurrent(display, surface, surface, context))
{
closeContext();
return;
}
if(!eglQuerySurface(display, surface, EGL_WIDTH, &width) || !eglQuerySurface(display, surface, EGL_HEIGHT, &height))
{
closeContext();
return;
}
LOGI("EGLWIDTH : %d EGLHEIGHT : %d ", (int)width, (int)height);
isInitEGLContext = 1;
Then I set up the graphics using this method:
//
//Load Vertex and Fragment Shader, attach shader and link program
programId = createProgram(kVertexShader, kFragmentShader);
LOGI("Program id : %d error : %d",(int) programId, glGetError());
if(!programId)
{
LOGI("Could not create program");
return;
}
// get index of the generic vertex attribute bound to vPosition
positionObject = (int) glGetAttribLocation(programId, "vPosition");
// get index of the generic vertex attribute bound to vTexCoord
texturePosition = (int) glGetAttribLocation(programId, "vTexCoord");
// get the location of yTexture within the program (corresponding to program id)
yuv_texture_object[0] = glGetUniformLocation(programId, "yTexture");
// get the location of uTexture within the program
yuv_texture_object[1] = glGetUniformLocation(programId, "uTexture");
// get the location of vTexture within the program
yuv_texture_object[2] = glGetUniformLocation(programId, "vTexture");
// Setup width of each planes (display size)
stream_yuv_width[0] = 800;
stream_yuv_width[1] = 400;
stream_yuv_width[2] = 400;
// Setup height of each planes (display size)
stream_yuv_height[0] = 600;
stream_yuv_height[1] = 300;
stream_yuv_height[2] = 300;
//set the view port
glViewport(0,0,stream_yuv_width[0],stream_yuv_height[0]);
LOGI("glViewPort() %d ", glGetError());
I have hardcoded the display size (for now) until I get something that works.
The createProgram method loads the shaders, creates the program, and compiles and links the shaders successfully.
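createProgram itself is not shown here; a minimal sketch of what such a helper usually does (compile both shaders, check compile and link status, return the program id), reusing the LOGI macro from the rest of the code. This is an illustration of the usual pattern, not the actual implementation:
static GLuint compileShader(GLenum type, const char* source)
{
    GLuint shader = glCreateShader(type);
    glShaderSource(shader, 1, &source, NULL);
    glCompileShader(shader);

    GLint compiled = GL_FALSE;
    glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
    if (compiled != GL_TRUE) {
        char log[512];
        glGetShaderInfoLog(shader, sizeof(log), NULL, log);
        LOGI("Shader compile error: %s", log);
        glDeleteShader(shader);
        return 0;
    }
    return shader;
}

static GLuint createProgram(const char* vertexSource, const char* fragmentSource)
{
    GLuint vertexShader = compileShader(GL_VERTEX_SHADER, vertexSource);
    GLuint fragmentShader = compileShader(GL_FRAGMENT_SHADER, fragmentSource);
    if (!vertexShader || !fragmentShader)
        return 0;

    GLuint program = glCreateProgram();
    glAttachShader(program, vertexShader);
    glAttachShader(program, fragmentShader);
    glLinkProgram(program);

    GLint linked = GL_FALSE;
    glGetProgramiv(program, GL_LINK_STATUS, &linked);
    if (linked != GL_TRUE) {
        LOGI("Program link error");
        glDeleteProgram(program);
        return 0;
    }
    return program;
}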
Here are my shaders:
const char kVertexShader[] =
"attribute vec4 vPosition;\n"
"attribute vec2 vTexCoord;\n"
"varying vec2 v_vTexCoord;\n"
"void main() {\n"
"gl_Position = vPosition;\n"
"v_vTexCoord = vTexCoord;\n"
"}\n";
const char kFragmentShader[] =
"precision mediump float; \n"
"varying vec2 v_vTexCoord;\n"
"uniform sampler2D yTexture;\n"
"uniform sampler2D uTexture;\n"
"uniform sampler2D vTexture;\n"
"void main() {\n"
"float nx, ny; \n"
"nx = v_vTexCoord.x; \n"
"ny = v_vTexCoord.y; \n"
"float y=texture2D(yTexture, v_vTexCoord).r;\n"
"float u=texture2D(uTexture, vec2(nx / 2.0, ny / 2.0)).r;\n"
"float v=texture2D(vTexture, vec2(nx / 2.0, ny / 2.0)).r;\n"
"y = 1.1643 * (y - 0.0625);\n"
"u = u - 0.5; \n"
"v = v - 0.5; \n"
"float r=y + 1.5958 * v;\n"
"float g=y - 0.39173 * u - 0.81290 * v;\n"
"float b=y + 2.017 * u;\n"
"gl_FragColor = vec4(r, g, b, 1.0);\n"
"}\n";
const GLfloat kVertexInformation[] = {
-1.0f, 1.0f, // TexCoord 0 top left
-1.0f,-1.0f, // TexCoord 1 bottom left
1.0f,-1.0f, // TexCoord 2 bottom right
1.0f, 1.0f // TexCoord 3 top right
};
const GLshort kTextureCoordinateInformation[] = {
0, 0, // TexCoord 0 top left
0, 1, // TexCoord 1 bottom left
1, 1, // TexCoord 2 bottom right
1, 0 // TexCoord 3 top right
};
const GLuint kStride = 0;//COORDS_PER_VERTEX * 4;
const GLshort kIndicesInformation[] = {
0, 1, 2,
0, 2, 3
};
Then I set up the YUV textures and the render-to-texture objects; at this point yuv_width[i] and yuv_height[i] are set to the correct values:
void setupYUVTexture()
{
//Setup the pixel alignement
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
LOGI("glPixelStorei() : %d ", glGetError());
int i = 0;
for(i = 0 ; i < 3 ; ++i)
{
//Check if the texture already setup
if(yuv_texture_id[i] != 0)
{
glDeleteTextures(1, &yuv_texture_id[i]);
yuv_texture_id[i] = 0;
}
// Active the i texture
glActiveTexture(GL_TEXTURE0 + i);
//Generate the texture name
glGenTextures(1, &yuv_texture_id[i]);
// Bind the texture
glBindTexture(GL_TEXTURE_2D, yuv_texture_id[i]);
// Setup the texture parameters
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
//Define the texture image
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, yuv_width[i], yuv_height[i], 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
LOGI("glTexImage2D() %d ", glGetError());
}
}
void renderToTexture()
{
// Generate framebuffer object name
glGenFramebuffers(1, &frameBufferObject);
//Bind the framebuffer
glBindFramebuffer(GL_FRAMEBUFFER, frameBufferObject);
//Generate render buffer object name
glGenRenderbuffers(1, &renderBufferObject);
//Bind render buffer
glBindRenderbuffer(GL_RENDERBUFFER, renderBufferObject);
//Create and initialize render buffer for display RGBA with the same size of the viewport
glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA4, 800, 600);
//Attach render buffer to frame buffer object
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, renderBufferObject);
//Attach y plane to frame buffer object
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, yuv_texture_id[0], 0);
//Attach u plane to frame buffer object
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, yuv_texture_id[1], 0);
//Attach v plane to frame buffer object
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, yuv_texture_id[2], 0);
// Bind the framebuffer
glBindFramebuffer(GL_FRAMEBUFFER, 0);
//Check if the framebuffer is correctly setup
GLint status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
if(status != GL_FRAMEBUFFER_COMPLETE)
{
LOGI(" FBO setting fault : %d ", status);
return;
}
}
Finally, my draw-frame method:
void drawFrame()
{
LOGI("DrawFrame");
glBindFramebuffer(GL_FRAMEBUFFER, frameBufferObject);
printGLError("glBindFramebuffer");
glUseProgram(programId);
printGLError("glUseProgram");
int i = 0;
for(i = 0 ; i < 3 ; ++i)
{
glActiveTexture(GL_TEXTURE0 + i);
printGLError("glActiveTexture");
glBindTexture(GL_TEXTURE_2D, yuv_texture_object[i]);
printGLError("glBindTexture");
glUniform1i(yuv_texture_object[i], i);
printGLError("glUniform1i");
LOGI("Plan : %d Largeur : %d Hauteur : %d ", i, yuv_width[i], yuv_height[i]);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0,yuv_width[i], yuv_height[i], GL_LUMINANCE, GL_UNSIGNED_BYTE, yuv_planes[i]);
printGLError("glTexSubImage2D");
}
glVertexAttribPointer(positionObject, 2, GL_FLOAT, GL_FALSE, kStride, kVertexInformation);
printGLError("glVertexAttribPointer");
glVertexAttribPointer(texturePosition, 2, GL_SHORT, GL_FALSE, kStride, kTextureCoordinateInformation);
printGLError("glVertexAttribPointer");
glEnableVertexAttribArray(positionObject);
printGLError("glVertexAttribArray");
glEnableVertexAttribArray(texturePosition);
printGLError("glVertexAttribArray");
glBindFramebuffer(GL_FRAMEBUFFER, 0);
printGLError("glBindFramebuffer");
glDrawElements(GL_TRIANGLE_STRIP, 6, GL_UNSIGNED_SHORT, kIndicesInformation);
printGLError("glDrawElements");
eglSwapBuffers(display, surface);
printGLError("eglSwapBuffers");
}
I initialize the OpenGL textures and the other necessary attributes once; then, when a frame is decoded, I copy the Y buffer into yuv_planes[0], the U buffer into yuv_planes[1] and the V buffer into yuv_planes[2].
Once a frame is correctly decoded using FFmpeg, I call, in this order:
- initContext()
- setupGraphics()
- setupYUVTexture()
- renderToTexture()
then I call drawFrame(). Of course, once everything is initialized I call drawFrame() directly after each decoded frame.
Here is the output I have now.
The size of the image is correct, but I am stuck here: I don't understand why the displayed image is green!
Any ideas?
That's a lot of code to go through and a lot of things that can go wrong ;). To debug these kinds of issues, I would go step by step.
- First, just output a solid color (gl_FragColor = vec4(1.0, 0.5, 0.5, 1.0)) to make sure your configuration is working properly.
- Then try to output every texture in grayscale (gl_FragColor = vec4(y, y, y, 1.0)).
If all of that works, it most likely means your YUV => RGB conversion is wrong somewhere.
If that doesn't work, then I would suspect something in the texture mapping. Double-check your glTexSubImage2D call; you might need to pass a different stride or use a different coordinate system.
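For the grayscale step, a minimal sketch of a debug fragment shader written in the same string style as kFragmentShader above (kDebugFragmentShader is a made-up name for the example):
const char kDebugFragmentShader[] =
    "precision mediump float; \n"
    "varying vec2 v_vTexCoord;\n"
    "uniform sampler2D yTexture;\n"
    "void main() {\n"
    // Show only the luma plane as grayscale; a constant color such as
    // vec4(1.0, 0.0, 0.0, 1.0) is the even simpler first check.
    "  float y = texture2D(yTexture, v_vTexCoord).r;\n"
    "  gl_FragColor = vec4(y, y, y, 1.0);\n"
    "}\n";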

OpenGL ES 2.0 in Android NDK: Nothing being drawn

I have a 2D game project that I'm porting to Android that utilizes OpenGL ES 2.0. I am having trouble getting anything drawn on the screen (except for a solid color from clearing the screen). Everything renders just fine when running in my Windows environment, but of course the environment is set up differently for the different version of OpenGL.
I followed the native-activity sample and took advice from several other OpenGL ES 2.0 resources to compose what I currently have.
I have checked everything I know how to check, with no anomalous results. As mentioned, glClear works and displays the color set by glClearColor. I also know that every frame is being rendered, since changing glClearColor frame by frame displays the different colors. Of course, the application compiles properly. My textures are loaded from the proper location in the app's cache. glGetError returns GL_NO_ERROR at every step in the process, so what I am doing appears to be accepted by OpenGL. My shaders load without error. I have also tested this on a few emulators as well as my physical Android device, so it isn't localized to a specific device configuration.
I speculate that it must be some mistake in how I initialize and set up OpenGL. I am hoping someone more versed in OpenGL ES than I am will be able to help root out my problem. I am pasting the relevant sections of my code below. engine is a global struct I am presently using out of laziness.
Initializing the display
static int AND_InitDisplay() {
// Desired display attributes
const EGLint attribs[] = {
EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL_BLUE_SIZE, 8,
EGL_GREEN_SIZE, 8,
EGL_RED_SIZE, 8,
EGL_ALPHA_SIZE, 8,
EGL_DEPTH_SIZE, 16,
EGL_NONE
};
EGLint w, h, dummy, format;
EGLint numConfigs;
EGLConfig config;
EGLSurface surface;
EGLContext context;
EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
eglInitialize(display, 0, 0);
eglChooseConfig(display, attribs, &config, 1, &numConfigs);
eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID, &format);
ANativeWindow_setBuffersGeometry(engine->app->window, 0, 0, format);
surface = eglCreateWindowSurface(display, config, engine->app->window, NULL);
EGLint const attrib_list[3] = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE};
context = eglCreateContext(display, config, NULL, attrib_list);
if (eglMakeCurrent(display, surface, surface, context) == EGL_FALSE) {
LOGW("Unable to eglMakeCurrent");
return -1;
}
eglQuerySurface(display, surface, EGL_WIDTH, &w);
eglQuerySurface(display, surface, EGL_HEIGHT, &h);
engine->display = display;
engine->context = context;
engine->surface = surface;
engine->width = w;
engine->height = h;
// Initialize GL state.
glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);
return 0;
}
Drawing a frame
static void AND_drawFrame() {
if (engine->display == NULL) {
LOGW("DB E: DISPLAY IS NULL");
// No display.
return;
}
// Clearing with red color. This displays properly.
glClearColor(1.f, 0.f, 0.f, 1.f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// eglSwapBuffers results in no visible change
eglSwapBuffers(engine->display, engine->surface);
}
Example of preparing VBO data
I understand many wouldn't like the idea of using multiple VBOs for the same geometry. I would love to hear if this code isn't orthodox or is incorrect, but I am not focused on that unless it is the root of my problem.
GLfloat charPosVerts[] = {
p0.x, p0.y, 0.f,
p1.x, p0.y, 0.f,
p1.x, p1.y, 0.f,
p0.x, p0.y, 0.f,
p1.x, p1.y, 0.f,
p0.x, p1.y, 0.f
};
GLfloat charTexVerts[] = {
0.0, 0.0,
textures[texid].w, 0.0,
textures[texid].w, textures[texid].h,
0.0, 0.0,
textures[texid].w, textures[texid].h,
0.0, textures[texid].h
};
GLfloat charColorVerts[] = {
e->color.r, e->color.g, e->color.b, e->color.a,
e->color.r, e->color.g, e->color.b, e->color.a,
e->color.r, e->color.g, e->color.b, e->color.a,
e->color.r, e->color.g, e->color.b, e->color.a,
e->color.r, e->color.g, e->color.b, e->color.a,
e->color.r, e->color.g, e->color.b, e->color.a
};
glGenBuffers(1, &(e->vboPos));
glGenBuffers(1, &(e->vboTex));
glGenBuffers(1, &(e->vboColor));
glBindBuffer(GL_ARRAY_BUFFER, e->vboPos);
glBufferData(GL_ARRAY_BUFFER, sizeof(charPosVerts), charPosVerts, GL_DYNAMIC_DRAW);
glVertexAttribPointer(shaderIDs.attribPosition, 3, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(shaderIDs.attribPosition);
glBindBuffer(GL_ARRAY_BUFFER, e->vboTex);
glBufferData(GL_ARRAY_BUFFER, sizeof(charTexVerts), charTexVerts, GL_DYNAMIC_DRAW);
glVertexAttribPointer(shaderIDs.attribTexCoord, 2, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(shaderIDs.attribTexCoord);
glBindBuffer(GL_ARRAY_BUFFER, e->vboColor);
glBufferData(GL_ARRAY_BUFFER, sizeof(charColorVerts), charColorVerts, GL_DYNAMIC_DRAW);
glVertexAttribPointer(shaderIDs.attribColors, 4, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(shaderIDs.attribColors);
Example of drawing VBO
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, CORE_GetBmpOpenGLTex(texix));
glUniform1i(shaderIDs.uniTexture, 0);
// Draw the sprite
glBindBuffer(GL_ARRAY_BUFFER, e->vboPos);
glVertexAttribPointer(shaderIDs.attribPosition, 3, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(shaderIDs.attribPosition);
glBindBuffer(GL_ARRAY_BUFFER, e->vboTex);
glVertexAttribPointer(shaderIDs.attribTexCoord, 2, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(shaderIDs.attribTexCoord);
glBindBuffer(GL_ARRAY_BUFFER, e->vboColor);
glVertexAttribPointer(shaderIDs.attribColors, 4, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(shaderIDs.attribColors);
glDrawArrays(GL_TRIANGLES, 0, 18);
Vertex Shader
The shaders are very simple.
attribute vec3 position;
attribute vec2 texCoord;
attribute vec4 colors;
varying vec2 texCoordVar;
varying vec4 colorsVar;
void main() {
gl_Position = vec4(position, 1.0);
texCoordVar = texCoord;
colorsVar = colors;
}
Fragment Shader
uniform sampler2D texture;
varying vec2 texCoordVar;
varying vec4 colorsVar;
void main()
{
gl_FragColor = texture2D(texture, texCoordVar) * colorsVar;
}
Thanks for looking at this long post. Help is very much appreciated.
The posted code is not drawing anything. From the AND_drawFrame() function:
// Clearing with red color. This displays properly.
glClearColor(1.f, 0.f, 0.f, 1.f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// eglSwapBuffers results in no visible change
eglSwapBuffers(engine->display, engine->surface);
Based on this, the draw code is either never invoked, or the window is cleared after drawing, which would wipe out everything that was drawn before.
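A minimal sketch of the ordering this implies, reusing the engine struct from the question (AND_drawScene is a hypothetical name standing in for the VBO-drawing code shown earlier): clear first, then issue the draw calls, then swap, all in the same frame:
static void AND_drawFrame() {
    if (engine->display == NULL)
        return;

    // 1. Clear the color (and depth) buffer.
    glClearColor(0.f, 0.f, 0.f, 1.f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // 2. Issue the draw calls (bind program, VBOs, textures, glDrawArrays).
    AND_drawScene();

    // 3. Present the frame.
    eglSwapBuffers(engine->display, engine->surface);
}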
