OpenGL inconsistent texture creation - android

I'm currently having some issues creating a texture in OpenGL ES 2.0. For some reason the texture is offset on iOS and out of alignment on Android. I'm creating the texture from the pixel data of a .bmp image.
Errors on iOS: (screenshot)
Errors on Android: (screenshot)
This is how I load the image and create the texture:
void PolygonRenderer::renderTextures() {
    // Data read from the header of the BMP file
    unsigned char header[54]; // Each BMP file begins with a 54-byte header
    unsigned int dataPos;     // Position in the file where the actual data begins
    unsigned int width, height;
    unsigned int imageSize;   // = width*height*3
    // Actual RGB data
    unsigned char * data;

    // Open the file
    FILE * file = fopen(iconPath,"rb");
    if (!file) {
        log("Image could not be opened\n");
        return;
    }
    if (fread(header, 1, 54, file) != 54) {
        log("Not a correct BMP file");
        return;
    }
    if (header[0] != 'B' || header[1] != 'M') {
        log("Not a correct BMP file");
        return;
    }

    // Read ints from the byte array
    dataPos   = *(int*)&(header[0x0A]);
    imageSize = *(int*)&(header[0x22]);
    width     = *(int*)&(header[0x12]);
    height    = *(int*)&(header[0x16]);
    if (imageSize == 0) {
        imageSize = width * height;
    }
    log("Image size: %d, %d, total: %d", width, height, imageSize);
    if (dataPos == 0) {
        dataPos = 54;
    }

    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    glPixelStorei(GL_PACK_ALIGNMENT, 1);

    data = new unsigned char[imageSize];
    log("Elements read: %d", fread(data, 1, imageSize, file));
    fclose(file);

    glActiveTexture(GL_TEXTURE0);
    GLuint textureId;
    glGenTextures(1, &textureId);
    glBindTexture(GL_TEXTURE_2D, textureId);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB,
                 GL_UNSIGNED_BYTE, data);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    checkForGLError("Add texture:");
    log("Texture created: %d", textureId);
    loadingTexture = textureId;
}
The reason I am using GL_RGB, even though .bmp stores pixels as BGR, is that I only use the R channel as opacity and set the color externally. (Also, this is OpenGL ES 2.0, which only has GL_RGB and GL_RGBA.)
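For reference, a minimal sketch (an illustration added here, not part of the code above, and assuming an uncompressed 24-bit BMP) of how the file lays out its rows, which is what GL_UNPACK_ALIGNMENT has to agree with; the variable names besides width are hypothetical:

/* Each BMP row is padded in the file to a multiple of 4 bytes, so the stride
   stored on disk can be larger than the tightly packed width * 3 bytes that
   glTexImage2D expects when GL_UNPACK_ALIGNMENT is 1. */
unsigned int bytesPerPixel = 3;                                      /* 24-bit BMP   */
unsigned int fileRowSize   = ((width * bytesPerPixel + 3) / 4) * 4;  /* padded row   */
unsigned int tightRowSize  = width * bytesPerPixel;                  /* unpadded row */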
Rendering polygon excerpt:
glUseProgram(shader.get(1));
GLuint projectionLocation =
glGetUniformLocation(shader.get(1), "projection");
glUniformMatrix4fv(projectionLocation, 1, GL_FALSE, projectionMatrix);
GLuint viewLocation = glGetUniformLocation(shader.get(1), "view");
glBindTexture(GL_TEXTURE_2D, loadingTexture);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glEnableVertexAttribArray(2);
glUniformMatrix4fv(viewLocation, 1, GL_FALSE,
&frame.combinedMatrix[16 * loadingPolygons[1].ViewGroup]);
glBindBuffer(GL_ARRAY_BUFFER, loadingPolygons[1].Buffer);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 8 * sizeof(GLfloat),
0);
glBindBuffer(GL_ARRAY_BUFFER, loadingPolygons[1].Buffer);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 8 * sizeof(GLfloat),
(GLvoid*)(3 * sizeof(GLfloat)));
glBindBuffer(GL_ARRAY_BUFFER, loadingPolygons[1].Buffer);
glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, 8 * sizeof(GLfloat),
(GLvoid*)(6 * sizeof(GLfloat)));
checkForGLError("In Renderer");
glDrawArrays(GL_TRIANGLES, 0, 6);
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
glDisableVertexAttribArray(2);
glBindTexture(GL_TEXTURE_2D, 0);
Vertex Shader Source:
std::string textureVertex =
"attribute vec3 vertexloc; \n"
"attribute vec3 vertexcol; \n"
"attribute vec2 vertexuv; \n"
"varying vec2 TexCoords; \n"
"varying vec3 textColor; \n"
"uniform mat4 projection; \n"
"uniform mat4 view; \n"
"void main() \n"
"{ \n"
" gl_Position = projection * view * vec4(vertexloc, 1.0); \n"
" TexCoords = vertexuv; \n"
" textColor = vertexcol; \n"
"}";
Fragment Shader Source:
std::string textureFragment =
"precision mediump float; \n"
"varying vec2 TexCoords; \n"
"varying vec3 textColor; \n"
"uniform sampler2D text; \n"
"void main() \n"
"{ \n"
" vec4 sampled = vec4(1.0, 1.0, 1.0, texture2D(text, TexCoords).r); \n"
" gl_FragColor = vec4(textColor, 1.0) * sampled; \n"
"}";
Edit: Added the second loadingPolygon value:
w = 250;
h = 43;
x = sWindowWidth / 2 - w / 2;
y = - sWindowHeight / 4 - h / 2;
temp = {
x, y + h, 0.3,
textureColor.x, textureColor.y, textureColor.z,
0, 1,
x, y, 0.3,
textureColor.x, textureColor.y, textureColor.z,
0, 0,
x + w, y, 0.3,
textureColor.x, textureColor.y, textureColor.z,
1, 0,
x, y + h, 0.3,
textureColor.x, textureColor.y, textureColor.z,
0, 1,
x + w, y, 0.3,
textureColor.x, textureColor.y, textureColor.z,
1, 0,
x + w, y + h, 0.3,
textureColor.x, textureColor.y, textureColor.z,
1, 1
};
polygonRenderer.addLoadingPolygon(temp);
Edit 2: Android eglconfig:
setEGLConfigChooser(8, 8, 8, 8, 16, 0);
Edit 3: Creating a shader:
GLuint Shader::LoadShader(std::string vertexSource,
        std::string fragmentSource, std::map<int, std::string> attribs) {
    GLuint vertexShader = LoadSubShader(vertexSource, GL_VERTEX_SHADER);
    GLuint fragmentShader = LoadSubShader(fragmentSource, GL_FRAGMENT_SHADER);
    if (vertexShader == 0 || fragmentShader == 0)
        return 0;
    GLuint program = glCreateProgram();
    if (program == 0) {
        log("Error compiling shader");
        return 0;
    }
    glAttachShader(program, vertexShader);
    glAttachShader(program, fragmentShader);

    std::map<int, std::string>::iterator it;
    for (it = attribs.begin(); it != attribs.end(); it++) {
        glBindAttribLocation(program, it->first, it->second.c_str());
    }

    glLinkProgram(program);
    GLint linked;
    glGetProgramiv(program, GL_LINK_STATUS, &linked);
    if (!linked) {
        log("ERROR");
        GLint infoLen = 0;
        glGetProgramiv(program, GL_INFO_LOG_LENGTH, &infoLen);
        if (infoLen > 1) {
            char infoLog[512];
            glGetProgramInfoLog(program, infoLen, NULL, infoLog);
            log("Error linking program: %s", infoLog);
        }
        glDeleteProgram(program);
        return GL_FALSE;
    }
    // glDetachShader(program, vertexShader);
    // glDetachShader(program, fragmentShader);
    return program;
}

Related

Black rectangle on top of android camera preview

I am working on an Android app that uses the NDK camera2 API with OpenGL.
When I launch the application on the device, a black rectangle appears at the top, although the application should run in full screen.
On the Java side, the app's architecture uses the navigation graph.
For fullscreen mode, I use this:
class MainActivity : AppCompatActivity() {
    ...
    ...
    companion object {
        /** Combination of all flags required to put activity into immersive mode */
        const val FLAGS_FULLSCREEN =
            View.SYSTEM_UI_FLAG_LOW_PROFILE or
            View.SYSTEM_UI_FLAG_FULLSCREEN or
            View.SYSTEM_UI_FLAG_LAYOUT_STABLE or
            View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY

        /** Milliseconds used for UI animations */
        const val ANIMATION_FAST_MILLIS = 50L
        const val ANIMATION_SLOW_MILLIS = 100L
        private const val IMMERSIVE_FLAG_TIMEOUT = 100L
    }
On the Android side I create the texture that will be used from C++:
GLES30.glGenTextures(1, textures, 0)
GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0])
surfaceTexture = SurfaceTexture(textures[0])
Shaders:
static const char* vertex_shader_src = R"(
attribute vec3 vertexPosition;
attribute vec2 uvs;
uniform mat4 texMatrix; // this from surfaceTexture getTransformMatrix
varying vec2 varUvs;
void main()
{
varUvs = (texMatrix * vec4(uvs.x, uvs.y, 0, 1.0)).xy;
gl_Position = vec4(vertexPosition, 1.0);
}
)";
static const char* fragment_shader_src = R"(
#extension GL_OES_EGL_image_external : require
precision mediump float;
uniform samplerExternalOES texSampler;
varying vec2 varUvs;
void main()
{
gl_FragColor = texture2D(texSampler, varUvs);
}
)";
Vertex and index data:
static float vertices[] {
// x, y, z, u, v
-1, -1, 0, 0, 0,
-1, 1, 0, 0, 1,
1, 1, 0, 1, 1,
1, -1, 0, 1, 0
};
static GLuint indices[] { 2, 1, 0, 0, 3, 2 };
This is the render code:
void ogl::draw_frame(const float texMat[]) {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
glClearColor(0,0,0,1);
glUseProgram(program);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture_id);
glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glUniform1i(tex_sampler, 0);
glUniformMatrix4fv(tex_matrix, 1, false, texMat);
glBindBuffer(GL_ARRAY_BUFFER, buffers[0]);
glEnableVertexAttribArray(vertex_position);
glVertexAttribPointer(vertex_position, 3, GL_FLOAT, GL_FALSE, sizeof(float) * 5, 0);
glEnableVertexAttribArray(uvs);
glVertexAttribPointer(uvs, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 5, (void *)(3 * sizeof(float)));
glViewport(0, 0, width, height);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, buffers[1]);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
}
Closed. The problem was that I used the dimensions that the camera used (for example, the camera resolution is 480x640, while the actual window size is 480x752; the difference is 112 pixels).
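A minimal sketch of one way to account for that difference (my own illustration, not the poster's code; cameraWidth, cameraHeight, windowWidth and windowHeight are assumed variables): letterbox the preview so the camera frame keeps its aspect ratio inside the window.

/* Fit a cameraWidth x cameraHeight frame inside a windowWidth x windowHeight
   surface, preserving the aspect ratio, and center it. */
float sx = (float)windowWidth  / (float)cameraWidth;
float sy = (float)windowHeight / (float)cameraHeight;
float scale = (sx < sy) ? sx : sy;
int viewWidth  = (int)(cameraWidth  * scale);
int viewHeight = (int)(cameraHeight * scale);
/* The remaining border is exactly the area that otherwise shows up as a black bar. */
glViewport((windowWidth  - viewWidth)  / 2,
           (windowHeight - viewHeight) / 2,
           viewWidth, viewHeight);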

YUV to RGB conversion and display using opengl es 2.0 from android ndk using shaders

I am currently working on an RTSP player on Android, using FFmpeg to connect to and decode the video stream. I would like to use OpenGL ES 2.0 to convert the YUV frames to RGB and display them, but I am stuck (it's the first time I have used OpenGL).
I will try to explain my problem clearly.
From the Android NDK I initialize an OpenGL context (from the thread I want to use to display images) using this method:
//
EGLint attribs[] = {
EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL_BLUE_SIZE, 8,
EGL_GREEN_SIZE, 8,
EGL_RED_SIZE, 8,
EGL_ALPHA_SIZE, 8,
EGL_NONE
};
EGLint contextAttrs[] = {
EGL_CONTEXT_CLIENT_VERSION, 2,
EGL_NONE
};
LOGI("Initializing context");
if((display = eglGetDisplay(EGL_DEFAULT_DISPLAY)) == EGL_NO_DISPLAY)
{
closeContext();
return;
}
if(!eglInitialize(display, 0, 0))
{
closeContext();
return;
}
if(!eglChooseConfig(display, attribs, &config, 1, &numConfigs))
{
closeContext();
return;
}
if(!eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID, &format))
{
closeContext();
return;
}
ANativeWindow_setBuffersGeometry(window, 0, 0, format);
if(!(surface = eglCreateWindowSurface(display, config, window, 0)))
{
closeContext();
return;
}
if(!(context = eglCreateContext(display, config, 0, contextAttrs)))
{
closeContext();
return;
}
if(!eglMakeCurrent(display, surface, surface, context))
{
closeContext();
return;
}
if(!eglQuerySurface(display, surface, EGL_WIDTH, &width) || !eglQuerySurface(display, surface, EGL_HEIGHT, &height))
{
closeContext();
return;
}
LOGI("EGLWIDTH : %d EGLHEIGHT : %d ", (int)width, (int)height);
isInitEGLContext = 1;
Then I set up the graphics using this method:
//
//Load Vertex and Fragment Shader, attach shader and link program
programId = createProgram(kVertexShader, kFragmentShader);
LOGI("Program id : %d error : %d",(int) programId, glGetError());
if(!programId)
{
LOGI("Could not create program");
return;
}
// get index of the generic vertex attribute bound to vPosition
positionObject = (int) glGetAttribLocation(programId, "vPosition");
// get index of the generic vertex attribute bound to vTexCoord
texturePosition = (int) glGetAttribLocation(programId, "vTexCoord");
// get the location of yTexture within the program (corresponding to program id)
yuv_texture_object[0] = glGetUniformLocation(programId, "yTexture");
// get the location of uTexture within the program
yuv_texture_object[1] = glGetUniformLocation(programId, "uTexture");
// get the location of vTexture within the program
yuv_texture_object[2] = glGetUniformLocation(programId, "vTexture");
// Setup width of each planes (display size)
stream_yuv_width[0] = 800;
stream_yuv_width[1] = 400;
stream_yuv_width[2] = 400;
// Setup height of each planes (display size)
stream_yuv_height[0] = 600;
stream_yuv_height[1] = 300;
stream_yuv_height[2] = 300;
//set the view port
glViewport(0,0,stream_yuv_width[0],stream_yuv_height[0]);
LOGI("glViewPort() %d ", glGetError());
I have hardcoded the display size (for now) until I get something that works.
The createProgram method loads the shaders, creates the program, and compiles and links the shaders successfully.
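Since createProgram itself is not shown, here is a minimal reconstruction of what such a helper typically does (my own sketch assuming standard GLES 2.0 compile-and-link code, not the poster's actual implementation; error logging trimmed for brevity):

static GLuint compileShader(GLenum type, const char* source) {
    GLuint shader = glCreateShader(type);
    glShaderSource(shader, 1, &source, NULL);
    glCompileShader(shader);
    GLint compiled = 0;
    glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
    if (!compiled) { glDeleteShader(shader); return 0; }
    return shader;
}

static GLuint createProgram(const char* vertexSource, const char* fragmentSource) {
    GLuint vs = compileShader(GL_VERTEX_SHADER, vertexSource);
    GLuint fs = compileShader(GL_FRAGMENT_SHADER, fragmentSource);
    if (!vs || !fs) return 0;
    GLuint program = glCreateProgram();
    glAttachShader(program, vs);
    glAttachShader(program, fs);
    glLinkProgram(program);
    GLint linked = 0;
    glGetProgramiv(program, GL_LINK_STATUS, &linked);
    if (!linked) { glDeleteProgram(program); return 0; }
    return program;
}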
Here are my shaders:
const char kVertexShader[] =
"attribute vec4 vPosition;\n"
"attribute vec2 vTexCoord;\n"
"varying vec2 v_vTexCoord;\n"
"void main() {\n"
"gl_Position = vPosition;\n"
"v_vTexCoord = vTexCoord;\n"
"}\n";
const char kFragmentShader[] =
"precision mediump float; \n"
"varying vec2 v_vTexCoord;\n"
"uniform sampler2D yTexture;\n"
"uniform sampler2D uTexture;\n"
"uniform sampler2D vTexture;\n"
"void main() {\n"
"float nx, ny; \n"
"nx = v_vTexCoord.x; \n"
"ny = v_vTexCoord.y; \n"
"float y=texture2D(yTexture, v_vTexCoord).r;\n"
"float u=texture2D(uTexture, vec2(nx / 2.0, ny / 2.0)).r;\n"
"float v=texture2D(vTexture, vec2(nx / 2.0, ny / 2.0)).r;\n"
"y = 1.1643 * (y - 0.0625);\n"
"u = u - 0.5; \n"
"v = v - 0.5; \n"
"float r=y + 1.5958 * v;\n"
"float g=y - 0.39173 * u - 0.81290 * v;\n"
"float b=y + 2.017 * u;\n"
"gl_FragColor = vec4(r, g, b, 1.0);\n"
"}\n";
const GLfloat kVertexInformation[] = {
-1.0f, 1.0f, // TexCoord 0 top left
-1.0f,-1.0f, // TexCoord 1 bottom left
1.0f,-1.0f, // TexCoord 2 bottom right
1.0f, 1.0f // TexCoord 3 top right
};
const GLshort kTextureCoordinateInformation[] = {
0, 0, // TexCoord 0 top left
0, 1, // TexCoord 1 bottom left
1, 1, // TexCoord 2 bottom right
1, 0 // TexCoord 3 top right
};
const GLuint kStride = 0;//COORDS_PER_VERTEX * 4;
const GLshort kIndicesInformation[] = {
0, 1, 2,
0, 2, 3
};
Then I set up the YUV textures and the render-to-texture objects; at this point yuv_width[i] and yuv_height[i] are set to the correct values:
void setupYUVTexture()
{
    //Setup the pixel alignement
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    LOGI("glPixelStorei() : %d ", glGetError());
    int i = 0;
    for(i = 0 ; i < 3 ; ++i)
    {
        //Check if the texture already setup
        if(yuv_texture_id[i] != 0)
        {
            glDeleteTextures(1, &yuv_texture_id[i]);
            yuv_texture_id[i] = 0;
        }
        // Active the i texture
        glActiveTexture(GL_TEXTURE0 + i);
        //Generate the texture name
        glGenTextures(1, &yuv_texture_id[i]);
        // Bind the texture
        glBindTexture(GL_TEXTURE_2D, yuv_texture_id[i]);
        // Setup the texture parameters
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        //Define the texture image
        glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, yuv_width[i], yuv_height[i], 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
        LOGI("glTexImage2D() %d ", glGetError());
    }
}

void renderToTexture()
{
    // Generate framebuffer object name
    glGenFramebuffers(1, &frameBufferObject);
    //Bind the framebuffer
    glBindFramebuffer(GL_FRAMEBUFFER, frameBufferObject);
    //Generate render buffer object name
    glGenRenderbuffers(1, &renderBufferObject);
    //Bind render buffer
    glBindRenderbuffer(GL_RENDERBUFFER, renderBufferObject);
    //Create and initialize render buffer for display RGBA with the same size of the viewport
    glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA4, 800, 600);
    //Attach render buffer to frame buffer object
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, renderBufferObject);
    //Attach y plane to frame buffer object
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, yuv_texture_id[0], 0);
    //Attach u plane to frame buffer object
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, yuv_texture_id[1], 0);
    //Attach v plane to frame buffer object
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, yuv_texture_id[2], 0);
    // Bind the framebuffer
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
    //Check if the framebuffer is correctly setup
    GLint status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
    if(status != GL_FRAMEBUFFER_COMPLETE)
    {
        LOGI(" FBO setting fault : %d ", status);
        return;
    }
}
Finally, here is my draw-frame method:
void drawFrame()
{
    LOGI("DrawFrame");
    glBindFramebuffer(GL_FRAMEBUFFER, frameBufferObject);
    printGLError("glBindFramebuffer");
    glUseProgram(programId);
    printGLError("glUseProgram");
    int i = 0;
    for(i = 0 ; i < 3 ; ++i)
    {
        glActiveTexture(GL_TEXTURE0 + i);
        printGLError("glActiveTexture");
        glBindTexture(GL_TEXTURE_2D, yuv_texture_object[i]);
        printGLError("glBindTexture");
        glUniform1i(yuv_texture_object[i], i);
        printGLError("glUniform1i");
        LOGI("Plane : %d Width : %d Height : %d ", i, yuv_width[i], yuv_height[i]);
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, yuv_width[i], yuv_height[i], GL_LUMINANCE, GL_UNSIGNED_BYTE, yuv_planes[i]);
        printGLError("glTexSubImage2D");
    }
    glVertexAttribPointer(positionObject, 2, GL_FLOAT, GL_FALSE, kStride, kVertexInformation);
    printGLError("glVertexAttribPointer");
    glVertexAttribPointer(texturePosition, 2, GL_SHORT, GL_FALSE, kStride, kTextureCoordinateInformation);
    printGLError("glVertexAttribPointer");
    glEnableVertexAttribArray(positionObject);
    printGLError("glVertexAttribArray");
    glEnableVertexAttribArray(texturePosition);
    printGLError("glVertexAttribArray");
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
    printGLError("glBindFramebuffer");
    glDrawElements(GL_TRIANGLE_STRIP, 6, GL_UNSIGNED_SHORT, kIndicesInformation);
    printGLError("glDrawElements");
    eglSwapBuffers(display, surface);
    printGLError("eglSwapBuffers");
}
I initialize the OpenGL textures and the other necessary attributes once; then, when a frame is decoded, I copy the Y buffer into yuv_planes[0], the U buffer into yuv_planes[1] and the V buffer into yuv_planes[2].
Once a frame is correctly decoded using FFmpeg, I call in this order:
- initContext()
- setupGraphics()
- setupYUVTexture()
- renderToTexture()
then I call drawFrame(). Of course, once everything is initialized I call drawFrame() directly after each decoded frame.
Here is the output I have now (screenshot). The size of the image is correct, but I am stuck here: I don't understand why the displayed image is green!
Any ideas?
That's a lot of code to go through, and a lot of things that can go wrong ;). To debug this kind of issue, I would go step by step.
First, just output red (gl_FragColor = vec4(1.0, 0.5, 0.5, 1.0)) to make sure your configuration is working properly.
Then try to output each texture in grayscale (gl_FragColor = vec4(y, y, y, 1.0)).
If all of that works, it most likely means your YUV => RGB conversion is wrong somewhere.
If that's not working, then I would suspect something in the texture mapping. Double-check your glTexSubImage2D call; you might need to pass a different stride or use a different coordinate system.
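For the grayscale step, a minimal sketch of what the debug fragment shader could look like (kDebugFragmentShader is a hypothetical name; it reuses the varying and the Y sampler from the posted shader and is only an illustration of the suggestion above):

const char kDebugFragmentShader[] =
    "precision mediump float; \n"
    "varying vec2 v_vTexCoord; \n"
    "uniform sampler2D yTexture; \n"
    "void main() { \n"
    "  float y = texture2D(yTexture, v_vTexCoord).r; \n"
    "  gl_FragColor = vec4(y, y, y, 1.0); // show the Y plane only \n"
    "} \n";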

Slow OpenGL ES Render-to-texture ping pong with FBO on Android

I made a render-to-texture test using Qt, running it both on macOS and Android.
The test creates 2 textures and an FBO, and then, in a loop, attaches alternating textures as the render target.
On Android, though, the code is surprisingly slow: on a Samsung Galaxy Tab S it is about 30-40 times slower than on my Mac, so I suspect something is wrong.
The actual code inside the fragment shader does not seem to matter.
Adding a glClear after the glBindFramebuffer makes it a bit faster but still very slow. Any clues on where to look for the cause?
// create textures
glEnable(GL_TEXTURE_2D);
GLuint tex1, tex2;
// define texture properties
glGenTextures(1, &tex1);
glBindTexture(GL_TEXTURE_2D, tex1);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER_EXT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER_EXT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 1280, 800, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
// define texture properties
glGenTextures(1, &tex2);
glBindTexture(GL_TEXTURE_2D, tex2);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER_EXT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER_EXT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 1280, 800, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
// create framebuffer
GLuint fbo;
glGenFramebuffers(1, &fbo);
glBindFramebuffer(GL_FRAMEBUFFER, fbo);
glClear(GL_COLOR_BUFFER_BIT);
// create program
QGLShaderProgram program;
program.addShaderFromSourceFile(QGLShader::Fragment, ":/shaders/fshader.glsl");
program.addShaderFromSourceFile(QGLShader::Vertex, ":/shaders/vshader.glsl");
program.link();
program.bind();
//
float vertices[16];
int i = 0;
vertices[i++] = 0.0f; vertices[i++] = 0.0f; vertices[i++] = 0.0; vertices[i++] = 1.0;
vertices[i++] = 0.0f; vertices[i++] = 1280.0f; vertices[i++] = 0.0; vertices[i++] = 0.0;
vertices[i++] = 1280.0f; vertices[i++] = 0.0f; vertices[i++] = 1.0; vertices[i++] = 1.0;
vertices[i++] = 1280.0f; vertices[i++] = 1280.0f; vertices[i++] = 1.0; vertices[i++] = 0.0;
int vertexLocation = program.attributeLocation("a_position");
program.enableAttributeArray(vertexLocation);
glVertexAttribPointer(vertexLocation, 2, GL_FLOAT, GL_FALSE, 4*sizeof(float), (const void *)vertices);
int texcoordLocation = program.attributeLocation("a_texcoord");
program.enableAttributeArray(texcoordLocation);
glVertexAttribPointer(texcoordLocation, 2, GL_FLOAT, GL_FALSE, 4*sizeof(float), (const void *)(vertices + 2));
QMatrix4x4 textureMatrix, modelViewMatrix;
// do loop test
int count = 10000;
bool swapped = false;
cout << "Start fbo test" << endl;
QTime myTimer;
myTimer.start();
textureMatrix.setToIdentity();
program.setUniformValue("textureMatrix", textureMatrix);
modelViewMatrix.setToIdentity();
program.setUniformValue("modelViewProjectionMatrix", modelViewMatrix);
program.setUniformValue("srcTex", 0);
for(int i = 0; i < count; i++)
{
    glBindFramebuffer(GL_FRAMEBUFFER, fbo);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
                           GL_TEXTURE_2D, swapped? tex2 : tex1, 0);
    // check completeness
    GLenum status;
    status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
    switch(status)
    {
    case GL_FRAMEBUFFER_COMPLETE:
        break;
    default:
        log("Framebuffer error");
    }
    glEnable(GL_TEXTURE_2D);
    glBindTexture(GL_TEXTURE_2D, swapped? tex1 : tex2);
    // draw slab
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    swapped = !swapped;
}
This is the vertex shader:
#ifdef GL_ES
// Set default precision to medium
precision mediump int;
precision mediump float;
#endif
uniform mat4 modelViewProjectionMatrix;
uniform mat4 textureMatrix;
attribute vec4 a_position;
attribute vec2 a_texcoord;
varying vec2 v_texcoord;
void main()
{
// Calculate vertex position in screen space
gl_Position = modelViewProjectionMatrix* a_position;
v_texcoord = vec4(textureMatrix * vec4(a_texcoord, 0.0, 1.0)).xy;
}
And the fragment shader:
#ifdef GL_ES
// Set default precision to medium
precision mediump int;
precision mediump float;
#endif
uniform sampler2D srcTex;
varying vec2 v_texcoord;
void main(void)
{
vec4 f = texture2D(srcTex, v_texcoord);
gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
}
Use two FBOs, each bound to its own texture, instead of swapping attachments within the loop. Call glClear() after glBindFramebuffer() within the loop if you don't need to preserve the contents of the texture.
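A minimal sketch of that suggestion (illustrative only, reusing tex1, tex2 and count from the question): create both framebuffers once, attach each texture once, and only switch which FBO is bound inside the loop.

// Setup (once): one FBO per texture; the attachment never changes afterwards.
GLuint fbos[2];
GLuint texs[2] = { tex1, tex2 };
glGenFramebuffers(2, fbos);
for (int j = 0; j < 2; ++j) {
    glBindFramebuffer(GL_FRAMEBUFFER, fbos[j]);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
                           GL_TEXTURE_2D, texs[j], 0);
}

// Loop: ping-pong by swapping which FBO is the target and which texture is sampled.
bool swapped = false;
for (int i = 0; i < count; i++) {
    glBindFramebuffer(GL_FRAMEBUFFER, fbos[swapped ? 1 : 0]);
    glClear(GL_COLOR_BUFFER_BIT);                 // previous contents are not needed
    glBindTexture(GL_TEXTURE_2D, texs[swapped ? 0 : 1]);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    swapped = !swapped;
}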

Unable to get texture to render to quad in OpenGL ES 2.0 running on Android

When I launch the project I get an ominously black quad rendered against the expected cyan background. Using the commented-out line of code in the fragment shader, I have determined that the texture coordinates are being interpolated properly. I have tried a number of different ways of loading a texture and have always gotten the same result. The temporary bit of code that loads the 4-pixel texture is copied verbatim from an example in a book; however, I have included it as well just in case I have made an oversight.
I have attempted to remove irrelevant code, but I'm still quite new to this and continually learning the full meaning of much of the code. Additionally, much of it has been adapted from different sources, so I apologize for the messiness, inconsistent variable naming, and verbosity. I do feel like the issue is in the first several lines. Thanks in advance for any insight; even information on how I could go about debugging this would be appreciated, since I feel quite in the dark when issues come up in this project.
Draw Frame:
public void onDrawFrame(GL10 gl)
{
GLES20.glClearColor(0.0f, 1.0f, 1.0f, 1.0f);
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
update();
GLES20.glUseProgram(mProgramHandle);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureHandle);
GLES20.glUniform1i(mTextureUniformHandle, 0);
//For some reason I think the problem is in this area
Matrix.setIdentityM(mModelMatrix, 0);
quadVerts.position(mPositionOffset);
GLES20.glVertexAttribPointer(mPositionHandle, mPositionDataSize, GLES20.GL_FLOAT, false, mStrideBytes, quadVerts);
GLES20.glEnableVertexAttribArray(mPositionHandle);
quadVerts.position(mColorOffset);
GLES20.glVertexAttribPointer(mColorHandle, mColorDataSize, GLES20.GL_FLOAT, false, mStrideBytes, quadVerts);
GLES20.glEnableVertexAttribArray(mColorHandle);
GLES20.glVertexAttribPointer(mTextureCoordinateHandle, 2, GLES20.GL_FLOAT, false, 0, quadTex);
GLES20.glEnableVertexAttribArray(2);
Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
Matrix.multiplyMM(mMVPMatrix, 0, mOrthographicMatrix, 0, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_FAN, 0, 4);
//...
checkGlError("on draw frame: ");
}
Surface Changed:
public void onSurfaceChanged(GL10 glUnused, int width, int height)
{
GLES20.glViewport(0, 0, width, height);
w = width;
h = height;
final float near = 1.0f;
final float far = 10.0f;
Matrix.orthoM(mOrthographicMatrix, 0, 0, width, 0, height, near, far);
float[] pVertsData =
{
20.0f, 20.0f, 0.0f,
1.0f, 1.0f, 1.0f, 1.0f,
(float) width- 20.0f, 20.0f, 0.0f,
1.0f, 1.0f, 1.0f, 1.0f,
(float) width - 20.0f, (float) height - 20.0f, 0.0f,
1.0f, 1.0f, 1.0f, 1.0f,
20.0f, (float) height - 20.0f, 0.0f,
1.0f, 1.0f, 1.0f, 1.0f
};
quadVerts = ByteBuffer.allocateDirect(pVertsData.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
quadVerts.put(pVertsData).position(0);
float texture[] =
{
0.0f, 0.0f,
1.0f, 0.0f,
1.0f, 1.0f,
0.0f, 1.0f
};
quadTex = ByteBuffer.allocateDirect(texture.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
quadTex.put(texture).position(0);
checkGlError("surface changed: ");
}
Surface Created:
public void onSurfaceCreated(GL10 glUnused, EGLConfig config)
{
mParticleSystem = new ParticleSystem();
//GLES20.glEnable(GLES20.GL_TEXTURE_2D);
if (mTextureHandle != 1)
mTextureHandle = loadGLTexture(activeContext, resourceID);
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
final float eyeX = 0.0f;
final float eyeY = 0.0f;
final float eyeZ = 1.5f;
final float lookX = 0.0f;
final float lookY = 0.0f;
final float lookZ = -5.0f;
final float upX = 0.0f;
final float upY = 1.0f;
final float upZ = 0.0f;
Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ);
final String vertexShader =
"uniform mat4 u_MVPMatrix; \n"
+ "attribute vec4 a_Position; \n"
+ "attribute vec4 a_Color; \n"
+ "attribute vec2 a_TexCoordinate; \n"
+ "//varying vec3 v_Position; \n"
+ "varying vec4 v_Color; \n"
+ "varying vec2 v_TexCoordinate; \n"
+ "void main() \n"
+ "{ \n"
+ " v_TexCoordinate = a_TexCoordinate; \n"
+ " v_Color = a_Color; \n"
+ " gl_Position = u_MVPMatrix \n"
+ " * a_Position; \n"
+ "} \n";
final String fragmentShader =
"precision mediump float; \n"
+ "uniform sampler2D u_Texture; \n"
+ "varying vec4 v_Color; \n"
+ "varying vec2 v_TexCoordinate; \n"
+ "void main() \n"
+ "{ \n"
+ " vec4 baseColor;"
+ " baseColor = texture2D(u_Texture, v_TexCoordinate); \n"
+ " "
+ " gl_FragColor = baseColor; \n"
+ " \n"
+ " //gl_FragColor = vec4(v_TexCoordinate.x, v_TexCoordinate.y, 0.0, 1.0); \n"
+ " //gl_FragColor = v_Color; \n"
+ "} \n";
//... Compile Shaders
int programHandle = GLES20.glCreateProgram();
if (programHandle != 0)
{
    GLES20.glAttachShader(programHandle, vertexShaderHandle);
    GLES20.glAttachShader(programHandle, fragmentShaderHandle);
    GLES20.glBindAttribLocation(programHandle, 0, "a_Position");
    GLES20.glBindAttribLocation(programHandle, 1, "a_Color");
    GLES20.glBindAttribLocation(programHandle, 2, "a_TexCoordinate");
    GLES20.glLinkProgram(programHandle);
    final int[] linkStatus = new int[1];
    GLES20.glGetProgramiv(programHandle, GLES20.GL_LINK_STATUS, linkStatus, 0);
    if (linkStatus[0] == 0)
    {
        GLES20.glDeleteProgram(programHandle);
        programHandle = 0;
    }
}
if (programHandle == 0)
{
    throw new RuntimeException("Error creating program.");
}
mMVPMatrixHandle = GLES20.glGetUniformLocation(programHandle, "u_MVPMatrix");
mPositionHandle = GLES20.glGetAttribLocation(programHandle, "a_Position");
mColorHandle = GLES20.glGetAttribLocation(programHandle, "a_Color");
mTextureUniformHandle = GLES20.glGetUniformLocation(programHandle, "u_Texture");
mTextureCoordinateHandle = GLES20.glGetAttribLocation(programHandle, "a_TexCoordinate");
//GLES20.glUseProgram(programHandle);
mProgramHandle = programHandle;
checkGlError("surface created: ");
}
Load Texture:
private int loadGLTexture(Context context, final int resourceId)
{
    final int[] textureHandle = new int[1];
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
    GLES20.glGenTextures(1, textureHandle, 0);
    byte[] pixels =
    {
        (byte) 0xff, 0, 0,
        0, (byte) 0xff, 0,
        0, 0, (byte) 0xff,
        (byte) 0xff, (byte) 0xff, 0
    };
    ByteBuffer pixelBuffer = ByteBuffer.allocateDirect(4*3);
    pixelBuffer.put(pixels).position(0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);
    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, 2, 2, 0, GLES20.GL_RGB, GLES20.GL_UNSIGNED_BYTE, pixelBuffer);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
    if (textureHandle[0] == 0)
    {
        throw new RuntimeException("Error loading texture.");
    }
    checkGlError("create texture: ");
    return textureHandle[0];
}
Code updated as suggested below.
Finally located the problem. I removed a lot of commented-out lines from the posted code for simplicity's sake. One of them was the line in the fragment shader directly before "gl_FragColor = baseColor;". However, that line did not end with '\n'... so in effect I had commented out the line that was supposed to actually put the texture on the quad. The code above will therefore run properly, while the code that was in my project would not.
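In other words, the two source lines collapse into one commented-out line. A tiny illustration of the effect (hypothetical comment text, shown with C string-literal concatenation; Java's "+" concatenation behaves the same way here):

/* Because the first literal has no trailing "\n", both pieces end up on one
   GLSL source line, so the assignment is swallowed by the comment: */
const char* fragmentPiece =
    " // old debug line"              /* no \n at the end of this literal...       */
    " gl_FragColor = baseColor; \n";  /* ...so this lands on the commented-out line */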
A couple of thoughts. I don't know if your problem is in here, but here goes:
You're not doing any error checking with glGetError (you should do this). It will help you find so many problems.
GLES20.glEnable(GLES20.GL_TEXTURE_2D); is not a legal call in GLES 2.0. Enabling GL_TEXTURE_2D only affects the deprecated fixed-function pipeline. This is likely generating an error, but shouldn't cause your problem.
Can you try adding error checking, and report back if there are any problems? I scanned your code a bit, but it looks pretty correct so far.
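For reference, a minimal sketch of the kind of check being suggested (written against the C API for consistency with the other snippets here; on the Java side the equivalent loops on GLES20.glGetError(), and the helper name is hypothetical):

#include <stdio.h>
#include <GLES2/gl2.h>

/* Log every pending GL error after a call of interest. */
static void logGlErrors(const char* op) {
    for (GLenum error = glGetError(); error != GL_NO_ERROR; error = glGetError()) {
        printf("%s: glError 0x%x\n", op, error);
    }
}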

Not working OpenGL ES shader, call glLinkProgram every frame?

I'm trying to render a transparent object in OpenGL ES 2.0. It's a live wallpaper; I'm using GLWallpaperService as the base class. I'm setting up OpenGL this way:
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
GLES20.glDepthFunc(GLES20.GL_GEQUAL);
GLES20.glClearDepthf(0.0f);
GLES20.glClearColor(0.0f, 0.0f, 1.0f, 1.0f);
The shaders I use are taken from the PowerVR sample OGLES2AlphaTest for alpha testing, which works fine on my HTC Desire device.
Here is the code for the shaders:
private final String mVertexShader = "uniform highp mat4 uMVPMatrix;\n" +
"attribute highp vec4 aPosition;\n" +
"attribute highp vec2 aTextureCoord;\n" +
"varying mediump vec2 vTextureCoord;\n" +
"void main() {\n" +
" gl_Position = uMVPMatrix * aPosition;\n" +
" vTextureCoord = aTextureCoord;\n" +
"}\n";
private final String mFragmentShader = "precision mediump float;\n" +
"varying mediump vec2 vTextureCoord;\n" +
"uniform sampler2D sTexture;\n" +
"void main() {\n" +
" vec4 base = texture2D(sTexture, vTextureCoord);\n" +
" if(base.a < 0.5){ discard; }\n" +
" gl_FragColor = base;\n" +
"}\n";
private int createProgram(String vertexSource, String fragmentSource) {
    int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
    if (vertexShader == 0) {
        return 0;
    }
    int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
    if (pixelShader == 0) {
        return 0;
    }
    int program = GLES20.glCreateProgram();
    if (program != 0) {
        GLES20.glAttachShader(program, vertexShader);
        checkGlError("glAttachShader");
        GLES20.glAttachShader(program, pixelShader);
        checkGlError("glAttachShader");
        GLES20.glLinkProgram(program);
        int[] linkStatus = new int[1];
        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
        if (linkStatus[0] != GLES20.GL_TRUE) {
            Log.e(TAG, "Could not link program: ");
            Log.e(TAG, GLES20.glGetProgramInfoLog(program));
            GLES20.glDeleteProgram(program);
            program = 0;
        }
    }
    return program;
}

private int loadShader(int shaderType, String source) {
    int shader = GLES20.glCreateShader(shaderType);
    if (shader != 0) {
        GLES20.glShaderSource(shader, source);
        GLES20.glCompileShader(shader);
        int[] compiled = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
        if (compiled[0] == 0) {
            Log.e(TAG, "Could not compile shader " + shaderType + ":");
            Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
            GLES20.glDeleteShader(shader);
            shader = 0;
        }
    }
    return shader;
}
Code for rendering frame:
public void onDrawFrame(GL10 glUnused) {
//GLES20.glLinkProgram(mProgram);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
GLES20.glUseProgram(mProgram);
checkGlError("glUseProgram");
GLES20.glDisable(GLES20.GL_BLEND);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureID);
mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
checkGlError("glVertexAttribPointer maPosition");
mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
GLES20.glEnableVertexAttribArray(maPositionHandle);
checkGlError("glEnableVertexAttribArray maPositionHandle");
GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
checkGlError("glVertexAttribPointer maTextureHandle");
GLES20.glEnableVertexAttribArray(maTextureHandle);
checkGlError("glEnableVertexAttribArray maTextureHandle");
long time = SystemClock.uptimeMillis() % 4000L;
float angle = 0.090f * ((int) time);
time = SystemClock.uptimeMillis();// % 4000L;
angle = 0.030f * ((int) time);
// Matrix.setRotateM(mMMatrix, 0, angle, 0, 0, 1.0f);
Matrix.setRotateM(mMMatrix, 0, angle, 0, 1.0f, 0);
Matrix.scaleM(mMMatrix, 0, 0.075f, 0.075f, 0.075f);
Matrix.multiplyMM(mMVPMatrix, 0, mVMatrix, 0, mMMatrix, 0);
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, numPolys);
checkGlError("glDrawArrays");
}
Without GLES20.glLinkProgram(mProgram); at the start of onDrawFrame() I get an incorrect result (with incorrect transparency): http://imgur.com/kESeO
When I add GLES20.glLinkProgram(mProgram); at the start of onDrawFrame() it renders correctly, but the performance is bad, since glLinkProgram() is a time-consuming function. Here is a screenshot of the correct rendering: http://imgur.com/sw83z
Please explain what I am doing wrong; surely I don't have to call glLinkProgram() on every frame redraw.
You don't need to link your program each frame; link it once and store its id somewhere. Then call GLES20.glUseProgram(_programId); before you issue the drawing calls that correspond to that program.
Check this for an already implemented approach: https://github.com/TraxNet/ShadingZen/blob/master/library/src/main/java/org/traxnet/shadingzen/core/ShadersProgram.java (The bindProgram method is what you are after).
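For reference, a minimal sketch of that pattern (shown with the C API for consistency with the other snippets here, not the linked library's code; the GLES20 Java calls map one-to-one and createProgram is the question's own helper):

/* At init time (e.g. when the surface is created): compile and link exactly once. */
GLuint program = createProgram(vertexSource, fragmentSource);   /* keep this id */

/* Every frame: no glLinkProgram() here, just select the stored program. */
glUseProgram(program);
/* ... set uniforms, bind textures and vertex attributes, then draw ... */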
The cause of this misbehavior was not related to glLinkProgram(). I've modified some other GL init parameters (can't recall which at the moment) and now it works just fine.
