Texture not rendering properly in OpenGL ES 2 on Android

I'm having an issue rendering textures in OpenGL ES 2 on Android. The image is being drawn, but the texture isn't wrapping correctly, by the look of it.
I have tried all the usual things to fix the issue but nothing has worked.
Here's how one of the images should look:
But here's how they look on the screen:
Ignore the black border that's part of the texture.
Here is my Texture class:
public class HFTexture {
private int width;
private int height;
private int textureId;
private HFGame game;
private String textureFile;
public HFTexture(HFGame game, String textureFile) {
this.game = game;
this.textureFile = textureFile;
//load();
}
public void load() {
int[] texIds = new int[1];
GLES20.glGenTextures(1, texIds, 0);
textureId = texIds[0];
InputStream in;
try {
in = game.getFileManager().getAsset(textureFile);
Bitmap bitmap = BitmapFactory.decodeStream(in);
width = bitmap.getWidth();
height = bitmap.getHeight();
bind();
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
bitmap.recycle();
} catch(IOException ex) {
}
}
public void bind() {
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
}
public void activate(HFShader shader, int texture) {
GLES20.glActiveTexture(texture);
bind();
GLES20.glUniform1i(shader.getHandle("sampler0"), 0);
}
public void delete() {
bind();
int[] textureIds = {textureId};
GLES20.glDeleteTextures(1, textureIds, 0);
}
}
Here is my Vertices class:
public class Vertices {
private FloatBuffer vertexBuffer;
private FloatBuffer normalBuffer;
private FloatBuffer texCoordBuffer;
private ShortBuffer indexBuffer;
private final int VERTEX_COUNT;
private final int VERTEX_STRIDE;
private final int VERTEX_SIZE = 3;
private final int NORMAL_STRIDE;
private final int NORMAL_SIZE = 3;
private final int TEXTURE_COORD_STRIDE;
private final int TEXTURE_COORD_SIZE = 2;
private final int INDEX_COUNT;
public Vertices(float[] vertices, float[] normals, float[] texCoords, short[] indices) {
VERTEX_STRIDE = VERTEX_SIZE * 4;
NORMAL_STRIDE = NORMAL_SIZE * 4;
TEXTURE_COORD_STRIDE = TEXTURE_COORD_SIZE * 4;
VERTEX_COUNT = vertices.length;
INDEX_COUNT = indices.length;
ByteBuffer bb = ByteBuffer.allocateDirect(VERTEX_COUNT * VERTEX_STRIDE);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(vertices);
vertexBuffer.position(0);
bb = ByteBuffer.allocateDirect(normals.length * NORMAL_STRIDE);
bb.order(ByteOrder.nativeOrder());
normalBuffer = bb.asFloatBuffer();
normalBuffer.put(normals);
normalBuffer.position(0);
bb = ByteBuffer.allocateDirect(texCoords.length * TEXTURE_COORD_STRIDE);
bb.order(ByteOrder.nativeOrder());
texCoordBuffer = bb.asFloatBuffer();
texCoordBuffer.put(texCoords);
texCoordBuffer.position(0);
bb = ByteBuffer.allocateDirect(indices.length * 2);
bb.order(ByteOrder.nativeOrder());
indexBuffer = bb.asShortBuffer();
indexBuffer.put(indices);
indexBuffer.position(0);
}
public void bind(HFShader shader) {
int positionHandle = shader.getHandle("position");
int normalHandle = shader.getHandle("normal");
int texCoordHandle = shader.getHandle("texCoord");
GLES20.glEnableVertexAttribArray(positionHandle);
GLES20.glVertexAttribPointer(
positionHandle, VERTEX_SIZE,
GLES20.GL_FLOAT, false,
VERTEX_STRIDE, vertexBuffer);
GLES20.glEnableVertexAttribArray(normalHandle);
GLES20.glVertexAttribPointer(
normalHandle, NORMAL_SIZE,
GLES20.GL_FLOAT, false,
NORMAL_STRIDE, normalBuffer);
GLES20.glEnableVertexAttribArray(texCoordHandle);
GLES20.glVertexAttribPointer(
texCoordHandle, TEXTURE_COORD_SIZE,
GLES20.GL_FLOAT, false,
TEXTURE_COORD_STRIDE, vertexBuffer);
}
public void unbind(HFShader shader) {
int positionHandle = shader.getHandle("position");
int normalHandle = shader.getHandle("normal");
int texCoordHandle = shader.getHandle("texCoord");
GLES20.glDisableVertexAttribArray(positionHandle);
GLES20.glDisableVertexAttribArray(normalHandle);
GLES20.glDisableVertexAttribArray(texCoordHandle);
}
public void draw() {
if(indexBuffer != null) {
GLES20.glDrawElements(GLES20.GL_TRIANGLES, INDEX_COUNT, GLES20.GL_UNSIGNED_SHORT, indexBuffer);
} else {
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, VERTEX_COUNT);
}
}
}
And here is my Vertex data:
float[] verts = {
-(width / 2f), (height / 2f), 0f, // index 0
-(width / 2f), -(height / 2f), 0f, // index 1
(width / 2f), -(height / 2f), 0f, // index 2
(width / 2f), (height / 2f), 0f // index 3
};
float[] norms = {
0.0f, 0.0f, -1.0f,
0.0f, 0.0f, -1.0f,
0.0f, 0.0f, -1.0f,
0.0f, 0.0f, -1.0f
};
float[] texCoords = {
0f, 1f,
0f, 0f,
1f, 0f,
1f, 1f
};
short[] indices = {
0,1,2,2,3,0
};
I've tried adding the clamp-to-edge texture parameters as well, but that didn't seem to help. Have I just put the vertex and texture coords in the wrong order, or is there something I'm missing altogether?

You are passing your vertex buffer instead of your texture coordinate buffer for the texture coordinates:
GLES20.glEnableVertexAttribArray(texCoordHandle);
GLES20.glVertexAttribPointer(
texCoordHandle, TEXTURE_COORD_SIZE,
GLES20.GL_FLOAT, false,
TEXTURE_COORD_STRIDE, vertexBuffer); // <-- here
should be:
GLES20.glEnableVertexAttribArray(texCoordHandle);
GLES20.glVertexAttribPointer(
texCoordHandle, TEXTURE_COORD_SIZE,
GLES20.GL_FLOAT, false,
TEXTURE_COORD_STRIDE, texCoordBuffer);
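Because the texture coordinate attribute was reading values from the position array (on the order of ±width/2 and ±height/2) instead of coordinates in the 0..1 range, the output looks like a wrapping problem rather than a loading problem.
Separately, if you still want the clamp-to-edge behaviour mentioned in the question, the wrap parameters belong next to the filter parameters in load(). A minimal sketch (a hypothetical helper, not part of the original class) that clamps whatever texture is currently bound:
private static void setClampToEdge() {
    // Clamp both axes; in OpenGL ES 2.0 this is also required for
    // non-power-of-two textures (together with non-mipmapped minification filters).
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
}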

Related

Changing background color in OpenGL ES Android changes color of texture

If I change the background with GLES20.glClearColor(1.0f, 1.0f, 0.0f, 1.0f) and then try to draw a texture, the colors of the texture change unexpectedly. This is the PNG file:
The result of the application when I simply try to display it is this one:
I'm using this code:
public class GLRenderer implements GLSurfaceView.Renderer {
private static final String TAG = "MyGLRenderer";
private float[] vertices = {
-1f, -1f,
1f, -1f,
-1f, 1f,
1f, 1f
};
private float[] textureVertices = {
0f, 1f,
1f, 1f,
0f, 0f,
1f, 0f
};
private final String vertexShaderCode =
"attribute vec4 aPosition;" +
"attribute vec2 aTexPosition;" +
"varying vec2 vTexPosition;" +
"void main() {" +
" gl_Position = aPosition;" +
" vTexPosition = aTexPosition;" +
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform sampler2D uTexture;" +
"varying vec2 vTexPosition;" +
"void main() {\n" +
"vec4 color = texture2D(uTexture, vTexPosition);\n"+
//"if(color.r == 0.0 && color.g == 0.0 && color.b == 0.0)\n"+
// "color = vec4(1.0,0.5,0.5,1.0);"+
// "discard;"+
" gl_FragColor = color;" +
"}";
private FloatBuffer verticesBuffer;
private FloatBuffer textureBuffer;
private int vertexShader;
private int fragmentShader;
private int program;
private Bitmap bmp;
private int textures[] = new int[2];
// mMVPMatrix is an abbreviation for "Model View Projection Matrix"
private final float[] mMVPMatrix = new float[16];
private final float[] mProjectionMatrix = new float[16];
private final float[] mViewMatrix = new float[16];
private final float[] mRotationMatrix = new float[16];
public GLRenderer() {
bmp=Bitmap.createBitmap(513,912, Bitmap.Config.ARGB_8888);
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
GLES20.glClearColor(1.0f, 1.0f, 0.0f, 1.0f);
checkGlError("glClearColor");
setup();
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width / height;
Matrix.frustumM(mProjectionMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
}
@Override
public void onDrawFrame(GL10 gl) {
Log.d("Drawing_Frame","Working");
float[] scratch = new float[16];
// Draw background color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
// Set the camera position (View matrix)
Matrix.setLookAtM(mViewMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
// Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mViewMatrix, 0);
// Draw Bitmap
drawBinaryImage(bmp,textures[0]);
Matrix.setRotateM(mRotationMatrix, 0, 0, 0, 0, 1.0f);
Matrix.multiplyMM(scratch, 0, mMVPMatrix, 0, mRotationMatrix, 0);
}
private void setup(){
GLES20.glGenTextures(2, textures, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bmp, 0);
//GLES20.glBindTexture(GL);
initializeBuffers();
initializeProgram();
}
private void initializeBuffers() {
ByteBuffer buff = ByteBuffer.allocateDirect(vertices.length * 4);
buff.order(ByteOrder.nativeOrder());
verticesBuffer = buff.asFloatBuffer();
verticesBuffer.put(vertices);
verticesBuffer.position(0);
buff = ByteBuffer.allocateDirect(textureVertices.length * 4);
buff.order(ByteOrder.nativeOrder());
textureBuffer = buff.asFloatBuffer();
textureBuffer.put(textureVertices);
textureBuffer.position(0);
}
private void initializeProgram() {
vertexShader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
GLES20.glGetShaderInfoLog(vertexShader);
checkGlError("glCreateShader");
GLES20.glShaderSource(vertexShader, vertexShaderCode);
GLES20.glCompileShader(vertexShader);
fragmentShader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
GLES20.glShaderSource(fragmentShader, fragmentShaderCode);
GLES20.glCompileShader(fragmentShader);
program = GLES20.glCreateProgram();
GLES20.glAttachShader(program, vertexShader);
GLES20.glAttachShader(program, fragmentShader);
GLES20.glLinkProgram(program);
checkGlError("glLinkProgram");
}
public void updateTexture(Bitmap bmp){
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bmp, 0);
}
private void drawBinaryImage(Bitmap bmp,int texture){
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
GLES20.glUseProgram(program);
//Changes Here original Line GLES20.glDisable(GLES20.GL_BLEND);
GLES20.glDisable(GLES20.GL_CULL_FACE);
GLES20.glDisable(GLES20.GL_DEPTH_TEST);
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ONE);
int positionHandle = GLES20.glGetAttribLocation(program, "aPosition");
int textureHandle = GLES20.glGetUniformLocation(program, "uTexture");
int texturePositionHandle = GLES20.glGetAttribLocation(program, "aTexPosition");
//Log.d("GL_SETUP",positionHandle+" , "+textureHandle);
GLES20.glVertexAttribPointer(texturePositionHandle, 2, GLES20.GL_FLOAT, false, 0, textureBuffer);
GLES20.glEnableVertexAttribArray(texturePositionHandle);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture);
Log.d("FILTER_APPLY","Applying");
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER,GLES20.GL_LINEAR);
GLES20.glUniform1i(textureHandle, 0);
GLES20.glVertexAttribPointer(positionHandle, 2, GLES20.GL_FLOAT, false, 0, verticesBuffer);
GLES20.glEnableVertexAttribArray(positionHandle);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
checkGlError("glDrawArrays");
}
public void setBitmap(Bitmap bitmap){
updateTexture(bitmap);
this.bmp = bitmap;
}
public static void checkGlError(String glOperation) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, glOperation + ": glError " + error);
throw new RuntimeException(glOperation + ": glError " + error);
}
}
}
The clear color is set by GLES20.glClearColor(1.0f, 1.0f, 0.0f, 1.0f);.
RGBA(1, 1, 0, 1) is yellow, so before the texture is rendered the entire framebuffer is filled with yellow.
The texture contains a blue color RGBA(0, 0, 1, 1) and a black color RGBA(0, 0, 0, 1).
When the quad with the texture is drawn, then blending is enabled with the following function:
(see Blending and glBlendFunc)
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ONE);
When blending, the current color in the framebuffer is mixed with the color that is being drawn. With the above setup this is done by the following equation:
destinationColor = sourceColor * 1 + destinationColor * 1
In the regions where the texture is blue, the final color becomes white:
(1, 1, 0) * 1 + (0, 0, 1) * 1 = (1, 1, 1)
In the regions where the texture is black, the color in the framebuffer stays yellow:
(1, 1, 0) * 1 + (0, 0, 0) * 1 = (1, 1, 0)
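If the intention is simply to display the texture with its own colors, a minimal change (assuming no transparency is actually needed) is to disable blending before the draw call, or to switch to standard alpha blending instead of the additive setup:
// Option 1: ignore whatever is already in the framebuffer
GLES20.glDisable(GLES20.GL_BLEND);

// Option 2: keep blending, but weight by the source alpha instead of adding the colors
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
With GL_SRC_ALPHA / GL_ONE_MINUS_SRC_ALPHA the equation becomes destinationColor = sourceColor * alpha + destinationColor * (1 - alpha), so fully opaque texels simply replace the yellow background.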

How to draw a shapes in OpenGL from a Binary Image?

I need to draw a shape in OpenGL from a binary image. For example, I have this binary image
and I need to draw this shape in OpenGL. The thing is that I need to do this "dynamically": I will have, for example, a different binary image every second, and I need to draw the shapes from every one of those images. The shapes will not only be triangles like in the example image. I will use this in Android Studio.
Convert the image to a Bitmap object and draw it on the GLSurfaceView. You can check the condition in the shader code like this:
vec4 color = texture2D(uTexture, vTexPosition);
if(color.r == 0.0 && color.g == 0.0 && color.b == 0.0)
color = vec4(1.0,0.5,0.5,1.0);
gl_FragColor = color;
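If the black pixels should instead become transparent holes (like the commented-out variant in the question above), the same test can discard those fragments rather than recolor them; a sketch of that alternative shader body:
vec4 color = texture2D(uTexture, vTexPosition);
if(color.r == 0.0 && color.g == 0.0 && color.b == 0.0)
    discard; // drop black pixels entirely instead of recoloring them
gl_FragColor = color;
Note that a discarded fragment simply shows whatever was drawn or cleared behind the quad, so the visible effect depends on the clear color.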
This is the full GLRenderer
public class GLRenderer implements GLSurfaceView.Renderer {
private static final String TAG = "MyGLRenderer";
private float[] vertices = {
-1f, -1f,
1f, -1f,
-1f, 1f,
1f, 1f
};
private float[] textureVertices = {
0f, 1f,
1f, 1f,
0f, 0f,
1f, 0f
};
private final String vertexShaderCode =
"attribute vec4 aPosition;" +
"attribute vec2 aTexPosition;" +
"varying vec2 vTexPosition;" +
"void main() {" +
" gl_Position = aPosition;" +
" vTexPosition = aTexPosition;" +
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform sampler2D uTexture;" +
"varying vec2 vTexPosition;" +
"void main() {\n" +
"vec4 color = texture2D(uTexture, vTexPosition);\n"+
"if(color.r == 0.0 && color.g == 0.0 && color.b == 0.0)\n"+
"color = vec4(1.0,0.5,0.5,1.0);"+
" gl_FragColor = color;" +
"}";
private FloatBuffer verticesBuffer;
private FloatBuffer textureBuffer;
private int vertexShader;
private int fragmentShader;
private int program;
private Bitmap bmp;
private int textures[] = new int[2];
// mMVPMatrix is an abbreviation for "Model View Projection Matrix"
private final float[] mMVPMatrix = new float[16];
private final float[] mProjectionMatrix = new float[16];
private final float[] mViewMatrix = new float[16];
private final float[] mRotationMatrix = new float[16];
public GLRenderer() {
bmp=Bitmap.createBitmap(513,912, Bitmap.Config.ARGB_8888);
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
checkGlError("glClearColor");
setup();
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width / height;
Matrix.frustumM(mProjectionMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
}
@Override
public void onDrawFrame(GL10 gl) {
Log.d("Drawing_Frame","Working");
float[] scratch = new float[16];
// Draw background color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
// Set the camera position (View matrix)
Matrix.setLookAtM(mViewMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
// Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mViewMatrix, 0);
// Draw Bitmap
drawBinaryImage(bmp,textures[0]);
Matrix.setRotateM(mRotationMatrix, 0, 0, 0, 0, 1.0f);
Matrix.multiplyMM(scratch, 0, mMVPMatrix, 0, mRotationMatrix, 0);
}
private void setup(){
GLES20.glGenTextures(2, textures, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bmp, 0);
//GLES20.glBindTexture(GL);
initializeBuffers();
initializeProgram();
}
private void initializeBuffers() {
ByteBuffer buff = ByteBuffer.allocateDirect(vertices.length * 4);
buff.order(ByteOrder.nativeOrder());
verticesBuffer = buff.asFloatBuffer();
verticesBuffer.put(vertices);
verticesBuffer.position(0);
buff = ByteBuffer.allocateDirect(textureVertices.length * 4);
buff.order(ByteOrder.nativeOrder());
textureBuffer = buff.asFloatBuffer();
textureBuffer.put(textureVertices);
textureBuffer.position(0);
}
private void initializeProgram() {
vertexShader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
GLES20.glGetShaderInfoLog(vertexShader);
checkGlError("glCreateShader");
GLES20.glShaderSource(vertexShader, vertexShaderCode);
GLES20.glCompileShader(vertexShader);
fragmentShader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
GLES20.glShaderSource(fragmentShader, fragmentShaderCode);
GLES20.glCompileShader(fragmentShader);
program = GLES20.glCreateProgram();
GLES20.glAttachShader(program, vertexShader);
GLES20.glAttachShader(program, fragmentShader);
GLES20.glLinkProgram(program);
checkGlError("glLinkProgram");
}
public void updateTexture(Bitmap bmp){
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bmp, 0);
}
private void drawBinaryImage(Bitmap bmp,int texture){
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
GLES20.glUseProgram(program);
//Changes Here original Line GLES20.glDisable(GLES20.GL_BLEND);
GLES20.glDisable(GLES20.GL_CULL_FACE);
GLES20.glDisable(GLES20.GL_DEPTH_TEST);
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ONE);
int positionHandle = GLES20.glGetAttribLocation(program, "aPosition");
int textureHandle = GLES20.glGetUniformLocation(program, "uTexture");
int texturePositionHandle = GLES20.glGetAttribLocation(program, "aTexPosition");
//Log.d("GL_SETUP",positionHandle+" , "+textureHandle);
GLES20.glVertexAttribPointer(texturePositionHandle, 2, GLES20.GL_FLOAT, false, 0, textureBuffer);
GLES20.glEnableVertexAttribArray(texturePositionHandle);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture);
Log.d("FILTER_APPLY","Applying");
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER,GLES20.GL_LINEAR);
GLES20.glUniform1i(textureHandle, 0);
GLES20.glVertexAttribPointer(positionHandle, 2, GLES20.GL_FLOAT, false, 0, verticesBuffer);
GLES20.glEnableVertexAttribArray(positionHandle);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
checkGlError("glDrawArrays");
}
public void setBitmap(Bitmap bitmap){
updateTexture(bitmap);
this.bmp = bitmap;
}
public static void checkGlError(String glOperation) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, glOperation + ": glError " + error);
throw new RuntimeException(glOperation + ": glError " + error);
}
}
}
Use this renderer with a GLSurfaceView.
Here is the main Activity class:
public class MainActivity extends AppCompatActivity {
FrameLayout glView;
private GLRenderer renderer;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
glView = findViewById(R.id.glview);
setupGL();
}
private void setupGL(){
renderer = new GLRenderer();
//MyGlSurfaceView glSurfaceView = new MyGlSurfaceView(this,renderer);
GLSurfaceView glSurfaceView = new GLSurfaceView(this);
glSurfaceView.setEGLContextClientVersion(2);
glSurfaceView.setRenderer(renderer);
glView.addView(glSurfaceView);
produceFrame();
}
private void produceFrame(){
Bitmap bmp = BitmapFactory.decodeResource(getResources(),R.drawable.sample);
renderer.setBitmap(bmp);
}
}
You can check this for the complete project.

How to set correctly Matrix.orthoM for a textured sprite?

I am testing drawing a textured sprite with OpenGL ES 2.0 on Android.
The demo works fine when I set the ortho projection from -scr_w/2 to +scr_w/2 and -scr_h/2 to +scr_h/2, with the origin in the middle of the screen.
However, if I switch to the coordinate system I expect, 480 (scr_w) wide by 720 (scr_h) high with the origin in the top-left corner of the screen,
then the texture rendering goes wrong.
Matrix.orthoM(mProjectionMatrix, 0, 0, scr_w,0, scr_h, -1, 1); //then texture rendering doesn't work ?
I have also tried setting Matrix.setLookAtM differently, to look from (240,360,5) toward (240,360,0):
//Matrix.setLookAtM(mViewMatrix, 0, scr_w/2, scr_h/2, 5, scr_w/2, scr_h/2, 0f, 0f, 1.0f, 0.0f); // then texture rendering doesn't work ?
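(One thing that may be worth double-checking, independent of the projection values: the Sprite vertex shader below multiplies in the order vPosition * uMVPMatrix. With the column-major matrices produced by android.opengl.Matrix, the conventional order for column vectors is matrix * vector; reversing it amounts to using the transposed matrix, and the two are only interchangeable for symmetric matrices. Also note that with orthoM(0, scr_w, 0, scr_h, ...) the origin is the bottom-left corner, so the sprite's ±50 coordinates straddle that corner unless the sprite is translated. A one-line sketch of the conventional multiplication, written as the same Java string fragment used in the shader code:)
" gl_Position = uMVPMatrix * vPosition; \n" +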
The Renderer class:
public class SimpleRenderer implements GLSurfaceView.Renderer{
private final Context ctx;
static int scr_w = 480;
static int scr_h = 720;
private final float[] mViewMatrix = new float[16];
private final float[] mProjectionMatrix = new float[16];
private final float[] mMVPMatrix = new float[16];
private Sprite spt1;
SimpleRenderer(final Context context)
{
this.ctx = context;
}
public void onSurfaceCreated(GL10 unused, EGLConfig config)
{
//Set the background color
GLES20.glClearColor(0.0f, 0.0f, 2.0f, 1.0f);
//Disable depth test
GLES20.glDisable(GLES20.GL_DEPTH_TEST);
// Set alpha blend on
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ONE_MINUS_SRC_ALPHA);
spt1 = new Sprite(ctx);
}
public void onSurfaceChanged(GL10 unused, int width, int height)
{
GLES20.glViewport(0, 0, width, height);
Matrix.orthoM(mProjectionMatrix, 0, -scr_w/2, scr_w/2,-scr_h/2, scr_h/2, -1, 1);
// Matrix.orthoM(mProjectionMatrix, 0, 0, scr_w,0, scr_h, -1, 1); // DOESNT WORK ?
Matrix.setLookAtM(mViewMatrix, 0, 0, 0, 5, 0, 0, 0f, 0f, 1.0f, 0.0f);
//Matrix.setLookAtM(mViewMatrix, 0, scr_w/2, scr_h/2, 5, scr_w/2, scr_h/2, 0f, 0f, 1.0f, 0.0f); // DOESN'T WORK
// Mix the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mViewMatrix, 0);
}
public void onDrawFrame(GL10 unused)
{
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
spt1.Draw(mMVPMatrix);
}}
The Sprite class (maybe something is wrong with the shader or fragment handling of the 2D coordinates?):
public class Sprite
{
//Reference to Activity Context
private final Context mActivityContext;
//Added for Textures
private final FloatBuffer mCubeTextureCoordinates;
private int mTextureUniformHandle;
private int mTextureCoordinateHandle;
private final int mTextureCoordinateDataSize = 2;
private int mTextureDataHandle;
private final String vertexShaderCode =
"attribute vec2 a_TexCoordinate; \n" +
"varying vec2 v_TexCoordinate; \n" +
"uniform mat4 uMVPMatrix; \n" +
"attribute vec4 vPosition; \n" +
"void main() { \n" +
" gl_Position = vPosition * uMVPMatrix; \n" +
" v_TexCoordinate = a_TexCoordinate; \n" +
"} \n";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
"uniform sampler2D u_Texture;" +
"varying vec2 v_TexCoordinate;" +
"void main() {" +
"gl_FragColor = (vColor * texture2D(u_Texture, v_TexCoordinate));" +
"}";
private final int shaderProgram;
private final FloatBuffer vertexBuffer;
private final ShortBuffer drawListBuffer;
private int mPositionHandle;
private int mColorHandle;
private int mMVPMatrixHandle;
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 2;
static float spriteCoords[] = {
-50f, 50f, // top left
-50f, -50f, // bottom left
50f, -50f, // bottom right
50f, 50f //top right
};
private short drawOrder[] = { 0, 1, 2, 0, 2, 3 }; //Order to draw vertices
private final int vertexStride = COORDS_PER_VERTEX * 4; //Bytes per vertex
float color[] = { 1f, 1f, 1f, 1.0f };
public Sprite(final Context activityContext)
{
mActivityContext = activityContext;
//Initialize Vertex Byte Buffer
ByteBuffer bb = ByteBuffer.allocateDirect(spriteCoords.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(spriteCoords);
vertexBuffer.position(0);
final float[] TextureCoordinate =
{
1f, 0f,
1f, 1f,
0f, 1f,
0f, 0f
};
mCubeTextureCoordinates = ByteBuffer.allocateDirect(TextureCoordinate.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
mCubeTextureCoordinates.put(TextureCoordinate).position(0);
//Initialize byte buffer for the draw list
ByteBuffer dlb = ByteBuffer.allocateDirect(spriteCoords.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
shaderProgram = GLES20.glCreateProgram();
GLES20.glAttachShader(shaderProgram, vertexShader);
GLES20.glAttachShader(shaderProgram, fragmentShader);
//Texture Code
GLES20.glBindAttribLocation(shaderProgram, 0, "a_TexCoordinate");
GLES20.glLinkProgram(shaderProgram);
//Load the texture
mTextureDataHandle = loadTexture(mActivityContext, R.drawable.cathead);
}
public void Draw(float[] mvpMatrix)
{
//Add program to OpenGL ES Environment
GLES20.glUseProgram(shaderProgram);
//Get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(shaderProgram, "vPosition");
//Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
//Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
//Get Handle to Fragment Shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(shaderProgram, "vColor");
//Set the Color for drawing the triangle
GLES20.glUniform4fv(mColorHandle, 1, color, 0);
//Set Texture Handles and bind Texture
mTextureUniformHandle = GLES20.glGetAttribLocation(shaderProgram, "u_Texture");
mTextureCoordinateHandle = GLES20.glGetAttribLocation(shaderProgram, "a_TexCoordinate");
//Set the active texture unit to texture unit 0.
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
//Bind the texture to this unit.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);
//Tell the texture uniform sampler to use this texture in the shader by binding to texture unit 0.
GLES20.glUniform1i(mTextureUniformHandle, 0);
//Pass in the texture coordinate information
mCubeTextureCoordinates.position(0);
GLES20.glVertexAttribPointer(mTextureCoordinateHandle, mTextureCoordinateDataSize, GLES20.GL_FLOAT, false, 0, mCubeTextureCoordinates);
GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
//Get Handle to Shape's Transformation Matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(shaderProgram, "uMVPMatrix");
//Apply the projection and view transformation
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
GLES20.glEnable(GLES20.GL_BLEND_COLOR);
GLES20.glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
GLES20.glDepthMask(false);
//Draw the triangle
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length, GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
//Disable Vertex Array
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
public static int loadTexture(final Context context, final int resourceId)
{
final int[] textureHandle = new int[1];
GLES20.glGenTextures(1, textureHandle, 0);
if (textureHandle[0] != 0)
{
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inScaled = false; // No pre-scaling
// Read in the resource
final Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), resourceId, options);
// Bind to the texture in OpenGL
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);
// Set filtering
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
// Load the bitmap into the bound texture.
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
// Recycle the bitmap, since its data has been loaded into OpenGL.
bitmap.recycle();
}
if (textureHandle[0] == 0)
{
throw new RuntimeException("Error loading texture.");
}
return textureHandle[0];
}
public static int loadShader(int type, String shaderCode)
{
int shader = GLES20.glCreateShader(type);
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
}

Handling aspect ratio in an OpenGL renderer for camera orientation in landscape

I am rendering camera data on my GLSurfaceView. Everything works fine, but the problem is that the image stretches a bit in landscape mode. I have read and tried the solution at http://www.learnopengles.com/understanding-opengls-matrices (please check the "Adjusting to the screen’s aspect ratio" part of that link). My code for that solution is attached below. This code is from part of my Renderer class, which implements the GLSurfaceView.Renderer interface:
private float[] vertices;
private float[] verticesFrontCameraPortrait = new float[]{1.f, -1.f, -1.f, -1.f, -1.f, 1.f, 1.f, 1.f};
private float[] verticesFrontCamera_90_Degree_Right = new float[]{-1.f, -1.f, -1.f, 1.f, 1.f, 1.f, 1.f, -1.f,};
private float[] verticesFrontCamera_270_Degree_right = new float[]{1.f, 1.f, 1.f, -1.f, -1.f, -1.f, -1.f, 1.f,};
@Override
public void onSurfaceChanged(GL10 gl10, int width, int height) {
GLES20.glViewport(0, 0, width, height);
float aspectRatio = (float) width / (float) height;
Matrix.orthoM(vertices, 0, -aspectRatio, aspectRatio, -1, 1, -1, 1);
}
My vertices variable holds a reference to one of the three vertex arrays, according to the device configuration. That works totally fine. But the problem occurs if I try to do the work in the onSurfaceChanged(GL10 gl10, int width, int height)
method: it crashes my program with an ArrayIndexOutOfBoundsException. But if I don't use these two lines:
float aspectRatio = (float) width / (float) height;
Matrix.orthoM(vertices, 0, -aspectRatio, aspectRatio, -1, 1, -1, 1);
everything works fine, but the video/camera image data is stretched in landscape mode. I don't want a stretched image rendered on my GLSurfaceView.
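(A note on the crash itself: android.opengl.Matrix.orthoM() writes a full 4x4 projection matrix, i.e. 16 floats, into the array passed as its first argument, so handing it one of the 8-element vertex arrays would produce exactly this ArrayIndexOutOfBoundsException. A sketch of the usual pattern, assuming a dedicated 16-element projection matrix that is then passed to the vertex shader as a uniform, which the shader used by this code would also need to apply:)
private final float[] mProjectionMatrix = new float[16]; // orthoM needs 16 floats, not an 8-float vertex array

@Override
public void onSurfaceChanged(GL10 gl10, int width, int height) {
    GLES20.glViewport(0, 0, width, height);
    float aspectRatio = (float) width / (float) height;
    // Fill the projection matrix; the vertex positions themselves stay untouched.
    Matrix.orthoM(mProjectionMatrix, 0, -aspectRatio, aspectRatio, -1f, 1f, -1f, 1f);
}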
Here is my full Renderer class:
/**
* IMPORTANT - Please read before changing
*
* This class renders an NV21 (YV12) image byte array in a GlSurfaceView using OpenGL.
* The NV21 image format has 12 bits per pixel, of which 8 bits are for luminance (Y) and 4 bits
* are for chrominance (UV). So the yBuffer size equals the number of pixels and the uvBuffer size is half
* the number of pixels. The yTexture height and width are also the same as the image height and width.
* The first 2/3 of the input image array are Y values and the last 1/3 are UV values in the
* order v0u0v1u1... (alternating V and U values). So the GL_LUMINANCE and GL_LUMINANCE_ALPHA formats
* are used to pass yBuffer and uvBuffer respectively, and the fragment_shader takes the U value from the alpha channel
* and the V value from the red channel (it could be the green or blue channel with the same result). The uvTexture height
* and width are half of the original image height and width (so a quarter of the pixels).
*
* GL_TEXTURE0 + 1 (GL_TEXTURE1) and GL_TEXTURE0 + 2 (GL_TEXTURE2) must be used for yTexture and uvTexture.
* If GL_TEXTURE0 is used for yTexture, it doesn't work in some devices.
*/
public class VideoCallImageRenderer implements GLSurfaceView.Renderer {
private final String TAG = "VideoCallImageRenderer";
private int cameraType; // 0 for backcamera, 1 for front camera, 2 for opponent camera
private Context context;
private int shaderProgram;
short[] indices = new short[]{0, 1, 2, 0, 2, 3};
// y texture handle
private int[] yTexture = new int[1];
// uv texture handle
private int[] uvTexture = new int[1];
// texture coordinate and vertices buffers
private FloatBuffer texCoordBuffer, vertexBuffer;
// indices buffer
private ShortBuffer indexBuffer;
// y and uv texture buffers
private ByteBuffer yBuffer, uvBuffer;
// image height and width
private int width = 0, height = 0;
// true when a valid image data is set. default value false.
private boolean render = false;
// position attribute location handle in vertex shader
private int positionLocation;
// texture coordinate attribute location handle in vertex shader
private int textureCoordinateLocation;
// y_texture sampler2D location handle in fragment shader
private int yTextureLocation;
// uv_texture sampler2D location handle in fragment shader
private int uvTextureLocation;
final private float bytePerPixel = 1.5f;
/*This value of vertices are changed to roatate the image: explanation
let 1.f, 1.f = A
-1.f, 1.f = B
-1.f, -1.f= C
1.f, -1.f = D
So ABCD = back camera in its normal state.
If we want to rotate 90 degrees right, the value will be DABC;
if we want to rotate 90 degrees left, the value will be BCDA.
private float[] vertices = new float[]{
1.f, -1.f,
-1.f, -1.f,
-1.f, 1.f,
1.f, 1.f
};
*/
private float[] vertices;
private float[] verticesPortrait;
private float[] vertices_90_Degree_Right;
private float[] vertices_270_Degree_Right;
private int currentDeviceOrientation = -1;
private float[] verticesFrontCameraPortrait = new float[]{1.f, -1.f, -1.f, -1.f, -1.f, 1.f, 1.f, 1.f};
private float[] verticesFrontCamera_90_Degree_Right = new float[]{-1.f, -1.f, -1.f, 1.f, 1.f, 1.f, 1.f, -1.f,};
private float[] verticesFrontCamera_270_Degree_right = new float[]{1.f, 1.f, 1.f, -1.f, -1.f, -1.f, -1.f, 1.f,};
private float[] verticesBackCameraPortrait = new float[]{1.f, 1.f, -1.f, 1.f, -1.f, -1.f, 1.f, -1.f};
private float[] verticesBackCamera_90_Degree_Right = new float[]{1.f, -1.f, 1.f, 1.f, -1.f, 1.f, -1.f, -1.f,};
private float[] verticesBackCamera_270_Degree_Right = new float[]{-1.f, 1.f, -1.f, -1.f, 1.f, -1.f, 1.f, 1.f,};
private float[] verticesOpponentCamera = new float[]{1.f, 1.f, 1.f, -1.f, -1.f, -1.f, -1.f, 1.f};
private float[] verticeOpponentCamera_90_Degree_Right = new float[]{1.f, -1.f, -1.f, -1.f, -1.f, 1.f, 1.f, 1.f,};
private float[] verticeOpponentCamera_270_Degree_Right = new float[]{-1.f, 1.f, 1.f, 1.f, 1.f, -1.f, -1.f, -1.f,};
private float[] texCoords = new float[]{
0.f, 0.f,
0.f, 1.f,
1.f, 1.f,
1.f, 0.f
};
public VideoCallImageRenderer(Context context, int cameraType) {
this.context = context;
// initialize texture coordinate buffer
this.cameraType = cameraType;
if (cameraType == 0) {
verticesPortrait = verticesBackCameraPortrait;
vertices_90_Degree_Right = verticesBackCamera_90_Degree_Right;
vertices_270_Degree_Right = verticesBackCamera_270_Degree_Right;
} else if (cameraType == 1) {
verticesPortrait = verticesFrontCameraPortrait;
vertices_90_Degree_Right = verticesFrontCamera_90_Degree_Right;
vertices_270_Degree_Right = verticesFrontCamera_270_Degree_right;
} else {
verticesPortrait = verticesOpponentCamera;
vertices_90_Degree_Right = verticeOpponentCamera_90_Degree_Right;
vertices_270_Degree_Right = verticeOpponentCamera_270_Degree_Right;
}
ByteBuffer tcbb = ByteBuffer.allocateDirect(texCoords.length * 4);
tcbb.order(ByteOrder.nativeOrder());
texCoordBuffer = tcbb.asFloatBuffer();
texCoordBuffer.put(texCoords);
texCoordBuffer.position(0);
// initialize vertices buffer
vertices = verticesPortrait;
ByteBuffer vbb = ByteBuffer.allocateDirect(vertices.length * 4);
vbb.order(ByteOrder.nativeOrder());
vertexBuffer = vbb.asFloatBuffer();
vertexBuffer.put(vertices);
vertexBuffer.position(0);
// initialize indices buffer
ByteBuffer ibb = ByteBuffer.allocateDirect(indices.length * 2);
ibb.order(ByteOrder.nativeOrder());
indexBuffer = ibb.asShortBuffer();
indexBuffer.put(indices);
indexBuffer.position(0);
}
public void resetVertex(float[] new_vertices) {
Constants.debugLog(TAG, "resetVertex");
ByteBuffer vbb = ByteBuffer.allocateDirect(new_vertices.length * 4);
vbb.order(ByteOrder.nativeOrder());
vertexBuffer = vbb.asFloatBuffer();
vertexBuffer.put(new_vertices);
vertexBuffer.position(0);
}
public void setImageBuffer(final byte[] imageBytes, int height, int width, int iDeviceOrientation) {
// reinitialize texture buffers if width or height changes
Constants.debugLog(TAG, "setImageBuffer device Orientation == " + iDeviceOrientation);
if (currentDeviceOrientation == -1) {
currentDeviceOrientation = iDeviceOrientation;
}
if (iDeviceOrientation != currentDeviceOrientation) {
currentDeviceOrientation = iDeviceOrientation;
switch (currentDeviceOrientation) {
case 0:
vertices = verticesPortrait;
resetVertex(vertices);
break;
case 1:
vertices = vertices_90_Degree_Right;
resetVertex(vertices);
break;
case 3:
vertices = vertices_270_Degree_Right;
resetVertex(vertices);
break;
}
}
final boolean resolutionChanged = this.width != width || this.height != height;
if (resolutionChanged) {
this.width = width;
this.height = height;
final int numberOfPixels = this.height * this.width;
this.yBuffer = ByteBuffer.allocateDirect(numberOfPixels);
this.yBuffer.order(ByteOrder.nativeOrder());
this.uvBuffer = ByteBuffer.allocateDirect(numberOfPixels / 2);
this.uvBuffer.order(ByteOrder.nativeOrder());
}
this.render = updateYUVBuffers(imageBytes);
}
private boolean updateYUVBuffers(final byte[] imageBytes) {
final int numberOfPixels = this.height * this.width;
final int numberOfExpectedBytes = (int) (numberOfPixels * this.bytePerPixel);
if (imageBytes != null && imageBytes.length != (int) (numberOfPixels * this.bytePerPixel)) {
return false;
}
// put image bytes into texture buffers
yBuffer.put(imageBytes, 0, numberOfPixels);
yBuffer.position(0);
uvBuffer.put(imageBytes, numberOfPixels, numberOfPixels / 2);
uvBuffer.position(0);
return true;
}
@Override
public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
createShader();
positionLocation = GLES20.glGetAttribLocation(shaderProgram, "a_position");
textureCoordinateLocation = GLES20.glGetAttribLocation(shaderProgram, "a_texCoord");
// generate y texture
GLES20.glEnable(GLES20.GL_TEXTURE_2D);
yTextureLocation = GLES20.glGetUniformLocation(shaderProgram, "y_texture");
GLES20.glGenTextures(1, yTexture, 0);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + 1);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yTexture[0]);
// generate uv texture
GLES20.glEnable(GLES20.GL_TEXTURE_2D);
uvTextureLocation = GLES20.glGetUniformLocation(shaderProgram, "uv_texture");
GLES20.glGenTextures(1, uvTexture, 0);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + 2);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, uvTexture[0]);
// clear display color
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
}
@Override
public void onSurfaceChanged(GL10 gl10, int width, int height) {
GLES20.glViewport(0, 0, width, height);
float aspectRatio = (float) width / (float) height;
Matrix.orthoM(vertices, 0, -aspectRatio, aspectRatio, -1, 1, -1, 1);
}
@Override
public void onDrawFrame(GL10 gl10) {
// clear display
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
if (render) {
GLES20.glUseProgram(shaderProgram);
GLES20.glVertexAttribPointer(positionLocation, 2,
GLES20.GL_FLOAT, false,
0, vertexBuffer);
GLES20.glVertexAttribPointer(textureCoordinateLocation, 2, GLES20.GL_FLOAT,
false,
0, texCoordBuffer);
GLES20.glEnableVertexAttribArray(positionLocation);
GLES20.glEnableVertexAttribArray(textureCoordinateLocation);
// create and update y texture
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + 1);
GLES20.glUniform1i(yTextureLocation, 1);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, this.width,
this.height, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, this.yBuffer);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
// create and update uv texture
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + 2);
GLES20.glUniform1i(uvTextureLocation, 2);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE_ALPHA, this.width / 2,
this.height / 2, 0, GLES20.GL_LUMINANCE_ALPHA, GLES20.GL_UNSIGNED_BYTE, this.uvBuffer);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
// render image
GLES20.glDrawElements(GLES20.GL_TRIANGLES, indices.length,
GLES20.GL_UNSIGNED_SHORT, indexBuffer);
GLES20.glDisableVertexAttribArray(positionLocation);
GLES20.glDisableVertexAttribArray(textureCoordinateLocation);
}
}
void createShader() {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER,
CallConstants.readRawTextFile(context, R.raw.vertex_shader));
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER,
CallConstants.readRawTextFile(context, R.raw.fragment_shader));
shaderProgram = GLES20.glCreateProgram();
GLES20.glAttachShader(shaderProgram, vertexShader);
GLES20.glAttachShader(shaderProgram, fragmentShader);
GLES20.glLinkProgram(shaderProgram);
GLES20.glUseProgram(shaderProgram);
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(shaderProgram, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
Log.e("Render", "Could not link program: ");
Log.e("Render", GLES20.glGetProgramInfoLog(shaderProgram));
GLES20.glDeleteProgram(shaderProgram);
shaderProgram = 0;
}
// free up no longer needed shader resources
GLES20.glDeleteShader(vertexShader);
GLES20.glDeleteShader(fragmentShader);
}
public int loadShader(int type, String shaderCode) {
int shader = GLES20.glCreateShader(type);
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
}

Android OpenGL: render to a frameBuffer at the original (small) resolution and then scale it (viewport) to the screen size

I have an NV21 (YUV420) camera video to which I'm applying a fragment shader in order to get some filter effects, as well as YUV-to-RGB conversion.
Everything is working except for the bad performance.
My fragment shader is a bit heavy because it has many texture2D() calls.
The original frame resolution is 480x640 pixels, and I noticed that if I set the viewport to this original size (instead of the fullscreen size) it works well and smoothly.
So basically I first need to render and process the frame in a frameBuffer (FBO) at that original size and then (after the shader's work is done) scale it to the fullscreen size using the viewport (mostly 1080x1920). That means the "heavy" processing work would be applied to far fewer fragments.
I've found some tutorials and similar questions here on how to achieve that, but unfortunately I've had no luck with it (I got black screens, GL_INVALID_OPERATION, etc.).
Any help would be much appreciated.
Also, another (optional) performance tweak that I don't know how to deal with (if it's even possible) is to somehow combine these 3 textures (Y_tex, U_tex and V_tex) into a single texture that is passed to the shader as a single sampler, so that I can make just one texture2D() call in the shader to get the current YUV values and convert them to RGB values.
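(On that second point: since the frame is YUV420, the two chroma planes can at least be collapsed into one texture. If the chroma data really is interleaved as VUVU... (true NV21, rather than the separate U and V planes the upload code below assumes), it can be uploaded as a single GL_LUMINANCE_ALPHA texture, exactly as the VideoCallImageRenderer in the previous question does, so the shader needs two samplers and two lookups instead of three. A sketch of the sampling side, with UVtex as a hypothetical sampler name:)
y = texture2D(Ytex, vec2(nx, ny)).r;
vec4 chroma = texture2D(UVtex, vec2(nx, ny)); // one lookup for both chroma values
v = chroma.r; // V from the red/luminance channel
u = chroma.a; // U from the alpha channel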
This is my renderer code:
static class MyRenderer implements GLSurfaceView.Renderer
{
int mTextureIds[] = new int[3];
float[] mScaleMatrix = new float[16];
private FloatBuffer mVertexBuffer;
private FloatBuffer mTextureBuffer;
private ShortBuffer mDrawListBuffer;
boolean mVideoFitEnabled = true;
boolean mVideoDisabled = false;
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
static final int TEXTURECOORDS_PER_VERTEX = 2;
static float mXYZCoords[] = {
-1.0f, 1.0f, 0.0f, // top left
-1.0f, -1.0f, 0.0f, // bottom left
1.0f, -1.0f, 0.0f, // bottom right
1.0f, 1.0f, 0.0f // top right
};
static float mUVCoords[] = {
0, 0, // top left
0, 1, // bottom left
1, 1, // bottom right
1, 0 // top right
};
private short mVertexIndex[] = {0, 1, 2, 0, 2, 3}; // order to draw vertices
private final String vertexShaderCode =
"uniform mat4 uMVPMatrix;"
+ "attribute vec4 aPosition;\n"
+ "attribute vec2 aTextureCoord;\n"
+ "varying vec2 vTextureCoord;\n"
+ "void main() {\n"
+ " gl_Position = uMVPMatrix * aPosition;\n"
+ " vTextureCoord = aTextureCoord;\n"
+ "}\n";
private final String fragmentShaderCode =
"precision mediump float;\n"
+ "uniform sampler2D Ytex;\n"
+ "uniform sampler2D Utex,Vtex;\n"
+ "varying vec2 vTextureCoord;\n"
+ "void main(void) {\n"
+ " float nx,ny,r,g,b,y,u,v;\n"
+ " mediump vec4 txl,ux,vx;"
+ " nx=vTextureCoord[0];\n"
+ " ny=vTextureCoord[1];\n"
+ " y=texture2D(Ytex,vec2(nx,ny)).r;\n"
+ " u=texture2D(Utex,vec2(nx,ny)).r;\n"
+ " v=texture2D(Vtex,vec2(nx,ny)).r;\n"
+ " y=1.1643*(y-0.0625);\n"
+ " u=u-0.5;\n"
+ " v=v-0.5;\n"
+ " r=y+1.5958*v;\n"
+ " g=y-0.39173*u-0.81290*v;\n"
+ " b=y+2.017*u;\n"
// --> Bilateral blur filter code HERE <--
+ " gl_FragColor=vec4(r,g,b,1.0);\n"
+ "}\n";
ReentrantLock mFrameLock = new ReentrantLock();
Frame mCurrentFrame;
private int mProgram;
private int mTextureWidth;
private int mTextureHeight;
private int mViewportWidth;
private int mViewportHeight;
public MyRenderer()
{
ByteBuffer bb = ByteBuffer.allocateDirect(mXYZCoords.length * 4);
bb.order(ByteOrder.nativeOrder());
mVertexBuffer = bb.asFloatBuffer();
mVertexBuffer.put(mXYZCoords);
mVertexBuffer.position(0);
ByteBuffer tb = ByteBuffer.allocateDirect(mUVCoords.length * 4);
tb.order(ByteOrder.nativeOrder());
mTextureBuffer = tb.asFloatBuffer();
mTextureBuffer.put(mUVCoords);
mTextureBuffer.position(0);
ByteBuffer dlb = ByteBuffer.allocateDirect(mVertexIndex.length * 2);
dlb.order(ByteOrder.nativeOrder());
mDrawListBuffer = dlb.asShortBuffer();
mDrawListBuffer.put(mVertexIndex);
mDrawListBuffer.position(0);
}
@Override public void onSurfaceCreated(GL10 gl, EGLConfig config)
{
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
mProgram = GLES20.glCreateProgram(); // create empty OpenGL ES Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mProgram);
int positionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
int textureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
GLES20.glVertexAttribPointer(positionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, COORDS_PER_VERTEX * 4, mVertexBuffer);
GLES20.glEnableVertexAttribArray(positionHandle);
GLES20.glVertexAttribPointer(textureHandle, TEXTURECOORDS_PER_VERTEX, GLES20.GL_FLOAT, false, TEXTURECOORDS_PER_VERTEX * 4, mTextureBuffer);
GLES20.glEnableVertexAttribArray(textureHandle);
GLES20.glUseProgram(mProgram);
int i = GLES20.glGetUniformLocation(mProgram, "Ytex"); // GLES20.glUniform3i(i, 0, 1, 2);
GLES20.glUniform1i(i, 0); /* Bind Ytex to texture unit 0 */
i = GLES20.glGetUniformLocation(mProgram, "Utex");
GLES20.glUniform1i(i, 1); /* Bind Utex to texture unit 1 */
i = GLES20.glGetUniformLocation(mProgram, "Vtex");
GLES20.glUniform1i(i, 2); /* Bind Vtex to texture unit 2 */
mTextureWidth = 0;
mTextureHeight = 0;
}
static void initializeTexture(int name, int id, int width, int height)
{
GLES20.glActiveTexture(name);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, id);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, width, height, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, null);
}
void setupTextures(Frame frame)
{
if (mTextureIds[0] != 0)
{
GLES20.glDeleteTextures(3, mTextureIds, 0);
}
GLES20.glGenTextures(3, mTextureIds, 0);
int w = frame.getWidth();
int h = frame.getHeight();
int hw = (w + 1) >> 1;
int hh = (h + 1) >> 1;
initializeTexture(GLES20.GL_TEXTURE0, mTextureIds[0], w, h);
initializeTexture(GLES20.GL_TEXTURE1, mTextureIds[1], hw, hh);
initializeTexture(GLES20.GL_TEXTURE2, mTextureIds[2], hw, hh);
mTextureWidth = frame.getWidth();
mTextureHeight = frame.getHeight();
}
void updateTextures(Frame frame)
{
int width = frame.getWidth();
int height = frame.getHeight();
int half_width = (width + 1) >> 1;
int half_height = (height + 1) >> 1;
int y_size = width * height;
int uv_size = half_width * half_height;
ByteBuffer bb = frame.getBuffer();
bb.clear(); // If we are reusing this frame, make sure we reset position and limit
if (bb.remaining() == y_size + uv_size * 2)
{
bb.position(0);
GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
GLES20.glPixelStorei(GLES20.GL_PACK_ALIGNMENT, 1);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureIds[0]);
GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, width, height, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, bb);
bb.position(y_size);
GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureIds[1]);
GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, half_width, half_height, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, bb);
bb.position(y_size + uv_size);
GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureIds[2]);
GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, half_width, half_height, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, bb);
int i = GLES20.glGetUniformLocation(mProgram, "width");
GLES20.glUniform1f(i, (float) mTextureWidth);
i = GLES20.glGetUniformLocation(mProgram, "height");
GLES20.glUniform1f(i, (float) mTextureHeight);
}
else
{
mTextureWidth = 0;
mTextureHeight = 0;
}
}
@Override public void onSurfaceChanged(GL10 gl, int width, int height)
{
GLES20.glViewport(0, 0, width, height);
mViewportWidth = width;
mViewportHeight = height;
}
@Override public void onDrawFrame(GL10 gl)
{
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
mFrameLock.lock();
if (mCurrentFrame != null && !mVideoDisabled)
{
GLES20.glUseProgram(mProgram);
if (mTextureWidth != mCurrentFrame.getWidth() || mTextureHeight != mCurrentFrame.getHeight())
{
setupTextures(mCurrentFrame);
}
updateTextures(mCurrentFrame);
Matrix.setIdentityM(mScaleMatrix, 0);
float scaleX = 1.0f, scaleY = 1.0f;
float ratio = (float) mCurrentFrame.getWidth() / mCurrentFrame.getHeight();
float vratio = (float) mViewportWidth / mViewportHeight;
if (mVideoFitEnabled)
{
if (ratio > vratio)
{
scaleY = vratio / ratio;
}
else
{
scaleX = ratio / vratio;
}
}
else
{
if (ratio < vratio)
{
scaleY = vratio / ratio;
}
else
{
scaleX = ratio / vratio;
}
}
Matrix.scaleM(mScaleMatrix, 0, scaleX * (mCurrentFrame.isMirroredX() ? -1.0f : 1.0f), scaleY, 1);
int mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mScaleMatrix, 0);
GLES20.glDrawElements(GLES20.GL_TRIANGLES, mVertexIndex.length, GLES20.GL_UNSIGNED_SHORT, mDrawListBuffer);
}
mFrameLock.unlock();
}
public void displayFrame(Frame frame)
{
mFrameLock.lock();
if (this.mCurrentFrame != null)
{
this.mCurrentFrame.recycle();
}
this.mCurrentFrame = frame;
mFrameLock.unlock();
}
public static int loadShader(int type, String shaderCode)
{
int shader = GLES20.glCreateShader(type);
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
public void disableVideo(boolean b)
{
mFrameLock.lock();
mVideoDisabled = b;
if (mVideoDisabled)
{
if (this.mCurrentFrame != null)
{
this.mCurrentFrame.recycle();
}
this.mCurrentFrame = null;
}
mFrameLock.unlock();
}
public void enableVideoFit(boolean enableVideoFit)
{
mVideoFitEnabled = enableVideoFit;
}
}
Eventually I figured it out, thanks to a very talented guy who helped me with it.
Here is my renderer class, which now has a frameBuffer with 2 rendering passes:
static class MyRenderer implements GLSurfaceView.Renderer
{
int mTextureIds[] = new int[4];
float[] mScaleMatrix = new float[16];
float[] mFilterScaleMatrix = new float[16];
private FloatBuffer mVertexBuffer;
private FloatBuffer mTextureBuffer;
private ShortBuffer mDrawListBuffer;
private IntBuffer frameBuffer;
boolean mVideoFitEnabled = true;
boolean mVideoDisabled = false;
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
static final int TEXTURECOORDS_PER_VERTEX = 2;
static float mXYZCoords[] = {
-1.0f, 1.0f, 0.0f, // top left
-1.0f, -1.0f, 0.0f, // bottom left
1.0f, -1.0f, 0.0f, // bottom right
1.0f, 1.0f, 0.0f // top right
};
static float mUVCoords[] = {
0, 0, // top left
0, 1, // bottom left
1, 1, // bottom right
1, 0 // top right
};
private short mVertexIndex[] = {0, 1, 2, 0, 2, 3}; // order to draw vertices
private final String vertexShaderCode =
"uniform mat4 uMVPMatrix;"
+ "attribute vec4 aPosition;\n"
+ "attribute vec2 aTextureCoord;\n"
+ "varying vec2 vTextureCoord;\n"
+ "void main() {\n"
+ " gl_Position = uMVPMatrix * aPosition;\n"
+ " vTextureCoord = aTextureCoord;\n"
+ "}\n";
private final String fragmentShaderCode =
"YUV to RGB Conversion shader HERE";
private final String frameBufferShader =
"MY filter effect shader HERE";
ReentrantLock mFrameLock = new ReentrantLock();
Frame mCurrentFrame;
private int mProgram;
private int mProgramFilter;
private int mTextureWidth;
private int mTextureHeight;
private int mViewportWidth;
private int mViewportHeight;
public MyRenderer()
{
ByteBuffer bb = ByteBuffer.allocateDirect(mXYZCoords.length * 4);
bb.order(ByteOrder.nativeOrder());
mVertexBuffer = bb.asFloatBuffer();
mVertexBuffer.put(mXYZCoords);
mVertexBuffer.position(0);
ByteBuffer tb = ByteBuffer.allocateDirect(mUVCoords.length * 4);
tb.order(ByteOrder.nativeOrder());
mTextureBuffer = tb.asFloatBuffer();
mTextureBuffer.put(mUVCoords);
mTextureBuffer.position(0);
ByteBuffer dlb = ByteBuffer.allocateDirect(mVertexIndex.length * 2);
dlb.order(ByteOrder.nativeOrder());
mDrawListBuffer = dlb.asShortBuffer();
mDrawListBuffer.put(mVertexIndex);
mDrawListBuffer.position(0);
frameBuffer = IntBuffer.allocate(1);
}
@Override public void onSurfaceCreated(GL10 gl, EGLConfig config)
{
GLES20.glDisable(GLES20.GL_DEPTH_TEST);
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
int filterVertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int filterFragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, frameBufferShader);
mProgram = GLES20.glCreateProgram(); // create empty OpenGL ES Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mProgram);
mProgramFilter = GLES20.glCreateProgram(); // create empty OpenGL ES Program
GLES20.glAttachShader(mProgramFilter, filterVertexShader); // add the vertex shader to program
GLES20.glAttachShader(mProgramFilter, filterFragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mProgramFilter);
int positionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
int textureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
GLES20.glVertexAttribPointer(positionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, COORDS_PER_VERTEX * 4, mVertexBuffer);
GLES20.glEnableVertexAttribArray(positionHandle);
GLES20.glVertexAttribPointer(textureHandle, TEXTURECOORDS_PER_VERTEX, GLES20.GL_FLOAT, false, TEXTURECOORDS_PER_VERTEX * 4, mTextureBuffer);
GLES20.glEnableVertexAttribArray(textureHandle);
GLES20.glUseProgram(mProgram);
int i = GLES20.glGetUniformLocation(mProgram, "Ytex");
GLES20.glUniform1i(i, 3); /* Bind Ytex to texture unit 3 */
i = GLES20.glGetUniformLocation(mProgram, "Utex");
GLES20.glUniform1i(i, 1); /* Bind Utex to texture unit 1 */
i = GLES20.glGetUniformLocation(mProgram, "Vtex");
GLES20.glUniform1i(i, 2); /* Bind Vtex to texture unit 2 */
GLES20.glUseProgram(mProgramFilter);
i = GLES20.glGetUniformLocation(mProgramFilter, "Ytex");
GLES20.glUniform1i(i, 0);
mTextureWidth = 0;
mTextureHeight = 0;
}
static void initializeTexture(int name, int id, int width, int height)
{
GLES20.glActiveTexture(name);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, id);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, width, height, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, null);
}
void setupTextures(Frame frame)
{
if (mTextureIds[0] != 0)
{
GLES20.glDeleteTextures(4, mTextureIds, 0);
}
GLES20.glGenTextures(4, mTextureIds, 0);
int w = frame.getWidth();
int h = frame.getHeight();
int hw = (w + 1) >> 1;
int hh = (h + 1) >> 1;
initializeTexture(GLES20.GL_TEXTURE0, mTextureIds[0], w, h);
initializeTexture(GLES20.GL_TEXTURE1, mTextureIds[1], hw, hh);
initializeTexture(GLES20.GL_TEXTURE2, mTextureIds[2], hw, hh);
GLES20.glGenFramebuffers(1, frameBuffer);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer.get(0));
GLES20.glActiveTexture(GLES20.GL_TEXTURE3);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureIds[3]);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, w, h, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, mTextureIds[3], 0);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
mTextureWidth = frame.getWidth();
mTextureHeight = frame.getHeight();
GLES20.glUseProgram(mProgramFilter);
int i = GLES20.glGetUniformLocation(mProgramFilter, "width");
GLES20.glUniform1f(i, (float) mTextureWidth);
i = GLES20.glGetUniformLocation(mProgramFilter, "height");
GLES20.glUniform1f(i, (float) mTextureHeight);
}
void updateTextures(Frame frame)
{
int width = frame.getWidth();
int height = frame.getHeight();
int half_width = (width + 1) >> 1;
int half_height = (height + 1) >> 1;
int y_size = width * height;
int uv_size = half_width * half_height;
ByteBuffer bb = frame.getBuffer();
bb.clear(); // If we are reusing this frame, make sure we reset position and limit
if (bb.remaining() == y_size + uv_size * 2)
{
bb.position(0);
GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
GLES20.glPixelStorei(GLES20.GL_PACK_ALIGNMENT, 1);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureIds[0]);
GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, width, height, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, bb);
bb.position(y_size);
GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureIds[1]);
GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, half_width, half_height, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, bb);
bb.position(y_size + uv_size);
GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureIds[2]);
GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, half_width, half_height, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, bb);
}
else
{
mTextureWidth = 0;
mTextureHeight = 0;
}
}
@Override public void onSurfaceChanged(GL10 gl, int width, int height)
{
/// GLES20.glViewport(0, 0, width, height);
mViewportWidth = width;
mViewportHeight = height;
}
@Override public void onDrawFrame(GL10 gl)
{
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
mFrameLock.lock();
if (mCurrentFrame != null && !mVideoDisabled)
{
if (mTextureWidth != mCurrentFrame.getWidth() || mTextureHeight != mCurrentFrame.getHeight())
{
setupTextures(mCurrentFrame);
}
updateTextures(mCurrentFrame);
/// Step 1: Smoothing Filter - Render to FrameBuffer [pass 1]
Matrix.setIdentityM(mFilterScaleMatrix, 0);
GLES20.glViewport(0, 0, mTextureWidth, mTextureHeight);
GLES20.glUseProgram(mProgramFilter);
int mMVPFilterMatrixHandle = GLES20.glGetUniformLocation(mProgramFilter, "uMVPMatrix");
GLES20.glUniformMatrix4fv(mMVPFilterMatrixHandle, 1, false, mFilterScaleMatrix, 0);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer.get(0));
GLES20.glDrawElements(GLES20.GL_TRIANGLES, mVertexIndex.length, GLES20.GL_UNSIGNED_SHORT, mDrawListBuffer);
/// Step 2: Draw + RGB Conversion - Render to screen [pass 2]
Matrix.setIdentityM(mScaleMatrix, 0);
float scaleX = 1.0f, scaleY = 1.0f;
float ratio = (float) mCurrentFrame.getWidth() / mCurrentFrame.getHeight();
float vratio = (float) mViewportWidth / mViewportHeight;
if (mVideoFitEnabled)
{
if (ratio > vratio)
{
scaleY = vratio / ratio;
}
else
{
scaleX = ratio / vratio;
}
}
else
{
if (ratio < vratio)
{
scaleY = vratio / ratio;
}
else
{
scaleX = ratio / vratio;
}
}
Matrix.scaleM(mScaleMatrix, 0, scaleX * (mCurrentFrame.isMirroredX() ? -1.0f : 1.0f), scaleY, 1);
GLES20.glUseProgram(mProgram);
GLES20.glViewport(0, 0, mViewportWidth, mViewportHeight);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
int mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mScaleMatrix, 0);
GLES20.glDrawElements(GLES20.GL_TRIANGLES, mVertexIndex.length, GLES20.GL_UNSIGNED_SHORT, mDrawListBuffer);
}
mFrameLock.unlock();
}
public void displayFrame(Frame frame)
{
mFrameLock.lock();
if (this.mCurrentFrame != null)
{
this.mCurrentFrame.recycle();
}
this.mCurrentFrame = frame;
mFrameLock.unlock();
}
public static int loadShader(int type, String shaderCode)
{
int shader = GLES20.glCreateShader(type);
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
public void disableVideo(boolean b)
{
mFrameLock.lock();
mVideoDisabled = b;
if (mVideoDisabled)
{
if (this.mCurrentFrame != null)
{
this.mCurrentFrame.recycle();
}
this.mCurrentFrame = null;
}
mFrameLock.unlock();
}
public void enableVideoFit(boolean enableVideoFit)
{
mVideoFitEnabled = enableVideoFit;
}
}
What you are trying to do is called downsampling. You need to render first into a small FBO where both the viewport and the FBO attachment have the same size; in that pass you apply your blur effect. Then you render or blit the result into another FBO (or the default framebuffer) at the original size to get the texture scaled back up. Note that, depending on your blur technique, the upscaled result may show noticeably degraded quality.
As some OpenGL API calls may have different names in the Android SDK, here is the general pseudo-code for what you need to do (a GLES20 sketch of both passes follows the steps):
Let w be the original width and h the original height.
1. Create a custom FBO with a texture attachment of size w/2 × h/2 (if you plan to downsample to half the original resolution).
2. Attach the texture to the FBO. Bind the FBO for writing.
3. Set glViewport() to the same size as the FBO texture attachment.
4. Render a full-screen quad. Apply your blur effect during this pass.
5. Second pass: bind back the default framebuffer (or another custom FBO if you have a later rendering stage). If you just want to blit, make sure you bind the first FBO for reading, not for writing.
6. Bind the texture attached to the FBO from the previous pass to a sampler unit.
7. Set the viewport back to w × h. Draw the full-screen quad and do whatever you need in the fragment shader (gamma correction, blending, etc.).
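Here is a minimal GLES20 sketch of steps 1-4 in Java. It assumes a renderer that already has a compiled blur program (mBlurProgram) and a drawFullScreenQuad() helper; both names are hypothetical placeholders, not part of the code above.
private final int[] mBlurFbo = new int[1];
private final int[] mBlurTexture = new int[1];
void createHalfSizeFbo(int w, int h) {
    int halfW = w / 2, halfH = h / 2;
    // Color attachment at half resolution (step 1)
    GLES20.glGenTextures(1, mBlurTexture, 0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mBlurTexture[0]);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, halfW, halfH, 0,
            GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
    // FBO with the texture attached (step 2)
    GLES20.glGenFramebuffers(1, mBlurFbo, 0);
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mBlurFbo[0]);
    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
            GLES20.GL_TEXTURE_2D, mBlurTexture[0], 0);
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
}
void renderBlurPass(int w, int h) {
    // Viewport must match the attachment size (step 3)
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mBlurFbo[0]);
    GLES20.glViewport(0, 0, w / 2, h / 2);
    // Full-screen quad with the blur shader (step 4)
    GLES20.glUseProgram(mBlurProgram);
    drawFullScreenQuad();
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
}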
That's it. What is nice about this technique is that OpenGL does the upscaling and downscaling for you automatically, using the filtering mode you select for the texture involved in the process.
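And a sketch of steps 5-7, drawing the half-resolution result back to the default framebuffer at full size. Because the FBO texture above was created with GL_LINEAR filtering, sampling it here upscales it automatically; mScreenProgram and the sampler name sTexture are again assumptions, not names from the code above.
void renderToScreen(int viewportW, int viewportH) {
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);   // default framebuffer (step 5)
    GLES20.glViewport(0, 0, viewportW, viewportH);        // back to full size (step 7)
    GLES20.glUseProgram(mScreenProgram);
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);           // bind the blurred result (step 6)
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mBlurTexture[0]);
    GLES20.glUniform1i(GLES20.glGetUniformLocation(mScreenProgram, "sTexture"), 0);
    drawFullScreenQuad();                                 // final shading (gamma, blending, ...)
}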
On a side note: if you run into performance problems, writing a renderer in Java is not ideal, although your issues may just as well come from incorrect API usage or an inefficient implementation of the blur effect.