How can I convert an oval to a circle in OpenGL ES 2.0 on Android

I have been working on an OpenGL ES 2.0 Android project. My objective is to create a circle. With help I found online, I managed to run the code below, which is supposed to draw a circle in the view, but instead I got an oval. I have tried many ways to make it a circle. Any help shall be appreciated.
Thanks
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

import android.opengl.GLES20;
import android.util.Log;

public class Circle {
    private int mProgram, mPositionHandle, mColorHandle, mMVPMatrixHandle;
    private FloatBuffer mVertexBuffer;
    private float vertices[] = new float[364 * 3];
    float color[] = { 0.63671875f, 0.76953125f, 0.22265625f, 1.0f };

    private final String vertexShaderCode =
            "uniform mat4 uMVPMatrix;" +
            "attribute vec4 vPosition;" +
            "void main() {" +
            "  gl_Position = uMVPMatrix * vPosition;" +
            "}";

    private final String fragmentShaderCode =
            "precision mediump float;" +
            "uniform vec4 vColor;" +
            "void main() {" +
            "  gl_FragColor = vColor;" +
            "}";

    Circle() {
        vertices[0] = 0;
        vertices[1] = 0;
        vertices[2] = 0;
        for (int i = 1; i < 364; i++) {
            vertices[(i * 3) + 0] = (float) (0.5 * Math.cos((3.14 / 180) * (float) i) + vertices[0]);
            vertices[(i * 3) + 1] = (float) (0.5 * Math.sin((3.14 / 180) * (float) i) + vertices[1]);
            vertices[(i * 3) + 2] = 0;
        }
        Log.v("Thread", "" + vertices[0] + "," + vertices[1] + "," + vertices[2]);

        ByteBuffer vertexByteBuffer = ByteBuffer.allocateDirect(vertices.length * 4);
        vertexByteBuffer.order(ByteOrder.nativeOrder());
        mVertexBuffer = vertexByteBuffer.asFloatBuffer();
        mVertexBuffer.put(vertices);
        mVertexBuffer.position(0);

        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
        int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
        mProgram = GLES20.glCreateProgram();             // create empty OpenGL ES Program
        GLES20.glAttachShader(mProgram, vertexShader);   // add the vertex shader to program
        GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
        GLES20.glLinkProgram(mProgram);
    }

    public static int loadShader(int type, String shaderCode) {
        int shader = GLES20.glCreateShader(type);
        GLES20.glShaderSource(shader, shaderCode);
        GLES20.glCompileShader(shader);
        return shader;
    }

    public void draw(float[] mvpMatrix) {
        GLES20.glUseProgram(mProgram);
        // get handle to vertex shader's vPosition member
        mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
        // Enable a handle to the triangle vertices
        GLES20.glEnableVertexAttribArray(mPositionHandle);
        // Prepare the triangle coordinate data
        GLES20.glVertexAttribPointer(mPositionHandle, 3, GLES20.GL_FLOAT, false, 12, mVertexBuffer);
        // get handle to fragment shader's vColor member
        mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
        // Set color for drawing the triangle
        GLES20.glUniform4fv(mColorHandle, 1, color, 0);
        mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
        // Apply the projection and view transformation
        GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
        // Draw the triangle fan
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_FAN, 0, 364);
        // Disable vertex array
        GLES20.glDisableVertexAttribArray(mPositionHandle);
    }
}
And the output I am getting is this (screenshot: the circle renders as a vertically stretched oval).

The vertex coordinates span a (normalised) range of -1 to +1, and you are generating vertices from -0.5 to +0.5.
Now, where does the GL engine draw? To a buffer provided by the window system, in this case the Android window system (that is, ultimately the Linux kernel display driver at the lowest level). That buffer has a width × height of, say, 480 × 640 (your screenshot shows that width < height). So the drawn circle has a width of 0.5 * 480 on screen but a height of 0.5 * 640, hence the oval shape.
You will have to scale down your y coordinate by the ratio (screen width / screen height) to get a true circle. You can obtain the surface dimensions using either the EGL API or the Android API in your application.
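For example, a common fix (a sketch only; the renderer class, the clear call, and the near/far values here are illustrative, not from the question) is to build an aspect-corrected orthographic projection in onSurfaceChanged() and pass it to Circle.draw() as uMVPMatrix:

import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

public class MyCircleRenderer implements GLSurfaceView.Renderer {
    private final float[] mProjectionMatrix = new float[16];
    private Circle mCircle;

    @Override
    public void onSurfaceCreated(GL10 unused, EGLConfig config) {
        mCircle = new Circle();
    }

    @Override
    public void onSurfaceChanged(GL10 unused, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
        float aspect = (float) width / height;
        // Scale the vertical extent by the aspect ratio so one world unit
        // covers the same number of pixels in x and y.
        Matrix.orthoM(mProjectionMatrix, 0, -1f, 1f, -1f / aspect, 1f / aspect, -1f, 1f);
    }

    @Override
    public void onDrawFrame(GL10 unused) {
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        mCircle.draw(mProjectionMatrix); // uMVPMatrix now undoes the stretch
    }
}

With a 480 × 640 surface, the radius-0.5 circle then spans 120 pixels in both axes instead of 120 × 160.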

Related

Applying rotation to GLSurfaceView

I am attempting to use a GLSurfaceView to render output from Camera2. It works OK when the device is in portrait mode; however, when rotating to landscape, the picture is of course "sideways".
I've done a bit of looking around for how to apply rotation to the output, but there seems to be a multitude of different ways, and I was hoping there would be a simple one that would fit into my code.
This is an abridged version of my GLSurfaceView descendant:
public class DWGLCameraView extends GLSurfaceView implements Renderer, OnFrameAvailableListener {
    // Other parts snipped
    public void onDrawFrame(GL10 gl) {
        mSurfaceTexture.updateTexImage();
        float[] mtx = new float[16];
        mSurfaceTexture.getTransformMatrix(mtx);
        // Can I do something here to apply the rotation?
        mDrawer.draw(mtx);
    }
}
...and this (if it helps) is the DWGLDrawer class containing the draw routine:
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;

public class DWGLDrawer {
    private final String vertexShaderCode =
            "attribute vec4 vPosition;" +
            "attribute vec4 inputTextureCoordinate;" +
            "uniform mat4 u_xform;\n" +
            "varying vec2 textureCoordinate;" +
            "void main()" +
            "{" +
            "  gl_Position = vPosition;" +
            "  textureCoordinate = (u_xform * inputTextureCoordinate).xy;" +
            "}";
    private final String fragmentShaderCode =
            "#extension GL_OES_EGL_image_external : require\n" +
            "precision mediump float;" +
            "varying vec2 textureCoordinate;\n" +
            "uniform samplerExternalOES s_texture;\n" +
            "void main() {" +
            "  gl_FragColor = texture2D( s_texture, textureCoordinate );\n" +
            "}";
    private FloatBuffer vertexBuffer, textureVerticesBuffer;
    private ShortBuffer drawListBuffer;
    private final int mProgram;
    private int mPositionHandle;
    private int mTextureCoordHandle;
    private short drawOrder[] = { 0, 1, 2, 0, 2, 3 }; // order to draw vertices
    // number of coordinates per vertex in this array
    private static final int COORDS_PER_VERTEX = 2;
    private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex
    private static float squareCoords[] = {
            -1.0f,  1.0f,
            -1.0f, -1.0f,
             1.0f, -1.0f,
             1.0f,  1.0f,
    };
    private static float textureVertices[] = {
            0.0f, 1.0f,
            0.0f, 0.0f,
            1.0f, 0.0f,
            1.0f, 1.0f,
    };
    private int texture;
    private int mTransformLocation;

    public DWGLDrawer(int texture) {
        this.texture = texture;
        // initialize vertex byte buffer for shape coordinates
        ByteBuffer bb = ByteBuffer.allocateDirect(squareCoords.length * 4);
        bb.order(ByteOrder.nativeOrder());
        vertexBuffer = bb.asFloatBuffer();
        vertexBuffer.put(squareCoords);
        vertexBuffer.position(0);
        // initialize byte buffer for the draw list
        ByteBuffer dlb = ByteBuffer.allocateDirect(drawOrder.length * 2);
        dlb.order(ByteOrder.nativeOrder());
        drawListBuffer = dlb.asShortBuffer();
        drawListBuffer.put(drawOrder);
        drawListBuffer.position(0);
        ByteBuffer bb2 = ByteBuffer.allocateDirect(textureVertices.length * 4);
        bb2.order(ByteOrder.nativeOrder());
        textureVerticesBuffer = bb2.asFloatBuffer();
        textureVerticesBuffer.put(textureVertices);
        textureVerticesBuffer.position(0);
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
        int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
        mProgram = GLES20.glCreateProgram();             // create empty OpenGL ES Program
        GLES20.glAttachShader(mProgram, vertexShader);   // add the vertex shader to program
        GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
        GLES20.glLinkProgram(mProgram);                  // creates OpenGL ES program executables
        mTransformLocation = GLES20.glGetUniformLocation(mProgram, "u_xform");
    }

    public void draw(float[] mtx) {
        GLES20.glUseProgram(mProgram);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture);
        GLES20.glUniformMatrix4fv(mTransformLocation, 1, false, mtx, 0);
        // get handle to vertex shader's vPosition member
        int positionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
        // Enable a handle to the triangle vertices
        GLES20.glEnableVertexAttribArray(positionHandle);
        // Prepare the coordinate data
        GLES20.glVertexAttribPointer(positionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
        int textureCoordHandle = GLES20.glGetAttribLocation(mProgram, "inputTextureCoordinate");
        GLES20.glEnableVertexAttribArray(textureCoordHandle);
        // textureVerticesBuffer.clear();
        // textureVerticesBuffer.put( transformTextureCoordinates( textureVertices, mtx ));
        // textureVerticesBuffer.position(0);
        GLES20.glVertexAttribPointer(textureCoordHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, textureVerticesBuffer);
        GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length, GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
        // Disable vertex array
        GLES20.glDisableVertexAttribArray(positionHandle);
        GLES20.glDisableVertexAttribArray(textureCoordHandle);
    }

    private int loadShader(int type, String shaderCode) {
        // create a vertex shader type (GLES20.GL_VERTEX_SHADER)
        // or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
        int shader = GLES20.glCreateShader(type);
        // add the source code to the shader and compile it
        GLES20.glShaderSource(shader, shaderCode);
        GLES20.glCompileShader(shader);
        return shader;
    }

    private float[] transformTextureCoordinates(float[] coords, float[] matrix) {
        float[] result = new float[coords.length];
        float[] vt = new float[4];
        for (int i = 0; i < coords.length; i += 2) {
            float[] v = { coords[i], coords[i + 1], 0, 1 };
            Matrix.multiplyMV(vt, 0, matrix, 0, v, 0);
            result[i] = vt[0];
            result[i + 1] = vt[1];
        }
        return result;
    }
}
As per my comment in onDrawFrame, I figured that I might be able to apply rotation there. I had tried this:
if (mRotation > 0)
    Matrix.rotateM(mtx, 0, mRotation, 0f, 0f, -1f);
(mRotation is the rotation value in degrees)
However, that resulted in an unintelligible picture, so I assume it's completely wrong, or there's something else I need to do.
Please remember that I am using Camera2, so setDisplayOrientation (from Camera) is not an option.
EDIT
I've now included the entire declaration for DWGLDrawer and added to the description.
In general, your approach should be correct (assuming mRotation is in degrees and comes from something like Display.getRotation() to determine your UI orientation; note that getRotation() doesn't return degrees, so the value has to be adjusted).
However, it's hard to say whether the way your shader applies u_xform is compatible with the rotateM call you use. So you probably just need to debug the values you see for the matrix, what the rotate call does, and so on, to figure out where the math goes haywire. Unfortunately that's common when dealing with transforms in GL (or anywhere, really).
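For reference, a sketch of that adjustment (the helper name is made up; it assumes the rotation comes from the current Display):

import android.view.Display;
import android.view.Surface;

// Convert the Surface.ROTATION_* constant from Display.getRotation()
// into degrees suitable for Matrix.rotateM().
static int rotationToDegrees(Display display) {
    switch (display.getRotation()) {
        case Surface.ROTATION_90:  return 90;
        case Surface.ROTATION_180: return 180;
        case Surface.ROTATION_270: return 270;
        case Surface.ROTATION_0:
        default:                   return 0;
    }
}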

OpenGL ES 2.0 Android Shading

I am trying to implement a 3D application for Android, and I am having trouble when drawing 3D objects such as a cone.
The problem is that I can't see the transitions between the different faces; all of them are drawn with the same color.
I think I need to add shading to the polygons, but I can't find any tutorial showing me how to do that.
This is the code I am using to draw a cone:
public class Cone {
    float baseSize = 0f;
    float height = 0f;
    protected final float[] mTransformMatrix = new float[16];
    private FloatBuffer vertexBuffer;
    private final int mProgram;
    private final String vertexShaderCode =
            // This matrix member variable provides a hook to manipulate
            // the coordinates of the objects that use this vertex shader
            "uniform mat4 uMVPMatrix;" +
            "attribute vec4 vPosition;" +
            "void main() {" +
            // the matrix must be included as a modifier of gl_Position
            // Note that the uMVPMatrix factor *must be first* in order
            // for the matrix multiplication product to be correct.
            "  gl_Position = uMVPMatrix * vPosition;" +
            "}";
    // Use to access and set the view transformation
    private int mMVPMatrixHandle;
    private final String fragmentShaderCode =
            "precision mediump float;" +
            "uniform vec4 vColor;" +
            "void main() {" +
            "  gl_FragColor = vColor;" +
            "}";
    // number of coordinates per vertex in this array
    static final int COORDS_PER_VERTEX = 3;
    static float topCoords[] = new float[30];
    static float baseCoords[] = new float[30];
    static float lineCoords[] = new float[96];
    // Set color with red, green, blue and alpha (opacity) values
    float color[] = { 1f, 0f, 0f, 1.0f };
    float linecolor[] = { 1f, 1f, 1f, 1.0f };

    public Cone(float baseSize, float height) {
        this.baseSize = baseSize;
        this.height = height;
        float ang = (float) ((2 * Math.PI) / 8);
        Matrix.setIdentityM(mTransformMatrix, 0);
        // initialize vertex byte buffer for shape coordinates
        // (number of coordinate values * 4 bytes per float)
        ByteBuffer bb = ByteBuffer.allocateDirect((topCoords.length * 2 + lineCoords.length) * 4);
        // use the device hardware's native byte order
        bb.order(ByteOrder.nativeOrder());
        // create a floating point buffer from the ByteBuffer
        vertexBuffer = bb.asFloatBuffer();
        // add the coordinates to the FloatBuffer
        topCoords[0] = 0;
        topCoords[1] = height;
        topCoords[2] = 0;
        baseCoords[0] = 0;
        baseCoords[1] = 0;
        baseCoords[2] = 0;
        for (int i = 1; i < 10; i++) {
            topCoords[i * 3] = this.baseSize * (float) Math.cos(i * ang);
            topCoords[i * 3 + 1] = 0;
            topCoords[i * 3 + 2] = this.baseSize * (float) Math.sin(i * ang);
            baseCoords[i * 3] = this.baseSize * (float) Math.cos(i * ang);
            baseCoords[i * 3 + 1] = 0;
            baseCoords[i * 3 + 2] = this.baseSize * (float) Math.sin(i * ang);
        }
        for (int i = 0; i < 8; i++) {
            lineCoords[i * 6] = 0;
            lineCoords[i * 6 + 1] = height;
            lineCoords[i * 6 + 2] = 0;
            lineCoords[i * 6 + 3] = this.baseSize * (float) Math.cos((i + 1) * ang);
            lineCoords[i * 6 + 4] = 0;
            lineCoords[i * 6 + 5] = this.baseSize * (float) Math.sin((i + 1) * ang);
        }
        int j = 0;
        for (int i = 8; i < 16; i++) {
            lineCoords[i * 6] = this.baseSize * (float) Math.cos((j + 1) * ang);
            lineCoords[i * 6 + 1] = 0;
            lineCoords[i * 6 + 2] = this.baseSize * (float) Math.sin((j + 1) * ang);
            lineCoords[i * 6 + 3] = this.baseSize * (float) Math.cos((j + 2) * ang);
            lineCoords[i * 6 + 4] = 0;
            lineCoords[i * 6 + 5] = this.baseSize * (float) Math.sin((j + 2) * ang);
            j++;
        }
        vertexBuffer.put(topCoords);
        vertexBuffer.put(baseCoords);
        vertexBuffer.put(lineCoords);
        // set the buffer to read the first coordinate
        vertexBuffer.position(0);
        int vertexShader = MyGLRenderer.loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
        int fragmentShader = MyGLRenderer.loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
        // create empty OpenGL ES Program
        mProgram = GLES20.glCreateProgram();
        // add the vertex shader to program
        GLES20.glAttachShader(mProgram, vertexShader);
        // add the fragment shader to program
        GLES20.glAttachShader(mProgram, fragmentShader);
        // creates OpenGL ES program executables
        GLES20.glLinkProgram(mProgram);
    }

    private int mPositionHandle;
    private int mColorHandle;
    private final int topVertexCount = topCoords.length / COORDS_PER_VERTEX;
    private final int lineVertexCount = lineCoords.length / COORDS_PER_VERTEX;
    private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex

    public void draw(float[] mvpMatrix) {
        // Add program to OpenGL ES environment
        GLES20.glUseProgram(mProgram);
        // get handle to vertex shader's vPosition member
        mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
        // Enable a handle to the triangle vertices
        GLES20.glEnableVertexAttribArray(mPositionHandle);
        // Prepare the triangle coordinate data
        GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
        // get handle to fragment shader's vColor member
        mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
        // Set color for drawing the cone
        GLES20.glUniform4fv(mColorHandle, 1, color, 0);
        // get handle to shape's transformation matrix
        mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
        // Pass the projection and view transformation to the shader
        GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
        // Draw the cone
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_FAN, 0, topVertexCount);
        // Draw base
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_FAN, topVertexCount, topVertexCount);
        // Draw cone lines
        GLES20.glUniform4fv(mColorHandle, 1, linecolor, 0);
        GLES20.glDrawArrays(GLES20.GL_LINES, topVertexCount * 2, lineVertexCount);
        // Disable vertex array
        GLES20.glDisableVertexAttribArray(mPositionHandle);
    }
}
Thanks for the help
Your fragment shader code indeed assigns the same color to every fragment it processes. There are a number of different ways you can add lighting to your scene. Gouraud shading is one of the easiest to implement with modern shaders: the normal at each vertex of a triangle is interpolated across the triangle, and a light intensity is computed based on the light direction. In modern shading languages (including OpenGL ES 2), this interpolation is done for you.
There are many other possible lighting models, but most (if not all, including Gouraud shading) require that you generate vertex normals, which you are not doing in your cone mesh generation code.
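As an illustration (a minimal sketch in the style of the question's shaders, not the asker's code; vNormal, fNormal, and uLightDir are names invented here), a diffuse-lit shader pair that interpolates a per-vertex normal and computes an intensity from a light direction could look like this:

// Vertex shader: transform the position and pass the normal through;
// the varying is interpolated across each triangle automatically.
private final String litVertexShaderCode =
        "uniform mat4 uMVPMatrix;" +
        "attribute vec4 vPosition;" +
        "attribute vec3 vNormal;" +   // per-vertex normal you must generate
        "varying vec3 fNormal;" +
        "void main() {" +
        "  fNormal = vNormal;" +
        "  gl_Position = uMVPMatrix * vPosition;" +
        "}";

// Fragment shader: Lambertian diffuse term with a crude 0.2 ambient floor.
private final String litFragmentShaderCode =
        "precision mediump float;" +
        "uniform vec4 vColor;" +
        "uniform vec3 uLightDir;" +   // normalized direction toward the light
        "varying vec3 fNormal;" +
        "void main() {" +
        "  float diffuse = max(dot(normalize(fNormal), uLightDir), 0.2);" +
        "  gl_FragColor = vec4(vColor.rgb * diffuse, vColor.a);" +
        "}";

Strictly speaking, evaluating the term per fragment with an interpolated normal is Phong-style shading; classic Gouraud computes the intensity at each vertex and interpolates the resulting color, which is cheaper but shows banding across large triangles. Either way you must supply normals (and, for a moving object, transform them by the model matrix, which is omitted here for brevity).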

Using a video stream as an OpenGL ES 2.0 texture

I'm trying to capture video and display it on screen by binding an OpenGL ES texture to an Android SurfaceTexture. I can't use a TextureView and implement SurfaceTextureListener as per this tutorial, since I am using Google Cardboard.
I have followed the Android documentation on how to initialise OpenGL ES 2.0 and use it, and also this tutorial on texturing.
Putting the two together, I get a blank screen and occasionally <core_glBindTexture:572>: GL_INVALID_OPERATION in the console window.
Overwhelmed by so many new concepts, I'm not able to debug the problem or even tell whether the two approaches can be combined like this. Below is my drawing code; it is initialised in onSurfaceCreated() of the MainActivity class and drawn from onEyeDraw(), which is Cardboard's draw function.
package com.example.rich.test3;

import android.hardware.Camera;
import android.opengl.GLES20;
import android.view.TextureView;

import java.nio.ShortBuffer;
import java.nio.FloatBuffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

/**
 * Created by rich on 03/05/2015.
 */
public class Square {
    private java.nio.FloatBuffer vertexBuffer;
    private java.nio.ShortBuffer drawListBuffer;
    private final java.nio.FloatBuffer mCubeTextureCoordinates;
    float color[] = { 1.f, 1.f, 1.f, 1.0f };
    private final String vertexShaderCode =
            "attribute vec4 vPosition;" +
            "attribute vec2 a_TexCoordinate;" +
            "varying vec2 v_TexCoordinate;" +
            "void main() {" +
            "  gl_Position = vPosition;" +
            "  v_TexCoordinate = a_TexCoordinate;" +
            "}";
    private final String fragmentShaderCode =
            "precision mediump float;" +
            "uniform vec4 vColor;" +
            "uniform sampler2D u_Texture;" +
            "varying vec2 v_TexCoordinate;" +
            "void main() {" +
            "  gl_FragColor = (texture2D(u_Texture, v_TexCoordinate));" +
            "}";
    // number of coordinates per vertex in this array
    static final int COORDS_PER_VERTEX = 3;
    static float squareCoords[] = {
            -0.5f, -0.5f, 0.0f,   // bottom left
             0.5f, -0.5f, 0.0f,   // bottom right
            -0.5f,  0.5f, 0.0f,   // top left
             0.5f,  0.5f, 0.0f }; // top right
    private short drawOrder[] = { 0, 1, 2, 0, 2, 3 }; // order to draw vertices
    private int mProgram;
    private int mPositionHandle;
    private int mColorHandle;
    private int mTextureUniformHandle;
    private int mTextureCoordinateHandle;
    private final int mTextureCoordinateDataSize = 2;
    private final int vertexCount = squareCoords.length / COORDS_PER_VERTEX;
    private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex
    private int mTextureDataHandle;
    float textureCoordinates[] = {
            0.0f, 1.0f,
            1.0f, 1.0f,
            0.0f, 0.0f,
            1.0f, 0.0f };
    Camera _camera;
    TextureView _textureView;
    int[] textures;
    android.graphics.SurfaceTexture _surface;

    public Square() {
        // (# of coordinate values * 4 bytes per float)
        ByteBuffer bb = ByteBuffer.allocateDirect(squareCoords.length * 4);
        bb.order(ByteOrder.nativeOrder());
        vertexBuffer = bb.asFloatBuffer();
        vertexBuffer.put(squareCoords);
        vertexBuffer.position(0);
        // initialize byte buffer for the draw list
        // (# of coordinate values * 2 bytes per short)
        ByteBuffer dlb = ByteBuffer.allocateDirect(drawOrder.length * 2);
        dlb.order(ByteOrder.nativeOrder());
        drawListBuffer = dlb.asShortBuffer();
        drawListBuffer.put(drawOrder);
        drawListBuffer.position(0);
        mCubeTextureCoordinates = ByteBuffer.allocateDirect(textureCoordinates.length * 4)
                .order(ByteOrder.nativeOrder()).asFloatBuffer();
        mCubeTextureCoordinates.put(textureCoordinates).position(0);
        // create empty OpenGL ES Program
        mProgram = GLES20.glCreateProgram();
        textures = new int[1];
        GLES20.glGenTextures(1, textures, 0);
        _surface = new android.graphics.SurfaceTexture(textures[0]);
        _camera = Camera.open();
        Camera.Size previewSize = _camera.getParameters().getPreviewSize();
        try {
            _camera.setPreviewTexture(_surface);
        } catch (java.io.IOException ex) {
            // Console.writeLine (ex.Message);
        }
        final int vertexShaderHandle = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
        GLES20.glShaderSource(vertexShaderHandle, vertexShaderCode);
        GLES20.glCompileShader(vertexShaderHandle);
        final int[] compileStatus = new int[1];
        GLES20.glGetShaderiv(vertexShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
        if (compileStatus[0] == 0) {
            // do check here
        }
        final int fragmentShaderHandle = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
        GLES20.glShaderSource(fragmentShaderHandle, fragmentShaderCode);
        GLES20.glCompileShader(fragmentShaderHandle);
        GLES20.glGetShaderiv(fragmentShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
        if (compileStatus[0] == 0) {
            // do check here
        }
        GLES20.glAttachShader(mProgram, vertexShaderHandle);
        GLES20.glAttachShader(mProgram, fragmentShaderHandle);
        GLES20.glBindAttribLocation(mProgram, 0, "a_Position");
        GLES20.glBindAttribLocation(mProgram, 0, "a_TexCoordinate");
        GLES20.glLinkProgram(mProgram);
        final int[] linkStatus = new int[1];
        GLES20.glGetProgramiv(mProgram, GLES20.GL_LINK_STATUS, linkStatus, 0);
        if (linkStatus[0] == 0) {
            // do check here
        }
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
        mTextureDataHandle = textures[0];
        // Set filtering
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
    }

    public void draw() {
        _surface.updateTexImage();
        GLES20.glUseProgram(mProgram);
        mTextureUniformHandle = GLES20.glGetUniformLocation(mProgram, "u_Texture");
        mPositionHandle = GLES20.glGetAttribLocation(mProgram, "a_Position");
        mColorHandle = GLES20.glGetAttribLocation(mProgram, "a_Color");
        mTextureCoordinateHandle = GLES20.glGetAttribLocation(mProgram, "a_TexCoordinate");
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);
        GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
        GLES20.glVertexAttribPointer(mTextureCoordinateHandle, mTextureCoordinateDataSize, GLES20.GL_FLOAT, false, 0, mCubeTextureCoordinates);
        GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
        GLES20.glEnableVertexAttribArray(mPositionHandle);
        GLES20.glUniform1i(mTextureUniformHandle, 0);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, vertexCount);
        GLES20.glDisableVertexAttribArray(mPositionHandle);
    }
}
When rendering a SurfaceTexture texture object, you need to use the GL_TEXTURE_EXTERNAL_OES texture target:
The texture object uses the GL_TEXTURE_EXTERNAL_OES texture target, which is defined by the GL_OES_EGL_image_external OpenGL ES extension. This limits how the texture may be used. Each time the texture is bound it must be bound to the GL_TEXTURE_EXTERNAL_OES target rather than the GL_TEXTURE_2D target. Additionally, any OpenGL ES 2.0 shader that samples from the texture must declare its use of this extension using, for example, an "#extension GL_OES_EGL_image_external : require" directive. Such shaders must also access the texture using the samplerExternalOES GLSL sampler type.
So you need to change your fragment shader like this, adding the #extension declaration and declaring your texture uniform as samplerExternalOES:
private final String fragmentShaderCode =
        "#extension GL_OES_EGL_image_external : require\n" +
        "precision mediump float;" +
        "uniform vec4 vColor;" +
        "uniform samplerExternalOES u_Texture;" +
        "varying vec2 v_TexCoordinate;" +
        "void main() {" +
        "  gl_FragColor = (texture2D(u_Texture, v_TexCoordinate));" +
        "}";
Also in your draw() function, bind the texture like this:
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureDataHandle);
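Note that the texture setup in the question's constructor has the same problem: it binds the texture and sets its filter parameters on GL_TEXTURE_2D. Per the documentation quoted above, every bind must use the external target, so (a sketch of the corrected constructor lines):

GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]);
// Set filtering on the external target as well
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
        GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
        GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);

This is also likely the source of the logged <core_glBindTexture:572>: GL_INVALID_OPERATION, since a texture name attached to a SurfaceTexture cannot be bound to GL_TEXTURE_2D.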
You can't use a normal texture to render a camera or video preview; you have to use the GL_TEXTURE_EXTERNAL_OES extension. I had the same problem and found a complete working solution on GitHub. The name of the project is android_instacam.
Here you will find source code to study. If you want to see it in action directly on your device, just go to the Play Store here.

Triangle not visible in OpenGL ES 2.0 on Android

Hello guys, I am a beginner at OpenGL.
I am trying to follow the Android developers tutorials, but I am not able to see the triangle.
What is wrong?
I create a triangle in onSurfaceCreated and call its draw method inside the onDrawFrame of the Renderer class.
Triangle class:
public class Triangle {
    private final String vertexShaderCode =
            "attribute vec4 vPosition;" +
            "void main() {" +
            "  gl_Position = vPosition;" +
            "}";
    private final int vertexCount = triangleCoords.length / COORDS_PER_VERTEX;
    private final int vertexStride = COORDS_PER_VERTEX * 4;
    private int mProgram, mPositionHandle, mColorHandle;
    private final String fragmentShaderCode =
            "precision mediump float;" +
            "uniform vec4 vColor;" +
            "void main() {" +
            "  gl_FragColor = vColor;" +
            "}";
    private FloatBuffer vertexBuffer;
    // number of coordinates per vertex in this array
    static final int COORDS_PER_VERTEX = 3;
    static float triangleCoords[] = { // in counterclockwise order:
             0.0f,  0.622008459f, 0.0f,  // top
            -0.5f, -0.311004243f, 0.0f,  // bottom left
             0.5f, -0.311004243f, 0.0f   // bottom right
    };
    // Set color with red, green, blue and alpha (opacity) values
    float color[] = { 0.63671875f, 0.76953125f, 0.22265625f, 1.0f };

    public Triangle() {
        // initialize vertex byte buffer for shape coordinates
        // (number of coordinate values * 4 bytes per float)
        ByteBuffer bb = ByteBuffer.allocateDirect(triangleCoords.length * 4);
        // use the device hardware's native byte order
        bb.order(ByteOrder.nativeOrder());
        // create a floating point buffer from the ByteBuffer
        vertexBuffer = bb.asFloatBuffer();
        // add the coordinates to the FloatBuffer
        vertexBuffer.put(triangleCoords);
        // set the buffer to read the first coordinate
        vertexBuffer.position(0);
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
        int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
        mProgram = GLES20.glCreateProgram();             // create empty OpenGL ES Program
        GLES20.glAttachShader(mProgram, vertexShader);   // add the vertex shader to program
        GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
        GLES20.glLinkProgram(mProgram);
    }

    public static int loadShader(int type, String shaderCode) {
        // create a vertex shader type (GLES20.GL_VERTEX_SHADER)
        // or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
        int shader = GLES20.glCreateShader(type);
        // add the source code to the shader and compile it
        GLES20.glShaderSource(shader, shaderCode);
        GLES20.glCompileShader(shader);
        return shader;
    }

    public void draw() {
        // Add program to OpenGL ES environment
        GLES20.glUseProgram(mProgram);
        // get handle to vertex shader's vPosition member
        mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
        // Enable a handle to the triangle vertices
        GLES20.glEnableVertexAttribArray(mPositionHandle);
        // Prepare the triangle coordinate data
        GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
        // get handle to fragment shader's vColor member
        mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
        // Set color for drawing the triangle
        GLES20.glUniform4fv(mColorHandle, 1, color, 0);
        // Draw the triangle
        GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, vertexCount);
        // Disable vertex array
        GLES20.glDisableVertexAttribArray(mPositionHandle);
    }
}
You are missing code to set up a projection matrix and a viewport. You also need to call eglSwapBuffers(), unless you are using GLSurfaceView, which does that for you. You can use an ortho projection for simplicity, and it should be multiplied with each vPosition in your vertex shader.
This is how you can use and construct a projection matrix:
Ortho(-1.0f, -1.0f, 1.0f, 1.0f, 1.0f, -1.0f);
glUniformMatrix4fv(iProjectionMatrixLocation, 1, GL_FALSE, (const GLfloat *)&m_mViewProj);
glViewport(0, 0, m_iWidth, m_iHeight);
...

// Construct a matrix for an orthographic projection view.
void Button::Ortho(float left, float top, float right, float bottom, float nearPlane, float farPlane)
{
    float rcplmr = 1.0f / (left - right);
    float rcpbmt = 1.0f / (bottom - top);
    float rcpnmf = 1.0f / (nearPlane - farPlane);

    m_mViewProj.f0  = -2.0f * rcplmr;
    m_mViewProj.f1  = 0.0f;
    m_mViewProj.f2  = 0.0f;
    m_mViewProj.f3  = 0.0f;

    m_mViewProj.f4  = 0.0f;
    m_mViewProj.f5  = -2.0f * rcpbmt;
    m_mViewProj.f6  = 0.0f;
    m_mViewProj.f7  = 0.0f;

    m_mViewProj.f8  = 0.0f;
    m_mViewProj.f9  = 0.0f;
    m_mViewProj.f10 = -2.0f * rcpnmf;
    m_mViewProj.f11 = 0.0f;

    m_mViewProj.f12 = (right + left) * rcplmr;
    m_mViewProj.f13 = (top + bottom) * rcpbmt;
    m_mViewProj.f14 = (nearPlane + farPlane) * rcpnmf;
    m_mViewProj.f15 = 1.0f;
}
The third article here will help:
http://montgomery1.com/opengl/
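In Android/Java terms the same idea (a sketch using android.opengl.Matrix; mProjectionMatrix and mProgram are assumed fields of your GLSurfaceView.Renderer) looks roughly like this:

// In onSurfaceChanged(): match the viewport to the surface and build
// an ortho projection covering -1..1 on every axis.
public void onSurfaceChanged(GL10 unused, int width, int height) {
    GLES20.glViewport(0, 0, width, height);
    Matrix.orthoM(mProjectionMatrix, 0, -1f, 1f, -1f, 1f, -1f, 1f);
}

// In onDrawFrame(), before glDrawArrays(): upload the matrix. The vertex
// shader must declare "uniform mat4 uMVPMatrix" and compute
// "gl_Position = uMVPMatrix * vPosition;" as in the other questions here.
int matrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
GLES20.glUniformMatrix4fv(matrixHandle, 1, false, mProjectionMatrix, 0);

GLSurfaceView calls eglSwapBuffers() for you after onDrawFrame() returns, so with that wiring the missing pieces are just the viewport and the projection.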

GLES20.glGetAttribLocation() returns -1

I am having trouble getting my Android OpenGL ES 2.0 application working. I have a "Triangle" class very similar to several tutorials out there. In the "draw" function, mPositionHandle is -1 after the following line of code:
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
Below is the entire Triangle class, including the shader code. Any ideas what might cause this? Sorry, I am new to OpenGL programming, so maybe this is a dumb question.
public class Triangle {
    private final String vertexShaderCode =
            // This matrix member variable provides a hook to manipulate
            // the coordinates of the objects that use this vertex shader
            "uniform mat4 uMVPMatrix;" +
            "attribute vec4 vPosition;" +
            "void main() {" +
            // the matrix must be included as a modifier of gl_Position
            "  gl_Position = uMVPMatrix * vPosition;" +
            "}";
    private final String fragmentShaderCode =
            "precision mediump float;" +
            "uniform vec4 vColor;" +
            "void main() {" +
            "  gl_FragColor = vColor;" +
            "}";
    private final FloatBuffer vertexBuffer;
    private final int mProgram;
    private int mPositionHandle;
    private int mColorHandle;
    private int mMVPMatrixHandle;
    // number of coordinates per vertex in this array
    static final int COORDS_PER_VERTEX = 3;
    public float triangleCoords[];
    private final int vertexCount = 3; // triangleCoords.length / COORDS_PER_VERTEX;
    private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex
    // Set color with red, green, blue and alpha (opacity) values
    float color[] = { 0.5f, 1.0f, 0.5f, 1.0f };
    float edgeColor[] = { 0.0f, 0.0f, 0.0f, 1.0f };

    public Triangle(float[] vertices) {
        this.triangleCoords = vertices;
        // initialize vertex byte buffer for shape coordinates
        // (number of coordinate values * 4 bytes per float)
        ByteBuffer bb = ByteBuffer.allocateDirect(triangleCoords.length * 4);
        // use the device hardware's native byte order
        bb.order(ByteOrder.nativeOrder());
        // create a floating point buffer from the ByteBuffer
        vertexBuffer = bb.asFloatBuffer();
        // add the coordinates to the FloatBuffer
        vertexBuffer.put(triangleCoords);
        // set the buffer to read the first coordinate
        vertexBuffer.position(0);
        // prepare shaders and OpenGL program
        int vertexShader = MyRenderer.loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
        int fragmentShader = MyRenderer.loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
        mProgram = GLES20.glCreateProgram();             // create empty OpenGL Program
        GLES20.glAttachShader(mProgram, vertexShader);   // add the vertex shader to program
        GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
        GLES20.glLinkProgram(mProgram);                  // create OpenGL program executables
    }

    public void draw(float[] mvpMatrix) {
        // Add program to OpenGL environment
        GLES20.glUseProgram(mProgram);
        // get handle to vertex shader's vPosition member
        mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
        // Enable a handle to the facet vertices
        GLES20.glEnableVertexAttribArray(mPositionHandle);
        // Prepare the triangle coordinate data
        GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
        // get handle to fragment shader's vColor member
        mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
        // Set color for drawing the facet
        GLES20.glUniform4fv(mColorHandle, 1, color, 0);
        // get handle to shape's transformation matrix
        mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
        MyRenderer.checkGlError("glGetUniformLocation");
        // Apply the projection and view transformation
        GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
        MyRenderer.checkGlError("glUniformMatrix4fv");
        // Draw the facet
        GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, vertexCount);
        // If edge mode
        //GLES20.glUniform4fv(mColorHandle, 1, edgeColor, 0);
        //GLES20.glLineWidth(2.0f);
        //GLES20.glDrawArrays(GLES20.GL_LINE_STRIP, 0, vertexCount);
        // Disable vertex array
        GLES20.glDisableVertexAttribArray(mPositionHandle);
    }
}
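One standard way to narrow this down (a general debugging sketch; checkProgram is a made-up helper, and android.util.Log is assumed) is to dump the compile and link logs, since glGetAttribLocation() returns -1 both when the program failed to link and when the attribute doesn't exist in (or was optimized out of) the linked program:

// Call after GLES20.glLinkProgram(mProgram) in the constructor.
static void checkProgram(int program, int vertexShader, int fragmentShader) {
    int[] status = new int[1];
    GLES20.glGetShaderiv(vertexShader, GLES20.GL_COMPILE_STATUS, status, 0);
    if (status[0] == 0) {
        Log.e("GL", "vertex shader: " + GLES20.glGetShaderInfoLog(vertexShader));
    }
    GLES20.glGetShaderiv(fragmentShader, GLES20.GL_COMPILE_STATUS, status, 0);
    if (status[0] == 0) {
        Log.e("GL", "fragment shader: " + GLES20.glGetShaderInfoLog(fragmentShader));
    }
    GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, status, 0);
    if (status[0] == 0) {
        Log.e("GL", "link: " + GLES20.glGetProgramInfoLog(program));
    }
}

Another common cause is calling GL functions (including this constructor) before a GL context is current, i.e. anywhere other than the renderer's onSurfaceCreated/onSurfaceChanged/onDrawFrame callbacks; in that case every call fails and all locations come back -1.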
