Rendering artefacts on torus - android

I am trying to render an opaque torus in Android using OpenGL ES 2.0. When I added colour, following this guide, I noticed an artefact when viewing the torus from certain perspectives. I have linked an image that shows this, though the animation here may make it clearer.
After some initial reading, I considered that it could be a depth buffer issue, since it appears that the interior rear surface could be being rendered over the exterior, front-facing surface that should be seen. However, changing the view frustum near/far limits to try and maximise the separation between surfaces hasn't helped.
I am certain that the vertices themselves are correct, from rendering using GLES20.GL_LINES instead of GLES20.GL_TRIANGLES. Any ideas what could be causing this artefact?
Below is the code for the surface:
/**
 * Renders an opaque torus with per-vertex colour using OpenGL ES 2.0,
 * from interleaved client-side buffers (x,y,z,r,g,b,a per vertex) and an
 * index buffer holding two triangles per (theta, phi) surface quad.
 *
 * NOTE(review): the rendering artefact described in the question is not fixed
 * in this class. The depth test must be enabled in the renderer
 * (GLES20.glEnable(GLES20.GL_DEPTH_TEST)) and a depth buffer must be requested
 * on the GLSurfaceView, e.g. setEGLConfigChooser(8, 8, 8, 8, 16, 0).
 */
public class Miller {
    private FloatBuffer verticesBuffer;  // interleaved position + colour data
    private ShortBuffer indicesBuffer;   // 6 indices (2 triangles) per quad
    final int nTheta = 50; // Number of divisions per 2pi theta.
    final int nPhi = 50; // And per 2pi phi.
    private int mProgramHandle;
    private final int POSITION_DATA_SIZE_IN_ELEMENTS = 3; // Elements per position per vertex (x,y,z)
    private final int COLOR_DATA_SIZE_IN_ELEMENTS = 4; // Elements per colour per vertex (r,g,b,a)
    private final int BYTES_PER_FLOAT = 4; // Number of bytes used per float.
    private final int BYTES_PER_SHORT = 2; // Number of bytes used per short.
    // Byte distance between the starts of consecutive vertices in the
    // interleaved vertex buffer.
    final int STRIDE = (POSITION_DATA_SIZE_IN_ELEMENTS + COLOR_DATA_SIZE_IN_ELEMENTS) * BYTES_PER_FLOAT;
    // Use to access and set the view transformation.
    private int mMVPMatrixHandle;
    // Pass-through fragment shader: paints the interpolated vertex colour.
    private final String fragmentShaderCode =
            "precision mediump float;" +
            "varying vec4 vColor;" +
            "void main() {" +
            " gl_FragColor = vColor;" +
            "}";
    // Vertex shader: transforms by the MVP matrix and forwards the colour.
    private final String vertexShaderCode =
            "uniform mat4 uMVPMatrix;" +
            "attribute vec4 aColor;" +
            "attribute vec4 aPosition;" +
            "varying vec4 vColor;" +
            "void main() {" +
            " vColor = aColor;" +
            " gl_Position = uMVPMatrix * aPosition;" +
            "}";
    private float a; // Minor radius
    private float R0; // Major radius
    int nVertices = nTheta * nPhi; // Number of vertices

    /**
     * Builds the torus geometry, colours and index list, then compiles and
     * links the shader program.
     *
     * @param minrad minor (tube) radius before rescaling
     * @param majrad major (ring) radius before rescaling
     */
    Miller(float minrad, float majrad) {
        this.R0 = majrad / 3.0f; // Rescale to fit the view volume.
        this.a = minrad / 3.0f;
        ByteBuffer buffer1 = ByteBuffer.allocateDirect(nVertices * (POSITION_DATA_SIZE_IN_ELEMENTS + COLOR_DATA_SIZE_IN_ELEMENTS) * BYTES_PER_FLOAT);
        buffer1.order(ByteOrder.nativeOrder());
        verticesBuffer = buffer1.asFloatBuffer();
        for (int iTheta = 0; iTheta < nTheta; iTheta++) {
            float theta = (float) (iTheta * 2 * Math.PI / nTheta);
            for (int iPhi = 0; iPhi < nPhi; iPhi++) {
                float phi = (float) (iPhi * 2 * Math.PI / nPhi);
                // Standard circular-torus parameterisation: theta runs around
                // the tube, phi around the ring (y is the tube's "vertical").
                float x = (float) ((R0 + a * Math.cos(theta)) * Math.cos(phi));
                float y = (float) (a * Math.sin(theta));
                float z = (float) ((R0 + a * Math.cos(theta)) * Math.sin(phi));
                verticesBuffer.put(x);
                verticesBuffer.put(y);
                verticesBuffer.put(z);
                // Colour varies with phi only; cx and cy are identical
                // (both sin^2(phi)) — presumably intentional for a
                // cyan-to-blue gradient, but worth confirming.
                float cx = (float) Math.pow(Math.sin(phi), 2);
                float cy = (float) Math.pow(Math.sin(phi), 2);
                float cz = (float) Math.pow(Math.cos(phi), 2);
                verticesBuffer.put(cx);
                verticesBuffer.put(cy);
                verticesBuffer.put(cz);
                verticesBuffer.put(1.0f); // Opaque
            }
        }
        verticesBuffer.position(0);
        // Index buffer: 6 shorts (2 triangles) per quad, 2 bytes per short.
        ByteBuffer buffer2 = ByteBuffer.allocateDirect(nPhi * nTheta * POSITION_DATA_SIZE_IN_ELEMENTS * BYTES_PER_SHORT * 2);
        buffer2.order(ByteOrder.nativeOrder());
        indicesBuffer = buffer2.asShortBuffer();
        for (int iTheta = 0; iTheta < nTheta; iTheta++) {
            for (int iPhi = 0; iPhi < nPhi; iPhi++) {
                int f = iTheta * nPhi + iPhi; // First vertex of this quad
                int s, fp1, sp1; // Second, first+1, second+1.
                if (iTheta != nTheta - 1) {
                    s = f + nPhi;
                } else {
                    s = iPhi; // Wrap the last theta row back to theta = 0.
                }
                if (iPhi != nPhi - 1) {
                    fp1 = f + 1;
                    sp1 = s + 1;
                } else {
                    fp1 = f - iPhi; // Wrap the last phi column back to phi = 0.
                    sp1 = s - iPhi;
                }
                indicesBuffer.put((short) f); // First triangle
                indicesBuffer.put((short) fp1);
                indicesBuffer.put((short) s);
                indicesBuffer.put((short) s); // Second triangle
                indicesBuffer.put((short) fp1);
                indicesBuffer.put((short) sp1);
            }
        }
        indicesBuffer.position(0);
        // Compile both shaders and link them into a program.
        int vertexShaderHandle = TokGLRenderer.loadShader(GLES20.GL_VERTEX_SHADER,
                vertexShaderCode);
        int fragmentShaderHandle = TokGLRenderer.loadShader(GLES20.GL_FRAGMENT_SHADER,
                fragmentShaderCode);
        mProgramHandle = GLES20.glCreateProgram();
        GLES20.glAttachShader(mProgramHandle, vertexShaderHandle);
        GLES20.glAttachShader(mProgramHandle, fragmentShaderHandle);
        // Bind attribute locations before linking so they are deterministic.
        GLES20.glBindAttribLocation(mProgramHandle, 0, "aPosition");
        GLES20.glBindAttribLocation(mProgramHandle, 1, "aColor");
        GLES20.glLinkProgram(mProgramHandle);
    }

    private int mPositionHandle;
    private int mColorHandle;

    /**
     * Draws the torus with the supplied model-view-projection matrix.
     * Must be called on the GL thread with a current context.
     *
     * @param mvpMatrix 4x4 column-major MVP matrix
     */
    public void draw(float[] mvpMatrix) {
        GLES20.glUseProgram(mProgramHandle);
        mPositionHandle = GLES20.glGetAttribLocation(mProgramHandle, "aPosition");
        mColorHandle = GLES20.glGetAttribLocation(mProgramHandle, "aColor");
        mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgramHandle, "uMVPMatrix");
        // Position attribute: start of the interleaved buffer.
        // (The original issued this glVertexAttribPointer call twice; the
        // redundant first call has been removed.)
        verticesBuffer.position(0);
        GLES20.glVertexAttribPointer(mPositionHandle, POSITION_DATA_SIZE_IN_ELEMENTS, GLES20.GL_FLOAT, false, STRIDE, verticesBuffer);
        GLES20.glEnableVertexAttribArray(mPositionHandle);
        // Colour attribute: offset past the 3 position floats.
        verticesBuffer.position(POSITION_DATA_SIZE_IN_ELEMENTS);
        GLES20.glVertexAttribPointer(mColorHandle, COLOR_DATA_SIZE_IN_ELEMENTS, GLES20.GL_FLOAT, false, STRIDE, verticesBuffer);
        GLES20.glEnableVertexAttribArray(mColorHandle);
        // Pass the combined projection * view transformation to the shader.
        GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
        // 6 indices per quad, nTheta * nPhi quads.
        GLES20.glDrawElements(GLES20.GL_TRIANGLES, 6 * nTheta * nPhi, GLES20.GL_UNSIGNED_SHORT, indicesBuffer);
        GLES20.glDisableVertexAttribArray(mPositionHandle);
        GLES20.glDisableVertexAttribArray(mColorHandle);
    }
}
and for the renderer:
public class TokGLRenderer implements GLSurfaceView.Renderer {
// mMVPMatrix is an abbreviation for "Model View Projection Matrix"
private final float[] mMVPMatrix = new float[16];
private final float[] mProjectionMatrix = new float[16];
private final float[] mViewMatrix = new float[16];
private Miller surf;
public void onSurfaceCreated(GL10 unused) {
surf = new Miller(0.96f, 3.1439243f);
}
public void onSurfaceChanged(GL10 gl10, int width, int height) {
GLES20.glViewport(0,0, width, height);
float ratio = (float) width / height;
// this projection matrix is applied to object coordinates
// in the onDrawFrame() method
float zoom = 0.9f;
Matrix.frustumM(mProjectionMatrix, 0, -ratio/zoom, ratio/zoom, -1f/zoom, 1f/zoom, 7f, 11f);
}
private float[] mRotationMatrix = new float[16];
public void onDrawFrame(GL10 unused) {
// Redraw background color
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
// Set the camera position (View matrix)
Matrix.setLookAtM(mViewMatrix, 0, 5f, 5f, 5f, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
// Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mViewMatrix, 0);
surf.draw(mMVPMatrix);
}
public static int loadShader(int type, String shaderCode){
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
}

To make the depth test work you have to enable the depth test ( GLES20.glEnable(GLES20.GL_DEPTH_TEST)) and you have to specify the size of the depth buffer.
In GLSurfaceView this can be done by the 4th parameter of setEGLConfigChooser:
e.g. depth buffer size of 16 bits:
setEGLConfigChooser(8, 8, 8, 8, 16, 0)

Related

Android OpenGL animation

I'm at the beginning of OpenGL 2.0 and I've implemented the Google tutorial about drawing a square and a triangle. Obviously it works fine :)
Now I'm trying to add some movement (translating the square vertically). The problem is that the square moves but only once. Someone told me that there's no loop, but I think that the loop is provided by the "onDrawFrame" method, isn't it?
Can anyone help me to understand where's my mistake(s)?
Thankyou for your time.
MainActivity:
public class MainActivity extends Activity {
private GLSurfaceView mGLView;
#Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Create a GLSurfaceView instance and set it
// as the ContentView for this Activity
mGLView = new MyGLSurfaceView(this);
setContentView(mGLView);
}
#Override
protected void onPause() {
super.onPause();
mGLView.onPause();
}
#Override
protected void onResume() {
super.onResume();
mGLView.onResume();
}
}
MyGLSurfaceView class:
/**
 * GLSurfaceView configured for OpenGL ES 2.0.
 *
 * Fix (per the accepted answer below): the call
 * setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY) has been removed.
 * In dirty-render mode onDrawFrame() only runs after requestRender(), so the
 * translation animation advanced exactly once. The default continuous render
 * mode redraws every frame, producing the intended animation loop.
 */
public class MyGLSurfaceView extends GLSurfaceView {
    private final MyGLRenderer mRenderer;

    public MyGLSurfaceView(Context context) {
        super(context);
        // Create an OpenGL ES 2.0 context.
        setEGLContextClientVersion(2);
        // Set the Renderer for drawing on the GLSurfaceView.
        mRenderer = new MyGLRenderer();
        setRenderer(mRenderer);
        // Intentionally NOT calling
        // setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY):
        // continuous rendering (the default) is required for the animation.
    }
}
The renderer class:
public class MyGLRenderer implements GLSurfaceView.Renderer {
private static final String TAG = "MyGLRenderer";
private Triangle mTriangle;
private Square mSquare;
float i;
int direction;
// mMVPMatrix is an abbreviation for "Model View Projection Matrix"
private final float[] mMVPMatrix = new float[16];
private final float[] mProjectionMatrix = new float[16];
private final float[] mViewMatrix = new float[16];
#Override
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
i = 0;
direction = 1;
// Set the background frame color
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
mTriangle = new Triangle();
mSquare = new Square();
}
#Override
public void onDrawFrame(GL10 unused) {
float[] triangleScratch = new float[16];
float[] squareScratch = new float[16];
// Draw background color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
// Set the camera position (View matrix)
Matrix.setLookAtM(mViewMatrix, 0, 0, 0, -7, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
// Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mViewMatrix, 0);
// Separate the square and the triangle
Matrix.transposeM(squareScratch, 0, mMVPMatrix, 0);
Matrix.transposeM(triangleScratch, 0, mMVPMatrix, 0);
if(i>1) {direction = -1;}
if(i<-1) {direction = 1;}
i += 0.1 * direction;
//Introduce a translation
Matrix.translateM(squareScratch, 0, 0.0f, i, 0.0f);
// Draw square
mSquare.draw(squareScratch);
// Draw triangle
mTriangle.draw(triangleScratch);
}
#Override
public void onSurfaceChanged(GL10 unused, int width, int height) {
// Adjust the viewport based on geometry changes,
// such as screen rotation
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width / height;
// this projection matrix is applied to object coordinates
// in the onDrawFrame() method
Matrix.frustumM(mProjectionMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
}
/**
* Utility method for compiling a OpenGL shader.
*
* <p><strong>Note:</strong> When developing shaders, use the checkGlError()
* method to debug shader coding errors.</p>
*
* #param type - Vertex or fragment shader type.
* #param shaderCode - String containing the shader code.
* #return - Returns an id for the shader.
*/
public static int loadShader(int type, String shaderCode){
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
/**
* Utility method for debugging OpenGL calls. Provide the name of the call
* just after making it:
*
* <pre>
* mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
* MyGLRenderer.checkGlError("glGetUniformLocation");</pre>
*
* If the operation is not successful, the check throws an error.
*
* #param glOperation - Name of the OpenGL call to check.
*/
public static void checkGlError(String glOperation) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, glOperation + ": glError " + error);
throw new RuntimeException(glOperation + ": glError " + error);
}
}
}
Square class:
// A unit square drawn as two indexed triangles with a single uniform colour.
// GL calls are order-dependent; code left byte-identical, comments only.
public class Square {
private final String vertexShaderCode =
// This matrix member variable provides a hook to manipulate
// the coordinates of the objects that use this vertex shader
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"void main() {" +
// The matrix must be included as a modifier of gl_Position.
// Note that the uMVPMatrix factor *must be first* in order
// for the matrix multiplication product to be correct.
"  gl_Position = uMVPMatrix * vPosition;" +
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
"void main() {" +
"  gl_FragColor = vColor;" +
"}";
private final FloatBuffer vertexBuffer;       // square corner positions
private final ShortBuffer drawListBuffer;     // triangle index order
private final int mProgram;                   // linked shader program handle
private int mPositionHandle;
private int mColorHandle;
private int mMVPMatrixHandle;
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
// Unit square with its bottom-left corner at the origin (not centred).
static float squareCoords[] = {
0.0f, 1.0f, 0.0f, // top left
0.0f, 0.0f, 0.0f, // bottom left
1.0f, 0.0f, 0.0f, // bottom right
1.0f, 1.0f, 0.0f }; // top right
private final short drawOrder[] = { 0, 1, 2, 0, 2, 3 }; // order to draw vertices
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex
float color[] = { 0.2f, 0.709803922f, 0.898039216f, 1.0f };
/**
 * Sets up the drawing object data for use in an OpenGL ES context.
 * Must be called on the GL thread (compiles and links shaders).
 */
public Square() {
// initialize vertex byte buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(
// (# of coordinate values * 4 bytes per float)
squareCoords.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(squareCoords);
vertexBuffer.position(0);
// initialize byte buffer for the draw list
ByteBuffer dlb = ByteBuffer.allocateDirect(
// (# of coordinate values * 2 bytes per short)
drawOrder.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
// prepare shaders and OpenGL program
int vertexShader = MyGLRenderer.loadShader(
GLES20.GL_VERTEX_SHADER,
vertexShaderCode);
int fragmentShader = MyGLRenderer.loadShader(
GLES20.GL_FRAGMENT_SHADER,
fragmentShaderCode);
mProgram = GLES20.glCreateProgram(); // create empty OpenGL Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mProgram); // create OpenGL program executables
}
/**
 * Encapsulates the OpenGL ES instructions for drawing this shape.
 *
 * @param mvpMatrix - The Model View Project matrix in which to draw
 * this shape.
 */
public void draw(float[] mvpMatrix) {
// Add program to OpenGL environment
GLES20.glUseProgram(mProgram);
// get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(
mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
vertexStride, vertexBuffer);
// get handle to fragment shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
// Set color for drawing the triangle
GLES20.glUniform4fv(mColorHandle, 1, color, 0);
// get handle to shape's transformation matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
MyGLRenderer.checkGlError("glGetUniformLocation");
// Apply the projection and view transformation
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
MyGLRenderer.checkGlError("glUniformMatrix4fv");
// Draw the square as two triangles via the index list.
GLES20.glDrawElements(
GLES20.GL_TRIANGLES, drawOrder.length,
GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
}
The entire code is here:
Link to code repository
Thanks to all the people who have the patience to take a look at it.
Thanks to all the people who took a look at this.
The solution is to comment out the following line in MyGLSurfaceView.java:
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
Thanks

Open GL ES 2.0 Android Shading

I am trying to implement a 3D application for Android and I am having trouble when drawing 3D objects, like a cone for example.
The problem is that I can't see the transitions between the different faces; all of them are drawn with the same color.
I think I need to add shading to the polygons, but I can't find any tutorial showing me how to do that.
this is the code i am using to draw a cone.
// An 8-sided cone drawn as two triangle fans (side surface and base) plus
// white outline edges, all in a single interleaved vertex buffer.
// Geometry generation and GL draw order are intricate and order-dependent;
// code left byte-identical, comments only.
public class Cone{
float baseSize = 0f;   // base (radius) of the cone
float height = 0f;     // apex height above the base plane (y = 0)
protected final float[] mTransformMatrix = new float[16];
private FloatBuffer vertexBuffer;
private final int mProgram;
private final String vertexShaderCode =
// This matrix member variable provides a hook to manipulate
// the coordinates of the objects that use this vertex shader
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"void main() {" +
// the matrix must be included as a modifier of gl_Position
// Note that the uMVPMatrix factor *must be first* in order
// for the matrix multiplication product to be correct.
"  gl_Position = uMVPMatrix * vPosition;" +
"}";
// Use to access and set the view transformation
private int mMVPMatrixHandle;
// Uniform-colour fragment shader: every fragment of a draw call gets the
// same colour, which is why no face shading is visible (see answer below).
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
"void main() {" +
"  gl_FragColor = vColor;" +
"}";
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
// 10 vertices each: fan centre + 9 perimeter points (one overlapping to
// close the circle of 8 segments).
static float topCoords[] = new float[30];
static float baseCoords[] = new float[30];
// 16 line segments * 2 endpoints * 3 coords = 96 floats.
static float lineCoords[] = new float[96];
// Set color with red, green, blue and alpha (opacity) values
float color[] = { 1f, 0f, 0f, 1.0f };
float linecolor[] = { 1f, 1f, 1f, 1.0f };
public Cone(float baseSize , float height) {
this.baseSize = baseSize;
this.height = height;
float ang = (float) ((2*Math.PI) / 8); // angular step for 8 segments
Matrix.setIdentityM(mTransformMatrix, 0);
// initialize vertex byte buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(
// (number of coordinate values * 4 bytes per float)
(topCoords.length * 2 + lineCoords.length) * 4);
// use the device hardware's native byte order
bb.order(ByteOrder.nativeOrder());
// create a floating point buffer from the ByteBuffer
vertexBuffer = bb.asFloatBuffer();
// Fan centres: apex for the side surface, origin for the base.
topCoords[0] = 0;
topCoords[1] = height;
topCoords[2] = 0;
baseCoords[0]= 0;
baseCoords[1]= 0;
baseCoords[2]= 0;
// Perimeter points shared by both fans (i = 1..9 covers the full circle).
for(int i=1; i < 10;i++) {
topCoords[i*3] = this.baseSize * (float) Math.cos(i*ang);
topCoords[i*3 + 1] = 0;
topCoords[i*3 + 2] = this.baseSize * (float) Math.sin(i*ang);
baseCoords[i*3] = this.baseSize * (float) Math.cos(i*ang);
baseCoords[i*3 + 1] = 0;
baseCoords[i*3 + 2] = this.baseSize * (float) Math.sin(i*ang);
}
// First 8 line segments: apex to each perimeter point.
for (int i = 0 ; i < 8 ; i ++) {
lineCoords[i*6] = 0;
lineCoords[i*6 + 1] = height;
lineCoords[i*6 + 2] = 0;
lineCoords[i*6 + 3] = this.baseSize *(float) Math.cos((i+1)*ang);
lineCoords[i*6 + 4] = 0;
lineCoords[i*6 + 5] = this.baseSize * (float) Math.sin((i+1)*ang);
}
// Next 8 line segments: base rim, consecutive perimeter points.
int j = 0;
for (int i = 8 ; i < 16 ; i++){
lineCoords[i*6] = this.baseSize *(float) Math.cos((j+1)*ang);
lineCoords[i*6 + 1] = 0;
lineCoords[i*6 + 2] = this.baseSize * (float) Math.sin((j+1)*ang);
lineCoords[i*6 + 3] = this.baseSize *(float) Math.cos((j+2)*ang);
lineCoords[i*6 + 4] = 0;
lineCoords[i*6 + 5] = this.baseSize * (float) Math.sin((j+2)*ang);
j++;
}
// Buffer layout: [side fan | base fan | outline lines].
vertexBuffer.put(topCoords);
vertexBuffer.put(baseCoords);
vertexBuffer.put(lineCoords);
// set the buffer to read the first coordinate
vertexBuffer.position(0);
int vertexShader = MyGLRenderer.loadShader(GLES20.GL_VERTEX_SHADER,
vertexShaderCode);
int fragmentShader = MyGLRenderer.loadShader(GLES20.GL_FRAGMENT_SHADER,
fragmentShaderCode);
// create empty OpenGL ES Program
mProgram = GLES20.glCreateProgram();
// add the vertex shader to program
GLES20.glAttachShader(mProgram, vertexShader);
// add the fragment shader to program
GLES20.glAttachShader(mProgram, fragmentShader);
// creates OpenGL ES program executables
GLES20.glLinkProgram(mProgram);
}
private int mPositionHandle;
private int mColorHandle;
private final int topVertexCount = topCoords.length / COORDS_PER_VERTEX;
private final int lineVertexCount = lineCoords.length / COORDS_PER_VERTEX;
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex
/**
 * Draws the cone: red side fan, red base fan, then white outline lines,
 * reusing the same position attribute over the interleaved buffer.
 *
 * @param mvpMatrix 4x4 model-view-projection matrix
 */
public void draw(float[] mvpMatrix) {
// Add program to OpenGL ES environment
GLES20.glUseProgram(mProgram);
// get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
vertexStride, vertexBuffer);
// get handle to fragment shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
// Set color for drawing the cone
GLES20.glUniform4fv(mColorHandle, 1, color, 0);
// get handle to shape's transformation matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
// Pass the projection and view transformation to the shader
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
// Draw the cone side surface (fan around the apex).
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_FAN, 0, topVertexCount);
// Draw base (fan around the origin, starting after the side vertices).
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_FAN, topVertexCount, topVertexCount);
// Draw cone outline lines in white.
GLES20.glUniform4fv(mColorHandle, 1, linecolor, 0);
GLES20.glDrawArrays(GLES20.GL_LINES, topVertexCount*2, lineVertexCount);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
}
Thanks for the help
Your fragment shader code indeed assigns the same color to every fragment it processes. There are a number of different ways you can add "lighting" to your scene. 'Gouraud shading' is one of the easiest to implement with modern shaders. It interpolates the normal at each vertex of a triangle across the triangle, and computes a light intensity based on the light direction. In modern shading languages (including OpenGL ES 2), this interpolation is done for you.
There are many other possible lighting models; however, most (if not all, including Gouraud shading) will require that you generate vertex normals, which you are not doing in your cone mesh generation code.

Triangle not visible OpenGLES2.0 android

Hello Guys I am Beginner to OpenGL,
I am trying to follow the Android developers tutorials, but I am not able to see the triangle.
What is wrong?
I tried to create a triangle in onSurfaceCreated and called its draw method inside onDrawFrame of the Renderer class.
Triangle class:
/**
 * A single flat-coloured triangle drawn straight from a client-side vertex
 * buffer (no projection matrix is applied — positions are used as clip-space
 * coordinates directly).
 */
public class Triangle {
    // number of coordinates per vertex in this array
    static final int COORDS_PER_VERTEX = 3;
    static float triangleCoords[] = { // in counterclockwise order:
            0.0f, 0.622008459f, 0.0f, // top
            -0.5f, -0.311004243f, 0.0f, // bottom left
            0.5f, -0.311004243f, 0.0f // bottom right
    };
    // Set color with red, green, blue and alpha (opacity) values
    float color[] = { 0.63671875f, 0.76953125f, 0.22265625f, 1.0f };

    // Vertex shader: passes positions through untransformed.
    private final String vertexShaderCode =
            "attribute vec4 vPosition;" +
            "void main() {" +
            "  gl_Position = vPosition;" +
            "}";
    // Fragment shader: paints every fragment with the uniform colour.
    private final String fragmentShaderCode =
            "precision mediump float;" +
            "uniform vec4 vColor;" +
            "void main() {" +
            "  gl_FragColor = vColor;" +
            "}";

    private final int vertexCount = triangleCoords.length / COORDS_PER_VERTEX;
    private final int vertexStride = COORDS_PER_VERTEX * 4;
    private int mProgram, mPositionHandle, mColorHandle;
    private FloatBuffer vertexBuffer;

    /** Builds the vertex buffer and compiles/links the shader program. */
    public Triangle() {
        // Direct native-order byte buffer: 4 bytes per float coordinate.
        ByteBuffer backing = ByteBuffer.allocateDirect(triangleCoords.length * 4);
        backing.order(ByteOrder.nativeOrder());
        vertexBuffer = backing.asFloatBuffer();
        vertexBuffer.put(triangleCoords);
        vertexBuffer.position(0); // rewind so reads start at the first coordinate

        // Compile both shaders, then attach and link them into a program.
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
        int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
        mProgram = GLES20.glCreateProgram();
        GLES20.glAttachShader(mProgram, vertexShader);
        GLES20.glAttachShader(mProgram, fragmentShader);
        GLES20.glLinkProgram(mProgram);
    }

    /**
     * Compiles one shader of the given type.
     *
     * @param type GLES20.GL_VERTEX_SHADER or GLES20.GL_FRAGMENT_SHADER
     * @param shaderCode GLSL source text
     * @return the GL shader handle
     */
    public static int loadShader(int type, String shaderCode) {
        int shader = GLES20.glCreateShader(type);
        GLES20.glShaderSource(shader, shaderCode);
        GLES20.glCompileShader(shader);
        return shader;
    }

    /** Issues the GL calls that draw the triangle; call on the GL thread. */
    public void draw() {
        GLES20.glUseProgram(mProgram);
        // Wire the position attribute to our vertex buffer.
        mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
        GLES20.glEnableVertexAttribArray(mPositionHandle);
        GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,
                GLES20.GL_FLOAT, false,
                vertexStride, vertexBuffer);
        // Upload the uniform colour.
        mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
        GLES20.glUniform4fv(mColorHandle, 1, color, 0);
        // Draw the triangle from the three vertices.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, vertexCount);
        GLES20.glDisableVertexAttribArray(mPositionHandle);
    }
}
You are missing code to setup a projection matrix and viewport. You also need to call glSwapBuffers(), unless you are using GLSurfaceView, which does that for you. You can use an ortho projection for simplicity, and it should be multiplied by each vPosition in your vertex shader.
This is how you can use and construct a projection matrix:
Ortho(-1.0f, -1.0f, 1.0f, 1.0f, 1.0f, -1.0f);
glUniformMatrix4fv(iProjectionMatrixLocation, 1, GL_FALSE, (const GLfloat *)&m_mViewProj);
glViewport(0, 0, m_iWidth, m_iHeight);
...
// Construct a matrix for an orthographic projection view.
// Construct a matrix for an orthographic projection view.
// Fills the 16-element member m_mViewProj (f0..f15). The translation terms
// are written to f12..f14, so the layout appears to be column-major
// (OpenGL convention) — TODO confirm against how m_mViewProj is uploaded.
// NOTE(review): the parameter order is (left, top, right, bottom, near, far),
// which differs from glOrtho's (left, right, bottom, top, near, far) —
// callers must match this order (see the example call above).
void Button::Ortho(float left, float top, float right, float bottom, float nearPlane, float farPlane)
{
// Reciprocals of the (signed) extents along each axis.
float rcplmr = 1.0f / (left - right);
float rcpbmt = 1.0f / (bottom - top);
float rcpnmf = 1.0f / (nearPlane - farPlane);
// Diagonal scale terms.
m_mViewProj.f0 = -2.0f * rcplmr;
m_mViewProj.f1 = 0.0f;
m_mViewProj.f2 = 0.0f;
m_mViewProj.f3 = 0.0f;
m_mViewProj.f4 = 0.0f;
m_mViewProj.f5 = -2.0f * rcpbmt;
m_mViewProj.f6 = 0.0f;
m_mViewProj.f7 = 0.0f;
m_mViewProj.f8 = 0.0f;
m_mViewProj.f9 = 0.0f;
// Depth scale: sign convention differs from glOrtho's -2/(far-near) —
// presumably a left-handed/right-handed choice; verify against the rest
// of the pipeline.
m_mViewProj.f10 = -2.0f * rcpnmf;
m_mViewProj.f11 = 0.0f;
// Translation terms mapping the box centre to the origin.
m_mViewProj.f12 = (right + left) * rcplmr;
m_mViewProj.f13 = (top + bottom) * rcpbmt;
m_mViewProj.f14 = (nearPlane + farPlane) * rcpnmf;
m_mViewProj.f15 = 1.0f;
}
The third article here will help:
http://montgomery1.com/opengl/

Textures are stretched in android opengl es 2.0

I started from the google's android tutorial on open gl and then used this tutorial:
to add textures. Since the class structures differ a bit, I had to do some code-moving-renaming. What i ended up is:
Here is the texture file i used.
As one can see, the texture is somehow stretched along (1, 1), although the underlying object is a square.
Here is my quad's code, any help is appreciated.
public class GLSquare implements IGLObject {
private final String vertexShaderCode =
// This matrix member variable provides a hook to manipulate
// the coordinates of the objects that use this vertex shader
"uniform mat4 uMVPMatrix;" + "attribute vec2 a_TexCoordinate;"
+ "varying vec2 v_TexCoordinate;" + "attribute vec4 vPosition;"
+ "void main() {"
+
// the matrix must be included as a modifier of gl_Position
" gl_Position = uMVPMatrix * vPosition;"
+ "v_TexCoordinate = a_TexCoordinate;" + "}";
private final String fragmentShaderCode = "precision mediump float;"
+ "uniform sampler2D u_Texture;" + "varying vec2 v_TexCoordinate;"
+ "void main() {"
+ " gl_FragColor = texture2D(u_Texture, v_TexCoordinate);" + "}";
private final FloatBuffer vertexBuffer;
private final ShortBuffer drawListBuffer;
private final int mProgramHandle;
private int mPositionHandle;
private int mMVPMatrixHandle;
private int mTextureDataHandle;
/** The texture pointer */
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
static float squareCoords[] = { -10f, 10f, 0.0f, // top left
-10f, -10f, 0.0f, // bottom left
10f, -10f, 0.0f, // bottom right
10f, 10f, 0.0f }; // top right
private FloatBuffer textureBuffer; // buffer holding the texture coordinates
private float texture[] = {
// Mapping coordinates for the vertices
0.0f, 1.0f, // top left (V2)
0.0f, 0.0f, // bottom left (V1)
1.0f, 1.0f, // top right (V4)
1.0f, 0.0f // bottom right (V3)
};
static float[] mTranslate = new float[16];
static float[] translatedMVP = new float[16];
private final short drawOrder[] = { 0, 1, 2, 0, 2, 3 };
public GLSquare(float x, float y, float z, Context context) {
// initialize vertex byte buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(squareCoords.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(squareCoords);
vertexBuffer.position(0);
bb = ByteBuffer.allocateDirect(texture.length * 4);
bb.order(ByteOrder.nativeOrder());
textureBuffer = bb.asFloatBuffer();
textureBuffer.put(texture);
textureBuffer.position(0);
// initialize byte buffer for the draw list
ByteBuffer dlb = ByteBuffer.allocateDirect(
// (# of coordinate values * 2 bytes per short)
drawOrder.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
Matrix.setIdentityM(mTranslate, 0);
Matrix.translateM(mTranslate, 0, x, y, z);
// prepare shaders and OpenGL program
final int vertexShaderHandle = GLTools.compileShader(
GLES20.GL_VERTEX_SHADER, vertexShaderCode);
final int fragmentShaderHandle = GLTools.compileShader(
GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
mProgramHandle = GLTools.createAndLinkProgram(vertexShaderHandle,
fragmentShaderHandle, new String[] { "a_Position", "a_Color",
"a_TexCoordinate" });
// Load the texture
mTextureDataHandle = GLTools.loadGLTexture(context, R.raw.stars1024);
}
// Draws the textured square. vpMatrix is the combined view-projection
// matrix; this square's own translation is multiplied in before upload.
public void draw(float[] vpMatrix) {
// Add program to OpenGL environment
GLES20.glUseProgram(mProgramHandle);
// Pass in the position information
// NOTE(review): mPositionHandle is never assigned in this method (unlike
// mTextureCoordinateHandle below, which is looked up here) — confirm it is
// fetched via glGetAttribLocation somewhere after the program is linked,
// otherwise this attribute pointer targets location 0 by accident.
vertexBuffer.position(0);
GLES20.glVertexAttribPointer(mPositionHandle, 3, GLES20.GL_FLOAT,
false, 0, vertexBuffer);
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Looked up every frame; could be cached after linking, but kept as-is.
int mTextureCoordinateHandle = GLES20.glGetAttribLocation(
mProgramHandle, "a_TexCoordinate");
// Pass in the texture coordinate information
textureBuffer.position(0);
GLES20.glVertexAttribPointer(mTextureCoordinateHandle, 2,
GLES20.GL_FLOAT, false, 0, textureBuffer);
GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgramHandle,
"uMVPMatrix");
WideOpenRenderer.checkGlError("glGetUniformLocation");
// Apply the projection and view transformation
Matrix.multiplyMM(translatedMVP, 0, vpMatrix, 0, mTranslate, 0);
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, translatedMVP, 0);
// Draw the cube.
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length,
GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
}
}
Here is the method that loads the texture:
/**
 * Decodes a drawable/raw resource into a new OpenGL texture and returns its handle.
 *
 * The texture is left bound to GL_TEXTURE_2D with NEAREST min/mag filtering.
 * Must be called on the GL thread with a current context.
 *
 * @param context    Android context used to resolve the resource
 * @param resourceId id of the bitmap resource to load
 * @return a non-zero GL texture handle
 * @throws RuntimeException if texture allocation or bitmap decoding fails
 */
public static int loadGLTexture(Context context, final int resourceId) {
Log.d("GLTools", "Loading texture...");
final int[] textureHandle = new int[1];
GLES20.glGenTextures(1, textureHandle, 0);
// Fail fast if GL could not allocate a texture name (e.g. no current context).
if (textureHandle[0] == 0) {
throw new RuntimeException("Error loading texture.");
}
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inScaled = false; // No pre-scaling
// Read in the resource
final Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), resourceId, options);
// Guard against a failed decode; the original code would have crashed
// inside GLUtils.texImage2D with an unhelpful error.
if (bitmap == null) {
GLES20.glDeleteTextures(1, textureHandle, 0);
throw new RuntimeException("Error loading texture: could not decode resource " + resourceId);
}
// Bind to the texture in OpenGL
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);
Log.d("GLTools", "Binding texture, setting parameter" + resourceId);
// Set filtering
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
// Load the bitmap into the bound texture.
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
// Recycle the bitmap, since its data has been loaded into OpenGL.
bitmap.recycle();
return textureHandle[0];
}
OK, I am now able to answer my own question.
The texture was simply scaled relative to its own origin — the bottom-left point (u, v) = (0, 0) — which confused me. Being new to OpenGL, I expected it to be scaled relative to at least the world origin, or the object's centroid.

OpenGL ES 2.0 drawing line based on motion, Line always starts in origin

I've just started learning OpenGL for Android and I'm having a weird problem when drawing lines. All I want to do is draw a line based on a finger motion. As soon as I start swiping, I always get a line following my motion from the origin (0,0).
here a picture:
http://imageshack.us/photo/my-images/137/screenshot2012061312174.jpg/
The arrow symbolizes my finger motion, and the line starting at the origin (red circle) is the mentioned line following my entire motion.
Don't be bothered by the Coords array — I know this isn't best practice, but I debugged the entire program and couldn't find any bugs involving this array.
I probably should mention that the ArrayList points contains all my generated points.
I've been trying to figure this out for quite a while now, but I'm really stuck. Any suggestion would be helpful.
This is my entire render class.
/**
 * Renders the user's swipe path as a green line strip.
 *
 * Touch points are accumulated in {@link #points} (screen coordinates, filled
 * by the UI thread), un-projected into world space incrementally, and drawn
 * with GL_LINE_STRIP each frame.
 *
 * Fix: glDrawArrays previously received {@code counter}, which counts FLOATS
 * (two per vertex). That made GL read {@code counter / 2} extra (0, 0)
 * vertices from the zero-initialized tail of {@code Coords}, producing the
 * stray line from the origin that follows the finger. The vertex count is
 * {@code counter / 2}.
 */
public class HelloOpenGLES20Renderer implements GLSurfaceView.Renderer {
    private FloatBuffer triangleVB;          // vertex data re-uploaded every frame
    private int mProgram;
    private int maPositionHandle;            // location of the vPosition attribute
    public ArrayList<PointWrapper> points;   // raw touch points from the UI thread
    private int muMVPMatrixHandle;           // location of the uMVPMatrix uniform
    private float[] mMVPMatrix = new float[16];
    private float[] mMMatrix = new float[16];
    private float[] mVMatrix = new float[16];
    private float[] mProjMatrix = new float[16];
    private int[] viewport = new int[4];     // x, y, width, height for gluUnProject
    private ArrayList<Float> coordinates;
    // Flat (x, y) pairs, two floats per vertex. Slots past 'counter' remain 0.
    float[] Coords = new float[100000];
    boolean first;
    // Number of FLOATS written into Coords so far (i.e. 2 * vertex count).
    private int counter;
    private PointWrapper last;
    private final String vertexShaderCode =
    // This matrix member variable provides a hook to manipulate
    // the coordinates of the objects that use this vertex shader
    "uniform mat4 uMVPMatrix; \n" +
    "attribute vec4 vPosition; \n" + "void main(){ \n" +
    // the matrix must be included as a modifier of gl_Position
    " gl_Position = uMVPMatrix * vPosition; \n" +
    "} \n";
    private final String fragmentShaderCode = "precision mediump float; \n"
    + "void main(){ \n"
    + " gl_FragColor = vec4 (0.63671875, 0.76953125, 0.22265625, 1.0); \n"
    + "} \n";

    /** Compiles a single shader of the given type and returns its GL handle. */
    private int loadShader(int type, String shaderCode) {
        // create a vertex shader type (GLES20.GL_VERTEX_SHADER)
        // or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
        int shader = GLES20.glCreateShader(type);
        // add the source code to the shader and compile it
        GLES20.glShaderSource(shader, shaderCode);
        GLES20.glCompileShader(shader);
        return shader;
    }

    public HelloOpenGLES20Renderer() {
        points = new ArrayList<PointWrapper>();
        first = true;
        this.counter = 0;
        last = new PointWrapper();
        coordinates = new ArrayList<Float>();
    }

    /** Un-projects a screen-space point into a homogeneous world-space vector. */
    private float[] convertCoordinates(PointWrapper f) {
        float[] vector = new float[4];
        GLU.gluUnProject(f.point.x, f.point.y, 0.0f, mVMatrix, 0, mProjMatrix,
                0, viewport, 0, vector, 0);
        return vector;
    }

    /**
     * Converts any touch points not yet processed into Coords entries and
     * (re)builds the vertex buffer. Incremental: starts at counter/2, the
     * number of vertices already converted.
     */
    private void initShapes() {
        // Snapshot to avoid racing the UI thread appending to 'points'.
        ArrayList<PointWrapper> points2 = new ArrayList<PointWrapper>(points);
        float[] vector;
        if (!points2.isEmpty()) {
            // NOTE(review): dropping the last point for odd sizes looks like a
            // workaround; its purpose is unclear from this file — confirm.
            if (points2.size() % 2 == 1) {
                points2.remove(points2.size() - 1);
            }
            for (int i = counter / 2; i < points2.size(); i++) {
                vector = convertCoordinates(points2.get(i));
                // Perspective divide; y is flipped (screen y grows downward).
                Coords[counter] = vector[0] / vector[3];
                Coords[counter + 1] = -1 * (vector[1] / vector[3]);
                counter = counter + 2;
            }
        }
        // initialize vertex Buffer (4 bytes per float, native byte order).
        // NOTE(review): allocating a direct buffer every frame is wasteful;
        // kept as-is to preserve behavior, but consider allocating once.
        ByteBuffer vbb = ByteBuffer.allocateDirect(Coords.length * 4);
        vbb.order(ByteOrder.nativeOrder());
        triangleVB = vbb.asFloatBuffer();
        triangleVB.put(Coords);
        triangleVB.position(0); // set the buffer to read the first coordinate
    }

    public void onSurfaceCreated(GL10 unused, EGLConfig config) {
        // Set the background frame color
        GLES20.glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
        int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER,
                fragmentShaderCode);
        mProgram = GLES20.glCreateProgram(); // create empty OpenGL Program
        GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader
        GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader
        GLES20.glLinkProgram(mProgram); // creates OpenGL program executables
        // get handle to the vertex shader's vPosition member
        maPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
    }

    public void onDrawFrame(GL10 unused) {
        // Redraw background color
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
        initShapes();
        // Add program to OpenGL environment
        GLES20.glUseProgram(mProgram);
        // Prepare the line data: 2 components (x, y) per vertex, tightly packed.
        GLES20.glVertexAttribPointer(maPositionHandle, 2, GLES20.GL_FLOAT,
                false, 0, triangleVB);
        GLES20.glEnableVertexAttribArray(maPositionHandle);
        // Apply a ModelView Projection transformation
        Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
        GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
        GLES20.glLineWidth(5f);
        // FIX: glDrawArrays takes a VERTEX count. 'counter' counts floats
        // (2 per vertex); passing it drew counter/2 trailing (0,0) vertices,
        // causing the spurious line from the origin.
        GLES20.glDrawArrays(GLES20.GL_LINE_STRIP, 0, counter / 2);
    }

    public void onSurfaceChanged(GL10 unused, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
        float ratio = (float) width / height;
        // Cached for gluUnProject in convertCoordinates().
        viewport[0] = 0;
        viewport[1] = 0;
        viewport[2] = width;
        viewport[3] = height;
        // this projection matrix is applied to object coordinates
        // in the onDrawFrame() method
        Matrix.frustumM(mProjMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
        // Safe here: onSurfaceCreated (which links mProgram) runs first.
        muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
        Matrix.setLookAtM(mVMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
    }
}
my thanks in advance
for (int i = counter/2; i < points2.size(); i++) {
vector = convertCoordinates(points2.get(i));
Coords[counter] = vector[0] / vector[3];
Coords[counter+1] = -1 * (vector[1] / vector[3]);
counter= counter+2;
}
You have initialized Coords to hold 100000 floats, all of which start out as 0. After the last iteration of this loop, 'counter' holds the number of floats you have written into the array.
What you pass to glDrawArrays should be the number of VERTICES to draw — so in this case half of 'counter'.
GLES20.glDrawArrays(GLES20.GL_LINE_STRIP, 0, counter);
Your draw call is therefore rendering 'counter'/2 extra (0,0) vertices from the untouched end of your array. The quickest fix would be to pass 'counter'/2 to glDrawArrays, but I'd suggest a clearer approach:
numOfVertices = points2.size(); //make field
int counter = 0; //make local
for (int i = 0; i < numOfVertices; i++) {
vector = convertCoordinates(points2.get(i));
Coords[counter] = vector[0] / vector[3];
Coords[counter+1] = -1 * (vector[1] / vector[3]);
counter= counter+2;
}
and then
GLES20.glDrawArrays(GLES20.GL_LINE_STRIP, 0, numOfVertices);

Categories

Resources