I'm having a problem mapping OpenGL coordinates to my Android screen properly. For some reason the x-coordinates are not contained within the screen. This is visible when I try to draw a circle in the middle of the screen with a radius of half the screen width. The circle should be just big enough to fit within the x-borders of the screen, but instead it goes beyond them and in fact touches the y-borders instead. Also, if I tell the circle to be drawn at (0,0) (upper left corner), the center is actually drawn outside the x-border after converting it to OpenGL coordinates.
Hope that made sense.
Here's my DrawScreen class:
public class DrawScreen implements GLSurfaceView.Renderer {
Ball ball;
public float mAngle;
private int mProgram;
private int maPositionHandle;
private int muMVPMatrixHandle;
private float[] mMVPMatrix = new float[16];
private float[] mVMatrix = new float[16];
private float[] mProjMatrix = new float[16];
private final String vertexShaderCode =
// This matrix member variable provides a hook to manipulate
// the coordinates of the objects that use this vertex shader
"uniform mat4 uMVPMatrix; \n" +
"attribute vec4 vPosition; \n" +
"void main(){ \n" +
// the matrix must be included as a modifier of gl_Position
" gl_Position = uMVPMatrix * vPosition; \n" +
"} \n";
private final String fragmentShaderCode =
"precision mediump float; \n" +
"void main(){ \n" +
" gl_FragColor = vec4 (0.63671875, 0.76953125, 0.22265625, 1.0); \n" +
"} \n";
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
ball = new Ball();
// Set the background frame color
GLES20.glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
ball.initShapes(240, 360, 240);
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
mProgram = GLES20.glCreateProgram(); // create empty OpenGL Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mProgram); // creates OpenGL program executables
// get handle to the vertex shader's vPosition member
maPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
}
public void onDrawFrame(GL10 unused) {
// Redraw background color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
// Add program to OpenGL environment
GLES20.glUseProgram(mProgram);
// Prepare the circle data
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, 12, ball.ballVB);
GLES20.glEnableVertexAttribArray(maPositionHandle);
// Apply a ModelView Projection transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
// Draw the circle
GLES20.glDrawArrays(GLES20.GL_LINE_LOOP, 0, (int) ball.getNumSeg());
}
public void onSurfaceChanged(GL10 unused, int width, int height) {
// Set the OpenGL viewport to the same size as the surface.
GLES20.glViewport(0, 0, width, height);
// Create a new perspective projection matrix. The height will stay the same
// while the width will vary as per aspect ratio.
final float ratio = (float) width / height;
final float left = ratio;
final float right = -ratio;
final float bottom = 1.0f;
final float top = -1.0f;
final float near = 3.0f;
final float far = 7.0f;
//Matrix.frustumM(mProjMatrix, 0, left, right, bottom, top, near, far);
Matrix.orthoM(mProjMatrix, 0, left, right, bottom, top, near, far);
muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
Matrix.setLookAtM(mVMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
}
private int loadShader(int type, String shaderCode){
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
And here's where I convert the screen coordinates to OpenGL coordinates:
float numSegments = 360;
public void initShapes(float tx, float ty, float tr){
cx = (tx / (480 / 2 )) - 1.f;
cy = (ty / (720 / 2 )) - 1.f;
r = (tr / (480 / 2 )) ;
System.out.println(r);
System.out.println(cx);
System.out.println(cy);
float ballCoords[] = new float[(int) (numSegments * 3)];
double theta = (2 * 3.1415926 / numSegments);
float c = (float) Math.cos(theta);//precalculate the sine and cosine
float s = (float) Math.sin(theta);
float t;
float x = r;//we start at angle = 0
float y = 0;
for(int i = 0; i < (numSegments * 3); i = i + 3 ) {
ballCoords[i] = (x + cx);
ballCoords[i + 1] = (y + cy);
ballCoords[i + 2] = (0);
//apply the rotation matrix
t = x;
x = c * x - s * y;
y = s * t + c * y;
}
// initialize vertex Buffer for triangle
ByteBuffer vbb = ByteBuffer.allocateDirect(
// (# of coordinate values * 4 bytes per float)
ballCoords.length * 4);
vbb.order(ByteOrder.nativeOrder());// use the device hardware's native byte order
ballVB = vbb.asFloatBuffer(); // create a floating point buffer from the ByteBuffer
ballVB.put(ballCoords); // add the coordinates to the FloatBuffer
ballVB.position(0); // set the buffer to read the first coordinate
}
Ok, so I solved it... though it doesn't look pretty.
I basically turned all the numbers in onSurfaceChanged on their heads, and it now looks the way I want.
This is my new code:
public void onSurfaceChanged(GL10 unused, int width, int height) {
// Set the OpenGL viewport to the same size as the surface.
GLES20.glViewport(0, 0, width, height);
// Create a new perspective projection matrix. The height will stay the same
// while the width will vary as per aspect ratio.
final float ratio = (float) height / width;
final float left = 1;
final float right = -1;
final float bottom = ratio;
final float top = -ratio;
final float near = 3.0f;
final float far = 7.0f;
//Matrix.frustumM(mProjMatrix, 0, left, right, bottom, top, near, far);
Matrix.orthoM(mProjMatrix, 0, left, right, bottom, top, near, far);
muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
Matrix.setLookAtM(mVMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
}
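With this projection the visible region is 2 units wide and 2 * (height / width) units tall, so the pixel-to-GL conversion in initShapes should use the same bounds (and the real surface size rather than the hard-coded 480 x 720). Below is a minimal sketch of a matching conversion, assuming hypothetical screenWidth/screenHeight values saved in onSurfaceChanged; depending on the sign convention chosen for left/right and bottom/top above, one or both axes may need to be negated:
// Maps pixel coordinates into the ortho volume used above:
// x in [-1, 1], y in [-ratio, ratio] with ratio = height / width.
float toGlX(float px, float screenWidth) {
    return (px / (screenWidth / 2f)) - 1f;
}
float toGlY(float py, float screenWidth, float screenHeight) {
    float ratio = screenHeight / screenWidth;
    return ((py / (screenHeight / 2f)) - 1f) * ratio;
}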
Related
I am trying to render an opaque torus in Android using OpenGL ES 2.0. When I added colour, following this guide, I noticed an artefact when viewing the torus from certain perspectives. I have linked an image that shows this, though the animation here may make it clearer.
After some initial reading, I considered that it could be a depth buffer issue, since it appears that the interior rear surface is being rendered over the exterior, front-facing surface that should be seen. However, changing the view frustum near/far limits to try to maximise the separation between surfaces hasn't helped.
I am certain that the vertices themselves are correct, from rendering using GLES20.GL_LINES instead of GLES20.GL_TRIANGLES. Any ideas what could be causing this artefact?
Below is the code for the surface:
public class Miller {
private FloatBuffer verticesBuffer;
private ShortBuffer indicesBuffer;
final int nTheta = 50; // Number of divisions per 2pi theta.
final int nPhi = 50; // And per 2pi phi.
private int mProgramHandle;
private final int POSITION_DATA_SIZE_IN_ELEMENTS = 3; // Number of elements per coordinate per vertex (x,y,z)
private final int COLOR_DATA_SIZE_IN_ELEMENTS = 4; // Number of elements per colour per vertex (r,g,b,a)
private final int BYTES_PER_FLOAT = 4; // Number of bytes used per float.
private final int BYTES_PER_SHORT = 2; // Number of bytes used per short.
private final int POSITION_DATA_SIZE = POSITION_DATA_SIZE_IN_ELEMENTS * nTheta * nPhi;
private final int COLOR_DATA_SIZE = COLOR_DATA_SIZE_IN_ELEMENTS * nTheta * nPhi;
final int STRIDE = (POSITION_DATA_SIZE_IN_ELEMENTS + COLOR_DATA_SIZE_IN_ELEMENTS)* BYTES_PER_FLOAT;
// Use to access and set the view transformation
private int mMVPMatrixHandle;
private final String fragmentShaderCode =
"precision mediump float;" +
"varying vec4 vColor;" +
"void main() {" +
" gl_FragColor = vColor;" +
"}";
private final String vertexShaderCode =
"uniform mat4 uMVPMatrix;" +
"attribute vec4 aColor;" +
"attribute vec4 aPosition;" +
"varying vec4 vColor;" +
"void main() {" +
" vColor = aColor;" +
" gl_Position = uMVPMatrix * aPosition;" +
"}";
private float a; // Minor radius
private float R0; // Major radius
int nVertices = nTheta*nPhi; // Number of vertices
Miller(float minrad, float majrad) {
this.R0 = majrad/3.0f; // Rescale.
this.a = minrad/3.0f;
ByteBuffer buffer1 = ByteBuffer.allocateDirect(nVertices * (POSITION_DATA_SIZE_IN_ELEMENTS + COLOR_DATA_SIZE_IN_ELEMENTS) * BYTES_PER_FLOAT );
buffer1.order(ByteOrder.nativeOrder());
verticesBuffer = buffer1.asFloatBuffer();
for (int iTheta = 0; iTheta < nTheta; iTheta++) {
float theta = (float) (iTheta * 2 * Math.PI / nTheta);
for (int iPhi = 0; iPhi < nPhi; iPhi++) {
float phi = (float) (iPhi * 2 * Math.PI / nPhi);
// Circular torus vertices
float x = (float) ((R0 + a * Math.cos(theta)) * Math.cos(phi));
float y = (float) (a * Math.sin(theta));
float z = (float) ((R0 + a * Math.cos(theta)) * Math.sin(phi));
verticesBuffer.put(x);
verticesBuffer.put(y);
verticesBuffer.put(z);
float mod = (float)Math.sqrt(x*x + y*y + z*z); // Distance from origin to point
float cx = (float)Math.pow(Math.sin(phi),2);
float cy = (float)Math.pow(Math.sin(phi),2);
float cz = (float)Math.pow(Math.cos(phi),2); // colours
// Add colours according to position
verticesBuffer.put(cx);
verticesBuffer.put(cy);
verticesBuffer.put(cz);
verticesBuffer.put(1.0f); // Opaque
}
}
verticesBuffer.position(0);
// Create buffer for indices 2 bytes per short per coord per vertex
ByteBuffer buffer2 = ByteBuffer.allocateDirect(nPhi *nTheta * POSITION_DATA_SIZE_IN_ELEMENTS * BYTES_PER_SHORT * 2);
buffer2.order(ByteOrder.nativeOrder());
indicesBuffer = buffer2.asShortBuffer();
for (int iTheta = 0; iTheta < nTheta; iTheta++) {
for (int iPhi = 0; iPhi < nPhi; iPhi++) {
int f = iTheta* nPhi + iPhi; // First vertex
int s,fp1,sp1; // Initialise second, first plus 1, second plus 1.
if (iTheta != nTheta-1) { // Triangles that link back to theta=0
s = f + nPhi;
} else {
s = iPhi;
}
if (iPhi != nPhi-1) { // Triangles that link back to phi = 0
fp1 = f+1;
sp1 = s+1;
} else {
fp1 = f-iPhi;
sp1 = s-iPhi;
}
indicesBuffer.put((short)f); // First triangle
indicesBuffer.put((short)fp1);
indicesBuffer.put((short)s);
indicesBuffer.put((short)s); // Second triangle
indicesBuffer.put((short)fp1);
indicesBuffer.put((short)sp1);
}
}
indicesBuffer.position(0);
int vertexShaderHandle = TokGLRenderer.loadShader(GLES20.GL_VERTEX_SHADER, // Load vertex shader - acquire handle.
vertexShaderCode);
int fragmentShaderHandle = TokGLRenderer.loadShader(GLES20.GL_FRAGMENT_SHADER, // And fragment shader handle.
fragmentShaderCode);
// create empty OpenGL ES Program
mProgramHandle = GLES20.glCreateProgram();
// add the vertex shader to program
GLES20.glAttachShader(mProgramHandle, vertexShaderHandle);
// add the fragment shader to program
GLES20.glAttachShader(mProgramHandle, fragmentShaderHandle);
// Bind attributes
GLES20.glBindAttribLocation(mProgramHandle, 0, "aPosition");
GLES20.glBindAttribLocation(mProgramHandle, 1, "aColor");
// creates OpenGL ES program executables
GLES20.glLinkProgram(mProgramHandle);
}
private int mPositionHandle;
private int mNormalHandle;
private int mColorHandle;
public void draw(float[] mvpMatrix) {
// Add program to OpenGL ES environment
GLES20.glUseProgram(mProgramHandle);
// get handle to vertex shader's aPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgramHandle, "aPosition");
// get handle to fragment shader's vColor member
mColorHandle = GLES20.glGetAttribLocation(mProgramHandle, "aColor"); // USED TO BE vColor
// get handle to shape's transformation matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgramHandle, "uMVPMatrix");
// Set color for drawing the triangle
//GLES20.glUniform4fv(mColorHandle, 1, color, 0);
// Prepare the coordinate data
GLES20.glVertexAttribPointer(mPositionHandle, 3, GLES20.GL_FLOAT, false, STRIDE, verticesBuffer);
// Pass in the position information
verticesBuffer.position(0);
GLES20.glVertexAttribPointer(mPositionHandle, POSITION_DATA_SIZE_IN_ELEMENTS, GLES20.GL_FLOAT, false, STRIDE, verticesBuffer);
GLES20.glEnableVertexAttribArray(mPositionHandle); // Enable handle to position of vertices
// Pass in the colour information
verticesBuffer.position(POSITION_DATA_SIZE_IN_ELEMENTS);
GLES20.glVertexAttribPointer(mColorHandle, COLOR_DATA_SIZE_IN_ELEMENTS, GLES20.GL_FLOAT, false, STRIDE, verticesBuffer);
GLES20.glEnableVertexAttribArray(mColorHandle); // Enable handle to colour of vertices
// Pass the projection and view transformation to the shader
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
// Draw vertices linked by triangles.
GLES20.glDrawElements(GLES20.GL_TRIANGLES, 6*nTheta*nPhi, GLES20.GL_UNSIGNED_SHORT, indicesBuffer);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
GLES20.glDisableVertexAttribArray(mColorHandle);
}
}
and for the renderer:
public class TokGLRenderer implements GLSurfaceView.Renderer {
// mMVPMatrix is an abbreviation for "Model View Projection Matrix"
private final float[] mMVPMatrix = new float[16];
private final float[] mProjectionMatrix = new float[16];
private final float[] mViewMatrix = new float[16];
private Miller surf;
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
surf = new Miller(0.96f, 3.1439243f);
}
public void onSurfaceChanged(GL10 gl10, int width, int height) {
GLES20.glViewport(0,0, width, height);
float ratio = (float) width / height;
// this projection matrix is applied to object coordinates
// in the onDrawFrame() method
float zoom = 0.9f;
Matrix.frustumM(mProjectionMatrix, 0, -ratio/zoom, ratio/zoom, -1f/zoom, 1f/zoom, 7f, 11f);
}
private float[] mRotationMatrix = new float[16];
public void onDrawFrame(GL10 unused) {
// Redraw background color
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
// Set the camera position (View matrix)
Matrix.setLookAtM(mViewMatrix, 0, 5f, 5f, 5f, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
// Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mViewMatrix, 0);
surf.draw(mMVPMatrix);
}
public static int loadShader(int type, String shaderCode){
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
}
To make the depth test work, you have to enable it (GLES20.glEnable(GLES20.GL_DEPTH_TEST)) and you have to specify the size of the depth buffer.
In GLSurfaceView this can be done by the 4th parameter of setEGLConfigChooser:
e.g. depth buffer size of 16 bits:
setEGLConfigChooser(8, 8, 8, 8, 16, 0)
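For reference, here is a minimal sketch of how both pieces could fit together, assuming a hypothetical GLSurfaceView subclass wrapping the TokGLRenderer above:
import android.content.Context;
import android.opengl.GLSurfaceView;

public class TorusSurfaceView extends GLSurfaceView {
    public TorusSurfaceView(Context context) {
        super(context);
        setEGLContextClientVersion(2);           // request an OpenGL ES 2.0 context
        setEGLConfigChooser(8, 8, 8, 8, 16, 0);  // RGBA8888 colour plus a 16-bit depth buffer
        setRenderer(new TokGLRenderer());
    }
}
// In TokGLRenderer.onSurfaceCreated(), also enable the test itself:
// GLES20.glEnable(GLES20.GL_DEPTH_TEST);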
I'm trying to draw text and a square with OpenGL ES 2.0.
Each one could be in any position.
But so far I can only see one of them, depending on the projection mode I choose:
I only see the square if I use "Matrix.setLookAtM" to make the camera at (0, -3, 0) look at (0, 0, 0)
I only see the text if I use "Matrix.orthoM" to get an orthogonal projection
See the code from "Texample2Renderer.java" at the bottom.
I would like to see both; how is this possible? I was thinking about modifying the Square class to make it work with the orthogonal projection mode, but I have no idea how to do this.
For the text, I'm using this code (there's a lot of code, so I prefer to post the repo):
https://github.com/d3alek/Texample2
And for the square, I'm using this code :
public class Square {
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
static float squareCoords[] = {
-0.5f, 0.5f, 0.0f, // top left
-0.5f, -0.5f, 0.0f, // bottom left
0.5f, -0.5f, 0.0f, // bottom right
0.5f, 0.5f, 0.0f}; // top right
private final String vertexShaderCode =
// This matrix member variable provides a hook to manipulate
// the coordinates of the objects that use this vertex shader
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"void main() {" +
// The matrix must be included as a modifier of gl_Position.
// Note that the uMVPMatrix factor *must be first* in order
// for the matrix multiplication product to be correct.
" gl_Position = uMVPMatrix * vPosition;" +
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
"void main() {" +
" gl_FragColor = vColor;" +
"}";
private final FloatBuffer vertexBuffer;
private final ShortBuffer drawListBuffer;
private final int mProgram;
private final short drawOrder[] = {0, 1, 2, 0, 2, 3}; // order to draw vertices
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex
float color[] = {0.2f, 0.709803922f, 0.898039216f, 1.0f};
private int mPositionHandle;
private int mColorHandle;
private int mMVPMatrixHandle;
/**
* Sets up the drawing object data for use in an OpenGL ES context.
*/
public Square() {
// BUFFER FOR SQUARE COORDS
// initialize vertex byte buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(
// (# of coordinate values * 4 bytes per float)
squareCoords.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(squareCoords);
vertexBuffer.position(0);
// BUFFER FOR DRAW ORDER
// initialize byte buffer for the draw list
ByteBuffer dlb = ByteBuffer.allocateDirect(
// (# of coordinate values * 2 bytes per short)
drawOrder.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
// prepare shaders and OpenGL program
int vertexShader = Utilities.loadShader(
GLES20.GL_VERTEX_SHADER,
vertexShaderCode);
int fragmentShader = Utilities.loadShader(
GLES20.GL_FRAGMENT_SHADER,
fragmentShaderCode);
mProgram = GLES20.glCreateProgram(); // create empty OpenGL Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mProgram); // create OpenGL program executables
}
/**
* Encapsulates the OpenGL ES instructions for drawing this shape.
*
* @param mvpMatrix - The Model View Projection matrix with which to draw
* this shape.
*/
public void draw(float[] mvpMatrix) {
// Add program to OpenGL environment
GLES20.glUseProgram(mProgram);
// get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(
mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
vertexStride, vertexBuffer);
// get handle to fragment shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
// Set color for drawing the triangle
GLES20.glUniform4fv(mColorHandle, 1, color, 0);
// get handle to shape's transformation matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
//Utilities.checkEglErrorEGL14Android("glGetUniformLocation");
// Apply the projection and view transformation
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
//Utilities.checkEglErrorEGL14Android("glUniformMatrix4fv");
// Draw the square
GLES20.glDrawElements(
GLES20.GL_TRIANGLES, drawOrder.length,
GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
}
Texample2Renderer.java:
public class Texample2Renderer implements GLSurfaceView.Renderer {
private static final String TAG = "TexampleRenderer";
private Square square;
private GLText glText; // A GLText Instance
private Context context; // Context (from Activity)
private int width = 100; // Updated to the Current Width + Height in onSurfaceChanged()
private int height = 100;
private float[] mProjMatrix = new float[16];
private float[] mVMatrix = new float[16];
private float[] mVPMatrix = new float[16];
private boolean usesOrtho = false;
public Texample2Renderer(Context context) {
super();
this.context = context; // Save Specified Context
}
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
// Set the background frame color
GLES20.glClearColor( 0.4f, 0.3f, 0.6f, 1.0f );
// Create the GLText
glText = new GLText(context.getAssets());
square = new Square();
// Load the font from file (set size + padding), creates the texture
// NOTE: after a successful call to this the font is ready for rendering!
glText.load( "Roboto-Regular.ttf", 20*3, 2, 2 ); // Create Font (Height: 14 Pixels / X+Y Padding 2 Pixels)
// enable texture + alpha blending
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ONE_MINUS_SRC_ALPHA);
}
public void onSurfaceChanged(GL10 unused, int width, int height) { // gl.glViewport( 0, 0, width, height );
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width / height;
// Take into account device orientation
if (width > height) {
Matrix.frustumM(mProjMatrix, 0, -ratio, ratio, -1, 1, 1, 10);
}
else {
Matrix.frustumM(mProjMatrix, 0, -1, 1, -1/ratio, 1/ratio, 1, 10);
}
// Save width and height
this.width = width; // Save Current Width
this.height = height; // Save Current Height
if(usesOrtho) {
int useForOrtho = Math.min(width, height);
//TODO: Is this wrong?
Matrix.orthoM(mVMatrix, 0,
-useForOrtho / 2,
useForOrtho / 2,
-useForOrtho / 2,
useForOrtho / 2, 0.1f, 100f);
}
}
public void onDrawFrame(GL10 unused) {
// Redraw background color
int clearMask = GLES20.GL_COLOR_BUFFER_BIT;
GLES20.glClear(clearMask);
if(!usesOrtho)
Matrix.setLookAtM(mVMatrix,
0, // offset
0, 0, -3f, // eye (camera's position)
0f, 0f, 0f, // center (where to look at)
0f, 1.0f, 0.0f); // up
Matrix.multiplyMM(mVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
if(square != null)
square.draw(mVPMatrix);
// TEST: render the entire font texture
glText.drawTexture( width/2, height/2, mVPMatrix); // Draw the Entire Texture
// TEST: render some strings with the font
glText.begin( 1.0f, 1.0f, 1.0f, 1.0f, mVPMatrix ); // Begin Text Rendering (Set Color WHITE)
glText.drawC("Test String 3D!", 0f, 0f, 0f, 0, -30, 0);
// glText.drawC( "Test String :)", 0, 0, 0 ); // Draw Test String
glText.draw( "Diagonal 1", 40, 40, 40); // Draw Test String
glText.draw( "Column 1", 100, 100, 90); // Draw Test String
glText.end(); // End Text Rendering
glText.begin( 0.0f, 0.0f, 1.0f, 1.0f, mVPMatrix ); // Begin Text Rendering (Set Color BLUE)
glText.draw( "More Lines...", 50, 200 ); // Draw Test String
glText.draw( "The End.", 50, 200 + glText.getCharHeight(), 180); // Draw Test String
glText.end(); // End Text Rendering
}
}
I've just modified the code in Texample2Renderer.java to draw a square. I also added a boolean to switch between projection modes.
Any help would be much appreciated, thanks a lot in advance!
Try to reverse the order of the vertices in your square coords:
static float squareCoords[] = {
0.5f, 0.5f, 0.0f, // top right
0.5f, -0.5f, 0.0f, // bottom right
-0.5f, -0.5f, 0.0f, // bottom left
-0.5f, 0.5f, 0.0f}; // top left
It's probably facing the other direction than your camera is.
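As a quick sanity check you can also ask GL whether face culling is active at all; it is off by default in ES 2.0, so the winding order only hides the square if culling was enabled somewhere. A minimal diagnostic sketch for the setup or draw code (android.util.Log assumed):
// Winding order only affects visibility when face culling is enabled.
boolean cullingEnabled = GLES20.glIsEnabled(GLES20.GL_CULL_FACE);
Log.d("Square", "Face culling enabled: " + cullingEnabled);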
Actually I just needed to scale up the square, and it worked with the orthogonal projection. Since the square's size was "1", it was barely visible.
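A minimal sketch of one way to do that in code rather than editing the vertex array, assuming the renderer's mVPMatrix from above; the factor of 100 is illustrative, chosen because the orthographic bounds in onSurfaceChanged are in pixel-sized units:
// Scale the unit square up before drawing it.
float[] scaledMvp = new float[16];
Matrix.scaleM(scaledMvp, 0, mVPMatrix, 0, 100f, 100f, 1f);
if (square != null)
    square.draw(scaledMvp);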
Hello guys, I am a beginner to OpenGL.
I am trying to follow the Android developers tutorials, but I am not able to see the triangle.
What is wrong?
I tried to create a triangle in onSurfaceCreated and called its draw method inside onDraw of the Renderer class.
Triangle class:
public class Triangle {
private final String vertexShaderCode =
"attribute vec4 vPosition;" +
"void main() {" +
" gl_Position = vPosition;" +
"}";
private final int vertexCount = triangleCoords.length / COORDS_PER_VERTEX;
private final int vertexStride = COORDS_PER_VERTEX * 4;
private int mProgram,mPositionHandle,mColorHandle;
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
"void main() {" +
" gl_FragColor = vColor;" +
"}";
private FloatBuffer vertexBuffer;
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
static float triangleCoords[] = { // in counterclockwise order:
0.0f, 0.622008459f, 0.0f, // top
-0.5f, -0.311004243f, 0.0f, // bottom left
0.5f, -0.311004243f, 0.0f // bottom right
};
// Set color with red, green, blue and alpha (opacity) values
float color[] = { 0.63671875f, 0.76953125f, 0.22265625f, 1.0f };
public Triangle() {
// initialize vertex byte buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(
// (number of coordinate values * 4 bytes per float)
triangleCoords.length * 4);
// use the device hardware's native byte order
bb.order(ByteOrder.nativeOrder());
// create a floating point buffer from the ByteBuffer
vertexBuffer = bb.asFloatBuffer();
// add the coordinates to the FloatBuffer
vertexBuffer.put(triangleCoords);
// set the buffer to read the first coordinate
vertexBuffer.position(0);
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
mProgram = GLES20.glCreateProgram(); // create empty OpenGL ES Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mProgram);
}
public static int loadShader(int type, String shaderCode){
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
public void draw() {
// Add program to OpenGL ES environment
GLES20.glUseProgram(mProgram);
// get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
vertexStride, vertexBuffer);
// get handle to fragment shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
// Set color for drawing the triangle
GLES20.glUniform4fv(mColorHandle, 1, color, 0);
// Draw the triangle
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, vertexCount);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
}
You are missing the code to set up a projection matrix and a viewport. You also need to call eglSwapBuffers(), unless you are using GLSurfaceView, which does that for you. You can use an ortho projection for simplicity, and it should be multiplied with each vPosition in your vertex shader.
This is how you can use and construct a projection matrix:
Ortho(-1.0f, -1.0f, 1.0f, 1.0f, 1.0f, -1.0f);
glUniformMatrix4fv(iProjectionMatrixLocation, 1, GL_FALSE, (const GLfloat *)&m_mViewProj);
glViewport(0, 0, m_iWidth, m_iHeight);
...
// Construct a matrix for an orthographic projection view.
void Button::Ortho(float left, float top, float right, float bottom, float nearPlane, float farPlane)
{
float rcplmr = 1.0f / (left - right);
float rcpbmt = 1.0f / (bottom - top);
float rcpnmf = 1.0f / (nearPlane - farPlane);
m_mViewProj.f0 = -2.0f * rcplmr;
m_mViewProj.f1 = 0.0f;
m_mViewProj.f2 = 0.0f;
m_mViewProj.f3 = 0.0f;
m_mViewProj.f4 = 0.0f;
m_mViewProj.f5 = -2.0f * rcpbmt;
m_mViewProj.f6 = 0.0f;
m_mViewProj.f7 = 0.0f;
m_mViewProj.f8 = 0.0f;
m_mViewProj.f9 = 0.0f;
m_mViewProj.f10 = -2.0f * rcpnmf;
m_mViewProj.f11 = 0.0f;
m_mViewProj.f12 = (right + left) * rcplmr;
m_mViewProj.f13 = (top + bottom) * rcpbmt;
m_mViewProj.f14 = (nearPlane + farPlane) * rcpnmf;
m_mViewProj.f15 = 1.0f;
}
The third article here will help:
http://montgomery1.com/opengl/
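On Android the same idea is usually expressed with android.opengl.Matrix rather than a hand-built matrix struct. Here is a minimal sketch for a renderer, assuming the vertex shader is extended with a uMVPMatrix uniform (as in the other snippets on this page) that gets multiplied with vPosition, and that the matrix is passed from the renderer into draw():
private final float[] mProjMatrix = new float[16];

@Override
public void onSurfaceChanged(GL10 unused, int width, int height) {
    GLES20.glViewport(0, 0, width, height);
    float ratio = (float) width / height;
    // Symmetric orthographic projection that preserves the aspect ratio.
    Matrix.orthoM(mProjMatrix, 0, -ratio, ratio, -1f, 1f, -1f, 1f);
}

// In Triangle.draw(), after glUseProgram(mProgram):
// int mvpHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
// GLES20.glUniformMatrix4fv(mvpHandle, 1, false, mProjMatrix, 0);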
I've looked everywhere to understand how to translate shapes with OpenGL ES 2.0, but I can't find the right way. Rotation and scaling work fine.
I tried it with the Android OpenGL ES 2.0 tutorial, but the shape is more distorted than translated.
Here's the code (almost the same as the Android code sample http://developer.android.com/shareables/training/OpenGLES.zip, except for the line doing the translation):
public class MyGLRenderer implements GLSurfaceView.Renderer {
private static final String TAG = "MyGLRenderer";
private Triangle mTriangle;
private Square mSquare;
private final float[] mMVPMatrix = new float[16];
private final float[] mProjMatrix = new float[16];
private final float[] mVMatrix = new float[16];
private final float[] mRotationMatrix = new float[16];
// Declare as volatile because we are updating it from another thread
public volatile float mAngle;
@Override
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
// Set the background frame color
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
mTriangle = new Triangle();
mSquare = new Square();
}
@Override
public void onDrawFrame(GL10 unused) {
// Draw background color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
// Set the camera position (View matrix)
Matrix.setLookAtM(mVMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
// Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
// Draw square
mSquare.draw(mMVPMatrix);
// Create a rotation for the triangle
// long time = SystemClock.uptimeMillis() % 4000L;
// float angle = 0.090f * ((int) time);
Matrix.setRotateM(mRotationMatrix, 0, mAngle, 0, 0, -1.0f);
// Combine the rotation matrix with the projection and camera view
Matrix.multiplyMM(mMVPMatrix, 0, mRotationMatrix, 0, mMVPMatrix, 0);
// Draw triangle
mTriangle.draw(mMVPMatrix);
}
@Override
public void onSurfaceChanged(GL10 unused, int width, int height) {
// Adjust the viewport based on geometry changes,
// such as screen rotation
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width / height;
// this projection matrix is applied to object coordinates
// in the onDrawFrame() method
Matrix.frustumM(mProjMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
}
public static int loadShader(int type, String shaderCode){
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
/**
* Utility method for debugging OpenGL calls. Provide the name of the call
* just after making it:
*
* <pre>
* mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
* MyGLRenderer.checkGlError("glGetUniformLocation");</pre>
*
* If the operation is not successful, the check throws an error.
*
* @param glOperation - Name of the OpenGL call to check.
*/
public static void checkGlError(String glOperation) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, glOperation + ": glError " + error);
throw new RuntimeException(glOperation + ": glError " + error);
}
}
}
And here's the square class with the translation transformation:
class Square {
private final String vertexShaderCode =
// This matrix member variable provides a hook to manipulate
// the coordinates of the objects that use this vertex shader
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"void main() {" +
// the matrix must be included as a modifier of gl_Position
" gl_Position = vPosition * uMVPMatrix;" +
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
"void main() {" +
" gl_FragColor = vColor;" +
"}";
private final FloatBuffer vertexBuffer;
private final ShortBuffer drawListBuffer;
private final int mProgram;
private int mPositionHandle;
private int mColorHandle;
private int mMVPMatrixHandle;
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
static float squareCoords[] = { -0.5f, 0.5f, 0.0f, // top left
-0.5f, -0.5f, 0.0f, // bottom left
0.5f, -0.5f, 0.0f, // bottom right
0.5f, 0.5f, 0.0f }; // top right
private final short drawOrder[] = { 0, 1, 2, 0, 2, 3 }; // order to draw vertices
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex
// Set color with red, green, blue and alpha (opacity) values
float color[] = { 0.2f, 0.709803922f, 0.898039216f, 1.0f };
public Square() {
// initialize vertex byte buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(
// (# of coordinate values * 4 bytes per float)
squareCoords.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(squareCoords);
vertexBuffer.position(0);
// initialize byte buffer for the draw list
ByteBuffer dlb = ByteBuffer.allocateDirect(
// (# of coordinate values * 2 bytes per short)
drawOrder.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
// prepare shaders and OpenGL program
int vertexShader = MyGLRenderer.loadShader(GLES20.GL_VERTEX_SHADER,
vertexShaderCode);
int fragmentShader = MyGLRenderer.loadShader(GLES20.GL_FRAGMENT_SHADER,
fragmentShaderCode);
mProgram = GLES20.glCreateProgram(); // create empty OpenGL Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mProgram); // create OpenGL program executables
}
public void draw(float[] mvpMatrix) {
// Add program to OpenGL environment
GLES20.glUseProgram(mProgram);
// get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
vertexStride, vertexBuffer);
// get handle to fragment shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
// Set color for drawing the triangle
GLES20.glUniform4fv(mColorHandle, 1, color, 0);
// get handle to shape's transformation matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
MyGLRenderer.checkGlError("glGetUniformLocation");
//TRANSLATION
float[] transMatrix = new float[16];
Matrix.setIdentityM(transMatrix,0);
Matrix.translateM(transMatrix,0,5.0f,0,0);
Matrix.multiplyMM(transMatrix,0,mvpMatrix,0,transMatrix,0);
// Apply the projection and view transformation
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, transMatrix, 0);
MyGLRenderer.checkGlError("glUniformMatrix4fv");
// Draw the square
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length,
GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
}
I think it's related to the vertex shader code, but I can't figure it out.
I am doing the translation of the matrix within the onDrawFrame method of the Renderer, so my code looks like this:
// Set the camera position (View matrix)
Matrix.setLookAtM(mViewMatrixS, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
//translate the Matrix
Matrix.translateM(mViewMatrixS, 0, 2f, 1f, 0);
// Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrixS, 0, mProjectionMatrix, 0, mViewMatrixS, 0);
// Draw square
mSquare.draw(mMVPMatrixS);
And this does the job of translating my shape correctly. I don't know if this was the problem.
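Worth noting: translating the view matrix moves the camera, so everything drawn with it shifts, not just the square. If only one shape should move, the usual pattern is a per-object model matrix combined as projection * view * model. A minimal sketch using the names from the snippet above, with a deliberately small offset so the shape stays inside the frustum:
float[] modelMatrix = new float[16];
float[] viewModel = new float[16];
float[] mvp = new float[16];

Matrix.setIdentityM(modelMatrix, 0);
Matrix.translateM(modelMatrix, 0, 0.5f, 0f, 0f);                   // move the square, not the camera
Matrix.multiplyMM(viewModel, 0, mViewMatrixS, 0, modelMatrix, 0);  // view * model
Matrix.multiplyMM(mvp, 0, mProjectionMatrix, 0, viewModel, 0);     // projection * (view * model)
mSquare.draw(mvp);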
I found the answer in this post: Is Google's Android OpenGL tutorial teaching incorrect linear algebra?
Just swap uMVPMatrix and vPosition in the vertexShaderCode string so the matrix is on the left (the vertex is a column vector and has to be multiplied by the matrix; putting vPosition first effectively multiplies by the transpose, which is what distorts the shape):
" gl_Position = uMVPMatrix * vPosition;"
The following link contains the answer; it took me over a day to locate it. Posting it here to help others, as I have seen this post many times: OpenGL ES Android Matrix Transformations
The Android example project was wrong and has finally been updated. FYI.
You might be seeing the distortion because of the perspective camera. Try reducing the distance you translate the objects by.
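As a concrete illustration in the Square.draw() code from the question: with the frustum set to near 3 / far 7 and the camera three units away, translating by 5 units along x pushes the square far outside the view volume, so a much smaller offset keeps it visible:
// Illustrative change: half a unit instead of five keeps the square inside the frustum.
Matrix.translateM(transMatrix, 0, 0.5f, 0f, 0f);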
I've just started learning OpenGL for Android and I'm having a weird problem when drawing lines. All I want to do is draw a line based on a finger motion, but as soon as I start swiping, I always get a line following my motion from the origin (0,0).
Here's a picture:
http://imageshack.us/photo/my-images/137/screenshot2012061312174.jpg/
The arrow symbolizes my finger motion, and the line starting at the origin (red circle) is the mentioned line following my entire motion.
Don't be bothered by the Coords array; I know this isn't best practice, but I debugged the entire program and couldn't find any bugs involving it.
I should probably mention that the ArrayList points contains all my generated points.
I've been trying to figure this out for quite a while now, but I'm really stuck, so any suggestion would be helpful.
This is my entire renderer class:
public class HelloOpenGLES20Renderer implements GLSurfaceView.Renderer {
private FloatBuffer triangleVB;
private int mProgram;
private int maPositionHandle;
public ArrayList<PointWrapper> points;
private int muMVPMatrixHandle;
private float[] mMVPMatrix = new float[16];
private float[] mMMatrix = new float[16];
private float[] mVMatrix = new float[16];
private float[] mProjMatrix = new float[16];
private int[] viewport = new int[4];
private ArrayList<Float> coordinates;
float[] Coords = new float[100000];
boolean first;
private int counter;
private PointWrapper last;
private final String vertexShaderCode =
// This matrix member variable provides a hook to manipulate
// the coordinates of the objects that use this vertex shader
"uniform mat4 uMVPMatrix; \n" +
"attribute vec4 vPosition; \n" + "void main(){ \n" +
// the matrix must be included as a modifier of gl_Position
" gl_Position = uMVPMatrix * vPosition; \n" +
"} \n";
private final String fragmentShaderCode = "precision mediump float; \n"
+ "void main(){ \n"
+ " gl_FragColor = vec4 (0.63671875, 0.76953125, 0.22265625, 1.0); \n"
+ "} \n";
private int loadShader(int type, String shaderCode) {
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
public HelloOpenGLES20Renderer() {
points = new ArrayList<PointWrapper>();
first = true;
this.counter = 0;
last = new PointWrapper();
coordinates = new ArrayList<Float>();
}
private float[] convertCoordinates(PointWrapper f) {
float[] vector = new float[4];
GLU.gluUnProject(f.point.x, f.point.y, 0.0f, mVMatrix, 0, mProjMatrix,
0, viewport, 0, vector, 0);
return vector;
}
private void initShapes() {
ArrayList<PointWrapper> points2 = new ArrayList<PointWrapper>(points);
float[] vector;
if (!points2.isEmpty()) {
if(points2.size()%2==1){
points2.remove(points2.size()-1);
}
for (int i = counter/2; i < points2.size(); i++) {
vector = convertCoordinates(points2.get(i));
Coords[counter] = vector[0] / vector[3];
Coords[counter+1] = -1 * (vector[1] / vector[3]);
counter= counter+2;
}
}
// initialize vertex Buffer for triangle
ByteBuffer vbb = ByteBuffer.allocateDirect(
// (# of coordinate values * 4 bytes per float)
Coords.length * 4);
vbb.order(ByteOrder.nativeOrder());// use the device hardware's native
// byte order
triangleVB = vbb.asFloatBuffer(); // create a floating point buffer from
// the ByteBuffer
triangleVB.put(Coords); // add the coordinates to the
// FloatBuffer
triangleVB.position(0); // set the buffer to read the first coordinate
}
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
// Set the background frame color
GLES20.glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
// initialize the triangle vertex array
// initShapes();
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER,
fragmentShaderCode);
mProgram = GLES20.glCreateProgram(); // create empty OpenGL Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader
// to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment
// shader to program
GLES20.glLinkProgram(mProgram); // creates OpenGL program executables
// get handle to the vertex shader's vPosition member
maPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
}
public void onDrawFrame(GL10 unused) {
// Redraw background color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
initShapes();
// Add program to OpenGL environment
GLES20.glUseProgram(mProgram);
// Prepare the triangle data
GLES20.glVertexAttribPointer(maPositionHandle, 2, GLES20.GL_FLOAT,
false, 0, triangleVB);
GLES20.glEnableVertexAttribArray(maPositionHandle);
// Apply a ModelView Projection transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glLineWidth(5f);
GLES20.glDrawArrays(GLES20.GL_LINE_STRIP, 0, counter);
}
public void onSurfaceChanged(GL10 unused, int width, int height) {
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width / height;
viewport[0] = 0;
viewport[1] = 0;
viewport[2] = width;
viewport[3] = height;
// this projection matrix is applied to object coodinates
// in the onDrawFrame() method
Matrix.frustumM(mProjMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
Matrix.setLookAtM(mVMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
}
}
My thanks in advance.
for (int i = counter/2; i < points2.size(); i++) {
vector = convertCoordinates(points2.get(i));
Coords[counter] = vector[0] / vector[3];
Coords[counter+1] = -1 * (vector[1] / vector[3]);
counter= counter+2;
}
You have initialized Coords to hold 100000 floats, and Java initializes them all to 0. After this loop, 'counter' holds the number of floats you have written into the array.
What you pass to glDrawArrays should be the number of VERTICES to draw, so in this case half of 'counter'.
GLES20.glDrawArrays(GLES20.GL_LINE_STRIP, 0, counter);
Your draw call is therefore rendering 'counter'/2 extra (0,0) vertices from the untouched end of your array, which produces the extra line you are seeing. The quickest fix would be to pass 'counter'/2 to glDrawArrays, but I'd suggest a clearer approach:
numOfVertices = points2.size(); //make field
int counter = 0; //make local
for (int i = 0; i < numOfVertices; i++) {
vector = convertCoordinates(points2.get(i));
Coords[counter] = vector[0] / vector[3];
Coords[counter+1] = -1 * (vector[1] / vector[3]);
counter= counter+2;
}
and then
GLES20.glDrawArrays(GLES20.GL_LINE_STRIP, 0, numOfVertices);