Related
I have this code that should move my circle (a square quad):
float[] scratch = new float[16];
float[] move = new float[16];
Matrix.setIdentityM(move, 0);
Matrix.translateM(move, 0, 100, 100, 0);
Matrix.multiplyMM(scratch, 0, projectionMatrix, 0, move, 0);
mCircle.draw(scratch);
projectionMatrix is the camera:
Matrix.orthoM(projectionMatrix, 0, 0, width, height, 0, -1f, 1f);
But when I execute the code I get this:
[image]
I followed the code from the Android Developer training guide.
precision highp float;
uniform float uRadius;
vec2 center = vec2(uRadius, uRadius);
vec2 coord = vec2(gl_FragCoord.x, 1080. - gl_FragCoord.y);
vec2 position = coord - center;
uniform vec4 uColor;
void main()
{
if (length(position) > uRadius) {
discard;
}
gl_FragColor = uColor;
}
--------------------------------
uniform mat4 uMatrix;
attribute vec4 aPosition;
void main()
{
gl_Position = uMatrix * aPosition;
}
My main loop:
public void onDrawFrame(GL10 unused) {
// Draw background color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
float[] scratch = new float[16];
float[] move = new float[16];
Matrix.setIdentityM(move, 0);
Matrix.translateM(move, 0, 50, 50, 0);
Matrix.multiplyMM(scratch, 0, projectionMatrix, 0, move, 0);
mCircle.draw(scratch);
}
And the circle's draw function is:
public void draw(float[] projectionMatrix) {
GLES20.glUseProgram(mProgram);
// get handle to vertex shader's vPosition member
int mPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(
mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
vertexStride, vertexBuffer);
// get handle to fragment shader's vColor member
int mColorHandle = GLES20.glGetUniformLocation(mProgram, "uColor");
// Set color for drawing the triangle
GLES20.glUniform4fv(mColorHandle, 1, mColor, 0);
int radiusHandle = GLES20.glGetUniformLocation(mProgram, "uRadius");
MyGLRenderer.checkGlError("glGetUniformLocation");
GLES20.glUniform1f(radiusHandle, mRadius);
// get handle to shape's transformation matrix
int mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMatrix");
MyGLRenderer.checkGlError("glGetUniformLocation");
// Apply the projection and view transformation
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, projectionMatrix, 0);
MyGLRenderer.checkGlError("glUniformMatrix4fv");
// Draw the square
GLES20.glDrawElements(
GLES20.GL_TRIANGLES, drawOrder.length,
GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
After some hours I finally found the logical error in the fragment shader: the center of the circle always stayed the same. I added two more uniforms for the offset/position, and now it works.
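For reference, a minimal sketch of what the fixed fragment shader can look like, assuming the two new uniforms are called uCenterX and uCenterY and hold the circle's centre in window coordinates (origin at the bottom left, matching gl_FragCoord). The names are mine; use whatever you upload from draw():

precision highp float;
uniform float uRadius;
uniform float uCenterX;  // set per draw call, alongside uRadius
uniform float uCenterY;
uniform vec4 uColor;
void main()
{
    // distance from this fragment to the translated centre
    vec2 position = gl_FragCoord.xy - vec2(uCenterX, uCenterY);
    if (length(position) > uRadius) {
        discard;
    }
    gl_FragColor = uColor;
}

On the Java side the two extra uniforms would be set with GLES20.glUniform1f next to the existing uRadius upload.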
I've looked everywhere to understand how to translate shapes with OpenGL ES 2.0, but I can't find the right way. Rotation and scaling work fine.
I tried it with the Android OpenGL ES 2.0 tutorial, but the shape ends up distorted rather than translated.
Here's the code (almost the same as the Android code sample http://developer.android.com/shareables/training/OpenGLES.zip, except for the translation lines):
public class MyGLRenderer implements GLSurfaceView.Renderer {
private static final String TAG = "MyGLRenderer";
private Triangle mTriangle;
private Square mSquare;
private final float[] mMVPMatrix = new float[16];
private final float[] mProjMatrix = new float[16];
private final float[] mVMatrix = new float[16];
private final float[] mRotationMatrix = new float[16];
// Declare as volatile because we are updating it from another thread
public volatile float mAngle;
@Override
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
// Set the background frame color
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
mTriangle = new Triangle();
mSquare = new Square();
}
@Override
public void onDrawFrame(GL10 unused) {
// Draw background color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
// Set the camera position (View matrix)
Matrix.setLookAtM(mVMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
// Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
// Draw square
mSquare.draw(mMVPMatrix);
// Create a rotation for the triangle
// long time = SystemClock.uptimeMillis() % 4000L;
// float angle = 0.090f * ((int) time);
Matrix.setRotateM(mRotationMatrix, 0, mAngle, 0, 0, -1.0f);
// Combine the rotation matrix with the projection and camera view
Matrix.multiplyMM(mMVPMatrix, 0, mRotationMatrix, 0, mMVPMatrix, 0);
// Draw triangle
mTriangle.draw(mMVPMatrix);
}
@Override
public void onSurfaceChanged(GL10 unused, int width, int height) {
// Adjust the viewport based on geometry changes,
// such as screen rotation
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width / height;
// this projection matrix is applied to object coordinates
// in the onDrawFrame() method
Matrix.frustumM(mProjMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
}
public static int loadShader(int type, String shaderCode){
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
/**
* Utility method for debugging OpenGL calls. Provide the name of the call
* just after making it:
*
* <pre>
* mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
* MyGLRenderer.checkGlError("glGetUniformLocation");</pre>
*
* If the operation is not successful, the check throws an error.
*
* @param glOperation - Name of the OpenGL call to check.
*/
public static void checkGlError(String glOperation) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, glOperation + ": glError " + error);
throw new RuntimeException(glOperation + ": glError " + error);
}
}
}
And here's the square class with the translation transformation:
class Square {
private final String vertexShaderCode =
// This matrix member variable provides a hook to manipulate
// the coordinates of the objects that use this vertex shader
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"void main() {" +
// the matrix must be included as a modifier of gl_Position
" gl_Position = vPosition * uMVPMatrix;" +
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
"void main() {" +
" gl_FragColor = vColor;" +
"}";
private final FloatBuffer vertexBuffer;
private final ShortBuffer drawListBuffer;
private final int mProgram;
private int mPositionHandle;
private int mColorHandle;
private int mMVPMatrixHandle;
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
static float squareCoords[] = { -0.5f, 0.5f, 0.0f, // top left
-0.5f, -0.5f, 0.0f, // bottom left
0.5f, -0.5f, 0.0f, // bottom right
0.5f, 0.5f, 0.0f }; // top right
private final short drawOrder[] = { 0, 1, 2, 0, 2, 3 }; // order to draw vertices
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex
// Set color with red, green, blue and alpha (opacity) values
float color[] = { 0.2f, 0.709803922f, 0.898039216f, 1.0f };
public Square() {
// initialize vertex byte buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(
// (# of coordinate values * 4 bytes per float)
squareCoords.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(squareCoords);
vertexBuffer.position(0);
// initialize byte buffer for the draw list
ByteBuffer dlb = ByteBuffer.allocateDirect(
// (# of coordinate values * 2 bytes per short)
drawOrder.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
// prepare shaders and OpenGL program
int vertexShader = MyGLRenderer.loadShader(GLES20.GL_VERTEX_SHADER,
vertexShaderCode);
int fragmentShader = MyGLRenderer.loadShader(GLES20.GL_FRAGMENT_SHADER,
fragmentShaderCode);
mProgram = GLES20.glCreateProgram(); // create empty OpenGL Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mProgram); // create OpenGL program executables
}
public void draw(float[] mvpMatrix) {
// Add program to OpenGL environment
GLES20.glUseProgram(mProgram);
// get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
vertexStride, vertexBuffer);
// get handle to fragment shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
// Set color for drawing the triangle
GLES20.glUniform4fv(mColorHandle, 1, color, 0);
// get handle to shape's transformation matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
MyGLRenderer.checkGlError("glGetUniformLocation");
//TRANSLATION
float[] transMatrix = new float[16];
Matrix.setIdentityM(transMatrix,0);
Matrix.translateM(transMatrix,0,5.0f,0,0);
Matrix.multiplyMM(transMatrix,0,mvpMatrix,0,transMatrix,0);
// Apply the projection and view transformation
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, transMatrix, 0);
MyGLRenderer.checkGlError("glUniformMatrix4fv");
// Draw the square
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length,
GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
}
I think it's related to the vertex shader code, but I can't figure it out.
I am doing the translation of the matrix within the onDrawFrame method of the Renderer, so my code looks like this:
// Set the camera position (View matrix)
Matrix.setLookAtM(mViewMatrixS, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
//translate the Matrix
Matrix.translateM(mViewMatrixS, 0, 2f, 1f, 0);
// Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrixS, 0, mProjectionMatrix, 0, mViewMatrixS, 0);
// Draw square
mSquare.draw(mMVPMatrixS);
And this does the job of translating my shape correctly. I don't know if this was the problem.
I found the answer in this post: Is Google's Android OpenGL tutorial teaching incorrect linear algebra?
Just swap uMVPMatrix and vPosition in the vertexShaderCode string so the line reads:
" gl_Position = uMVPMatrix * vPosition;"
The following link contains the answer. It took me over a day to locate it. Posting here to help others, as I've seen this post many times: OpenGL ES Android Matrix Transformations
FYI: the Android example project was wrong and has finally been updated.
You might be seeing the distortion because of the perspective camera. Try reducing the distance you translate the objects by.
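If that is the cause here, note that the draw() method above translates by 5.0f on X while the frustum only spans -ratio to ratio horizontally, so the square lands far outside the view volume. A sketch of the same lines with a smaller, visible offset (the only change is the 0.5f):

//TRANSLATION: keep the offset within the visible range of the frustum
float[] transMatrix = new float[16];
Matrix.setIdentityM(transMatrix, 0);
Matrix.translateM(transMatrix, 0, 0.5f, 0, 0);  // 0.5f instead of 5.0f
Matrix.multiplyMM(transMatrix, 0, mvpMatrix, 0, transMatrix, 0);
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, transMatrix, 0);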
I've tried following the lessons about drawing shapes and applying projections to them as found here, but the projection just doesn't seem to work. As far as I know I've copied all the pieces of code into my project; here are the important parts:
In my Renderer class:
@Override
public void onDrawFrame(GL10 unused) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
// Set the camera position (View matrix)
Matrix.setLookAtM(mVMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
// Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
// Draw shape
mTriangle.draw(mMVPMatrix);
}
@Override
public void onSurfaceChanged(GL10 unused, int width, int height) {
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width / height;
Matrix.frustumM(mProjMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
}
The draw method of my Triangle class:
public void draw(float[] mvpMatrix) {
GLES20.glUseProgram(mProgram);
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition"); // get handle to vertex shader's vPosition member
GLES20.glEnableVertexAttribArray(mPositionHandle); // Enable a handle to the triangle vertices
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer); // Prepare the triangle coordinate data
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor"); // get handle to fragment shader's vColor member
GLES20.glUniform4fv(mColorHandle, 1, color, 0); // Set color for drawing the triangle
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix"); // get handle to shape's transformation matrix
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0); // Apply the projection and view transformation
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, vertexCount); // Draw the triangle
GLES20.glDisableVertexAttribArray(mPositionHandle); // Disable vertex array
}
The triangle is still being stretched. What am I doing wrong?
Did you try to debug in onSurfaceChanged? What happens in those lines? Especially:
what are the passed values width and height?
which value does float ratio = (float) width / height; calculate?
do you see any changes to the values of mProjMatrix after the call Matrix.frustumM(mProjMatrix, 0, -ratio, ratio, -1, 1, 3, 7);?
what does your private final String vertexShaderCode in class Triangle look like? The order of the multiplication in the line " gl_Position = uMVPMatrix * vPosition;" is important.
The code you provided looks quite good; this is a difficult issue.
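A quick way to check the first three points is to log them right where the projection is set up; a minimal sketch (the log tag is arbitrary, and android.util.Log needs to be imported):

@Override
public void onSurfaceChanged(GL10 unused, int width, int height) {
    GLES20.glViewport(0, 0, width, height);
    float ratio = (float) width / height;
    Matrix.frustumM(mProjMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
    // dump the inputs and the resulting projection matrix to logcat
    Log.d("onSurfaceChanged", "width=" + width + ", height=" + height + ", ratio=" + ratio);
    Log.d("onSurfaceChanged", "mProjMatrix=" + java.util.Arrays.toString(mProjMatrix));
}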
Edit: Code added, please see below.
Edit 2: Screenshots from device included at the bottom along with an explanation.
Edit 3: New code added.
I have 2 classes, a renderer and a custom 'quad' class.
I have these declared at class level in my renderer class:
final float[] mMVPMatrix = new float[16];
final float[] mProjMatrix = new float[16];
final float[] mVMatrix = new float[16];
And in my onSurfaceChanged method I have:
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width / height;
Matrix.frustumM(mProjMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
}
and....
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
// TODO Auto-generated method stub
myBitmap = BitmapFactory.decodeResource(curView.getResources(), R.drawable.box);
//Create new Dot objects
dot1 = new Quad();
dot1.setTexture(curView, myBitmap);
dot1.setSize(300,187); //These numbers are the size but are redundant/not used at the moment.
myBitmap.recycle();
//Set colour to black
GLES20.glClearColor(0, 0, 0, 1);
}
And finally from this class, onDrawFrame:
@Override
public void onDrawFrame(GL10 gl) {
// TODO Auto-generated method stub
//Paint the screen the colour defined in onSurfaceCreated
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
// Set the camera position (View matrix) so looking from the front
Matrix.setLookAtM(mVMatrix, 0, 0, 0, 3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
// Combine
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
dot1.rotateQuad(0,0,45, mMVPMatrix); //x,y,angle and matrix passed in
}
Then, in my quad class:
This declared at class level:
private float[] mRotationMatrix = new float[16];
private final float[] mMVPMatrix = new float[16];
private final float[] mProjMatrix = new float[16];
private final float[] mVMatrix = new float[16];
private int mMVPMatrixHandle;
private int mPositionHandle;
private int mRotationHandle;
//Create our vertex shader
String strVShader =
"uniform mat4 uMVPMatrix;" +
"uniform mat4 uRotate;" +
"attribute vec4 a_position;\n"+
"attribute vec2 a_texCoords;" +
"varying vec2 v_texCoords;" +
"void main()\n" +
"{\n" +
// "gl_Position = a_position * uRotate;\n"+
// "gl_Position = uRotate * a_position;\n"+
"gl_Position = a_position * uMVPMatrix;\n"+
// "gl_Position = uMVPMatrix * a_position;\n"+
"v_texCoords = a_texCoords;" +
"}";
//Fragment shader
String strFShader =
"precision mediump float;" +
"varying vec2 v_texCoords;" +
"uniform sampler2D u_baseMap;" +
"void main()" +
"{" +
"gl_FragColor = texture2D(u_baseMap, v_texCoords);" +
"}";
Then the method for setting the texture (I don't think this is relevant to this problem, though):
public void setTexture(GLSurfaceView view, Bitmap imgTexture){
this.imgTexture=imgTexture;
iProgId = Utils.LoadProgram(strVShader, strFShader);
iBaseMap = GLES20.glGetUniformLocation(iProgId, "u_baseMap");
iPosition = GLES20.glGetAttribLocation(iProgId, "a_position");
iTexCoords = GLES20.glGetAttribLocation(iProgId, "a_texCoords");
texID = Utils.LoadTexture(view, imgTexture);
}
And finally, my 'rotateQuad' method (which currently is supposed to draw and rotate the quad).
public void rotateQuad(float x, float y, int angle, float[] mvpMatrix){
Matrix.setRotateM(mRotationMatrix, 0, angle, 0, 0, 0.1f);
// Matrix.translateM(mRotationMatrix, 0, 0, 0, 0); //Removed temporarily
// Combine the rotation matrix with the projection and camera view
Matrix.multiplyMM(mvpMatrix, 0, mRotationMatrix, 0, mvpMatrix, 0);
float[] vertices = {
-.5f,.5f,0, 0,0,
.5f,.5f,0, 1,0,
-.5f,-.5f,0, 0,1,
.5f,-.5f,0, 1,1
};
vertexBuf = ByteBuffer.allocateDirect(vertices.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
vertexBuf.put(vertices).position(0);
//Bind the correct texture
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texID);
//Use program
GLES20.glUseProgram(iProgId);
// get handle to shape's transformation matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(iProgId, "uMVPMatrix");
// Apply the projection and view transformation
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
// get handle to shape's rotation matrix
mRotationHandle = GLES20.glGetUniformLocation(iProgId, "uRotate");
// Apply the projection and view transformation
GLES20.glUniformMatrix4fv(mRotationHandle, 1, false, mRotationMatrix, 0);
//Set starting position for vertices
vertexBuf.position(0);
//Specify attributes for vertex
GLES20.glVertexAttribPointer(iPosition, 3, GLES20.GL_FLOAT, false, 5 * 4, vertexBuf);
//Enable attribute for position
GLES20.glEnableVertexAttribArray(iPosition);
//Set starting position for texture
vertexBuf.position(3);
//Specify attributes for vertex
GLES20.glVertexAttribPointer(iTexCoords, 2, GLES20.GL_FLOAT, false, 5 * 4, vertexBuf);
//Enable attribute for texture
GLES20.glEnableVertexAttribArray(iTexCoords);
//Draw it
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
}
For Edit 2:
This is my quad as drawn in the center of the screen. No rotation.
This is the same quad rotated by +45 degrees with the code "gl_Position = a_position * uMVPMatrix;" in my vertex shader (it's from a different project now, so the shader variable is a_position and not vPosition) - it looks correct!
However, this is the same quad rotated by +45 degrees with the two shader variables switched (so the line reads "gl_Position = uMVPMatrix * a_position;") - as you can see, it's not quite right.
Also, just a side note: you can't see it here as the square is symmetrical, but each version also rotates in the opposite direction to the other.
Any help appreciated.
It's really impossible to tell because we don't know what you are passing to these two variables.
OpenGL uses column-major matrix layout, so if vPosition is in fact a vector and uMVPMatrix is a matrix, then the first option is correct, assuming this line is in your shader.
If this is not in your shader but in your program code, then there is not enough information.
If you are using the first option but getting unexpected results, you are likely not computing your matrix properly or not passing the correct vertices.
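On the Java side, keep in mind that android.opengl.Matrix.multiplyMM(result, resultOffset, lhs, lhsOffset, rhs, rhsOffset) computes lhs * rhs in that same column-major convention, so with gl_Position = uMVPMatrix * a_position the model transform (here the rotation) belongs on the right, where it is applied to the vertex first. A sketch using the names from the code above; scratch is a temporary array I'm introducing so the result is not written into one of the inputs:

// scratch = (projection * view) * rotation: the rotation reaches the vertex first
float[] scratch = new float[16];
Matrix.multiplyMM(scratch, 0, mMVPMatrix, 0, mRotationMatrix, 0);
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, scratch, 0);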
Normally in the vertex shader you should multiply the positions by the MVP matrix, that is:
gl_Position = uMVPMatrix * vPosition;
When you change the order, this should work.
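Applied to the vertex shader from the question, that would look roughly like this (only the gl_Position line differs from the a_position * uMVPMatrix version; the commented-out variants are dropped, and uRotate is left declared since the draw code still uploads it):

String strVShader =
    "uniform mat4 uMVPMatrix;" +
    "uniform mat4 uRotate;" +
    "attribute vec4 a_position;\n" +
    "attribute vec2 a_texCoords;" +
    "varying vec2 v_texCoords;" +
    "void main()\n" +
    "{\n" +
    "gl_Position = uMVPMatrix * a_position;\n" +
    "v_texCoords = a_texCoords;" +
    "}";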
Thanks to all for the help.
I managed to track down the problem (for the most part). I will show what I did.
It was the following line:
Matrix.multiplyMM(mvpMatrix, 0, mvpMatrix, 0, mRotationMatrix, 0);
As you can see, I was multiplying the matrices and storing the result back into one of the matrices I was using as an input to the multiplication.
So I created a new matrix called mvpMatrix2, stored the result in that, and then passed it to my vertex shader.
//Multiply matrices
Matrix.multiplyMM(mvpMatrix2, 0, mvpMatrix, 0, mRotationMatrix, 0);
//get handle to shape's transformation matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(iProgId, "uMVPMatrix");
//Give to vertex shader variable
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix2, 0);
After applying this, there is no distortion (and also, with regard to my other question here, Using Matrix.Rotate in OpenGL ES 2.0, I am able to translate the centre of the quad). I say 'for the most part' because when I rotate it, it rotates backwards (so if I ask for +45 degrees, clockwise, it actually rotates the quad by -45 degrees, anti-clockwise).
But hopefully, this will help anyone who has a similar problem in the future.
I am trying to make a projection matrix that scales the screen and sets up a coordinate system. For some reason I don't think any of my matrix calls are working... the 3 functions I am using are:
Matrix.orthoM(mProjMatrix, 0, 0, 1520, 0, 1000, -1, 10);
Matrix.setLookAtM(mVMatrix, 0, 0, 0, 1.0f, 0.0f, 0f, 0f, 0f, 1.0f, 0.0f);
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
Are they cancelling each other out? Is anything wrong with them? (The full renderer class code is at the end.)
My main goal is eventually to get to a situation where, when I make a square, I can provide coordinates such as (200, 100, 0) // x, y, z that are not restricted to the range -1 to 1.
Here is my full rendering class:
public class MyRenderer implements Renderer {
private static final String TAG = "MyRenderer";
Square square;
private final float[] mMVPMatrix = new float[16];
private final float[] mProjMatrix = new float[16];
private final float[] mVMatrix = new float[16];
private final float[] mRotationMatrix = new float[16];
private int camWidth,camHeight;
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
GLES20.glClearColor(0.0f, 0.0f, 1.0f, 0.5f);
camWidth=480;camHeight=320;
// initialize a square
square = new Square();
}
@Override
public void onDrawFrame(GL10 nope) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
//set camera position
GLES20.glViewport(0, 0, camWidth, camHeight);
Matrix.orthoM(mProjMatrix, 0, 0, 1520, 0, 1000, -10, 999999);
Matrix.setLookAtM(mVMatrix, 0, 0, 0, 1.0f, 0.0f, 0f, 0f, 0f, 1.0f, 0.0f);
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
square.draw(mMVPMatrix);
}
@Override
public void onSurfaceChanged(GL10 nope, int width, int height) {
GLES20.glViewport(0, 0, camWidth, camHeight);
Matrix.orthoM(mProjMatrix, 0, 0, 1520, 0, 1000, -10, 999999);
Matrix.setLookAtM(mVMatrix, 0, 0, 0, 1.0f, 0.0f, 0f, 0f, 0f, 1.0f, 0.0f);
}
public static int loadShader(int type, String shaderCode) {
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
}
My Square class (I wasn't sure whether it was needed, but just to be safe):
public class Square {
private final String vertexShaderCode =
"attribute vec4 vPosition;" +
"void main() {" +
" gl_Position = vPosition;" +
"}";
private final String fragmentShaderCode = "precision mediump float;"
+ "uniform vec4 vColor;" + "void main() {"
+ " gl_FragColor = vColor;" + "}";
static final int COORDS_PER_VERTEX = 3;
static float triangleCoords[] = { // in counterclockwise order:
-0.5f, 0.5f, 0.0f, // top left
-0.5f, -0.5f, 0.0f, // bottom left
0.5f, -0.5f, 0.0f, // bottom right
0.5f, 0.5f, 0.0f
};
private short drawOrder[] = { 0, 1, 2, 0, 2, 3 };
private final int vertexCount = triangleCoords.length / COORDS_PER_VERTEX;
private final int vertexStride = COORDS_PER_VERTEX * 4; // bytes per vertex
// red-green-blue-alpha
float color[] = { 0.63f, 0.76f, 0.22f, 1.0f };
private final int mProgram;
private int mPositionHandle;
private int mColorHandle;
private int mMVPMatrixHandle;
private FloatBuffer vertexBuffer;
private ShortBuffer drawListBuffer;
public Square() {
ByteBuffer bb = ByteBuffer.allocateDirect(
// # of coords values * 4 bytes per float
triangleCoords.length * 4);
// use native byte order
bb.order(ByteOrder.nativeOrder());
// create a floating point buffer from the ByteBuffer
vertexBuffer = bb.asFloatBuffer();
// add the coordinates to the FloatBuffer
vertexBuffer.put(triangleCoords);
// set the buffer to read first coordinate
vertexBuffer.position(0);
ByteBuffer dlb = ByteBuffer.allocateDirect(drawOrder.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
int vertexShader = ChizRenderer.loadShader(GLES20.GL_VERTEX_SHADER,
vertexShaderCode);
int fragmentShader = ChizRenderer.loadShader(GLES20.GL_FRAGMENT_SHADER,
fragmentShaderCode);
mProgram = GLES20.glCreateProgram();
GLES20.glAttachShader(mProgram, vertexShader);
GLES20.glAttachShader(mProgram, fragmentShader);
GLES20.glLinkProgram(mProgram);
}
public void draw(float[] mvpMatrix) {
// Add program to OpenGL ES environment
GLES20.glUseProgram(mProgram);
// get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
// get handle to fragment shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
// set color for drawing the triangle
GLES20.glUniform4fv(mColorHandle, 1, color, 0);
// get handle to shape's transformation matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
// apply the projection and view transformation
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
// Draw the triangle
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length,
GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
// disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
}
And lastly, just so you could have some visuals:
This is how it looks on my phone both with and without the three matrix functions mentioned before. It also seems that the only thing that made any difference to the width and height was GLES20.glViewport(0, 0, camWidth, camHeight);
It seems as if the matrix is doing nothing.
It looks like you adapted the sample to a square. A couple of issues here:
Call glViewport only in onSurfaceChanged, and only with the width and height it is given.
The vertex shader code does not use uMVPMatrix. You'd have seen this by checking the value of mMVPMatrixHandle (it's -1 for uniforms that don't exist, see here); a corrected shader is sketched after this list.
After the program has been linked, the locations of the shader variables are fixed, so the code may fetch them once, and not for every draw call.
Then, you'll need to adapt the coordinates of the square...
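A minimal sketch of the vertex shader with the matrix actually applied; it reuses the uMVPMatrix and vPosition names the existing draw() code already queries, so nothing else needs to change for it to link:

private final String vertexShaderCode =
    "uniform mat4 uMVPMatrix;" +
    "attribute vec4 vPosition;" +
    "void main() {" +
    "  gl_Position = uMVPMatrix * vPosition;" +
    "}";

And for the third point, the handle lookups can move out of draw() into the constructor, right after glLinkProgram (one possible arrangement, not the only one):

mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");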