I'm trying to render to a texture using OpenGL ES on an Android phone (using a native plugin/C++). I can set the color of the texture using glClear, and I can clear different sections to different colors using glScissor, so I'm fairly sure the issue isn't the framebuffer setup/attachment. There's probably an issue with the shaders or the vertex data, but I can't see what it is.
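For reference, the scissor sanity check I mention is roughly the following (a sketch, not the exact code I ran; it uses the same WIDTH, HEIGHT and framebuffer_id as the code below):

// clear the left half red and the right half green to confirm the FBO attachment works
glBindFramebuffer(GL_FRAMEBUFFER, framebuffer_id);
glEnable(GL_SCISSOR_TEST);
glScissor(0, 0, WIDTH / 2, HEIGHT);
glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glScissor(WIDTH / 2, 0, WIDTH / 2, HEIGHT);
glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glDisable(GL_SCISSOR_TEST);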
Here's the code I've reduced the problem down to:
#include <GLES3/gl31.h>
#include <GLES2/gl2ext.h>
#include <GLES3/gl3ext.h>
const int32_t WIDTH = 512;
const int32_t HEIGHT = 512;
char vertex_code[] = R"(
#version 300 es
in vec3 VertexPosition;
void main() {
    gl_Position = vec4(VertexPosition, 1.0f);
}
)";

char fragment_code[] = R"(
#version 300 es
precision mediump float;
out vec4 FragColor;
void main() {
    FragColor = vec4(1.0f, 0.0f, 1.0f, 1.0f);
}
)";
GLuint compile_shader(char* shader_code, GLenum shader_type) {
    GLuint shader_id = glCreateShader(shader_type);
    glShaderSource(shader_id, 1, &shader_code, nullptr);
    glCompileShader(shader_id);
    // check shader compilation:
    GLint result = 0;
    glGetShaderiv(shader_id, GL_COMPILE_STATUS, &result);
    if (result == GL_FALSE) {
        glDeleteShader(shader_id);
        return 0;
    }
    return shader_id;
}
GLuint create_shader_program(char* vertex_code, char* fragment_code) {
    GLuint vertex_id = compile_shader(vertex_code, GL_VERTEX_SHADER);
    GLuint fragment_id = compile_shader(fragment_code, GL_FRAGMENT_SHADER);
    GLuint shader_program_id = glCreateProgram();
    // attach shaders and link:
    glAttachShader(shader_program_id, vertex_id);
    glAttachShader(shader_program_id, fragment_id);
    glLinkProgram(shader_program_id);
    // check linking status:
    GLint result = 0;
    glGetProgramiv(shader_program_id, GL_LINK_STATUS, &result);
    if (result == GL_FALSE) {
        glDeleteProgram(shader_program_id);
        glDeleteShader(vertex_id);
        glDeleteShader(fragment_id);
        return 0;
    }
    // delete the shaders now that they're linked:
    glDetachShader(shader_program_id, vertex_id);
    glDetachShader(shader_program_id, fragment_id);
    glDeleteShader(vertex_id);
    glDeleteShader(fragment_id);
    return shader_program_id;
}
GLuint render_texture() {
    // *** create framebuffer and texture to render into
    GLuint framebuffer_id;
    glGenFramebuffers(1, &framebuffer_id);
    glBindFramebuffer(GL_FRAMEBUFFER, framebuffer_id);
    GLuint target_texture_id;
    glGenTextures(1, &target_texture_id);
    glBindTexture(GL_TEXTURE_2D, target_texture_id);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, WIDTH, HEIGHT, 0, GL_RGBA, GL_UNSIGNED_BYTE, nullptr);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, target_texture_id, 0);
    if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
        return 0;
    }
    glBindTexture(GL_TEXTURE_2D, 0);
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
    // *** create shader program
    GLuint shader_program_id = create_shader_program(vertex_code, fragment_code);
    // *** set up quad
    float quad[] = {
        // positions
        -1.0f, -1.0f, 0.0f,
         1.0f,  1.0f, 0.0f,
        -1.0f,  1.0f, 0.0f,
        -1.0f, -1.0f, 0.0f,
         1.0f, -1.0f, 0.0f,
         1.0f,  1.0f, 0.0f,
    };
    GLuint vertex_data_buffer, vertex_array;
    glGenVertexArrays(1, &vertex_array);
    glGenBuffers(1, &vertex_data_buffer);
    glBindVertexArray(vertex_array);
    // load vertex data into VBO:
    glBindBuffer(GL_ARRAY_BUFFER, vertex_data_buffer);
    glBufferData(GL_ARRAY_BUFFER, sizeof(quad), quad, GL_STATIC_DRAW);
    // position attribute
    GLint vertex_pos_location = glGetAttribLocation(shader_program_id, "VertexPosition");
    Log("vertex_pos_location %i", vertex_pos_location);
    glVertexAttribPointer(vertex_pos_location, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void*)0);
    glEnableVertexAttribArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    // *** render
    glBindFramebuffer(GL_FRAMEBUFFER, framebuffer_id);
    glClearColor(0.0f, 0.0f, 1.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);
    glUseProgram(shader_program_id);
    glBindVertexArray(vertex_array);
    glDrawArrays(GL_TRIANGLES, 0, 6);
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
    glFinish();
    return target_texture_id;
}
When I use this texture elsewhere (within Unity, for what it's worth) I can see that the glClear(GL_COLOR_BUFFER_BIT); call has worked fine, but I'm not seeing the pink colour I'd expect from the basic fragment shader. I've used glGetError() to check for errors after each GL call, and they all seem fine...
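For reference, the per-call error checking I'm describing is roughly this (a sketch, reusing the Log helper from the code above; GL_CHECK is just a name I made up for this post):

#define GL_CHECK(call)                                                  \
    do {                                                                \
        call;                                                           \
        GLenum err = glGetError();                                      \
        if (err != GL_NO_ERROR) {                                       \
            Log("%s failed with glGetError() = 0x%x", #call, err);      \
        }                                                               \
    } while (0)

// e.g. GL_CHECK(glDrawArrays(GL_TRIANGLES, 0, 6));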
Any thoughts on what the issue could be?
New to OpenGL and currently working with the Android NDK. I am currently getting this error and can't figure it out. I feel like it's something basic, but I could be wrong. appRender is run every frame by the Java renderer side.
Error:
E/emuglGLESv2_enc: glDrawArrays: a vertex attribute array is enabled with no data bound
app.cpp:
void appRender(long tick, int width, int height){
    const float vertices[] =
    {
         0.0f,  0.5f, 0.0f,
        -0.5f, -0.5f, 0.0f,
         0.5f, -0.5f, 0.0f
    };

    glClear(GL_COLOR_BUFFER_BIT);

    GLuint vertexBuffer;
    glGenBuffers(1, &vertexBuffer);
    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);

    // Setup colors for each vertex
    GLfloat colors[3*4];
    for (int v = 0; v < 3; v++){
        colors[4*v+0] = 0;
        colors[4*v+1] = 1;
        colors[4*v+2] = 0;
        colors[4*v+3] = 1;
    }

    // Setup color buffer
    GLuint colorBuffer;
    glGenBuffers(1, &colorBuffer);
    glBindBuffer(GL_ARRAY_BUFFER, colorBuffer);
    glBufferData(GL_ARRAY_BUFFER, sizeof(colors), colors, GL_STATIC_DRAW);

    glEnableVertexAttribArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
    glVertexAttribPointer(
        1,
        3,
        GL_FLOAT,
        GL_FALSE,
        0,
        (void*)0
    );

    glEnableVertexAttribArray(1);
    glBindBuffer(GL_ARRAY_BUFFER, colorBuffer);
    glVertexAttribPointer(
        1,
        4,
        GL_FLOAT,
        GL_FALSE,
        0,
        (void*)0
    );

    glDrawArrays(GL_TRIANGLES, 0, 3);

    glDisableVertexAttribArray(0);
    glDisableVertexAttribArray(1);
    glBindTexture(GL_TEXTURE_2D, 0);

    //textRenderer.RenderTexts(0.5f);
    //boxRenderer.RenderBoxes(0.5f);
}
So I found it, and yes, I'm bad.
glVertexAttribPointer(1,3,...) -> glVertexAttribPointer(0,3,...)
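In other words, the position data has to be fed to attribute 0 and the colors to attribute 1. A sketch of how the relevant part ends up (assuming the shader really does use locations 0 and 1):

// Position data -> attribute 0 (this was the wrong index before)
glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
glEnableVertexAttribArray(0);

// Color data -> attribute 1
glBindBuffer(GL_ARRAY_BUFFER, colorBuffer);
glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, 0, (void*)0);
glEnableVertexAttribArray(1);

glDrawArrays(GL_TRIANGLES, 0, 3);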
I'm having trouble understanding where/how to set up buffers for a native Android application in VS 2015. I apologize if this isn't the best way to ask a question. I appreciate any help/insight.
This is what I have so far:
(in engine_init_display)
GLint vShaderLength = vertex_shader.length();
const GLchar* vcode = vertex_shader.c_str();
GLint fShaderLength = fragment_shader.length();
const GLchar* fcode = fragment_shader.c_str();
GLuint vs = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vs, 1, &vcode, NULL);
glCompileShader(vs);
GLuint fs = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fs, 1, &fcode, NULL);
glCompileShader(fs);
shader_programme = glCreateProgram();
glAttachShader(shader_programme, fs);
glAttachShader(shader_programme, vs);
glLinkProgram(shader_programme);
GLint pos_id = glGetAttribLocation(shader_programme, "position");
//Set vertex data
glUseProgram(shader_programme);
glVertexAttribPointer(pos_id, 0, GL_FLOAT, GL_FALSE, 0, 0);
glVertexAttribPointer(pos_id,              //GLuint
                      3,                   //GLint size
                      GL_FLOAT,            //GLenum type
                      GL_FALSE,            //GLboolean
                      (sizeof(float) * 5), //GLsizei stride
                      points               //const GLvoid *pointer
                      );
glEnableVertexAttribArray(pos_id);
(in engine_draw_frame)
glClearColor(1.0f, 0.41f, 1.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, indices);
eglSwapBuffers(engine->display, engine->surface);
With this, I get a pink (clear colour) background. I'm not sure what I'm doing wrong.
Here are my vertex data and shaders
float points[] =
{
    -0.2f,  0.6f, 0.0f,
     0.0f,  1.0f,
     0.5f,  0.5f, 0.0f,
     1.0f,  1.0f,
    -0.5f, -0.5f, 0.0f,
     0.0f,  0.0f,
     0.5f, -0.5f, 0.0f,
     1.0f,  0.0f
};

unsigned short indices[] =
{
    0, 2, 1, 2, 3, 1
};
std::string vertex_shader =
"#version 300 es \n"
"in vec3 position; \n"
"void main () { \n"
" gl_Position = vec4 (position, 1.0); \n"
"} \n";
std::string fragment_shader =
"#version 300 es \n"
"precision highp float; \n"
"out vec4 frag_colour; \n"
"void main () { \n"
" frag_colour = vec4 (0.5, 0.0, 0.5, 1.0); \n"
"} \n";
OK, I figured it out. There isn't anything wrong with my shaders or vertex array. The problem was that I didn't tell EGL to create an OpenGL ES 2 context using EGL_CONTEXT_CLIENT_VERSION.
Check here -> Khronos Specification, page 43 (of pdf) for more info
Sample from specification:
EGLContext eglCreateContext(EGLDisplay dpy,
                            EGLConfig config, EGLContext share_context,
                            const EGLint *attrib_list);
If attrib_list is left NULL, the default is an OpenGL ES 1.x context, and the shaders will not work in that context.
So, what you need to do is create an attribute list. Something along the lines of:
EGLint contextAttributes[] =
{
    EGL_CONTEXT_CLIENT_VERSION, 2,
    EGL_NONE
};
and pass that to eglCreateContext:
p_context = eglCreateContext(display, config, NULL, contextAttributes);
Basically, I was so unsure of my ability with vertex buffers that I focused on them for a long time.
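Putting it together, the context creation ends up along these lines (a sketch; display, config and surface come from the usual eglGetDisplay / eglChooseConfig / eglCreateWindowSurface steps, and LOGE is just a stand-in logging macro):

const EGLint contextAttributes[] =
{
    EGL_CONTEXT_CLIENT_VERSION, 2,   // request an OpenGL ES 2.0 context
    EGL_NONE
};

p_context = eglCreateContext(display, config, EGL_NO_CONTEXT, contextAttributes);
if (p_context == EGL_NO_CONTEXT)
{
    // eglGetError() tells you why it failed (e.g. EGL_BAD_CONFIG)
    LOGE("eglCreateContext failed: 0x%x", eglGetError());
}
eglMakeCurrent(display, surface, surface, p_context);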
If I have a custom OpenGL ES renderer prepared:
public void onDrawFrame(GL10 gl)
{
    gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
    // here I want to draw a line from [0, 0, 0] to [1, 0, 0]
}
What is the easiest and shortest way to draw a line?
In ordinary OpenGL it is:
glBegin(GL_LINES);
glVertex3f(0, 0, 0);
glVertex3f(1, 0, 0);
glEnd();
But how can I get the same effect with OpenGL ES?
I am new to OpenGL ES 2.0, but I created a line class.
public class Line {
    private FloatBuffer VertexBuffer;

    private final String VertexShaderCode =
            // This matrix member variable provides a hook to manipulate
            // the coordinates of the objects that use this vertex shader
            "uniform mat4 uMVPMatrix;" +
            "attribute vec4 vPosition;" +
            "void main() {" +
            // the matrix must be included as a modifier of gl_Position
            " gl_Position = uMVPMatrix * vPosition;" +
            "}";

    private final String FragmentShaderCode =
            "precision mediump float;" +
            "uniform vec4 vColor;" +
            "void main() {" +
            " gl_FragColor = vColor;" +
            "}";

    protected int GlProgram;
    protected int PositionHandle;
    protected int ColorHandle;
    protected int MVPMatrixHandle;

    // number of coordinates per vertex in this array
    static final int COORDS_PER_VERTEX = 3;
    static float LineCoords[] = {
            0.0f, 0.0f, 0.0f,
            1.0f, 0.0f, 0.0f
    };

    private final int VertexCount = LineCoords.length / COORDS_PER_VERTEX;
    private final int VertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex

    // Set color with red, green, blue and alpha (opacity) values
    float color[] = { 0.0f, 0.0f, 0.0f, 1.0f };

    public Line() {
        // initialize vertex byte buffer for shape coordinates
        ByteBuffer bb = ByteBuffer.allocateDirect(
                // (number of coordinate values * 4 bytes per float)
                LineCoords.length * 4);
        // use the device hardware's native byte order
        bb.order(ByteOrder.nativeOrder());

        // create a floating point buffer from the ByteBuffer
        VertexBuffer = bb.asFloatBuffer();
        // add the coordinates to the FloatBuffer
        VertexBuffer.put(LineCoords);
        // set the buffer to read the first coordinate
        VertexBuffer.position(0);

        int vertexShader = ArRenderer.loadShader(GLES20.GL_VERTEX_SHADER, VertexShaderCode);
        int fragmentShader = ArRenderer.loadShader(GLES20.GL_FRAGMENT_SHADER, FragmentShaderCode);

        GlProgram = GLES20.glCreateProgram();             // create empty OpenGL ES Program
        GLES20.glAttachShader(GlProgram, vertexShader);   // add the vertex shader to program
        GLES20.glAttachShader(GlProgram, fragmentShader); // add the fragment shader to program
        GLES20.glLinkProgram(GlProgram);                  // creates OpenGL ES program executables
    }

    public void SetVerts(float v0, float v1, float v2, float v3, float v4, float v5) {
        LineCoords[0] = v0;
        LineCoords[1] = v1;
        LineCoords[2] = v2;
        LineCoords[3] = v3;
        LineCoords[4] = v4;
        LineCoords[5] = v5;

        VertexBuffer.put(LineCoords);
        // set the buffer to read the first coordinate
        VertexBuffer.position(0);
    }

    public void SetColor(float red, float green, float blue, float alpha) {
        color[0] = red;
        color[1] = green;
        color[2] = blue;
        color[3] = alpha;
    }

    public void draw(float[] mvpMatrix) {
        // Add program to OpenGL ES environment
        GLES20.glUseProgram(GlProgram);

        // get handle to vertex shader's vPosition member
        PositionHandle = GLES20.glGetAttribLocation(GlProgram, "vPosition");

        // Enable a handle to the triangle vertices
        GLES20.glEnableVertexAttribArray(PositionHandle);

        // Prepare the triangle coordinate data
        GLES20.glVertexAttribPointer(PositionHandle, COORDS_PER_VERTEX,
                GLES20.GL_FLOAT, false,
                VertexStride, VertexBuffer);

        // get handle to fragment shader's vColor member
        ColorHandle = GLES20.glGetUniformLocation(GlProgram, "vColor");

        // Set color for drawing the triangle
        GLES20.glUniform4fv(ColorHandle, 1, color, 0);

        // get handle to shape's transformation matrix
        MVPMatrixHandle = GLES20.glGetUniformLocation(GlProgram, "uMVPMatrix");
        ArRenderer.checkGlError("glGetUniformLocation");

        // Apply the projection and view transformation
        GLES20.glUniformMatrix4fv(MVPMatrixHandle, 1, false, mvpMatrix, 0);
        ArRenderer.checkGlError("glUniformMatrix4fv");

        // Draw the line
        GLES20.glDrawArrays(GLES20.GL_LINES, 0, VertexCount);

        // Disable vertex array
        GLES20.glDisableVertexAttribArray(PositionHandle);
    }
}
And then, in my renderer class, I create my Line objects and add them to a container, so they get drawn by iterating over the items and calling the Line.draw method in onDrawFrame (see the sketch after the snippet below).
Here are some lines I create to make a horizon:
Line eastHorz = new Line();
eastHorz.SetVerts(10f, 10f, 0f, 10f, -10f, 0f);
eastHorz.SetColor(.8f, .8f, 0f, 1.0f);
Line northHorz = new Line();
northHorz.SetVerts(-10f, 10f, 0f, 10f, 10f, 0f);
northHorz.SetColor(0.8f, 0.8f, 0f, 1.0f);
Line westHorz = new Line();
westHorz.SetVerts(-10f, -10f, 0f, -10f, 10f, 0f);
westHorz.SetColor(0.8f, 0.8f, 0f, 1.0f);
Line southHorz = new Line();
southHorz.SetVerts(-10f, -10f, 0f, 10f, -10f, 0f);
southHorz.SetColor(0.8f, 0.8f, 0f, 1.0f);
Lines.add(eastHorz);
Lines.add(northHorz);
Lines.add(westHorz);
Lines.add(southHorz);
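The draw loop in onDrawFrame then just iterates over that container; roughly (a sketch, assuming Lines is a List<Line> and mMVPMatrix is the combined view-projection matrix):

@Override
public void onDrawFrame(GL10 gl) {
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
    // ... update the camera / mMVPMatrix here ...
    for (Line line : Lines) {
        line.draw(mMVPMatrix);
    }
}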
ArRenderer is my renderer class, which holds the Lines, the camera position, etc., and implements GLSurfaceView.Renderer. The loadShader method is:
public static int loadShader(int type, String shaderCode) {
    // create a vertex shader type (GLES20.GL_VERTEX_SHADER)
    // or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
    int shader = GLES20.glCreateShader(type);

    // add the source code to the shader and compile it
    GLES20.glShaderSource(shader, shaderCode);
    GLES20.glCompileShader(shader);

    return shader;
}
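Note that loadShader as written doesn't check whether compilation succeeded, so a bad shader just hands back an unusable handle. If you want errors surfaced, something like this can go before the return shader; line (a sketch, using android.util.Log):

// report compile errors instead of failing silently
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
    Log.e("loadShader", GLES20.glGetShaderInfoLog(shader));
    GLES20.glDeleteShader(shader);
    return 0;
}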
Thanks Rodney Lambert for the Line class you've provided. However, it would also be nice to show a simpler call in onDrawFrame, something like:
Line vertLine = new Line();
vertLine.SetVerts(-0.5f, 0.5f, 0f, -0.5f, -0.5f, 0f);
vertLine.SetColor(.8f, .8f, 0f, 1.0f);
vertLine.draw(mMVPMatrix);
vertLine.SetVerts(-0.5f, 0.5f, 0f, -0.5f, -0.5f, 0f);
definitely creates a line that is visible inside the viewport.
I am trying to set up a projection matrix that scales the screen and gives me my own coordinate system. For some reason I don't think any of my matrix calls are working... The three functions I am using are:
Matrix.orthoM(mProjMatrix, 0, 0, 1520, 0, 1000, -1, 10);
Matrix.setLookAtM(mVMatrix, 0, 0, 0, 1.0f, 0.0f, 0f, 0f, 0f, 1.0f, 0.0f);
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
Are they cancelling each other out? Is anything wrong with them? (The full rendering class code is at the end.)
My main goal in doing this is eventually getting to a situation where, when I make a square, I can provide coordinates such as (200, 100, 0) // x, y, z that are not limited to the range -1 to 1.
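Just to make that concrete: with the ortho range above, a point like (200, 100, 0) should come out around (-0.74, -0.8) in normalized device coordinates once the matrix is actually applied. A quick sanity check of that mapping (illustrative only):

// x: 2 * 200 / 1520 - 1 ≈ -0.74,   y: 2 * 100 / 1000 - 1 = -0.8
float[] world = { 200f, 100f, 0f, 1f };
float[] ndc = new float[4];
Matrix.multiplyMV(ndc, 0, mMVPMatrix, 0, world, 0);
// ndc[0] ≈ -0.74 and ndc[1] ≈ -0.8 if the matrices are really being applied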
Here is my full rendering class:
public class MyRenderer implements Renderer {
    private static final String TAG = "MyRenderer";
    Square square;

    private final float[] mMVPMatrix = new float[16];
    private final float[] mProjMatrix = new float[16];
    private final float[] mVMatrix = new float[16];
    private final float[] mRotationMatrix = new float[16];

    private int camWidth, camHeight;

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        GLES20.glClearColor(0.0f, 0.0f, 1.0f, 0.5f);
        camWidth = 480;
        camHeight = 320;
        // initialize a square
        square = new Square();
    }

    @Override
    public void onDrawFrame(GL10 nope) {
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

        // set camera position
        GLES20.glViewport(0, 0, camWidth, camHeight);
        Matrix.orthoM(mProjMatrix, 0, 0, 1520, 0, 1000, -10, 999999);
        Matrix.setLookAtM(mVMatrix, 0, 0, 0, 1.0f, 0.0f, 0f, 0f, 0f, 1.0f, 0.0f);
        Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);

        square.draw(mMVPMatrix);
    }

    @Override
    public void onSurfaceChanged(GL10 nope, int width, int height) {
        GLES20.glViewport(0, 0, camWidth, camHeight);
        Matrix.orthoM(mProjMatrix, 0, 0, 1520, 0, 1000, -10, 999999);
        Matrix.setLookAtM(mVMatrix, 0, 0, 0, 1.0f, 0.0f, 0f, 0f, 0f, 1.0f, 0.0f);
    }

    public static int loadShader(int type, String shaderCode) {
        // create a vertex shader type (GLES20.GL_VERTEX_SHADER)
        // or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
        int shader = GLES20.glCreateShader(type);

        // add the source code to the shader and compile it
        GLES20.glShaderSource(shader, shaderCode);
        GLES20.glCompileShader(shader);

        return shader;
    }
}
My Square class (wasn't sure if needed or not, but just to be safe :) ) -
public class Square {
    private final String vertexShaderCode =
            "attribute vec4 vPosition;" +
            "void main() {" +
            " gl_Position = vPosition;" +
            "}";

    private final String fragmentShaderCode =
            "precision mediump float;" +
            "uniform vec4 vColor;" +
            "void main() {" +
            " gl_FragColor = vColor;" +
            "}";

    static final int COORDS_PER_VERTEX = 3;
    static float triangleCoords[] = { // in counterclockwise order:
            -0.5f,  0.5f, 0.0f, // top left
            -0.5f, -0.5f, 0.0f, // bottom left
             0.5f, -0.5f, 0.0f, // bottom right
             0.5f,  0.5f, 0.0f
    };
    private short drawOrder[] = { 0, 1, 2, 0, 2, 3 };

    private final int vertexCount = triangleCoords.length / COORDS_PER_VERTEX;
    private final int vertexStride = COORDS_PER_VERTEX * 4; // bytes per vertex

    // red-green-blue-alpha
    float color[] = { 0.63f, 0.76f, 0.22f, 1.0f };

    private final int mProgram;
    private int mPositionHandle;
    private int mColorHandle;
    private int mMVPMatrixHandle;

    private FloatBuffer vertexBuffer;
    private ShortBuffer drawListBuffer;

    public Square() {
        ByteBuffer bb = ByteBuffer.allocateDirect(
                // # of coords values * 4 bytes per float
                triangleCoords.length * 4);
        // use native byte order
        bb.order(ByteOrder.nativeOrder());
        // create a floating point buffer from the ByteBuffer
        vertexBuffer = bb.asFloatBuffer();
        // add coordinates to the FloatBuffer
        vertexBuffer.put(triangleCoords);
        // set the buffer to read first coordinate
        vertexBuffer.position(0);

        ByteBuffer dlb = ByteBuffer.allocateDirect(drawOrder.length * 2);
        dlb.order(ByteOrder.nativeOrder());
        drawListBuffer = dlb.asShortBuffer();
        drawListBuffer.put(drawOrder);
        drawListBuffer.position(0);

        int vertexShader = ChizRenderer.loadShader(GLES20.GL_VERTEX_SHADER,
                vertexShaderCode);
        int fragmentShader = ChizRenderer.loadShader(GLES20.GL_FRAGMENT_SHADER,
                fragmentShaderCode);

        mProgram = GLES20.glCreateProgram();
        GLES20.glAttachShader(mProgram, vertexShader);
        GLES20.glAttachShader(mProgram, fragmentShader);
        GLES20.glLinkProgram(mProgram);
    }

    public void draw(float[] mvpMatrix) {
        // Add program to OpenGL ES environment
        GLES20.glUseProgram(mProgram);

        // get handle to vertex shader's vPosition member
        mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");

        // Enable a handle to the triangle vertices
        GLES20.glEnableVertexAttribArray(mPositionHandle);

        // Prepare the triangle coordinate data
        GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,
                GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);

        // get handle to fragment shader's vColor member
        mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");

        // set color for drawing the triangle
        GLES20.glUniform4fv(mColorHandle, 1, color, 0);

        // get handle to shape's transformation matrix
        mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");

        // apply the projection and view transformation
        GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);

        // Draw the triangles
        GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length,
                GLES20.GL_UNSIGNED_SHORT, drawListBuffer);

        // disable vertex array
        GLES20.glDisableVertexAttribArray(mPositionHandle);
    }
}
And lastly, just so you could have some visuals:
This is how it looks on my phone with and without the three matrix functions mentioned before. It also seems that the only thing that made any difference to the width and height was GLES20.glViewport(0, 0, camWidth, camHeight);
It seems as if the matrix is doing nothing.
Looks like you adapted the sample to a square. Couple of issues here:
Call glViewport only in onSurfaceChanged, and only with the width and height it is given.
The vertex shader code does not use uMVPMatrix. You would have seen this by checking the value of mMVPMatrixHandle (it is -1 for uniforms that don't exist, see here). A sketch of the fix follows this list.
After the program has been linked, the locations of the shader variables are fixed, so the code can fetch them once rather than on every draw call.
Then, you'll need to adapt the coordinates of the square...
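For the second point, the vertex shader needs to actually use the matrix, along the lines of the Line class shown earlier (a sketch of the change):

private final String vertexShaderCode =
        "uniform mat4 uMVPMatrix;" +
        "attribute vec4 vPosition;" +
        "void main() {" +
        " gl_Position = uMVPMatrix * vPosition;" +
        "}";

And for the third point, the handles can be fetched once in the Square constructor, right after glLinkProgram(mProgram), instead of in every draw() call:

// fetch locations once; the values don't change after linking
mPositionHandle  = GLES20.glGetAttribLocation(mProgram, "vPosition");
mColorHandle     = GLES20.glGetUniformLocation(mProgram, "vColor");
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");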