I was following an OpenGL tutorial (https://developer.android.com/training/graphics/opengl/touch.html#angle) and I think I did everything right, but the triangle doesn't rotate on the display. I tried to debug it: mGLView.requestRender(); is called with the right values, but the image on the screen never updates.
public class OpenGL extends Fragment {
private GLSurfaceView mGLView;
private final float TOUCH_SCALE_FACTOR = 180.0f / 320;
private float mPreviousX;
private float mPreviousY;
private OpenGLRenderer mRenderer;
public int height = 0;
public int width = 0;
public OpenGL() {
// Required empty public constructor
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
mGLView = new GLSurfaceView(getActivity());
mGLView.setEGLContextClientVersion(2);
mRenderer = new OpenGLRenderer();
mGLView.setRenderer(mRenderer);
// Render the view only when there is a change in the drawing data
mGLView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
DisplayMetrics displayMetrics = new DisplayMetrics();
((Activity) getContext()).getWindowManager()
.getDefaultDisplay()
.getMetrics(displayMetrics);
height = displayMetrics.heightPixels;
width = displayMetrics.widthPixels;
mGLView.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View v, MotionEvent e) {
// MotionEvent reports input details from the touch screen
// and other input controls. In this case, you are only
// interested in events where the touch position changed.
float x = e.getX();
float y = e.getY();
switch (e.getAction()) {
case MotionEvent.ACTION_MOVE:
float dx = x - mPreviousX;
float dy = y - mPreviousY;
// reverse direction of rotation above the mid-line
if (y > height / 2) {
dx = dx * -1;
}
// reverse direction of rotation to left of the mid-line
if (x < width / 2) {
dy = dy * -1;
}
mRenderer.setAngle(
mRenderer.getAngle() +
((dx + dy) * TOUCH_SCALE_FACTOR));
mGLView.requestRender();
}
mPreviousX = x;
mPreviousY = y;
return true;
}
});
return mGLView;
}
@Override
public void onResume() {
super.onResume();
mGLView.onResume();
}
@Override
public void onPause() {
super.onPause();
mGLView.onPause();
}
}
class OpenGLRenderer implements GLSurfaceView.Renderer {
private Triangle mRectangle;
private OpenGL mOpenGL = new OpenGL();
// mMVPMatrix is an abbreviation for "Model View Projection Matrix"
private final float[] mMVPMatrix = new float[16];
private final float[] mProjectionMatrix = new float[16];
private final float[] mViewMatrix = new float[16];
private float[] mRotationMatrix = new float[16];
float[] scratch = new float[16];
public volatile float mAngle;
public float getAngle() {
return mAngle;
}
public void setAngle(float angle) {
mAngle = angle;
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
// Set the background frame color
//GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
GLES20.glViewport(0, 0, mOpenGL.width, mOpenGL.height);
float ratio = (float) mOpenGL.width / mOpenGL.height;
// this projection matrix is applied to object coordinates
// in the onDrawFrame() method
Matrix.frustumM(mProjectionMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
mRectangle = new Triangle();
}
public void onDrawFrame(GL10 gl) {
// Redraw background color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
// Set the camera position (View matrix)
Matrix.setLookAtM(mViewMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
// Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mRotationMatrix, 0, mViewMatrix, 0);
// Create a rotation for the triangle
// long time = SystemClock.uptimeMillis() % 4000L;
// float angle = 0.090f * ((int) time);
Matrix.setRotateM(mRotationMatrix, 0, mAngle, 0, 0, -1.0f);
// Combine the rotation matrix with the projection and camera view
// Note that the mMVPMatrix factor *must be first* in order
// for the matrix multiplication product to be correct.
Matrix.multiplyMM(scratch, 0, mMVPMatrix, 0, mRotationMatrix, 0);
// Draw triangle
mRectangle.drawMatrix(scratch);
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
GLES20.glViewport(0, 0, width, height);
}
public static int loadShader(int type, String shaderCode){
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
}
class Triangle {
private final String vertexShaderCodeMatrix =
// This matrix member variable provides a hook to manipulate
// the coordinates of the objects that use this vertex shader
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"void main() {" +
// the matrix must be included as a modifier of gl_Position
// Note that the uMVPMatrix factor *must be first* in order
// for the matrix multiplication product to be correct.
" gl_Position = uMVPMatrix * vPosition;" +
"}";
// Use to access and set the view transformation
private int mMVPMatrixHandle;
private final String vertexShaderCode =
"attribute vec4 vPosition;" +
"void main() {" +
" gl_Position = vPosition;" +
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
"void main() {" +
" gl_FragColor = vColor;" +
"}";
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
static float triangleCoords[] = { // in counterclockwise order:
0.0f, 0.622008459f, 0.0f, // top
-0.5f, -0.311004243f, 0.0f, // bottom left
0.5f, -0.311004243f, 0.0f // bottom right
};
// Set color with red, green, blue and alpha (opacity) values
float color[] = { 0.5f, 0.5f, 0.5f, 1.0f };
private final int mProgram;
private short[] indices = {0,1,2,0,2,3};
private FloatBuffer vertexBuffer;
private ShortBuffer indexBuffer;
public Triangle() {
// initialize vertex byte buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(
// (number of coordinate values * 4 bytes per float)
triangleCoords.length * 4);
// use the device hardware's native byte order
bb.order(ByteOrder.nativeOrder());
// create a floating point buffer from the ByteBuffer
vertexBuffer = bb.asFloatBuffer();
// add the coordinates to the FloatBuffer
vertexBuffer.put(triangleCoords);
// set the buffer to read the first coordinate
vertexBuffer.position(0);
int vertexShader = OpenGLRenderer.loadShader(GLES20.GL_VERTEX_SHADER,
vertexShaderCode);
int fragmentShader = OpenGLRenderer.loadShader(GLES20.GL_FRAGMENT_SHADER,
fragmentShaderCode);
// create empty OpenGL ES Program
mProgram = GLES20.glCreateProgram();
// add the vertex shader to program
GLES20.glAttachShader(mProgram, vertexShader);
// add the fragment shader to program
GLES20.glAttachShader(mProgram, fragmentShader);
// creates OpenGL ES program executables
GLES20.glLinkProgram(mProgram);
}
private int mPositionHandle;
private int mColorHandle;
private final int vertexCount = triangleCoords.length / COORDS_PER_VERTEX;
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per coordinate, 12 bytes per vertex
public void draw() {
// Add program to OpenGL ES environment
GLES20.glUseProgram(mProgram);
// get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
vertexStride, vertexBuffer);
// get handle to fragment shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
// Set color for drawing the triangle
GLES20.glUniform4fv(mColorHandle, 1, color, 0);
// Draw the triangle
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, vertexCount);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
public void drawMatrix(float[] mvpMatrix) { // pass in the calculated transformation matrix
// Add program to OpenGL ES environment
GLES20.glUseProgram(mProgram);
// get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
vertexStride, vertexBuffer);
// get handle to fragment shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
// Set color for drawing the triangle
GLES20.glUniform4fv(mColorHandle, 1, color, 0);
// get handle to shape's transformation matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
// Pass the projection and view transformation to the shader
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
// Draw the triangle
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, vertexCount);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
}
You loaded 'vertexShaderCode' instead of 'vertexShaderCodeMatrix' in the Triangle class.
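A minimal sketch of that fix, assuming the rest of the Triangle constructor stays as posted: compile vertexShaderCodeMatrix (the source that declares uMVPMatrix) instead of vertexShaderCode. Otherwise glGetUniformLocation(mProgram, "uMVPMatrix") returns -1 and the rotation matrix passed to drawMatrix() is silently ignored.
// In the Triangle() constructor: use the shader source that declares uMVPMatrix.
int vertexShader = OpenGLRenderer.loadShader(GLES20.GL_VERTEX_SHADER,
        vertexShaderCodeMatrix);   // was: vertexShaderCode
int fragmentShader = OpenGLRenderer.loadShader(GLES20.GL_FRAGMENT_SHADER,
        fragmentShaderCode);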
Related
I am trying to draw a textured sprite with OpenGL ES 2.0 on Android.
The demo works fine when I set an ortho projection from -scr_w/2 to +scr_w/2 and from -scr_h/2 to +scr_h/2, with the origin in the middle of the screen.
However, when I switch to the coordinate system I actually want, 480 (scr_w) wide by 720 (scr_h) high with the origin in the top-left corner of the screen,
the texture rendering goes wrong.
Matrix.orthoM(mProjectionMatrix, 0, 0, scr_w, 0, scr_h, -1, 1); // then texture rendering doesn't work?
I have also tried setting Matrix.setLookAtM differently, to look from (240, 360, 5) toward (240, 360, 0):
//Matrix.setLookAtM(mViewMatrix, 0, scr_w/2, scr_h/2, 5, scr_w/2, scr_h/2, 0f, 0f, 1.0f, 0.0f); // then texture rendering doesn't work?
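For reference only (not a confirmed fix for the texture problem), a top-left-origin pixel coordinate system is usually set up by flipping the bottom/top arguments of orthoM and leaving the view matrix as identity. This sketch also assumes the conventional gl_Position = uMVPMatrix * vPosition order, whereas the vertex shader below multiplies in the reverse order.
// Sketch: pixel coordinates with the origin in the top-left corner, +Y pointing down.
Matrix.orthoM(mProjectionMatrix, 0,
        0, scr_w,      // left, right
        scr_h, 0,      // bottom, top (flipped so y = 0 is the top edge)
        -1, 1);        // near, far (keeps z = 0 visible)
Matrix.setIdentityM(mViewMatrix, 0);   // no camera transform needed
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mViewMatrix, 0);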
The Renderer class:
public class SimpleRenderer implements GLSurfaceView.Renderer{
private final Context ctx;
static int scr_w = 480;
static int scr_h = 720;
private final float[] mViewMatrix = new float[16];
private final float[] mProjectionMatrix = new float[16];
private final float[] mMVPMatrix = new float[16];
private Sprite spt1;
SimpleRenderer(final Context context)
{
this.ctx = context;
}
public void onSurfaceCreated(GL10 unused, EGLConfig config)
{
//Set the background color
GLES20.glClearColor(0.0f, 0.0f, 2.0f, 1.0f);
//Disable depth test
GLES20.glDisable(GLES20.GL_DEPTH_TEST);
// Set alpha blend on
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ONE_MINUS_SRC_ALPHA);
spt1 = new Sprite(ctx);
}
public void onSurfaceChanged(GL10 unused, int width, int height)
{
GLES20.glViewport(0, 0, width, height);
Matrix.orthoM(mProjectionMatrix, 0, -scr_w/2, scr_w/2,-scr_h/2, scr_h/2, -1, 1);
// Matrix.orthoM(mProjectionMatrix, 0, 0, scr_w, 0, scr_h, -1, 1); // DOESN'T WORK?
Matrix.setLookAtM(mViewMatrix, 0, 0, 0, 5, 0, 0, 0f, 0f, 1.0f, 0.0f);
//Matrix.setLookAtM(mViewMatrix, 0, scr_w/2, scr_h/2, 5, scr_w/2, scr_h/2, 0f, 0f, 1.0f, 0.0f); // DOESN'T WORK
// Mix the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mViewMatrix, 0);
}
public void onDrawFrame(GL10 unused)
{
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
spt1.Draw(mMVPMatrix);
}}
Maybe the Sprite class messes up the shader or fragment handling for 2D coordinates?
public class Sprite
{
//Reference to Activity Context
private final Context mActivityContext;
//Added for Textures
private final FloatBuffer mCubeTextureCoordinates;
private int mTextureUniformHandle;
private int mTextureCoordinateHandle;
private final int mTextureCoordinateDataSize = 2;
private int mTextureDataHandle;
private final String vertexShaderCode =
"attribute vec2 a_TexCoordinate; \n" +
"varying vec2 v_TexCoordinate; \n" +
"uniform mat4 uMVPMatrix; \n" +
"attribute vec4 vPosition; \n" +
"void main() { \n" +
" gl_Position = vPosition * uMVPMatrix; \n" +
" v_TexCoordinate = a_TexCoordinate; \n" +
"} \n";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
"uniform sampler2D u_Texture;" +
"varying vec2 v_TexCoordinate;" +
"void main() {" +
"gl_FragColor = (vColor * texture2D(u_Texture, v_TexCoordinate));" +
"}";
private final int shaderProgram;
private final FloatBuffer vertexBuffer;
private final ShortBuffer drawListBuffer;
private int mPositionHandle;
private int mColorHandle;
private int mMVPMatrixHandle;
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 2;
static float spriteCoords[] = {
-50f, 50f, // top left
-50f, -50f, // bottom left
50f, -50f, // bottom right
50f, 50f //top right
};
private short drawOrder[] = { 0, 1, 2, 0, 2, 3 }; //Order to draw vertices
private final int vertexStride = COORDS_PER_VERTEX * 4; //Bytes per vertex
float color[] = { 1f, 1f, 1f, 1.0f };
public Sprite(final Context activityContext)
{
mActivityContext = activityContext;
//Initialize Vertex Byte Buffer
ByteBuffer bb = ByteBuffer.allocateDirect(spriteCoords.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(spriteCoords);
vertexBuffer.position(0);
final float[] TextureCoordinate =
{
1f, 0f,
1f, 1f,
0f, 1f,
0f, 0f
};
mCubeTextureCoordinates = ByteBuffer.allocateDirect(TextureCoordinate.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
mCubeTextureCoordinates.put(TextureCoordinate).position(0);
//Initialize byte buffer for the draw list
ByteBuffer dlb = ByteBuffer.allocateDirect(spriteCoords.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
shaderProgram = GLES20.glCreateProgram();
GLES20.glAttachShader(shaderProgram, vertexShader);
GLES20.glAttachShader(shaderProgram, fragmentShader);
//Texture Code
GLES20.glBindAttribLocation(shaderProgram, 0, "a_TexCoordinate");
GLES20.glLinkProgram(shaderProgram);
//Load the texture
mTextureDataHandle = loadTexture(mActivityContext, R.drawable.cathead);
}
public void Draw(float[] mvpMatrix)
{
//Add program to OpenGL ES Environment
GLES20.glUseProgram(shaderProgram);
//Get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(shaderProgram, "vPosition");
//Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
//Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
//Get Handle to Fragment Shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(shaderProgram, "vColor");
//Set the Color for drawing the triangle
GLES20.glUniform4fv(mColorHandle, 1, color, 0);
//Set Texture Handles and bind Texture
mTextureUniformHandle = GLES20.glGetAttribLocation(shaderProgram, "u_Texture");
mTextureCoordinateHandle = GLES20.glGetAttribLocation(shaderProgram, "a_TexCoordinate");
//Set the active texture unit to texture unit 0.
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
//Bind the texture to this unit.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);
//Tell the texture uniform sampler to use this texture in the shader by binding to texture unit 0.
GLES20.glUniform1i(mTextureUniformHandle, 0);
//Pass in the texture coordinate information
mCubeTextureCoordinates.position(0);
GLES20.glVertexAttribPointer(mTextureCoordinateHandle, mTextureCoordinateDataSize, GLES20.GL_FLOAT, false, 0, mCubeTextureCoordinates);
GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
//Get Handle to Shape's Transformation Matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(shaderProgram, "uMVPMatrix");
//Apply the projection and view transformation
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
GLES20.glEnable(GLES20.GL_BLEND_COLOR);
GLES20.glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
GLES20.glDepthMask(false);
//Draw the triangle
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length, GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
//Disable Vertex Array
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
public static int loadTexture(final Context context, final int resourceId)
{
final int[] textureHandle = new int[1];
GLES20.glGenTextures(1, textureHandle, 0);
if (textureHandle[0] != 0)
{
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inScaled = false; // No pre-scaling
// Read in the resource
final Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), resourceId, options);
// Bind to the texture in OpenGL
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);
// Set filtering
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
// Load the bitmap into the bound texture.
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
// Recycle the bitmap, since its data has been loaded into OpenGL.
bitmap.recycle();
}
if (textureHandle[0] == 0)
{
throw new RuntimeException("Error loading texture.");
}
return textureHandle[0];
}
public static int loadShader(int type, String shaderCode)
{
int shader = GLES20.glCreateShader(type);
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
}
I'm using OpenGL on Android to visualize a large point-cloud dataset stored in an array. As a first step, I wanted to show a few points on the screen to check that the pipeline is correct. I render with a VBO, but only one point shows up in the middle of the screen. I have implemented the code below, which I found on the internet here.
Here is my PointRenderer class:
public class PointRenderer implements GLSurfaceView.Renderer {
private float[] mModelMatrix = new float[16];
private float[] mViewMatrix = new float[16];
private float[] mProjectionMatrix = new float[16];
private float[] mMVPMatrix = new float[16];
private int mMVPMatrixHandle;
private int mPositionHandle;
float[] vertices = {
0.0f, 0.5f, 0.0f,
-100.5f, -100.5f, 0.0f,
100.5f, -100.5f, 0.0f
};
FloatBuffer vertexBuf;
@Override
public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
vertexBuf = ByteBuffer.allocateDirect(vertices.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
vertexBuf.put(vertices).position(0);
// Set the background clear color to gray.
GLES20.glClearColor(0.5f, 0.5f, 0.5f, 0.5f);
float eyeX = 0.0f;
float eyeY = 0.0f;
float eyeZ = 0.0f;
float centerX = 0.0f;
float centerY = 0.0f;
float centerZ = -5.0f;
float upX = 0.0f;
float upY = 1.0f;
float upZ = 0.0f;
// Set the view matrix. This matrix can be said to represent the camera position.
// NOTE: In OpenGL 1, a ModelView matrix is used, which is a combination of a model and
// view matrix. In OpenGL 2, we can keep track of these matrices separately if we choose.
Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, centerX, centerY, centerZ, upX, upY, upZ);
final String vertexShader =
"uniform mat4 u_MVPMatrix; \n"
+ "attribute vec4 a_Position; \n"
+ "void main() \n"
+ "{ \n"
+ " gl_Position = u_MVPMatrix \n"
+ " * a_Position; \n"
+ " gl_PointSize = 10.0; \n"
+ "} \n";
final String fragmentShader =
"precision mediump float; \n"
+ "void main() \n"
+ "{ \n"
+ " gl_FragColor = vec4(1.0, \n"
+ " 1.0, 1.0, 1.0); \n"
+ "} \n";
// Load in the vertex shader.
int vertexShaderHandle = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
if (vertexShaderHandle != 0) {
// Pass in the shader source.
GLES20.glShaderSource(vertexShaderHandle, vertexShader);
// Compile the shader.
GLES20.glCompileShader(vertexShaderHandle);
// Get the compilation status.
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(vertexShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
// If the compilation failed, delete the shader.
if (compileStatus[0] == 0) {
GLES20.glDeleteShader(vertexShaderHandle);
vertexShaderHandle = 0;
}
}
if (vertexShaderHandle == 0) {
throw new RuntimeException("Error creating vertex shader.");
}
// Load in the fragment shader shader.
int fragmentShaderHandle = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
if (fragmentShaderHandle != 0) {
// Pass in the shader source.
GLES20.glShaderSource(fragmentShaderHandle, fragmentShader);
// Compile the shader.
GLES20.glCompileShader(fragmentShaderHandle);
// Get the compilation status.
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(fragmentShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
// If the compilation failed, delete the shader.
if (compileStatus[0] == 0) {
GLES20.glDeleteShader(fragmentShaderHandle);
fragmentShaderHandle = 0;
}
}
if (fragmentShaderHandle == 0) {
throw new RuntimeException("Error creating fragment shader.");
}
// Create a program object and store the handle to it.
int programHandle = GLES20.glCreateProgram();
if (programHandle != 0) {
// Bind the vertex shader to the program.
GLES20.glAttachShader(programHandle, vertexShaderHandle);
// Bind the fragment shader to the program.
GLES20.glAttachShader(programHandle, fragmentShaderHandle);
// Bind attributes
GLES20.glBindAttribLocation(programHandle, 0, "a_Position");
// Link the two shaders together into a program.
GLES20.glLinkProgram(programHandle);
// Get the link status.
final int[] linkStatus = new int[1];
GLES20.glGetProgramiv(programHandle, GLES20.GL_LINK_STATUS, linkStatus, 0);
// If the link failed, delete the program.
if (linkStatus[0] == 0) {
GLES20.glDeleteProgram(programHandle);
programHandle = 0;
}
}
if (programHandle == 0) {
throw new RuntimeException("Error creating program.");
}
// Set program handles. These will later be used to pass in values to the program.
mMVPMatrixHandle = GLES20.glGetUniformLocation(programHandle, "u_MVPMatrix");
mPositionHandle = GLES20.glGetAttribLocation(programHandle, "a_Position");
// Tell OpenGL to use this program when rendering.
GLES20.glUseProgram(programHandle);
}
@Override
public void onSurfaceChanged(GL10 glUnused, int width, int height) {
// Set the OpenGL viewport to the same size as the surface.
GLES20.glViewport(0, 0, width, height);
// Create a new perspective projection matrix. The height will stay the same
// while the width will vary as per aspect ratio.
final float ratio = (float) width / height;
final float left = -ratio;
final float right = ratio;
final float bottom = -1.0f;
final float top = 1.0f;
final float near = 1.0f;
final float far = 100.0f;
Matrix.frustumM(mProjectionMatrix, 0, left, right, bottom, top, near, far);
}
@Override
public void onDrawFrame(GL10 glUnused) {
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
Matrix.setIdentityM(mModelMatrix, 0);
//Push to the distance - note this will have no effect on a point size
Matrix.translateM(mModelMatrix, 0, 0.0f, 0.0f, -5.0f);
Matrix.multiplyMV(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
Matrix.multiplyMV(mMVPMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
//Send the vertex
GLES20.glVertexAttribPointer(mPositionHandle, 3, GLES20.GL_FLOAT, false, 0, vertexBuf);
GLES20.glEnableVertexAttribArray(mPositionHandle);
//Draw the point
GLES20.glDrawArrays(GLES20.GL_POINTS, 0, vertices.length/3);
}
}
I suspect the coordinates for two of your points are outside of the visible frustum. Try replacing the 100.5 and -100.5 with something smaller and see if that helps.
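A sketch of that suggestion, keeping the same layout as the vertices array above (the exact values are just an illustration): at a model distance of 5 with this frustum, coordinates within roughly a unit of the origin stay inside the visible volume.
float[] vertices = {
        0.0f,  0.5f, 0.0f,   // top
       -0.5f, -0.5f, 0.0f,   // bottom left
        0.5f, -0.5f, 0.0f    // bottom right
};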
You have to enable both the vertex attribute and the texture coordinate attribute:
GLES20.glEnableVertexAttribArray(mVertexAttrib)
GLES20.glEnableVertexAttribArray(mTexCoordAttrib)
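In the Sprite.Draw() method above, the corresponding handles are mPositionHandle and mTextureCoordinateHandle, so with the question's variable names the two calls would look roughly like this:
GLES20.glEnableVertexAttribArray(mPositionHandle);           // vertex positions
GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);  // texture coordinates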
My code is almost exactly the same as the code on the website, with a few variable name changes:
package com.example.opengltraining;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
public class MyGL20Renderer implements GLSurfaceView.Renderer{
private Triangle triangle1;
private final float[] mProjMatrix = new float[16];
private final float[] mVMatrix = new float[16];
private final float[] mMVPMatrix = new float[16];
private final float[] mRotationMatrix = new float[16];
public static int loadShader(int type, String shaderCode){
//create vertex shader type
//or fragment shader type
int shader = GLES20.glCreateShader(type);
//add source code to shader and compile
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
public void redrawTriangle(float[]triangleCoords){
triangle1.changeTriangle(triangleCoords);
}
public void onSurfaceCreated(GL10 unused, EGLConfig config){
//set background frame color
GLES20.glClearColor(1f,0.5f, 0.25f, .5f);
triangle1 = new Triangle();
}
public void onDrawFrame(GL10 unused) {
// Draw background color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
// Set the camera position (View matrix)
Matrix.setLookAtM(mVMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
// Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
// Draw triangle
triangle1.draw(mMVPMatrix);
}
public void onSurfaceChanged(GL10 unused, int width, int height){
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width / height;
//applies projection matrix to object coordinates
Matrix.frustumM(mProjMatrix, 0, -ratio, ratio, -1, 1, 2, 7);
}
}
class Triangle{
//shaders - vertex
private final String vertexShaderCode =
"attribute vec4 vPosition;" +
"void main() {" +
" gl_Position = vPosition;" +
"}";
//shaders - fragment
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
"void main() {" +
" gl_FragColor = vColor;" +
"}";
private FloatBuffer vertexBuffer;
private final int mProgram;
private int mPositionHandle;
private int mColorHandle;
private int mMVPMatrixHandle;
//number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
static float triangleCoords[] = { // in counterclockwise order:
0.0f, 0.5f, 0.0f, // top
-0.5f, -0.25f, 0.0f, // bottom left
0.5f, -0.25f, 0.0f // bottom right
};
private final int vertexCount = triangleCoords.length / COORDS_PER_VERTEX;
private final int vertexStride = COORDS_PER_VERTEX * 4;
//color of triangle
float color[] = { 0f, 1f, 0f, 1.0f };
public void draw(float[] mvpMatrix) {
// Add program to OpenGL environment
GLES20.glUseProgram(mProgram);
// get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
vertexStride, vertexBuffer);
// get handle to fragment shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
// Set color for drawing the triangle
GLES20.glUniform4fv(mColorHandle, 1, color, 0);
// get handle to shape's transformation matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
// Apply the projection and view transformation
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
// Draw the triangle
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, vertexCount);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
public void changeTriangle(float [] newTriangleCoords){
ByteBuffer bb = ByteBuffer.allocateDirect(newTriangleCoords.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(newTriangleCoords);
vertexBuffer.position(0);
}
public Triangle(){
//init vertex buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(
//number of coordinate values * 4 bytes per float
triangleCoords.length * 4);
//use hardware's native byte order
bb.order(ByteOrder.nativeOrder());
//create floating point buffer
vertexBuffer = bb.asFloatBuffer();
//add coords to floatbuffer
vertexBuffer.put(triangleCoords);
//set buffer to read first coord
vertexBuffer.position(0);
int vertexShader = MyGL20Renderer.loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = MyGL20Renderer.loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
//creates program for the triangle to go in
mProgram = GLES20.glCreateProgram();
GLES20.glAttachShader(mProgram, vertexShader);
GLES20.glAttachShader(mProgram, fragmentShader);
GLES20.glLinkProgram(mProgram);
}
}
I've already tested this code against the original code without the added projection and camera views, and there is no difference between the two. Also, for some reason this doesn't work when I test it on an emulator, but it does work on an actual Android phone (although the projection and camera views still don't work).
So I followed this tutorial from Google: http://developer.android.com/resources/tutorials/opengl/opengl-es20.html
The triangle is supposed to rotate in response to touch events, but nothing happens. It also didn't rotate in the earlier step when it was supposed to rotate on its own. Not sure what the problem is. I just copied their code.
The whole project is only two files:
HelloOpenGLES20Activity.java
package com.opengl.es20;
import android.app.Activity;
import android.content.Context;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.view.MotionEvent;
public class HelloOpenGLES20Activity extends Activity {
private GLSurfaceView mGLView;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Create a GLSurfaceView instance and set it
// as the ContentView for this Activity
mGLView = new HelloOpenGLES20SurfaceView(this);
setContentView(mGLView);
}
@Override
protected void onPause() {
super.onPause();
// The following call pauses the rendering thread.
// If your OpenGL application is memory intensive,
// you should consider de-allocating objects that
// consume significant memory here.
mGLView.onPause();
}
@Override
protected void onResume() {
super.onResume();
// The following call resumes a paused rendering thread.
// If you de-allocated graphic objects for onPause()
// this is a good place to re-allocate them.
mGLView.onResume();
}
}
class HelloOpenGLES20SurfaceView extends GLSurfaceView {
private final float TOUCH_SCALE_FACTOR = 180.0f / 320;
private HelloOpenGLES20Renderer mRenderer;
private float mPreviousX;
private float mPreviousY;
public HelloOpenGLES20SurfaceView(Context context){
super(context);
// Create an OpenGL ES 2.0 context.
setEGLContextClientVersion(2);
// set the mRenderer member
mRenderer = new HelloOpenGLES20Renderer();
setRenderer(mRenderer);
// Render the view only when there is a change
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
@Override
public boolean onTouchEvent(MotionEvent e) {
// MotionEvent reports input details from the touch screen
// and other input controls. In this case, you are only
// interested in events where the touch position changed.
float x = e.getX();
float y = e.getY();
switch (e.getAction()) {
case MotionEvent.ACTION_MOVE:
float dx = x - mPreviousX;
float dy = y - mPreviousY;
// reverse direction of rotation above the mid-line
if (y > getHeight() / 2) {
dx = dx * -1 ;
}
// reverse direction of rotation to left of the mid-line
if (x < getWidth() / 2) {
dy = dy * -1 ;
}
mRenderer.mAngle += (dx + dy) * TOUCH_SCALE_FACTOR;
requestRender();
}
mPreviousX = x;
mPreviousY = y;
return true;
}
}
HelloOpenGLES20Renderer.java
package com.opengl.es20;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.os.SystemClock;
public class HelloOpenGLES20Renderer implements GLSurfaceView.Renderer {
private FloatBuffer triangleVB;
private final String vertexShaderCode =
"uniform mat4 uMVPMatrix; \n" +
"attribute vec4 vPosition; \n" +
"void main(){ \n" +
" gl_Position = uMVPMatrix * vPosition; \n" +
"} \n";
private final String fragmentShaderCode =
"precision mediump float; \n" +
"void main(){ \n" +
" gl_FragColor = vec4 (0.63671875, 0.76953125, 0.22265625, 1.0); \n" +
"} \n";
private int mProgram;
private int maPositionHandle;
private int muMVPMatrixHandle;
private float[] mMVPMatrix = new float[16];
private float[] mMMatrix = new float[16];
private float[] mVMatrix = new float[16];
private float[] mProjMatrix = new float[16];
public float mAngle;
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
// Set the background frame color
GLES20.glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
initShapes();
mProgram = GLES20.glCreateProgram(); // create empty OpenGL Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mProgram); // creates OpenGL program executables
// get handle to the vertex shader's vPosition member
maPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
}
public void onDrawFrame(GL10 unused) {
// Redraw background color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
// Add program to OpenGL environment
GLES20.glUseProgram(mProgram);
// Prepare the triangle data
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, 12, triangleVB);
GLES20.glEnableVertexAttribArray(maPositionHandle);
// Create a rotation for the triangle
// long time = SystemClock.uptimeMillis() % 4000L;
// float angle = 0.090f * ((int) time);
Matrix.setRotateM(mMMatrix, 0, mAngle, 0, 0, 1.0f);
Matrix.multiplyMM(mMVPMatrix, 0, mVMatrix, 0, mMMatrix, 0);
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mMVPMatrix, 0);
// Apply a ModelView Projection transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
// Draw the triangle
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 3);
}
public void onSurfaceChanged(GL10 unused, int width, int height) {
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width/height;
Matrix.frustumM(mProjMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
Matrix.setLookAtM(mVMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
}
private void initShapes(){
float triangleCoords[] = {
// X, Y, Z
-0.5f, -0.25f, 0,
0.5f, -0.25f, 0,
0.0f, 0.559016994f, 0
};
// initialize vertex Buffer for triangle
ByteBuffer vbb = ByteBuffer.allocateDirect(
// (# of coordinate values * 4 bytes per float)
triangleCoords.length * 4);
vbb.order(ByteOrder.nativeOrder());// use the device hardware's native byte order
triangleVB = vbb.asFloatBuffer(); // create a floating point buffer from the ByteBuffer
triangleVB.put(triangleCoords); // add the coordinates to the FloatBuffer
triangleVB.position(0); // set the buffer to read the first coordinate
}
private int loadShader(int type, String shaderCode){
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
}
Matrix.setRotateM(mMMatrix, 0, mAngle, 0, 0, 1.0f);
Matrix.multiplyMM(mMVPMatrix, 0, mVMatrix, 0, mMMatrix, 0);
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mMVPMatrix, 0);
// Apply a ModelView Projection transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
I believe this final multiplyMM is your problem. You are resetting your mMVPMatrix to be
mMVPMatrix = mProjMatrix * mVMatrix, and you are losing your model transformation matrix mMMatrix which would apply your rotation. Also, verify that mAngle is non-zero.
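A minimal sketch of onDrawFrame with that last multiply removed; a temporary array is used so multiplyMM never writes into one of its own operands (the android.opengl.Matrix documentation leaves in-place results undefined):
float[] scratch = new float[16];
Matrix.setRotateM(mMMatrix, 0, mAngle, 0, 0, 1.0f);           // model: rotation only
Matrix.multiplyMM(scratch, 0, mVMatrix, 0, mMMatrix, 0);      // view * model
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, scratch, 0); // projection * view * model
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);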
I'm new here and to Android, and I'm having some trouble with this tutorial.
The errors I'm getting are that getHeight, getWidth and requestRender are undefined for the type HelloOpenGLES20Activity. The code that produces the errors is the part the tutorial has you add at the end. I searched for about an hour, but most of the results about getHeight are about it returning 0.
Here is the class (HelloOpenGLES20Activity):
package com.example.HelloOpenGLES20;
import android.app.Activity;
import android.content.Context;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.view.Display;
import android.view.MotionEvent;
public class HelloOpenGLES20Activity extends Activity {
private GLSurfaceView mGLView;
private final float TOUCH_SCALE_FACTOR = 180.0f / 320;
private HelloOpenGLES20Renderer mRenderer;
private float mPreviousX;
private float mPreviousY;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Create a GLSurfaceView instance and set it
// as the ContentView for this Activity
mGLView = new HelloOpenGLES20SurfaceView(this);
setContentView(mGLView);
}
@Override
public boolean onTouchEvent(MotionEvent e) {
// MotionEvent reports input details from the touch screen
// and other input controls. In this case, you are only
// interested in events where the touch position changed.
float x = e.getX();
float y = e.getY();
switch (e.getAction()) {
case MotionEvent.ACTION_MOVE:
float dx = x - mPreviousX;
float dy = y - mPreviousY;
// reverse direction of rotation above the mid-line
if (y > getHeight() / 2) {
dx = dx * -1 ;
}
// reverse direction of rotation to left of the mid-line
if (x < getWidth() / 2) {
dy = dy * -1 ;
}
mRenderer.mAngle += (dx + dy) * TOUCH_SCALE_FACTOR;
requestRender();
}
mPreviousX = x;
mPreviousY = y;
return true;
}
@Override
protected void onPause() {
super.onPause();
// The following call pauses the rendering thread.
// If your OpenGL application is memory intensive,
// you should consider de-allocating objects that
// consume significant memory here.
mGLView.onPause();
}
@Override
protected void onResume() {
super.onResume();
// The following call resumes a paused rendering thread.
// If you de-allocated graphic objects for onPause()
// this is a good place to re-allocate them.
mGLView.onResume();
}
class HelloOpenGLES20SurfaceView extends GLSurfaceView {
private HelloOpenGLES20Renderer mRenderer;
public HelloOpenGLES20SurfaceView(Context context){
super(context);
// Create an OpenGL ES 2.0 context.
setEGLContextClientVersion(2);
// set the mRenderer member
mRenderer = new HelloOpenGLES20Renderer();
setRenderer(mRenderer);
// Render the view only when there is a change
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
}
}
And here is the class HelloOpenGLES20Renderer:
package com.example.HelloOpenGLES20;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
public class HelloOpenGLES20Renderer implements GLSurfaceView.Renderer {
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
// Set the background frame color
GLES20.glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
// initialize the triangle vertex array
initShapes();
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
mProgram = GLES20.glCreateProgram(); // create empty OpenGL Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mProgram); // creates OpenGL program executables
// get handle to the vertex shader's vPosition member
maPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
}
public void onDrawFrame(GL10 unused) {
// Redraw background color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
//Add program to OpenGL environment
GLES20.glUseProgram(mProgram);
//Prepare the triangle data
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, 12, triangleVB);
GLES20.glEnableVertexAttribArray(maPositionHandle);
//Create a rotation for the triangle (Boring! Comment this out:)
//long time = SystemClock.uptimeMillis() % 4000L;
//float angle = 0.090f * ((int) time);
//Use the mAngle member as the rotation value
Matrix.setRotateM(mMMatrix, 0, mAngle, 0, 0, 1.0f);
Matrix.multiplyMM(mMVPMatrix, 0, mVMatrix, 0, mMMatrix, 0);
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mMVPMatrix, 0);
//Apply a ModelView Projection transformation
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
//Draw the triangle
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 3);
}
public void onSurfaceChanged(GL10 unused, int width, int height) {
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width / height;
// this projection matrix is applied to object coordinates
// in the onDrawFrame() method
Matrix.frustumM(mProjMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
Matrix.setLookAtM(mVMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
}
private FloatBuffer triangleVB;
private int mProgram;
private int maPositionHandle;
private int muMVPMatrixHandle;
private float[] mMVPMatrix = new float[16];
private float[] mMMatrix = new float[16];
private float[] mVMatrix = new float[16];
private float[] mProjMatrix = new float[16];
public float mAngle;
private void initShapes(){
float triangleCoords[] = {
// X, Y, Z
-0.5f, -0.25f, 0,
0.5f, -0.25f, 0,
0.0f, 0.559016994f, 0
};
// initialize vertex Buffer for triangle
ByteBuffer vbb = ByteBuffer.allocateDirect(
// (# of coordinate values * 4 bytes per float)
triangleCoords.length * 4);
vbb.order(ByteOrder.nativeOrder());// use the device hardware's native byte order
triangleVB = vbb.asFloatBuffer(); // create a floating point buffer from the ByteBuffer
triangleVB.put(triangleCoords); // add the coordinates to the FloatBuffer
triangleVB.position(0); // set the buffer to read the first coordinate
}
private final String vertexShaderCode =
//This matrix member variable provides a hook to manipulate
//the coordinates of the objects that use this vertex shader
"uniform mat4 uMVPMatrix; \n" +
"attribute vec4 vPosition; \n" +
"void main(){ \n" +
//the matrix must be included as a modifier of gl_Position
" gl_Position = uMVPMatrix * vPosition; \n" +
"} \n";
private final String fragmentShaderCode =
"precision mediump float; \n" +
"void main(){ \n" +
" gl_FragColor = vec4 (0.63671875, 0.76953125, 0.22265625, 1.0); \n" +
"} \n";
private int loadShader(int type, String shaderCode){
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
}
Thanks in advance.
Try using the following code to get the width and height of the screen:
WindowManager w = getWindowManager();
Display d = w.getDefaultDisplay();
int width = d.getWidth();
int height = d.getHeight();
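If you keep onTouchEvent in the Activity, the mid-line checks and the render request would then look roughly like this (a sketch, assuming width and height were stored in fields during onCreate and mGLView is the GLSurfaceView):
if (y > height / 2) {
    dx = dx * -1;
}
if (x < width / 2) {
    dy = dy * -1;
}
mRenderer.mAngle += (dx + dy) * TOUCH_SCALE_FACTOR;
mGLView.requestRender();   // requestRender() belongs to GLSurfaceView, not to the Activity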
Try reading the tutorial again: the onTouchEvent is placed in the wrong class.
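For comparison, in the tutorial the touch handler lives inside the GLSurfaceView subclass, where getWidth(), getHeight() and requestRender() are all inherited methods (imports as in the HelloOpenGLES20Activity listing above); roughly:
class HelloOpenGLES20SurfaceView extends GLSurfaceView {
    private final float TOUCH_SCALE_FACTOR = 180.0f / 320;
    private HelloOpenGLES20Renderer mRenderer;
    private float mPreviousX;
    private float mPreviousY;
    public HelloOpenGLES20SurfaceView(Context context) {
        super(context);
        setEGLContextClientVersion(2);       // OpenGL ES 2.0 context
        mRenderer = new HelloOpenGLES20Renderer();
        setRenderer(mRenderer);
        setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
    }
    @Override
    public boolean onTouchEvent(MotionEvent e) {
        float x = e.getX();
        float y = e.getY();
        if (e.getAction() == MotionEvent.ACTION_MOVE) {
            float dx = x - mPreviousX;
            float dy = y - mPreviousY;
            if (y > getHeight() / 2) dx = -dx;   // reverse above the mid-line
            if (x < getWidth() / 2)  dy = -dy;   // reverse left of the mid-line
            mRenderer.mAngle += (dx + dy) * TOUCH_SCALE_FACTOR;
            requestRender();                     // resolves on GLSurfaceView
        }
        mPreviousX = x;
        mPreviousY = y;
        return true;
    }
}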
Hope this helped