Working through some OpenGL-ES tutorials, using the Android emulator. I've gotten up to texture mapping and am having some trouble mapping to a cube. Is it possible to map a texture to all faces of a cube that has 8 vertices and 12 triangles for the 6 faces as described below?
// Use half as we are going for a 0,0,0 centre.
width /= 2;
height /= 2;
depth /= 2;
float vertices[] = { -width, -height, depth, // 0
width, -height, depth, // 1
width, height, depth, // 2
-width, height, depth, // 3
-width, -height, -depth, // 4
width, -height, -depth, // 5
width, height, -depth, // 6
-width, height, -depth, // 7
};
short indices[] = {
// Front
0,1,2,
0,2,3,
// Back
5,4,7,
5,7,6,
// Left
4,0,3,
4,3,7,
// Right
1,5,6,
1,6,2,
// Top
3,2,6,
3,6,7,
// Bottom
4,5,1,
4,1,0,
};
float texCoords[] = {
1.0f, 1.0f,
0.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
1.0f, 1.0f,
0.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
1.0f, 1.0f,
0.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
1.0f, 1.0f,
0.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
1.0f, 1.0f,
0.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
1.0f, 1.0f,
0.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
};
I have the front and back faces working correctly; however, none of the other faces show the texture.
Drawing code
public void draw(GL10 gl) {
// Counter-clockwise winding.
gl.glFrontFace(GL10.GL_CCW);
// Enable face culling.
gl.glEnable(GL10.GL_CULL_FACE);
// What faces to remove with the face culling.
gl.glCullFace(GL10.GL_BACK);
// Enable the vertex array for use during rendering.
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
// Specifies the location and data format of an array of vertex
// coordinates to use when rendering.
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, verticesBuffer);
if (normalsBuffer != null) {
// Enable the normal array for use during rendering.
gl.glEnableClientState(GL10.GL_NORMAL_ARRAY);
// Specifies the location and data format of an array of normals to use when rendering.
gl.glNormalPointer(GL10.GL_FLOAT, 0, normalsBuffer);
}
// Set flat color
gl.glColor4f(rgba[0], rgba[1], rgba[2], rgba[3]);
// Smooth color
if ( colorBuffer != null ) {
// Enable the color array buffer to be used during rendering.
gl.glEnableClientState(GL10.GL_COLOR_ARRAY);
// Point out where the color buffer is.
gl.glColorPointer(4, GL10.GL_FLOAT, 0, colorBuffer);
}
// Use textures?
if ( textureBuffer != null) {
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
gl.glEnable(GL10.GL_TEXTURE_2D);
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureBuffer);
}
// Translation and rotation before drawing
gl.glTranslatef(x, y, z);
gl.glRotatef(rx, 1, 0, 0);
gl.glRotatef(ry, 0, 1, 0);
gl.glRotatef(rz, 0, 0, 1);
gl.glDrawElements(GL10.GL_TRIANGLES, numOfIndices, GL10.GL_UNSIGNED_SHORT, indicesBuffer);
// Disable the vertices buffer.
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
gl.glDisableClientState(GL10.GL_COLOR_ARRAY);
gl.glDisableClientState(GL10.GL_NORMAL_ARRAY);
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
// Disable face culling.
gl.glDisable(GL10.GL_CULL_FACE);
}
You have to use 24 vertices. In OpenGL, a vertex is more than just a position; it is the collection of all vertex attributes, and every vertex array is accessed with the same index.
The infamous cube example is something almost everyone finds inefficient when starting with OpenGL, but in real-world, more complex models the degree of duplication is quite low.
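To make that concrete, here is a minimal sketch of what the duplicated arrays could look like for the cube in the question, assuming the same halved width/height/depth values; the UV orientation is a guess, so flip the V values if the image comes out upside down.
// Sketch only: 24 vertices (4 per face) so each face can carry its own UVs.
float vertices[] = {
// Front
-width,-height, depth,   width,-height, depth,   width, height, depth,  -width, height, depth,
// Back
 width,-height,-depth,  -width,-height,-depth,  -width, height,-depth,   width, height,-depth,
// Left
-width,-height,-depth,  -width,-height, depth,  -width, height, depth,  -width, height,-depth,
// Right
 width,-height, depth,   width,-height,-depth,   width, height,-depth,   width, height, depth,
// Top
-width, height, depth,   width, height, depth,   width, height,-depth,  -width, height,-depth,
// Bottom
-width,-height,-depth,   width,-height,-depth,   width,-height, depth,  -width,-height, depth,
};
// The same 0..1 quad mapping, repeated once per face.
float texCoords[] = {
0,0, 1,0, 1,1, 0,1,   // Front
0,0, 1,0, 1,1, 0,1,   // Back
0,0, 1,0, 1,1, 0,1,   // Left
0,0, 1,0, 1,1, 0,1,   // Right
0,0, 1,0, 1,1, 0,1,   // Top
0,0, 1,0, 1,1, 0,1,   // Bottom
};
// Two CCW triangles per face; the indices now stay within that face's 4 vertices.
short indices[] = {
0, 1, 2,   0, 2, 3,     // Front
4, 5, 6,   4, 6, 7,     // Back
8, 9,10,   8,10,11,     // Left
12,13,14,  12,14,15,    // Right
16,17,18,  16,18,19,    // Top
20,21,22,  20,22,23,    // Bottom
};
The draw call itself stays exactly as in the question (glDrawElements with 36 indices); only the buffers get bigger.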
I have "only" modified the GLES20TriangleRenderer.java file into the SDK example BasicGLSurfaceView, compile it and test it on two Android devices, an Android Phone and a Nexus 7 and this work good on the two devices :)
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
Modified by YLP (06 January 2014) to handle a rotated, texture-mapped cube
*/
package com.example.android.basicglsurfaceview;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.GLUtils;
import android.opengl.Matrix;
import android.os.SystemClock;
import android.util.Log;
class GLES20TriangleRenderer implements GLSurfaceView.Renderer {
public GLES20TriangleRenderer(Context context) {
mContext = context;
// mTriangleVertices = ByteBuffer.allocateDirect(mTriangleVerticesData.length * FLOAT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asFloatBuffer();
// mTriangleVertices.put(mTriangleVerticesData).position(0);
mTriangleVertices = ByteBuffer.allocateDirect(cubeVerticesStrip.length * FLOAT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asFloatBuffer();
mTriangleVertices.put(cubeVerticesStrip).position(0);
mTriangleTexcoords = ByteBuffer.allocateDirect(cubeTexCoordsStrip.length * FLOAT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asFloatBuffer();
mTriangleTexcoords.put(cubeTexCoordsStrip).position(0);
}
public void onDrawFrame(GL10 glUnused) {
// Ignore the passed-in GL10 interface, and use the GLES20
// class's static methods instead.
GLES20.glClearColor(0.0f, 0.0f, 1.0f, 1.0f);
GLES20.glClear( GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glEnable( GLES20.GL_DEPTH_TEST );
GLES20.glDepthFunc( GLES20.GL_LEQUAL );
GLES20.glDepthMask( true );
GLES20.glUseProgram(mProgram);
checkGlError("glUseProgram");
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureID);
// mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
// GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
// TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
// checkGlError("glVertexAttribPointer maPosition");
// mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
// GLES20.glEnableVertexAttribArray(maPositionHandle);
// checkGlError("glEnableVertexAttribArray maPositionHandle");
// GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
// TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
// checkGlError("glVertexAttribPointer maTextureHandle");
// GLES20.glEnableVertexAttribArray(maTextureHandle);
// checkGlError("glEnableVertexAttribArray maTextureHandle");
// From http://www.endodigital.com/opengl-es-2-0-on-the-iphone/part-fourteen-creating-the-cube
// (but slightly modified)
mTriangleVertices.position(0);
// GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, 0, mTriangleVertices);
GLES20.glEnableVertexAttribArray(maPositionHandle);
mTriangleTexcoords.position(0);
// GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false, 0, mTriangleTexcoords);
GLES20.glEnableVertexAttribArray(maTextureHandle);
long time = SystemClock.uptimeMillis() % 4000L;
float angle = 0.090f * ((int) time);
float scale = 0.7f;
Matrix.setRotateM(mMMatrix, 0, angle, 0, 0, 1.0f);
// YLP : add other movement cycles
Matrix.rotateM(mMMatrix, 0, angle, 1.0f, 0.0f, 0.0f );
// Matrix.rotateM(mMMatrix, 0, angle, 0.0f, 1.0f, 0.0f );
// float scale = (float)( Math.abs( Math.sin( ((float)time) * (6.28f/4000.0f) ) ));
Matrix.scaleM(mMMatrix, 0, scale, scale, scale);
Matrix.multiplyMM(mMVPMatrix, 0, mVMatrix, 0, mMMatrix, 0);
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
// Some tests with only some triangles
// GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 3); // worked initially, but only one triangle
// GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 6); // worked initially, but only two triangles
// GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); // GL_QUADS does not exist in GL 2.0 :(
// GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 8); // GL_QUADS does not exist in GL 2.0 :(
// Draw the cube
// TODO : make only one glDrawArrays() call instead of one per face
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 4, 4);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 8, 4);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 12, 4);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 16, 4);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 20, 4);
checkGlError("glDrawArrays");
}
public void onSurfaceChanged(GL10 glUnused, int width, int height) {
// Ignore the passed-in GL10 interface, and use the GLES20
// class's static methods instead.
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width / height;
Matrix.frustumM(mProjMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
}
public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
// Ignore the passed-in GL10 interface, and use the GLES20
// class's static methods instead.
mProgram = createProgram(mVertexShader, mFragmentShader);
if (mProgram == 0) {
return;
}
maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
checkGlError("glGetAttribLocation aPosition");
if (maPositionHandle == -1) {
throw new RuntimeException("Could not get attrib location for aPosition");
}
maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
checkGlError("glGetAttribLocation aTextureCoord");
if (maTextureHandle == -1) {
throw new RuntimeException("Could not get attrib location for aTextureCoord");
}
muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
checkGlError("glGetUniformLocation uMVPMatrix");
if (muMVPMatrixHandle == -1) {
throw new RuntimeException("Could not get attrib location for uMVPMatrix");
}
/*
* Create our texture. This has to be done each time the
* surface is created.
*/
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
mTextureID = textures[0];
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureID);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,
GLES20.GL_REPEAT);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,
GLES20.GL_REPEAT);
InputStream is = mContext.getResources()
.openRawResource(R.raw.robot);
Bitmap bitmap;
try {
bitmap = BitmapFactory.decodeStream(is);
} finally {
try {
is.close();
} catch(IOException e) {
// Ignore.
}
}
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
bitmap.recycle();
Matrix.setLookAtM(mVMatrix, 0, 0, 0, -5, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
}
private int loadShader(int shaderType, String source) {
int shader = GLES20.glCreateShader(shaderType);
if (shader != 0) {
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
Log.e(TAG, "Could not compile shader " + shaderType + ":");
Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
}
return shader;
}
private int createProgram(String vertexSource, String fragmentSource) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == 0) {
return 0;
}
int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if (pixelShader == 0) {
return 0;
}
int program = GLES20.glCreateProgram();
if (program != 0) {
GLES20.glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
GLES20.glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
GLES20.glLinkProgram(program);
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
Log.e(TAG, "Could not link program: ");
Log.e(TAG, GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
program = 0;
}
}
return program;
}
private void checkGlError(String op) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, op + ": glError " + error);
throw new RuntimeException(op + ": glError " + error);
}
}
private static final int FLOAT_SIZE_BYTES = 4;
private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
private static final int TRIANGLE_TEXCOORDS_DATA_STRIDE_BYTES = 2 * FLOAT_SIZE_BYTES;
private final float[] mTriangleVerticesData =
{
// X, Y, Z, U, V
// initial triangle from the source
// -1.0f, -0.5f, 0, -0.5f, 0.0f,
// 1.0f, -0.5f, 0, 1.5f, -0.0f,
// 0.0f, 1.11803399f, 0, 0.5f, 1.61803399f,
// YLP : transform this into two triangles to form a quad
// -1, -1, 0, 0, 0,
// 1, -1, 0, 1, 0,
// -1, 1, 0, 0, 1,
// 1, 1, 0, 1, 1,
// -1, 1, 0, 0, 1,
// 1, -1, 0, 1, 0
// YLP : use two quads with GL_TRIANGLE_STRIP
// Doesn't work because it creates an accordion effect :(
-1, -1, -1, 0, 0,
1, -1, -1, 1, 0,
-1, 1, -1, 0, 1,
1, 1, -1, 1, 1,
-1, -1, 1, 0, 0,
1, -1, 1, 1, 0,
-1, 1, 1, 0, 1,
1, 1, 1, 1, 1,
};
// From http://www.endodigital.com/opengl-es-2-0-on-the-iphone/part-fourteen-creating-the-cube/
// (only changed "static const GLfloat" to "private final float")
private final float cubeVerticesStrip[] = {
// Front face
-1,-1,1, 1,-1,1, -1,1,1, 1,1,1,
// Right face
1,-1,1, 1,-1,-1, 1,1,1, 1,1,-1,
// Back face
1,-1,-1, -1,-1,-1, 1,1,-1, -1,1,-1,
// Left face
-1,-1,-1, -1,-1,1, -1,1,-1, -1,1,1,
// Bottom face
-1,-1,-1, 1,-1,-1, -1,-1,1, 1,-1,1,
// Top face
-1,1,1, 1,1,1, -1,1,-1, 1,1,-1
};
private final float cubeTexCoordsStrip[] = {
// Front face
0,0, 1,0, 0,1, 1,1,
// Right face
0,0, 1,0, 0,1, 1,1,
// Back face
0,0, 1,0, 0,1, 1,1,
// Left face
0,0, 1,0, 0,1, 1,1,
// Bottom face
0,0, 1,0, 0,1, 1,1,
// Top face
0,0, 1,0, 0,1, 1,1
};
private FloatBuffer mTriangleVertices;
private FloatBuffer mTriangleTexcoords;
private final String mVertexShader =
"uniform mat4 uMVPMatrix;\n" +
"attribute vec4 aPosition;\n" +
"attribute vec2 aTextureCoord;\n" +
"varying vec2 vTextureCoord;\n" +
"void main() {\n" +
" gl_Position = uMVPMatrix * aPosition;\n" +
" vTextureCoord = aTextureCoord;\n" +
"}\n";
private final String mFragmentShader =
"precision mediump float;\n" +
"varying vec2 vTextureCoord;\n" +
"uniform sampler2D sTexture;\n" +
"void main() {\n" +
" gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
"}\n";
private float[] mMVPMatrix = new float[16];
private float[] mProjMatrix = new float[16];
private float[] mMMatrix = new float[16];
private float[] mVMatrix = new float[16];
private float[] mMMatrix2 = new float[16];
private int mProgram;
private int mTextureID;
private int muMVPMatrixHandle;
private int maPositionHandle;
private int maTextureHandle;
private Context mContext;
private static String TAG = "GLES20TriangleRenderer";
}
=> I managed to implement this in just a few hours and it works :)
==> so even if Android is not the best platform, it still seems a really good and viable platform, and I'm starting to play a bit more with multimedia development on Android devices :)
Related
I am new to OpenGL ES on Android and am learning it by working through some examples. I am using two programs to draw 3 objects. The following code loads a texture and draws a square, but it appears as a black square instead of showing the texture.
my fragment-shader code
precision mediump float;
uniform sampler2D u_Texture;
varying vec2 v_TexCoordinate;
void main() {
gl_FragColor = texture2D(u_Texture, v_TexCoordinate);
}
my vertex-shader code
attribute vec2 a_TexCoordinate;
varying vec2 v_TexCoordinate;
attribute vec4 a_Position;
uniform mat4 u_Matrix;
void main() {
gl_Position = u_Matrix * a_Position;
v_TexCoordinate = a_TexCoordinate;
}
my object vertex buffer
float [] vBufferFloat = new float[] {
-0.2f, -0.2f, 1f,
0.2f, -0.2f, 1f,
0.2f, 0.2f, 1f,
-0.2f, 0.2f, 1f,
-0.2f, -0.2f, 1f,
};
my texture buffer
float [] texCoordinate = new float[] {
-0.2f, -0.2f,
0.2f, -0.2f,
0.2f, 0.2f,
-0.2f, 0.2f,
-0.2f, -0.2f,
};
my onSurfaceCreated && onDrawFrame code
public void onSurfaceCreated() {
cloudRendereProgram = ShaderHelper.createProgram(mContext, R.raw.sky_texture_vertex_shader, R.raw.sky_texture_fragment_shader);
cloudTextureId = Utils.loadTexture(mContext, com.elpis.gamecontroller.R.drawable.cloud);
aTextureLocation = GLES20.glGetAttribLocation(cloudRendereProgram, "a_TexCoordinate");
uMatrixLocation = GLES20.glGetUniformLocation(cloudRendereProgram, "u_Matrix");
aPositionLocation = GLES20.glGetAttribLocation(cloudRendereProgram, "a_Position");
uTextureLocation = GLES20.glGetUniformLocation(cloudRendereProgram, "u_Texture");
}
public void onDrawFrame() {
float [] mVMatrix = new float[16];
GLES20.glUseProgram(cloudRendereProgram);
GLES20.glVertexAttribPointer(aPositionLocation, 3, GLES20.GL_FLOAT, false, 0, vBuff.buffer);
GLES20.glEnableVertexAttribArray(aPositionLocation);
Matrix.multiplyMM(mVMatrix, 0, modelMatrix, 0, projectionMatrix, 0);
GLES20.glUniformMatrix4fv(uMatrixLocation, 1, false, mVMatrix, 0);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, this.cloudTextureId);
GLES20.glUniform1i(uTextureLocation, 0);
GLES20.glVertexAttribPointer(aTextureLocation, 2, GLES20.GL_FLOAT, false, 0, texBuff.buffer);
GLES20.glEnableVertexAttribArray(aTextureLocation);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_FAN, 0, 5);
}
and my texture loader helper code
public static int loadTexture(Context ctx, int resId) {
final int [] textureHandle = new int[1];
GLES20.glGenTextures(1, textureHandle, 0);
if (textureHandle[0] == 0)
return 0;
final BitmapFactory.Options options = new Options();
options.inScaled = false;
final Bitmap imgTexture = BitmapFactory.decodeResource(ctx.getResources(), resId);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, imgTexture, 0);
imgTexture.recycle();
return textureHandle[0];
}
When I run the Android application, all I see is a black square instead of the cloud texture, so I would appreciate it if anyone could point me in the right direction.
Also, a quick question: is it valid to create multiple OpenGL program objects with different shaders and run them concurrently?
[UPDATE]
The problem was in onDrawFrame(). I had to call vBuff.buffer.position(0) and texBuff.buffer.position(0) to draw the texture correctly, since glVertexAttribPointer reads vertex data starting from the buffer's current position.
public void onDrawFrame() {
float [] mVMatrix = new float[16];
GLES20.glUseProgram(cloudRendereProgram);
// FIX
vBuff.buffer.position(0);
texBuff.buffer.position(0);
// END FIX
GLES20.glVertexAttribPointer(aPositionLocation, 3, GLES20.GL_FLOAT, false, 0, vBuff.buffer);
GLES20.glEnableVertexAttribArray(aPositionLocation);
Matrix.multiplyMM(mVMatrix, 0, modelMatrix, 0, projectionMatrix, 0);
GLES20.glUniformMatrix4fv(uMatrixLocation, 1, false, mVMatrix, 0);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, this.cloudTextureId);
GLES20.glUniform1i(uTextureLocation, 0);
GLES20.glVertexAttribPointer(aTextureLocation, 2, GLES20.GL_FLOAT, false, 0, texBuff.buffer);
GLES20.glEnableVertexAttribArray(aTextureLocation);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_FAN, 0, 5);
}
You're not calling glEnableVertexAttribArray for your texture coords. Add this line to your onDrawFrame():
GLES20.glEnableVertexAttribArray(aTextureLocation);
Call glGenerateMipmap after you upload the texture with texImage2D() in your loadTexture() function, to make sure all mipmap levels are valid:
glGenerateMipmap(GLES20.GL_TEXTURE_2D);
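For example, the upload part of loadTexture() could look like the sketch below. Note that generated levels are only sampled if the minification filter is one of the *_MIPMAP_* modes, so this sketch also switches GL_TEXTURE_MIN_FILTER; that change is my assumption, not part of the original answer.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);
// Use a mipmapped min filter so the generated levels actually get used (assumption).
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, imgTexture, 0);
// Build the full mip chain from level 0 after the upload.
GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
imgTexture.recycle();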
Also, move these calls from your surfaceCreated() function to the start of drawFrame():
aTextureLocation = GLES20.glGetAttribLocation(cloudRendereProgram, "a_TexCoordinate");
uMatrixLocation = GLES20.glGetUniformLocation(cloudRendereProgram, "u_Matrix");
aPositionLocation = GLES20.glGetAttribLocation(cloudRendereProgram, "a_Position");
uTextureLocation = GLES20.glGetUniformLocation(cloudRendereProgram, "u_Texture");
(it could be that these variables are not bound properly or the GL context is not yet properly set up in surfaceCreated())
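In other words, the start of onDrawFrame() would look roughly like this sketch (same field names as in the question):
public void onDrawFrame() {
GLES20.glUseProgram(cloudRendereProgram);
// Query the locations against the program that is actually in use.
aTextureLocation = GLES20.glGetAttribLocation(cloudRendereProgram, "a_TexCoordinate");
uMatrixLocation = GLES20.glGetUniformLocation(cloudRendereProgram, "u_Matrix");
aPositionLocation = GLES20.glGetAttribLocation(cloudRendereProgram, "a_Position");
uTextureLocation = GLES20.glGetUniformLocation(cloudRendereProgram, "u_Texture");
// ... rest of the draw code unchanged ...
}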
A debugging tip for OpenGLES. Add this function to your code (it's from the Android OpenGLES samples):
public static void checkGlError(String glOperation) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, glOperation + ": glError " + error);
throw new RuntimeException(glOperation + ": glError " + error);
}
}
You can call it after each OpenGL ES call and pass in a String with whatever debugging message you want. If anything goes wrong you'll get an exception instead of a silent failure that leaves you scratching your head trying to figure out what went wrong. Make sure to remove it from your final build to avoid force closes.
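Typical usage, with whatever message strings you find helpful (here applied to the draw code from the question):
GLES20.glUseProgram(cloudRendereProgram);
checkGlError("glUseProgram");
GLES20.glVertexAttribPointer(aPositionLocation, 3, GLES20.GL_FLOAT, false, 0, vBuff.buffer);
checkGlError("glVertexAttribPointer aPosition");
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_FAN, 0, 5);
checkGlError("glDrawArrays");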
For performance reasons I moved from canvas.drawBitmap to OpenGL ES for 2D drawing.
This is a 4x1 sprite sheet:
Now, to make it work, I have the following class:
public Vulcan(ScreenObjectsView objectsView, int vulkanSpriteID, Context context) {
this.b = BitmapFactory.decodeResource(context.getResources(), vulkanSpriteID);
// 1x4
height = b.getHeight();
width = b.getWidth()/4;
WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
Display display = wm.getDefaultDisplay();
x = display.getWidth()/2-width/2; // deprecated
y = display.getHeight()-height; // deprecated
}
public void update() {
frameFreq++;
if(frameFreq > 0){
currentFrame = ++currentFrame % 4;
frameFreq = 0;
}
}
@Override
public void draw(Canvas canvas) {
update();
int srcX = currentFrame * width;
Rect src = new Rect(srcX, 0, srcX+width, height);
Rect dst = new Rect(x, y, x+width, y+height);
canvas.drawBitmap(b, src, dst, null);
}
Every so often I take the Rect and shift it from left to right (in a loop):
currentFrame = ++currentFrame % 4;
So far so good.
How can I animate the above-mentioned sprite sheet in OpenGL ES?
I already know how to draw and move objects in OpenGL ES (thanks to a good demo),
but I don't know how to work with sprites.
Any ideas, links, snippets of code?
[Edit]
It doesn't matter whether I use a sprite sheet or 4 separate images (one per frame), and so on.
Strange that I still haven't gotten any answer or direction.
Thank you,
[Edit 2]
Following what Aert says, I implemented the code below and it works.
But it seems messy: too much code for OpenGL ES. For each frame (I have 4), I need to create a FloatBuffer.
Maybe someone has a shorter way, or I did something wrong.
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.opengles.GL10;
public class DevQuestSpriteBase {
private static final String LOG_TAG = "Fess";//DevQuestSpriteBase.class.getSimpleName();
protected int mFrame = 0;
protected int mSwitcher = 0;
private int textureCount = 1; // frame animation
protected int[] textures = new int[textureCount]; // frame animation
// texture and verts
protected FloatBuffer vertexBuffer,
textureBuffer1,
textureBuffer2,
textureBuffer3,
textureBuffer4;
ByteBuffer bb1;
protected float vertices[] = {
0f,0f,0.0f,
1f,0f,0.0f,
0f,1f,0.0f,
1f,1f,0.0f
};
/** 1 frame */
protected float texture1[] = {
0.0f, 1.0f,
0.0f, 0.0f,
0.25f, 1.0f,
0.25f, 0.0f
};
/** 2 frame */
protected float texture2[] = {
0.25f, 1.0f,
0.25f, 0.0f,
0.5f, 1.0f,
0.5f, 0.0f
};
/** 3 frame */
protected float texture3[] = {
0.5f, 1.0f,
0.5f, 0.0f,
0.75f, 1.0f,
0.75f, 0.0f
};
/** 4 frame */
protected float texture4[] = {
0.75f, 1.0f,
0.75f, 0.0f,
1.0f, 1.0f,
1.0f, 0.0f
};
public DevQuestSpriteBase(){
// vertices buffer
bb1 = ByteBuffer.allocateDirect(vertices.length * 4);
bb1.order(ByteOrder.nativeOrder());
vertexBuffer = bb1.asFloatBuffer();
vertexBuffer.put(vertices);
vertexBuffer.position(0);
// texture buffer
bb1 = ByteBuffer.allocateDirect(texture1.length * 4);
bb1.order(ByteOrder.nativeOrder());
textureBuffer1 = bb1.asFloatBuffer();
textureBuffer1.put(texture1);
textureBuffer1.position(0);
//#########################################################
// texture buffer
bb1 = ByteBuffer.allocateDirect(texture2.length * 4);
bb1.order(ByteOrder.nativeOrder());
textureBuffer2 = bb1.asFloatBuffer();
textureBuffer2.put(texture2);
textureBuffer2.position(0);
//#########################################################
// texture buffer
bb1 = ByteBuffer.allocateDirect(texture3.length * 4);
bb1.order(ByteOrder.nativeOrder());
textureBuffer3 = bb1.asFloatBuffer();
textureBuffer3.put(texture3);
textureBuffer3.position(0);
//#########################################################
// texture buffer
bb1 = ByteBuffer.allocateDirect(texture4.length * 4);
bb1.order(ByteOrder.nativeOrder());
textureBuffer4 = bb1.asFloatBuffer();
textureBuffer4.put(texture4);
textureBuffer4.position(0);
}
private void update() {
if(mSwitcher == 5){
mFrame = ++mFrame % 4;
mSwitcher = 0;
// Log.e(LOG_TAG, "DevQuestSpriteBase :: " + mFrame);
}
else{
mSwitcher++;
}
}
public void draw(GL10 gl){
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
if(mFrame == 0){
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureBuffer1);
}
else if(mFrame == 1){
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureBuffer2);
}
else if(mFrame == 2){
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureBuffer3);
}
else if(mFrame == 3){
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureBuffer4);
}
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
//Log.e(LOG_TAG, "DevQuestSpriteBase :: draw");
update();
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
//gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureBuffer1);
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, vertices.length / 3);
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
}
public int[] getTextures() {
return textures;
}
}
Without going into a lot of detail, you need to do the following (assuming you are already drawing a sprite using 4 vertices):
Define the texture coordinates corresponding to the vertices of the sprite for each animation frame, e.g.
texCoordsFrame1 = [0.0f, 0.0f, 0.25f, 0.0f, 0.0f, 1.0f, 0.25f, 1.0f];
Upload the spritesheet texture, e.g.
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, imageData);
Draw using the texture coordinates corresponding to the frame you want to show when required, e.g.
...
glBindTexture(GL_TEXTURE_2D, texture[0]);
glTexCoordPointer(2, GL_FLOAT, 0, texCoordsFrame1);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
Alternatively, you can upload the separate frames as individual textures, but that is undesirable from a performance point of view.
There are a few gotchas:
When using GLES1, you can only use power-of-two textures. In that case you'll have to scale the texture or increase its size to be power-of-two and adjust the texture coordinates.
The bitmap vs GL y-coordinate direction difference is a bit confusing, and you might end up with a vertically flipped sprite.
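Putting the steps above together, and to avoid keeping a separate hard-coded texture-coordinate array per frame, the UVs for frame n of a horizontal N-frame sheet can be computed on the fly. A rough sketch follows; the method name and buffer handling are illustrative, and the V values may need flipping or reordering to match how your quad's vertices are ordered.
// Fill a FloatBuffer with the UVs for one frame of a horizontal N-frame sheet.
private void setFrameTexCoords(FloatBuffer texBuffer, int frame, int frameCount) {
float u0 = (float) frame / frameCount;       // left edge of this frame
float u1 = (float) (frame + 1) / frameCount; // right edge of this frame
float[] coords = {
u0, 1.0f,   // matches vertex (0,0); V flipped because bitmaps are top-down
u1, 1.0f,   // vertex (1,0)
u0, 0.0f,   // vertex (0,1)
u1, 0.0f,   // vertex (1,1)
};
texBuffer.clear();
texBuffer.put(coords);
texBuffer.position(0);
}
With something like this, the draw() method only needs a single texture-coordinate buffer: call setFrameTexCoords(textureBuffer1, mFrame, 4) before glTexCoordPointer instead of switching between four pre-built buffers.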
So I took code from http://www.learnopengles.com/android-lesson-one-getting-started/ and have been trying to draw a square (while still keeping everything else in the background). For whatever reason, it shows up as a triangle. My code is provided below. I knew to change to GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 4), but I can't figure out what else I'm missing.
Furthermore, is there a better way to find out how to use functions within the class GLES20? http://developer.android.com/reference/android/opengl/GLES20.html does a good job providing parameters, but doesn't explain what each parameter does.
I would imagine my error is at the bottom of my code, probably in the function drawSquare, but the entire file will be pasted here in case I'm mistaken.
public class LessonOneRenderer implements GLSurfaceView.Renderer
{
/**
* Store the model matrix. This matrix is used to move models from object space (where each model can be thought
* of being located at the center of the universe) to world space.
*/
private float[] mModelMatrix = new float[16];
/**
* Store the view matrix. This can be thought of as our camera. This matrix transforms world space to eye space;
* it positions things relative to our eye.
*/
private float[] mViewMatrix = new float[16];
/** Store the projection matrix. This is used to project the scene onto a 2D viewport. */
private float[] mProjectionMatrix = new float[16];
/** Allocate storage for the final combined matrix. This will be passed into the shader program. */
private float[] mMVPMatrix = new float[16];
/** Store our model data in a float buffer. */
private final FloatBuffer mTriangle1Vertices;
private final FloatBuffer mTriangle2Vertices;
private final FloatBuffer mTriangle3Vertices;
private final FloatBuffer mSquare1Vertices;
/** This will be used to pass in the transformation matrix. */
private int mMVPMatrixHandle;
/** This will be used to pass in model position information. */
private int mPositionHandle;
/** This will be used to pass in model color information. */
private int mColorHandle;
/** How many bytes per float. */
private final int mBytesPerFloat = 4;
/** How many elements per vertex. */
private final int mStrideBytes = 7 * mBytesPerFloat;
/** Offset of the position data. */
private final int mPositionOffset = 0;
/** Size of the position data in elements. */
private final int mPositionDataSize = 3;
/** Offset of the color data. */
private final int mColorOffset = 3;
/** Size of the color data in elements. */
private final int mColorDataSize = 4;
/**
* Initialize the model data.
*/
public LessonOneRenderer()
{
// Define points for equilateral triangles.
// This triangle is red, green, and blue.
final float[] triangle1VerticesData = {
// X, Y, Z,
// R, G, B, A
-0.5f, -0.25f, 0.0f,
1.0f, 0.0f, 0.0f, 1.0f,
0.5f, -0.25f, 0.0f,
0.0f, 0.0f, 1.0f, 1.0f,
0.0f, 0.559016994f, 0.0f,
0.0f, 1.0f, 0.0f, 1.0f};
// This triangle is yellow, cyan, and magenta.
final float[] triangle2VerticesData = {
// X, Y, Z,
// R, G, B, A
-0.5f, -0.25f, 0.0f,
1.0f, 1.0f, 0.0f, 1.0f,
0.5f, -0.25f, 0.0f,
0.0f, 1.0f, 1.0f, 1.0f,
0.0f, 0.559016994f, 0.0f,
1.0f, 0.0f, 1.0f, 1.0f};
// This triangle is white, gray, and black.
final float[] triangle3VerticesData = {
// X, Y, Z,
// R, G, B, A
-0.5f, -0.25f, 0.0f,
1.0f, 1.0f, 1.0f, 1.0f,
0.5f, -0.25f, 0.0f,
0.5f, 0.5f, 0.5f, 1.0f,
0.0f, 0.559016994f, 0.0f,
0.0f, 0.0f, 0.0f, 1.0f};
final float[] square1VerticesData = {
//topleft
-0.25f, 0.25f, -.5f,
1.0f, 0.0f, 0.0f, 1.0f,
//top right
0.25f,0.25f, -.5f,
0.0f, 1.0f, 0.0f, 1.0f,
//bottom left
-0.25f, -0.25f, -.5f,
1.0f, 0.0f, 0.0f, 1.0f,
//right
0.25f, -0.25f, -.5f,
1.0f, 0.0f, 0.0f, 1.0f};
// Initialize the buffers.
mTriangle1Vertices = ByteBuffer.allocateDirect(triangle1VerticesData.length * mBytesPerFloat)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mTriangle2Vertices = ByteBuffer.allocateDirect(triangle2VerticesData.length * mBytesPerFloat)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mTriangle3Vertices = ByteBuffer.allocateDirect(triangle3VerticesData.length * mBytesPerFloat)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mSquare1Vertices = ByteBuffer.allocateDirect(square1VerticesData.length * mBytesPerFloat)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mTriangle1Vertices.put(triangle1VerticesData).position(0);
mTriangle2Vertices.put(triangle2VerticesData).position(0);
mTriangle3Vertices.put(triangle3VerticesData).position(0);
mSquare1Vertices.put(square1VerticesData).position(0);
}
@Override
public void onSurfaceCreated(GL10 glUnused, EGLConfig config)
{
// Set the background clear color to gray.
GLES20.glClearColor(0.5f, 0.5f, 0.5f, 0.5f);
// Position the eye behind the origin.
final float eyeX = 0.0f;
final float eyeY = 0.0f;
final float eyeZ = 1.5f;
// We are looking toward the distance
final float lookX = 0.0f;
final float lookY = 0.0f;
final float lookZ = -5.0f;
// Set our up vector. This is where our head would be pointing were we holding the camera.
final float upX = 0.0f;
final float upY = 1.0f;
final float upZ = 0.0f;
// Set the view matrix. This matrix can be said to represent the camera position.
// NOTE: In OpenGL 1, a ModelView matrix is used, which is a combination of a model and
// view matrix. In OpenGL 2, we can keep track of these matrices separately if we choose.
Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ);
final String vertexShader =
"uniform mat4 u_MVPMatrix; \n" // A constant representing the combined model/view/projection matrix.
+ "attribute vec4 a_Position; \n" // Per-vertex position information we will pass in.
+ "attribute vec4 a_Color; \n" // Per-vertex color information we will pass in.
+ "varying vec4 v_Color; \n" // This will be passed into the fragment shader.
+ "void main() \n" // The entry point for our vertex shader.
+ "{ \n"
+ " v_Color = a_Color; \n" // Pass the color through to the fragment shader.
// It will be interpolated across the triangle.
+ " gl_Position = u_MVPMatrix \n" // gl_Position is a special variable used to store the final position.
+ " * a_Position; \n" // Multiply the vertex by the matrix to get the final point in
+ "} \n"; // normalized screen coordinates.
final String fragmentShader =
"precision mediump float; \n" // Set the default precision to medium. We don't need as high of a
// precision in the fragment shader.
+ "varying vec4 v_Color; \n" // This is the color from the vertex shader interpolated across the
// triangle per fragment.
+ "void main() \n" // The entry point for our fragment shader.
+ "{ \n"
+ " gl_FragColor = v_Color; \n" // Pass the color directly through the pipeline.
+ "} \n";
// Load in the vertex shader.
int vertexShaderHandle = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
if (vertexShaderHandle != 0)
{
// Pass in the shader source.
GLES20.glShaderSource(vertexShaderHandle, vertexShader);
// Compile the shader.
GLES20.glCompileShader(vertexShaderHandle);
// Get the compilation status.
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(vertexShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
// If the compilation failed, delete the shader.
if (compileStatus[0] == 0)
{
GLES20.glDeleteShader(vertexShaderHandle);
vertexShaderHandle = 0;
}
}
if (vertexShaderHandle == 0)
{
throw new RuntimeException("Error creating vertex shader.");
}
// Load in the fragment shader shader.
int fragmentShaderHandle = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
if (fragmentShaderHandle != 0)
{
// Pass in the shader source.
GLES20.glShaderSource(fragmentShaderHandle, fragmentShader);
// Compile the shader.
GLES20.glCompileShader(fragmentShaderHandle);
// Get the compilation status.
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(fragmentShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
// If the compilation failed, delete the shader.
if (compileStatus[0] == 0)
{
GLES20.glDeleteShader(fragmentShaderHandle);
fragmentShaderHandle = 0;
}
}
if (fragmentShaderHandle == 0)
{
throw new RuntimeException("Error creating fragment shader.");
}
// Create a program object and store the handle to it.
int programHandle = GLES20.glCreateProgram();
if (programHandle != 0)
{
// Bind the vertex shader to the program.
GLES20.glAttachShader(programHandle, vertexShaderHandle);
// Bind the fragment shader to the program.
GLES20.glAttachShader(programHandle, fragmentShaderHandle);
// Bind attributes
GLES20.glBindAttribLocation(programHandle, 0, "a_Position");
GLES20.glBindAttribLocation(programHandle, 1, "a_Color");
// Link the two shaders together into a program.
GLES20.glLinkProgram(programHandle);
// Get the link status.
final int[] linkStatus = new int[1];
GLES20.glGetProgramiv(programHandle, GLES20.GL_LINK_STATUS, linkStatus, 0);
// If the link failed, delete the program.
if (linkStatus[0] == 0)
{
GLES20.glDeleteProgram(programHandle);
programHandle = 0;
}
}
if (programHandle == 0)
{
throw new RuntimeException("Error creating program.");
}
// Set program handles. These will later be used to pass in values to the program.
mMVPMatrixHandle = GLES20.glGetUniformLocation(programHandle, "u_MVPMatrix");
mPositionHandle = GLES20.glGetAttribLocation(programHandle, "a_Position");
mColorHandle = GLES20.glGetAttribLocation(programHandle, "a_Color");
// Tell OpenGL to use this program when rendering.
GLES20.glUseProgram(programHandle);
}
@Override
public void onSurfaceChanged(GL10 glUnused, int width, int height)
{
// Set the OpenGL viewport to the same size as the surface.
GLES20.glViewport(0, 0, width, height);
// Create a new perspective projection matrix. The height will stay the same
// while the width will vary as per aspect ratio.
final float ratio = (float) width / height;
final float left = -ratio;
final float right = ratio;
final float bottom = -1.0f;
final float top = 1.0f;
final float near = 1.0f;
final float far = 10.0f;
Matrix.frustumM(mProjectionMatrix, 0, left, right, bottom, top, near, far);
}
@Override
public void onDrawFrame(GL10 glUnused)
{
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
// Do a complete rotation every 10 seconds.
long time = SystemClock.uptimeMillis() % 10000L;
float angleInDegrees = (360.0f / 10000.0f) * ((int) time);
// Draw the triangle facing straight on.
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 0.0f, 0.0f, 1.0f);
drawTriangle(mTriangle1Vertices);
// Draw one translated a bit down and rotated to be flat on the ground.
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.translateM(mModelMatrix, 0, 0.0f, -1.0f, 0.0f);
Matrix.rotateM(mModelMatrix, 0, 90.0f, 1.0f, 0.0f, 0.0f);
Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 0.0f, 0.0f, 1.0f);
drawTriangle(mTriangle2Vertices);
// Draw one translated a bit to the right and rotated to be facing to the left.
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.translateM(mModelMatrix, 0, 1.0f, 0.0f, 0.0f);
Matrix.rotateM(mModelMatrix, 0, 90.0f, 0.0f, 1.0f, 0.0f);
Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 0.0f, 0.0f, 1.0f);
drawTriangle(mTriangle3Vertices);
// Draw square facing strait on
float smallerAngle = -angleInDegrees;
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.translateM(mModelMatrix, 0, 0, 0, 0.0f);
Matrix.rotateM(mModelMatrix, 0, smallerAngle, 0.0f, 0.0f, 1.0f);
drawSquare(mSquare1Vertices);
}
/**
* Draws a triangle from the given vertex data.
*
* @param aTriangleBuffer The buffer containing the vertex data.
*/
private void drawTriangle(final FloatBuffer aTriangleBuffer)
{
// Pass in the position information
aTriangleBuffer.position(mPositionOffset);
GLES20.glVertexAttribPointer(mPositionHandle, mPositionDataSize, GLES20.GL_FLOAT, false,
mStrideBytes, aTriangleBuffer);
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Pass in the color information
aTriangleBuffer.position(mColorOffset);
GLES20.glVertexAttribPointer(mColorHandle, mColorDataSize, GLES20.GL_FLOAT, false,
mStrideBytes, aTriangleBuffer);
GLES20.glEnableVertexAttribArray(mColorHandle);
// This multiplies the view matrix by the model matrix, and stores the result in the MVP matrix
// (which currently contains model * view).
Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
// This multiplies the modelview matrix by the projection matrix, and stores the result in the MVP matrix
// (which now contains model * view * projection).
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 3);
}
private void drawSquare(final FloatBuffer aSquareBuffer) {
// Pass in the position information
aSquareBuffer.position(mPositionOffset);
GLES20.glVertexAttribPointer(mPositionHandle, mPositionDataSize, GLES20.GL_FLOAT, false,
mStrideBytes, aSquareBuffer);
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Pass in the color information
aSquareBuffer.position(mColorOffset);
GLES20.glVertexAttribPointer(mColorHandle, mColorDataSize, GLES20.GL_FLOAT, false,
mStrideBytes, aSquareBuffer);
GLES20.glEnableVertexAttribArray(mColorHandle);
// This multiplies the view matrix by the model matrix, and stores the result in the MVP matrix
// (which currently contains model * view).
Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
// This multiplies the modelview matrix by the projection matrix, and stores the result in the MVP matrix
// (which now contains model * view * projection).
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 4);
}
}
I'm assuming that you are very new to OpenGL ES 2.0 programming. So, before you start playing around with 3D transformation matrices (for modeling, viewing, or projection), please try this basic example first, which renders a rectangle:
public class GLES20Renderer implements Renderer {
private int _rectangleProgram;
private int _rectangleAPositionLocation;
private FloatBuffer _rectangleVFB;
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
gl.glClearColor(0.0f, 0.0f, 0.0f, 1);
initShapes();
int _rectangleVertexShader = loadShader(GLES20.GL_VERTEX_SHADER, _rectangleVertexShaderCode);
int _rectangleFragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, _rectangleFragmentShaderCode);
_rectangleProgram = GLES20.glCreateProgram();
GLES20.glAttachShader(_rectangleProgram, _rectangleVertexShader);
GLES20.glAttachShader(_rectangleProgram, _rectangleFragmentShader);
GLES20.glLinkProgram(_rectangleProgram);
_rectangleAPositionLocation = GLES20.glGetAttribLocation(_rectangleProgram, "aPosition");
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
gl.glViewport(0, 0, width, height);
}
public void onDrawFrame(GL10 gl) {
gl.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
GLES20.glUseProgram(_rectangleProgram);
GLES20.glVertexAttribPointer(_rectangleAPositionLocation, 3, GLES20.GL_FLOAT, false, 12, _rectangleVFB);
GLES20.glEnableVertexAttribArray(_rectangleAPositionLocation);
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 6);
}
private void initShapes() {
float rectangleVFA[] = {
0, 0, 0,
0, 0.5f, 0,
0.75f, 0.5f, 0,
0.75f, 0.5f, 0,
0.75f, 0, 0,
0, 0, 0,
};
ByteBuffer rectangleVBB = ByteBuffer.allocateDirect(rectangleVFA.length * 4);
rectangleVBB.order(ByteOrder.nativeOrder());
_rectangleVFB = rectangleVBB.asFloatBuffer();
_rectangleVFB.put(rectangleVFA);
_rectangleVFB.position(0);
}
private final String _rectangleVertexShaderCode =
"attribute vec4 aPosition; \n"
+ "void main() { \n"
+ " gl_Position = aPosition; \n"
+ "} \n";
private final String _rectangleFragmentShaderCode =
"void main() { \n"
+ " gl_FragColor = vec4(1,1,1,1); \n"
+ "} \n";
private int loadShader(int type, String source) {
int shader = GLES20.glCreateShader(type);
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
return shader;
}
}
More of these at -
http://www.apress.com/9781430250531
Define your square as two triangles. I do it this way (I give you an example without color data, only vertices):
final float[] squareCoords = {
//first triangle
-1.0f, 1.0f, 0.0f,
-1.0f, -1.0f, 0.0f,
1.0f, 1.0f, 0.0f,
//second triangle
-1.0f, -1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
1.0f, 1.0f, 0.0f
};
And then, in drawSquare() use
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 6);
It will draw two triangles with your square coords.
I am trying to make a projection matrix that scales the screen and establishes a coordinate system. For some reason I don't think any of my matrix calls are working... the 3 functions I am using are
Matrix.orthoM(mProjMatrix, 0, 0, 1520, 0, 1000, -1, 10);
Matrix.setLookAtM(mVMatrix, 0, 0, 0, 1.0f, 0.0f, 0f, 0f, 0f, 1.0f, 0.0f);
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
Are they canceling each other out? Is anything wrong with them?
My main goal is eventually to reach a point where, when I make a square, I can provide coordinates such as (200, 100, 0) // x, y, z rather than only values between -1 and 1.
Here is my full rendering class:
public class MyRenderer implements Renderer {
private static final String TAG = "MyRenderer";
Square square;
private final float[] mMVPMatrix = new float[16];
private final float[] mProjMatrix = new float[16];
private final float[] mVMatrix = new float[16];
private final float[] mRotationMatrix = new float[16];
private int camWidth,camHeight;
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
GLES20.glClearColor(0.0f, 0.0f, 1.0f, 0.5f);
camWidth=480;camHeight=320;
// initialize a square
square = new Square();
}
@Override
public void onDrawFrame(GL10 nope) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
//set camera position
GLES20.glViewport(0, 0, camWidth, camHeight);
Matrix.orthoM(mProjMatrix, 0, 0, 1520, 0, 1000, -10, 999999);
Matrix.setLookAtM(mVMatrix, 0, 0, 0, 1.0f, 0.0f, 0f, 0f, 0f, 1.0f, 0.0f);
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
square.draw(mMVPMatrix);
}
@Override
public void onSurfaceChanged(GL10 nope, int width, int height) {
GLES20.glViewport(0, 0, camWidth, camHeight);
Matrix.orthoM(mProjMatrix, 0, 0, 1520, 0, 1000, -10, 999999);
Matrix.setLookAtM(mVMatrix, 0, 0, 0, 1.0f, 0.0f, 0f, 0f, 0f, 1.0f, 0.0f);
}
public static int loadShader(int type, String shaderCode) {
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
}
My Square class (wasn't sure if needed or not, but just to be safe :) ) -
public class Square {
private final String vertexShaderCode =
"attribute vec4 vPosition;" +
"void main() {" +
" gl_Position = vPosition;" +
"}";
private final String fragmentShaderCode = "precision mediump float;"
+ "uniform vec4 vColor;" + "void main() {"
+ " gl_FragColor = vColor;" + "}";
static final int COORDS_PER_VERTEX = 3;
static float triangleCoords[] = { // in counterclockwise order:
-0.5f, 0.5f, 0.0f, // top left
-0.5f, -0.5f, 0.0f, // bottom left
0.5f, -0.5f, 0.0f, // bottom right
0.5f, 0.5f, 0.0f
};
private short drawOrder[] = { 0, 1, 2, 0, 2, 3 };
private final int vertexCount = triangleCoords.length / COORDS_PER_VERTEX;
private final int vertexStride = COORDS_PER_VERTEX * 4; // bytes per vertex
// red-green-blue-alpha
float color[] = { 0.63f, 0.76f, 0.22f, 1.0f };
private final int mProgram;
private int mPositionHandle;
private int mColorHandle;
private int mMVPMatrixHandle;
private FloatBuffer vertexBuffer;
private ShortBuffer drawListBuffer;
public Square() {
ByteBuffer bb = ByteBuffer.allocateDirect(
// # of coords values * 4 bytes per float
triangleCoords.length * 4);
// use native byte order
bb.order(ByteOrder.nativeOrder());
// create a floating point buffer from the ByteBuffer
vertexBuffer = bb.asFloatBuffer();
// add coordination to FloatBuffer
vertexBuffer.put(triangleCoords);
// set the buffer to read first coordinate
vertexBuffer.position(0);
ByteBuffer dlb = ByteBuffer.allocateDirect(drawOrder.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
int vertexShader = ChizRenderer.loadShader(GLES20.GL_VERTEX_SHADER,
vertexShaderCode);
int fragmentShader = ChizRenderer.loadShader(GLES20.GL_FRAGMENT_SHADER,
fragmentShaderCode);
mProgram = GLES20.glCreateProgram();
GLES20.glAttachShader(mProgram, vertexShader);
GLES20.glAttachShader(mProgram, fragmentShader);
GLES20.glLinkProgram(mProgram);
}
public void draw(float[] mvpMatrix) {
// Add program to OpenGL ES environment
GLES20.glUseProgram(mProgram);
// get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
// get handle to fragment shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
// set color for drawing the triangle
GLES20.glUniform4fv(mColorHandle, 1, color, 0);
// get handle to shape's transformation matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
// apply the projection and view transformation
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
// Draw the triangle
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length,
GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
// dispable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
}
And lastly, just so you have some visuals:
This is how it looks on my phone both with and without the three matrix functions mentioned above; it also seems that the only thing that made any difference to the width and height was GLES20.glViewport(0, 0, camWidth, camHeight);
It seems as if the matrices are doing nothing.
Looks like you adapted the sample to a square. Couple of issues here:
Call glViewport only in onSurfaceChanged and only with the parameters it is given.
The vertex shader code does not use uMVPMatrix. You would have seen this by checking the value of mMVPMatrixHandle (it's -1 for uniforms that don't exist, see here); a fixed shader is sketched after this list.
After the program has been linked, the locations of the shader variables are fixed, so the code may fetch them once, and not for every draw call.
Then, you'll need to adapt the coordinates of the square...
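For the second point, a sketch of what the Square vertex shader could look like once it actually consumes the matrix (keeping the attribute and uniform names used elsewhere in the class):
private final String vertexShaderCode =
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"void main() {" +
// Without this multiply, orthoM/setLookAtM have no effect on the output position.
"  gl_Position = uMVPMatrix * vPosition;" +
"}";
With that in place, glGetUniformLocation(mProgram, "uMVPMatrix") should return a valid location instead of -1, and coordinates such as (200, 100, 0) will be mapped through the orthographic projection set up with orthoM.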
I'm having a problem where my application looks right on my emulator, but on my phone it only displays a fragment of my scene.
Images here (the emulator is the one on the right).
My renderer code is below. (This class is abstract, but all the implementing class does is draw the polygons.)
public abstract class AbstractRenderer implements Renderer {
float x = 0.5f;
float y = 1f;
float z = 3;
boolean displayCoordinateSystem = true;
public void onSurfaceCreated(GL10 gl, EGLConfig eglConfig) {
gl.glDisable(GL10.GL_DITHER);
gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_FASTEST);
gl.glClearColor(.5f, .5f, .5f, 1);
gl.glShadeModel(GL10.GL_SMOOTH);
gl.glEnable(GL10.GL_DEPTH_TEST);
}
public void onSurfaceChanged(GL10 gl, int w, int h) {
gl.glViewport(0, 0, w, h);
float ratio = (float) w / h;
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadIdentity();
gl.glFrustumf(-ratio, ratio, -1, 1, 0, 10);
}
public void onDrawFrame(GL10 gl) {
gl.glDisable(GL10.GL_DITHER);
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
GLU.gluLookAt(gl, x, y, z, 0f, 0, 0f, 0f, 1f, 0f);
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
if(displayCoordinateSystem) {
drawCoordinateSystem(gl);
}
draw(gl);
// gl.glFlush();
}
private void drawCoordinateSystem(GL10 gl) {
ByteBuffer vbb = ByteBuffer.allocateDirect(6*3*4);
vbb.order(ByteOrder.nativeOrder());
FloatBuffer vertices = vbb.asFloatBuffer();
ByteBuffer ibb = ByteBuffer.allocateDirect(6*2);
ibb.order(ByteOrder.nativeOrder());
ShortBuffer indexes = ibb.asShortBuffer();
final float coordLength = 27f;
//add point (-1, 0, 0)
vertices.put(-coordLength);
vertices.put(0);
vertices.put(0);
//add point (1, 0, 0)
vertices.put(coordLength);
vertices.put(0);
vertices.put(0);
//add point (0, -1, 0)
vertices.put(0);
vertices.put(-coordLength);
vertices.put(0);
//add point (0, 1, 0)
vertices.put(0);
vertices.put(coordLength);
vertices.put(0);
//add point (0, 0, -1)
vertices.put(0);
vertices.put(0);
vertices.put(-coordLength);
//add point (0, 0, 1)
vertices.put(0);
vertices.put(0);
vertices.put(coordLength);
for(int i = 0; i < 6; i++) {
indexes.put((short)i);
}
vertices.position(0);
indexes.position(0);
gl.glColor4f(1, 1, 0, 0.5f);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertices);
gl.glDrawElements(GL10.GL_LINES, 2, GL10.GL_UNSIGNED_SHORT, indexes);
indexes.position(2);
gl.glColor4f(0, 1, 0, 0.5f);
gl.glDrawElements(GL10.GL_LINES, 2, GL10.GL_UNSIGNED_SHORT, indexes);
indexes.position(4);
gl.glColor4f(0, 0, 1, 0.5f);
gl.glDrawElements(GL10.GL_LINES, 2, GL10.GL_UNSIGNED_SHORT, indexes);
}
protected abstract void draw(GL10 gl);
}
My guess is that I'm not setting some value that is set by default by the emulator implementation. The only thing is, I have no clue what that value might be.
Hoping to hear from you dudes and dudettes!
It's a depth buffer problem: From the "notes" section in the man page of glFrustum:
near must never be set to 0.
You should calculate the near value to be as far from the camera as possible, and the far to be as close as possible, while still encompassing the things you want to draw.
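Concretely, a possible fix in onSurfaceChanged() is to use a non-zero near plane; the 0.5 below is just an example value, so pick the largest near and smallest far that still contain everything you draw:
public void onSurfaceChanged(GL10 gl, int w, int h) {
gl.glViewport(0, 0, w, h);
float ratio = (float) w / h;
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadIdentity();
// near must be > 0; a tight near/far range also keeps depth-buffer
// precision usable on real hardware, even where the emulator is forgiving.
gl.glFrustumf(-ratio, ratio, -1, 1, 0.5f, 10);
}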