How to replace a cube object with an .obj file in GLSurfaceView on Android?

I created a 3D object in Android using the tutorials from learnopengles, building the cube from lesson six of that series (texture filtering). Now I want to replace the cube with my own object (a strawberry that I modelled). I want my object to show up in the view, so I parse the object (a Wavefront .obj file) into my renderer class, but what the view displays is just a jumble of random triangles.
This is my parsing code:
public ObjLoader(Context mActivityContext) {
    FileReader fr;
    String str;
    ArrayList<Float> tempModelVertices = new ArrayList<Float>();
    ArrayList<Float> tempTextureVertices = new ArrayList<Float>();
    ArrayList<Float> tempNormalVertices = new ArrayList<Float>();
    ArrayList<Integer> facesM = new ArrayList<Integer>();
    ArrayList<Integer> facesT = new ArrayList<Integer>();
    ArrayList<Integer> facesN = new ArrayList<Integer>();
    try {
        fr = new FileReader(new File("model/straw_obj"));
        BufferedReader br = new BufferedReader(fr);
        while ((str = br.readLine()) != null) {
            if (str.startsWith("f")) {
                String[] strAr = str.replaceAll("f", "").trim().split(" ");
                for (String s : strAr) {
                    String[] cornerAr = s.split("/");
                    facesM.add(Integer.parseInt(cornerAr[0].trim()) - 1);
                    facesT.add(Integer.parseInt(cornerAr[1].trim()) - 1);
                    facesN.add(Integer.parseInt(cornerAr[2].trim()) - 1);
                }
            } else if (str.startsWith("vt")) {
                String[] strAr = str.replaceAll("vt", "").trim().split(" ");
                tempTextureVertices.add(Float.valueOf(strAr[0].trim()));
                tempTextureVertices.add(-1 * Float.valueOf(strAr[1].trim()));
            } else if (str.startsWith("vn")) {
                String[] strAr = str.replaceAll("vn", "").trim().split(" ");
                tempNormalVertices.add(Float.valueOf(strAr[0].trim()));
                tempNormalVertices.add(Float.valueOf(strAr[1].trim()));
                tempNormalVertices.add(Float.valueOf(strAr[2].trim()));
            } else if (str.startsWith("v")) {
                String[] strAr = str.replaceAll("v", "").trim().split(" ");
                tempModelVertices.add(Float.valueOf(strAr[0].trim()));
                tempModelVertices.add(Float.valueOf(strAr[1].trim()));
                tempModelVertices.add(Float.valueOf(strAr[2].trim()));
            }
        }
        //Log.v(LOG_TAG, "v :"+ String.valueOf(v) + "vt :"+ String.valueOf(vt) + "vn :"+ String.valueOf(vn) + "f :"+ String.valueOf(f));
    } catch (IOException e) {
        // TODO Auto-generated catch block
        Log.v(TAG, "error");
    }
    Log.v(TAG, "vt " + String.valueOf(tempTextureVertices.size()) + " vn " + String.valueOf(tempNormalVertices.size()) + " v " + String.valueOf(tempModelVertices.size()));
    ModelPositionData = new float[facesM.size()];
    ModelTextureCoordinateData = new float[facesT.size()];
    ModelNormalData = new float[facesN.size()];
    for (int i = 0; i < facesM.size(); i++) {
        ModelPositionData[i] = tempModelVertices.get(facesM.get(i));
    }
    for (int i = 0; i < facesT.size(); i++) {
        ModelTextureCoordinateData[i] = tempTextureVertices.get(facesT.get(i));
    }
    for (int i = 0; i < facesN.size(); i++) {
        ModelNormalData[i] = tempNormalVertices.get(facesN.get(i));
    }
}
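As an aside, new FileReader(new File("model/straw_obj")) will usually throw a FileNotFoundException on Android, because files packaged inside the APK are not reachable through a plain relative path. Since the constructor already receives a Context, one way to open the model is through the AssetManager; this is only a sketch, and the asset path "model/straw.obj" is an assumed example:
// Sketch: read the .obj from the APK's assets instead of the file system.
// Assumes the model was placed under src/main/assets/model/straw.obj (example path).
InputStream in = mActivityContext.getAssets().open("model/straw.obj");
BufferedReader br = new BufferedReader(new InputStreamReader(in));
String str;
while ((str = br.readLine()) != null) {
    // parse the "v", "vt", "vn" and "f" lines exactly as above
}
br.close();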
And this is how I create the GLSurfaceView renderer:
public class TesterRenderer implements GLSurfaceView.Renderer{
private static final String TAG = "TesterRenderer";
private final Context mActivityContext;
/**
* Store the model matrix. This matrix is used to move models from object space (where each model can be thought
* of being located at the center of the universe) to world space.
*/
private float[] mModelMatrix = new float[16];
/**
* Store the view matrix. This can be thought of as our camera. This matrix transforms world space to eye space;
* it positions things relative to our eye.
*/
private float[] mViewMatrix = new float[16];
/** Store the projection matrix. This is used to project the scene onto a 2D viewport. */
private float[] mProjectionMatrix = new float[16];
/** Allocate storage for the final combined matrix. This will be passed into the shader program. */
private float[] mMVPMatrix = new float[16];
/** Store the accumulated rotation. */
private final float[] mAccumulatedRotation = new float[16];
/** Store the current rotation. */
private final float[] mCurrentRotation = new float[16];
/** A temporary matrix. */
private float[] mTemporaryMatrix = new float[16];
/**
* Stores a copy of the model matrix specifically for the light position.
*/
private float[] mLightModelMatrix = new float[16];
/** Store our model data in a float buffer. */
private final FloatBuffer mModelPositions;
private final FloatBuffer mModelNormals;
private final FloatBuffer mModelTextureCoordinates;
// private final FloatBuffer mModelTextureCoordinatesForPlane;
/** This will be used to pass in the transformation matrix. */
private int mMVPMatrixHandle;
/** This will be used to pass in the modelview matrix. */
private int mMVMatrixHandle;
/** This will be used to pass in the light position. */
private int mLightPosHandle;
/** This will be used to pass in the texture. */
private int mTextureUniformHandle;
/** This will be used to pass in model position information. */
private int mPositionHandle;
/** This will be used to pass in model normal information. */
private int mNormalHandle;
/** This will be used to pass in model texture coordinate information. */
private int mTextureCoordinateHandle;
/** How many bytes per float. */
private final int mBytesPerFloat = 4;
/** Size of the position data in elements. */
private final int mPositionDataSize = 3;
/** Size of the normal data in elements. */
private final int mNormalDataSize = 3;
/** Size of the texture coordinate data in elements. */
private final int mTextureCoordinateDataSize = 2;
/** Used to hold a light centered on the origin in model space. We need a 4th coordinate so we can get translations to work when
* we multiply this by our transformation matrices. */
private final float[] mLightPosInModelSpace = new float[] {0.0f, 0.0f, 0.0f, 1.0f};
/** Used to hold the current position of the light in world space (after transformation via model matrix). */
private final float[] mLightPosInWorldSpace = new float[4];
/** Used to hold the transformed position of the light in eye space (after transformation via modelview matrix) */
private final float[] mLightPosInEyeSpace = new float[4];
/** This is a handle to our cube shading program. */
private int mProgramHandle;
/** This is a handle to our light point program. */
private int mPointProgramHandle;
/** These are handles to our texture data. */
private int mTextureDataHandle;
// private int mGrassDataHandle;
/** Temporary place to save the min and mag filter, in case the activity was restarted. */
private int mQueuedMinFilter;
private int mQueuedMagFilter;
// These still work without volatile, but refreshes are not guaranteed to happen.
public volatile float mDeltaX;
public volatile float mDeltaY;
public TesterRenderer(final Context activityContext)
{
mActivityContext = activityContext;
ObjLoader obj = new ObjLoader(mActivityContext);
mModelPositions = ByteBuffer.allocateDirect(obj.ModelPositionData.length * mBytesPerFloat)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mModelPositions.put(obj.ModelPositionData).position(0);
mModelNormals = ByteBuffer.allocateDirect(obj.ModelNormalData.length * mBytesPerFloat)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mModelNormals.put(obj.ModelNormalData).position(0);
mModelTextureCoordinates = ByteBuffer.allocateDirect(obj.ModelTextureCoordinateData.length * mBytesPerFloat)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mModelTextureCoordinates.put(obj.ModelTextureCoordinateData).position(0);
}
@Override
public void onSurfaceCreated(GL10 glUnused, EGLConfig config)
{
// Set the background clear color to black.
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
// Use culling to remove back faces.
GLES20.glEnable(GLES20.GL_CULL_FACE);
// Enable depth testing
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
// The below glEnable() call is a holdover from OpenGL ES 1, and is not needed in OpenGL ES 2.
// Enable texture mapping
// GLES20.glEnable(GLES20.GL_TEXTURE_2D);
// Position the eye in front of the origin.
final float eyeX = 0.0f;
final float eyeY = 0.0f;
final float eyeZ = -0.5f;
// We are looking toward the distance
final float lookX = 0.0f;
final float lookY = 0.0f;
final float lookZ = -5.0f;
// Set our up vector. This is where our head would be pointing were we holding the camera.
final float upX = 0.0f;
final float upY = 1.0f;
final float upZ = 0.0f;
// Set the view matrix. This matrix can be said to represent the camera position.
// NOTE: In OpenGL 1, a ModelView matrix is used, which is a combination of a model and
// view matrix. In OpenGL 2, we can keep track of these matrices separately if we choose.
Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ);
final String vertexShader = RawResourceReader.readTextFileFromRawResource(mActivityContext, R.raw.per_pixel_vertex_shader_tex_and_light);
final String fragmentShader = RawResourceReader.readTextFileFromRawResource(mActivityContext, R.raw.per_pixel_fragment_shader_tex_and_light);
final int vertexShaderHandle = ShaderHelper.compileShader(GLES20.GL_VERTEX_SHADER, vertexShader);
final int fragmentShaderHandle = ShaderHelper.compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentShader);
mProgramHandle = ShaderHelper.createAndLinkProgram(vertexShaderHandle, fragmentShaderHandle,
new String[] {"a_Position", "a_Normal", "a_TexCoordinate"});
// Define a simple shader program for our point.
final String pointVertexShader = RawResourceReader.readTextFileFromRawResource(mActivityContext, R.raw.point_vertex_shader);
final String pointFragmentShader = RawResourceReader.readTextFileFromRawResource(mActivityContext, R.raw.point_fragment_shader);
final int pointVertexShaderHandle = ShaderHelper.compileShader(GLES20.GL_VERTEX_SHADER, pointVertexShader);
final int pointFragmentShaderHandle = ShaderHelper.compileShader(GLES20.GL_FRAGMENT_SHADER, pointFragmentShader);
mPointProgramHandle = ShaderHelper.createAndLinkProgram(pointVertexShaderHandle, pointFragmentShaderHandle,
new String[] {"a_Position"});
// Load the texture
mTextureDataHandle = TextureHelper.loadTexture(mActivityContext, R.drawable.strawberry_texture);
GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
// mGrassDataHandle = TextureHelper.loadTexture(mActivityContext, R.drawable.noisy_grass_public_domain);
// GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
if (mQueuedMinFilter != 0)
{
setMinFilter(mQueuedMinFilter);
}
if (mQueuedMagFilter != 0)
{
setMagFilter(mQueuedMagFilter);
}
// Initialize the accumulated rotation matrix
Matrix.setIdentityM(mAccumulatedRotation, 0);
}
@Override
public void onSurfaceChanged(GL10 glUnused, int width, int height)
{
// Set the OpenGL viewport to the same size as the surface.
GLES20.glViewport(0, 0, width, height);
// Create a new perspective projection matrix. The height will stay the same
// while the width will vary as per aspect ratio.
final float ratio = (float) width / height;
final float left = -ratio;
final float right = ratio;
final float bottom = -1.0f;
final float top = 1.0f;
final float near = 1.0f;
final float far = 1000.0f;
Matrix.frustumM(mProjectionMatrix, 0, left, right, bottom, top, near, far);
}
@Override
public void onDrawFrame(GL10 glUnused)
{
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
// Do a complete rotation every 10 seconds.
long time = SystemClock.uptimeMillis() % 10000L;
long slowTime = SystemClock.uptimeMillis() % 100000L;
float angleInDegrees = (360.0f / 10000.0f) * ((int) time);
float slowAngleInDegrees = (360.0f / 100000.0f) * ((int) slowTime);
// Set our per-vertex lighting program.
GLES20.glUseProgram(mProgramHandle);
// Set program handles for cube drawing.
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_MVPMatrix");
mMVMatrixHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_MVMatrix");
mLightPosHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_LightPos");
mTextureUniformHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_Texture");
mPositionHandle = GLES20.glGetAttribLocation(mProgramHandle, "a_Position");
mNormalHandle = GLES20.glGetAttribLocation(mProgramHandle, "a_Normal");
mTextureCoordinateHandle = GLES20.glGetAttribLocation(mProgramHandle, "a_TexCoordinate");
// Calculate position of the light. Rotate and then push into the distance.
Matrix.setIdentityM(mLightModelMatrix, 0);
Matrix.translateM(mLightModelMatrix, 0, 0.0f, 0.0f, -2.0f);
Matrix.rotateM(mLightModelMatrix, 0, angleInDegrees, 0.0f, 1.0f, 0.0f);
Matrix.translateM(mLightModelMatrix, 0, 0.0f, 0.0f, 3.5f);
Matrix.multiplyMV(mLightPosInWorldSpace, 0, mLightModelMatrix, 0, mLightPosInModelSpace, 0);
Matrix.multiplyMV(mLightPosInEyeSpace, 0, mViewMatrix, 0, mLightPosInWorldSpace, 0);
// Draw a cube.
// Translate the cube into the screen.
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.translateM(mModelMatrix, 0, 0.0f, 0.0f, -7.0f);
// Set a matrix that contains the current rotation.
Matrix.setIdentityM(mCurrentRotation, 0);
Matrix.rotateM(mCurrentRotation, 0, mDeltaX, 0.0f, 1.0f, 0.0f);
Matrix.rotateM(mCurrentRotation, 0, mDeltaY, 1.0f, 0.0f, 0.0f);
mDeltaX = 0.0f;
mDeltaY = 0.0f;
// Multiply the current rotation by the accumulated rotation, and then set the accumulated rotation to the result.
Matrix.multiplyMM(mTemporaryMatrix, 0, mCurrentRotation, 0, mAccumulatedRotation, 0);
System.arraycopy(mTemporaryMatrix, 0, mAccumulatedRotation, 0, 16);
// Rotate the cube taking the overall rotation into account.
Matrix.multiplyMM(mTemporaryMatrix, 0, mModelMatrix, 0, mAccumulatedRotation, 0);
System.arraycopy(mTemporaryMatrix, 0, mModelMatrix, 0, 16);
// Set the active texture unit to texture unit 0.
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
// Bind the texture to this unit.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);
// Tell the texture uniform sampler to use this texture in the shader by binding to texture unit 0.
GLES20.glUniform1i(mTextureUniformHandle, 0);
// Pass in the texture coordinate information
GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
mModelTextureCoordinates.position(0);
GLES20.glVertexAttribPointer(mTextureCoordinateHandle, mTextureCoordinateDataSize, GLES20.GL_FLOAT, false,
0, mModelTextureCoordinates);
drawModel();
// Draw a plane
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.translateM(mModelMatrix, 0, 0.0f, -2.0f, -5.0f);
Matrix.scaleM(mModelMatrix, 0, 25.0f, 1.0f, 25.0f);
Matrix.rotateM(mModelMatrix, 0, slowAngleInDegrees, 0.0f, 1.0f, 0.0f);
// Set the active texture unit to texture unit 0.
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
// Bind the texture to this unit.
//GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mGrassDataHandle);
// Tell the texture uniform sampler to use this texture in the shader by binding to texture unit 0.
GLES20.glUniform1i(mTextureUniformHandle, 0);
// Pass in the texture coordinate information
GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
drawModel();
GLES20.glUseProgram(mPointProgramHandle);
drawLight();
}
public void setMinFilter(final int filter)
{
if (mTextureDataHandle != 0)
{
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, filter);
// GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mGrassDataHandle);
// GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, filter);
}
else
{
mQueuedMinFilter = filter;
}
}
public void setMagFilter(final int filter)
{
if (mTextureDataHandle != 0)
{
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, filter);
// GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mGrassDataHandle);
// GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, filter);
}
else
{
mQueuedMagFilter = filter;
}
}
private void drawModel()
{
// Pass in the position information
GLES20.glEnableVertexAttribArray(mPositionHandle);
mModelPositions.position(0);
GLES20.glVertexAttribPointer(mPositionHandle, mPositionDataSize, GLES20.GL_FLOAT, false,
0, mModelPositions);
// Pass in the normal information
GLES20.glEnableVertexAttribArray(mNormalHandle);
mModelNormals.position(0);
GLES20.glVertexAttribPointer(mNormalHandle, mNormalDataSize, GLES20.GL_FLOAT, false,
0, mModelNormals);
// This multiplies the view matrix by the model matrix, and stores the result in the MVP matrix
// (which currently contains model * view).
Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
// Pass in the modelview matrix.
GLES20.glUniformMatrix4fv(mMVMatrixHandle, 1, false, mMVPMatrix, 0);
// This multiplies the modelview matrix by the projection matrix, and stores the result in the MVP matrix
// (which now contains model * view * projection).
Matrix.multiplyMM(mTemporaryMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
System.arraycopy(mTemporaryMatrix, 0, mMVPMatrix, 0, 16);
// Pass in the combined matrix.
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
// Pass in the light position in eye space.
GLES20.glUniform3f(mLightPosHandle, mLightPosInEyeSpace[0], mLightPosInEyeSpace[1], mLightPosInEyeSpace[2]);
// Draw the cube.
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 36);
}
/**
* Draws a point representing the position of the light.
*/
private void drawLight()
{
final int pointMVPMatrixHandle = GLES20.glGetUniformLocation(mPointProgramHandle, "u_MVPMatrix");
final int pointPositionHandle = GLES20.glGetAttribLocation(mPointProgramHandle, "a_Position");
// Pass in the position.
GLES20.glVertexAttrib3f(pointPositionHandle, mLightPosInModelSpace[0], mLightPosInModelSpace[1], mLightPosInModelSpace[2]);
// Since we are not using a buffer object, disable vertex arrays for this attribute.
GLES20.glDisableVertexAttribArray(pointPositionHandle);
// Pass in the transformation matrix.
Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mLightModelMatrix, 0);
Matrix.multiplyMM(mTemporaryMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
System.arraycopy(mTemporaryMatrix, 0, mMVPMatrix, 0, 16);
GLES20.glUniformMatrix4fv(pointMVPMatrixHandle, 1, false, mMVPMatrix, 0);
// Draw the point.
GLES20.glDrawArrays(GLES20.GL_POINTS, 0, 1);
}
}
Can someone help me fix this?

It looks like there is a problem with the way you reorder the coordinates based on the indices in the faces:
for(int i=0; i<facesM.size(); i++){
ModelPositionData[i] = tempModelVertices.get(facesM.get(i));
}
Each position consists of 3 coordinates. This loop copies only one value per position, though. It should look something like this:
for(int i=0; i<facesM.size(); i++){
ModelPositionData[3 * i ] = tempModelVertices.get(3 * facesM.get(i) );
ModelPositionData[3 * i + 1] = tempModelVertices.get(3 * facesM.get(i) + 1);
ModelPositionData[3 * i + 2] = tempModelVertices.get(3 * facesM.get(i) + 2);
}
You will also need to adjust the allocation accordingly:
ModelPositionData = new float[3 * facesM.size()];
and make the equivalent changes for the normals and texture coordinates.
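Putting the answer's fix together, the corrected allocation and copy loops could look like this (texture coordinates have 2 components per vertex, positions and normals have 3):
ModelPositionData = new float[3 * facesM.size()];
ModelTextureCoordinateData = new float[2 * facesT.size()];
ModelNormalData = new float[3 * facesN.size()];
for(int i=0; i<facesM.size(); i++){
    ModelPositionData[3 * i    ] = tempModelVertices.get(3 * facesM.get(i)    );
    ModelPositionData[3 * i + 1] = tempModelVertices.get(3 * facesM.get(i) + 1);
    ModelPositionData[3 * i + 2] = tempModelVertices.get(3 * facesM.get(i) + 2);
}
for(int i=0; i<facesT.size(); i++){
    ModelTextureCoordinateData[2 * i    ] = tempTextureVertices.get(2 * facesT.get(i)    );
    ModelTextureCoordinateData[2 * i + 1] = tempTextureVertices.get(2 * facesT.get(i) + 1);
}
for(int i=0; i<facesN.size(); i++){
    ModelNormalData[3 * i    ] = tempNormalVertices.get(3 * facesN.get(i)    );
    ModelNormalData[3 * i + 1] = tempNormalVertices.get(3 * facesN.get(i) + 1);
    ModelNormalData[3 * i + 2] = tempNormalVertices.get(3 * facesN.get(i) + 2);
}
Note also that drawModel() still issues GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 36), the cube's hardcoded vertex count; for the loaded model it would need to draw facesM.size() vertices.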

Related

OpenGL: rendering a point cloud using VBO doesn't work

I'm using OpenGL on Android to visualize a large point-cloud dataset stored in an array. As a first step, I wanted to show a few points on the screen to check that the pipeline is correct. I used a VBO to render them, but only a single point shows up in the middle of the screen. I implemented the code below, which I found on the internet.
Here is my PointRenderer class:
public class PointRenderer implements GLSurfaceView.Renderer {
private float[] mModelMatrix = new float[16];
private float[] mViewMatrix = new float[16];
private float[] mProjectionMatrix = new float[16];
private float[] mMVPMatrix = new float[16];
private int mMVPMatrixHandle;
private int mPositionHandle;
float[] vertices = {
0.0f, 0.5f, 0.0f,
-100.5f, -100.5f, 0.0f,
100.5f, -100.5f, 0.0f
};
FloatBuffer vertexBuf;
@Override
public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
vertexBuf = ByteBuffer.allocateDirect(vertices.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
vertexBuf.put(vertices).position(0);
// Set the background clear color to gray.
GLES20.glClearColor(0.5f, 0.5f, 0.5f, 0.5f);
float eyeX = 0.0f;
float eyeY = 0.0f;
float eyeZ = 0.0f;
float centerX = 0.0f;
float centerY = 0.0f;
float centerZ = -5.0f;
float upX = 0.0f;
float upY = 1.0f;
float upZ = 0.0f;
// Set the view matrix. This matrix can be said to represent the camera position.
// NOTE: In OpenGL 1, a ModelView matrix is used, which is a combination of a model and
// view matrix. In OpenGL 2, we can keep track of these matrices separately if we choose.
Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, centerX, centerY, centerZ, upX, upY, upZ);
final String vertexShader =
"uniform mat4 u_MVPMatrix; \n"
+ "attribute vec4 a_Position; \n"
+ "void main() \n"
+ "{ \n"
+ " gl_Position = u_MVPMatrix \n"
+ " * a_Position; \n"
+ " gl_PointSize = 10.0; \n"
+ "} \n";
final String fragmentShader =
"precision mediump float; \n"
+ "void main() \n"
+ "{ \n"
+ " gl_FragColor = vec4(1.0, \n"
+ " 1.0, 1.0, 1.0); \n"
+ "} \n";
// Load in the vertex shader.
int vertexShaderHandle = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
if (vertexShaderHandle != 0) {
// Pass in the shader source.
GLES20.glShaderSource(vertexShaderHandle, vertexShader);
// Compile the shader.
GLES20.glCompileShader(vertexShaderHandle);
// Get the compilation status.
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(vertexShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
// If the compilation failed, delete the shader.
if (compileStatus[0] == 0) {
GLES20.glDeleteShader(vertexShaderHandle);
vertexShaderHandle = 0;
}
}
if (vertexShaderHandle == 0) {
throw new RuntimeException("Error creating vertex shader.");
}
// Load in the fragment shader shader.
int fragmentShaderHandle = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
if (fragmentShaderHandle != 0) {
// Pass in the shader source.
GLES20.glShaderSource(fragmentShaderHandle, fragmentShader);
// Compile the shader.
GLES20.glCompileShader(fragmentShaderHandle);
// Get the compilation status.
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(fragmentShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
// If the compilation failed, delete the shader.
if (compileStatus[0] == 0) {
GLES20.glDeleteShader(fragmentShaderHandle);
fragmentShaderHandle = 0;
}
}
if (fragmentShaderHandle == 0) {
throw new RuntimeException("Error creating fragment shader.");
}
// Create a program object and store the handle to it.
int programHandle = GLES20.glCreateProgram();
if (programHandle != 0) {
// Bind the vertex shader to the program.
GLES20.glAttachShader(programHandle, vertexShaderHandle);
// Bind the fragment shader to the program.
GLES20.glAttachShader(programHandle, fragmentShaderHandle);
// Bind attributes
GLES20.glBindAttribLocation(programHandle, 0, "a_Position");
// Link the two shaders together into a program.
GLES20.glLinkProgram(programHandle);
// Get the link status.
final int[] linkStatus = new int[1];
GLES20.glGetProgramiv(programHandle, GLES20.GL_LINK_STATUS, linkStatus, 0);
// If the link failed, delete the program.
if (linkStatus[0] == 0) {
GLES20.glDeleteProgram(programHandle);
programHandle = 0;
}
}
if (programHandle == 0) {
throw new RuntimeException("Error creating program.");
}
// Set program handles. These will later be used to pass in values to the program.
mMVPMatrixHandle = GLES20.glGetUniformLocation(programHandle, "u_MVPMatrix");
mPositionHandle = GLES20.glGetAttribLocation(programHandle, "a_Position");
// Tell OpenGL to use this program when rendering.
GLES20.glUseProgram(programHandle);
}
@Override
public void onSurfaceChanged(GL10 glUnused, int width, int height) {
// Set the OpenGL viewport to the same size as the surface.
GLES20.glViewport(0, 0, width, height);
// Create a new perspective projection matrix. The height will stay the same
// while the width will vary as per aspect ratio.
final float ratio = (float) width / height;
final float left = -ratio;
final float right = ratio;
final float bottom = -1.0f;
final float top = 1.0f;
final float near = 1.0f;
final float far = 100.0f;
Matrix.frustumM(mProjectionMatrix, 0, left, right, bottom, top, near, far);
}
@Override
public void onDrawFrame(GL10 glUnused) {
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
Matrix.setIdentityM(mModelMatrix, 0);
//Push to the distance - note this will have no effect on a point size
Matrix.translateM(mModelMatrix, 0, 0.0f, 0.0f, -5.0f);
Matrix.multiplyMV(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
Matrix.multiplyMV(mMVPMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
//Send the vertex
GLES20.glVertexAttribPointer(mPositionHandle, 3, GLES20.GL_FLOAT, false, 0, vertexBuf);
GLES20.glEnableVertexAttribArray(mPositionHandle);
//Draw the point
GLES20.glDrawArrays(GLES20.GL_POINTS, 0, vertices.length/3);
}
}
I suspect the coordinates for two of your points are outside of the visible frustum. Try replacing the 100.5 and -100.5 with something smaller and see if that helps.
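For example, with the rest of the setup unchanged, coordinates roughly within the unit range land inside the frustum at that depth:
float[] vertices = {
    0.0f,  0.5f, 0.0f,
   -0.5f, -0.5f, 0.0f,
    0.5f, -0.5f, 0.0f
};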
You have to enable both the vertex attribute and the texture coordinate attribute:
GLES20.glEnableVertexAttribArray(mVertexAttrib)
GLES20.glEnableVertexAttribArray(mTexCoordAttrib)

Android - OpenGL image doesn't rotate (almost completed)

I was following an OpenGL tutorial (https://developer.android.com/training/graphics/opengl/touch.html#angle) and I think I did everything right. My problem is that the triangle doesn't rotate on the display. When debugging I can see that the angle values are updated and mGLView.requestRender(); is called, but the image on the screen doesn't change.
public class OpenGL extends Fragment {
private GLSurfaceView mGLView;
private final float TOUCH_SCALE_FACTOR = 180.0f / 320;
private float mPreviousX;
private float mPreviousY;
private OpenGLRenderer mRenderer;
public int height = 0;
public int width = 0;
public OpenGL() {
// Required empty public constructor
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
mGLView = new GLSurfaceView(getActivity());
mGLView.setEGLContextClientVersion(2);
mRenderer = new OpenGLRenderer();
mGLView.setRenderer(mRenderer);
// Render the view only when there is a change in the drawing data
mGLView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
DisplayMetrics displayMetrics = new DisplayMetrics();
((Activity) getContext()).getWindowManager()
.getDefaultDisplay()
.getMetrics(displayMetrics);
height = displayMetrics.heightPixels;
width = displayMetrics.widthPixels;
mGLView.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View v, MotionEvent e) {
// MotionEvent reports input details from the touch screen
// and other input controls. In this case, you are only
// interested in events where the touch position changed.
float x = e.getX();
float y = e.getY();
switch (e.getAction()) {
case MotionEvent.ACTION_MOVE:
float dx = x - mPreviousX;
float dy = y - mPreviousY;
// reverse direction of rotation above the mid-line
if (y > height / 2) {
dx = dx * -1;
}
// reverse direction of rotation to left of the mid-line
if (x < width / 2) {
dy = dy * -1;
}
mRenderer.setAngle(
mRenderer.getAngle() +
((dx + dy) * TOUCH_SCALE_FACTOR));
mGLView.requestRender();
}
mPreviousX = x;
mPreviousY = y;
return true;
}
});
return mGLView;
}
@Override
public void onResume() {
super.onResume();
mGLView.onResume();
}
@Override
public void onPause() {
super.onPause();
mGLView.onPause();
}
}
class OpenGLRenderer implements GLSurfaceView.Renderer {
private Triangle mRectangle;
private OpenGL mOpenGL = new OpenGL();
// mMVPMatrix is an abbreviation for "Model View Projection Matrix"
private final float[] mMVPMatrix = new float[16];
private final float[] mProjectionMatrix = new float[16];
private final float[] mViewMatrix = new float[16];
private float[] mRotationMatrix = new float[16];
float[] scratch = new float[16];
public volatile float mAngle;
public float getAngle() {
return mAngle;
}
public void setAngle(float angle) {
mAngle = angle;
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
// Set the background frame color
//GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
GLES20.glViewport(0, 0, mOpenGL.width, mOpenGL.height);
float ratio = (float) mOpenGL.width / mOpenGL.height;
// this projection matrix is applied to object coordinates
// in the onDrawFrame() method
Matrix.frustumM(mProjectionMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
mRectangle = new Triangle();
}
public void onDrawFrame(GL10 gl) {
// Redraw background color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
// Set the camera position (View matrix)
Matrix.setLookAtM(mViewMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
// Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mRotationMatrix, 0, mViewMatrix, 0);
// Create a rotation for the triangle
// long time = SystemClock.uptimeMillis() % 4000L;
// float angle = 0.090f * ((int) time);
Matrix.setRotateM(mRotationMatrix, 0, mAngle, 0, 0, -1.0f);
// Combine the rotation matrix with the projection and camera view
// Note that the mMVPMatrix factor *must be first* in order
// for the matrix multiplication product to be correct.
Matrix.multiplyMM(scratch, 0, mMVPMatrix, 0, mRotationMatrix, 0);
// Draw triangle
mRectangle.drawMatrix(scratch);
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
GLES20.glViewport(0, 0, width, height);
}
public static int loadShader(int type, String shaderCode){
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
}
class Triangle {
private final String vertexShaderCodeMatrix =
// This matrix member variable provides a hook to manipulate
// the coordinates of the objects that use this vertex shader
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"void main() {" +
// the matrix must be included as a modifier of gl_Position
// Note that the uMVPMatrix factor *must be first* in order
// for the matrix multiplication product to be correct.
" gl_Position = uMVPMatrix * vPosition;" +
"}";
// Use to access and set the view transformation
private int mMVPMatrixHandle;
private final String vertexShaderCode =
"attribute vec4 vPosition;" +
"void main() {" +
" gl_Position = vPosition;" +
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
"void main() {" +
" gl_FragColor = vColor;" +
"}";
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
static float triangleCoords[] = { // in counterclockwise order:
0.0f, 0.622008459f, 0.0f, // top
-0.5f, -0.311004243f, 0.0f, // bottom left
0.5f, -0.311004243f, 0.0f // bottom right
};
// Set color with red, green, blue and alpha (opacity) values
float color[] = { 0.5f, 0.5f, 0.5f, 1.0f };
private final int mProgram;
private short[] indices = {0,1,2,0,2,3};
private FloatBuffer vertexBuffer;
private ShortBuffer indexBuffer;
public Triangle() {
// initialize vertex byte buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(
// (number of coordinate values * 4 bytes per float)
triangleCoords.length * 4);
// use the device hardware's native byte order
bb.order(ByteOrder.nativeOrder());
// create a floating point buffer from the ByteBuffer
vertexBuffer = bb.asFloatBuffer();
// add the coordinates to the FloatBuffer
vertexBuffer.put(triangleCoords);
// set the buffer to read the first coordinate
vertexBuffer.position(0);
int vertexShader = OpenGLRenderer.loadShader(GLES20.GL_VERTEX_SHADER,
vertexShaderCode);
int fragmentShader = OpenGLRenderer.loadShader(GLES20.GL_FRAGMENT_SHADER,
fragmentShaderCode);
// create empty OpenGL ES Program
mProgram = GLES20.glCreateProgram();
// add the vertex shader to program
GLES20.glAttachShader(mProgram, vertexShader);
// add the fragment shader to program
GLES20.glAttachShader(mProgram, fragmentShader);
// creates OpenGL ES program executables
GLES20.glLinkProgram(mProgram);
}
private int mPositionHandle;
private int mColorHandle;
private final int vertexCount = triangleCoords.length / COORDS_PER_VERTEX;
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex
public void draw() {
// Add program to OpenGL ES environment
GLES20.glUseProgram(mProgram);
// get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
vertexStride, vertexBuffer);
// get handle to fragment shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
// Set color for drawing the triangle
GLES20.glUniform4fv(mColorHandle, 1, color, 0);
// Draw the triangle
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, vertexCount);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
public void drawMatrix(float[] mvpMatrix) { // pass in the calculated transformation matrix
// Add program to OpenGL ES environment
GLES20.glUseProgram(mProgram);
// get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
vertexStride, vertexBuffer);
// get handle to fragment shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
// Set color for drawing the triangle
GLES20.glUniform4fv(mColorHandle, 1, color, 0);
// get handle to shape's transformation matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
// Pass the projection and view transformation to the shader
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
// Draw the triangle
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, vertexCount);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
}
You loaded 'vertexShaderCode' instead of 'vertexShaderCodeMatrix' in the Triangle class.
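That is, the Triangle constructor needs to compile the shader that actually declares uMVPMatrix:
int vertexShader = OpenGLRenderer.loadShader(GLES20.GL_VERTEX_SHADER,
        vertexShaderCodeMatrix); // was vertexShaderCode, which has no uMVPMatrix uniform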

How to rotate a camera view in OpenGL ES 2.0

I want to build a 360-degree world using OpenGL ES 2.0 on a smartphone, and I want the camera view to follow the phone's sensors using a quaternion, like a YouTube 360 video or an Oculus headset.
I receive the quaternion value through the public void setQuaternion(...) method.
I convert the quaternion values to a 4x4 rotation matrix with the makeRotateMatrix() function.
How can I control the camera view (move the camera's line of sight) using this matrix?
I'm having a hard time with this; please help.
Here is my rendering source code (it currently just shows the blue square box):
package com.lookie.tom.superlookie;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.util.Log;
public class MyGLRenderer implements GLSurfaceView.Renderer {
private static final String TAG = "MyGLRenderer";
private Square mSquare;
// mMVPMatrix is an abbreviation for "Model View Projection Matrix"
private final float[] mMVPMatrix = new float[16];
private final float[] mProjectionMatrix = new float[16];
private final float[] mViewMatrix = new float[16];
private float mQw = 0;
private float mQx = 0;
private float mQy = 0;
private float mQz = 0;
@Override
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
// Set the background frame color
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
mSquare = new Square();
}
@Override
public void onDrawFrame(GL10 unused) {
float[] scratch = new float[16];
float[] temp = new float[16];
// Draw background color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
Matrix.setLookAtM(mViewMatrix, 0,
0, 0, -0.2f,
0f, 0f, 0f,
0.0f, 1.0f, 0.0f
);
// Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mViewMatrix, 0);
mSquare.draw(mMVPMatrix);
// Draw square
}
@Override
public void onSurfaceChanged(GL10 unused, int width, int height) {
// Adjust the viewport based on geometry changes,
// such as screen rotation
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width / height;
// this projection matrix is applied to object coordinates
// in the onDrawFrame() method
Matrix.frustumM(mProjectionMatrix, 0, -ratio, ratio, -1, 1, 0.2f, 7 );
}
public static int loadShader(int type, String shaderCode){
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
private float[] makeRotateMatrix(){
float [] tempMatrix = {
1.0f - 2.0f*mQy*mQy - 2.0f*mQz*mQz, 2.0f*mQx*mQy - 2.0f*mQz*mQw, 2.0f*mQx*mQz + 2.0f*mQy*mQw, 0.0f,
2.0f*mQx*mQy + 2.0f*mQz*mQw, 1.0f - 2.0f*mQx*mQx - 2.0f*mQz*mQz, 2.0f*mQy*mQz - 2.0f*mQx*mQw, 0.0f,
2.0f*mQx*mQz - 2.0f*mQy*mQw, 2.0f*mQy*mQz + 2.0f*mQx*mQw, 1.0f - 2.0f*mQx*mQx - 2.0f*mQy*mQy, 0.0f,
0.0f, 0.0f, 0.0f, 1.0f
};
return tempMatrix;
}
public void setQuaternion(float x, float y, float z, float w){
mQx=x;
mQy=y;
mQz=z;
mQw=w;
}
public static void checkGlError(String glOperation) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, glOperation + ": glError " + error);
throw new RuntimeException(glOperation + ": glError " + error);
}
}
}
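One common approach is to fold the quaternion-derived rotation into the view matrix before building the MVP matrix; this is only a sketch, and depending on the sensor's coordinate convention the rotation may need to be transposed or inverted:
// Sketch for onDrawFrame(): apply the sensor rotation on top of the base look-at view.
float[] rotatedView = new float[16];
Matrix.multiplyMM(rotatedView, 0, makeRotateMatrix(), 0, mViewMatrix, 0);
// Build the MVP from the rotated view.
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, rotatedView, 0);
mSquare.draw(mMVPMatrix);
If the scene rotates the wrong way, swap the two operands of the first multiplyMM or use the transpose of the rotation matrix.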

Texture not rendering properly in OpenGL ES 2 on Android

I'm having an issue rendering textures in OpenGL ES 2 on Android. The image is being drawn, but by the look of it the texture isn't wrapping correctly.
I have tried all the usual things to fix the issue but nothing has worked.
Here's how one of the images should look:
But here's how they look on the screen:
Ignore the black border that's part of the texture.
Here is my Texture class:
public class HFTexture {
private int width;
private int height;
private int textureId;
private HFGame game;
private String textureFile;
public HFTexture(HFGame game, String textureFile) {
this.game = game;
this.textureFile = textureFile;
//load();
}
public void load() {
int[] texIds = new int[1];
GLES20.glGenTextures(1, texIds, 0);
textureId = texIds[0];
InputStream in;
try {
in = game.getFileManager().getAsset(textureFile);
Bitmap bitmap = BitmapFactory.decodeStream(in);
width = bitmap.getWidth();
height = bitmap.getHeight();
bind();
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
bitmap.recycle();
} catch(IOException ex) {
}
}
public void bind() {
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
}
public void activate(HFShader shader, int texture) {
GLES20.glActiveTexture(texture);
bind();
GLES20.glUniform1i(shader.getHandle("sampler0"), 0);
}
public void delete() {
bind();
int[] textureIds = {textureId};
GLES20.glDeleteTextures(1, textureIds, 0);
}
}
Here is my Vertices class:
public class Vertices {
private FloatBuffer vertexBuffer;
private FloatBuffer normalBuffer;
private FloatBuffer texCoordBuffer;
private ShortBuffer indexBuffer;
private final int VERTEX_COUNT;
private final int VERTEX_STRIDE;
private final int VERTEX_SIZE = 3;
private final int NORMAL_STRIDE;
private final int NORMAL_SIZE = 3;
private final int TEXTURE_COORD_STRIDE;
private final int TEXTURE_COORD_SIZE = 2;
private final int INDEX_COUNT;
public Vertices(float[] vertices, float[] normals, float[] texCoords, short[] indices) {
VERTEX_STRIDE = VERTEX_SIZE * 4;
NORMAL_STRIDE = NORMAL_SIZE * 4;
TEXTURE_COORD_STRIDE = TEXTURE_COORD_SIZE * 4;
VERTEX_COUNT = vertices.length;
INDEX_COUNT = indices.length;
ByteBuffer bb = ByteBuffer.allocateDirect(VERTEX_COUNT * VERTEX_STRIDE);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(vertices);
vertexBuffer.position(0);
bb = ByteBuffer.allocateDirect(normals.length * NORMAL_STRIDE);
bb.order(ByteOrder.nativeOrder());
normalBuffer = bb.asFloatBuffer();
normalBuffer.put(normals);
normalBuffer.position(0);
bb = ByteBuffer.allocateDirect(texCoords.length * TEXTURE_COORD_STRIDE);
bb.order(ByteOrder.nativeOrder());
texCoordBuffer = bb.asFloatBuffer();
texCoordBuffer.put(texCoords);
texCoordBuffer.position(0);
bb = ByteBuffer.allocateDirect(indices.length * 2);
bb.order(ByteOrder.nativeOrder());
indexBuffer = bb.asShortBuffer();
indexBuffer.put(indices);
indexBuffer.position(0);
}
public void bind(HFShader shader) {
int positionHandle = shader.getHandle("position");
int normalHandle = shader.getHandle("normal");
int texCoordHandle = shader.getHandle("texCoord");
GLES20.glEnableVertexAttribArray(positionHandle);
GLES20.glVertexAttribPointer(
positionHandle, VERTEX_SIZE,
GLES20.GL_FLOAT, false,
VERTEX_STRIDE, vertexBuffer);
GLES20.glEnableVertexAttribArray(normalHandle);
GLES20.glVertexAttribPointer(
normalHandle, NORMAL_SIZE,
GLES20.GL_FLOAT, false,
NORMAL_STRIDE, normalBuffer);
GLES20.glEnableVertexAttribArray(texCoordHandle);
GLES20.glVertexAttribPointer(
texCoordHandle, TEXTURE_COORD_SIZE,
GLES20.GL_FLOAT, false,
TEXTURE_COORD_STRIDE, vertexBuffer);
}
public void unbind(HFShader shader) {
int positionHandle = shader.getHandle("position");
int normalHandle = shader.getHandle("normal");
int texCoordHandle = shader.getHandle("texCoord");
GLES20.glDisableVertexAttribArray(positionHandle);
GLES20.glDisableVertexAttribArray(normalHandle);
GLES20.glDisableVertexAttribArray(texCoordHandle);
}
public void draw() {
if(indexBuffer != null) {
GLES20.glDrawElements(GLES20.GL_TRIANGLES, INDEX_COUNT, GLES20.GL_UNSIGNED_SHORT, indexBuffer);
} else {
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, VERTEX_COUNT);
}
}
}
And here is my Vertex data:
float[] verts = {
-(width / 2f), (height / 2f), 0f, // index 0
-(width / 2f), -(height / 2f), 0f, // index 1
(width / 2f), -(height / 2f), 0f, // index 2
(width / 2f), (height / 2f), 0f // index 3
};
float[] norms = {
0.0f, 0.0f, -1.0f,
0.0f, 0.0f, -1.0f,
0.0f, 0.0f, -1.0f,
0.0f, 0.0f, -1.0f
};
float[] texCoords = {
0f, 1f,
0f, 0f,
1f, 0f,
1f, 1f
};
short[] indices = {
0,1,2,2,3,0
};
I've tried adding the clamp to edge texture parameters as well but that didn't seem to help. Have I just put the vertex and texture coords in the wrong order or is there something I'm missing altogether?
You are setting your vertex buffer instead of your texture coord buffer for the texture co-ordinates:
GLES20.glEnableVertexAttribArray(texCoordHandle);
GLES20.glVertexAttribPointer(
texCoordHandle, TEXTURE_COORD_SIZE,
GLES20.GL_FLOAT, false,
TEXTURE_COORD_STRIDE, vertexBuffer); // <-- here
should be:
GLES20.glEnableVertexAttribArray(texCoordHandle);
GLES20.glVertexAttribPointer(
texCoordHandle, TEXTURE_COORD_SIZE,
GLES20.GL_FLOAT, false,
TEXTURE_COORD_STRIDE, texCoordBuffer);

Triangle won't rotate

So I followed this tutorial from Google: http://developer.android.com/resources/tutorials/opengl/opengl-es20.html
The triangle is supposed to rotate in response to touch events, but nothing happens. It also didn't rotate in the earlier step when it was supposed to rotate on its own. Not sure what the problem is. I just copied their code.
The whole project is only two files:
HelloOpenGLES20Activity.java
package com.opengl.es20;
import android.app.Activity;
import android.content.Context;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.view.MotionEvent;
public class HelloOpenGLES20Activity extends Activity {
private GLSurfaceView mGLView;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Create a GLSurfaceView instance and set it
// as the ContentView for this Activity
mGLView = new HelloOpenGLES20SurfaceView(this);
setContentView(mGLView);
}
@Override
protected void onPause() {
super.onPause();
// The following call pauses the rendering thread.
// If your OpenGL application is memory intensive,
// you should consider de-allocating objects that
// consume significant memory here.
mGLView.onPause();
}
@Override
protected void onResume() {
super.onResume();
// The following call resumes a paused rendering thread.
// If you de-allocated graphic objects for onPause()
// this is a good place to re-allocate them.
mGLView.onResume();
}
}
class HelloOpenGLES20SurfaceView extends GLSurfaceView {
private final float TOUCH_SCALE_FACTOR = 180.0f / 320;
private HelloOpenGLES20Renderer mRenderer;
private float mPreviousX;
private float mPreviousY;
public HelloOpenGLES20SurfaceView(Context context){
super(context);
// Create an OpenGL ES 2.0 context.
setEGLContextClientVersion(2);
// set the mRenderer member
mRenderer = new HelloOpenGLES20Renderer();
setRenderer(mRenderer);
// Render the view only when there is a change
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
@Override
public boolean onTouchEvent(MotionEvent e) {
// MotionEvent reports input details from the touch screen
// and other input controls. In this case, you are only
// interested in events where the touch position changed.
float x = e.getX();
float y = e.getY();
switch (e.getAction()) {
case MotionEvent.ACTION_MOVE:
float dx = x - mPreviousX;
float dy = y - mPreviousY;
// reverse direction of rotation above the mid-line
if (y > getHeight() / 2) {
dx = dx * -1 ;
}
// reverse direction of rotation to left of the mid-line
if (x < getWidth() / 2) {
dy = dy * -1 ;
}
mRenderer.mAngle += (dx + dy) * TOUCH_SCALE_FACTOR;
requestRender();
}
mPreviousX = x;
mPreviousY = y;
return true;
}
}
HelloOpenGLES20Renderer.java
package com.opengl.es20;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.os.SystemClock;
public class HelloOpenGLES20Renderer implements GLSurfaceView.Renderer {
private FloatBuffer triangleVB;
private final String vertexShaderCode =
"uniform mat4 uMVPMatrix; \n" +
"attribute vec4 vPosition; \n" +
"void main(){ \n" +
" gl_Position = uMVPMatrix * vPosition; \n" +
"} \n";
private final String fragmentShaderCode =
"precision mediump float; \n" +
"void main(){ \n" +
" gl_FragColor = vec4 (0.63671875, 0.76953125, 0.22265625, 1.0); \n" +
"} \n";
private int mProgram;
private int maPositionHandle;
private int muMVPMatrixHandle;
private float[] mMVPMatrix = new float[16];
private float[] mMMatrix = new float[16];
private float[] mVMatrix = new float[16];
private float[] mProjMatrix = new float[16];
public float mAngle;
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
// Set the background frame color
GLES20.glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
initShapes();
mProgram = GLES20.glCreateProgram(); // create empty OpenGL Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mProgram); // creates OpenGL program executables
// get handle to the vertex shader's vPosition member
maPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
}
public void onDrawFrame(GL10 unused) {
// Redraw background color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
// Add program to OpenGL environment
GLES20.glUseProgram(mProgram);
// Prepare the triangle data
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, 12, triangleVB);
GLES20.glEnableVertexAttribArray(maPositionHandle);
// Create a rotation for the triangle
// long time = SystemClock.uptimeMillis() % 4000L;
// float angle = 0.090f * ((int) time);
Matrix.setRotateM(mMMatrix, 0, mAngle, 0, 0, 1.0f);
Matrix.multiplyMM(mMVPMatrix, 0, mVMatrix, 0, mMMatrix, 0);
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mMVPMatrix, 0);
// Apply a ModelView Projection transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
// Draw the triangle
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 3);
}
public void onSurfaceChanged(GL10 unused, int width, int height) {
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width/height;
Matrix.frustumM(mProjMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
Matrix.setLookAtM(mVMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
}
private void initShapes(){
float triangleCoords[] = {
// X, Y, Z
-0.5f, -0.25f, 0,
0.5f, -0.25f, 0,
0.0f, 0.559016994f, 0
};
// initialize vertex Buffer for triangle
ByteBuffer vbb = ByteBuffer.allocateDirect(
// (# of coordinate values * 4 bytes per float)
triangleCoords.length * 4);
vbb.order(ByteOrder.nativeOrder());// use the device hardware's native byte order
triangleVB = vbb.asFloatBuffer(); // create a floating point buffer from the ByteBuffer
triangleVB.put(triangleCoords); // add the coordinates to the FloatBuffer
triangleVB.position(0); // set the buffer to read the first coordinate
}
private int loadShader(int type, String shaderCode){
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
}
Matrix.setRotateM(mMMatrix, 0, mAngle, 0, 0, 1.0f);
Matrix.multiplyMM(mMVPMatrix, 0, mVMatrix, 0, mMMatrix, 0);
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mMVPMatrix, 0);
// Apply a ModelView Projection transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
I believe this final multiplyMM is your problem. You are resetting your mMVPMatrix to be
mMVPMatrix = mProjMatrix * mVMatrix, and you are losing your model transformation matrix mMMatrix which would apply your rotation. Also, verify that mAngle is non-zero.
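A minimal sketch of the fix along those lines, using a scratch array so that no multiply reads and writes the same matrix:
float[] scratch = new float[16];
Matrix.setRotateM(mMMatrix, 0, mAngle, 0, 0, 1.0f);
Matrix.multiplyMM(scratch, 0, mVMatrix, 0, mMMatrix, 0);      // view * model
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, scratch, 0); // projection * view * model
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);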
