Finding the right kind of Android sensors and using them - android

I've searched both on Google and here for a solution to my problem, and I don't think it has been asked before (or maybe I'm using the wrong words in my search?).
Anyway, this is what I want to have: an OpenGL surface view (showing a cube, for instance) that rotates according to the orientation of the tablet. So far nothing hard, I guess, and I have the code below that works perfectly well.
public class RotationVectorDemo extends Activity {
private GLSurfaceView mGLSurfaceView;
private SensorManager mSensorManager;
private MyRenderer mRenderer;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Get an instance of the SensorManager
mSensorManager = (SensorManager)getSystemService(SENSOR_SERVICE);
// Create our Preview view and set it as the content of our
// Activity
mRenderer = new MyRenderer();
mGLSurfaceView = new GLSurfaceView(this);
mGLSurfaceView.setRenderer(mRenderer);
setContentView(mGLSurfaceView);
}
#Override
protected void onResume() {
// Ideally a game should implement onResume() and onPause()
// to take appropriate action when the activity looses focus
super.onResume();
mRenderer.start();
mGLSurfaceView.onResume();
}
#Override
protected void onPause() {
// Ideally a game should implement onResume() and onPause()
// to take appropriate action when the activity looses focus
super.onPause();
mRenderer.stop();
mGLSurfaceView.onPause();
}
class MyRenderer implements GLSurfaceView.Renderer, SensorEventListener {
private Cube mCube;
private Sensor mRotationVectorSensor;
private final float[] mRotationMatrix = new float[16];
public MyRenderer() {
// find the rotation-vector sensor
mRotationVectorSensor = mSensorManager.getDefaultSensor(
Sensor.TYPE_ROTATION_VECTOR);
mCube = new Cube();
// initialize the rotation matrix to identity
mRotationMatrix[ 0] = 1;
mRotationMatrix[ 4] = 1;
mRotationMatrix[ 8] = 1;
mRotationMatrix[12] = 1;
}
public void start() {
// enable our sensor when the activity is resumed, ask for
// 10 ms updates.
mSensorManager.registerListener(this, mRotationVectorSensor, 10000);
}
public void stop() {
// make sure to turn our sensor off when the activity is paused
mSensorManager.unregisterListener(this);
}
public void onSensorChanged(SensorEvent event) {
// we received a sensor event. it is a good practice to check
// that we received the proper event
if (event.sensor.getType() == Sensor.TYPE_ROTATION_VECTOR) {
// convert the rotation-vector to a 4x4 matrix. the matrix
// is interpreted by Open GL as the inverse of the
// rotation-vector, which is what we want.
SensorManager.getRotationMatrixFromVector(
mRotationMatrix , event.values);
}
}
public void onDrawFrame(GL10 gl) {
// clear screen
gl.glClear(GL10.GL_COLOR_BUFFER_BIT);
// set-up modelview matrix
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
gl.glTranslatef(0, 0, -3.0f);
gl.glMultMatrixf(mRotationMatrix, 0);
// draw our object
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_COLOR_ARRAY);
mCube.draw(gl);
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
// set view-port
gl.glViewport(0, 0, width, height);
// set projection matrix
float ratio = (float) width / height;
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadIdentity();
gl.glFrustumf(-ratio, ratio, -1, 1, 1, 10);
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
// dither is enabled by default, we don't need it
gl.glDisable(GL10.GL_DITHER);
// clear screen in white
gl.glClearColor(1,1,1,1);
}
class Cube {
// initialize our cube
private FloatBuffer mVertexBuffer;
private FloatBuffer mColorBuffer;
private ByteBuffer mIndexBuffer;
public Cube() {
final float vertices[] = {
-1, -1, -1, 1, -1, -1,
1, 1, -1, -1, 1, -1,
-1, -1, 1, 1, -1, 1,
1, 1, 1, -1, 1, 1,
};
final float colors[] = {
0, 0, 0, 1, 1, 0, 0, 1,
1, 1, 0, 1, 0, 1, 0, 1,
0, 0, 1, 1, 1, 0, 1, 1,
1, 1, 1, 1, 0, 1, 1, 1,
};
final byte indices[] = {
0, 4, 5, 0, 5, 1,
1, 5, 6, 1, 6, 2,
2, 6, 7, 2, 7, 3,
3, 7, 4, 3, 4, 0,
4, 7, 6, 4, 6, 5,
3, 0, 1, 3, 1, 2
};
ByteBuffer vbb = ByteBuffer.allocateDirect(vertices.length*4);
vbb.order(ByteOrder.nativeOrder());
mVertexBuffer = vbb.asFloatBuffer();
mVertexBuffer.put(vertices);
mVertexBuffer.position(0);
ByteBuffer cbb = ByteBuffer.allocateDirect(colors.length*4);
cbb.order(ByteOrder.nativeOrder());
mColorBuffer = cbb.asFloatBuffer();
mColorBuffer.put(colors);
mColorBuffer.position(0);
mIndexBuffer = ByteBuffer.allocateDirect(indices.length);
mIndexBuffer.put(indices);
mIndexBuffer.position(0);
}
public void draw(GL10 gl) {
gl.glEnable(GL10.GL_CULL_FACE);
gl.glFrontFace(GL10.GL_CW);
gl.glShadeModel(GL10.GL_SMOOTH);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, mVertexBuffer);
gl.glColorPointer(4, GL10.GL_FLOAT, 0, mColorBuffer);
gl.glDrawElements(GL10.GL_TRIANGLES, 36, GL10.GL_UNSIGNED_BYTE, mIndexBuffer);
}
}
public void onAccuracyChanged(Sensor sensor, int accuracy) {
}
}
}
However, when I lock the screen, move around and unlock it afterwards, the cube has moved too — which is logical, and I understand perfectly well why. Still, I would like to know whether it is possible to avoid that, for example by somehow resetting the sensors, and how I can do it.
I'm not sure whether I'm using the right kind of sensor at all, or if I should change it, or if this is something that can be solved in code. Bear with me, as I'm just beginning to work with Android sensors.
Basically, this problem is linked to another one I have in a bigger application, but I figured it would be simpler to use this example to try to solve it. In the bigger application the situation is essentially the same, except that to move the cube people can use either their fingers (finger_mode) or the sensors (phone_mode). What I want is to be able to rotate the cube with the fingers without paying attention to the sensors, and, when I switch to sensor mode, for the sensors not to change the orientation of the cube just because they are activated. I'm not sure that's clear; if it's not, let me know.
I'm guessing that, since I use touch to modify the rotation matrix used by OpenGL, there might be some operations on rotation matrices that can solve my problem. Or maybe it's just a sensor problem — maybe both, actually; I have no clue so far, but these are the different approaches I have been trying.
Thanks in advance for helping me figuring this out.
Best,

So the rotation vector is definitely the good kind of sensor to use. Both the accelerometer and the gyroscope won't be of any help for what I want to do.
However I still have to figure out what to do with the rotation matrices that I have now.

Related

OpenGL ES 2 Projection Switching Foregroung and Background

I have an array containing the heights of the vertices of a terrain map.
When I first draw the terrain it looks fine:
But as I rotate it across the z-axis, parts of the shape seem to be projected behind vertices on the back:
90 degree rotation (z-axis):
~180 degree rotation (z-axis):
Besides my implementation of the map, my code is fairly simple:
Vertex Shader:
// Minimal pass-through vertex shader: transforms each vertex by the combined
// MVP "matrix" uniform and forwards the per-vertex color to the fragment
// shader for interpolation.
attribute vec4 position;
attribute vec4 color;
uniform mat4 matrix;
varying vec4 interpolated_color;
void main() {
gl_Position = matrix * position;
interpolated_color = color;
}
Fragment_shader:
// Paints each fragment with the color interpolated across the primitive.
precision mediump float;
varying vec4 interpolated_color;
void main(){
gl_FragColor = interpolated_color;
}
Renderer:
// Renders the terrain map with a 45-degree perspective projection.
// NOTE(review): GL_DEPTH_TEST is never enabled in this version, so triangles
// are drawn in submission order regardless of distance; clearing
// GL_DEPTH_BUFFER_BIT below has no visible effect without the depth test.
// This is the bug diagnosed by the answer further down.
public class MapRenderer implements GLSurfaceView.Renderer {
...
#Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
GLES20.glClearColor(1.0f, 0.0f,0.0f, 1.0f);
map = mapGen.composeMap(); //gets array with vertices heights
mapView = new MapView(context, map, mapGen);
}
#Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
// Match the viewport to the surface and rebuild the projection matrix
// (45-degree vertical FOV, near=1, far=10).
GLES20.glViewport(0, 0, width, height);
float aspect_ratio = (float) width/height;
Matrix.perspectiveM(projectionMatrix, 0, 45, aspect_ratio, 1f, 10f);
}
#Override
public void onDrawFrame(GL10 gl) {
float[] scratch = new float[16];
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
// Model transform: push the map into view, then apply the two
// user-controlled rotations (camera tilt around X, map spin around Z).
Matrix.setIdentityM(modelMatrix, 0);
Matrix.translateM(modelMatrix, 0, 0, 0, -4);
Matrix.rotateM(modelMatrix, 0, -cameraAngle, 1, 0, 0); //cameraAngle initialized at 0 changes with user input
Matrix.rotateM(modelMatrix, 0, mapAngle, 0, 0, 1); //mapAngle initialized at 0 changes with user input
// Combined MVP = projection * model (no separate view matrix is used).
Matrix.multiplyMM(scratch, 0, projectionMatrix, 0, modelMatrix, 0);
mapView.draw(scratch);
}
}
MapView Class:
/**
 * Uploads the combined MVP matrix to the shader and draws the terrain as
 * indexed triangles.
 */
public void draw(float[] mvpMatrix) {
    // Look up the shader's "matrix" uniform and hand it the MVP matrix.
    final int matrixHandle = GLES20.glGetUniformLocation(program, "matrix");
    GLES20.glUniformMatrix4fv(matrixHandle, 1, false, mvpMatrix, 0);
    // nFaces and facesBuffer are class variables; each face contributes
    // three indices to the element buffer.
    GLES20.glDrawElements(GLES20.GL_TRIANGLES, nFaces * 3, GLES20.GL_UNSIGNED_SHORT, facesBuffer);
}
I tried to turn on and off face culling to see if any differences occurred but none did.
Changing the projection matrix also did not seem to have any effects besides changing the angle the error starts to occur. It seems to happen at ~90 degrees and up to ~270 degrees when using Matrix.perspectiveM and exactly at 90 and 270 when using Matrix.orthoM.
I also checked if OpenGL returned any errors by the glGetErrors() method and did not get anything.
My vertices are sorted in the buffer sequentially from the one located at (-1, 1, 0) to the last one located at (1, -1, 0). I don't know if that could cause this issue or, even if that was the case, how I could solve this in OpenGL ES 2 to support rotation accross the z-axis.
Depth Test needs to be enabled in order for OpenGL take distance into consideration.
On the Renderer:
// Fixed version: enabling GL_DEPTH_TEST makes OpenGL take fragment distance
// into account, which resolves the back-faces-showing-through artifact.
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
GLES20.glClearColor(1.0f, 0.0f,0.0f, 1.0f);
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
map = mapGen.composeMap(); //gets array with vertices heights
mapView = new MapView(context, map, mapGen);
}

Frames appearing out of order on GLSurfaceView

I'm writing an Android app using OpenGL ES and encountered this problem in the Nexus 5 emulator that comes with Android Studio. I have reduced my code to this small app, which simply draws a box going back and forth:
package net.jesbus.stuttertest;
import android.app.Activity;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
public class MainActivity extends Activity
{
#Override
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
// Create GLSurfaceView
GLSurfaceView glsv = new GLSurfaceView(this);
glsv.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
// Create GLSurfaceView.Renderer
glsv.setRenderer(new GLSurfaceView.Renderer()
{
float step = 0;
boolean direction = false;
ShortBuffer iBuff;
FloatBuffer vBuff;
#Override
public void onSurfaceCreated(GL10 gl, EGLConfig config)
{
// Generate vertices index buffer
short[] pIndex = {0, 1, 2, 3};
ByteBuffer pbBuff = ByteBuffer.allocateDirect(pIndex.length * 2);
pbBuff.order(ByteOrder.nativeOrder());
iBuff = pbBuff.asShortBuffer();
iBuff.put(pIndex);
iBuff.position(0);
// Generate vertices buffer
float[] vs = new float[]
{
-1, +1, 0,
+1, +1, 0,
-1, -1, 0,
+1, -1, 0,
};
ByteBuffer bBuff = ByteBuffer.allocateDirect(vs.length * 4);
bBuff.order(ByteOrder.nativeOrder());
vBuff = bBuff.asFloatBuffer();
vBuff.put(vs);
vBuff.position(0);
}
#Override
public void onDrawFrame(final GL10 gl)
{
// Animation calculation
step += direction ? 0.02f : -0.02f;
if (step > 1) direction = false;
else if (step < 0) direction = true;
// Set background color
gl.glClearColor(0.7f, 0.7f, 1, 1);
// Clear screen
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
// Set matrix to correct location
gl.glLoadIdentity();
gl.glTranslatef(-1 + step * 2, 0, 0);
gl.glScalef(0.25f, 0.4f, 1);
// Draw box
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glFrontFace(GL10.GL_CW);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vBuff);
gl.glDrawElements(GL10.GL_TRIANGLE_STRIP, 4, GL10.GL_UNSIGNED_SHORT, iBuff);
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
}
#Override
public void onSurfaceChanged(GL10 gl, int width, int height)
{
}
});
setContentView(glsv);
}
}
I looked at it frame by frame, and it seems that instead of showing the next frame, it shows the previous frame, and then skips the frame it was supposed to show and continues:
The circles represent frames produced in onDrawFrame, and the arrows represent the flow of time.
Video showing the problem
I don't exactly know how threading is used in OpenGL, but try this:
// Animation calculation
synchronized (this) {
step += direction ? 0.02f : -0.02f;
if (step > 1) direction = false;
else if (step < 0) direction = true;
}
or make the whole onDrawFrame() method synchronized if the compiler consents and OpenGL doesn't lock up...

OPENGL ES 2.0. Android. Strange behaviour of depth buffer

My Android application show the unexpected behaviour for PowerVR SGX 544MP.
In case render setting to "RENDERMODE_WHEN_DIRTY" it seems that the
depth buffer don't work, however, if the mode set
"RENDERMODE_CONTINUOUSLY" the drawing come to right:
Wrong result:
Proper result:
The emulator draw right in both case.
The default buffer of device is 24 bit, setting the buffer to same
range as emulator (16bit) unchanged drawing. I tried the varying witn
values of Near and Far of projection matrix but it was unsuccessful.
Only one of the my matrices have modification of near plane.The
martix may make bad data in the depth buffer. I turn off the
writing in the depth buffer before drawing using this matrix. In that
case, I sets "GLES20.glDepthMask( false )" before to call
"glDrawElements".
Initialisation of OpenGL ES and working with VBOs are new to me, so perhaps my misunderstanding of the trouble runs deeper than it seems.
I send different matrix values to the uniforms and draw with the same VBOs.
I enable the attributes globally only once and never disable them later.
//MyGLSurfaceView
// Constructor: requests an OpenGL ES 2.0 context and continuous rendering.
public MyGLSurfaceView(Context context) {
super(context);
setEGLContextClientVersion(2);
// super.setEGLConfigChooser(8,8,8,8,16,0); // same result
mRenderer = new MyGLRenderer(context);
setRenderer(mRenderer);
// Must be called after setRenderer(). The author would prefer
// RENDERMODE_WHEN_DIRTY, but that mode exhibits the depth artifact.
setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
}
//MyGLRenderer
#Override
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
// One-time GL state: clear color, blending, depth testing and
// back-face culling (CCW front faces).
GLES20.glClearColor(0.1f, 0.2f, 0.3f, 1.0f);
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendEquation(GLES20.GL_FUNC_ADD);
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
// These two are the GL defaults, stated explicitly.
GLES20.glDepthRangef(0.f, 1.f);
GLES20.glClearDepthf(1.f);
GLES20.glEnable(GLES20.GL_CULL_FACE);
GLES20.glFrontFace(GLES20.GL_CCW);
// LEQUAL so re-draws at exactly equal depth still pass the test.
GLES20.glDepthFunc(GLES20.GL_LEQUAL);
}
#Override
public void onSurfaceChanged(GL10 unused, int width, int height) {
// Adjust the viewport based on geometry changes,
// such as screen rotation
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width / height;
// Project helper (defined elsewhere): builds a right-handed perspective
// projection — presumably FOV 28.4 degrees, near=0.4, far=28; confirm
// against its implementation.
perspectiveFieldOfViewRH(mProjectionMatrix, 0, 28.4f, ratio, 0.4f, 28.f);
}
#Override
// Two-pass frame: pass 1 draws with a clipping projection and the depth
// buffer in read-only mode; pass 2 re-enables depth writes and draws the
// mirrored and direct views of each cube face.
public void onDrawFrame(GL10 unused) {
GLES20.glDepthMask( true );
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
int i,j;
// turn off the writing. Only read
GLES20.glDepthMask( false );
GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ZERO);
GLES20.glUseProgram(prg_shaderCube);
// draw with modified projection matrix:
// NOTE(review): this projection has a different near plane, so the same
// vertices map to different depth-buffer values than in the second pass
// — see the answer below for why this is fragile.
for (i = 0; i < 4; i++){
for (j = 0; j < 6; j++){
System.arraycopy(arrFacesMatrices[i][j], 0, mModelMatrix, 0, 16);
mModelMatrix[14] = translations[i];
Matrix.multiplyMM(mMirrorFlankWithClippingMVP, 0, mMirrorFlankViewProjectionWithClippingMatrix, 0, mModelMatrix, 0);
GLES20.glUniformMatrix4fv(u_changematrixCube, 1, false, mMirrorFlankWithClippingMVP, 0);
GLES20.glUniformMatrix4fv(u_modelmatrixCube, 1, false, mModelMatrix, 0);
GLES20.glCullFace(GLES20.GL_BACK);
switch(pattern[i][j]){
case 0:
// Interleaved VBO layout: position(3f) | color(3f) | normal(3f),
// byte offsets 0 / 12 / 24, stride STRIDE_IN_FLAT.
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo[0]);
GLES20.glVertexAttribPointer(attr_position_cube, 3, GLES20.GL_FLOAT, false, STRIDE_IN_FLAT, 0);
GLES20.glVertexAttribPointer(attr_color_cube, 3, GLES20.GL_FLOAT, false, STRIDE_IN_FLAT, 12);
GLES20.glVertexAttribPointer(attr_normal_cube, 3, GLES20.GL_FLOAT, false, STRIDE_IN_FLAT, 24);
GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, ibo[0]);
GLES20.glDrawElements(GLES20.GL_TRIANGLES, capacityFlat1, GLES20.GL_UNSIGNED_SHORT, 0);
break;
case 1:
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo[1]);
....
break;
....
....
}
}
}
// others
GLES20.glDepthMask( true );
// Second pass: iterate back-to-front and draw each object three times —
// flank mirror (front-face culled), down mirror, then the direct view.
for (i = 3; i >= 0; i--){
for (j = 0; j < 6; j++){
System.arraycopy(arrFacesMatrices[i][j], 0, mModelMatrix, 0, 16);
mModelMatrix[14] = translations[i];
Matrix.multiplyMM(mMirrorFlankMVP, 0, mMirrorFlankViewProjectionMatrix, 0, mModelMatrix, 0);
Matrix.multiplyMM(mMirrorDownMVP, 0, mMirrorDownViewProjectionMatrix, 0, mModelMatrix, 0);
Matrix.multiplyMM(mMVP, 0, mViewMatrix, 0, mModelMatrix, 0);
GLES20.glUniformMatrix4fv(u_modelmatrixCube, 1, false, mModelMatrix, 0);
switch(pattern[i][j]){
case 0:
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo[0]);
GLES20.glVertexAttribPointer(attr_position_cube, 3, GLES20.GL_FLOAT, false, STRIDE_IN_FLAT, 0);
GLES20.glVertexAttribPointer(attr_color_cube, 3, GLES20.GL_FLOAT, false, STRIDE_IN_FLAT, 12);
GLES20.glVertexAttribPointer(attr_normal_cube, 3, GLES20.GL_FLOAT, false, STRIDE_IN_FLAT, 24);
GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, ibo[0]);
GLES20.glCullFace(GLES20.GL_FRONT);
GLES20.glUniformMatrix4fv(u_changematrixCube, 1, false, mMirrorFlankMVP, 0);
GLES20.glDrawElements(GLES20.GL_TRIANGLES, capacityFlat1, GLES20.GL_UNSIGNED_SHORT, 0);
GLES20.glUniformMatrix4fv(u_changematrixCube, 1, false, mMirrorDownMVP, 0);
GLES20.glDrawElements(GLES20.GL_TRIANGLES, capacityFlat1, GLES20.GL_UNSIGNED_SHORT, 0);
GLES20.glCullFace(GLES20.GL_BACK);
GLES20.glUniformMatrix4fv(u_changematrixCube, 1, false, mMVP, 0);
GLES20.glDrawElements(GLES20.GL_TRIANGLES, capacityFlat1, GLES20.GL_UNSIGNED_SHORT, 0);
break;
case 1:
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo[1]);
....
break;
....
....
}
}
}
}
I would prefer to work in RENDERMODE_WHEN_DIRTY mode, and I would like to understand what is happening with my depth buffer.
The following is not as conclusive as I normally like answers to be. Particularly, I have no explanation why this would behave differently between RENDERMODE_WHEN_DIRTY and RENDERMODE_CONTINUOUSLY. But there is one point in your question that is worth explaining anyway.
Only one of the my matrices have modification of near plane. The matrix may make bad data in the depth buffer.
You'll have to be very careful here. The range between near and far plane gets mapped to the range of the depth buffer. So if you use a standard projection matrix, and change the near plane, this mapping will change.
In other words, say you use a vertex at a given z-value (in eye coordinates) for your rendering while your projection matrix was set up with a near value of near1. Now you set the projection matrix with near value near2, and use a vertex with the same z-value. This vertex will now be mapped to a different depth buffer value. So depending on your projection, the same vertex will be mapped to different depth buffer values. Or a vertex that is farther away from the camera can end up with a smaller (closer) depth buffer value because you changed your projection matrix.
You could try to compensate for this by setting the depth range accordingly. But even that seems tricky if you use a perspective projection, because the mapping of eye space depth is to depth buffer values is not linear.
If you need to clip away close parts of some of your geometry, you're probably better off keeping the projection matrix unchanged, and clipping explicitly. OpenGL ES does not support arbitrary clip planes, so the easiest approach is to pass the distance to the fragment shader, and discard the clipped fragments there. Or if it's anyway possible, have logic in your app code to avoid rendering the geometry that would be clipped.
Adding a call to glSurfaceView.requestRender() improved performance. My focus on the depth buffer had dragged me away from the real cause of the problem.

GL Wallpaper example only shows green screen in Emulator, but it's working in device

Do there is any special emulator settings needed to run OpenGL Apps?
I already set "GPU emulation" property to "yes".
I am trying to run an Android sample live wallpaper, using the sample source found from this link, The desired output is a rotating triangle.
After a little effort I got the app running, but it doesn't draw anything in the emulator, although it works when I test on a device — the emulator still just shows a green screen. I found a discussion of it in Google Groups here and tried to set the viewport as suggested there, but it still doesn't show any result; in onSurfaceChanged I had added this line
gl.glViewport(0, 0, width, height);
Do this is the correct way to set view port?
This is my render class,
/**
 * Live-wallpaper renderer that draws a slowly auto-rotating triangle.
 */
public class MyRenderer implements GLWallpaperService.Renderer {
    GLTriangle mTriangle;

    /** Clears the frame and draws the triangle with the current rotation. */
    public void onDrawFrame(GL10 gl) {
        gl.glClearColor(0.2f, 0.4f, 0.2f, 1f);
        gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
        gl.glMatrixMode(GL10.GL_MODELVIEW);
        autoRotate(gl);
        gl.glColor4f(.2f, 0f, .5f, 1f);
        mTriangle.draw(gl);
    }

    public void onSurfaceChanged(GL10 gl, int width, int height) {
        // Cover the full surface and set up a 60-degree perspective
        // projection, then back the camera off along -Z.
        gl.glViewport(0, 0, width, height);
        gl.glMatrixMode(GL10.GL_PROJECTION);
        gl.glLoadIdentity();
        GLU.gluPerspective(gl, 60f, (float) width / (float) height, 1f, 100f);
        gl.glMatrixMode(GL10.GL_MODELVIEW);
        gl.glLoadIdentity();
        gl.glTranslatef(0, 0, -5);
    }

    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        mTriangle = new GLTriangle();
        // Vertex arrays must be enabled or glVertexPointer/glDrawElements
        // silently draw nothing — this missing line was the cause of the
        // green screen reported in the question.
        gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
        gl.glClearDepthf(1f);
        gl.glEnable(GL10.GL_DEPTH_TEST);
        gl.glDepthFunc(GL10.GL_LEQUAL);
    }

    /**
     * Called when the engine is destroyed. Do any necessary clean up because
     * at this point your renderer instance is now done for.
     */
    public void release() {
    }

    // Applies a small incremental rotation every frame (1 degree around Y,
    // 0.5 degrees around X).
    private void autoRotate(GL10 gl) {
        gl.glRotatef(1, 0, 1, 0);
        gl.glRotatef(0.5f, 1, 0, 0);
    }
}
Here is the GLTriangle class:
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import javax.microedition.khronos.opengles.GL10;
// A single triangle (3 vertices, 3 indices) drawn with indexed vertex arrays.
// NOTE(review): draw() requires GL_VERTEX_ARRAY client state to be enabled by
// the caller — see the accepted fix in the surrounding text.
public class GLTriangle {
private FloatBuffer _vertexBuffer;
private final int _nrOfVertices = 3;
private ShortBuffer _indexBuffer;
public GLTriangle() {
init();
}
// Allocates and fills the vertex and index buffers once, at construction.
private void init() {
// We use ByteBuffer.allocateDirect() to get memory outside of
// the normal, garbage collected heap. I think this is done
// because the buffer is subject to native I/O.
// See http://download.oracle.com/javase/1.4.2/docs/api/java/nio/ByteBuffer.html#direct
// 3 is the number of coordinates to each vertex.
// NOTE(review): BufferFactory is a project helper not shown here;
// presumably it allocates direct, native-order buffers — confirm,
// since a wrong byte order would garble the vertex data.
_vertexBuffer = BufferFactory.createFloatBuffer(_nrOfVertices * 3);
_indexBuffer = BufferFactory.createShortBuffer(_nrOfVertices);
// Coordinates for the vertexes of the triangle.
float[] coords = {
-1f, -1f, 0f, // (x1, y1, z1)
1f, -1f, 0f, // (x2, y2, z2)
0f, 1f, 0f // (x3, y3, z3)
};
short[] _indicesArray = {0, 1, 2};
_vertexBuffer.put(coords);
_indexBuffer.put(_indicesArray);
// Rewind both buffers so GL reads from the start.
_vertexBuffer.position(0);
_indexBuffer.position(0);
}
public void draw(GL10 gl) {
// 3 coordinates in each vertex
// 0 is the space between each vertex. They are densely packed
// in the array, so the value is 0
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, getVertexBuffer());
// Draw the primitives, in this case, triangles.
gl.glDrawElements(GL10.GL_TRIANGLES, _nrOfVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
}
private FloatBuffer getVertexBuffer() {
return _vertexBuffer;
}
}
What's going wrong here? Is there a better sample code for Open GL live wallpaper?
AT LAST I FOUND IT..
What I need to do is just add
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
to onSurfaceCreated method along with the code line
gl.glViewport(0, 0, width, height);
in the onSurfaceChanged method in MyRenderer Class
I found a similar question in stack itself [ But Solution worked for me is not marked as correct :( ]

Android Emulator vs phone opengl inconsistensies

I'm having a problem where my application looks right on my emulator, but on my phone it only displays a fragment of my scene.
Images here (the emulator is the one on the right).
My renderer code is seen here. (This class is abstract but all the implementing class is doing is draw the polygons)
/**
 * Base renderer that positions a camera looking at the origin, optionally
 * draws the coordinate axes, and delegates scene drawing to subclasses.
 */
public abstract class AbstractRenderer implements Renderer {
    // Camera position; gluLookAt() below aims it at the origin.
    float x = 0.5f;
    float y = 1f;
    float z = 3;
    boolean displayCoordinateSystem = true;

    public void onSurfaceCreated(GL10 gl, EGLConfig eglConfig) {
        gl.glDisable(GL10.GL_DITHER);
        gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_FASTEST);
        gl.glClearColor(.5f, .5f, .5f, 1);
        gl.glShadeModel(GL10.GL_SMOOTH);
        gl.glEnable(GL10.GL_DEPTH_TEST);
    }

    public void onSurfaceChanged(GL10 gl, int w, int h) {
        gl.glViewport(0, 0, w, h);
        float ratio = (float) w / h;
        gl.glMatrixMode(GL10.GL_PROJECTION);
        gl.glLoadIdentity();
        // The near plane must be > 0: glFrustum with near == 0 raises
        // GL_INVALID_VALUE, so real devices reject the call and render with
        // a broken projection even though some emulators tolerate it. This
        // was the phone-vs-emulator discrepancy described above.
        gl.glFrustumf(-ratio, ratio, -1, 1, 1, 10);
    }

    public void onDrawFrame(GL10 gl) {
        gl.glDisable(GL10.GL_DITHER);
        gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
        // Rebuild the model-view matrix: camera at (x, y, z) looking at the
        // origin with +Y up.
        gl.glMatrixMode(GL10.GL_MODELVIEW);
        gl.glLoadIdentity();
        GLU.gluLookAt(gl, x, y, z, 0f, 0, 0f, 0f, 1f, 0f);
        gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
        if (displayCoordinateSystem) {
            drawCoordinateSystem(gl);
        }
        draw(gl);
        // gl.glFlush();
    }

    // Draws the X, Y and Z axes as three colored line segments.
    // NOTE(review): the buffers are rebuilt every frame; they could be
    // allocated once and cached if this ever shows up in profiling.
    private void drawCoordinateSystem(GL10 gl) {
        ByteBuffer vbb = ByteBuffer.allocateDirect(6 * 3 * 4);
        vbb.order(ByteOrder.nativeOrder());
        FloatBuffer vertices = vbb.asFloatBuffer();
        ByteBuffer ibb = ByteBuffer.allocateDirect(6 * 2);
        ibb.order(ByteOrder.nativeOrder());
        ShortBuffer indexes = ibb.asShortBuffer();
        final float coordLength = 27f;
        // Two endpoints per axis: (-L,0,0)/(L,0,0), (0,-L,0)/(0,L,0),
        // (0,0,-L)/(0,0,L).
        vertices.put(-coordLength);
        vertices.put(0);
        vertices.put(0);
        vertices.put(coordLength);
        vertices.put(0);
        vertices.put(0);
        vertices.put(0);
        vertices.put(-coordLength);
        vertices.put(0);
        vertices.put(0);
        vertices.put(coordLength);
        vertices.put(0);
        vertices.put(0);
        vertices.put(0);
        vertices.put(-coordLength);
        vertices.put(0);
        vertices.put(0);
        vertices.put(coordLength);
        for (int i = 0; i < 6; i++) {
            indexes.put((short) i);
        }
        vertices.position(0);
        indexes.position(0);
        // Draw each axis in its own color by re-positioning the index
        // buffer two entries at a time.
        gl.glColor4f(1, 1, 0, 0.5f);
        gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertices);
        gl.glDrawElements(GL10.GL_LINES, 2, GL10.GL_UNSIGNED_SHORT, indexes);
        indexes.position(2);
        gl.glColor4f(0, 1, 0, 0.5f);
        gl.glDrawElements(GL10.GL_LINES, 2, GL10.GL_UNSIGNED_SHORT, indexes);
        indexes.position(4);
        gl.glColor4f(0, 0, 1, 0.5f);
        gl.glDrawElements(GL10.GL_LINES, 2, GL10.GL_UNSIGNED_SHORT, indexes);
    }

    /** Subclasses draw the actual scene here. */
    protected abstract void draw(GL10 gl);
}
My guess is that I'm not setting some value that is set by default by the emulator implementation. The only thing is, I have no clue as to what that value might be.
Hoping to hear from you dudes and dudettes!
It's a depth buffer problem: From the "notes" section in the man page of glFrustum:
near must never be set to 0.
You should calculate the near value to be as far from the camera as possible, and the far to be as close as possible, while still encompassing the things you want to draw.

Categories

Resources