OpenGL ES 2.0, Android: strange behaviour of depth buffer

My Android application shows unexpected behaviour on a PowerVR SGX 544MP.
When the render mode is set to RENDERMODE_WHEN_DIRTY, the depth buffer
appears not to work; however, when the mode is set to
RENDERMODE_CONTINUOUSLY, the drawing comes out right:
Wrong result:
Proper result:
The emulator draws correctly in both cases.
The device's default depth buffer is 24 bit; setting it to the same
size as the emulator's (16 bit) does not change the drawing. I also tried
varying the near and far values of the projection matrix, without success.
Only one of my matrices modifies the near plane. That matrix may put bad
data into the depth buffer, so I turn off depth writes before drawing
with it: I call "GLES20.glDepthMask( false )" before the corresponding
"glDrawElements" calls.
OpenGL ES initialisation and working with VBOs are new to me, so perhaps my misunderstanding runs deeper than it seems to me.
I send different matrix values to the uniforms and draw with the same VBOs.
I enable the vertex attributes globally, only once, and never disable them afterwards.
//MyGLSurfaceView
public MyGLSurfaceView(Context context) {
super(context);
setEGLContextClientVersion(2);
// super.setEGLConfigChooser(8,8,8,8,16,0); // same result
mRenderer = new MyGLRenderer(context);
setRenderer(mRenderer);
setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
}
//MyGLRenderer
@Override
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
GLES20.glClearColor(0.1f, 0.2f, 0.3f, 1.0f);
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendEquation(GLES20.GL_FUNC_ADD);
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
GLES20.glDepthRangef(0.f, 1.f);
GLES20.glClearDepthf(1.f);
GLES20.glEnable(GLES20.GL_CULL_FACE);
GLES20.glFrontFace(GLES20.GL_CCW);
GLES20.glDepthFunc(GLES20.GL_LEQUAL);
}
@Override
public void onSurfaceChanged(GL10 unused, int width, int height) {
// Adjust the viewport based on geometry changes,
// such as screen rotation
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width / height;
perspectiveFieldOfViewRH(mProjectionMatrix, 0, 28.4f, ratio, 0.4f, 28.f);
}
@Override
public void onDrawFrame(GL10 unused) {
GLES20.glDepthMask( true );
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
int i,j;
// turn off the writing. Only read
GLES20.glDepthMask( false );
GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ZERO);
GLES20.glUseProgram(prg_shaderCube);
// draw with modified projection matrix:
for (i = 0; i < 4; i++){
for (j = 0; j < 6; j++){
System.arraycopy(arrFacesMatrices[i][j], 0, mModelMatrix, 0, 16);
mModelMatrix[14] = translations[i];
Matrix.multiplyMM(mMirrorFlankWithClippingMVP, 0, mMirrorFlankViewProjectionWithClippingMatrix, 0, mModelMatrix, 0);
GLES20.glUniformMatrix4fv(u_changematrixCube, 1, false, mMirrorFlankWithClippingMVP, 0);
GLES20.glUniformMatrix4fv(u_modelmatrixCube, 1, false, mModelMatrix, 0);
GLES20.glCullFace(GLES20.GL_BACK);
switch(pattern[i][j]){
case 0:
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo[0]);
GLES20.glVertexAttribPointer(attr_position_cube, 3, GLES20.GL_FLOAT, false, STRIDE_IN_FLAT, 0);
GLES20.glVertexAttribPointer(attr_color_cube, 3, GLES20.GL_FLOAT, false, STRIDE_IN_FLAT, 12);
GLES20.glVertexAttribPointer(attr_normal_cube, 3, GLES20.GL_FLOAT, false, STRIDE_IN_FLAT, 24);
GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, ibo[0]);
GLES20.glDrawElements(GLES20.GL_TRIANGLES, capacityFlat1, GLES20.GL_UNSIGNED_SHORT, 0);
break;
case 1:
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo[1]);
....
break;
....
....
}
}
}
// others
GLES20.glDepthMask( true );
for (i = 3; i >= 0; i--){
for (j = 0; j < 6; j++){
System.arraycopy(arrFacesMatrices[i][j], 0, mModelMatrix, 0, 16);
mModelMatrix[14] = translations[i];
Matrix.multiplyMM(mMirrorFlankMVP, 0, mMirrorFlankViewProjectionMatrix, 0, mModelMatrix, 0);
Matrix.multiplyMM(mMirrorDownMVP, 0, mMirrorDownViewProjectionMatrix, 0, mModelMatrix, 0);
Matrix.multiplyMM(mMVP, 0, mViewMatrix, 0, mModelMatrix, 0);
GLES20.glUniformMatrix4fv(u_modelmatrixCube, 1, false, mModelMatrix, 0);
switch(pattern[i][j]){
case 0:
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo[0]);
GLES20.glVertexAttribPointer(attr_position_cube, 3, GLES20.GL_FLOAT, false, STRIDE_IN_FLAT, 0);
GLES20.glVertexAttribPointer(attr_color_cube, 3, GLES20.GL_FLOAT, false, STRIDE_IN_FLAT, 12);
GLES20.glVertexAttribPointer(attr_normal_cube, 3, GLES20.GL_FLOAT, false, STRIDE_IN_FLAT, 24);
GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, ibo[0]);
GLES20.glCullFace(GLES20.GL_FRONT);
GLES20.glUniformMatrix4fv(u_changematrixCube, 1, false, mMirrorFlankMVP, 0);
GLES20.glDrawElements(GLES20.GL_TRIANGLES, capacityFlat1, GLES20.GL_UNSIGNED_SHORT, 0);
GLES20.glUniformMatrix4fv(u_changematrixCube, 1, false, mMirrorDownMVP, 0);
GLES20.glDrawElements(GLES20.GL_TRIANGLES, capacityFlat1, GLES20.GL_UNSIGNED_SHORT, 0);
GLES20.glCullFace(GLES20.GL_BACK);
GLES20.glUniformMatrix4fv(u_changematrixCube, 1, false, mMVP, 0);
GLES20.glDrawElements(GLES20.GL_TRIANGLES, capacityFlat1, GLES20.GL_UNSIGNED_SHORT, 0);
break;
case 1:
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo[1]);
....
break;
....
....
}
}
}
}
I would prefer to work in RENDERMODE_WHEN_DIRTY mode, and I would like to understand what is happening with my depth buffer.

The following is not as conclusive as I normally like answers to be. Particularly, I have no explanation why this would behave differently between RENDERMODE_WHEN_DIRTY and RENDERMODE_CONTINUOUSLY. But there is one point in your question that is worth explaining anyway.
Only one of my matrices modifies the near plane. That matrix may put bad data into the depth buffer.
You'll have to be very careful here. The range between near and far plane gets mapped to the range of the depth buffer. So if you use a standard projection matrix, and change the near plane, this mapping will change.
In other words, say you use a vertex at a given z-value (in eye coordinates) for your rendering while your projection matrix was set up with a near value of near1. Now you set the projection matrix with near value near2, and use a vertex with the same z-value. This vertex will now be mapped to a different depth buffer value. So depending on your projection, the same vertex will be mapped to different depth buffer values. Or a vertex that is farther away from the camera can end up with a smaller (closer) depth buffer value because you changed your projection matrix.
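To make this concrete, here is a small sketch (my own helper, not code from the question) of the normalized-device depth that a standard right-handed perspective projection produces for a given eye-space depth; evaluating it with two different near values for the same zEye shows how the stored depth shifts:
// Sketch: NDC depth under a standard RH perspective projection.
// zEye is the eye-space z of the vertex (negative in front of the camera).
static float ndcDepth(float zEye, float near, float far) {
    return (far + near) / (far - near) + (2f * far * near) / ((far - near) * zEye);
}
// With glDepthRangef(0, 1) the depth buffer stores 0.5f * ndcDepth(zEye, near, far) + 0.5f,
// so the same zEye is written as a different depth value as soon as 'near' changes.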
You could try to compensate for this by setting the depth range accordingly. But even that seems tricky if you use a perspective projection, because the mapping of eye-space depth to depth buffer values is not linear.
If you need to clip away close parts of some of your geometry, you're probably better off keeping the projection matrix unchanged, and clipping explicitly. OpenGL ES does not support arbitrary clip planes, so the easiest approach is to pass the distance to the fragment shader, and discard the clipped fragments there. Or if it's anyway possible, have logic in your app code to avoid rendering the geometry that would be clipped.
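As an illustration only (this is not your shader; the uniform/varying names are invented), a fragment-shader clip in an ES 2.0 program could look roughly like this, with the signed distance to the clip plane computed in the vertex shader and passed down. The projection matrix, and therefore the depth buffer contents, stay the same for every draw call:
// Hypothetical fragment shader source: clip by discarding instead of moving the near plane.
private static final String CLIP_FRAGMENT_SHADER =
        "precision mediump float;                                   \n" +
        "varying float v_clipDistance; // signed distance to the clip plane, from the vertex shader \n" +
        "varying vec4 v_color;                                      \n" +
        "void main() {                                              \n" +
        "    if (v_clipDistance < 0.0) {                            \n" +
        "        discard;   // fragment lies on the clipped side    \n" +
        "    }                                                      \n" +
        "    gl_FragColor = v_color;                                \n" +
        "}                                                          \n";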

Adding a call to glSurfaceView.requestRender() improved performance. My focus on the depth buffer dragged me away from the real cause of the problem.
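For reference, a minimal sketch of that setup (the onTouchEvent handling and the handleTouch method are assumptions, not code from the question): keep RENDERMODE_WHEN_DIRTY and explicitly request a frame whenever the scene actually changes.
// In MyGLSurfaceView: render only on demand.
public MyGLSurfaceView(Context context) {
    super(context);
    setEGLContextClientVersion(2);
    mRenderer = new MyGLRenderer(context);
    setRenderer(mRenderer);
    setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}

@Override
public boolean onTouchEvent(MotionEvent e) {
    mRenderer.handleTouch(e);   // hypothetical: update scene state (use queueEvent for GL-thread work)
    requestRender();            // schedules exactly one onDrawFrame() call
    return true;
}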

Related

OpenGL - rotate own vision?

I'm trying to work on some OpenGL stuff. What I've got up to now is a viewport, in which I'm drawing some imaginary "borders" by using GL_LINES. It looks like this, with setLookAt set as follows:
Matrix.setLookAtM(mViewMatrix, 0, 0, 0, 5, 0, 0, 0, 0, 1, 0);
My frustum is set: Matrix.frustumM(mProjectionMatrix, 0, -2, 2, -2, 2, 1, 11); so I'm positioned somewhere inside the "cube".
Now what I'm trying to achieve is let the user look around. I'm capturing onTouchEvents, passing any movement in x/y direction to the renderer. What I'm doing next is rotating all lines drawn by the specific angle I received from the touch listener.
It then looks like this:
So the cube is not rotated around the viewer or the eye centre, but around some other point that I cannot identify.
My problem is: how can I rotate the object around the viewer's center/position? Do I have to rotate the mViewMatrix which comes from setLookAtM? If yes, simply by using Matrix.setRotateM(mViewMatrix, ...)?
The Line's drawing method looks like this:
public void draw(float[] mViewMatrix, float[] mProjectionMatrix) {
Matrix.multiplyMM(mViewProjectionMatrix, 0, mProjectionMatrix, 0, mViewMatrix, 0);
GLES20.glUseProgram(iProgId);
lineBuffer.position(0);
GLES20.glVertexAttribPointer(iPosition, 3, GLES20.GL_FLOAT, false, 0, lineBuffer);
GLES20.glEnableVertexAttribArray(iPosition);
colorBuffer.position(0);
GLES20.glVertexAttribPointer(iColor, 3, GLES20.GL_FLOAT, false, 0, colorBuffer);
GLES20.glEnableVertexAttribArray(iColor);
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.rotateM(mModelMatrix, 0, rotX, 1, 0, 0);
Matrix.rotateM(mModelMatrix, 0, -rotY, 0, 1, 0);
Matrix.setIdentityM(mViewProjectionMatrix, 0);
Matrix.multiplyMM(mViewProjectionMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
//GLES20.glUniformMatrix4fv(iVPMatrix, 1, false, mMVPMatrix, 0);
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mViewProjectionMatrix, 0);
GLES20.glUniformMatrix4fv(iVPMatrix, 1, false, mMVPMatrix, 0);
//GLES20.glDrawElements(GLES20.GL_LINES, mVertices.length/2, GLES20.GL_UNSIGNED_SHORT, indexBuffer);
GLES20.glDrawArrays(GLES20.GL_LINES, 0, mVertices.length / 2);
}
The look-at function takes three vectors: the eye position, the target position and the up vector. Basically it generates a matrix that moves the scene around so that it is rendered as if you were looking from the eye position towards the target position. In your example the eye is at (0, 0, 5) and looks at (0, 0, 0) (so you are looking down the -z direction), and up is defined as (0, 1, 0) (a higher y value means an object appears nearer the top of the window).
Instead of using those constants, first define a camera position:
float cameraX, cameraY, cameraZ;
It is harder to work with target vectors directly, so instead use an angle that defines the direction you are looking in:
float angle;
and calculate the target vector from this angle:
float targetX = cameraX + cos(angle);
float targetY = cameraY;
float targetZ = cameraZ + sin(angle);
Now, to move the camera around, you modify cameraX/Y/Z. If you want to move forward, move the camera along the direction it is facing. For example, to move 10 units forward:
cameraX += cos(angle)*10;
cameraZ += sin(angle)*10;
You then need to recalculate the target vector, since the target position should move along with the camera.
If you want to move backwards, use the -= operator instead. If you want to strafe sideways, add or subtract PI/2 from the angle in those calculations.
To rotate the camera, just increase or decrease the angle and recalculate the target vector.
This is a very basic camera, and you won't be able to look up or down. For that you need a pitch/yaw camera.
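Putting this together, a rough sketch of such a yaw-only camera in the question's Java/android.opengl.Matrix style (field and method names are mine):
// Yaw-only camera: a position plus a heading angle, rebuilt into a view matrix each frame.
private float cameraX, cameraY, cameraZ;
private float angle;   // heading in radians

private void updateViewMatrix(float[] viewMatrix) {
    float targetX = cameraX + (float) Math.cos(angle);
    float targetY = cameraY;
    float targetZ = cameraZ + (float) Math.sin(angle);
    Matrix.setLookAtM(viewMatrix, 0,
            cameraX, cameraY, cameraZ,   // eye
            targetX, targetY, targetZ,   // target, one unit ahead of the eye
            0f, 1f, 0f);                 // up
}

private void moveForward(float distance) {
    cameraX += (float) Math.cos(angle) * distance;
    cameraZ += (float) Math.sin(angle) * distance;
}

private void turn(float deltaAngle) {
    angle += deltaAngle;   // rotating the camera is just changing the heading
}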

How to draw OpenGL elements more efficiently

I've posted my drawing method which is called each frame.
I change the vertices each frame to move the object (which is basically a sprite/textured quad).
As you can see, I was initially creating an array each frame; I have changed this now, so the array is created once and just updated every frame. However, I'm wondering if I can do anything more to improve the efficiency. (Although I'm getting about 90 fps, the sprite does not move smoothly all the time; every now and then it just pauses for a split second. I can't see the garbage collector running, but I'm guessing it's due to allocation.)
As I add more sprites/quads the jerkiness gets worse, but even at 100+ quads, although the smoothness has all but gone, my frame rate is still around 60 fps, so I can't understand what is slowing this down.
I've also added a screencap from Allocation Tracker
Any help would be appreciated.
public void drawTest(float x, float y, float[] mvpMatrix){
//Convert Co-ordinates
//Left
xPlotLeft = (-MyGLRenderer.ratio)+((x)*MyGLRenderer.coordStepAmountWidth);
//Top
yPlotTop = +1-((y)*MyGLRenderer.coordStepAmountHeight);
//Right
xPlotRight = xPlotLeft+((quadWidth)*MyGLRenderer.coordStepAmountWidth);
//Bottom
yPlotBottom = yPlotTop-((quadHeight)*MyGLRenderer.coordStepAmountHeight);
// Following has been changed as per below. I am now declaring the array initially and just updating it every frame.
// float[] vertices = {
//Top Left
// xPlotLeft,yPlotTop,0, 0,0,
//Top Right
// xPlotRight,yPlotTop,0, 1,0,
//Bottom Left
// xPlotLeft,yPlotBottom,0, 0,1,
//Bottom Right
// xPlotRight,yPlotBottom,0, 1,1
// };
vertices[0]=xPlotLeft;
vertices[1]=yPlotTop;
vertices[2]=0;
vertices[3]=0;
vertices[4]=0;
vertices[5]=xPlotRight;
vertices[6]=yPlotTop;
vertices[7]=0;
vertices[8]=1;
vertices[9]=0;
vertices[10]=xPlotLeft;
vertices[11]=yPlotBottom;
vertices[12]=0;
vertices[13]=0;
vertices[14]=1;
vertices[15]=xPlotRight;
vertices[16]=yPlotBottom;
vertices[17]=0;
vertices[18]=1;
vertices[19]=1;
vertexBuf = ByteBuffer.allocateDirect(vertices.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
vertexBuf.put(vertices).position(0);
//GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
//Bind texture
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texID);
//Use program
GLES20.glUseProgram(iProgId);
// Combine the rotation matrix with the projection and camera view
Matrix.multiplyMM(mvpMatrix2, 0, mvpMatrix, 0, mRotationMatrix, 0);
// get handle to shape's transformation matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(iProgId, "uMVPMatrix");
// Apply the projection and view transformation
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix2, 0);
//Set starting position for vertices (0 for position)
vertexBuf.position(0);
//Specify attributes for vertex
GLES20.glVertexAttribPointer(iPosition, 3, GLES20.GL_FLOAT, false, 5 * 4, vertexBuf);
//Enable attribute for position
GLES20.glEnableVertexAttribArray(iPosition);
//Set starting position for vertices (3 for texture)
vertexBuf.position(3);
//Specify attributes for vertex
GLES20.glVertexAttribPointer(iTexCoords, 2, GLES20.GL_FLOAT, false, 5 * 4, vertexBuf);
//Enable attribute for texture
GLES20.glEnableVertexAttribArray(iTexCoords);
//Enable Alpha blending and set blending function
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ONE_MINUS_SRC_ALPHA);
//Draw
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
//Disable Alpha blending
GLES20.glDisable(GLES20.GL_BLEND);
}
ByteBuffer.allocateDirect() allocates a new buffer in memory every frame; create one buffer up front and overwrite its contents instead. Just use rewind() or position(0) before put().
To improve matters further, use a VBO (vertex buffer object, there are many tutorials online, and several questions on SO on this topic) and glBufferSubData to update the buffer.
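A rough sketch of that combination, reusing the fields from the question (vboIds is a new, assumed field holding the generated buffer name):
// One-time setup (e.g. in onSurfaceCreated): allocate the client buffer and the VBO once.
vertexBuf = ByteBuffer.allocateDirect(vertices.length * 4)
        .order(ByteOrder.nativeOrder()).asFloatBuffer();
vertexBuf.put(vertices).position(0);
GLES20.glGenBuffers(1, vboIds, 0);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboIds[0]);
GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, vertices.length * 4, vertexBuf,
        GLES20.GL_DYNAMIC_DRAW);   // DYNAMIC_DRAW: contents will be updated every frame

// Per frame: refill the same client buffer, upload only the changed bytes, then draw.
vertexBuf.position(0);
vertexBuf.put(vertices).position(0);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboIds[0]);
GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, 0, vertices.length * 4, vertexBuf);
// With a VBO bound, the attribute pointers take byte offsets instead of client buffers
// (enable the attribute arrays as you already do).
GLES20.glVertexAttribPointer(iPosition, 3, GLES20.GL_FLOAT, false, 5 * 4, 0);
GLES20.glVertexAttribPointer(iTexCoords, 2, GLES20.GL_FLOAT, false, 5 * 4, 3 * 4);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);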

Does gles20.glCopyTexImage2D work in android

I was new to OpenGL ES, so I used the sample code "HelloEffect" from Android 4.1 to do some tests. I use the function below to render. If glCopyTexImage2D is not called at the end of renderTexture, I can re-render the texture correctly; but if glCopyTexImage2D is used, then the second time I call renderTexture, GLToolbox.checkGlError("glViewport") throws an exception.
public void renderTexture(int texId, int savetexture) {
if(savetexture == 2)
texId = mCaptureTexture[0];
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
GLES20.glViewport(0, 0, mViewWidth, mViewHeight);
GLToolbox.checkGlError("glViewport");
// Disable blending
GLES20.glDisable(GLES20.GL_BLEND);
// Set the vertex attributes
GLES20.glVertexAttribPointer(mTexCoordHandle, 2, GLES20.GL_FLOAT, false,
0, mTexVertices);
GLES20.glEnableVertexAttribArray(mTexCoordHandle);
GLES20.glVertexAttribPointer(mPosCoordHandle, 2, GLES20.GL_FLOAT, false,
0, mPosVertices);
GLES20.glEnableVertexAttribArray(mPosCoordHandle);
GLToolbox.checkGlError("vertex attribute setup");
// Set the input texture
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLToolbox.checkGlError("glActiveTexture");
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texId);
GLToolbox.checkGlError("glBindTexture");
GLES20.glUniform1i(mTexSamplerHandle, 0);
GLES20.glUniform1i(mEffectTypeHandle, 1);
// Draw
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
if(savetexture == 1)
GLES20.glCopyTexImage2D(mCaptureTexture[0], 0, GLES20.GL_RGBA, 0, 0, mTexWidth, mTexHeight, 0);
return;
}
//main workflow
renderTexture(srctexture, 0);
renderTexture(srctexture, 0);
//+above steps is work normal.
renderTexture(srctexture, 1);
renderTexture(srctexture, 2);
//in the above steps, when renderTexture(srctexture, 2) is called,
//GLToolbox.checkGlError("glViewport") will throw an exception
My purpose is to keep a copy of the framebuffer's colour buffer: I have done some post-processing work on the texId texture, so it will save time if I don't have to repeat that post-processing.
I wonder if I have omitted some critical step above, or whether OpenGL ES 2.0 is just not suited to this kind of work.
Best wishes!
Your usage and interpretation of checkGlError is likely to be incorrect: OpenGL error reporting is stateful; the error flag isn't reset until you actually check it.
Most likely, the line
GLES20.glCopyTexImage2D(...);
is the one setting the error flag. You just don't check (and reset) it until after the next call to glViewport.
...
On to the actual problem: if you could post your exact error message, we could provide further assistance as to what is wrong at that line. It looks like your parameters are specified incorrectly; it should probably be something like
GLES20.glBindTexture( GLES20.GL_TEXTURE_2D, mCaptureTexture[0] );
GLES20.glCopyTexImage2D( GLES20.GL_TEXTURE_2D, ... );
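In other words, something along these lines (a sketch only; mTexWidth/mTexHeight must not exceed the framebuffer size, and the error is checked immediately so you know which call raised it):
// Copy the current framebuffer contents into the capture texture.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mCaptureTexture[0]);
// The first argument is the texture target, not the texture id.
GLES20.glCopyTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA,
        0, 0, mTexWidth, mTexHeight, 0);
int err = GLES20.glGetError();
if (err != GLES20.GL_NO_ERROR) {
    // The error flag is sticky: if you skip this check, the error only surfaces
    // at the next checkGlError() call, e.g. the glViewport check in the next frame.
    Log.e("renderTexture", "glCopyTexImage2D failed: 0x" + Integer.toHexString(err));
}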

Setting Background in OpenGL android

I am new to OpenGL programming. I have made a rotating cube with different images on the different faces of the cube. I want to set a background for the screen. Any help will be appreciated.
Draw a textured quad covering the whole viewport. To do this, switch the projection and modelview to identity and disable depth testing. With projection and modelview being identity, vertex coordinates [-1 … 1] will cover the whole viewport. In code:
glViewport(0, 0, width, height);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
GLfloat tex_quad[16] = {
/* x, y, s, t */
-1, -1, 0, 0,
1, -1, 1, 0,
1, 1, 1, 1,
-1, 1, 0, 1
};
glVertexPointer(2, GL_FLOAT, sizeof(GLfloat)*4, &tex_quad[0]);
glTexCoordPointer(2, GL_FLOAT, sizeof(GLfloat)*4, &tex_quad[2]);
glDisable(GL_DEPTH_TEST);
glDepthMask(GL_FALSE);
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, background_image_texture_ID);
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
glEnable(GL_DEPTH_TEST);
glDepthMask(GL_TRUE);
glDisable(GL_TEXTURE_2D);
In my project, all of the GLSurfaceView creation code looks like this:
glSurfaceView = ...
glSurfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
glSurfaceView.getHolder().setFormat(PixelFormat.TRANSLUCENT);
glSurfaceView.setBackgroundResource(R.drawable.my_background);
glSurfaceView.setZOrderOnTop(true);
glSurfaceView.setRenderer(...);
glSurfaceView.setRenderMode(...);
NOTE: Do not use
_glSurfaceView.setBackgroundDrawable(this.getResources().getDrawable(R.drawable.my_background));
I wasted a few days on it.
And do not call
gl.glClearColor(...)
in
Renderer.onDrawFrame
I think the OP wants to turn his code into an Android live wallpaper.
@Sumit: if I'm right, you should do your due diligence: http://developer.android.com/resources/articles/live-wallpapers.html
If I'm wrong, then please be more precise in your question.

OpenGL ES 1.1 strange lighting problems

I am examining an interesting problem I'm facing with OpenGL lighting on Android. I'm working on a 3D Viewer where you can add and manipulate 3D objects. You can also set a light with different attributes. The problem I was facing with my Viewer was that the highlight on the 3D objects from the light (it is a point light) behaved strangely. If the light source was at the exact same point as the camera, the highlight would move in the opposite direction to what you would expect. (So if you move the object to the left, the highlight moves to the left edge of the object as well, instead of the right, which is what I was expecting.)
So to further narrow the problem down, I've created a small sample application that only renders a square, and then I rotate that square around the camera position (the origin), which is also where the light is placed. This should result in all squares facing the camera directly, so that they would be completely highlighted. The result, though, looked like this:
Can it be that these artifacts appear because of the distortion you get on the border due to the projection?
In the first image the distance between the sphere and the camera is about 20 units and the size of the sphere is about 2. If I move the light closer to the object the highlight looks a lot better, in the way I'm expecting it.
In the second image the radius in which the squares are located is 25 units.
I'm using OpenGL ES 1.1 (since I was struggling to get it to work with shaders in ES 2.0) on Android 3.1
Here is some of the code I'm using:
public void onDrawFrame(GL10 gl) {
// Setting the camera
GLU.gluLookAt(gl, 0, 0, 0, 0f, 0f, -1f, 0f, 1.0f, 0.0f);
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
for (int i = 0; i < 72; i++) {
gl.glPushMatrix();
gl.glRotatef(5f * i, 0, 1, 0);
gl.glTranslatef(0, 0, -25);
draw(gl);
gl.glPopMatrix();
}
}
public void draw(GL10 gl) {
setMaterial(gl);
gl.glEnable(GL10.GL_NORMALIZE);
gl.glEnableClientState(GL10.GL_NORMAL_ARRAY);
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glFrontFace(GL10.GL_CCW);
// Enable the vertex and normal state
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, mVertexBuffer);
gl.glNormalPointer(GL10.GL_FLOAT, 0, mNormalBuffer);
gl.glDrawElements(GL10.GL_TRIANGLES, mIndexBuffer.capacity(), GL10.GL_UNSIGNED_SHORT, mIndexBuffer);
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
gl.glDisableClientState(GL10.GL_NORMAL_ARRAY);
}
// Setting the light
private void drawLights(GL10 gl) {
// Point Light
float[] position = { 0, 0, 0, 1 };
float[] diffuse = { .6f, .6f, .6f, 1f };
float[] specular = { 1, 1, 1, 1 };
float[] ambient = { .2f, .2f, .2f, 1 };
gl.glEnable(GL10.GL_LIGHTING);
gl.glEnable(GL10.GL_LIGHT0);
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
gl.glLightfv(GL10.GL_LIGHT0, GL_POSITION, position, 0);
gl.glLightfv(GL10.GL_LIGHT0, GL_DIFFUSE, diffuse, 0);
gl.glLightfv(GL10.GL_LIGHT0, GL_AMBIENT, ambient, 0);
gl.glLightfv(GL10.GL_LIGHT0, GL_SPECULAR, specular, 0);
}
private void setMaterial(GL10 gl) {
float shininess = 30;
float[] ambient = { 0, 0, .3f, 1 };
float[] diffuse = { 0, 0, .7f, 1 };
float[] specular = { 1, 1, 1, 1 };
gl.glMaterialfv(GL_FRONT_AND_BACK, GL_DIFFUSE, diffuse, 0);
gl.glMaterialfv(GL_FRONT_AND_BACK, GL_AMBIENT, ambient, 0);
gl.glMaterialfv(GL_FRONT_AND_BACK, GL_SPECULAR, specular, 0);
gl.glMaterialf(GL_FRONT_AND_BACK, GL_SHININESS, shininess);
}
I'm setting the light at the beginning, when the activity is started (in onSurfaceCreated), and the material every time I draw a square.
The effect in your second example (with the squares) is rather due to the default non-local viewer that OpenGL uses. By default the eye-space view vector (the vector from the vertex to the camera, used for the specular highlight computation) is just taken to be the (0, 0, 1) vector, instead of the normalized vertex position. This approximation is only correct if the vertex is in the middle of the screen, but it gets more and more incorrect the farther you move towards the boundary of the screen.
To change this and let OpenGL use the real vector from the vertex to the camera, just use the glLightModel function, especially
glLightModeli(GL_LIGHT_MODEL_LOCAL_VIEWER, GL_TRUE);
I'm not sure if this is also the cause for your first problem (with the sphere), but maybe, just try it.
EDIT: It seems you cannot use GL_LIGHT_MODEL_LOCAL_VIEWER in OpenGL ES. In this case there is no way around this problem, except switching to OpenGL ES 2.0 and doing all lighting computations yourself, of course.
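For completeness, here is a rough sketch of what the "local viewer" specular term looks like when you compute lighting yourself in an ES 2.0 fragment shader (the names, and the assumption that position and normal arrive in eye space, are mine, not from the question):
// Hypothetical ES 2.0 fragment shader: per-fragment specular with a true view vector.
private static final String LOCAL_VIEWER_FRAGMENT_SHADER =
        "precision mediump float;                                         \n" +
        "uniform vec3 u_lightPosEye;   // light position in eye space     \n" +
        "uniform float u_shininess;                                       \n" +
        "varying vec3 v_positionEye;   // fragment position in eye space  \n" +
        "varying vec3 v_normalEye;                                        \n" +
        "void main() {                                                    \n" +
        "    vec3 n = normalize(v_normalEye);                             \n" +
        "    vec3 l = normalize(u_lightPosEye - v_positionEye);           \n" +
        "    // Local viewer: the view vector points from the fragment to \n" +
        "    // the eye at the origin, not the fixed (0, 0, 1) direction. \n" +
        "    vec3 v = normalize(-v_positionEye);                          \n" +
        "    vec3 h = normalize(l + v);                                   \n" +
        "    float diff = max(dot(n, l), 0.0);                            \n" +
        "    float spec = pow(max(dot(n, h), 0.0), u_shininess);          \n" +
        "    gl_FragColor = vec4(vec3(0.0, 0.0, 0.7) * diff + vec3(spec), 1.0); \n" +
        "}                                                                \n";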
Your light is probably moving when you're moving your object.
Take a look at this answer http://www.opengl.org/resources/faq/technical/lights.htm#ligh0050
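For what it's worth, glLightfv(GL_POSITION, ...) stores the position transformed by whatever modelview matrix is current at the time of the call, so when you set it matters. A small sketch in the question's GL10 style (the light coordinates are arbitrary example values):
// Light fixed relative to the eye: set the position while the modelview is identity.
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
gl.glLightfv(GL10.GL_LIGHT0, GL10.GL_POSITION, new float[] { 0f, 0f, 0f, 1f }, 0);

// Light fixed in the world: set the position again every frame, after the
// camera/view transform has been applied but before the per-object transforms.
gl.glLoadIdentity();
GLU.gluLookAt(gl, 0f, 0f, 5f, 0f, 0f, 0f, 0f, 1f, 0f);
gl.glLightfv(GL10.GL_LIGHT0, GL10.GL_POSITION, new float[] { 2f, 4f, 3f, 1f }, 0);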
