OpenGL ES 2.0 cuts the screen in landscape mode - android

I'm starting to learn OpenGL, and am using the following site: http://www.learnopengles.com/android-lesson-one-getting-started/
But it seems that I got a problem at this part (works fine in portrait mode):
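/** Store the view matrix. This can be thought of as our camera; it transforms world space into eye space. */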
private float[] mViewMatrix = new float[16];
/** Store the projection matrix. This is used to project the scene onto a 2D viewport. */
private float[] mProjectionMatrix = new float[16];
/** Allocate storage for the final combined matrix. This will be passed into the shader program. */
private float[] mMVPMatrix = new float[16];
/** This will be used to pass in the transformation matrix. */
private int mMVPMatrixHandle;
/** This will be used to pass in model position information. */
private int mPositionHandle;
/** This will be used to pass in model color information. */
private int mColorHandle;
/** How many bytes per float. */
private final int mBytesPerFloat = 4;
/** Stride of one vertex in bytes (7 elements per vertex * 4 bytes per float). */
private final int mStrideBytes = 7 * mBytesPerFloat;
/** Size of the position data in elements. */
private final int mPositionDataSize = 3;
/** Offset of the color data. */
private final int mColorOffset = 3;
/** Size of the color data in elements. */
private final int mColorDataSize = 4;
public void onSurfaceChanged(GL10 gl, int width, int height)
{
GLES20.glViewport(0, 0, width, height);
// Create a new perspective projection matrix. The height will stay the same
// while the width will vary as per aspect ratio.
final float ratio = (float) width / height;
final float left = -ratio;
final float right = ratio;
final float bottom = -1.0f;
final float top = 1.0f;
final float near = 1.0f;
final float far = 10.0f;
System.out.println("Height: " + height);
System.out.println("Width: " + width);
Matrix.frustumM(mProjectionMatrix, 0, left, right, bottom, top, near, far);
}
public void onDrawFrame(GL10 gl)
{
final float eyeX = 0.0f;
final float eyeY = 0.0f;
final float eyeZ = 1.5f;
final float lookY = 0.0f; // Y direction of what the user sees
final float lookZ = -5.0f; // Z direction of what the user sees
// Set our up vector. This is where our head would be pointing were we holding the camera.
final float upX = 0.0f;
final float upY = 1.0f;
final float upZ = 0.0f;
GLES20.glClearColor(red, green, blue, clearcoloralpha);
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, xax, lookY, lookZ, upX, upY, upZ);
// Draw the triangle facing straight on.
for(int i = 0; i < Triangles.size(); i++)
{
Matrix.setIdentityM(Triangles.get(i).getModelMatrix(), 0);
if(Triangles.get(i).Rotate())
{
Triangles.get(i).rotation = (360.0f / 10000.0f) * ((int) Triangles.get(i).last);
Triangles.get(i).last+=20;
//Rotates the matrix by rotation degrees
Matrix.rotateM(Triangles.get(i).getModelMatrix(), 0, Triangles.get(i).rotation, 0.0f, 0.0f, 1.0f);
}
else
Matrix.rotateM(Triangles.get(i).getModelMatrix(), 0, Triangles.get(i).rotation, 0.0f, 0.0f, 1.0f);
drawTriangle(Triangles.get(i).getFloatBuffer(),Triangles.get(i));
}
}
private void drawTriangle(final FloatBuffer aTriangleBuffer, Triangle tri)
{
aTriangleBuffer.position(0);
GLES20.glVertexAttribPointer(mPositionHandle, tri.DataSize, GLES20.GL_FLOAT, false, mStrideBytes, aTriangleBuffer);
GLES20.glEnableVertexAttribArray(mPositionHandle);
aTriangleBuffer.position(3);
GLES20.glVertexAttribPointer(mColorHandle, tri.ColorDataSize, GLES20.GL_FLOAT, false, mStrideBytes, aTriangleBuffer);
GLES20.glEnableVertexAttribArray(mColorHandle);
Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, tri.getModelMatrix(), 0);
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 3);
}
But when I try to move a triangle (to the left or right) in landscape mode, the triangles get "cut off" (the whole triangle is not displayed) when moving them too far to one of the sides. It seems that they are being clipped as if they were outside the screen when they actually are not. As mentioned, it seems to work fine in portrait mode.
Height is 752 and Width 1280 in landscape mode (Galaxy Tab 2).
Does this have something to do with the projection matrix which is set here?
Thanks for any help!

You were right, the problem was you were moving your camera :D
xax should have stayed at 0.0f:
Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, xax, lookY, lookZ, upX, upY, upZ);
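For reference, a minimal sketch of the corrected call, keeping everything else in onDrawFrame as posted:
// Keep the look point on the camera axis instead of moving it with the triangle.
Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, 0.0f, lookY, lookZ, upX, upY, upZ);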

Related

Convert OpenGL 3D point to 2D using GLU.glProject

I have an OpenGL scene with a sphere having a radius of 1, and the camera being at the center of the sphere (it's a 360° picture viewer). The user can rotate the sphere by panning.
Now I need to display 2D pins "attached" to some parts of the picture. To do so, I want to convert the 3D coordinates of my pins into 2D screen coordinates, so that I can add the pin image at those screen coordinates.
I'm using GLU.glProject and the following classes from android-apidemo:
MatrixGrabber
MatrixStack
MatrixTrackingGL
I save the projection matrix in the onSurfaceChanged method and the model-view matrix in the onDraw method (after having drawn my sphere). Then I feed GLU.glProject with them when the user rotates the sphere to update the pins position.
When I pan horizontally, the pins pan correctly, but when I pan vertically, the texture pans "faster" than the pin image (as if the pin were closer to the camera than the sphere).
Here are some relevant parts of my code:
public class CustomRenderer implements GLSurfaceView.Renderer {
MatrixGrabber mMatrixGrabber = new MatrixGrabber();
private float[] mModelView = null;
private float[] mProjection = null;
[...]
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
// Get the sizes:
float side = Math.max(width, height);
int x = (int) (width - side) / 2;
int y = (int) (height - side) / 2;
// Set the viewport:
gl.glViewport(x, y, (int) side, (int) side);
// Set the perspective:
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadIdentity();
GLU.gluPerspective(gl, FIELD_OF_VIEW_Y, 1, Z_NEAR, Z_FAR);
// Grab the projection matrix:
mMatrixGrabber.getCurrentProjection(gl);
mProjection = mMatrixGrabber.mProjection;
// Set to MODELVIEW mode:
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
}
@Override
public void onDrawFrame(GL10 gl) {
// Load the texture if needed:
if(mTextureToLoad != null) {
mSphere.loadGLTexture(gl, mTextureToLoad);
mTextureToLoad = null;
}
// Clear:
gl.glClearColor(0.5f, 0.5f, 0.5f, 0.0f);
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glLoadIdentity();
// Rotate the scene:
gl.glRotatef( (1 - mRotationY + 0.25f) * 360, 1, 0, 0); // 0.25 is used to adjust the texture position
gl.glRotatef( (1 - mRotationX + 0.25f) * 360, 0, 1, 0); // 0.25 is used to adjust the texture position
// Draw the sphere:
mSphere.draw(gl);
// Grab the model-view matrix:
mMatrixGrabber.getCurrentModelView(gl);
mModelView = mMatrixGrabber.mModelView;
}
public float[] getScreenCoords(float x, float y, float z) {
if(mModelView == null || mProjection == null) return null;
float[] result = new float[3];
int[] view = new int[] {0, 0, (int) mSurfaceViewSize.getWidth(), (int) mSurfaceViewSize.getHeight()};
GLU.gluProject(x, y, z,
mModelView, 0,
mProjection, 0,
view, 0,
result, 0);
result[1] = mSurfaceViewSize.getHeight() - result[1];
return result;
}
}
I use the result of the getScreenCoords method to display my pins. The y value is wrong.
What am I doing wrong?
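One thing that might be worth checking (an assumption on my part, not something confirmed above): GLU.gluProject expects the same viewport rectangle that was passed to glViewport, and onSurfaceChanged sets an offset square viewport (x, y, side, side), while getScreenCoords passes {0, 0, width, height}. A sketch of feeding gluProject the actual render viewport, with mViewportX/mViewportY/mViewportSide as hypothetical fields saved when glViewport is called:
// Use the same rectangle that was given to glViewport in onSurfaceChanged.
int[] view = new int[] {mViewportX, mViewportY, (int) mViewportSide, (int) mViewportSide};
GLU.gluProject(x, y, z, mModelView, 0, mProjection, 0, view, 0, result, 0);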

Android API 22 and 23 change causes flattening OpenGL 3d objects

I have written an application which uses OpenGL ES. I was testing it on an emulated and a real Nexus 5, with Android 6 (API 23) on it.
During tests on older Android versions (API 22 and lower) it turned out that my 3D object is missing one dimension.
After starting the app, it looks like this in both cases (the view is set to -z, and the y axis points up here):
but when rotating it, there is a difference in behavior between API 22 (or lower) and 23.
Case API 22 (or lower):
The object is flat: the Z axis of the model seems to be missing; however, lighting is calculated properly (with correct Z values).
Case API 23 (desired one):
All screenshots are from the emulator; I have tested it only on one real device, with API 23 (Nexus 5), and it works there.
Rotation is done by touch events, and handled by code like this:
Matrix.rotateM(mCurrentRotation, 0, mDeltaRotationY, 1.0f, 0.0f, 0.0f);
Matrix.multiplyMM(mModelMatrix, 0, mModelMatrix, 0, mCurrentRotation, 0);
OpenGL version is set in AndroidManifest:
<uses-feature
android:glEsVersion="0x00020000"
android:required="true" />
To me it seems that something has changed in the behavior of the model or view matrix.
EDIT
As requested in comment:
Matrices are created in my renderer, which extends GLSurfaceView.Renderer.
I was following http://www.learnopengles.com/ tutorial to prepare this.
public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
...
final float eyeX = 0.0f;
final float eyeY = 0.0f;
final float eyeZ = 7.0f;
final float lookX = 0.0f;
final float lookY = 0.0f;
final float lookZ = 0.0f;
final float upX = 0.0f;
final float upY = 1.0f;
final float upZ = 0.0f;
Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ);
...
}
public void onSurfaceChanged(GL10 glUnused, int width, int height) {
...
final float ratio = (float) width / height; // cast to avoid integer division
final float left = -ratio;
final float right = ratio;
final float bottom = -1.0f;
final float top = 1.0f;
final float near = 1.0f;
final float far = 20.0f;
Matrix.frustumM(mProjectionMatrix, 0, left, right, bottom, top, near, far);
...
}
public void onDrawFrame(GL10 glUnused) {
...
Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
GLES20.glUniformMatrix4fv(mMVMatrixHandle, 1, false, mMVPMatrix, 0);
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
...
}
I know this issue is old already, but I've faced the same issue today.
So just in case somebody else encounters this, I identified the problem.
Whenever you call Matrix.multiplyMM(float[] result, ...), the 'result' float array must not be the same array as either the left-hand-side matrix or the right-hand-side one.
On Android Marshmallow (and above) this does not seem to be a problem, but on older versions you should call .clone() on the reused matrix and pass the clone as the left/right-hand-side argument instead.
Long story short: in Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0); the second mMVPMatrix needs to be replaced by mMVPMatrix.clone() for this to work properly.
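A minimal sketch of the same idea without per-frame allocations, using a scratch array instead of clone() (mTempMatrix is an extra field assumed here, not part of the original code):
private final float[] mTempMatrix = new float[16];
...
// Model-view into mMVPMatrix, then projection * model-view into the scratch array,
// so that no multiplyMM call writes into one of its own operands.
Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
GLES20.glUniformMatrix4fv(mMVMatrixHandle, 1, false, mMVPMatrix, 0);
Matrix.multiplyMM(mTempMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mTempMatrix, 0);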

Box2d jitter when camera follows

I am developing a Box2D game on Android, and when the OpenGL camera follows the player, the player jitters quite badly. When the camera is stationary, it appears to be fine. I tried Box2D interpolation and that seemed to help slightly. Any suggestions?
public static void setCamera() {
// Position the eye behind the origin.
float eyeX = cameraX;
float eyeY = cameraY;
float eyeZ = cameraZoom;
// We are looking toward the distance
float lookX = cameraX;
float lookY = cameraY;
float lookZ = -5.0f;
// Set our up vector. This is where our head would be pointing were we
// holding the camera.
float upX = 0.0f;
float upY = 1.0f;
float upZ = 0.0f;
// Set the view matrix. This matrix can be said to represent the camera
// position.
// NOTE: In OpenGL 1, a ModelView matrix is used, which is a combination
// of a model and
// view matrix. In OpenGL 2, we can keep track of these matrices
// separately if we choose.
Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY,
lookZ, upX, upY, upZ);
// Matrix.scaleM(mViewMatrix, 0, cameraZoom, cameraZoom, 0f);
// Matrix.orthoM(mProjectionMatrix, 0, left, right, top, bottom, near,
// far);
// Matrix.setLookAtM(mViewMatrix, 0, 1, 0, 1.0f, 1.0f, 0f, 0f, 0f, 1.0f,
// 0.0f);
}
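No fix is posted here, but one common approach, sketched under the assumption that the physics runs on a fixed timestep while rendering does not, is to drive cameraX/cameraY from an interpolated body position rather than the raw one:
// previousX/previousY and currentX/currentY are assumed to be captured around each world.step(),
// and alpha is the fraction of the fixed timestep accumulated since the last step (0..1).
cameraX = previousX + (currentX - previousX) * alpha;
cameraY = previousY + (currentY - previousY) * alpha;
setCamera();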

OpenGL not mapping coordinates correctly to android screen

I'm having a problem mapping OpenGL coordinates to my Android screen properly. For some reason the x-coordinates are not contained within the screen. This is visible when I try to draw a circle in the middle of the screen with a radius of half the screen width. The circle should be just big enough to fit within the x-borders of the screen, but instead it goes beyond the screen border and in fact touches the y-borders of the screen instead. Also, if I tell the circle to be drawn at 0,0 (upper left corner), the center will actually be drawn outside the x-border after converting it to OpenGL coordinates.
Hope that made sense.
Here's my DrawScreen class:
public class DrawScreen implements GLSurfaceView.Renderer {
Ball ball;
public float mAngle;
private int mProgram;
private int maPositionHandle;
private int muMVPMatrixHandle;
private float[] mMVPMatrix = new float[16];
private float[] mVMatrix = new float[16];
private float[] mProjMatrix = new float[16];
private final String vertexShaderCode =
// This matrix member variable provides a hook to manipulate
// the coordinates of the objects that use this vertex shader
"uniform mat4 uMVPMatrix; \n" +
"attribute vec4 vPosition; \n" +
"void main(){ \n" +
// the matrix must be included as a modifier of gl_Position
" gl_Position = uMVPMatrix * vPosition; \n" +
"} \n";
private final String fragmentShaderCode =
"precision mediump float; \n" +
"void main(){ \n" +
" gl_FragColor = vec4 (0.63671875, 0.76953125, 0.22265625, 1.0); \n" +
"} \n";
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
ball = new Ball();
// Set the background frame color
GLES20.glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
ball.initShapes(240, 360, 240);
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
mProgram = GLES20.glCreateProgram(); // create empty OpenGL Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mProgram); // creates OpenGL program executables
// get handle to the vertex shader's vPosition member
maPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
}
public void onDrawFrame(GL10 unused) {
// Redraw background color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
// Add program to OpenGL environment
GLES20.glUseProgram(mProgram);
// Prepare the circle data
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, 12, ball.ballVB);
GLES20.glEnableVertexAttribArray(maPositionHandle);
// Apply a ModelView Projection transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
// Draw the circle
GLES20.glDrawArrays(GLES20.GL_LINE_LOOP, 0, (int) ball.getNumSeg());
}
public void onSurfaceChanged(GL10 unused, int width, int height) {
// Set the OpenGL viewport to the same size as the surface.
GLES20.glViewport(0, 0, width, height);
// Create a new perspective projection matrix. The height will stay the same
// while the width will vary as per aspect ratio.
final float ratio = (float) width / height;
final float left = ratio;
final float right = -ratio;
final float bottom = 1.0f;
final float top = -1.0f;
final float near = 3.0f;
final float far = 7.0f;
//Matrix.frustumM(mProjMatrix, 0, left, right, bottom, top, near, far);
Matrix.orthoM(mProjMatrix, 0, left, right, bottom, top, near, far);
muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
Matrix.setLookAtM(mVMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
}
private int loadShader(int type, String shaderCode){
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
And here's where I convert the screen coordinates to OpenGL coordinates:
float numSegments = 360;
public void initShapes(float tx, float ty, float tr){
cx = (tx / (480 / 2 )) - 1.f;
cy = (ty / (720 / 2 )) - 1.f;
r = (tr / (480 / 2 )) ;
System.out.println(r);
System.out.println(cx);
System.out.println(cy);
float ballCoords[] = new float[(int) (numSegments * 3)];
double theta = (2 * 3.1415926 / numSegments);
float c = (float) Math.cos(theta);//precalculate the sine and cosine
float s = (float) Math.sin(theta);
float t;
float x = r;//we start at angle = 0
float y = 0;
for(int i = 0; i < (numSegments * 3); i = i + 3 ) {
ballCoords[i] = (x + cx);
ballCoords[i + 1] = (y + cy);
ballCoords[i + 2] = (0);
//apply the rotation matrix
t = x;
x = c * x - s * y;
y = s * t + c * y;
}
// initialize vertex Buffer for triangle
ByteBuffer vbb = ByteBuffer.allocateDirect(
// (# of coordinate values * 4 bytes per float)
ballCoords.length * 4);
vbb.order(ByteOrder.nativeOrder());// use the device hardware's native byte order
ballVB = vbb.asFloatBuffer(); // create a floating point buffer from the ByteBuffer
ballVB.put(ballCoords); // add the coordinates to the FloatBuffer
ballVB.position(0); // set the buffer to read the first coordinate
}
Ok, so I solved it... though it doesn't look pretty.
I basically turned all the numbers in onSurfaceChanged on their heads and it looks the way I want now.
This is my new code:
public void onSurfaceChanged(GL10 unused, int width, int height) {
// Set the OpenGL viewport to the same size as the surface.
GLES20.glViewport(0, 0, width, height);
// Create a new perspective projection matrix. The height will stay the same
// while the width will vary as per aspect ratio.
final float ratio = (float) height / width;
final float left = 1;
final float right = -1;
final float bottom = ratio;
final float top = -ratio;
final float near = 3.0f;
final float far = 7.0f;
//Matrix.frustumM(mProjMatrix, 0, left, right, bottom, top, near, far);
Matrix.orthoM(mProjMatrix, 0, left, right, bottom, top, near, far);
muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
Matrix.setLookAtM(mVMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
}
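For what it's worth, a quick worked check of why this helps (assuming the hard-coded 480x720 surface that initShapes targets): initShapes(240, 360, 240) gives cx = 240/240 - 1 = 0, cy = 360/360 - 1 = 0 and r = 240/240 = 1, i.e. a circle of NDC radius 1 centered at the origin. With the original projection the visible y range was [-1, 1] but the visible x range was only [-ratio, ratio] with ratio = width/height ≈ 480/720 ≈ 0.67, so a radius-1 circle exactly touched the top/bottom borders while overshooting the left/right ones. With the flipped setup above, the x range is [-1, 1] and the y range is [-1.5, 1.5], so the same circle now just fits the screen width.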

Android OpenGL ES: Rotation against arbitrary axis?

I have a point cloud that I've rendered with Android OpenGL ES. I can translate it correctly (I think), but when I rotate it, I can't make it work like I want. I want it to rotate about the center of the point cloud (I have this 3D point), but I don't know how to do that.
public void onDrawFrame(GL10 gl) {
// Clears the screen and depth buffer.
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glMatrixMode(GL10.GL_MODELVIEW);
// Replace the current matrix with the identity matrix
gl.glLoadIdentity();
// Translates 4 units into the screen.
GLU.gluLookAt(gl, eyeX, eyeY, eyeZ,
centerX, centerY, centerZ,
upX, upY, upZ);
// rotate
gl.glRotatef(_xAngle, 1f, 0f, 0f);
gl.glRotatef(_yAngle, 0f, 1f, 0f);
gl.glRotatef(_zAngle, 0f, 0f, 1f);
gl.glTranslatef(_xTranslate, _yTranslate, _zTranslate);
// Draw things
ptCloud.draw(gl);
aBox.draw(gl);
}
I change the _translate and _angle variables in response to user interaction, and in turn OpenGL acts upon them. You can see I run the draw routine on my ptCloud right after my perspective is set up. I'll show you that:
public void draw(GL10 gl) {
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_COLOR_ARRAY);
gl.glColorPointer(4, GL10.GL_FLOAT, 0, colorBuffer);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
gl.glPointSize(0.5f);
gl.glDrawArrays(GL10.GL_POINTS, 0, numVertices);
gl.glDisableClientState(GL10.GL_COLOR_ARRAY);
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
}
As well as the create surface method, because I'm not sure if it affects anything:
public void onSurfaceChanged(GL10 gl, int width, int height) {
// Sets the current view port to the new size.
gl.glViewport(0, 0, width, height);
// Select the projection matrix
gl.glMatrixMode(GL10.GL_PROJECTION);
// Reset the projection matrix
gl.glLoadIdentity();
// Calculate the aspect ratio of the window
GLU.gluPerspective(gl, 45.0f, (float) width / (float) height, 0.001f,
1000000.0f);
// Select the modelview matrix
gl.glMatrixMode(GL10.GL_MODELVIEW);
// Reset the modelview matrix
gl.glLoadIdentity();
}
Here are my lookat default variables:
private float eyeX = 20;
private float eyeY = -150;
private float eyeZ = 60;
private float centerX = 0;
private float centerY = 0;
private float centerZ = 0;
private float upX = 0;
private float upY = 0;
private float upZ = 1;
The points have been scaled to be in the range of (0,0,0) to (120,180,38). I also don't know how to find an eye position that will show the whole model given arbitrary maximum point values...
Can anyone guess why it won't rotate how I would expect?
Rotate after you translate!
Transformations are applied in the order you issue them. If you rotate before you translate, the translation is affected by the rotation. If you translate before you rotate, the translation is applied first, so the rotation ends up happening around the center of your object.
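A minimal sketch of that idea applied to the onDrawFrame above, pivoting about the cloud center (cloudCenterX/Y/Z are assumed to hold the precomputed center of the point cloud; they are not in the original code):
gl.glLoadIdentity();
GLU.gluLookAt(gl, eyeX, eyeY, eyeZ, centerX, centerY, centerZ, upX, upY, upZ);
gl.glTranslatef(_xTranslate, _yTranslate, _zTranslate);
// Move the pivot to the origin, rotate, then move it back,
// so the rotation happens around the cloud center rather than the world origin.
gl.glTranslatef(cloudCenterX, cloudCenterY, cloudCenterZ);
gl.glRotatef(_xAngle, 1f, 0f, 0f);
gl.glRotatef(_yAngle, 0f, 1f, 0f);
gl.glRotatef(_zAngle, 0f, 0f, 1f);
gl.glTranslatef(-cloudCenterX, -cloudCenterY, -cloudCenterZ);
ptCloud.draw(gl);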
See these pages for more information:
http://www.3dcodingtutorial.com/Basic-OpenGL-functions/Translate-and-Rotate-functions.html
http://www.swiftless.com/tutorials/opengl/rotation.html
http://www.opengl.org/resources/faq/technical/transformations.htm
http://www.falloutsoftware.com/tutorials/gl/gl5.htm
