Android OpenGL ES rendering subdivided Mesh - android

I'm trying to render a subdivided mesh with a displacement texture on it and a color texture. To do so I go through every pixel, create a vertex for it, and move that vertex according to a black and white image I have. The problem is that when I render it, I get something that looks a bit like TV snow.
Here's the relevant code:
public Plane(Bitmap image, Bitmap depth)
{
this.image = image; //color image
this.depth = depth; //BW depth image
this.w = image.getWidth();
this.h = image.getHeight();
vertexCoords = vertexArray(); //places vertices in 3d
drawOrder = orderArray(); //sets the draw order
colorCoords = colorArray(); //sets color per vertex
ByteBuffer bb = ByteBuffer.allocateDirect(vertexCoords.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(vertexCoords);
vertexBuffer.position(0);
ByteBuffer dlb = ByteBuffer.allocateDirect(drawOrder.length * 4);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
ByteBuffer cbb = ByteBuffer.allocateDirect(colorCoords.length * 4);
cbb.order(ByteOrder.nativeOrder());
colorBuffer = cbb.asFloatBuffer();
colorBuffer.put(colorCoords);
colorBuffer.position(0);
}
public void draw(GL10 gl) {
// Counter-clockwise winding.
gl.glFrontFace(GL10.GL_CCW);
// Enable face culling.
gl.glEnable(GL10.GL_CULL_FACE);
// What faces to remove with the face culling.
gl.glCullFace(GL10.GL_BACK);
// Enabled the vertices buffer for writing and to be used during
// rendering.
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
// Specifies the location and data format of an array of vertex
// coordinates to use when rendering.
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
// Enable the color array buffer to be used during rendering.
gl.glEnableClientState(GL10.GL_COLOR_ARRAY); // NEW LINE ADDED.
// Point out the where the color buffer is.
gl.glColorPointer(4, GL10.GL_FLOAT, 0, colorBuffer); // NEW LINE ADDED.
gl.glDrawElements(GL10.GL_TRIANGLES, drawOrder.length,
GL10.GL_UNSIGNED_SHORT, drawListBuffer);
// Disable the vertices buffer.
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
// Disable face culling.
gl.glDisable(GL10.GL_CULL_FACE);
gl.glDisableClientState(GL10.GL_COLOR_ARRAY);
}
What can I do to actually view the model, instead of this snow thing? The patterns change if I turn my screen on and off, and they sometimes change randomly. It seems that the colors present in the original bitmap are also present in the snow (the snow color changes with different pictures), so I know I'm doing something right, I just don't know what's wrong here.
EDIT: here's the code for vertexArray()
public float[] vertexArray()
{
int totalPoints = w*h;
float[] arr = new float[totalPoints*3];
int i = 0;
for(int y = 0; y<h; y++)
{
for(int x = 0; x<w; x++)
{
arr[i] = x * 0.01f;
arr[i+1] = y * 0.01f;
arr[i+2] = 1.0f;//getDepth(x,y);
i+=3;
}
}
return arr;
}

Related

Error remaining() < count < needed in GLSurfaceView.Renderer

I am trying to create a triangle in OpenGL ES. But app is crashing for line gl.glDrawElements(GL10.GL_TRIANGLES, pIndex.length, GL10.GL_UNSIGNED_SHORT, pBuff); in below code inside draw method.
public class GLTriangleEx {
private float vertices[] = {
0f, 1f, // point 0
1f, -1f, // point 1
-1f, -1f // point 2
};
private FloatBuffer vertBuff;
private short[] pIndex = {0, 1, 2};
private ShortBuffer pBuff;
public GLTriangleEx() {
ByteBuffer bBuff = ByteBuffer.allocateDirect(vertices.length * 4);
bBuff.order(ByteOrder.nativeOrder());
vertBuff = bBuff.asFloatBuffer();
vertBuff.put(vertices);
vertBuff.position(0);
ByteBuffer pbBuff = ByteBuffer.allocateDirect(pIndex.length * 2);
pbBuff.order(ByteOrder.nativeOrder());
pBuff = pbBuff.asShortBuffer();
pBuff.put(pIndex);
pbBuff.position(0);
}
public void draw(GL10 gl){
gl.glFrontFace(GL10.GL_CW);
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glVertexPointer(2, GL10.GL_FLOAT, 0, vertBuff);
// app crashes here.
gl.glDrawElements(GL10.GL_TRIANGLES, pIndex.length, GL10.GL_UNSIGNED_SHORT, pBuff);
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
}
Crash log cat is
java.lang.ArrayIndexOutOfBoundsException: remaining() < count < needed
at com.google.android.gles_jni.GLImpl.glDrawElements(Native Method)
at com.mobility.opengleslearning.GLTriangleEx.draw(GLTriangleEx.java:45)
at com.mobility.opengleslearning.GLRenderer.onDrawFrame(GLRenderer.java:38)
at android.opengl.GLSurfaceView$GLThread.guardedRun(GLSurfaceView.java:1522)
at android.opengl.GLSurfaceView$GLThread.run(GLSurfaceView.java:1239)
I have checked below links for help but of no use for my case:
Android OpenGL error: "remaining() < needed" and Android 4.4
Beginning to learn OpenGL ES. Drawing quad
You need to rewind the ShortBuffer you use for the indices. In this code:
ByteBuffer pbBuff = ByteBuffer.allocateDirect(pIndex.length * 2);
pbBuff.order(ByteOrder.nativeOrder());
pBuff = pbBuff.asShortBuffer();
pBuff.put(pIndex);
pbBuff.position(0);
you're rewinding pbBuff, which is the underlying ByteBuffer.
asShortBuffer() returns a view buffer that shares the underlying data with the original buffer. From the documentation (emphasis added by me):
A view buffer is simply another buffer whose content is backed by the byte buffer. Changes to the byte buffer's content will be visible in the view buffer, and vice versa; the two buffers' position, limit, and mark values are independent.
So pBuff, which is your view buffer, has its own position. You need to rewind the view buffer, which is the buffer you use later:
pBuff.position(0);

Android OpenGL ES point cloud rendering

I have a float array containing all points in a mesh, and I'm trying to render it as a point cloud, but no matter what, all I get is glitches
Here is the relevant code:
public Plane()
{
...
ByteBuffer bb = ByteBuffer.allocateDirect(vertexCoords.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(vertexCoords);
vertexBuffer.position(0);
...
}
public void draw(GL10 gl)
{
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glPointSize(3);
// Specifies the location and data format of an array of vertex
// coordinates to use when rendering.
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
gl.glDrawArrays(GL10.GL_POINTS, 0, vertexCoords.length / 3);
// Disable the vertices buffer.
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
}
MyRenderer.java:
public void onSurfaceChanged(GL10 gl10, int i, int i2) {
// Sets the current view port to the new size.
gl10.glViewport(0, 0, i, i2);
// Select the projection matrix
gl10.glMatrixMode(GL10.GL_PROJECTION);
// Reset the projection matrix
gl10.glLoadIdentity();
// Calculate the aspect ratio of the window
GLU.gluPerspective(gl10, 45.0f,
(float) i / (float) i2,
0.1f, 100.0f);
// Select the modelview matrix
gl10.glMatrixMode(GL10.GL_MODELVIEW);
// Reset the modelview matrix
gl10.glLoadIdentity();
}
I got the rendering code from another StackOverflow question, but no matter what I do, all I get is what looks like static. For reference, I get the same (or a similar) effect when I try to triangulate it. (using and index buffer).

Colorbuffer does not work properly while applied on a circle

When i was applying color buffer and blending on a solid circle, the color in first 20 degrees does not display properly, I got some sort of color ribbon but that is not what it supposed to be, maybe I have done something wrong in my code?
public class Circle {
boolean circleChecked;
private int points=361;
private float vertices[]={0.0f,0.0f,0.0f};
private float[] fogcolor = {0.2f,0.4f,0.7f,0.9f};
private FloatBuffer vertBuff, textureBuffer;
private FloatBuffer colorBuffer; // Buffer for color-array (NEW)
float texData[] = null;
private float[] colors = { // Colors for the vertices (NEW)
0.7f,0.7f,0.7f,0.5f,
0.7f,0.7f,0.7f,0.5f,
0.7f,0.7f,0.7f,0.5f
};
float theta = 0;
int[] textures = new int[1];
int R=1;
float textCoordArray[] =
{
-R,
(float) (R * (Math.sqrt(2) + 1)),
-R,
-R,
(float) (R * (Math.sqrt(2) + 1)),
-R
};
public Circle(float size, float positionX, float positionY){
vertices = new float[(points)*3];
for(int i=0;i<3;i+=3){
vertices[i]=positionX * size;
vertices[i+1]=positionY *size;
vertices[i+2]=0.51f;
}
for(int i=3;i<(points)*3;i+=3)
{
vertices[i]=((float) ( Math.cos(theta))/3+positionX) * size;
vertices[i+1]=((float) (Math.sin(theta))/3+positionY) *size;
vertices[i+2]=0.5f;
theta += Math.PI / 90;
}
ByteBuffer bBuff=ByteBuffer.allocateDirect(vertices.length*4);
bBuff.order(ByteOrder.nativeOrder());
vertBuff=bBuff.asFloatBuffer();
vertBuff.put(vertices);
vertBuff.position(0);
// Setup color-array buffer. Colors in float. A float has 4 bytes (NEW)
ByteBuffer cbb = ByteBuffer.allocateDirect(colors.length * 4);
cbb.order(ByteOrder.nativeOrder()); // Use native byte order (NEW)
colorBuffer = cbb.asFloatBuffer(); // Convert byte buffer to float (NEW)
colorBuffer.put(colors); // Copy data into buffer (NEW)
colorBuffer.position(0); // Rewind (NEW)
ByteBuffer bBuff2=ByteBuffer.allocateDirect(textCoordArray.length * 4 * 360);
bBuff2.order(ByteOrder.nativeOrder());
textureBuffer=bBuff2.asFloatBuffer();
textureBuffer.put(textCoordArray);
textureBuffer.position(0);
}
public void draw(GL10 gl){
//gl.glDisable(GL10.GL_LIGHTING);
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
//gl.glColor4f(0.8f, 0.8f, 0.8f, 1);
gl.glEnableClientState(GL10.GL_COLOR_ARRAY);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertBuff);
gl.glColorPointer(4, GL10.GL_FLOAT, 0, colorBuffer);
// if(circleChecked){
// gl.glColor4f(0.2f, 0.4f, 0.8f, 1);
//}
//gl.glEnable(GL10.GL_TEXTURE_2D);
gl.glEnable(GL10.GL_BLEND);
gl.glPushMatrix();
gl.glFogf(GL10.GL_FOG_MODE, GL10.GL_LINEAR);
gl.glFogf(GL10.GL_FOG_START, 3.0f);
gl.glFogf(GL10.GL_FOG_END, 5.0f);
float fogColor[] = {1f, 0.0f, 0.5f, 1.0f};
gl.glFogfv(GL10.GL_FOG_COLOR, fogColor, 0);
gl.glFogf(GL10.GL_FOG_DENSITY, 0.9f);
gl.glEnable(GL10.GL_FOG);
gl.glBlendFunc(GL10.GL_SRC_ALPHA, GL10.GL_ONE_MINUS_SRC_ALPHA);
//gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]); //4
//gl.glTexCoordPointer(2, GL10.GL_FLOAT,0, textureBuffer); //5
// gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
gl.glDrawArrays(GL10.GL_TRIANGLE_FAN, 0, points/2);
gl.glDisableClientState(GL10.GL_COLOR_ARRAY); // Disable color-array (NEW)
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
gl.glPopMatrix();
//gl.glDisable(GL10.GL_FOG);
}
}
The problem is in your color array. glDrawArrays gl.glDrawArrays(GL10.GL_TRIANGLE_FAN, 0, points/2); will take values from each buffer you enable its client state "gl.glEnableClientState(GL10.GL_COLOR_ARRAY)". The number of those values equals to the last parameter and in your case points/2 but your color buffer has only 3 values. The result is that only first of your triangles has correct color mapping, all the rest is garbage and the results are unpredictable.
Although this might seem inefficient for your case, you will need to repeat those color parameters in your "for" loop where you set your vertex coordinates and the length of the buffer should be the same as "vertBuffer". And by length I mean number of values, not bytes, where 1 color value consists of 4 float values and 1 position value consists of 3 float values in your case.

Android texture only showing solid color

I am trying to display a single texture on a quad.
I had a working VertexObject, which drew a square(or any geometric object) fine. Now I tried expanding it to handle textures too, and the textures doesn't work. I only see the quad in one solid color.
The coordinate data is in an arrayList:
/*the vertices' coordinates*/
public int coordCount = 0;
/*float array of 3(x,y,z)*/
public ArrayList<Float> coordList = new ArrayList<Float>(coordCount);
/*the coordinates' indexes(if used)*/
/*maximum limit:32767*/
private int orderCount = 0;
private ArrayList<Short> orderList = new ArrayList<Short>(orderCount);
/*textures*/
public boolean textured;
private boolean textureIsReady;
private ArrayList<Float> textureList = new ArrayList<Float>(coordCount);
private Bitmap bitmap; //the image to be displayed
private int textures[]; //the textures' ids
The buffers are initialized in the following function:
/*Drawing is based on the buffers*/
public void refreshBuffers(){
/*Coordinates' List*/
float coords[] = new float[coordList.size()];
for(int i=0;i<coordList.size();i++){
coords[i]= coordList.get(i);
}
// initialize vertex byte buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(
// (number of coordinate values * 4 bytes per float)
coords.length * 4);
// use the device hardware's native byte order
bb.order(ByteOrder.nativeOrder());
// create a floating point buffer from the ByteBuffer
vertexBuffer = bb.asFloatBuffer();
// add the coordinates to the FloatBuffer
vertexBuffer.put(coords);
// set the buffer to read the first coordinate
vertexBuffer.position(0);
/*Index List*/
short order[] = new short[(short)orderList.size()];
for(int i=0;i<order.length;i++){
order[i] = (short) orderList.get(i);
}
// initialize byte buffer for the draw list
ByteBuffer dlb = ByteBuffer.allocateDirect(
// (# of coordinate values * 2 bytes per short)
order.length * 2);
dlb.order(ByteOrder.nativeOrder());
orderBuffer = dlb.asShortBuffer();
orderBuffer.put(order);
orderBuffer.position(0);
/*texture list*/
if(textured){
float textureCoords[] = new float[textureList.size()];
for(int i=0;i<textureList.size();i++){
textureCoords[i] = textureList.get(i);
}
ByteBuffer byteBuf = ByteBuffer.allocateDirect(textureCoords.length * 4);
byteBuf.order(ByteOrder.nativeOrder());
textureBuffer = byteBuf.asFloatBuffer();
textureBuffer.put(textureCoords);
textureBuffer.position(0);
}
}
I load the image into the object with the following code:
public void initTexture(GL10 gl, Bitmap inBitmap){
bitmap = inBitmap;
loadTexture(gl);
textureIsReady = true;
}
/*http://www.jayway.com/2010/12/30/opengl-es-tutorial-for-android-part-vi-textures/*/
public void loadTexture(GL10 gl){
gl.glGenTextures(1, textures, 0);
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
gl.glTexParameterx(GL10.GL_TEXTURE_2D,
GL10.GL_TEXTURE_MAG_FILTER,
GL10.GL_LINEAR);
gl.glTexParameterx(GL10.GL_TEXTURE_2D,
GL10.GL_TEXTURE_MIN_FILTER,
GL10.GL_LINEAR);
gl.glTexParameterx(GL10.GL_TEXTURE_2D,
GL10.GL_TEXTURE_WRAP_S,
GL10.GL_CLAMP_TO_EDGE);
gl.glTexParameterx(GL10.GL_TEXTURE_2D,
GL10.GL_TEXTURE_WRAP_T,
GL10.GL_CLAMP_TO_EDGE);
/*bind bitmap to texture*/
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
}
And the drawing happens based on this code:
public void draw(GL10 gl){
if(textured && textureIsReady){
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
//loadTexture(gl);
gl.glEnable(GL10.GL_TEXTURE_2D);
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0,
vertexBuffer);
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0,
textureBuffer);
}else{
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glColor4f(color[0], color[1], color[2], color[3]);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0,
vertexBuffer);
}
if(!indexed)gl.glDrawArrays(drawMode, 0, coordCount);
else gl.glDrawElements(drawMode, orderCount, GL10.GL_UNSIGNED_SHORT, orderBuffer);
if(textured && textureIsReady){
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
gl.glDisable(GL10.GL_TEXTURE_2D);
}else{
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
}
}
The initialization is as follows:
pic = new VertexObject();
pic.indexed = true;
pic.textured = true;
pic.initTexture(gl,MainActivity.bp);
pic.color[0] = 0.0f;
pic.color[1] = 0.0f;
pic.color[2] = 0.0f;
float inputVertex[] = {2.0f,2.0f,0.0f};
float inputTexture[] = {0.0f,0.0f};
pic.addTexturedVertex(inputVertex,inputTexture);
inputVertex[0] = 2.0f;
inputVertex[1] = 8.0f;
inputTexture[0] = 0.0f;
inputTexture[0] = 1.0f;
pic.addTexturedVertex(inputVertex,inputTexture);
inputVertex[0] = 8.0f;
inputVertex[1] = 8.0f;
inputTexture[0] = 1.0f;
inputTexture[0] = 1.0f;
pic.addTexturedVertex(inputVertex,inputTexture);
inputVertex[0] = 8.0f;
inputVertex[1] = 2.0f;
inputTexture[0] = 1.0f;
inputTexture[0] = 0.0f;
pic.addTexturedVertex(inputVertex,inputTexture);
pic.addIndex((short)0);
pic.addIndex((short)1);
pic.addIndex((short)2);
pic.addIndex((short)0);
pic.addIndex((short)2);
pic.addIndex((short)3);
The coordinates are just simply added to the arrayList, and then I refresh the buffers.
The bitmap is valid, because it is showing up on an imageView.
The image is a png file with the size of 128x128 in the drawable folder.
For what I gathered the image is getting to the vertexObject, but something isn't right with the texture mapping. Any pointers on what am I doing wrong?
Okay, I got it!
I downloaded a working example from the internet and rewrote it, to resemble the object(presented above) step by step. I observed if it works on every step. Turns out, the problem isn't in the graphical part, because the object worked in another context with different coordinates.
Long story short:
I got the texture UV mapping wrong!
That's why I got the solid color, the texture was loaded, but the UV mapping wasn't correct.
Short story long:
At the lines
inputVertex[0] = 2.0f;
inputVertex[1] = 8.0f;
inputTexture[0] = 0.0f;
inputTexture[0] = 1.0f;
The indexing was wrong as only the first element of inputTexture was updated only. There might have been some additional errors regarding the sizes of the different array describing the vertex coordinates, but rewriting on the linked example fixed the problem, and it produced a mroe concise code.

rendering an object from obj file and creating VBO results mix texture or black screen

I want to render an object by parsing an obj file (android platform, opengl es 1.1). Parsing is OK and I have all data (v, vn, vt) and indices correctly stored in separate arrays. When I try to render using glDrawElements the object is drawn fine but texture is mixed. I guess it's because indices of texture coords are different with vertices and opengl only uses vertex Indices.
I tried to make a VBO to not work with indices. But this time when I call gldrawArrays nothing is drawn. (black page).
Please tell me what's the cause of problem and what's the other ways of rendering such objects.
Here is some parts of my code. If you need other parts, tell me.
This is class ObjectArr that holds the matrices and create VBO and draws the object.
public ObjectArr(float[] positions, float[] textureCoords, float[] normal, short[] faceIndices, short[] texturePointer, short[] normalPointer){
texture = textureCoords;
vertexPointer = faceIndices;
normals = normal;
vertex = positions;
ByteBuffer ilbuffer = ByteBuffer.allocate(vertexPointer.length * 8 * 4); // creating interleave
ilbuffer.order(ByteOrder.nativeOrder());
interleave = ilbuffer.asFloatBuffer();
for (int i = 0; i < vertexPointer.length; i++) {
for (int j = 0;j < 3; j++) // positions
interleave.put(vertex[3 * vertexPointer[i] + j]); // three number for position
for (int j = 0;j < 3; j++) // normals
interleave.put(normals[3 * normalPointer[i] + j]); // three number for normal
for (int j = 0; j < 3; j++)
interleave.put(texture[3 * texturePointer[i] + j]); // three number for texture
}
interleave.position(0);
draw (GL10 gl){
GL11 gl11 = (GL11) gl;
int stride = 9 * 4;
gl11.glBindBuffer(GL11.GL_ARRAY_BUFFER, VBO[0]);
gl11.glEnableClientState(GL11.GL_VERTEX_ARRAY);
gl11.glVertexPointer(3, GL11.GL_FLOAT, stride, 0);
gl11.glEnableClientState(GL11.GL_NORMAL_ARRAY);
gl11.glNormalPointer(GL11.GL_FLOAT, stride, 3 * 4); // 4 is float size
gl11.glEnableClientState(GL11.GL_TEXTURE_COORD_ARRAY);
gl11.glTexCoordPointer(3,GL11.GL_FLOAT,stride,(3 + 3) * 4);
gl11.glEnable(GL11.GL_TEXTURE_2D);
gl11.glBindTexture(GL11.GL_TEXTURE_2D, texturesMatrix[0]);
gl11.glDrawArrays(GL11.GL_TRIANGLE_STRIP, 0, vertexPointer.length);
// deactivate arrays with gldisableclient...
gl11.glBindBuffer(GL11.GL_ARRAY_BUFFER, 0);
}
public void createVBO(GL10 gl){ // creates VBO from interleave created before
GL11 gl11 = (GL11) gl;
VBO = new int[1];
gl11.glGenBuffers(1, VBO, 0);
gl11.glBindBuffer(GLES11.GL_ARRAY_BUFFER, VBO[0]);
gl11.glBufferData(GLES11.GL_ARRAY_BUFFER, 9 * 4 * vertexPointer.length, interleave, GLES11.GL_STATIC_DRAW); // 9 is stride
}
public void loadGLTexture(GL10 gl, Context context) { // plane is a simple obj and also png file
GL11 gl11 = (GL11) gl;
// reading bitmap from raw folder
gl11.glGenTextures(1, texturesMatrix, 0); // matrix with integer values for binding
gl11.glBindTexture(GL11.GL_TEXTURE_2D, texturesMatrix[0]);
GLUtils.texImage2D(GL11.GL_TEXTURE_2D, 0, bitmap, 0);
bitmap.recycle();
}
and this is unSurfaceCreated and onDrawFrame method in my renderer.
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
GL11 gl11 = (GL11) gl;
obj.createVBO(gl);
obj.loadGLTexture(gl, this.context);
gl11.glEnable(GL10.GL_TEXTURE_2D);
gl11.glClearColor(0.0f, 0.0f, 0.0f, 1.0f); // Set color's clear-value to black
gl11.glClearDepthf(1.0f); // Set depth's clear-value to farthest
gl11.glEnable(GL10.GL_DEPTH_TEST); // Enables depth-buffer for hidden surface removal
gl11.glDepthFunc(GL10.GL_LEQUAL); // The type of depth testing to do
gl11.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_NICEST); // nice perspective view
gl11.glShadeModel(GL10.GL_SMOOTH); // Enable smooth shading of color
gl11.glDisable(GL10.GL_DITHER); // Disable dithering for better performance
}
public void onDrawFrame(GL10 gl){
GL11 gl11 = (GL11) gl;
// Clear color and depth buffers
gl11.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl11.glLoadIdentity();
gl11.glTranslatef(-8.0f, 0.0f, -16.0f);
//--- Touch
gl11.glTranslatef(X, 0.0f, 0.0f);
gl11.glTranslatef(0.0f, Y, 0.0f);
//--- Touch
gl11.glScalef(0.03f * zoomScale, 0.03f * zoomScale, 0.03f * zoomScale); (NEW)
obj.draw(gl);
}
I'm completely confused and stuck. I appreciate any help. tnx.

Categories

Resources