I want to create an electric lightning effect in OpenGL. For that I wrote this code:
public Blitz()
{
points = new float[STEPS * 3];
// interpolate STEPS vertices (x, y, z) along the segment from (startX, startY) to (endX, endY)
for(int i = 0; i < STEPS * 3; i += 3)
{
points[i] = (float) (startX + (i / 3) * (endX - startX) / STEPS);
points[i + 1] = (float) (startY + (i / 3) * (endY - startY) / STEPS);
points[i + 2] = 0;
}
// ...
vertexBuffer.put(points);
vertexBuffer.position(0);
}
public void update()
{
for(int i = 0; i < STEPS * 3; i += 3)
{
float rnd = random(); // creates rnd float
if(points[i] + rnd < startX + 5 && points[i] + rnd > startX - 5)
{
points[i] += rnd;
}
}
vertexBuffer.clear();
vertexBuffer.put(points);
vertexBuffer.position(0);
}
public void render()
{
texture.bind();
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
gl.glLineWidth(10);
gl.glDrawArrays(GL10.GL_LINE_STRIP, 0, STEPS);
}
Here is the Texture class that I use for my textures:
public class Texture
{
private GL10 gl;
FloatBuffer texCoords;
private int textureID;
public Texture(GL10 gl, Bitmap bmp)
{
this.gl = gl;
int[] TextureIDs = new int[1];
gl.glGenTextures(1, TextureIDs, 0);
textureID = TextureIDs[0];
gl.glBindTexture(GL10.GL_TEXTURE_2D, textureID);
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, GL10.GL_RGBA, bmp, 0);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
ByteBuffer buffer = ByteBuffer.allocateDirect( 4 * 2 * 4 );
buffer.order(ByteOrder.nativeOrder());
texCoords = buffer.asFloatBuffer();
texCoords.put(0);
texCoords.put(0);
texCoords.put(0);
texCoords.put(1);
texCoords.put(1);
texCoords.put(1);
texCoords.put(1);
texCoords.put(0);
texCoords.rewind();
}
public void bind()
{
gl.glEnable(GL10.GL_BLEND);
gl.glBlendFunc(GL10.GL_SRC_ALPHA, GL10.GL_SRC_ALPHA);
gl.glEnable(GL10.GL_TEXTURE_2D);
gl.glBindTexture(GL10.GL_TEXTURE_2D, textureID);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, texCoords);
}
public void dispose()
{
int[] textures = {textureID};
gl.glDeleteTextures(1, textures, 0);
textureID = 0;
}
}
With a plain color everything works great, but when I try to put this texture on the lines I get this result:
Do you have any ideas why? What is my mistake?
Your mistake is assuming that an OpenGL line can draw a texture with one coordinate varying perpendicular to the line's direction. OpenGL simply can't do that, because texture coordinates are vertex attributes and there are only two vertices per line segment. That means any texture coordinate variation is only possible along the line, not perpendicular to it.
The solution is not to draw a line, but a quad or a triangle strip whose vertices are extruded to both sides of the line. This is most easily done with a shader operating in screen-parallel space.
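Since the code above targets the ES 1.x fixed-function pipeline, the extrusion can also be done on the CPU. The following is only a rough sketch of the idea for a 2D, screen-parallel bolt; the names buildStrip and halfWidth are illustrative and not part of the original code:

// Builds a GL_TRIANGLE_STRIP from the polyline in points[]: two vertices per
// point, pushed apart along the local normal so the strip has a real width
// that the second texture coordinate can vary across.
private float[] buildStrip(float[] points, int steps, float halfWidth) {
    float[] strip = new float[steps * 2 * 3]; // two (x, y, z) vertices per point
    for (int i = 0; i < steps; i++) {
        int p = i * 3;
        // average direction using the previous and next point (clamped at the ends)
        int prev = (i > 0) ? p - 3 : p;
        int next = (i < steps - 1) ? p + 3 : p;
        float dx = points[next] - points[prev];
        float dy = points[next + 1] - points[prev + 1];
        float len = (float) Math.sqrt(dx * dx + dy * dy);
        if (len == 0) len = 1;
        // normal perpendicular to the segment, scaled to half the desired thickness
        float nx = -dy / len * halfWidth;
        float ny = dx / len * halfWidth;
        int o = i * 6;
        strip[o] = points[p] + nx;          // left edge of the strip
        strip[o + 1] = points[p + 1] + ny;
        strip[o + 2] = 0;
        strip[o + 3] = points[p] - nx;      // right edge of the strip
        strip[o + 4] = points[p + 1] - ny;
        strip[o + 5] = 0;
    }
    return strip;
}

The matching texture coordinates then run from 0 to 1 along the strip in one component and are fixed at 0 or 1 across it in the other, and the geometry is drawn with GL_TRIANGLE_STRIP and 2 * STEPS vertices instead of GL_LINE_STRIP.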
I am drawing a simple 3D shape. The shape is read from an .obj file and the whole shape has one color.
I want to be able to change the color at runtime. For example, if I tap a button I want to make the structure red.
I've read a lot of tutorials, but I can't figure out how to change the color (do I need to redraw the element?) at runtime. I've tried something like this in onDrawFrame:
gl.glEnableClientState(GL10.GL_COLOR_ARRAY);
gl.glMaterialfv(GL10.GL_FRONT_AND_BACK, GL10.GL_AMBIENT, colorBuffer);
gl.glDisableClientState(GL10.GL_COLOR_ARRAY);
But nothing changes.
Where and how should I implement such an action?
Update:
The draw method:
if(!initialized) {
init(gl);
initialized = true;
}
if(glCameraMatrixBuffer != null) {
glMatrixBuffer.put(glMatrix);
glMatrixBuffer.position(0);
//argDrawMode3D
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
//argDraw3dCamera
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadMatrixf( glCameraMatrixBuffer );
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadMatrixf(glMatrixBuffer);
}
gl.glEnableClientState(GL10.GL_NORMAL_ARRAY);
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_COLOR_ARRAY);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
gl.glMaterialfv(GL10.GL_FRONT_AND_BACK, GL10.GL_AMBIENT, colorBuffer);
for(int i=0; i<parts.size(); i++){
TDModelPart t=parts.get(i);
Material m=t.getMaterial();
if(m!=null){
FloatBuffer a=m.getAmbientColorBuffer();
FloatBuffer d=m.getDiffuseColorBuffer();
FloatBuffer s=m.getSpecularColorBuffer();
gl.glMaterialfv(GL10.GL_FRONT_AND_BACK,GL10.GL_AMBIENT,a);
gl.glMaterialfv(GL10.GL_FRONT_AND_BACK,GL10.GL_SPECULAR,s);
gl.glMaterialfv(GL10.GL_FRONT_AND_BACK,GL10.GL_DIFFUSE,d);
}
gl.glNormalPointer(GL10.GL_FLOAT, 0, t.getNormalBuffer());
gl.glDrawElements(GL10.GL_TRIANGLES,t.getFacesCount(),GL10.GL_UNSIGNED_SHORT,t.getFaceBuffer());
}
gl.glDisableClientState(GL10.GL_COLOR_ARRAY);
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
gl.glDisableClientState(GL10.GL_NORMAL_ARRAY);
public final void onDrawFrame(GL10 gl) {
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
if (DEBUG) {
gl = (GL10) GLDebugHelper.wrap(gl, GLDebugHelper.CONFIG_CHECK_GL_ERROR, log);
}
setupDraw2D(gl);
gl.glDisable(GL10.GL_DEPTH_TEST);
gl.glEnable(GL10.GL_TEXTURE_2D);
gl.glDisable(GL10.GL_LIGHTING);
gl.glBindTexture(GL10.GL_TEXTURE_2D, textureName);
//load new preview frame as a texture, if needed
if (frameEnqueued) {
frameLock.lock();
if (!isTextureInitialized) {
initializeTexture(gl);
} else {
//just update the image
//can we just update a portion(non power of two)?...seems to work
gl.glTexSubImage2D(GL10.GL_TEXTURE_2D, 0, 0, 0, previewFrameWidth, previewFrameHeight,
mode, GL10.GL_UNSIGNED_BYTE, frameData);
}
frameLock.unlock();
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
frameEnqueued = false;
}
gl.glColor4f(1, 1, 1, 1f);
//draw camera preview frame:
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureBuffer);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, squareBuffer);
//draw camera square
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
if (customRenderer != null) {
customRenderer.setupEnv(gl);
} else {
// gl.glEnable(GL10.GL_LIGHTING);
// gl.glLightfv(GL10.GL_LIGHT0, GL10.GL_AMBIENT, ambientLightBuffer);
// gl.glLightfv(GL10.GL_LIGHT0, GL10.GL_DIFFUSE, diffuseLightBuffer);
// gl.glLightfv(GL10.GL_LIGHT0, GL10.GL_SPECULAR, specularLightBuffer);
// gl.glLightfv(GL10.GL_LIGHT0, GL10.GL_POSITION, lightPositionBuffer);
// gl.glEnable(GL10.GL_LIGHT0);
}
matrixGrabber.getCurrentState(gl);
if (performAction) {
Log.d("ZMIANA_KOLORY", "akcja się wola");
int[] c = new int[]{255, 0, 0};
Vector<Float> v = markerInfo.getObjects().get(0).vectors;
ByteBuffer vBuf = ByteBuffer.allocateDirect(v.size() * 4);
vBuf.order(ByteOrder.nativeOrder());
float[] newColor = new float[c.length + 1];
for (int i = 0; i < c.length; i++) {
newColor[i] = (1.0f / 255) * c[i];
newColor[3] = 1.0f;
}
Log.d("ZMIANA_KOLORY", newColor[0] + " " + newColor[1] + " " + newColor[2] + " " + newColor[3] + " ");
FloatBuffer colorBuffer = vBuf.asFloatBuffer();
colorBuffer.put(newColor);
colorBuffer.position(0);
gl.glEnableClientState(GL10.GL_COLOR_ARRAY);
gl.glMaterialfv(GL10.GL_FRONT_AND_BACK, GL10.GL_AMBIENT, colorBuffer);
gl.glDisableClientState(GL10.GL_COLOR_ARRAY);
performAction = false;
// Log.d("PERFORM_ACTION", "wykonuję akcję: " + actionName);
// switch (actionName) {
// case "Highlight":
//
// for(ARObject obj : markerInfo.getObjects()) {
//// if(tmp.get)
//
// }
//
// actionName = null;
// performAction = false;
// break;
// }
}
markerInfo.draw(gl);
if (customRenderer != null) {
customRenderer.draw(gl);
}
if (takeScreenshot) {
Log.d("SCREENSHOT", "Sprawdzam touched");
Ray r = new Ray(gl, screenWidth, screenHeight, x, y);
Log.d("SCREENSHOT", "Near Coord =" + Arrays.toString(r.P0));
Log.d("SCREENSHOT", "Far Coord =" + Arrays.toString(r.P1));
takeScreenshot = false;
}
}
OpenGL is a state-based drawing system. State that's not active/enabled at the very moment you make a drawing call has no effect. You're enabling the color array, setting the pointer, and then immediately disabling it again. For the color vertex array to be taken into account, it must still be enabled at the time of the draw call.
However, you also seem to use a texture there, and on top of that you're using the fixed-function pipeline (FFP). There are a number of interactions between textures, colors and illumination which can all lead to the color not showing up or not turning out as you'd expect.
Do yourself a favour and start using shaders. Understanding the way the FFP works is not hard; however, it grew so many state switches and data paths that it has become very cumbersome to work with. You can either write 40+ lines of code to set up the FFP as you desire, or you can write a single line of shader code to the same effect.
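If you stay on the fixed-function pipeline for now, the key point is that the color/material state has to be in effect while glDrawElements runs. A minimal sketch of that idea, reusing the TDModelPart t from the loop in the question (the redColor array is illustrative, not from the original code):

// Fill a direct FloatBuffer with the desired RGBA material color.
float[] redColor = {1.0f, 0.0f, 0.0f, 1.0f};
FloatBuffer redBuffer = ByteBuffer.allocateDirect(redColor.length * 4)
        .order(ByteOrder.nativeOrder())
        .asFloatBuffer();
redBuffer.put(redColor).position(0);

// Set the material *before* drawing. With lighting enabled, glColor4f alone
// is ignored unless GL_COLOR_MATERIAL is also enabled.
gl.glMaterialfv(GL10.GL_FRONT_AND_BACK, GL10.GL_AMBIENT, redBuffer);
gl.glMaterialfv(GL10.GL_FRONT_AND_BACK, GL10.GL_DIFFUSE, redBuffer);

// Only enable GL_COLOR_ARRAY if a per-vertex color pointer is actually set,
// and keep it enabled until after the draw call.
gl.glDrawElements(GL10.GL_TRIANGLES, t.getFacesCount(),
        GL10.GL_UNSIGNED_SHORT, t.getFaceBuffer());

Note that the per-part loop in the question overwrites the material from each part's Material, so a runtime color change also has to happen after (or instead of) those glMaterialfv calls.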
The following is my Sprite code. The image drawn with this code has a black background (which covers the transparent background) and a color overlay that makes the image look distorted.
How can I correctly draw transparent images in OpenGL ES?
class Ship {
public int life = 5;
public FloatBuffer ShipVertexBuffer;
public FloatBuffer ShipTextureBuffer;
public PieceCluster cluster;
//! TEXTURES
private int[] textures = new int[1];
//! TEXTURES
public float ShipVerticles[] = {
0, 0, // bottom left
0, 30, // top left
30, 0, // bottom right
30, 30 // top right
};
//! TEXTURES
public float ShipTextures[] = {
0.0f, 1.0f,
1.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f
};
//! TEXTURES
public Ship(PieceCluster c) {
//! vertex buffer
cluster = c;
ByteBuffer bb = ByteBuffer.allocateDirect(ShipVerticles.length * 4); // 4 bytes per float
bb.order(ByteOrder.nativeOrder());
ShipVertexBuffer = bb.asFloatBuffer();
ShipVertexBuffer.put(ShipVerticles);
ShipVertexBuffer.position(0);
//! TEXTURES
bb = ByteBuffer.allocateDirect(ShipTextures.length * 4);
bb.order(ByteOrder.nativeOrder());
ShipTextureBuffer = bb.asFloatBuffer();
ShipTextureBuffer.put(ShipTextures);
ShipTextureBuffer.position(0);
//! TEXTURES
}
public void loadGLTexture(GL10 gl) {
// loading texture
// generate one texture pointer
Bitmap bitmap = cluster.Picture;
gl.glGenTextures(1, textures, 0);
// ...and bind it to our array
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
// create nearest filtered texture
// gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
// gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
// Use Android GLUtils to specify a two-dimensional texture image from our bitmap
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_NEAREST);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
// Clean up
bitmap.recycle();
}
public void draw(GL10 gl, float x, float y) {
//! TEXTURE
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
//! TEXTURE
gl.glColor4f(0.0f, 1.0f, 0.0f, 1.0f);
gl.glTranslatef(x, y, 0.0f);
gl.glVertexPointer(2, GL10.GL_FLOAT, 0, ShipVertexBuffer);
//! TEXTURE
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, ShipTextureBuffer);
//! TEXTURE
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
//! TEXTURE
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
//! TEXTURE
}
It doesn't look like you've enabled blending.
Try adding glEnable(GL_BLEND) before your draw call. Use glBlendFunc() and glBlendEquation() to change the blending style to suit the effect you want.
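For standard alpha transparency in ES 1.x, the usual setup looks roughly like the sketch below, placed before the glDrawArrays call in draw(). Note also that the glColor4f(0.0f, 1.0f, 0.0f, 1.0f) call in the sprite code tints the texture green (the default texture environment modulates the texture by the current color), which likely explains the color overlay:

gl.glEnable(GL10.GL_TEXTURE_2D);
gl.glEnable(GL10.GL_BLEND);
// classic "source over" alpha blending for images with an alpha channel
gl.glBlendFunc(GL10.GL_SRC_ALPHA, GL10.GL_ONE_MINUS_SRC_ALPHA);
// white, fully opaque color so the texture keeps its own colors
gl.glColor4f(1.0f, 1.0f, 1.0f, 1.0f);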
I am trying to display a single texture on a quad.
I had a working VertexObject, which drew a square (or any geometric object) fine. Now I have tried expanding it to handle textures too, but the textures don't work. I only see the quad in one solid color.
The coordinate data is in an ArrayList:
/*the vertices' coordinates*/
public int coordCount = 0;
/*float array of 3(x,y,z)*/
public ArrayList<Float> coordList = new ArrayList<Float>(coordCount);
/*the coordinates' indexes(if used)*/
/*maximum limit:32767*/
private int orderCount = 0;
private ArrayList<Short> orderList = new ArrayList<Short>(orderCount);
/*textures*/
public boolean textured;
private boolean textureIsReady;
private ArrayList<Float> textureList = new ArrayList<Float>(coordCount);
private Bitmap bitmap; //the image to be displayed
private int textures[]; //the textures' ids
The buffers are initialized in the following function:
/*Drawing is based on the buffers*/
public void refreshBuffers(){
/*Coordinates' List*/
float coords[] = new float[coordList.size()];
for(int i=0;i<coordList.size();i++){
coords[i]= coordList.get(i);
}
// initialize vertex byte buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(
// (number of coordinate values * 4 bytes per float)
coords.length * 4);
// use the device hardware's native byte order
bb.order(ByteOrder.nativeOrder());
// create a floating point buffer from the ByteBuffer
vertexBuffer = bb.asFloatBuffer();
// add the coordinates to the FloatBuffer
vertexBuffer.put(coords);
// set the buffer to read the first coordinate
vertexBuffer.position(0);
/*Index List*/
short order[] = new short[orderList.size()];
for(int i=0;i<order.length;i++){
order[i] = (short) orderList.get(i);
}
// initialize byte buffer for the draw list
ByteBuffer dlb = ByteBuffer.allocateDirect(
// (# of coordinate values * 2 bytes per short)
order.length * 2);
dlb.order(ByteOrder.nativeOrder());
orderBuffer = dlb.asShortBuffer();
orderBuffer.put(order);
orderBuffer.position(0);
/*texture list*/
if(textured){
float textureCoords[] = new float[textureList.size()];
for(int i=0;i<textureList.size();i++){
textureCoords[i] = textureList.get(i);
}
ByteBuffer byteBuf = ByteBuffer.allocateDirect(textureCoords.length * 4);
byteBuf.order(ByteOrder.nativeOrder());
textureBuffer = byteBuf.asFloatBuffer();
textureBuffer.put(textureCoords);
textureBuffer.position(0);
}
}
I load the image into the object with the following code:
public void initTexture(GL10 gl, Bitmap inBitmap){
bitmap = inBitmap;
loadTexture(gl);
textureIsReady = true;
}
/*http://www.jayway.com/2010/12/30/opengl-es-tutorial-for-android-part-vi-textures/*/
public void loadTexture(GL10 gl){
gl.glGenTextures(1, textures, 0);
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
gl.glTexParameterx(GL10.GL_TEXTURE_2D,
GL10.GL_TEXTURE_MAG_FILTER,
GL10.GL_LINEAR);
gl.glTexParameterx(GL10.GL_TEXTURE_2D,
GL10.GL_TEXTURE_MIN_FILTER,
GL10.GL_LINEAR);
gl.glTexParameterx(GL10.GL_TEXTURE_2D,
GL10.GL_TEXTURE_WRAP_S,
GL10.GL_CLAMP_TO_EDGE);
gl.glTexParameterx(GL10.GL_TEXTURE_2D,
GL10.GL_TEXTURE_WRAP_T,
GL10.GL_CLAMP_TO_EDGE);
/*bind bitmap to texture*/
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
}
And the drawing happens based on this code:
public void draw(GL10 gl){
if(textured && textureIsReady){
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
//loadTexture(gl);
gl.glEnable(GL10.GL_TEXTURE_2D);
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0,
vertexBuffer);
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0,
textureBuffer);
}else{
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glColor4f(color[0], color[1], color[2], color[3]);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0,
vertexBuffer);
}
if(!indexed)gl.glDrawArrays(drawMode, 0, coordCount);
else gl.glDrawElements(drawMode, orderCount, GL10.GL_UNSIGNED_SHORT, orderBuffer);
if(textured && textureIsReady){
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
gl.glDisable(GL10.GL_TEXTURE_2D);
}else{
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
}
}
The initialization is as follows:
pic = new VertexObject();
pic.indexed = true;
pic.textured = true;
pic.initTexture(gl,MainActivity.bp);
pic.color[0] = 0.0f;
pic.color[1] = 0.0f;
pic.color[2] = 0.0f;
float inputVertex[] = {2.0f,2.0f,0.0f};
float inputTexture[] = {0.0f,0.0f};
pic.addTexturedVertex(inputVertex,inputTexture);
inputVertex[0] = 2.0f;
inputVertex[1] = 8.0f;
inputTexture[0] = 0.0f;
inputTexture[0] = 1.0f;
pic.addTexturedVertex(inputVertex,inputTexture);
inputVertex[0] = 8.0f;
inputVertex[1] = 8.0f;
inputTexture[0] = 1.0f;
inputTexture[0] = 1.0f;
pic.addTexturedVertex(inputVertex,inputTexture);
inputVertex[0] = 8.0f;
inputVertex[1] = 2.0f;
inputTexture[0] = 1.0f;
inputTexture[0] = 0.0f;
pic.addTexturedVertex(inputVertex,inputTexture);
pic.addIndex((short)0);
pic.addIndex((short)1);
pic.addIndex((short)2);
pic.addIndex((short)0);
pic.addIndex((short)2);
pic.addIndex((short)3);
The coordinates are simply added to the ArrayList, and then I refresh the buffers.
The bitmap is valid, because it shows up in an ImageView.
The image is a 128x128 png file in the drawable folder.
From what I gather, the image is getting to the VertexObject, but something isn't right with the texture mapping. Any pointers on what I am doing wrong?
Okay, I got it!
I downloaded a working example from the internet and rewrote it step by step to resemble the object presented above, checking at each step whether it still worked. It turns out the problem isn't in the graphical part, because the object worked in another context with different coordinates.
Long story short:
I got the texture UV mapping wrong!
That's why I got the solid color: the texture was loaded, but the UV mapping wasn't correct.
Short story long:
At the lines
inputVertex[0] = 2.0f;
inputVertex[1] = 8.0f;
inputTexture[0] = 0.0f;
inputTexture[0] = 1.0f;
The indexing was wrong, as only the first element of inputTexture was ever updated. There may have been some additional errors regarding the sizes of the different arrays describing the vertex coordinates, but rewriting it along the lines of the linked example fixed the problem and produced more concise code.
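For reference, a corrected version of the quoted block updates both components of the texture coordinate (using the same addTexturedVertex call as in the question):

inputVertex[0] = 2.0f;
inputVertex[1] = 8.0f;
inputTexture[0] = 0.0f;
inputTexture[1] = 1.0f; // index 1, so the V component is actually set this time
pic.addTexturedVertex(inputVertex, inputTexture);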
I am facing problems loading a texture onto a circle. My circle is made with a triangle fan, and it gives a bad output.
Original Image:
The result:
My code:
public class MyOpenGLCircle {
private int points=360;
private float vertices[]={0.0f,0.0f,0.0f};
private FloatBuffer vertBuff, textureBuffer;
float texData[] = null;
float theta = 0;
int[] textures = new int[1];
int R=1;
float textCoordArray[] =
{
-R, (float) (R * (Math.sqrt(2) + 1)),
-R, -R,
(float) (R * (Math.sqrt(2) + 1)), -R
};
public MyOpenGLCircle(){
vertices = new float[(points+1)*3];
for(int i=0;i<(points)*3;i+=3)
{
//radius is 1/3
vertices[i]=(float) ( Math.cos(theta))/3;
vertices[i+1]=(float) (Math.sin(theta))/3;
vertices[i+2]=0;
theta += Math.PI / 90;
}
ByteBuffer bBuff=ByteBuffer.allocateDirect(vertices.length*4);
bBuff.order(ByteOrder.nativeOrder());
vertBuff=bBuff.asFloatBuffer();
vertBuff.put(vertices);
vertBuff.position(0);
ByteBuffer bBuff2=ByteBuffer.allocateDirect(textCoordArray.length * 4 * 360);
bBuff2.order(ByteOrder.nativeOrder());
textureBuffer=bBuff2.asFloatBuffer();
textureBuffer.put(textCoordArray);
textureBuffer.position(0);
}
public void draw(GL10 gl){
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertBuff);
gl.glEnable(GL10.GL_TEXTURE_2D);
gl.glEnable(GL10.GL_BLEND);
gl.glBlendFunc(GL10.GL_SRC_ALPHA, GL10.GL_ONE_MINUS_SRC_ALPHA);
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]); //4
gl.glTexCoordPointer(2, GL10.GL_FLOAT,0, textureBuffer); //5
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
gl.glDrawArrays(GL10.GL_TRIANGLE_FAN, 0, points/2);
}
public void loadBallTexture(GL10 gl, Context context, int resource){
Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), resource);
gl.glGenTextures(1, textures, 0);
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
bitmap.recycle();
}
}
Please help me through this.
For starters, you need to have the same number of texcoord pairs in your texcoord array as you have vertex tuples in your vertex array.
It looks like you've just got 3 pairs of texture coordinates, and 360 vertices.
You need to have a texcoord array that has 360 texture coordinates in it. Then when the vertices are drawn, vertex[0] gets texcoord[0], vertex[1] gets paired with texcoord[1], etc.
===EDIT===
You just have to define the texture coordinates in a similar manner to how you define your vertices: in a loop using mathematical formulas.
So for example, your first vertex of the triangle fan is at the center of the circle. For the center of your circle, you want the texcoord to reference the center of the texture, which is coordinate (0.5, 0.5).
As you go around the edges, just think about which texture coordinate maps to that part of the circle. So let's assume that your next vertex is the rightmost vertex of the circle, which lies along the same y value as the center of the circle. The texcoord for this one would be (1.0, 0.5), or the right edge of the texture in the vertical middle.
The top vertex of the circle would have texcoord (0.5, 1.0), the leftmost vertex would be (0.0, 0.5), etc.
You can use the same trigonometry to fill in the texture coordinates for the rest of the vertices.
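Concretely, the texture coordinates can be generated in the same loop that builds the circle, one (u, v) pair per vertex. A sketch under the assumption that the fan is laid out as described above, with the center vertex first and the rim vertices following at the same angle steps as in the constructor:

// One (u, v) pair per vertex: the center of the texture for the fan's center,
// then points on a circle of radius 0.5 around (0.5, 0.5) for the rim.
float[] texCoords = new float[(points + 1) * 2];
texCoords[0] = 0.5f; // center vertex maps to the middle of the texture
texCoords[1] = 0.5f;
double theta = 0;
for (int i = 2; i < texCoords.length; i += 2) {
    texCoords[i] = 0.5f + 0.5f * (float) Math.cos(theta);
    texCoords[i + 1] = 0.5f + 0.5f * (float) Math.sin(theta); // flip to 0.5f - ... if the image shows up mirrored vertically
    theta += Math.PI / 90;
}

These values go into a direct FloatBuffer exactly like vertBuff does, and glDrawArrays is then called with the same number of vertices as there are coordinate pairs.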
A texture used for a cube is also being applied to other objects in the view, even though they aren't assigned it. This only happens on my HTC Magic running 1.6. I load the texture image from resources using the standard Bitmap libraries. That shouldn't be the problem, however, since the texture is applied correctly to the intended models.
I tried deleting the texture after using it, but then the texture is completely wiped and the cubes never display it.
I recycled a lot of code taken from tutorials around the net.
Code:
public void onDrawFrame(GL10 gl) {
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
gl.glRotatef(sceneroty, 0, 1, 0);
gl.glTranslatef(xtrans, 0, ztrans);
root.draw(gl);
The root.draw() function:
gl.glPushMatrix();
gl.glFrontFace(GL10.GL_CCW);
gl.glEnable(GL10.GL_CULL_FACE);
gl.glCullFace(GL10.GL_BACK);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, verticesBuffer);
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glColor4f(rgba[0], rgba[1], rgba[2], rgba[3]);
// Smooth color
if (colorBuffer != null) {
// Enable the color array buffer to be used during rendering.
gl.glEnableClientState(GL10.GL_COLOR_ARRAY);
gl.glColorPointer(4, GL10.GL_FLOAT, 0, colorBuffer);
}
// New part...
if (mShouldLoadTexture) {
loadGLTexture(gl);
mShouldLoadTexture = false;
}
if (mTextureId != -1 && mTextureBuffer != null) {
gl.glEnable(GL10.GL_TEXTURE_2D);
// Enable the texture state
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
// Point to our buffers
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, mTextureBuffer);
gl.glBindTexture(GL10.GL_TEXTURE_2D, mTextureId);
}
// ... end new part.
gl.glTranslatef(x, y, z);
gl.glRotatef(rx, 1, 0, 0);
gl.glRotatef(ry, 0, 1, 0);
gl.glRotatef(rz, 0, 0, 1);
// Point out the where the color buffer is.
gl.glDrawElements(GL10.GL_TRIANGLES, numOfIndices,
GL10.GL_UNSIGNED_SHORT, indicesBuffer);
// Disable the vertices buffer.
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
// New part...
if (mTextureId != -1 && mTextureBuffer != null) {
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
}
// ... end new part.
// Disable face culling.
gl.glDisable(GL10.GL_CULL_FACE);
gl.glPopMatrix();
Other functions used:
protected void setTextureCoordinates(float[] textureCoords) {
// float is 4 bytes, therefore we multiply the number of
// vertices by 4.
ByteBuffer byteBuf = ByteBuffer.allocateDirect(
textureCoords.length * 4);
byteBuf.order(ByteOrder.nativeOrder());
mTextureBuffer = byteBuf.asFloatBuffer();
mTextureBuffer.put(textureCoords);
mTextureBuffer.position(0);
}
public void loadBitmap(Bitmap bitmap) {
this.mBitmap = bitmap;
mShouldLoadTexture = true;
}
private void loadGLTexture(GL10 gl) {
// Generate one texture pointer...
gl.glGenTextures(1, textures, 0);
mTextureId = textures[0];
// ...and bind it to our array
gl.glBindTexture(GL10.GL_TEXTURE_2D, mTextureId);
// Create Nearest Filtered Texture
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER,
GL10.GL_LINEAR);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER,
GL10.GL_LINEAR);
// Different possible texture parameters, e.g. GL10.GL_CLAMP_TO_EDGE
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S,
GL10.GL_CLAMP_TO_EDGE);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T,
GL10.GL_REPEAT);
// Use the Android GLUtils to specify a two-dimensional texture image
// from our bitmap
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, mBitmap, 0);
}
It looks like you aren't disabling texturing or unbinding the TEXTURE_2D target. If your other cubes are untextured, they will be drawn using whatever texture is left over from the previous draw operation. You should disable texturing if the object doesn't have a texture assigned to it:
if (mTextureId == -1) {
gl.glDisable(GL10.GL_TEXTURE_2D);
gl.glBindTexture(GL10.GL_TEXTURE_2D, 0);
}
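In the root.draw() snippet above, this would sit next to the existing texture block, so that every object either binds its own texture or explicitly turns texturing off before glDrawElements. A sketch of the combined block, using the same field names as above:

if (mTextureId != -1 && mTextureBuffer != null) {
    gl.glEnable(GL10.GL_TEXTURE_2D);
    gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
    gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, mTextureBuffer);
    gl.glBindTexture(GL10.GL_TEXTURE_2D, mTextureId);
} else {
    // untextured object: make sure no texture state leaks in from the
    // previously drawn object
    gl.glDisable(GL10.GL_TEXTURE_2D);
    gl.glBindTexture(GL10.GL_TEXTURE_2D, 0);
}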