I have a very basic Activity at the moment. It creates a GLSurfaceView and sets the Renderer. The problem is all I see is red, which is from glClearColor, and no texture. Not even a white area. Also glGetError() is not reporting anything.
Here is the Renderer:
public class MyRenderer implements Renderer {
public MyRenderer(Context context)
{
mContext = context;
}
public void onDrawFrame(GL10 gl) {
gl.glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
gl.glBindTexture(GL10.GL_TEXTURE_2D, texture[0]);
gl.glVertexPointer(2, GL10.GL_FLOAT, 0, vertex);
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, texCoords);
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
gl.glViewport(0, 0, width, height);
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadIdentity();
gl.glOrthof(-160.0f, 160.0f, -240.0f, 240.0f, 0.1f, 1.0f);
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
float vertexBuffer[] = {
-160.0f, -240.0f,
-160.0f, 240.0f,
160.0f, -240.0f,
160.0f, 240.0f
};
vertex = ByteBuffer.allocateDirect(8 * 4).asFloatBuffer().put(vertexBuffer);
float texCoordsBuffer[] = {
0.0f, 0.0f,
0.0f, 480.0f/512.0f,
320.0f/512.0f, 0.0f,
320.0f/512.0f, 480.0f/512.0f
};
texCoords = ByteBuffer.allocateDirect(8 * 4).asFloatBuffer().put(texCoordsBuffer);
BitmapFactory.Options options = new BitmapFactory.Options();
options.inDensity = 240; // needed so that the image will be 512x512
Bitmap bitmap = BitmapFactory.decodeResource(mContext.getResources(), R.drawable.image, options);
int width = bitmap.getWidth();
int height = bitmap.getHeight();
Log.i(TAG, "Bitmap:{w:" + width + " h:" + height + "}");
gl.glEnable(GL10.GL_TEXTURE_2D);
texture = new int[1];
gl.glGenTextures(1, texture, 0);
gl.glBindTexture(GL10.GL_TEXTURE_2D, texture[0]);
gl.glTexParameterx(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
gl.glTexParameterx(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
bitmap.recycle();
int error = gl.glGetError();
if (error != GL10.GL_NO_ERROR)
{
Log.e(TAG, "GL Texture Load Error: " + error);
}
}
private Context mContext;
private int texture[];
private FloatBuffer vertex;
private FloatBuffer texCoords;
}
There are several problems with your code:
You need to set the byte order of your buffers to native:
vertex.order(ByteOrder.nativeOrder())
After copying data into your buffers, reset the position to 0:
vertex.position(0)
(Do both for texCoord as well).
It would probably also help to put your near clipping plane at -1.0 instead of .1 (in glOrthof).
// needed because the image won't be 512x512
To have OpenGL render the texture, the texture size needs to be a power of 2, ie 64x64, 128x32 or 256x1024
Related
I am making an application with Opengl and Opencv.
Opencv identifies the corners of a square on the screen.
I apply an algorithm that converts the screen coordinates of points into Cartesian coordinates into opengl, and I create the vertices.
But each point taken from the four caught in the square is modified to each frame,
so the float array of vertices also changes with each frame, how to change the vertices to each frame?
this is MyGLSurfaceView
public class MyGLSurfaceView implements GLSurfaceView.Renderer{
Context context;
Model3D model3D;
float[] vertices;
public MyGLSurfaceView(Context context, float[] vertices){
this.context = context;
this.vertices = vertices;
//ColocaModel(gl,vertices);
model3D = new Model3D(vertices);
}
#Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
model3D.model3DTexture(gl,context);
gl.glEnable(GL10.GL_TEXTURE_2D); // Enable texture (NEW)
}
#Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
if (height == 0) height = 1; // To prevent divide by zero
float aspect = (float)width / height;
// Set the viewport (display area) to cover the entire window
gl.glViewport(0, 0, width, height);
// Setup perspective projection, with aspect ratio matches viewport
gl.glMatrixMode(GL10.GL_PROJECTION); // Select projection matrix
gl.glLoadIdentity(); // Reset projection matrix
// Use perspective projection
GLU.gluPerspective(gl, 45, aspect, 0.1f, 100.f);
gl.glMatrixMode(GL10.GL_MODELVIEW); // Select model-view matrix
gl.glLoadIdentity();
}
#Override
public void onDrawFrame(GL10 gl) {
gl.glLoadIdentity();
gl.glTranslatef(0.0f, 0.0f, -6.0f);
//gl.glTranslatex(200, 500, -6);
//gl.glRotatef(angleCube, 0.1f, 1.0f, 0.2f);
model3D = new Model3D(vertices);
model3D.draw(gl,vertices);
}
}
My object
public class Model3D {
float[] texCoords = { // Texture coords for the above face (NEW)
0.0f, 1.0f, // A. left-bottom (NEW)
1.0f, 1.0f, // B. right-bottom (NEW)
0.0f, 0.0f, // C. left-top (NEW)
1.0f, 0.0f // D. right-top (NEW)
};
int[] textureIDs = new int[1]; //array para 1 textura
private FloatBuffer vertexBuffer;
private FloatBuffer texBuffer;
public Model3D(float[] vertices){
ByteBuffer vbb = ByteBuffer.allocateDirect(vertices.length * 4);
vbb.order(ByteOrder.nativeOrder());
vertexBuffer = vbb.asFloatBuffer();
vertexBuffer.put(vertices);
vertexBuffer.position(0);
ByteBuffer tbb = ByteBuffer.allocateDirect(texCoords.length * 4);
tbb.order(ByteOrder.nativeOrder());
texBuffer = tbb.asFloatBuffer();
texBuffer.put(texCoords);
texBuffer.position(0);
}
public void model3DTexture(GL10 gl, Context context) {
gl.glGenTextures(1, textureIDs, 0); // Generate texture-ID array
gl.glBindTexture(GL10.GL_TEXTURE_2D, textureIDs[0]); // Bind to texture ID
// Set up texture filters
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
// Construct an input stream to texture image "res\drawable\nehe.png"
#SuppressLint("ResourceType") InputStream istream = context.getResources().openRawResource(R.drawable.image2);
Bitmap bitmap;
try {
// Read and decode input as bitmap
bitmap = BitmapFactory.decodeStream(istream);
} finally {
try {
istream.close();
} catch(IOException e) { }
}
// Build Texture from loaded bitmap for the currently-bind texture ID
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
bitmap.recycle();
}
public void draw(GL10 gl,float[] vertices){
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY); // Enable texture-coords-array (NEW)
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, texBuffer); // Define texture-coords buffer (NEW)
gl.glVertexPointer(3, GL10.GL_FLOAT, 0,vertexBuffer);
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP,0,vertices.length/3);
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
}
}
My call
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
MyGLSurfaceView view = new MyGLSurfaceView(this,vertices);
glView = new GLSurfaceView(this);
glView = (GLSurfaceView) findViewById(R.id.glView);
glView.setZOrderOnTop(true);
glView.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
glView.getHolder().setFormat(PixelFormat.TRANSLUCENT);
glView.setRenderer(view);
glView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
At this point is where I would like to make the vertex change
#Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
...
...
These are my converted vertices
// Quad corners built from the OpenCV-detected square, already converted to GL
// coordinates (4 vertices * 3 floats, z fixed at 0).
// NOTE(review): this initializer captures xCP*/yCP* once at construction time;
// later changes to those fields do not update this array - TODO confirm intended.
public float[] vertices = {
xCP4, yCP4, 0.0f,
xCP3, yCP3, 0.0f,
xCP1, yCP1, 0.0f,
xCP2, yCP2, 0.0f
};
I want to capture the rendered scene in my Android openGl App in A Texture Using FrameBuffer Object.
Everything is fine in the emulator, but on a real device the framebuffer object doesn't work,
meaning nothing is rendered into my texture. Here is the FBO class (for creating the framebuffer and the texture attached to it):
public class FBO {
int [] fb, renderTex;
int [] TextureBank;
int top;
int texW;
int texH;
int maxTextures;
public FBO(int width,int height,int maxTextures){
texW = width;
texH = height;
fb = new int[1];
renderTex= new int[1];
top=-1;
this.maxTextures = maxTextures;
TextureBank = new int[maxTextures];
}
public void setup(GL10 gl){
// generate
((GL11ExtensionPack)gl).glGenFramebuffersOES(1, fb, 0);
gl.glEnable(GL10.GL_TEXTURE_2D);
gl.glGenTextures(1, renderTex, 0);// generate texture
gl.glBindTexture(GL10.GL_TEXTURE_2D, renderTex[0]);
gl.glTexParameterf(GL10.GL_TEXTURE_2D,
GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
gl.glTexParameterf(GL10.GL_TEXTURE_2D,
GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_NEAREST);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S,
GL10.GL_CLAMP_TO_EDGE);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T,
GL10.GL_CLAMP_TO_EDGE);
//texBuffer = ByteBuffer.allocateDirect(buf.length*4).order(ByteOrder.nativeOrder()).asIntBuffer();
//gl.glTexEnvf(GL10.GL_TEXTURE_ENV, GL10.GL_TEXTURE_ENV_MODE,GL10.GL_MODULATE);
gl.glTexImage2D(GL10.GL_TEXTURE_2D, 0, GL10.GL_RGBA, texW, texH, 0, GL10.GL_RGBA, GL10.GL_FLOAT, null);
gl.glDisable(GL10.GL_TEXTURE_2D);
if(top>=maxTextures-1){
Log.e("OUT OF TEXTURE COUNTS", "OUT OF TEXTURE COUNTS Texture WIll Not Be added to Stack");
}
else{
TextureBank [++top]= renderTex[0];
Log.d("TOP= ", "" + top);
}
}
public boolean RenderStart(GL10 gl){
// Bind the framebuffer
((GL11ExtensionPack)gl).glBindFramebufferOES(GL11ExtensionPack.GL_FRAMEBUFFER_OES, fb[0]);
// specify texture as color attachment
((GL11ExtensionPack)gl).glFramebufferTexture2DOES(GL11ExtensionPack.GL_FRAMEBUFFER_OES, GL11ExtensionPack.GL_COLOR_ATTACHMENT0_OES, GL10.GL_TEXTURE_2D, renderTex[0], 0);
int error = gl.glGetError();
if (error != GL10.GL_NO_ERROR) {
Log.d("err", "FIRST Background Load GLError: " + error+" ");
}
int status = ((GL11ExtensionPack)gl).glCheckFramebufferStatusOES(GL11ExtensionPack.GL_FRAMEBUFFER_OES);
if (status != GL11ExtensionPack.GL_FRAMEBUFFER_COMPLETE_OES)
{
Log.d("err", "SECOND Background Load GLError: " + status+" ");;
return true;
}
gl.glClear(GL10.GL_COLOR_BUFFER_BIT);
return true;
}
public void RenderEnd(GL10 gl){
((GL11ExtensionPack)gl).glBindFramebufferOES(GL11ExtensionPack.GL_FRAMEBUFFER_OES, 0);
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glEnable(GL10.GL_TEXTURE_2D);
gl.glBindTexture(GL10.GL_TEXTURE_2D, 0);
gl.glColor4f(1,1,1,1);
gl.glDisable(GL10.GL_TEXTURE_2D);
}
public int getTexture(){
return renderTex[0];
}
}
And here is my OnDrawFrame Method Which uses this FBO:
/**
 * Renders the curl mesh, optionally capturing a pass into the FBO first, then
 * draws the normal on-screen pass. Synchronized because capture/AttachTexture
 * are toggled from outside the GL thread - NOTE(review): confirm the writer
 * also synchronizes on this object.
 */
public synchronized void onDrawFrame(GL10 gl) {
mObserver.onDrawFrame();
gl.glClearColor(Color.red(mBackgroundColor) / 255f,
Color.green(mBackgroundColor) / 255f,
Color.blue(mBackgroundColor) / 255f,
Color.alpha(mBackgroundColor) / 255f);
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glLoadIdentity();
if(capture){
if(AttachTexture){
// NOTE(review): 'h' is read BEFORE setup() runs, so the error logged below is
// a pre-existing GL error, not one raised by the FBO setup - likely a bug.
int h = gl.glGetError();
/*
 * One-time FBO creation (framebuffer + color texture).
 */
frameBuffer.setup(gl);
if(h!=0){
Log.d("ERROR", "ERROR Happend"+h+"");
}
AttachTexture = false;
}
/*
 * Offscreen pass: everything until RenderEnd goes into the FBO texture.
 * NOTE(review): the boolean result of RenderStart (framebuffer completeness)
 * is ignored here.
 */
frameBuffer.RenderStart(gl);
if (USE_PERSPECTIVE_PROJECTION) {
// NOTE(review): 'x' is computed but never used.
double x = mCurlMesh.GetMinX()* mCurlMesh.GetMinY();
gl.glTranslatef((float)(-1.000-mCurlMesh.GetMinX()),(float)(-1.000-mCurlMesh.GetMinY()), -6f);
}
// Ortho projection sized to the mesh bounds, viewport sized to the FBO texture.
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadIdentity();
GLU.gluOrtho2D(gl, (float)mCurlMesh.GetMinX(), (float)mCurlMesh.GetMaxX(),
(float)mCurlMesh.GetMinY(), (float)mCurlMesh.GetMaxY());
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
gl.glViewport(0, 0,texW,texH);
mCurlMesh.onDrawFrame(gl);
/*
 * End of the offscreen pass; rendering returns to the window framebuffer.
 */
frameBuffer.RenderEnd(gl);
}
// Restore the on-screen projection and viewport, then draw the visible pass.
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadIdentity();
GLU.gluOrtho2D(gl, -1.1f,1.1f,-1.1f, 1.1f);
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
gl.glViewport(0, 0, mViewportWidth, mViewportHeight);
mCurlMesh.onDrawFrame(gl);
}
I Should mention that my Texture size IS 2^n:
frameBuffer = new FBO(1024, 1024, 8);
Guys i need your help again :)
MainRenderer.java:
package com.example.galaga2d;
import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
import java.nio.ByteOrder;
import java.util.Random;
import java.util.Vector;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView.Renderer;
import android.opengl.GLUtils;
public class MainRenderer implements Renderer {
Random rand = new Random();
float chance = 0.0f;
private Context context;
public Ship playerShip = new Ship();
Vector<Asteroid> asteroidVector = new Vector<Asteroid>();
public MainRenderer(Context context) {
this.context = context;
}
#Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
//! TEXTURES
playerShip.loadGLTexture(gl, this.context);
gl.glEnable(GL10.GL_TEXTURE_2D);
gl.glEnable(GL10.GL_BLEND);
gl.glBlendFunc(GL10.GL_ONE, GL10.GL_SRC_COLOR);
//gl.glShadeModel(GL10.GL_SMOOTH);
//! TEXTURES
gl.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
}
#Override
public void onDrawFrame(GL10 gl) {
gl.glClear(GL10.GL_COLOR_BUFFER_BIT);
gl.glLoadIdentity();
chance = rand.nextFloat() * (100.0f - 1.0f) + 1.0f;
if (chance <= 4.0f) {
asteroidVector.addElement(new Asteroid());
}
if (playerShip.life != 0) {
playerShip.draw(gl);
gl.glLoadIdentity();
for (int i = 0; i < asteroidVector.size(); i++) {
if(asteroidVector.elementAt(i).textured == 0) {
asteroidVector.elementAt(i).loadGLTexture(gl, this.context);
asteroidVector.elementAt(i).textured |= 1;
//gl.glLoadIdentity();
} else {
asteroidVector.elementAt(i).textured &= ~1;
}
}
for (int i = 0; i < asteroidVector.size(); i++) {
asteroidVector.elementAt(i).collisionCheck();
asteroidVector.elementAt(i).draw(gl);
if (asteroidVector.elementAt(i).Y > 480.0f) {
asteroidVector.remove(i);
}
gl.glLoadIdentity();
}
} else {
gl.glClearColor(0.0f, 0.0f, 1.0f, 1.0f);
}
}
#Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
gl.glViewport(0, 0, width, height);
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadIdentity();
gl.glOrthof(0, width, height, 0, 1, -1);
gl.glViewport(0, 0, width, height);
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
}
// --------------------------------------------------------------------------------
class Ship {
public int life = 3; // Количество жизней игрока
public FloatBuffer ShipVertexBuffer; // Vertex буффер
public FloatBuffer ShipTextureBuffer; // Texture буффер
public float X = 100.0f; // Начальные координаты игрока по X
public float Y = 300.0f; // Начальные координаты игрока по Y
//! TEXTURES
private int[] textures = new int[1];
//! TEXTURES
public float ShipVerticles[] = { // Вертикли прямоугольника - корабль
0, 0,
0, 30,
30, 0,
30, 30
};
//! TEXTURES
public float ShipTextures[] = { // Разметка наложения текстуры, соответствует
0.0f, 0.0f, // разметке вертиклей
0.0f, 1.0f,
1.0f, 0.0f,
1.0f, 1.0f
};
//! TEXTURES
public Ship() {
//! Буффер вертексов
ByteBuffer bb = ByteBuffer.allocateDirect(36);
bb.order(ByteOrder.nativeOrder());
ShipVertexBuffer = bb.asFloatBuffer();
ShipVertexBuffer.put(ShipVerticles);
ShipVertexBuffer.position(0);
//! TEXTURES
bb = ByteBuffer.allocateDirect(ShipTextures.length * 4);
bb.order(ByteOrder.nativeOrder());
ShipTextureBuffer = bb.asFloatBuffer();
ShipTextureBuffer.put(ShipTextures);
ShipTextureBuffer.position(0);
//! TEXTURES
}
public void loadGLTexture(GL10 gl, Context context) {
// loading texture
Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(),
R.drawable.ship);
// generate one texture pointer
gl.glGenTextures(1, textures, 0);
// ...and bind it to our array
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
// create nearest filtered texture
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
// Use Android GLUtils to specify a two-dimensional texture image from our bitmap
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
// Clean up
bitmap.recycle();
}
public void draw(GL10 gl) {
//! TEXTURE
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
//! TEXTURE
gl.glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
gl.glTranslatef(playerShip.X, playerShip.Y, 0.0f);
gl.glVertexPointer(2, GL10.GL_FLOAT, 0, ShipVertexBuffer);
//! TEXTURE
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, ShipTextureBuffer);
//! TEXTURE
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
//! TEXTURE
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
//! TEXTURE
}
}
class Asteroid {
private float colorR = rand.nextFloat()* (1.0f - 0.3f) + 0.3f;
private float colorG = rand.nextFloat()* (1.0f - 0.3f) + 0.3f;
private float colorB = rand.nextFloat()* (1.0f - 0.3f) + 0.3f;
private float X = rand.nextFloat() * (300.0f - 1.0f) + 1.0f;
private float Y = -30.0f;
private float size = rand.nextFloat() * (30.0f - 20.0f) + 20.0f;
private float speed = rand.nextFloat() * (10.0f - 1.0f) + 1.0f;
private int collision = 0;
private int textured = 0;
private FloatBuffer AsteroidVertexBuffer;
private FloatBuffer AsteroidTextureBuffer;
//! TEXTURES
private int[] textures = new int[1];
//! TEXTURES
public float AsteroidVerticles[] = {
0, 0, // лево низ
0, size, // лево вверх
size, 0, // право низ
size, size // право вверх
};
//! TEXTURES
public float AsteroidTextures[] = {
0.0f, 0.0f,
0.0f, 1.0f,
1.0f, 0.0f,
1.0f, 1.0f
};
//! TEXTURES
public Asteroid() {
ByteBuffer bb = ByteBuffer.allocateDirect(36);
bb.order(ByteOrder.nativeOrder());
AsteroidVertexBuffer = bb.asFloatBuffer();
AsteroidVertexBuffer.put(AsteroidVerticles);
AsteroidVertexBuffer.position(0);
//! TEXTURES
bb = ByteBuffer.allocateDirect(AsteroidTextures.length * 4);
bb.order(ByteOrder.nativeOrder());
AsteroidTextureBuffer = bb.asFloatBuffer();
AsteroidTextureBuffer.put(AsteroidTextures);
AsteroidTextureBuffer.position(0);
//! TEXTURES
}
public void collisionCheck() {
float result = (float)Math.sqrt(Math.pow((playerShip.X-X), 2)+Math.pow((playerShip.Y-Y), 2));
if (result < size)
{
if(collision == 0)
{
playerShip.life = playerShip.life - 1;
collision |= 1;
}
} else {
collision &= ~1;
}
}
public void loadGLTexture(GL10 gl, Context context) {
// loading texture
Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(),
R.drawable.asteroid);
// generate one texture pointer
gl.glGenTextures(1, textures, 0);
// ...and bind it to our array
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
// create nearest filtered texture
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
// Use Android GLUtils to specify a two-dimensional texture image from our bitmap
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
// Clean up
bitmap.recycle();
}
public void draw(GL10 gl) {
Y += speed;
//! TEXTURE
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
//! TEXTURE
gl.glColor4f(colorR, colorG, colorB, 1.0f);
gl.glTranslatef(X, Y, 0.0f);
gl.glVertexPointer(2, GL10.GL_FLOAT, 0, AsteroidVertexBuffer);
//! TEXTURE
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, AsteroidTextureBuffer);
//! TEXTURE
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
//! TEXTURE
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
//! TEXTURE
}
}
// --------------------------------------------------------------------------------
}
Every frame drawing object asteroid by chance:
chance = rand.nextFloat() * (100.0f - 1.0f) + 1.0f;
if (chance <= 4.0f) {
asteroidVector.addElement(new Asteroid());
}
That means we need to load a texture for every new object we draw, but we don't need to load the texture for one object many times, so I added a flag to check whether the object is textured or not:
for (int i = 0; i < asteroidVector.size(); i++) {
if(asteroidVector.elementAt(i).textured == 0) {
asteroidVector.elementAt(i).loadGLTexture(gl, this.context);
asteroidVector.elementAt(i).textured |= 1;
//gl.glLoadIdentity();
} else {
asteroidVector.elementAt(i).textured &= ~1;
}
}
After object created and textured, we need to delete it if he go over screen border, so i do this:
for (int i = 0; i < asteroidVector.size(); i++) {
asteroidVector.elementAt(i).collisionCheck();
asteroidVector.elementAt(i).draw(gl);
//! THIS
if (asteroidVector.elementAt(i).Y > 480.0f) {
asteroidVector.remove(i);
}
//! THIS
gl.glLoadIdentity();
}
But that is not enough, because the texture memory is never freed, and after 10-20 seconds of the application running I see some lagging and low FPS.
The question is: how can I free the texture buffer or memory to fix the lagging and low FPS?
I suppose you could do that, but then again you shouldn't be getting yourself into a situation where you actually need to in the first place.
As it now stands, you're creating a new texture object (and never freeing it!) for every Asteroid that's ever constructed, which is absolutely not necessary - a great way to fix that is to use a shared texture object for all of your Asteroid instances.
In a static function (called once), simply decode the image resource to raw pixel data -> glGenTextures -> glBindTexture -> glTexParam... calls ->GLUtils.TexImage2D -> free the (client-side) decoded raw pixel data and store the texture id (given by glGenTextures) into a static variable within the Asteroid class. Having done this, you can just bind to this shared texture object at the rendering stage (ie. draw), and proceed as you normally would.
As said before, never calling glDeleteTextures is the second part of your problem, although now that you're (hopefully) using a shared texture object for your Asteroid instances, it becomes much less important.
(Furthermore, even the vertex/texcoord buffer can be shared between instances, provided they are identical :P)
In my app I'm getting data from the camera , creating bitmap object and I must show it using openGL , so for that I have Square class and GlRenderer class. Problem is that it works very slowly , and every time it shows some image , then black screen , and another image and so on. Below is my classes
Square class
package com.example.videochat;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.opengles.GL10;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLUtils;
/**
 * A unit quad (2x2, centered on the origin) textured with a caller-supplied
 * bitmap; drawn as a triangle strip with OpenGL ES 1.x client-side arrays.
 *
 * NOTE(review): loadGLTexture recycles m_bitmap, so it can be called at most
 * once per Square instance; a recreated GL context cannot re-upload - verify.
 */
public class Square {
private FloatBuffer vertexBuffer; // buffer holding the vertices
private float vertices[] = {
-1.0f, -1.0f, 0.0f, // V1 - bottom left
-1.0f, 1.0f, 0.0f, // V2 - top left
1.0f, -1.0f, 0.0f, // V3 - bottom right
1.0f, 1.0f, 0.0f // V4 - top right
};
private Bitmap m_bitmap; // Source image; consumed (recycled) by loadGLTexture.
private FloatBuffer textureBuffer; // buffer holding the texture coordinates
// NOTE(review): the t coordinates are inverted relative to the vertex order,
// which flips the image vertically - presumably to compensate for Android's
// top-left bitmap origin; confirm against the expected on-screen orientation.
private float texture[] = {
// Mapping coordinates for the vertices
0.0f, 1.0f, // top left (V2)
0.0f, 0.0f, // bottom left (V1)
1.0f, 1.0f, // top right (V4)
1.0f, 0.0f // bottom right (V3)
};
/** The texture pointer */
private int[] textures = new int[1];
/**
 * Stores the bitmap and builds direct, native-order buffers for the quad's
 * vertex positions and texture coordinates.
 */
public Square(Bitmap bitmap) {
// a float has 4 bytes so we allocate for each coordinate 4 bytes
m_bitmap = bitmap;
ByteBuffer byteBuffer = ByteBuffer.allocateDirect(vertices.length * 4);
byteBuffer.order(ByteOrder.nativeOrder());
// allocates the memory from the byte buffer
vertexBuffer = byteBuffer.asFloatBuffer();
// fill the vertexBuffer with the vertices
vertexBuffer.put(vertices);
// set the cursor position to the beginning of the buffer
vertexBuffer.position(0);
byteBuffer = ByteBuffer.allocateDirect(texture.length * 4);
byteBuffer.order(ByteOrder.nativeOrder());
textureBuffer = byteBuffer.asFloatBuffer();
textureBuffer.put(texture);
textureBuffer.position(0);
}
/**
 * Generates a GL texture, sets its filters, and uploads m_bitmap into it.
 * Must run on the GL thread.
 *
 * NOTE(review): the 'context' parameter is unused.
 */
public void loadGLTexture(GL10 gl, Context context) {
// generate one texture pointer
gl.glGenTextures(1, textures, 0);
// ...and bind it to our array
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
// create nearest filtered texture
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
//Different possible texture parameters, e.g. GL10.GL_CLAMP_TO_EDGE
// gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_REPEAT);
// gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_REPEAT);
// Use Android GLUtils to specify a two-dimensional texture image from our bitmap
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, m_bitmap, 0);
// Clean up: pixel data now lives in the GL texture.
m_bitmap.recycle();
}
/** The draw method for the square with the GL context */
public void draw(GL10 gl) {
// bind the previously generated texture
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
// Point to our buffers
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
// Set the face rotation
gl.glFrontFace(GL10.GL_CW);
// Point to our vertex buffer
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureBuffer);
// Draw the vertices as triangle strip
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, vertices.length / 3);
//Disable the client state before leaving
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
}
}
GlRenderer class
package com.example.videochat;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.content.Context;
import android.graphics.Bitmap;
import android.opengl.GLU;
import android.opengl.GLSurfaceView.Renderer;
/**
 * Minimal renderer that draws one textured Square 5 units in front of a
 * perspective camera.
 *
 * NOTE(review): each GlRenderer owns a Square built from one bitmap; showing a
 * new camera frame currently requires constructing a whole new renderer (and
 * GLSurfaceView), which is very expensive - consider reusing one renderer and
 * updating the texture instead.
 */
public class GlRenderer implements Renderer {
private Square square; // the square
private Context context;
/** Constructor to set the handed over context */
public GlRenderer(Context context, Bitmap bitmap) {
this.context = context;
// initialise the square
this.square = new Square(bitmap);
}
#Override
public void onDrawFrame(GL10 gl) {
// clear Screen and Depth Buffer
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
// Reset the Modelview Matrix
gl.glLoadIdentity();
// Drawing
gl.glTranslatef(0.0f, 0.0f, -5.0f); // move 5 units INTO the screen
// is the same as moving the camera 5 units away
// gl.glScalef(0.5f, 0.5f, 0.5f); // scale the square to 50%
// otherwise it will be too large
square.draw(gl); // Draw the square
}
#Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
if(height == 0) { //Prevent A Divide By Zero By
height = 1; //Making Height Equal One
}
gl.glViewport(0, 0, width, height); //Reset The Current Viewport
gl.glMatrixMode(GL10.GL_PROJECTION); //Select The Projection Matrix
gl.glLoadIdentity(); //Reset The Projection Matrix
//Calculate The Aspect Ratio Of The Window
GLU.gluPerspective(gl, 45.0f, (float)width / (float)height, 0.1f, 100.0f);
gl.glMatrixMode(GL10.GL_MODELVIEW); //Select The Modelview Matrix
gl.glLoadIdentity(); //Reset The Modelview Matrix
}
#Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
// Load the texture for the square; must happen here, on the GL thread,
// once a GL context exists.
square.loadGLTexture(gl, this.context);
gl.glEnable(GL10.GL_TEXTURE_2D); //Enable Texture Mapping ( NEW )
gl.glShadeModel(GL10.GL_SMOOTH); //Enable Smooth Shading
gl.glClearColor(0.0f, 0.0f, 0.0f, 0.5f); //Black Background
gl.glClearDepthf(1.0f); //Depth Buffer Setup
gl.glEnable(GL10.GL_DEPTH_TEST); //Enables Depth Testing
gl.glDepthFunc(GL10.GL_LEQUAL); //The Type Of Depth Testing To Do
//Really Nice Perspective Calculations
gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_NICEST);
}
}
and the part of tha main class
/**
 * Creates a fresh GLSurfaceView plus renderer showing the given bitmap and
 * makes it the activity's content view.
 *
 * NOTE(review): this is invoked from onPreviewFrame for EVERY camera frame, so
 * a new GLSurfaceView, EGL context, renderer and texture are created many
 * times per second - the likely cause of the flicker (image / black screen
 * alternation) and slowness. Create the view once and stream new frames into
 * the existing texture instead - TODO confirm.
 * NOTE(review): the 'VideoCaptureBitmap' parameter is unused.
 */
public void initGlSurfaceView (Bitmap bmp, SurfaceView VideoCaptureBitmap) {
glSurfaceView = new GLSurfaceView(this);
glSurfaceView.setRenderer(new GlRenderer(this,bmp));
setContentView(glSurfaceView);
}
/**
 * Opens the camera, configures preview buffers, installs a per-frame callback
 * that converts each NV21 preview frame to a Bitmap (via JPEG), and starts the
 * preview. No-op if the camera is already open.
 */
private void startVideo() {
if(m_Camera != null) return; // Already running.
SurfaceHolder videoCaptureViewHolder = null;
try {
m_Camera = Camera.open();
} catch (RuntimeException e) {
// NOTE(review): the exception itself is dropped; consider logging 'e' too.
Log.e("CameraTest", "Camera Open filed");
return;
}
// NOTE(review): empty error callback - camera errors are silently ignored.
m_Camera.setErrorCallback(new ErrorCallback() {
public void onError(int error, Camera camera) {
}
});
Camera.Parameters parameters = m_Camera.getParameters();
//parameters.setPreviewFrameRate(30);
// Log the supported preview sizes (diagnostic only; none is selected here).
List<Size> supportedPreviewSizes=parameters.getSupportedPreviewSizes();
Iterator<Size> supportedPreviewSizesIterator=supportedPreviewSizes.iterator();
while(supportedPreviewSizesIterator.hasNext()){
Size tmpSize=supportedPreviewSizesIterator.next();
Log.v("CameraTest","supportedPreviewSize.width = "+tmpSize.width+"supportedPreviewSize.height = "+tmpSize.height);
}
m_Camera.setParameters(parameters);
if (null != m_VideoCaptureView)
videoCaptureViewHolder = m_VideoCaptureView.getHolder();
try {
m_Camera.setPreviewDisplay(videoCaptureViewHolder);
} catch (Throwable t) {
// NOTE(review): swallowed - if setPreviewDisplay fails the preview will
// silently never appear. At minimum, log 't'.
}
Log.v("CameraTest","Camera PreviewFrameRate = "+m_Camera.getParameters().getPreviewFrameRate());
// Allocate three preview buffers sized for the current preview format.
Size previewSize=m_Camera.getParameters().getPreviewSize();
int dataBufferSize=(int)(previewSize.height*previewSize.width*
(ImageFormat.getBitsPerPixel(m_Camera.getParameters().getPreviewFormat())/8.0));
m_Camera.addCallbackBuffer(new byte[dataBufferSize]);
m_Camera.addCallbackBuffer(new byte[dataBufferSize]);
m_Camera.addCallbackBuffer(new byte[dataBufferSize]);
m_Camera.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() {
private long timestamp=0;
public synchronized void onPreviewFrame(byte[] data, Camera camera) {
//Log.v("CameraTest","Time Gap = "+(System.currentTimeMillis()-timestamp));
//int size = data.length;
//Bitmap btmp = BitmapFactory.decodeByteArray(data, 0, size);
//m_VideCaptureBitmap.setImageBitmap(btmp);
// NOTE(review): NV21 -> JPEG -> Bitmap on every frame is very expensive;
// a YUV->RGB conversion straight into a reused texture would be far faster.
Size previewSize = camera.getParameters().getPreviewSize();
YuvImage yuvimage=new YuvImage(data, ImageFormat.NV21, previewSize.width, previewSize.height, null);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
yuvimage.compressToJpeg(new Rect(0, 0, previewSize.width, previewSize.height), 80, baos);
byte[] jdata = baos.toByteArray();
// Convert to Bitmap
Bitmap bmp = BitmapFactory.decodeByteArray(jdata, 0, jdata.length);
//m_VideCaptureBitmap.setImageBitmap(bmp);
// NOTE(review): this rebuilds the whole GLSurfaceView for every frame -
// see initGlSurfaceView; it is the likely cause of flicker and low FPS.
initGlSurfaceView(bmp, m_VideCaptureBitmap);
// set our renderer to be the main renderer with
// the current activity context
//setContentView(glSurfaceView);
Log.v("CameraTest","Frame size = "+ data.length);
timestamp=System.currentTimeMillis();
// Return the buffer to the camera so it can be reused for a later frame.
try{
camera.addCallbackBuffer(data);
}catch (Exception e) {
Log.e("CameraTest", "addCallbackBuffer error");
return;
}
return;
}
});
try {
m_Camera.startPreview();
} catch (Throwable e) {
// Preview could not start; release the camera so a retry is possible.
m_Camera.release();
m_Camera = null;
return;
}
}
Can anybody help me? Thanks
Regards
I am trying to rotate the texture around its center but it is not giving the expected results can you please see the code and let me know what i am missing?
here is my code
public class GLRenderer implements Renderer {
Context context;
Square s;
float x = 100 ,y = 100 ;
float w,h;
public GLRenderer(Context c) {
context = c;
s = new Square(x,y);
}
#Override
public void onDrawFrame(GL10 gl) {
s.draw(gl);
}
#Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
gl.glViewport(0, 0, width, height);
gl.glOrthof(0, 320, 0, 480, 0, 1);
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadIdentity();
gl.glEnable(GL10.GL_TEXTURE_2D);
}
#Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
gl.glClear(GL10.GL_COLOR_BUFFER_BIT);
s.loadGLTexture(gl, R.drawable.ic_launcher);
}
public class Square{
int textures[] = new int[1];
FloatBuffer vertexbuffer;
FloatBuffer texturebuffer;
float texture[] ={
1.0f, 1.0f,
1.0f, 0.0f,
0.0f, 1.0f,
0.0f, 0.0f
};
public Square(float x, float y){
float vertices[] = {
x,y,0,
x,y+100,0,
x+100,y,0,
x+100,y+100,0
};
ByteBuffer vbb = ByteBuffer.allocateDirect(vertices.length*4);
vbb.order(ByteOrder.nativeOrder());
vertexbuffer = vbb.asFloatBuffer();
vertexbuffer.put(vertices);
vertexbuffer.position(0);
ByteBuffer tbb = ByteBuffer.allocateDirect(texture.length*4);
tbb.order(ByteOrder.nativeOrder());
texturebuffer = tbb.asFloatBuffer();
texturebuffer.put(texture);
texturebuffer.position(0);
}
public void draw(GL10 gl) {
gl.glPushMatrix();
gl.glTranslatef(-150, -150, 0);
gl.glRotatef(30,0, 0,1 );
gl.glTranslatef(150, 150, 0);
// Point to our buffers
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
// Set the face rotation
gl.glFrontFace(GL10.GL_CW);
// Point to our vertex buffer
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexbuffer);
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, texturebuffer);
// Draw the vertices as triangle strip
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
//Disable the client state before leaving
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
r = r+10;
gl.glPopMatrix();
}
public void loadGLTexture(GL10 gl, int drawable) {
Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(),
drawable);
gl.glGenTextures(1, textures, 0);
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
bitmap.recycle();
}
}
}
Since your plane is facing the Z axis, you actually need to rotate it on the Z axis, and not on the X.
So gl.glRotatef(30, 1, 0, 0) would be gl.glRotatef(30, 0, 0, 1)
Oh, and the translation values are actually -150 and 150, not -50 & 50. Since your plane is 100x100 and it "begins" in (100,100) so 150 would be the center of the plane.
But you should try not using that x and y variables. Then your vertices would become { 0,0,0, 0,100,0, 100,0,0, 100,100,0 }, it's in this case that you'd translate it by (-50, -50, 0) and then (50, 50, 0).