I'm trying to render a 640x480 RGB565 image using OpenGL ES on Android using GLSurfaceView and Native C code.
Initially I had a 0x0501 error with glTexImage2D, which I was able to resolve by changing the image dimensions.
But now, in the "drawFrame" call, when I do glDrawTexiOES to render the texture, I'm getting the following error in the logs:
drawtex.c:89: DrawTexture: No textures enabled
I'm already doing glEnable(GL_TEXTURE_2D), is there anything else I should do?
Is there a complete example showing GLSurfaceView with native code using textures?
Thanks in advance!
I have had the same problem. You can either make a custom renderer like the one below, or (a little bit more complicated) do it by subclassing SurfaceView directly:
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import javax.microedition.khronos.opengles.GL11;
import javax.microedition.khronos.opengles.GL11Ext;
import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLSurfaceView;
import android.util.Log;
import com.TIEmulator.JNI.NLib;
class EmulatorRenderer implements GLSurfaceView.Renderer, NLib.EventsInterface {
public static interface RenderCb {
public boolean dismissStartupDialog();
public void updateStartupDialog(String msg);
}
private static int mViewHeight;
private static int mViewWidth;
private static BitmapFactory.Options sBitmapOptions = new BitmapFactory.Options();
private static int TEX_BUF_HEIGHT = 128;
private static int TEX_BUF_WIDTH = 256;
private final ByteBuffer byteBuffer = ByteBuffer.allocateDirect(4
* EmulatorRenderer.TEX_BUF_HEIGHT * EmulatorRenderer.TEX_BUF_WIDTH);
private boolean emulation_running = false;
private Context mContext = null;
private final int[] mCropWorkspace;
private final GLSurfaceView mGLView;
protected int mTex = -1;
protected int[] mtexBuf = new int[1];
private final int[] mTextureNameWorkspace;
public EmulatorRenderer(final Context ctx, final GLSurfaceView v) {
// Pre-allocate and store these objects so we can use them at runtime
// without allocating memory mid-frame.
mTextureNameWorkspace = new int[1];
mCropWorkspace = new int[4];
byteBuffer.order(ByteOrder.BIG_ENDIAN);
// Set our bitmaps to 16-bit, 565 format.
EmulatorRenderer.sBitmapOptions.inPreferredConfig = Bitmap.Config.RGB_565;
mGLView = v;
mContext = ctx;
try {
NLib.setListener(this);
} catch (final RuntimeException exc) {
exc.printStackTrace();
callFinishOnce();
return;
}
if (!NLib.TryLoadLib()) {
callFinishOnce();
return;
}
Log.v(this.getClass().toString(), ">>>>>>>>>>>>>>>>>>>>>>> init successful! <<<<<<<<<<<<<<<<<<<<<<");
}
private void callFinishOnce() {
if (mContext != null) {
((Activity) mContext).finish();
mContext = null;
}
}
@Override
protected void finalize() throws Throwable {
NLib.stopEmu();
super.finalize();
}
protected int loadBB(final GL10 gl) {
int textureName = -1;
if (mContext != null && gl != null) {
gl.glGenTextures(1, mTextureNameWorkspace, 0);
textureName = mTextureNameWorkspace[0];
gl.glBindTexture(GL10.GL_TEXTURE_2D, textureName);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER,
GL10.GL_NEAREST);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER,
GL10.GL_NEAREST);// GL10.GL_LINEAR);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S,
GL10.GL_CLAMP_TO_EDGE);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T,
GL10.GL_CLAMP_TO_EDGE);
gl.glTexEnvf(GL10.GL_TEXTURE_ENV, GL10.GL_TEXTURE_ENV_MODE,
GL10.GL_REPLACE);
mCropWorkspace[0] = 0; // u
// mCropWorkspace[1] = EMU_HEIGHT; // v
mCropWorkspace[1] = 0; // v
mCropWorkspace[2] = NLib.getWidth(); // w
// mCropWorkspace[3] = -EMU_HEIGHT; // h -EMU_HEIGHT;
mCropWorkspace[3] = NLib.getHeight(); // h -EMU_HEIGHT;
byteBuffer.order(ByteOrder.BIG_ENDIAN);
final int error = gl.glGetError();
if (error != GL10.GL_NO_ERROR) {
Log.e("SpriteMethodTest", "Texture Load GLError: " + error);
}
}
return textureName;
}
public void onDrawFrame(final GL10 gl) {
// Log.v(this.toString(),"onDrawFrame called");
gl.glActiveTexture(mTex);
gl.glClientActiveTexture(mTex);
byteBuffer.position(0);
// this two lines bind and create the texture!
gl.glBindTexture(GL10.GL_TEXTURE_2D, mTex);
((GL11) gl).glTexParameteriv(GL10.GL_TEXTURE_2D,
GL11Ext.GL_TEXTURE_CROP_RECT_OES, mCropWorkspace, 0);
gl.glTexImage2D(GL10.GL_TEXTURE_2D, 0, GL10.GL_RGBA,
EmulatorRenderer.TEX_BUF_WIDTH,
EmulatorRenderer.TEX_BUF_HEIGHT, 0, GL10.GL_RGBA,
GL10.GL_UNSIGNED_BYTE, byteBuffer);
((GL11Ext) gl).glDrawTexiOES(0, 0, 0, EmulatorRenderer.mViewWidth,
EmulatorRenderer.mViewHeight);
/** gl.glEnable(GL10.GL_DEPTH_TEST); gl.glDepthFunc(GL10.GL_LEQUAL);
*/
}
public void OnFatalError(final String text) {
Log.d(toString(), "FATAL ERROR CALLBACK raised: " + text
+ " ===> Activity calls finish()");
}
public void onFinish() {
onPause();
}
/*
* JNI interface
*
*/
@Override
public void OnBufferUpdate() {
mGLView.requestRender();
}
// TODO Auto-generated method stub
@Override
public void OnWarning(String msg) {
// TODO Auto-generated method stub
}
public void onPause() {
mGLView.onPause();
// Log.v("onPause", "NILib.stopEmulate()");
emulation_running = false;
//startupDialogDismiss = false;
NLib.stopEmu();
}
public void onResume() {
// Log.v(this.toString(),"EmulatorRenderer:onResume called");
NLib.startEmu(byteBuffer);
mGLView.onResume();
//callFinishOnce();
emulation_running = true;
}
public void onSurfaceChanged(final GL10 gl, final int w, final int h) {
EmulatorRenderer.mViewWidth = w;
EmulatorRenderer.mViewHeight = h;
Log.v(toString(), "onSurfaceChanged: ==> New View Size: [" + w + ","
+ h + "]");
}
public void onSurfaceCreated(final GL10 gl, final EGLConfig config) {
gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_FASTEST);
gl.glClearColor(0.5f, 0.5f, 0.5f, 1);
gl.glShadeModel(GL10.GL_FLAT);
gl.glDisable(GL10.GL_DEPTH_TEST);
gl.glEnable(GL10.GL_TEXTURE_2D);
/*
* By default, OpenGL enables features that improve quality but reduce
* performance. One might want to tweak that especially on software
* renderer.
*/
gl.glDisable(GL10.GL_DITHER);
gl.glDisable(GL10.GL_LIGHTING);
gl.glDisable(GL10.GL_BLEND);
gl.glDisable(GL10.GL_DEPTH_TEST);
gl.glHint(GL10.GL_LINE_SMOOTH_HINT, GL10.GL_NICEST); // Set Line
// Antialiasing
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
// create the one and only texture here...
mTex = loadBB(gl);
}
public void OnTIEmuStopped() {
System.out.println("OnTIEmuStopped callback called! ");
callFinishOnce();
}
/* Called when the size of the window changes. */
public void sizeChanged(final GL10 gl, final int w, final int h) {
// Log.v(this.toString(),"sizeChanged: ==> new Viewport: ["+w+","+h+"]");
gl.glViewport(0, 0, w, h);
/*
* Set our projection matrix. This doesn't have to be done each time we
* draw, but usually a new projection needs to be set when the viewport
* is resized.
*/
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadIdentity();
// Set up the orthographic projection
// gl.glOrthof(0.0f, w, 0.0f, h, 0.0f, 1.0f);
gl.glOrthof(0.0f, w, 0.0f, 0.0f, h, 1.0f);
gl.glBlendFunc(GL10.GL_SRC_ALPHA, GL10.GL_ONE_MINUS_SRC_ALPHA);
gl.glColor4x(0x10000, 0x10000, 0x10000, 0x10000);
gl.glEnable(GL10.GL_TEXTURE_2D);
gl.glMatrixMode(GL10.GL_MODELVIEW);
}
}
Yes, I did...
gl.glGenTextures(1, mTextureNameWorkspace, 0);
textureName = mTextureNameWorkspace[0];
gl.glBindTexture(GL10.GL_TEXTURE_2D, textureName);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER,
GL10.GL_NEAREST);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER,
GL10.GL_NEAREST);// GL10.GL_LINEAR);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S,
GL10.GL_CLAMP_TO_EDGE);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T,
GL10.GL_CLAMP_TO_EDGE);
gl.glTexEnvf(GL10.GL_TEXTURE_ENV, GL10.GL_TEXTURE_ENV_MODE,
GL10.GL_REPLACE);
mCropWorkspace[0] = 0; // u
// mCropWorkspace[1] = EMU_HEIGHT; // v
mCropWorkspace[1] = 0; // v
mCropWorkspace[2] = NLib.getWidth(); // w
// mCropWorkspace[3] = -EMU_HEIGHT; // h -EMU_HEIGHT;
mCropWorkspace[3] = NLib.getHeight(); // h -EMU_HEIGHT;
static_test_debug_if_gl_error( gl , "loadBB time 1");
gl.glActiveTexture(mTex);
gl.glClientActiveTexture(mTex);
static_test_debug_if_gl_error( gl , "loadBB time 2");
Did you generate a texture id and bind a texture first?
glGenTextures()/glBindTexture()
The following will get the texture all set up and ready to use on GL_TEXTURE0:
When loading the texture:
// in your native onSurfaceCreated function:
glEnable(GL_TEXTURE_2D);
glGenTextures(1, &texID);
glBindTexture(GL_TEXTURE_2D, texID);
// setup texture parameters if you want:
glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); // etc.
glTexImage2D(GL_TEXTURE_2D, ... );
see here for how to fill out the TexImage2D parameters.
I've unfortunately not been able to get the glDrawTex_OES functions to work properly, but the texture does work if you render it onto a quad:
// in your native onRender function:
glBindTexture(GL_TEXTURE_2D, sGlTexture.texID);
// quad drawing:
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glVertexPointer(3, GL_FLOAT, 0, quadVerts[i]);
glTexCoordPointer(2, GL_FLOAT, 0, quadTexCoords[i]);
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
I want to capture the rendered scene in my Android OpenGL app into a texture using a framebuffer object.
Everything is fine in the emulator, but on a real device the framebuffer object doesn't work,
meaning nothing goes into my texture. Here is the FBO class (for creating the framebuffer and creating a texture from it):
public class FBO {
int [] fb, renderTex;
int [] TextureBank;
int top;
int texW;
int texH;
int maxTextures;
public FBO(int width,int height,int maxTextures){
texW = width;
texH = height;
fb = new int[1];
renderTex= new int[1];
top=-1;
this.maxTextures = maxTextures;
TextureBank = new int[maxTextures];
}
public void setup(GL10 gl){
// generate
((GL11ExtensionPack)gl).glGenFramebuffersOES(1, fb, 0);
gl.glEnable(GL10.GL_TEXTURE_2D);
gl.glGenTextures(1, renderTex, 0);// generate texture
gl.glBindTexture(GL10.GL_TEXTURE_2D, renderTex[0]);
gl.glTexParameterf(GL10.GL_TEXTURE_2D,
GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
gl.glTexParameterf(GL10.GL_TEXTURE_2D,
GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_NEAREST);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S,
GL10.GL_CLAMP_TO_EDGE);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T,
GL10.GL_CLAMP_TO_EDGE);
//texBuffer = ByteBuffer.allocateDirect(buf.length*4).order(ByteOrder.nativeOrder()).asIntBuffer();
//gl.glTexEnvf(GL10.GL_TEXTURE_ENV, GL10.GL_TEXTURE_ENV_MODE,GL10.GL_MODULATE);
gl.glTexImage2D(GL10.GL_TEXTURE_2D, 0, GL10.GL_RGBA, texW, texH, 0, GL10.GL_RGBA, GL10.GL_FLOAT, null);
gl.glDisable(GL10.GL_TEXTURE_2D);
if(top>=maxTextures-1){
Log.e("OUT OF TEXTURE COUNTS", "OUT OF TEXTURE COUNTS Texture WIll Not Be added to Stack");
}
else{
TextureBank [++top]= renderTex[0];
Log.d("TOP= ", "" + top);
}
}
public boolean RenderStart(GL10 gl){
// Bind the framebuffer
((GL11ExtensionPack)gl).glBindFramebufferOES(GL11ExtensionPack.GL_FRAMEBUFFER_OES, fb[0]);
// specify texture as color attachment
((GL11ExtensionPack)gl).glFramebufferTexture2DOES(GL11ExtensionPack.GL_FRAMEBUFFER_OES, GL11ExtensionPack.GL_COLOR_ATTACHMENT0_OES, GL10.GL_TEXTURE_2D, renderTex[0], 0);
int error = gl.glGetError();
if (error != GL10.GL_NO_ERROR) {
Log.d("err", "FIRST Background Load GLError: " + error+" ");
}
int status = ((GL11ExtensionPack)gl).glCheckFramebufferStatusOES(GL11ExtensionPack.GL_FRAMEBUFFER_OES);
if (status != GL11ExtensionPack.GL_FRAMEBUFFER_COMPLETE_OES)
{
Log.d("err", "SECOND Background Load GLError: " + status+" ");;
return true;
}
gl.glClear(GL10.GL_COLOR_BUFFER_BIT);
return true;
}
public void RenderEnd(GL10 gl){
((GL11ExtensionPack)gl).glBindFramebufferOES(GL11ExtensionPack.GL_FRAMEBUFFER_OES, 0);
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glEnable(GL10.GL_TEXTURE_2D);
gl.glBindTexture(GL10.GL_TEXTURE_2D, 0);
gl.glColor4f(1,1,1,1);
gl.glDisable(GL10.GL_TEXTURE_2D);
}
public int getTexture(){
return renderTex[0];
}
}
And here is my onDrawFrame method, which uses this FBO:
public synchronized void onDrawFrame(GL10 gl) {
mObserver.onDrawFrame();
gl.glClearColor(Color.red(mBackgroundColor) / 255f,
Color.green(mBackgroundColor) / 255f,
Color.blue(mBackgroundColor) / 255f,
Color.alpha(mBackgroundColor) / 255f);
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glLoadIdentity();
if(capture){
if(AttachTexture){
int h = gl.glGetError();
/**
Setup FBO
*/
frameBuffer.setup(gl);
if(h!=0){
Log.d("ERROR", "ERROR Happend"+h+"");
}
AttachTexture = false;
}
/**
Start Rendering In FBO
*/
frameBuffer.RenderStart(gl);
if (USE_PERSPECTIVE_PROJECTION) {
double x = mCurlMesh.GetMinX()* mCurlMesh.GetMinY();
gl.glTranslatef((float)(-1.000-mCurlMesh.GetMinX()),(float)(-1.000-mCurlMesh.GetMinY()), -6f);
}
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadIdentity();
GLU.gluOrtho2D(gl, (float)mCurlMesh.GetMinX(), (float)mCurlMesh.GetMaxX(),
(float)mCurlMesh.GetMinY(), (float)mCurlMesh.GetMaxY());
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
gl.glViewport(0, 0,texW,texH);
mCurlMesh.onDrawFrame(gl);
/**
End Rendering In FBO
*/
frameBuffer.RenderEnd(gl);
}
//Reset Every Thing to Its Normal State
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadIdentity();
GLU.gluOrtho2D(gl, -1.1f,1.1f,-1.1f, 1.1f);
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
gl.glViewport(0, 0, mViewportWidth, mViewportHeight);
mCurlMesh.onDrawFrame(gl);
}
I should mention that my texture size is 2^n:
frameBuffer = new FBO(1024, 1024, 8);
Recently I worked on a 2D game using SurfaceView and I learned a lot about game loops and everything. But now I want to make the same game using OpenGL. I read that GLSurfaceView is the class that corresponds to what SurfaceView was. But I'm not sure about other things, such as:
Using SurfaceView, I used the Bitmap class to load an image resource that would be, let's say, a character. That bitmap would be a property of my Character class. And for the game loop I used a separate Thread.
I'm new to OpenGL, so I'd like to know how to load image resources (do I load them using the Bitmap class?), and what to use instead of a Thread for the game loop?
do I load them using the Bitmap class?
Yes, you can load a bitmap and upload it with texImage2D.
Example:
public void loadTextures(GL10 gl, Context context) {
Log.e(LOG_TAG, "ExplosionSprite :: loadTextures");
mFrame = 0;
InputStream is;
Bitmap bitmap;
is = context.getResources().openRawResource(DRAW_SOURCE);
bitmap = BitmapFactory.decodeStream(is);
try {
is.close();
is = null;
} catch (IOException e) {
}
gl.glGenTextures(TEXTURE_COUNT, textures, 0);
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
bitmap.recycle();
}
Four months ago I did the same: because of performance I switched to OpenGL ES for 2D.
It's a bit complicated, but after some time it seems easy enough.
I don't have an example for a single image right now, but I have a pretty good example for a sprite sheet where I animate my image. I'm sure you can remove the irrelevant parts to make it work for one static image. Google has enough "how to" resources, so I'll just point out what I used for my purposes:
link 1
link 2
link 3
Cast_001_Sprite_.java
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.opengles.GL10;
import net.obviam.droidz.R;
import net.obviam.droidz.model.components.ESpriteDirection;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLUtils;
import android.util.Log;
public class Cast_001_Sprite_ {
private static final String LOG_TAG = Cast_001_Sprite.class.getSimpleName();
/** Sprite sheet definition */
private static final int SPRITE_WIDTH = 5;
private static final int SPRITE_HEIGHT = 4;
private int DRAW_SOURCE = R.drawable.a1;
private int mX = Location._1[0]; // 100;
private int mY = Location._1[1]; // 100;
private float mScreenWidth, mScreenHeight, wRatio, hRatio;
private int mFrame = 0;
private int mSwitcher = 0;
private final static int TEXTURE_COUNT = 1; // for sprite sheet we use 1 image all the time.
private int[] textures = new int[TEXTURE_COUNT]; // frame animation
protected FloatBuffer vertexBuffer;
private final static ESpriteDirection mDirection = ESpriteDirection.TOP_TO_DOWN_LEFT_TO_RIGHT;
public float x, y, initPos, finalPos, initSpeed, currentPos;
private ByteBuffer bb1;
private final static int TOTAL_IMAGE_COUNT_IN_SPRITE = SPRITE_WIDTH * SPRITE_HEIGHT;
private FloatBuffer[] floatBufferArray = new FloatBuffer[TOTAL_IMAGE_COUNT_IN_SPRITE];
private float xOffset = 1.0f/SPRITE_WIDTH;
private float yOffset = 1.0f/SPRITE_HEIGHT;
float[] vertices = new float[] {
-1.0f, -1.0f, 0.0f,
-1.0f, 1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
1.0f, 1.0f, 0.0f };
private float storage[][] = new float[TOTAL_IMAGE_COUNT_IN_SPRITE][];
private int[] sprite_X_Indexes = new int[SPRITE_WIDTH];//{1,2,3,4};
private int[] sprite_Y_Indexes = new int[SPRITE_HEIGHT];//{1,2,3,4};
public Cast_001_Sprite_(float screenWidth, float screenHeight){
generateSpriteIndexes();
updateScreenData(screenWidth, screenHeight);
int index = 0;
switch (mDirection) {
case TOP_TO_DOWN_LEFT_TO_RIGHT:
for(int row = 0; row<SPRITE_HEIGHT; row++){
for(int column = 0; column<SPRITE_WIDTH; column++){
storage[index] = generateTextures(column, row);
index++;
}
}
break;
case DOWN_TO_TOP_LEFT_TO_RIGHT:
//TODO
// for(int row = spriteLength; row>0; row--){
// for(int column = 0; column<spriteHeight; column++){
// storage[index] = generateTextures( row-1, column);
// index++;
// }
// }
break;
default:
break;
}
// vertices buffer
bb1 = ByteBuffer.allocateDirect(vertices.length * 4);
bb1.order(ByteOrder.nativeOrder());
vertexBuffer = bb1.asFloatBuffer();
vertexBuffer.put(vertices);
vertexBuffer.position(0);
for(int i=0; i<TOTAL_IMAGE_COUNT_IN_SPRITE; i++){
bb1 = ByteBuffer.allocateDirect(storage[i].length * 4);
bb1.order(ByteOrder.nativeOrder());
FloatBuffer textureBuffer = bb1.asFloatBuffer();
textureBuffer.put(storage[i]);
textureBuffer.position(0);
floatBufferArray[i] = textureBuffer;
}
}
private void generateSpriteIndexes() {
for(int indexX = 0; indexX<SPRITE_WIDTH; indexX++){
sprite_X_Indexes[indexX] = indexX+1;
}
for(int indexY = 0; indexY<SPRITE_HEIGHT; indexY++){
sprite_Y_Indexes[indexY] = indexY+1;
}
}
public void updateScreenData(float screenWidth, float screenHeight){
// takes screen Height and Width
this.mScreenWidth = (screenWidth > 0) ? screenWidth : 1f;
this.mScreenHeight = screenHeight;
wRatio = 10f/mScreenWidth;
hRatio = mScreenHeight/10f;
addExplosion(mX,mY);
}
public void addExplosion(float x, float y) {
this.x = x;
this.y = y;
this.initPos = y;
}
/**
* Generates texture by location
*
* @param texture - fill current texture
* @param placeX - image place in sprite scale X
* @param placeY - image place in sprite scale Y
* @return
*/
private float[] generateTextures(int placeX, int placeY) {
float texture[] = new float[8];
/*
V1 _____ V3
| |
| |
V2|_____|V4
*/
//StringBuffer buff = new StringBuffer();
/** V1 */
texture[0] = (placeX == 0)?0.0f : xOffset*sprite_X_Indexes[placeX-1];
texture[1] = yOffset*sprite_Y_Indexes[placeY];
/** V2 */
texture[2] = (placeX == 0)?0.0f : xOffset*sprite_X_Indexes[placeX-1];
texture[3] = (placeY == 0)?0.0f : yOffset*sprite_Y_Indexes[placeY-1];
/** V3 */
texture[4] = xOffset*sprite_X_Indexes[placeX];
texture[5] = yOffset*sprite_Y_Indexes[placeY];
/** V4 */
texture[6] = xOffset*sprite_X_Indexes[placeX];
texture[7] = (placeY == 0)?0.0f : yOffset*sprite_Y_Indexes[placeY-1];
return texture;
}
private void update() {
if(mSwitcher == 1){
mFrame = ++mFrame % TOTAL_IMAGE_COUNT_IN_SPRITE;
mSwitcher = 0;
// Log.e(LOG_TAG, "DevQuestSpriteBase :: " + mFrame);
}
else{
mSwitcher++;
}
}
public void reset(){
mFrame = 0;
}
public void loadTextures(GL10 gl, Context context) {
Log.e(LOG_TAG, "ExplosionSprite :: loadTextures");
mFrame = 0;
InputStream is;
Bitmap bitmap;
is = context.getResources().openRawResource(DRAW_SOURCE);
bitmap = BitmapFactory.decodeStream(is);
try {
is.close();
is = null;
} catch (IOException e) {
}
gl.glGenTextures(TEXTURE_COUNT, textures, 0);
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
bitmap.recycle();
}
public void draw(GL10 gl){
// if(mFrame == TOTAL_IMAGE_COUNT_IN_SPRITE - 1){
// return;
// }
gl.glPushMatrix();
try {
float transx = + (wRatio * x);
float transy = + (mScreenHeight*wRatio) - (wRatio * y) - 1/hRatio;
// Log.e(LOG_TAG, "transx: " + transx + "; transy: " + transy + "; sprite.x: "+ sprite.x + "; sprite.y: " + sprite.y);
gl.glTranslatef(transx, transy, 0.0f);
//########### draw ##############
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, floatBufferArray[mFrame]);
update();
gl.glColor4f(1f, 1f, 1f, 0.2f);
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, vertices.length / 3);
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
//###############################
} catch (NullPointerException e) {
}
gl.glPopMatrix();
}
}
DevQuest1Activity.java
public class DevQuest1Activity extends Activity {
private DevQuestGLSurfaceView mGLView;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
mGLView = new DevQuestGLSurfaceView(this);
setContentView(mGLView);
}
@Override
protected void onPause() {
super.onPause();
mGLView.onPause();
}
@Override
protected void onResume() {
super.onResume();
mGLView.onResume();
}
}
DevQuestGLRenderer.java
public class DevQuestGLRenderer implements GLSurfaceView.Renderer {
private static final String LOG_TAG = "Fess";//DevQuestGLRenderer.class.getSimpleName();
private Context context;
private float ratio;
private int screenWidth, screenHeight;
public Cast_001_Sprite Cast_001_Sprite;
public DevQuestGLRenderer(Context context){
this.context = context;
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig confid) {
gl.glEnable(GL10.GL_TEXTURE_2D);
gl.glShadeModel(GL10.GL_SMOOTH);
gl.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
gl.glClearDepthf(1.0f);
gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_NICEST);
gl.glEnable(GL10.GL_BLEND);
gl.glDisable(GL10.GL_DEPTH_TEST);
gl.glBlendFunc(GL10.GL_SRC_ALPHA, GL10.GL_ONE_MINUS_SRC_ALPHA);
gl.glTexEnvf(GL10.GL_TEXTURE_ENV, GL10.GL_TEXTURE_ENV_MODE, /*GL10.GL_REPLACE*/ GL10.GL_MODULATE);
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
Log.e(LOG_TAG, "onSurfaceChanged");
// prevent 0 divise
if(height == 0) { height=1;}
screenWidth = width; screenHeight = height;
ratio = (float) width/height;
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadIdentity();
gl.glOrthof(0, width, 0, height, -10f, 10f);
gl.glViewport(0, 0, screenWidth, screenHeight);
Cast_001_Sprite = new Cast_001_Sprite(width, height);
Cast_001_Sprite.loadTextures(gl, context);
}
@Override
public void onDrawFrame(GL10 gl) {
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
gl.glPushMatrix();
gl.glScalef((screenWidth)/10, (screenHeight*ratio)/10, 1.0f);
Cast_001_Sprite.draw(gl);
gl.glPopMatrix();
}
}
DevQuestGLSurfaceView.java
public class DevQuestGLSurfaceView extends GLSurfaceView {
private DevQuestGLRenderer mRenderer;
private int count = 0;
public DevQuestGLSurfaceView(Context context) {
super(context);
mRenderer = new DevQuestGLRenderer(context);
setRenderer(mRenderer);
}
@Override
public boolean onTouchEvent(MotionEvent event) {
return true;
}
}
ESpriteDirection.java
public enum ESpriteDirection {
TOP_TO_DOWN_LEFT_TO_RIGHT,
DOWN_TO_TOP_LEFT_TO_RIGHT,
TOP_TO_DOWN_RIGHT_TO_LEFT,
DOWN_TO_TOP_RIGHT_TO_LEFT
}
And this is an image I used:
Use this code to load your image:
public static int loadTexture(Context context, int resourceId) {
final int[] textureObjectIds = new int[1];
glGenTextures(1, textureObjectIds, 0);
if (textureObjectIds[0] == 0) {
if (LoggerConfig.ON) {
Log.w(TAG, "Could not generate a new OpenGL texture object.");
}
return 0;
}
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inScaled = false;
// Read in the resource
final Bitmap bitmap = BitmapFactory.decodeResource(
context.getResources(), resourceId, options);
if (bitmap == null) {
if (LoggerConfig.ON) {
Log.w(TAG, "Resource ID " + resourceId + " could not be decoded.");
}
glDeleteTextures(1, textureObjectIds, 0);
return 0;
}
// Bind to the texture in OpenGL
glBindTexture(GL_TEXTURE_2D, textureObjectIds[0]);
// Set filtering: a default must be set, or the texture will be
// black.
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
// Load the bitmap into the bound texture.
texImage2D(GL_TEXTURE_2D, 0, bitmap, 0);
// Note: Following code may cause an error to be reported in the
// ADB log as follows: E/IMGSRV(20095): :0: HardwareMipGen:
// Failed to generate texture mipmap levels (error=3)
// No OpenGL error will be encountered (glGetError() will return
// 0). If this happens, just squash the source image to be
// square. It will look the same because of texture coordinates,
// and mipmap generation will work.
glGenerateMipmap(GL_TEXTURE_2D);
// Recycle the bitmap, since its data has been loaded into
// OpenGL.
bitmap.recycle();
// Unbind from the texture.
glBindTexture(GL_TEXTURE_2D, 0);
return textureObjectIds[0];
}
Guys, I need your help again :)
MainRenderer.java:
package com.example.galaga2d;
import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
import java.nio.ByteOrder;
import java.util.Random;
import java.util.Vector;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView.Renderer;
import android.opengl.GLUtils;
public class MainRenderer implements Renderer {
Random rand = new Random();
float chance = 0.0f;
private Context context;
public Ship playerShip = new Ship();
Vector<Asteroid> asteroidVector = new Vector<Asteroid>();
public MainRenderer(Context context) {
this.context = context;
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
//! TEXTURES
playerShip.loadGLTexture(gl, this.context);
gl.glEnable(GL10.GL_TEXTURE_2D);
gl.glEnable(GL10.GL_BLEND);
gl.glBlendFunc(GL10.GL_ONE, GL10.GL_SRC_COLOR);
//gl.glShadeModel(GL10.GL_SMOOTH);
//! TEXTURES
gl.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
}
@Override
public void onDrawFrame(GL10 gl) {
gl.glClear(GL10.GL_COLOR_BUFFER_BIT);
gl.glLoadIdentity();
chance = rand.nextFloat() * (100.0f - 1.0f) + 1.0f;
if (chance <= 4.0f) {
asteroidVector.addElement(new Asteroid());
}
if (playerShip.life != 0) {
playerShip.draw(gl);
gl.glLoadIdentity();
for (int i = 0; i < asteroidVector.size(); i++) {
if(asteroidVector.elementAt(i).textured == 0) {
asteroidVector.elementAt(i).loadGLTexture(gl, this.context);
asteroidVector.elementAt(i).textured |= 1;
//gl.glLoadIdentity();
} else {
asteroidVector.elementAt(i).textured &= ~1;
}
}
for (int i = 0; i < asteroidVector.size(); i++) {
asteroidVector.elementAt(i).collisionCheck();
asteroidVector.elementAt(i).draw(gl);
if (asteroidVector.elementAt(i).Y > 480.0f) {
asteroidVector.remove(i);
}
gl.glLoadIdentity();
}
} else {
gl.glClearColor(0.0f, 0.0f, 1.0f, 1.0f);
}
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
gl.glViewport(0, 0, width, height);
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadIdentity();
gl.glOrthof(0, width, height, 0, 1, -1);
gl.glViewport(0, 0, width, height);
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
}
// --------------------------------------------------------------------------------
class Ship {
public int life = 3; // number of player lives
public FloatBuffer ShipVertexBuffer; // vertex buffer
public FloatBuffer ShipTextureBuffer; // texture buffer
public float X = 100.0f; // player's initial X coordinate
public float Y = 300.0f; // player's initial Y coordinate
//! TEXTURES
private int[] textures = new int[1];
//! TEXTURES
public float ShipVerticles[] = { // rectangle vertices - the ship
0, 0,
0, 30,
30, 0,
30, 30
};
//! TEXTURES
public float ShipTextures[] = { // texture mapping coordinates, matching
0.0f, 0.0f, // the vertex layout
0.0f, 1.0f,
1.0f, 0.0f,
1.0f, 1.0f
};
//! TEXTURES
public Ship() {
//! Vertex buffer
ByteBuffer bb = ByteBuffer.allocateDirect(36);
bb.order(ByteOrder.nativeOrder());
ShipVertexBuffer = bb.asFloatBuffer();
ShipVertexBuffer.put(ShipVerticles);
ShipVertexBuffer.position(0);
//! TEXTURES
bb = ByteBuffer.allocateDirect(ShipTextures.length * 4);
bb.order(ByteOrder.nativeOrder());
ShipTextureBuffer = bb.asFloatBuffer();
ShipTextureBuffer.put(ShipTextures);
ShipTextureBuffer.position(0);
//! TEXTURES
}
public void loadGLTexture(GL10 gl, Context context) {
// loading texture
Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(),
R.drawable.ship);
// generate one texture pointer
gl.glGenTextures(1, textures, 0);
// ...and bind it to our array
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
// create nearest filtered texture
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
// Use Android GLUtils to specify a two-dimensional texture image from our bitmap
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
// Clean up
bitmap.recycle();
}
public void draw(GL10 gl) {
//! TEXTURE
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
//! TEXTURE
gl.glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
gl.glTranslatef(playerShip.X, playerShip.Y, 0.0f);
gl.glVertexPointer(2, GL10.GL_FLOAT, 0, ShipVertexBuffer);
//! TEXTURE
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, ShipTextureBuffer);
//! TEXTURE
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
//! TEXTURE
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
//! TEXTURE
}
}
class Asteroid {
private float colorR = rand.nextFloat()* (1.0f - 0.3f) + 0.3f;
private float colorG = rand.nextFloat()* (1.0f - 0.3f) + 0.3f;
private float colorB = rand.nextFloat()* (1.0f - 0.3f) + 0.3f;
private float X = rand.nextFloat() * (300.0f - 1.0f) + 1.0f;
private float Y = -30.0f;
private float size = rand.nextFloat() * (30.0f - 20.0f) + 20.0f;
private float speed = rand.nextFloat() * (10.0f - 1.0f) + 1.0f;
private int collision = 0;
private int textured = 0;
private FloatBuffer AsteroidVertexBuffer;
private FloatBuffer AsteroidTextureBuffer;
//! TEXTURES
private int[] textures = new int[1];
//! TEXTURES
public float AsteroidVerticles[] = {
0, 0, // bottom left
0, size, // top left
size, 0, // bottom right
size, size // top right
};
//! TEXTURES
public float AsteroidTextures[] = {
0.0f, 0.0f,
0.0f, 1.0f,
1.0f, 0.0f,
1.0f, 1.0f
};
//! TEXTURES
public Asteroid() {
ByteBuffer bb = ByteBuffer.allocateDirect(36);
bb.order(ByteOrder.nativeOrder());
AsteroidVertexBuffer = bb.asFloatBuffer();
AsteroidVertexBuffer.put(AsteroidVerticles);
AsteroidVertexBuffer.position(0);
//! TEXTURES
bb = ByteBuffer.allocateDirect(AsteroidTextures.length * 4);
bb.order(ByteOrder.nativeOrder());
AsteroidTextureBuffer = bb.asFloatBuffer();
AsteroidTextureBuffer.put(AsteroidTextures);
AsteroidTextureBuffer.position(0);
//! TEXTURES
}
public void collisionCheck() {
float result = (float)Math.sqrt(Math.pow((playerShip.X-X), 2)+Math.pow((playerShip.Y-Y), 2));
if (result < size)
{
if(collision == 0)
{
playerShip.life = playerShip.life - 1;
collision |= 1;
}
} else {
collision &= ~1;
}
}
public void loadGLTexture(GL10 gl, Context context) {
// loading texture
Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(),
R.drawable.asteroid);
// generate one texture pointer
gl.glGenTextures(1, textures, 0);
// ...and bind it to our array
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
// create nearest filtered texture
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
// Use Android GLUtils to specify a two-dimensional texture image from our bitmap
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
// Clean up
bitmap.recycle();
}
public void draw(GL10 gl) {
Y += speed;
//! TEXTURE
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
//! TEXTURE
gl.glColor4f(colorR, colorG, colorB, 1.0f);
gl.glTranslatef(X, Y, 0.0f);
gl.glVertexPointer(2, GL10.GL_FLOAT, 0, AsteroidVertexBuffer);
//! TEXTURE
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, AsteroidTextureBuffer);
//! TEXTURE
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
//! TEXTURE
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
//! TEXTURE
}
}
// --------------------------------------------------------------------------------
}
Every frame I draw an asteroid object by chance:
chance = rand.nextFloat() * (100.0f - 1.0f) + 1.0f;
if (chance <= 4.0f) {
asteroidVector.addElement(new Asteroid());
}
That means we need to load a texture for every new object we create, but we don't need to load the texture for one object many times, so I added a flag to check whether the object is textured or not:
for (int i = 0; i < asteroidVector.size(); i++) {
if(asteroidVector.elementAt(i).textured == 0) {
asteroidVector.elementAt(i).loadGLTexture(gl, this.context);
asteroidVector.elementAt(i).textured |= 1;
//gl.glLoadIdentity();
} else {
asteroidVector.elementAt(i).textured &= ~1;
}
}
After the object is created and textured, we need to delete it if it goes over the screen border, so I do this:
for (int i = 0; i < asteroidVector.size(); i++) {
asteroidVector.elementAt(i).collisionCheck();
asteroidVector.elementAt(i).draw(gl);
//! THIS
if (asteroidVector.elementAt(i).Y > 480.0f) {
asteroidVector.remove(i);
}
//! THIS
gl.glLoadIdentity();
}
But that's not enough, because the texture memory doesn't get cleared, and after 10-20 seconds of the application running I see some lagging and low FPS.
The question is: how can I clear the texture buffer or memory to fix the lagging and low FPS?
I suppose you could do that, but then again you shouldn't be getting yourself into a situation where you actually need to in the first place.
As it now stands, you're creating a new texture object (and never freeing it!) for every Asteroid that's ever constructed, which is absolutely not necessary - a great way to fix that is to use a shared texture object for all of your Asteroid instances.
In a static function (called once), simply decode the image resource to raw pixel data -> glGenTextures -> glBindTexture -> glTexParameter calls -> GLUtils.texImage2D -> free the (client-side) decoded raw pixel data and store the texture id (given by glGenTextures) in a static variable within the Asteroid class. Having done this, you can just bind to this shared texture object at the rendering stage (i.e. draw) and proceed as you normally would.
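A minimal sketch of that idea (the names sharedTextureId and loadSharedTexture are placeholders of mine, not from your code):
class Asteroid {
    // one texture object shared by every Asteroid instance; 0 = not loaded yet
    private static int sharedTextureId = 0;

    // call this once, e.g. from onSurfaceCreated
    public static void loadSharedTexture(GL10 gl, Context context) {
        if (sharedTextureId != 0) return; // already loaded
        Bitmap bitmap = BitmapFactory.decodeResource(
                context.getResources(), R.drawable.asteroid);
        int[] ids = new int[1];
        gl.glGenTextures(1, ids, 0);
        gl.glBindTexture(GL10.GL_TEXTURE_2D, ids[0]);
        gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
        gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
        GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
        bitmap.recycle(); // the pixel data now lives in the GL texture object
        sharedTextureId = ids[0];
    }

    public void draw(GL10 gl) {
        // every asteroid binds the same shared texture id
        gl.glBindTexture(GL10.GL_TEXTURE_2D, sharedTextureId);
        // ... vertex/texcoord pointers and glDrawArrays as in your current draw() ...
    }
}
With that in place you can call Asteroid.loadSharedTexture(gl, this.context) once from onSurfaceCreated and drop the per-instance loadGLTexture call and the textured flag from onDrawFrame.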
As said before, never calling glDeleteTextures is the second part of your problem, although now that you're (hopefully) using a shared texture object for your Asteroid instances, it becomes much less important.
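If you ever do need to free it (for example when tearing the renderer down), it is just a glDeleteTextures call; a sketch, reusing the hypothetical sharedTextureId from above:
// delete the shared texture object once nothing uses it any more
gl.glDeleteTextures(1, new int[] { sharedTextureId }, 0);
sharedTextureId = 0;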
(Furthermore, even the vertex/texcoord buffer can be shared between instances, provided they are identical :P)
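A sketch of that idea too, assuming (this is not in your code) that all asteroids share one unit-sized quad and get their individual size from glScalef instead of per-instance buffers:
// built once, e.g. in a static initializer: a 1x1 quad and its texture coordinates
private static FloatBuffer unitQuadVertices;   // 0,0  0,1  1,0  1,1
private static FloatBuffer unitQuadTexCoords;  // same values as AsteroidTextures

public void draw(GL10 gl) {
    Y += speed;
    gl.glBindTexture(GL10.GL_TEXTURE_2D, sharedTextureId);
    gl.glTranslatef(X, Y, 0.0f);
    gl.glScalef(size, size, 1.0f); // per-asteroid size instead of per-asteroid buffers
    gl.glVertexPointer(2, GL10.GL_FLOAT, 0, unitQuadVertices);
    gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, unitQuadTexCoords);
    gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
    // (client state enable/disable as in your current draw())
}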
I want to rotate the object around the z axis, so I am using the code below, but it's not rotating around a particular position; the rotation just makes it go back and come near again. I think there is something wrong with the values in GLU.gluLookAt(gl, 0, 0, 10, 0, 0, 0, 0, 1, 0);. Please help me set the correct values so that the rotation works well.
gl.glTranslatef(mOrigin.x, mOrigin.y, mOrigin.z);
gl.glRotatef(mRotate.x, 1f, 0f, 0f);
gl.glRotatef(mRotate.y, 0f, 1f, 0f);
gl.glRotatef(mRotate.z, 0f, 0f, 1f);
private class Renderer implements GLSurfaceView.Renderer {
public Renderer() {
setEGLConfigChooser(8, 8, 8, 8, 16, 0);
getHolder().setFormat(PixelFormat.TRANSLUCENT);
setZOrderOnTop(true);
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
gl.glClearColor(0.0f,0.0f,0.0f, 0.0f);
gl.glEnable(GL10.GL_DEPTH_TEST);
gl.glDepthFunc(GL10.GL_LEQUAL);
gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_NICEST);
gl.glEnable(GL10.GL_TEXTURE_2D);
gl.glShadeModel(GL10.GL_SMOOTH);
}
public void onSurfaceChanged(GL10 gl, int w, int h) {
mViewWidth = (float)w;
mViewHeight = (float)h;
gl.glViewport(0,0,w,h);
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadIdentity();
GLU.gluPerspective(gl, 45, mViewWidth/mViewHeight, 0.1f, 100f);
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
}
public void onDrawFrame(GL10 gl) {
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glPushMatrix();
gl.glDisable(GL10.GL_DITHER);
GLU.gluLookAt(gl, 0, 0, 10, 0, 0, 0, 0, 1, 0);
//draw_model
gl.glPushMatrix();
if(mOrigin != null && mRotate != null) {
gl.glTranslatef(mOrigin.x, mOrigin.y, mOrigin.z);
gl.glRotatef(mRotate.x, 1f, 0f, 0f);
gl.glRotatef(mRotate.y, 0f, 1f, 0f);
gl.glRotatef(mRotate.z, 0f, 0f, 1f);
}
if(mModel != null) {
mModel.draw(gl, mContext);
if(!RendererView.textureFileName.equals(""))
mModel.bindTextures(mContext, gl);
}
gl.glPopMatrix();
gl.glPopMatrix();
if(isPictureTake) {
w = getWidth();
h = getHeight();
b = new int[w*(y+h)];
bt = new int[w*h];
IntBuffer ib = IntBuffer.wrap(b);
ib.position(0);
gl.glReadPixels(0, 0, w, h, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, ib);
createBitmapFromGLSurface(mContext);
isPictureTake = false;
}
}
}
ObjLoader.java
package com.amplimesh.models;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.ArrayList;
import java.util.StringTokenizer;
import javax.microedition.khronos.opengles.GL10;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLUtils;
import com.amplimesh.renderer.RendererView;
import com.amplimesh.util.Point3;
/**
* Object loader; it draws the texture and the object.
* @author Ajay
*/
public class ObjModel {
/**
* It fills the texture into the mesh.
* @param context
* @param gl
*/
public void bindTextures(Context context, GL10 gl) {
Bitmap bitmap;
try {
InputStream is = context.getAssets().open("textures/"+RendererView.textureFileName);
bitmap = BitmapFactory.decodeStream(is);
if(bitmap != null) {
// generate one texture pointer
gl.glGenTextures(1, mTextures, 0);
// ...and bind it to our array
gl.glBindTexture(GL10.GL_TEXTURE_2D, mTextures[0]);
// create nearest filtered texture
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
//Different possible texture parameters, e.g. GL10.GL_CLAMP_TO_EDGE
//gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_REPEAT);
//gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_REPEAT);
// Use Android GLUtils to specify a two-dimensional texture image from our bitmap
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
// Clean up
bitmap.recycle();
}
} catch (java.io.IOException e) {
return;
}
}
/**
* It draws the object.
* @param gl
*/
public void draw(GL10 gl, Context mContext) {
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
gl.glEnableClientState(GL10.GL_NORMAL_ARRAY);
for (Model model : mModels) {
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, model.v);
if (model.vt != null && mTextures != null) {
gl.glBindTexture(GL10.GL_TEXTURE_2D, mTextures[0]);
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, model.vt);
}
if (model.vn != null) {
gl.glNormalPointer(GL10.GL_FLOAT, 0, model.vn);
}
gl.glDrawArrays(GL10.GL_TRIANGLES, 0, model.v_size);
}
gl.glDisableClientState(GL10.GL_NORMAL_ARRAY);
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
}
/**
* It loads the object from a stream.
* @param is
* @param texture_name
* @return
* @throws IOException
*/
public static ObjModel loadFromStream(InputStream is, String texture_name) throws IOException {
ObjModel obj = ObjLoader.loadFromStream(is);
return obj;
}
private Model mModels[];
private int mTextures[] = new int[1];
/**
* It reads the obj file.
* @author Ajay
*/
private static class ObjLoader {
public static ObjModel loadFromStream(InputStream is) throws IOException {
BufferedReader reader = new BufferedReader(new InputStreamReader(is));
ObjModel obj = new ObjModel();
ArrayList<Point3> v = new ArrayList<Point3>();
ArrayList<Point3> vt = new ArrayList<Point3>();
ArrayList<Point3> vn = new ArrayList<Point3>();
ArrayList<Face> f = new ArrayList<Face>();
ArrayList<Model> o = new ArrayList<Model>();
boolean o_pending=false;
while(reader.ready()) {
String line = reader.readLine();
if (line == null)
break;
StringTokenizer tok = new StringTokenizer(line);
String cmd = tok.nextToken();
if (cmd.equals("o")) {
if (o_pending) {
Model model = new Model();
model.fill(f, vt.size() > 0, vn.size() > 0);
o.add(model);
}
else {
o_pending=true;
}
}
else
if (cmd.equals("v")) {
v.add(read_point(tok));
}
else
if (cmd.equals("vn")) {
vn.add(read_point(tok));
}
else
if (cmd.equals("vt")) {
vt.add(read_point(tok));
}
else
if (cmd.equals("f")) {
if (tok.countTokens() != 3)
continue;
Face face = new Face(3);
while (tok.hasMoreTokens()) {
StringTokenizer face_tok = new StringTokenizer(tok.nextToken(), "/");
int v_idx = -1;
int vt_idx = -1;
int vn_idx = -1;
v_idx = Integer.parseInt(face_tok.nextToken());
if (face_tok.hasMoreTokens()) vt_idx = Integer.parseInt(face_tok.nextToken());
if (face_tok.hasMoreTokens()) vn_idx = Integer.parseInt(face_tok.nextToken());
//Log.v("objmodel", "face: "+v_idx+"/"+vt_idx+"/"+vn_idx);
face.addVertex(
v.get(v_idx-1),
vt_idx == -1 ? null : vt.get(vt_idx-1),
vn_idx == -1 ? null : vn.get(vn_idx-1)
);
}
f.add(face);
}
}
if (o_pending) {
Model model = new Model();
model.fill(f, vt.size() > 0, vn.size() > 0);
o.add(model);
}
obj.mModels = new Model[o.size()];
o.toArray(obj.mModels);
return obj;
}
private static Point3 read_point(StringTokenizer tok) {
Point3 ret = new Point3();
if (tok.hasMoreTokens()) {
ret.x = Float.parseFloat(tok.nextToken());
if (tok.hasMoreTokens()) {
ret.y = Float.parseFloat(tok.nextToken());
if (tok.hasMoreTokens()) {
ret.z = Float.parseFloat(tok.nextToken());
}
}
}
return ret;
}
}
private static class Face {
Point3 v[];
Point3 vt[];
Point3 vn[];
int size;
int count;
public Face(int size) {
this.size = size;
this.count = 0;
this.v = new Point3[size];
this.vt = new Point3[size];
this.vn = new Point3[size];
}
public boolean addVertex(Point3 v, Point3 vt, Point3 vn) {
if (count >= size)
return false;
this.v[count] = v;
this.vt[count] = vt;
this.vn[count] = vn;
count++;
return true;
}
public void pushOnto(FloatBuffer v_buffer, FloatBuffer vt_buffer, FloatBuffer vn_buffer) {
int i;
for (i=0; i<size; i++) {
v_buffer.put(v[i].x); v_buffer.put(v[i].y); v_buffer.put(v[i].z);
if (vt_buffer != null && vt[i] != null) {
vt_buffer.put(vt[i].x); vt_buffer.put(vt[i].y);
}
if (vn_buffer != null && vn[i] != null) {
vn_buffer.put(vn[i].x); vn_buffer.put(vn[i].y); vn_buffer.put(vn[i].z);
}
}
}
}
/**
* It holds the vertex buffer, vertex normals and texture coordinates.
* @author Ajay
*/
private static class Model {
public FloatBuffer v;
public FloatBuffer vt;
public FloatBuffer vn;
public int v_size;
public void fill(ArrayList<Face> faces, boolean has_tex, boolean has_normals) {
int f_len = faces.size();
this.v_size = f_len * 3;
ByteBuffer tBuf = ByteBuffer.allocateDirect(this.v_size*3 * 4);
tBuf.order(ByteOrder.nativeOrder());
this.v = tBuf.asFloatBuffer();
if (has_tex) {
ByteBuffer vtBuf = ByteBuffer.allocateDirect(this.v_size*3 * 4);
vtBuf.order(ByteOrder.nativeOrder());
this.vt = vtBuf.asFloatBuffer();
}
if (has_normals) {
ByteBuffer vnBuf = ByteBuffer.allocateDirect(this.v_size*3 * 4);
vnBuf.order(ByteOrder.nativeOrder());
this.vn = vnBuf.asFloatBuffer();
}
int i;
for (i=0; i < f_len; i++) {
Face face = faces.get(i);
face.pushOnto(this.v, this.vt, this.vn);
}
this.v.rewind();
if (this.vt != null)
this.vt.rewind();
if (this.vn != null)
this.vn.rewind();
}
}
}
To rotate an object at a specific position around an axis, you first need to translate the object's center to the origin. In your example, you have an object at (mOrigin) which may be relative to another translation (your camera's position).
Instead of translating your object to its final position and then rotating, you need to translate it to (0,0,0), rotate and then translate to the final position.
In the simplest case this would be:
gl.glRotatef (mRotate.x, 1f, 0f, 0f);
gl.glRotatef (mRotate.y, 0f, 1f, 0f);
gl.glRotatef (mRotate.z, 0f, 0f, 1f);
gl.glTranslatef (mOrigin.x, mOrigin.y, mOrigin.z);
In the more complicated case, where your object is relative to the camera, you would have to do something like this:
gl.glTranslatef (-Camera.x, -Camera.y, -Camera.z);
gl.glRotatef (mRotate.x, 1f, 0f, 0f);
gl.glRotatef (mRotate.y, 0f, 1f, 0f);
gl.glRotatef (mRotate.z, 0f, 0f, 1f);
gl.glTranslatef (Camera.x + mOrigin.x, Camera.y + mOrigin.y, Camera.z + mOrigin.z);
The rotation component of the gluLookAt seems fine, so I don't see any problem there that could cause issues.
First, from the link you provided, it seems to me that you want to rotate about the Y axis, although you mentioned rotation about the Z axis; these can give different results. https://www.dropbox.com/s/ozt7beo4gz5q293/demo2__A.avi
Second, what are the values you are using within the m_Rotate vector? Are they in degrees or radians?
Third, are you pushing or popping the matrix stack? If so, how? Or are you feeding your own matrix data into OpenGL?
Also, have you made sure to call glLoadIdentity() to reset the view matrix back to the identity matrix during your draw calls?
TBH there can be many areas that may cause the issue. To narrow down the possible problem area, do the following in your draw function:
Stop using gluLookAt.
Remove all push and pop calls.
First, make sure to call
glLoadIdentity();
Translate your camera behind the object (in your gluLookAt this is set to 0, 0, 10):
glTranslatef(0.0f,0.0f,-10.0f);
Then do a simple rotation around the Z axis:
glRotatef(mRotate.z, 0.0f, 0.0f, 1.0f);
EDIT: You call onDrawFrame to draw things in the scene, as far as I can tell, and this call is where you need to make these changes to figure out where your problem is.
Instead of this:
public void onDrawFrame(GL10 gl) {
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glPushMatrix();
gl.glDisable(GL10.GL_DITHER);
GLU.gluLookAt(gl, 0, 0, 10, 0, 0, 0, 0, 1, 0);
//draw_model
gl.glPushMatrix();
if(mOrigin != null && mRotate != null) {
gl.glTranslatef(mOrigin.x, mOrigin.y, mOrigin.z);
gl.glRotatef(mRotate.x, 1f, 0f, 0f);
gl.glRotatef(mRotate.y, 0f, 1f, 0f);
gl.glRotatef(mRotate.z, 0f, 0f, 1f);
}
...
gl.glPopMatrix();
gl.glPopMatrix();
}
Try something simpler like this:
public void onDrawFrame(GL10 gl) {
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glDisable(GL10.GL_DITHER);
gl.glMatrixMode(GL10.GL_MODELVIEW); //making sure OpenGL currently in model view
gl.glLoadIdentity(); //clear the model view matrix to identity matrix
gl.glTranslatef(0.0f, 0.0f, -10.0f); //move 10 unit backwards so as if camera moves backwards.
gl.glRotatef(90.0f, 0.0f, 0.0f, 1.0f); // rotate 90 degrees around the z axis.
//draw_model
...
//gl.glPopMatrix();
//gl.glPopMatrix();
}
By doing so the possible problem area is reduced a lot, and you can start figuring out the problem. If the above code works, then the possible errors are narrowed down a lot.
It could be that:
you are setting the matrix mode to something other than model view somewhere else and not setting it back in your draw call,
you are not loading the identity matrix in your draw call,
your origin and rotation values are not correct.
BTW: I'm not an expert in Java, but in C++ and Objective-C we cannot write 1f to set a float value; we must use 1.0f, otherwise the compiler will complain.
In my app I'm getting data from the camera, creating a bitmap object, and I must show it using OpenGL, so for that I have a Square class and a GlRenderer class. The problem is that it works very slowly, and every time it shows some image, then a black screen, then another image, and so on. Below are my classes.
Square class
package com.example.videochat;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.opengles.GL10;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLUtils;
public class Square {
private FloatBuffer vertexBuffer; // buffer holding the vertices
private float vertices[] = {
-1.0f, -1.0f, 0.0f, // V1 - bottom left
-1.0f, 1.0f, 0.0f, // V2 - top left
1.0f, -1.0f, 0.0f, // V3 - bottom right
1.0f, 1.0f, 0.0f // V4 - top right
};
private Bitmap m_bitmap;
private FloatBuffer textureBuffer; // buffer holding the texture coordinates
private float texture[] = {
// Mapping coordinates for the vertices
0.0f, 1.0f, // top left (V2)
0.0f, 0.0f, // bottom left (V1)
1.0f, 1.0f, // top right (V4)
1.0f, 0.0f // bottom right (V3)
};
/** The texture pointer */
private int[] textures = new int[1];
public Square(Bitmap bitmap) {
// a float has 4 bytes so we allocate for each coordinate 4 bytes
m_bitmap = bitmap;
ByteBuffer byteBuffer = ByteBuffer.allocateDirect(vertices.length * 4);
byteBuffer.order(ByteOrder.nativeOrder());
// allocates the memory from the byte buffer
vertexBuffer = byteBuffer.asFloatBuffer();
// fill the vertexBuffer with the vertices
vertexBuffer.put(vertices);
// set the cursor position to the beginning of the buffer
vertexBuffer.position(0);
byteBuffer = ByteBuffer.allocateDirect(texture.length * 4);
byteBuffer.order(ByteOrder.nativeOrder());
textureBuffer = byteBuffer.asFloatBuffer();
textureBuffer.put(texture);
textureBuffer.position(0);
}
public void loadGLTexture(GL10 gl, Context context) {
// generate one texture pointer
gl.glGenTextures(1, textures, 0);
// ...and bind it to our array
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
// create nearest filtered texture
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
//Different possible texture parameters, e.g. GL10.GL_CLAMP_TO_EDGE
// gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_REPEAT);
// gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_REPEAT);
// Use Android GLUtils to specify a two-dimensional texture image from our bitmap
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, m_bitmap, 0);
// Clean up
m_bitmap.recycle();
}
/** The draw method for the square with the GL context */
public void draw(GL10 gl) {
// bind the previously generated texture
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
// Point to our buffers
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
// Set the face rotation
gl.glFrontFace(GL10.GL_CW);
// Point to our vertex buffer
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureBuffer);
// Draw the vertices as triangle strip
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, vertices.length / 3);
//Disable the client state before leaving
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
}
}
GlRenderer class
package com.example.videochat;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.content.Context;
import android.graphics.Bitmap;
import android.opengl.GLU;
import android.opengl.GLSurfaceView.Renderer;
public class GlRenderer implements Renderer {
private Square square; // the square
private Context context;
/** Constructor to set the handed over context */
public GlRenderer(Context context, Bitmap bitmap) {
this.context = context;
// initialise the square
this.square = new Square(bitmap);
}
@Override
public void onDrawFrame(GL10 gl) {
// clear Screen and Depth Buffer
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
// Reset the Modelview Matrix
gl.glLoadIdentity();
// Drawing
gl.glTranslatef(0.0f, 0.0f, -5.0f); // move 5 units INTO the screen
// is the same as moving the camera 5 units away
// gl.glScalef(0.5f, 0.5f, 0.5f); // scale the square to 50%
// otherwise it will be too large
square.draw(gl); // Draw the triangle
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
if(height == 0) { //Prevent A Divide By Zero By
height = 1; //Making Height Equal One
}
gl.glViewport(0, 0, width, height); //Reset The Current Viewport
gl.glMatrixMode(GL10.GL_PROJECTION); //Select The Projection Matrix
gl.glLoadIdentity(); //Reset The Projection Matrix
//Calculate The Aspect Ratio Of The Window
GLU.gluPerspective(gl, 45.0f, (float)width / (float)height, 0.1f, 100.0f);
gl.glMatrixMode(GL10.GL_MODELVIEW); //Select The Modelview Matrix
gl.glLoadIdentity(); //Reset The Modelview Matrix
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
// Load the texture for the square
square.loadGLTexture(gl, this.context);
gl.glEnable(GL10.GL_TEXTURE_2D); //Enable Texture Mapping ( NEW )
gl.glShadeModel(GL10.GL_SMOOTH); //Enable Smooth Shading
gl.glClearColor(0.0f, 0.0f, 0.0f, 0.5f); //Black Background
gl.glClearDepthf(1.0f); //Depth Buffer Setup
gl.glEnable(GL10.GL_DEPTH_TEST); //Enables Depth Testing
gl.glDepthFunc(GL10.GL_LEQUAL); //The Type Of Depth Testing To Do
//Really Nice Perspective Calculations
gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_NICEST);
}
}
And the relevant part of the main class:
public void initGlSurfaceView (Bitmap bmp, SurfaceView VideoCaptureBitmap) {
glSurfaceView = new GLSurfaceView(this);
glSurfaceView.setRenderer(new GlRenderer(this,bmp));
setContentView(glSurfaceView);
}
private void startVideo() {
if(m_Camera != null) return;
SurfaceHolder videoCaptureViewHolder = null;
try {
m_Camera = Camera.open();
} catch (RuntimeException e) {
Log.e("CameraTest", "Camera Open filed");
return;
}
m_Camera.setErrorCallback(new ErrorCallback() {
public void onError(int error, Camera camera) {
}
});
Camera.Parameters parameters = m_Camera.getParameters();
//parameters.setPreviewFrameRate(30);
List<Size> supportedPreviewSizes=parameters.getSupportedPreviewSizes();
Iterator<Size> supportedPreviewSizesIterator=supportedPreviewSizes.iterator();
while(supportedPreviewSizesIterator.hasNext()){
Size tmpSize=supportedPreviewSizesIterator.next();
Log.v("CameraTest","supportedPreviewSize.width = "+tmpSize.width+"supportedPreviewSize.height = "+tmpSize.height);
}
m_Camera.setParameters(parameters);
if (null != m_VideoCaptureView)
videoCaptureViewHolder = m_VideoCaptureView.getHolder();
try {
m_Camera.setPreviewDisplay(videoCaptureViewHolder);
} catch (Throwable t) {
}
Log.v("CameraTest","Camera PreviewFrameRate = "+m_Camera.getParameters().getPreviewFrameRate());
Size previewSize=m_Camera.getParameters().getPreviewSize();
int dataBufferSize=(int)(previewSize.height*previewSize.width*
(ImageFormat.getBitsPerPixel(m_Camera.getParameters().getPreviewFormat())/8.0));
m_Camera.addCallbackBuffer(new byte[dataBufferSize]);
m_Camera.addCallbackBuffer(new byte[dataBufferSize]);
m_Camera.addCallbackBuffer(new byte[dataBufferSize]);
m_Camera.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() {
private long timestamp=0;
public synchronized void onPreviewFrame(byte[] data, Camera camera) {
//Log.v("CameraTest","Time Gap = "+(System.currentTimeMillis()-timestamp));
//int size = data.length;
//Bitmap btmp = BitmapFactory.decodeByteArray(data, 0, size);
//m_VideCaptureBitmap.setImageBitmap(btmp);
Size previewSize = camera.getParameters().getPreviewSize();
YuvImage yuvimage=new YuvImage(data, ImageFormat.NV21, previewSize.width, previewSize.height, null);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
yuvimage.compressToJpeg(new Rect(0, 0, previewSize.width, previewSize.height), 80, baos);
byte[] jdata = baos.toByteArray();
// Convert to Bitmap
Bitmap bmp = BitmapFactory.decodeByteArray(jdata, 0, jdata.length);
//m_VideCaptureBitmap.setImageBitmap(bmp);
initGlSurfaceView(bmp, m_VideCaptureBitmap);
// set our renderer to be the main renderer with
// the current activity context
//setContentView(glSurfaceView);
Log.v("CameraTest","Frame size = "+ data.length);
timestamp=System.currentTimeMillis();
try{
camera.addCallbackBuffer(data);
}catch (Exception e) {
Log.e("CameraTest", "addCallbackBuffer error");
return;
}
return;
}
});
try {
m_Camera.startPreview();
} catch (Throwable e) {
m_Camera.release();
m_Camera = null;
return;
}
}
Can anybody help me? Thanks!
Regards