I implemented an FBO in my OpenGL game, and I'm rendering what is drawn to the screen into a texture. The problem is that the render-to-texture output starts from the lower-left corner. Look:
What is rendered to the default framebuffer:
What is rendered to the texture attached to the FBO:
But where I want it rendered in the texture is:
How can I do this? Here is the renderer class (the FBO work is done in the onDrawFrame function):
public class CurlRenderer implements GLSurfaceView.Renderer {
// Constant for requesting right page rect.
public static final int PAGE = 1;
// Set to true for checking quickly how perspective projection looks.
private static final boolean USE_PERSPECTIVE_PROJECTION = false;
// Background fill color.
private int mBackgroundColor;
// Curl meshes used for static and dynamic rendering.
private CurlMesh mCurlMesh;
private RectF mMargins = new RectF();
private CurlRenderer.Observer mObserver;
// Page rectangles.
private RectF mPageRect;
// View mode.
// Screen size.
private int mViewportWidth, mViewportHeight;
// Rect for render area.
private RectF mViewRect = new RectF();
private boolean first = true;
int[] fb, renderTex;
int texW = 300;
int texH = 256;
IntBuffer texBuffer;
int[] buf = new int[texW * texH];
GL11ExtensionPack gl11ep ;
/**
* Basic constructor.
*/
public CurlRenderer(CurlRenderer.Observer observer) {
mObserver = observer;
mCurlMesh = new CurlMesh(0);
mPageRect = new RectF();
}
/**
* Adds CurlMesh to this renderer.
*/
public synchronized void addCurlMesh(CurlMesh mesh) {
mCurlMesh = mesh;
}
/**
* Returns the rect reserved for the page. The value of page should be
* PAGE.
*/
public RectF getPageRect(int page) {
if (page == PAGE) {
return mPageRect;
}
return null;
}
public void setup(GL10 gl){
fb = new int[1];
renderTex = new int[1];
// generate
((GL11ExtensionPack)gl).glGenFramebuffersOES(1, fb, 0);
gl.glEnable(GL10.GL_TEXTURE_2D);
gl.glGenTextures(1, renderTex, 0);// generate texture
gl.glBindTexture(GL10.GL_TEXTURE_2D, renderTex[0]);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
// texBuffer = ByteBuffer.allocateDirect(buf.length*4).order(ByteOrder.nativeOrder()).asIntBuffer();
// gl.glTexEnvf(GL10.GL_TEXTURE_ENV, GL10.GL_TEXTURE_ENV_MODE,GL10.GL_MODULATE);
gl.glTexImage2D(GL10.GL_TEXTURE_2D, 0, GL10.GL_RGBA, texW, texH, 0, GL10.GL_RGBA, GL10.GL_UNSIGNED_SHORT_4_4_4_4, null);
gl.glDisable(GL10.GL_TEXTURE_2D);
}
boolean RenderStart(GL10 gl){
// Bind the framebuffer
((GL11ExtensionPack)gl).glBindFramebufferOES(GL11ExtensionPack.GL_FRAMEBUFFER_OES, fb[0]);
// specify texture as color attachment
((GL11ExtensionPack)gl).glFramebufferTexture2DOES(GL11ExtensionPack.GL_FRAMEBUFFER_OES, GL11ExtensionPack.GL_COLOR_ATTACHMENT0_OES, GL10.GL_TEXTURE_2D, renderTex[0], 0);
int error = gl.glGetError();
if (error != GL10.GL_NO_ERROR) {
Log.d("err", "Background Load GLError: " + error+" ");
}
int status = ((GL11ExtensionPack)gl).glCheckFramebufferStatusOES(GL11ExtensionPack.GL_FRAMEBUFFER_OES);
if (status != GL11ExtensionPack.GL_FRAMEBUFFER_COMPLETE_OES)
{
Log.d("err", "Background Load GLError: " + status+" ");;
return true;
}
gl.glClear(GL10.GL_COLOR_BUFFER_BIT);
return true;
}
void RenderEnd(GL10 gl){
((GL11ExtensionPack)gl).glBindFramebufferOES(GL11ExtensionPack.GL_FRAMEBUFFER_OES, 0);
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glEnable(GL10.GL_TEXTURE_2D);
gl.glBindTexture(GL10.GL_TEXTURE_2D, renderTex[0]);
gl.glColor4f(1,1,1,1);
gl.glDisable(GL10.GL_TEXTURE_2D);
}
@Override
public synchronized void onDrawFrame(GL10 gl) {
if(first){
int h = GLES20.glGetError();
this.setup(gl);
if(h!=0){
Log.d("ERROR", "ERROR Happend"+h+"");
}
first = false;
}
mObserver.onDrawFrame();
// glClearColor registers the clear color we picked with the graphics card
gl.glClearColor(Color.red(mBackgroundColor) / 255f,
Color.green(mBackgroundColor) / 255f,
Color.blue(mBackgroundColor) / 255f,
Color.alpha(mBackgroundColor) / 255f);
// glClear clears the buffer to the color we registered above
gl.glClear(GL10.GL_COLOR_BUFFER_BIT);
// Resets the matrix to identity so glRotate and glTranslate don't cause problems later,
// because our page movement is relative to the paper's original position,
// not to wherever it happens to be right now
gl.glLoadIdentity();
if (USE_PERSPECTIVE_PROJECTION) {
gl.glTranslatef(0, 0, -6f);
}
RenderStart(gl);
mCurlMesh.onDrawFrame(gl);
RenderEnd(gl);
mCurlMesh.onDrawFrame(gl);
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
gl.glViewport(0, 0, width, height);
mViewportWidth = width;
mViewportHeight = height;
float ratio = (float) width / height;
mViewRect.top = 1.0f;
mViewRect.bottom = -1.0f;
mViewRect.left = -ratio;
mViewRect.right = ratio;
updatePageRects();
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadIdentity();
if (USE_PERSPECTIVE_PROJECTION) {
GLU.gluPerspective(gl, 20f, (float) width / height, .1f, 100f);
} else {
GLU.gluOrtho2D(gl, mViewRect.left, mViewRect.right,
mViewRect.bottom, mViewRect.top);
}
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
// mCurlMesh.setup(gl);
gl.glClearColor(0f, 0f, 0f, 1f);
gl.glShadeModel(GL10.GL_SMOOTH);
gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_NICEST);
gl.glHint(GL10.GL_LINE_SMOOTH_HINT, GL10.GL_NICEST);
//gl.glHint(GL10.GL_POLYGON_SMOOTH_HINT, GL10.GL_NICEST);
gl.glEnable(GL10.GL_LINE_SMOOTH);
gl.glDisable(GL10.GL_DEPTH_TEST);
gl.glDisable(GL10.GL_CULL_FACE);
}
/**
* Change background/clear color.
*/
public void setBackgroundColor(int color) {
mBackgroundColor = color;
}
/**
* Set margins or padding. Note: margins are proportional, meaning a value
* of .1f will produce a 10% margin.
*/
public synchronized void setMargins(float left, float top, float right,
float bottom) {
mMargins.left = left;
mMargins.top = top;
mMargins.right = right;
mMargins.bottom = bottom;
updatePageRects();
}
/**
* Translates screen coordinates into view coordinates.
* Converts the coordinates of a point on the screen (e.g. the pointer position)
* into the equivalent coordinates on the CurlView.
*/
public void translate(PointF pt) {
pt.x = mViewRect.left + (mViewRect.width() * pt.x / mViewportWidth);
pt.y = mViewRect.top - (-mViewRect.height() * pt.y / mViewportHeight);
}
/**
* Recalculates page rectangles.
*/
private void updatePageRects() {
if (mViewRect.width() == 0 || mViewRect.height() == 0) {
return;
}
/**
* TODO: this does exactly what I want here, i.e. it sizes the page.
* mPageRect is the mesh itself and mViewRect is the layout's view.
*/
mPageRect.set(mViewRect);
mPageRect.left += mViewRect.width() * mMargins.left;
mPageRect.right -= mViewRect.width() * mMargins.right;
mPageRect.top += mViewRect.height() * mMargins.top;
mPageRect.bottom -= mViewRect.height() * mMargins.bottom;
int bitmapW = (int) ((mPageRect.width() * mViewportWidth) / mViewRect.width());
int bitmapH = (int) ((mPageRect.height() * mViewportHeight) / mViewRect.height());
mObserver.onPageSizeChanged(bitmapW, bitmapH);
}
/**
* Observer interface for render engine/state updates.
*/
public interface Observer {
/**
* Called from onDrawFrame before rendering is started. This is
* intended to be used for animation purposes.
*/
public void onDrawFrame();
/**
* Called once page size is changed. Width and height tell the page size
* in pixels making it possible to update textures accordingly.
*/
public void onPageSizeChanged(int width, int height);
}
}
You're not setting the viewport for the FBO rendering. If you just want to draw the same part of the geometry as you draw to the default framebuffer, you would use the texture size for the viewport dimensions:
glViewport(0, 0, texW, texH);
Don't forget to set the viewport back to the appropriate size of the view/surface when you're done with FBO rendering, and start rendering to the default framebuffer again.
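For example, here is a minimal sketch of how this could fit into the RenderStart/RenderEnd helpers from the question, reusing the existing texW, texH, mViewportWidth and mViewportHeight fields (the texture attachment and the completeness check from your version stay as they are, abbreviated here):

boolean RenderStart(GL10 gl) {
    // Bind the FBO; attach the texture and check completeness as in the original ...
    ((GL11ExtensionPack) gl).glBindFramebufferOES(GL11ExtensionPack.GL_FRAMEBUFFER_OES, fb[0]);
    // Match the viewport to the texture while rendering into the FBO.
    gl.glViewport(0, 0, texW, texH);
    gl.glClear(GL10.GL_COLOR_BUFFER_BIT);
    return true;
}

void RenderEnd(GL10 gl) {
    // Back to the default framebuffer.
    ((GL11ExtensionPack) gl).glBindFramebufferOES(GL11ExtensionPack.GL_FRAMEBUFFER_OES, 0);
    // Restore the viewport of the on-screen surface.
    gl.glViewport(0, 0, mViewportWidth, mViewportHeight);
}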
To draw a different (sub-)section of the geometry, as indicated in your sketch, you have a few options:
Use a modelview transformation to translate/scale the geometry.
Adjust the projection transformation.
Adjust the viewport.
The results from using any of these may be slightly different, depending on what and how you render. Particularly if lighting is involved, or a perspective projection, not all options will give exactly the same result. In that case, you'll have to decide which behavior you want.
Changing one of the transformations is probably the most standard approach. But adjusting the viewport can be an elegant alternative, depending on what exactly you're trying to achieve.
For example, just roughly guessing the values based on your sketch, you could use:
glViewport(texW / 4, -texH / 4, texW / 2, texH);
This defines the viewport rectangle to approximately match the dashed orange rectangle in your sketch. You may need some more math for the values to maintain the aspect ratio, but this shows the fundamental idea.
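If you'd rather compute the values exactly, here is a sketch of the idea, assuming you describe the desired sub-rectangle in normalized [0..1] coordinates of the full view (subLeft, subBottom, subWidth and subHeight are hypothetical names, not part of your code). The full view is scaled up so that only the chosen sub-rectangle lands on the texture:

// Hypothetical helper: make the sub-rectangle starting at (subLeft, subBottom)
// with size (subWidth, subHeight), given as fractions of the full view,
// fill the whole texture.
void setSubRectViewport(GL10 gl, float subLeft, float subBottom,
        float subWidth, float subHeight) {
    int vpW = Math.round(texW / subWidth);   // scale the full view up ...
    int vpH = Math.round(texH / subHeight);
    int vpX = Math.round(-subLeft * vpW);    // ... and shift it so the chosen
    int vpY = Math.round(-subBottom * vpH);  // corner ends up at (0, 0)
    gl.glViewport(vpX, vpY, vpW, vpH);
}

With subLeft = subBottom = 0 and subWidth = subHeight = 1 this reduces to the plain glViewport(0, 0, texW, texH) case above.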
Related
I have an OpenGL scene with a sphere having a radius of 1, and the camera being at the center of the sphere (it's a 360° picture viewer). The user can rotate the sphere by panning.
Now I need to display 2D pins "attached" to some parts of the picture. To do so, I want to convert the 3D coordinates of my pins into 2D screen coordinates, to add the pin image at that screen coordinates.
I'm using GLU.gluProject and the following classes from android-apidemo:
MatrixGrabber
MatrixStack
MatrixTrackingGL
I save the projection matrix in the onSurfaceChanged method and the model-view matrix in the onDrawFrame method (after having drawn my sphere). Then I feed them to GLU.gluProject when the user rotates the sphere, to update the pins' positions.
When I pan horizontally, the pins pan correctly, but when I pan vertically, the texture pans "faster" than the pin image (as if the pin were closer to the camera than the sphere).
Here are some relevant parts of my code:
public class CustomRenderer implements GLSurfaceView.Renderer {
MatrixGrabber mMatrixGrabber = new MatrixGrabber();
private float[] mModelView = null;
private float[] mProjection = null;
[...]
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
// Get the sizes:
float side = Math.max(width, height);
int x = (int) (width - side) / 2;
int y = (int) (height - side) / 2;
// Set the viewport:
gl.glViewport(x, y, (int) side, (int) side);
// Set the perspective:
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadIdentity();
GLU.gluPerspective(gl, FIELD_OF_VIEW_Y, 1, Z_NEAR, Z_FAR);
// Grab the projection matrix:
mMatrixGrabber.getCurrentProjection(gl);
mProjection = mMatrixGrabber.mProjection;
// Set to MODELVIEW mode:
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
}
@Override
public void onDrawFrame(GL10 gl) {
// Load the texture if needed:
if(mTextureToLoad != null) {
mSphere.loadGLTexture(gl, mTextureToLoad);
mTextureToLoad = null;
}
// Clear:
gl.glClearColor(0.5f, 0.5f, 0.5f, 0.0f);
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glLoadIdentity();
// Rotate the scene:
gl.glRotatef( (1 - mRotationY + 0.25f) * 360, 1, 0, 0); // 0.25 is used to adjust the texture position
gl.glRotatef( (1 - mRotationX + 0.25f) * 360, 0, 1, 0); // 0.25 is used to adjust the texture position
// Draw the sphere:
mSphere.draw(gl);
// Grab the model-view matrix:
mMatrixGrabber.getCurrentModelView(gl);
mModelView = mMatrixGrabber.mModelView;
}
public float[] getScreenCoords(float x, float y, float z) {
if(mModelView == null || mProjection == null) return null;
float[] result = new float[3];
int[] view = new int[] {0, 0, (int) mSurfaceViewSize.getWidth(), (int) mSurfaceViewSize.getHeight()};
GLU.gluProject(x, y, z,
mModelView, 0,
mProjection, 0,
view, 0,
result, 0);
result[1] = mSurfaceViewSize.getHeight() - result[1];
return result;
}
}
I use the result of the getScreenCoords method to display my pins. The y value is wrong.
What am I doing wrong?
I'm trying to draw a simple line drawing connecting several vertices in OpenGL ES. However, the line is drawn inverted or in a different position from where it should be. I've attached the class for the line drawing below.
ConnectingPath.java
--------------------
public class ConnectingPath {
int positionBufferId;
PointF[] verticesList;
public float vertices[];
public FloatBuffer vertexBuffer;
public ConnectingPath(LinkedList<PointF> verticesList, float[] colors)
{
List<PointF> tempCorners = verticesList;
int i = 0;
this.verticesList = new PointF[tempCorners.size()];
for (PointF corner : tempCorners) {
this.verticesList[i++] = corner;
}
}
public float[] getTransformedVertices()
{
float z;
List<Float> finalVertices = new ArrayList<Float>();
finalVertices.clear();
for(PointF point : verticesList){
finalVertices.add(point.x);
finalVertices.add(point.y);
finalVertices.add(0.0f);
}
int i = 0;
float[] verticesArray = new float[finalVertices.size()];
for (Float f : finalVertices) {
verticesArray[i++] = (f != null ? f : Float.NaN);
}
return verticesArray;
}
public void initBooth(){
vertices = this.getTransformedVertices();
for(Float f : vertices){
Log.d("Mapsv3--", f + "");
}
ByteBuffer bb = ByteBuffer.allocateDirect(vertices.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(vertices);
vertexBuffer.position(0);
int[] buffers = new int[1];
GLES11.glGenBuffers(1, buffers, 0);
GLES11.glBindBuffer(GLES11.GL_ARRAY_BUFFER, buffers[0]);
GLES11.glBufferData(GLES11.GL_ARRAY_BUFFER, 4 * vertices.length, vertexBuffer, GLES11.GL_STATIC_DRAW);
positionBufferId = buffers[0];
}
public void Render(GL10 gl){
GLES11.glPushMatrix();
GLES11.glBindBuffer(GLES11.GL_ARRAY_BUFFER, positionBufferId);
GLES11.glEnableClientState(GL10.GL_VERTEX_ARRAY);
GLES11.glVertexPointer(3, GL10.GL_FLOAT, 0, 0);
GLES11.glBindBuffer(GLES11.GL_ARRAY_BUFFER, 0);
GLES11.glFrontFace(GL10.GL_CW);
GLES11.glLineWidth(10.0f);
GLES11.glColor4f(0.0f,0.0f,0.0f,1.0f);
GLES11.glDrawArrays(GL10.GL_LINE_STRIP, 0, verticesList.length);
GLES11.glDisableClientState(GL10.GL_VERTEX_ARRAY);
GLES11.glPopMatrix();
}
}
Drawing code :
Renderer.java
--------------
// Variables here
public void onSurfaceChanged(GL10 gl, int width, int height) {
viewWidth = width;
viewHeight = height;
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
gl.glEnable(GL10.GL_TEXTURE_2D); //Enable Texture Mapping
gl.glShadeModel(GL10.GL_SMOOTH); //Enable Smooth Shading
gl.glClearColor(1.0f, 1.0f, 1.0f, 1.0f); //White Background
gl.glClearDepthf(1.0f); //Depth Buffer Setup
gl.glEnable(GL10.GL_DEPTH_TEST); //Enables Depth Testing
gl.glDepthFunc(GL10.GL_LEQUAL);
gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_NICEST);
}
public void onDrawFrame(GL10 gl) {
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadIdentity();
GLU.gluOrtho2D(gl, -viewWidth/2, viewWidth/2, -viewHeight/2,viewHeight/2);
gl.glTranslatef(center.x,center.y,0);
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
gl.glTranslatef(0,0, 0);
gl.glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
gl.glEnable(GL10.GL_CULL_FACE);
gl.glCullFace(GL10.GL_FRONT);
if(connectingPath!=null){
connectingPath.Render(gl);
}
gl.glDisable(GL10.GL_CULL_FACE);
gl.glLoadIdentity();
}
Screenshot :
The drawing in OpenGL appears inverted to you because of the way OpenGL defines its screen coordinates. In contrast to most 2D drawing APIs, the origin is located in the bottom-left corner, which means that y-axis values increase when moving upwards. A very nice explanation is available in the OpenGL common pitfalls (number 12):
Given a sheet of paper, people write from the top of the page to the bottom. The origin for writing text is at the upper left-hand margin of the page (at least in European languages). However, if you were to ask any decent math student to plot a few points on an X-Y graph, the origin would certainly be at the lower left-hand corner of the graph. Most 2D rendering APIs mimic writers and use a 2D coordinate system where the origin is in the upper left-hand corner of the screen or window (at least by default). On the other hand, 3D rendering APIs adopt the mathematically minded convention and assume a lower left-hand origin for their 3D coordinate systems.
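If your input points come from a top-left-origin coordinate system (touch or screen coordinates, for example), one way to compensate, as a sketch against the getTransformedVertices loop above, is to flip the y value when building the vertex array (viewHeight here is a stand-in for the height of your drawing area, passed in however suits your code):

for (PointF point : verticesList) {
    finalVertices.add(point.x);
    // Flip from a top-left origin (screen/touch space) to OpenGL's bottom-left origin.
    finalVertices.add(viewHeight - point.y);
    finalVertices.add(0.0f);
}

Alternatively, you could swap the bottom and top arguments of GLU.gluOrtho2D so that y increases downwards and matches your input data directly.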
I have a cube that rotates around the center of the coordinate system, but the problem is that it rotates very slowly. So in my case, how do I set the rotation speed?
The following three methods update the mCurrentModelMatrix with the given model transformation. These are stateful accumulative methods.
public void trnslate(float x, float y, float z)
{
float[] tempModelMatrix = new float[16];
Matrix.setIdentityM(tempModelMatrix, 0);
Matrix.translateM(tempModelMatrix,0,x,y,z);
Matrix.multiplyMM(this.mCurrentModelMatrix, 0,
tempModelMatrix, 0, this.mCurrentModelMatrix, 0);
}
public void rotate(float angle, float x, float y, float z)
{
float[] tempModelMatrix = new float[16];
Matrix.setIdentityM(tempModelMatrix, 0);
Matrix.rotateM(tempModelMatrix,0,angle,x,y,z);
Matrix.multiplyMM(this.mCurrentModelMatrix, 0,
tempModelMatrix, 0, this.mCurrentModelMatrix, 0);
}
public void scale(float xFactor, float yFactor, float zFactor)
{
float[] tempModelMatrix = new float[16];
Matrix.setIdentityM(tempModelMatrix, 0);
Matrix.scaleM(tempModelMatrix,0,xFactor,yFactor,zFactor);
Matrix.multiplyMM(this.mCurrentModelMatrix, 0,
tempModelMatrix, 0, this.mCurrentModelMatrix, 0);
}
/*
* Calculate the final MVP matrix
* 1. Order of matrix multiplication is important
* 2. MVPmatrix = proj * view * model;
* 3. Setup the MVP matrix in the vertex shader memory
*/
protected void setupMatrices()
{
float[] tempModelMatrix = new float[16];
Matrix.setIdentityM(tempModelMatrix, 0);
//translate the model combo next
Matrix.multiplyMM(mMVPMatrix, 0, //matrix and offset
mCurrentModelMatrix, 0,
tempModelMatrix, 0);
//translate eye coordinates first
Matrix.multiplyMM(mMVPMatrix, 0,
this.mVMatrix, 0,
mMVPMatrix, 0);
//Project it: screen coordinates
Matrix.multiplyMM(mMVPMatrix, 0,
mProjMatrix, 0,
mMVPMatrix, 0);
//Set the vertex uniform handler representing the MVP matrix
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, //uniform handle
1, //number of uniforms. 1 if it is not an array
false, //transpose: must be false
mMVPMatrix, //client matrix memory pointer
0); //offset
}
draw method
// Drawing operation
@Override
protected void draw(GL10 gl, int positionHandle) {
// Hide the hidden surfaces using these APIs
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
GLES20.glDepthFunc(GLES20.GL_LESS);
// Transfer vertices to the shader
transferVertexPoints(positionHandle);
// Transfer texture points to the shader
transferTexturePoints(getTextureHandle());
// Implement rotation from 0 to 360 degrees
// Stop when asked and restart when the stopFlag
// is set to false.
// Decide what the current angle to apply
// for rotation is.
if (stopFlag == true) {
// stop rotation
curAngle = stoppedAtAngle;
} else {
curAngle += 1.0f;
}
if (curAngle > 360) {
curAngle = 0;
}
// Tell the base class to start their
// matrices to unit matrices.
this.initializeMatrices();
// The order of these model transformations matter
// Each model transformation is specified with
// respect to the last one, and not the very first.
// Center the cube
this.trnslate(0, 0, -1);
// Rotate it around y axis
this.rotate(curAngle, 0, -1, 0);
// Decenter it to where ever you want
this.trnslate(0, -2, 2);
// Go ahead calculate the ModelViewMatrix as
// we are done with ALL of our model transformations
this.setupMatrices();
// Call glDrawArrays to use the vertices and draw
int vertexCount = mTriangleVerticesData.length / 3;
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, // what primitives to use
0, // at what point to start
vertexCount); // Starting there how many points to use
// Check if there are errors
checkGlError("glDrawArrays");
}
Thanks in advance!
You are rotating at 1 degree per frame, so it will take 360 frames to do a complete rotation.
If you want it to rotate in 2 seconds, and you were running at 30 frames per second, you would want to rotate by 6 degrees per frame, by changing this section:
if (stopFlag == true) {
// stop rotation
curAngle = stoppedAtAngle;
} else {
curAngle += 6.0f;
}
if (curAngle > 360) {
curAngle = 0;
}
Okay, let me preface this by saying that I am very new to OpenGL as it pertains to Android, and while I've been reading up on it for some time, I cannot get past this roadblock in my coding.
Currently I am trying to write a class to load textures from a .png file located in my drawables folder onto a .obj model that I made in Blender. I did a UV unwrap on the Blender model, and then used the UV unwrap as a guide for the .png file.
The issue currently is that I am able to load the texture onto the model, but it is one solid color which seems to be coming from the texture file. Clearly I don't understand enough about UV texturing in Blender, but there are so many different OpenGL libraries, and so much variation from PC to Android that it's really hard to wrap my head around what works where.
I would be extremely grateful if somebody could help me with this. Here's some of the relevant code, I'll post more as necessary:
from TextureLoader:
public Texture getTexture(GL10 gl, final int ref) throws IOException {
Texture tex = (Texture) table.get(ref);
if (tex != null) {
return tex;
}
Log.i("Textures:", "Loading texture: " + ref);
tex = getTexture(gl, ref,
GL10.GL_TEXTURE_2D, // target
GL10.GL_RGBA, // dst pixel format
GL10.GL_LINEAR, // min filter (unused)
GL10.GL_NEAREST);
table.put(ref,tex);
return tex;
}
public Texture getTexture(GL10 gl, final int ref,
int target,
int dstPixelFormat,
int minFilter,
int magFilter) throws IOException {
if (!sReady) {
throw new RuntimeException("Texture Loader not prepared");
}
int srcPixelFormat = 0;
// create the texture ID for this texture
int id = createID(gl);
Texture texture = new Texture(target, id);
// bind this texture
gl.glBindTexture(target, id);
Bitmap bitmap = loadImage(ref);
texture.setWidth(bitmap.getWidth());
texture.setHeight(bitmap.getHeight());
if (bitmap.hasAlpha()) {
srcPixelFormat = GL10.GL_RGBA;
} else {
srcPixelFormat = GL10.GL_RGB;
}
// convert that image into a byte buffer of texture data
ByteBuffer textureBuffer = convertImageData(bitmap);
if (target == GL10.GL_TEXTURE_2D) {
gl.glTexParameterf(target, GL10.GL_TEXTURE_MIN_FILTER, minFilter);
gl.glTexParameterf(target, GL10.GL_TEXTURE_MAG_FILTER, magFilter);
}
GLUtils.texImage2D(target, 0, bitmap, 0);
/*gl.glTexImage2D(target,
0,
dstPixelFormat,
get2Fold(bitmap.getWidth()),
get2Fold(bitmap.getHeight()),
0,
srcPixelFormat,
GL10.GL_UNSIGNED_BYTE,
textureBuffer);*/
bitmap.recycle();
return texture;
}
/**
* Get the closest greater power of 2 to the fold number
*
* @param fold The target number
* @return The power of 2
*/
private int get2Fold(int fold) {
int ret = 2;
while (ret < fold) {
ret *= 2;
}
return ret;
}
/**
* Convert the buffered image to a texture
*
* @param bitmap The image to convert to a texture
* @return A buffer containing the data
*/
private ByteBuffer convertImageData(Bitmap bitmap) {
ByteBuffer imageBuffer = null;
ByteArrayOutputStream stream = new ByteArrayOutputStream();
bitmap.compress(Bitmap.CompressFormat.PNG, 100, stream);
byte[] data = stream.toByteArray();
imageBuffer = ByteBuffer.allocateDirect(data.length);
imageBuffer.order(ByteOrder.nativeOrder());
imageBuffer.put(data, 0, data.length);
imageBuffer.flip();
return imageBuffer;
}
/**
* Creates an integer buffer to hold specified ints
* - strictly a utility method
*
* @param size how many ints to contain
* @return the created IntBuffer
*/
protected IntBuffer createIntBuffer(int size) {
ByteBuffer temp = ByteBuffer.allocateDirect(4 * size);
temp.order(ByteOrder.nativeOrder());
return temp.asIntBuffer();
}
private Bitmap loadImage(int ref) {
Bitmap bitmap = null;
Matrix flip = new Matrix();
flip.postScale(1f, -1f);
// This will tell the BitmapFactory to not scale based on the device's pixel density:
BitmapFactory.Options opts = new BitmapFactory.Options();
opts.inScaled = false;
Bitmap temp = BitmapFactory.decodeResource(sContext.getResources(), ref, opts);
bitmap = Bitmap.createBitmap(temp, 0, 0, temp.getWidth(), temp.getHeight(), flip, true);
temp.recycle();
return bitmap;
}
from Texture:
public void bind(GL10 gl) {
gl.glBindTexture(target, textureID);
gl.glEnable(GL10.GL_TEXTURE_2D);
}
as it is called:
public void render() {
//Clear Screen And Depth Buffer
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glEnable(GL10.GL_LIGHTING);
gl.glPushMatrix();
gl.glTranslatef(0.0f, -1.2f, z); //Move down 1.2 Unit And Into The Screen 6.0
gl.glRotatef(xrot, 1.0f, 0.0f, 0.0f); //X
gl.glRotatef(yrot, 0.0f, 1.0f, 0.0f); //Y
texture.bind(gl);
model.draw(gl);
gl.glPopMatrix();
}
I recently stumbled across a weird problem (or at least I think so). I made a little OpenGL ES app for Android. The problem is that it runs fine on the emulator, but the phone just doesn't render what the emulator showed me!
I tested the app on two devices, a Samsung Ace and a Sony Xperia X10, with the same result!
I just don't know what the problem is (I don't have much experience with OpenGL on Android), so if you have any ideas, please point them out.
Some code:
Class that implements Renderer
public class GLOrbitor implements Renderer{
OrbitorLayer layer;
TexFont text; //this is used to render 1-9 and A-Z
TexFont text1; //this one to render a-z
int x;
int y;
private int atomNumber;
private int width;
private int height;
private Context context;
private final static float consty = 0.15f;
GLOrbitor(Context context){
setContext(context);
}
@Override
public void onDrawFrame(GL10 gl) {
// TODO Auto-generated method stub
gl.glDisable(GL10.GL_DITHER);
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
GLU.gluLookAt(gl, 0, 0, 5, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
gl.glPushMatrix();
gl.glEnable(GL10.GL_TEXTURE_2D);
gl.glEnable(GL10.GL_BLEND); // We know this is a 32bit font so set blending to suit
gl.glBlendFunc(GL10.GL_SRC_ALPHA,GL10.GL_ONE_MINUS_SRC_ALPHA);
drawNumAndLetters(gl);
gl.glDisable(GL10.GL_BLEND);
gl.glDisable(GL10.GL_TEXTURE_2D);
gl.glPopMatrix();
gl.glPushMatrix();
//gl.glTranslatef(-0.8f, 0.8f, 0f);
layer.draw(gl,getAtomNumber());
gl.glPopMatrix();
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
// TODO Auto-generated method stub
gl.glViewport(0,0, width, height);
this.width = width;
this.height = height;
float ratio = (float)width/height;
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadIdentity();
gl.glFrustumf(-ratio, ratio, -1, 1, 3, 7);
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
layer = new OrbitorLayer();
text = new TexFont(getContext(),gl);
text1 = new TexFont(getContext(),gl);
try {
text.LoadFont("ubunturegular.bff", gl); // 0-9 and A-Z
text1.LoadFont("ubunturegular1.bff", gl); //a-z
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
// TODO Auto-generated method stub
gl.glDisable(GL10.GL_DITHER);
gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT,GL10.GL_NICEST);
gl.glClearColor(0.0f,0.0f,0.0f, 1f);
gl.glEnable(GL10.GL_DEPTH_TEST | GL10.GL_SMOOTH);
gl.glShadeModel(GL10.GL_SMOOTH);
gl.glFrontFace(GL10.GL_CCW);
//gl.glEnable(GL10.GL_CULL_FACE);
//gl.glCullFace(GL10.GL_BACK);
}
}
//this code draw a tiny rectangle
public GLRectangle(){
setFilled(false);
ByteBuffer vbb = ByteBuffer.allocateDirect(12 * 4);
vbb.order(ByteOrder.nativeOrder());
mFVertexBuffer = vbb.asFloatBuffer();
ByteBuffer ibb = ByteBuffer.allocateDirect(6 * 2);
ibb.order(ByteOrder.nativeOrder());
mIndexBuffer = ibb.asShortBuffer();
float[] coords = {
-poz, -poz, 0,
poz, -poz, 0,
poz, poz, 0,
-poz,poz,0
};
mFVertexBuffer.put(coords);
short[] myIndecesArray = {0,1,2,0,2,3};
mIndexBuffer.put(myIndecesArray);
mFVertexBuffer.position(0);
mIndexBuffer.position(0);
}
public void draw(GL10 gl)
{
if(isFilled())
gl.glColor4f(1f, 0f, 0f, 1f);
else gl.glColor4f(1f, 1f, 1f, 1f);
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, mFVertexBuffer);
gl.glDrawElements(GL10.GL_TRIANGLES, 6,GL10.GL_UNSIGNED_SHORT, mIndexBuffer);
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
}
//and this one draw an array of rectangles
public class OrbitorLayer {
GLRectangle[] rectangle;
GLMargin margin;
private final static float consty = 0.15f;
OrbitorLayer(){
margin = new GLMargin();
rectangle = new GLRectangle[118];
}
public void draw(GL10 gl,int atomNumber){
for(int i = 0;i<118;i++){
rectangle[i] = new GLRectangle();
if((i>=0) && (i<atomNumber))
rectangle[i].setFilled(true);
}
//there's some exception for chromium with z = 24 and copper with z=29
//they have 4s1 and 3d5 also 4s1 and 3d10
if(atomNumber == 24){
//4s2 is at 19 and 24
rectangle[19].setFilled(false);
rectangle[24].setFilled(true);
}
else if(atomNumber == 29){
//3d10 is at 29
rectangle[19].setFilled(false);
rectangle[29].setFilled(true);
}
gl.glPushMatrix();
margin.draw(gl);
gl.glPopMatrix();
int index = -1;
float startx = -0.9f;
float starty = 0.8f;
//start with 1s layer
for(int i=0;i<2;i++){
index++;
//rectangle[index] = new GLRectangle();
//translate and draw
gl.glPushMatrix();
gl.glTranslatef((float) (startx + (i * 0.05)), starty, 0.0f);
rectangle[index].draw(gl);
gl.glPopMatrix();
}
//2 s layer
startx = -0.9f;
starty = (float)(0.8f - (1 * consty));
for(int i=0;i<2;i++){
index++;
//rectangle[index] = new GLRectangle();
//translate and draw
gl.glPushMatrix();
gl.glTranslatef((float) (startx + (i * 0.05)), starty, 0.0f);
rectangle[index].draw(gl);
gl.glPopMatrix();
}
//2 p layer
startx = 0.7f;
starty = (float)(0.8f - (1 * consty));
for(int i=0;i<6;i++){
index++;
//rectangle[index] = new GLRectangle();
//translate and draw
gl.glPushMatrix();
gl.glTranslatef((float) (startx + (i * 0.05)), starty, 0.0f);
rectangle[index].draw(gl);
gl.glPopMatrix();
}
}
And a screenshot of how it's rendered on the emulator:
On a real Android smartphone, those filled rectangles and the blue rounded shape aren't visible; only the letters and numbers are, and they are drawn with this code:
http://www.codehead.co.uk/cbfg/TexFont.java
If you have some ideas please don't hesitate!
Update:
Thank you Craigy and Matthew
Craigy: On a real phone I can't see those little red and white rectangles; everything else works as expected. By the way, only the zone filled by the blue rectangle is a GLSurfaceView; everything else has no link with OpenGL. Sorry that I can't provide a screenshot from a real device; my app was tested by two friends on their devices.
Matthew: the only place I use textures is for displaying 1, 2, 3, 4, 5, 6, 7 and s, f, d, p, and what's funny is that they are rendered well on a real device. My question is why drawing textures affects drawing and translating those rectangles. Anyway, I will play a little more with the info you suggested and I will let you guys know!
If you have any new intel about this problem, please help; it's driving me nuts that just displaying a few rectangles and some text gives me so many problems!
You aren't specifying texture coordinates.
The OpenGL implementation in the emulator doesn't do texturing correctly: by default it uses a 'fast' mode that blits the image without regard to texture coordinates. So your images show up on the 'incorrect' emulator, but don't work on a 'correct' device.
I'd suggest forgetting about the emulator for OpenGL development.
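With the fixed-function GL10 pipeline used here, texture coordinates are supplied as a client-side array next to the vertex array. A minimal sketch of a textured draw call (the buffer names are placeholders, and the UV values assume a simple full-quad mapping onto a rectangle's four corners):

// Assumed: texCoordBuffer is a FloatBuffer holding one (u, v) pair per vertex,
// e.g. { 0,0,  1,0,  1,1,  0,1 } for a rectangle drawn with indices {0,1,2, 0,2,3}.
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, texCoordBuffer);
gl.glDrawElements(GL10.GL_TRIANGLES, 6, GL10.GL_UNSIGNED_SHORT, indexBuffer);
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);

Without per-vertex texture coordinates, every vertex uses the same default coordinate, which is also why a textured model can end up a single solid color.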
A few more things to keep in mind:
The emulator will work with non-power-of-two textures; the device won't.
Android will scale images to match the density of the device, potentially resulting in your power-of-two texture ending up being non-power-of-two. Be sure to set BitmapFactory.Options.inScaled to false to prevent this.