Android GL ES 2.0 Ortho Matrices

I'm starting out with GL ES 2.0 on Android, and am trying to migrate some of my code over from 1.1. I've defined vertex and frag shaders as per the official docs, and after some googling I understand how the Model/Projection matrices work together, yet I can't seem to get anything but a blank screen.
I'm passing in a model view matrix to my vert shader, and am multiplying it with the ortho projection before multiplying the resulting mvp matrix with the vertex position. Here are my shaders to clarify:
Vertex Shader
attribute vec3 Position;
uniform mat4 Projection;
uniform mat4 ModelView;
void main() {
mat4 mvp = Projection * ModelView;
gl_Position = mvp * vec4(Position.xyz, 1);
}
Fragment Shader
precision mediump float;
uniform vec4 Color;
void main() {
gl_FragColor = Color;
}
I'm building the projection matrix in my renderer's onSurfaceChanged() function:
int projectionHandle = GLES20.glGetUniformLocation(shaderProg, "Projection");
Matrix.orthoM(projection, 0, -width / 2, width / 2, -height / 2, height / 2, -10, 10);
GLES20.glUniformMatrix4fv(projectionHandle, 1, false, projection, 0);
Then in my onDrawFrame(), I call each actor's draw routine, which looks like
int positionHandle = GLES20.glGetAttribLocation(Renderer.getShaderProg(), "Position");
int colorHandle = GLES20.glGetUniformLocation(Renderer.getShaderProg(), "Color"); // Color is a uniform, not an attribute
int modelHandle = GLES20.glGetUniformLocation(Renderer.getShaderProg(), "ModelView");
float[] modelView = new float[16];
Matrix.setIdentityM(modelView, 0);
Matrix.rotateM(modelView, 0, rotation, 0, 0, 1.0f);
Matrix.translateM(modelView, 0, position.x, position.y, 1.0f);
GLES20.glUniformMatrix4fv(modelHandle, 1, false, modelView, 0);
GLES20.glUniform4fv(colorHandle, 1, color.toFloatArray(), 0);
GLES20.glVertexAttribPointer(positionHandle, 3, GLES20.GL_FLOAT, false, 0, vertBuffer);
GLES20.glEnableVertexAttribArray(positionHandle);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, vertices.length / 3);
GLES20.glDisableVertexAttribArray(positionHandle);
I realize that I can optimize this a bit, but I just want to get it working first. The vertices are in a FloatBuffer and centered around the origin. I've been checking my code against various tutorials and SO questions/answers, but I can't see what I'm doing wrong. Any thoughts?

Right, fixed it :D The issue was that I was trying to set the Projection matrix before calling glUseProgram().
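For anyone hitting the same blank screen: glUniform* calls apply to whichever program is currently bound, so the projection upload in onSurfaceChanged() was going nowhere. A minimal sketch of the corrected ordering, reusing the names from the question (shaderProg and projection are assumed to be fields of the renderer):

// Bind the program first - uniform uploads target the currently bound program.
GLES20.glUseProgram(shaderProg);
int projectionHandle = GLES20.glGetUniformLocation(shaderProg, "Projection");
Matrix.orthoM(projection, 0, -width / 2f, width / 2f, -height / 2f, height / 2f, -10, 10);
GLES20.glUniformMatrix4fv(projectionHandle, 1, false, projection, 0);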

Related

FBO texture copy not working on Android - rendered texture filled with whatever is at texture coord 0, 0

The problem is that the result of the FBO copy is filled with whatever pixel is at texture coordinate 0,0 of the source texture.
If I edit the shader to render a gradient based on texture coordinate position, the fragment shader fills the whole result as if it had texture coordinate 0, 0 fed into it.
If I edit the triangle strip vertices, things behave as expected, so I think the camera and geometry are set up right. It's just that the two-triangle quad is all one color when it should reflect either my input texture or at least my position-gradient shader!
I've ported this code nearly line for line from a working iOS example.
This is running alongside Unity3D, so don't assume any GL settings are default, as the engine is likely fiddling with them before my code starts.
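One way to coexist with an engine that owns the GL state is to query and restore whatever you touch. A minimal sketch for the framebuffer binding alone (the same pattern works for other state; prevFbo is a hypothetical local):

int[] prevFbo = new int[1];
// Remember the engine's framebuffer binding before the copy...
GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, prevFbo, 0);
// ...perform the FBO copy shown below...
// ...then hand the old binding back to the engine.
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, prevFbo[0]);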
Here's the FBO copy operation
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFrameBuffer);
checkGlError("glBindFramebuffer");
GLES20.glViewport(0, 0, TEXTURE_WIDTH*4, TEXTURE_HEIGHT*4);
checkGlError("glViewport");
GLES20.glDisable(GLES20.GL_BLEND);
GLES20.glDisable(GLES20.GL_DEPTH_TEST);
GLES20.glDepthMask(false);
GLES20.glDisable(GLES20.GL_CULL_FACE);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, 0);
GLES20.glPolygonOffset(0.0f, 0.0f);
GLES20.glDisable(GLES20.GL_POLYGON_OFFSET_FILL);
checkGlError("fbo setup");
// Load the shaders if we have not done so
if (mProgram <= 0) {
createProgram();
Log.i(TAG, "InitializeTexture created program with ID: " + mProgram);
if (mProgram <= 0)
Log.e(TAG, "Failed to initialize shaders!");
}
// Set up the program
GLES20.glUseProgram(mProgram);
checkGlError("glUseProgram");
GLES20.glUniform1i(mUniforms[UNIFORM_TEXTURE], 0);
checkGlError("glUniform1i");
// clear the scene
GLES20.glClearColor(0.0f,0.0f, 0.1f, 1.0f);
checkGlError("glClearColor");
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
// Bind our source texture
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
checkGlError("glActiveTexture");
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mSourceTexture);
checkGlError("glBindTexture");
GLES20.glFrontFace( GLES20.GL_CW );
// Our object to render
ByteBuffer imageVerticesBB = ByteBuffer.allocateDirect(8 * 4);
imageVerticesBB.order(ByteOrder.nativeOrder());
FloatBuffer imageVertices = imageVerticesBB.asFloatBuffer();
imageVertices.put(new float[]{
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f}
);
imageVertices.position(0);
// The object's texture coordinates
ByteBuffer textureCoordinatesBB = ByteBuffer.allocateDirect(8 * 4);
textureCoordinatesBB.order(ByteOrder.nativeOrder()); // order the texture-coordinate buffer, not the vertex buffer
FloatBuffer textureCoordinates = textureCoordinatesBB.asFloatBuffer();
textureCoordinates.put(new float[]{
0.0f, 1.0f,
1.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f}
);
textureCoordinates.position(0);
// Update attribute values.
GLES20.glEnableVertexAttribArray(ATTRIB_VERTEX);
GLES20.glVertexAttribPointer(ATTRIB_VERTEX, 2, GLES20.GL_FLOAT, false, 0, imageVertices);
GLES20.glEnableVertexAttribArray(ATTRIB_TEXTUREPOSITON);
GLES20.glVertexAttribPointer(ATTRIB_TEXTUREPOSITON, 2, GLES20.GL_FLOAT, false, 0, textureCoordinates);
// Draw the quad
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
If you want to dive in, I've put up a nice gist with the update loop, setup and shaders here: https://gist.github.com/acgourley/7783624
I'm checking this in as an Android port of UnityFBO (MIT License), so all help is both appreciated and will be shared more broadly.
The declarations of your vertex shader output and fragment shader input do not match for the texture coordinate varying (different precision qualifiers). Ordinarily this would not be an issue, but for reasons I will discuss below, using highp in your fragment shader may come back to bite you in the butt.
Vertex shader:
attribute vec4 position;
attribute mediump vec4 textureCoordinate;
varying mediump vec2 coordinate;
void main()
{
gl_Position = position;
coordinate = textureCoordinate.xy;
}
Fragment shader:
varying highp vec2 coordinate;
uniform sampler2D texture;
void main()
{
gl_FragColor = texture2D(texture, coordinate);
}
In OpenGL ES 2.0 highp is an optional feature in fragment shaders. You should not declare anything highp in a fragment shader unless GL_FRAGMENT_PRECISION_HIGH is defined by the pre-processor.
GLSL ES 1.0 Specification - 4.5.4: Available Precision Qualifiers - pp. 36
The built-in macro GL_FRAGMENT_PRECISION_HIGH is defined to one on systems supporting highp precision in the fragment language
#define GL_FRAGMENT_PRECISION_HIGH 1
and is not defined on systems not supporting highp precision in the fragment language. When defined, this macro is available in both the vertex and fragment languages. The highp qualifier is an optional feature in the fragment language and is not enabled by #extension.
The bottom line is you need to check whether the fragment shader supports highp precision before declaring anything highp, or re-write your declaration in the fragment shader to use mediump. I cannot see much reason for arbitrarily increasing the precision of the vertex shader coordinates in the fragment shader; I would honestly expect to see it written as highp in both the vertex and fragment shaders, or kept mediump in both.
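A minimal sketch of that guard in the fragment shader, using the macro quoted above; the #else branch falls back to the mediump the vertex shader declared:

#ifdef GL_FRAGMENT_PRECISION_HIGH
varying highp vec2 coordinate;
#else
varying mediump vec2 coordinate;
#endif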

Correct vertex shader code? OpenGL ES 2.0

Edit Code added, please see below
Edit 2 - Screenshots from device included at bottom along with explanation
Edit 3 - New code added
I have 2 classes, a renderer and a custom 'quad' class.
I have these declared at class level in my renderer class:
final float[] mMVPMatrix = new float[16];
final float[] mProjMatrix = new float[16];
final float[] mVMatrix = new float[16];
And in my onSurfaceChanged method I have:
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width / height;
Matrix.frustumM(mProjMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
}
and....
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
// TODO Auto-generated method stub
myBitmap = BitmapFactory.decodeResource(curView.getResources(), R.drawable.box);
//Create new Dot objects
dot1 = new Quad();
dot1.setTexture(curView, myBitmap);
dot1.setSize(300,187); //These numbers are the size but are redundant/not used at the moment.
myBitmap.recycle();
//Set colour to black
GLES20.glClearColor(0, 0, 0, 1);
}
And finally from this class, onDrawFrame:
@Override
public void onDrawFrame(GL10 gl) {
// TODO Auto-generated method stub
//Paint the screen the colour defined in onSurfaceCreated
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
// Set the camera position (View matrix) so looking from the front
Matrix.setLookAtM(mVMatrix, 0, 0, 0, 3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
// Combine
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
dot1.rotateQuad(0,0,45, mMVPMatrix); //x,y,angle and matrix passed in
}
Then, in my quad class:
This declared at class level:
private float[] mRotationMatrix = new float[16];
private final float[] mMVPMatrix = new float[16];
private final float[] mProjMatrix = new float[16];
private final float[] mVMatrix = new float[16];
private int mMVPMatrixHandle;
private int mPositionHandle;
private int mRotationHandle;
//Create our vertex shader
String strVShader =
"uniform mat4 uMVPMatrix;" +
"uniform mat4 uRotate;" +
"attribute vec4 a_position;\n"+
"attribute vec2 a_texCoords;" +
"varying vec2 v_texCoords;" +
"void main()\n" +
"{\n" +
// "gl_Position = a_position * uRotate;\n"+
// "gl_Position = uRotate * a_position;\n"+
"gl_Position = a_position * uMVPMatrix;\n"+
// "gl_Position = uMVPMatrix * a_position;\n"+
"v_texCoords = a_texCoords;" +
"}";
//Fragment shader
String strFShader =
"precision mediump float;" +
"varying vec2 v_texCoords;" +
"uniform sampler2D u_baseMap;" +
"void main()" +
"{" +
"gl_FragColor = texture2D(u_baseMap, v_texCoords);" +
"}";
Then my method for setting the texture (I don't think this is relevant to this problem though!!):
public void setTexture(GLSurfaceView view, Bitmap imgTexture){
this.imgTexture=imgTexture;
iProgId = Utils.LoadProgram(strVShader, strFShader);
iBaseMap = GLES20.glGetUniformLocation(iProgId, "u_baseMap");
iPosition = GLES20.glGetAttribLocation(iProgId, "a_position");
iTexCoords = GLES20.glGetAttribLocation(iProgId, "a_texCoords");
texID = Utils.LoadTexture(view, imgTexture);
}
And finally, my 'rotateQuad' method (which currently is supposed to draw and rotate the quad).
public void rotateQuad(float x, float y, int angle, float[] mvpMatrix){
Matrix.setRotateM(mRotationMatrix, 0, angle, 0, 0, 0.1f);
// Matrix.translateM(mRotationMatrix, 0, 0, 0, 0); //Removed temporarily
// Combine the rotation matrix with the projection and camera view
Matrix.multiplyMM(mvpMatrix, 0, mRotationMatrix, 0, mvpMatrix, 0);
float[] vertices = {
-.5f,.5f,0, 0,0,
.5f,.5f,0, 1,0,
-.5f,-.5f,0, 0,1,
.5f,-.5f,0, 1,1
};
vertexBuf = ByteBuffer.allocateDirect(vertices.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
vertexBuf.put(vertices).position(0);
//Bind the correct texture
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texID);
//Use program
GLES20.glUseProgram(iProgId);
// get handle to shape's transformation matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(iProgId, "uMVPMatrix");
// Apply the projection and view transformation
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
// get handle to shape's rotation matrix
mRotationHandle = GLES20.glGetUniformLocation(iProgId, "uRotate");
// Apply the projection and view transformation
GLES20.glUniformMatrix4fv(mRotationHandle, 1, false, mRotationMatrix, 0);
//Set starting position for vertices
vertexBuf.position(0);
//Specify attributes for vertex
GLES20.glVertexAttribPointer(iPosition, 3, GLES20.GL_FLOAT, false, 5 * 4, vertexBuf);
//Enable attribute for position
GLES20.glEnableVertexAttribArray(iPosition);
//Set starting position for texture
vertexBuf.position(3);
//Specify attributes for vertex
GLES20.glVertexAttribPointer(iTexCoords, 2, GLES20.GL_FLOAT, false, 5 * 4, vertexBuf);
//Enable attribute for texture
GLES20.glEnableVertexAttribArray(iTexCoords);
//Draw it
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
}
For Edit 2:
This is my quad as drawn in the center of the screen. No rotation.
This is the same quad rotated +45 degrees with the code "gl_Position = a_position * uMVPMatrix;" in my vertex shader (it's from a different project now, so the shader variable is a_position rather than vPosition). It looks correct!
However, this is the same quad rotated +45 degrees with the two shader variables switched (so the line reads "gl_Position = uMVPMatrix * a_position;"). As you can see, it's not quite right.
Also, just a side note: you can't see it here as the square is symmetrical, but each method also rotates in the opposite direction to the other...
Any help appreciated.
It's really impossible to tell because we don't know what you are passing to these two variables.
OpenGL is column-major format, so if vPosition is in fact a vector, and uMVPMatrix is a matrix, then the first option is correct, if this is in your shader.
If this is not in your shader but in your program code, then there is not enough information.
If you are using the first option but getting unexpected results, you are likely not computing your matrix properly or not passing the correct vertices.
Normally in the vertex shader you should multiply the positions by the MVP, that is:
gl_Position = uMVPMatrix * vPosition;
When you change the order to this, it should work.
Thanks to all for the help.
I managed to track down the problem (For the most part). I will show what I did.
It was the following line:
Matrix.multiplyMM(mvpMatrix, 0, mvpMatrix, 0, mRotationMatrix, 0);
As you can see I was multiplying the matrices and storing them back into one that I was using in the multiplication.
So I created a new matrix called mvpMatrix2 and stored the results in that. Then passed that to my vertex shader.
//Multiply matrices
Matrix.multiplyMM(mvpMatrix2, 0, mvpMatrix, 0, mRotationMatrix, 0);
//get handle to shape's transformation matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(iProgId, "uMVPMatrix");
//Give to vertex shader variable
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix2, 0);
After applying this, there is no distortion (and also, with regard to my other question here, Using Matrix.Rotate in OpenGL ES 2.0, I am able to translate the centre of the quad). I say 'for the most part' because when I rotate it, it rotates backwards: if I say rotate +45 degrees (clockwise), it actually rotates the quad by -45 degrees (anti-clockwise).
But hopefully, this will help anyone who has a similar problem in the future.
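The general rule behind this fix: android.opengl.Matrix.multiplyMM computes result = lhs * rhs and is not documented as safe when the result array overlaps an input, so always give it a distinct output. A minimal sketch of the in-place pattern done safely:

float[] scratch = new float[16];
// The output array is distinct from both inputs, so the product is well defined.
Matrix.multiplyMM(scratch, 0, mvpMatrix, 0, mRotationMatrix, 0);
// Copy the result back if the combined matrix should end up in mvpMatrix.
System.arraycopy(scratch, 0, mvpMatrix, 0, 16);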

Using Matrix.Rotate in OpenGL ES 2.0

Edit - Added more code
Having a lot of problems attempting to correctly rotate my quad using OpenGL ES 2.0.
It always seems to rotate around the centre of the screen co-ordinates. I'm trying to get it to rotate around its own centre (for 2D, so around the z axis only).
I've been experimenting with Matrix.translateM as shown below. However, changing the x or y position here simply draws the quad at a different place on the screen; when it rotates, it again rotates around the centre of the screen. Please could someone explain how to get it to spin around its own z axis (like a wheel)?
Thanks, here are the relevant lines of code - if more is needed, please ask and I will post. (Please note, I've looked at a lot of similar questions on SO and the wider internet but I've not managed to find an answer thus far).
Thanks.
//Set rotation
Matrix.setRotateM(mRotationMatrix, 0, -angle, 0, 0, 1.0f);
//Testing translation
Matrix.translateM(mRotationMatrix, 0, -.5f, .5f, 0f);
// Combine the rotation matrix with the projection and camera view
Matrix.multiplyMM(mvpMatrix, 0, mRotationMatrix, 0, mvpMatrix, 0);
My Shaders (declared at class level)
private final String vertexShaderCode =
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"void main() {" +
" gl_Position = uMVPMatrix * vPosition;" +
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
"void main() {" +
" gl_FragColor = vColor;" +
"}";
From onSurfaceChanged
float ratio = (float) width / height;
Matrix.frustumM(mProjMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
In My onDrawFrame method
// Set the camera position (View matrix)
Matrix.setLookAtM(mVMatrix, 0, 0, 0, 3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
//Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
I've encountered the same problems (weird distortions and everything else); my solution, based on Android Training > Displaying Graphics with OpenGL ES > Adding Motion, is below.
(Head over to my detailed post if needed: OpenGL ES Android Matrix Transformations.)
Set a mModelMatrix to identity Matrix
Matrix.setIdentityM(mModelMatrix, 0); // initialize to identity matrix
Apply translation to the mModelMatrix
Matrix.translateM(mModelMatrix, 0, -0.5f, 0, 0); // translation to the left
Apply rotation to a mRotationMatrix (angles in degrees)
Matrix.setRotateM(mRotationMatrix, 0, mAngle, 0, 0, -1.0f);
Combine rotation and translation via Matrix.multiplyMM
mTempMatrix = mModelMatrix.clone();
Matrix.multiplyMM(mModelMatrix, 0, mTempMatrix, 0, mRotationMatrix, 0);
Combine the model matrix with the projection and camera view
mTempMatrix = mMVPMatrix.clone();
Matrix.multiplyMM(mMVPMatrix, 0, mTempMatrix, 0, mModelMatrix, 0);
Here is a walkthrough. Let's say you were to draw a teapot... the modelMatrix would be the identity to start with, and the shape is centered on the origin (the original answer illustrates each of these steps with screenshots).
Verify this is what you have before you continue...
Once you have that, apply the rotation to the model matrix, compile and run: you get a rotated copy...
Once you have this, you can translate.
So for you, it appears all you need to do is verify that when the rotation matrix is the identity, e.g.
Matrix.setIdentityM(mRotationMatrix, 0);
the shape is in the center. If it is not, move it to the center.
Once it is in the center, apply the rotation, e.g.
Matrix.setIdentityM(mRotationMatrix, 0);
<as needed: movement to center>
Matrix.rotateM(mRotationMatrix, 0, -angle, 0, 0, 1.0f);
<any other translation you want>
Do it in steps to make your life easy so you see what is going on.
Rotation usually occurs around the origin, so you want to rotate your quad before you translate it. If you rotate after you translate, then the quad will first be moved away from the origin, then rotated around the origin.
Without knowing how your Matrix functions are implemented, we cannot advise on whether you are using them correctly. All you've shown us is the functions' interface.
But in general, rotate before you translate.
Apply your operations backwards:
1st- Matrix.translateM(mRotationMatrix, 0, -.5f, .5f, 0f);
2nd- Matrix.setRotateM(mRotationMatrix, 0, -angle, 0, 0, 1.0f);
It will rotate around its own center
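Spelling that out for an arbitrary pivot, as a sketch (cx and cy are assumed to be the quad's centre in model space): each Matrix.translateM/rotateM call post-multiplies the matrix, so the calls are written in the reverse of the order in which they act on the vertices.

float[] mModelMatrix = new float[16];
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.translateM(mModelMatrix, 0, cx, cy, 0f);   // acts 3rd: move the quad back into place
Matrix.rotateM(mModelMatrix, 0, angle, 0, 0, 1f); // acts 2nd: spin around the origin
Matrix.translateM(mModelMatrix, 0, -cx, -cy, 0f); // acts 1st: bring the centre to the origin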

OpenGL ES 2.0 PNG alpha texture overlap

I'm trying to draw multiple hexagons on the screen that have an alpha channel. The image is this:
So, I load the texture into the program and that's OK. When it runs, the alpha channel is blended with the background color and that's OK, but when two hexagons overlap each other, the overlapped part becomes the color of the background! Picture below:
Of course, this is not the effect that I expected. I want them to overlap without the background being drawn over the other texture. Here is my code for drawing:
GLES20.glUseProgram(Program);
hVertex = GLES20.glGetAttribLocation(Program,"vPosition");
hColor = GLES20.glGetUniformLocation(Program, "vColor");
uTexture = GLES20.glGetUniformLocation(Program, "u_Texture");
hTexture = GLES20.glGetAttribLocation(Program, "a_TexCoordinate");
hMatrix = GLES20.glGetUniformLocation(Program, "uMVPMatrix");
GLES20.glVertexAttribPointer(hVertex, 3, GLES20.GL_FLOAT, false, 0, bVertex);
GLES20.glEnableVertexAttribArray(hVertex);
GLES20.glUniform4fv(hColor, 1, Color, 0);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, hTexture);
GLES20.glUniform1i(uTexture, 0);
GLES20.glVertexAttribPointer(hTexture, 2, GLES20.GL_FLOAT, false, 0, bTexture);
GLES20.glEnableVertexAttribArray(hTexture);
GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ONE_MINUS_SRC_ALPHA);
GLES20.glEnable(GLES20.GL_BLEND);
x=-1;y=0;z=0;
for (int i=0;i<10;i++) {
Matrix.setIdentityM(ModelMatrix, 0);
Matrix.translateM(ModelMatrix, 0, x, y, z);
x+=0.6f;
Matrix.multiplyMM(ModelMatrix, 0, ModelMatrix, 0, ProjectionMatrix, 0);
GLES20.glUniformMatrix4fv(hMatrix, 1, false, ModelMatrix, 0);
GLES20.glDrawElements(GLES20.GL_TRIANGLES, DrawOrder.length, GLES20.GL_UNSIGNED_SHORT, bDrawOrder);
}
GLES20.glDisable(GLES20.GL_BLEND);
GLES20.glDisableVertexAttribArray(hVertex);
}
And My fragment shader:
public final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
"uniform sampler2D u_Texture;" +
"varying vec2 v_TexCoordinate;" +
"void main() {" +
" gl_FragColor = vColor * texture2D(u_Texture, v_TexCoordinate);" +
"}";
and my renderer code:
super(context);
setEGLContextClientVersion(2);
getHolder().setFormat(PixelFormat.TRANSLUCENT);
setEGLConfigChooser(8, 8, 8, 8, 8, 8);
renderer = new GLRenderer(context);
setRenderer(renderer);
I already tried different blend factors with glBlendFunc, but nothing seems to work. Does anyone know what the problem is? I'm really lost. If you need any more code, just ask!
Thank you!
My guess is that you need to disable the depth test when drawing these. Since they all appear at the same depth, when you draw your leftmost hexagon, it writes into the depth buffer for every pixel in its quad, even the transparent ones.
Then when you draw the next quad to the right, the pixels which overlap don't get drawn because they fail the depth test, so you just get a blank area where it intersects with the first quad.
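A minimal sketch of that fix, placed before the blended draw loop; either option alone addresses the symptom:

// Option 1: skip depth testing entirely for these same-depth sprites.
GLES20.glDisable(GLES20.GL_DEPTH_TEST);

// Option 2: keep testing against what is already in the depth buffer,
// but stop the transparent quads from writing depth over each other.
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
GLES20.glDepthMask(false);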

GLSL - Wrong data in vertex attribute

I have already lost 2 days trying to figure out this issue, but with no luck.
I have written a COLLADA animation renderer using OpenGL ES 2.0 for Android, using a shader for skinning. The code is almost complete and it runs just fine on my HTC Desire S.
But when I try to run the same on a Trident set-top box with a PowerVR chipset, my geometry is not displayed. After a day of debugging, I found out that it is happening because I am getting != -1 as the bone matrix indices in the shader.
I verified that it is == -1 on my phone, but it is != -1 on the set-top box.
What could possibly be wrong?
Please save me from this big trouble.
Sorry for not putting up the code at first.
Here is the vertex shader. I am expecting vec2(boneIndices) to have -1 in [0] as well as [1], but this is not so on PowerVR.
attribute vec4 vPosition;
attribute vec2 vTexCoord;
attribute vec2 boneIndices;
attribute vec2 boneWeights;
uniform mat4 boneMatrices[BNCNT];
uniform mat4 modelMatrix;
uniform mat4 viewMatrix;
uniform mat4 projectionMatrix;
varying mediump vec2 fTexCoord;
varying mediump vec3 col;
void main(){
vec4 tempPosition = vPosition;
int index = int(boneIndices.x);
col = vec3(1.0, 0.0, 0.0);
if(index >= 0){
col.y = 1.0;
tempPosition = (boneMatrices[index] * vPosition) * boneWeights.x;
}
index = int(boneIndices.y);
if(index >= 0){
col.z = 1.0;
tempPosition = (boneMatrices[index] * vPosition) * boneWeights.y + tempPosition;
}
gl_Position = projectionMatrix * viewMatrix * modelMatrix * tempPosition;
fTexCoord = vTexCoord;
}
setting up the attribute pointers
glVertexAttribPointer(position, 3, GL_FLOAT, GL_FALSE, 13*sizeof(GLfloat), 0);
glVertexAttribPointer(texCoord, 2, GL_FLOAT, GL_FALSE, 13*sizeof(GLfloat), (GLvoid*)(3*sizeof(GLfloat)));
glVertexAttribPointer(boneIndices, 2, GL_FLOAT, GL_FALSE, 13*sizeof(GLfloat), (GLvoid*)(9*sizeof(GLfloat)));
glVertexAttribPointer(boneWeights, 2, GL_FLOAT, GL_FALSE, 13*sizeof(GLfloat), (GLvoid*)(11*sizeof(GLfloat)));
glEnableVertexAttribArray(position);
glEnableVertexAttribArray(texCoord);
glEnableVertexAttribArray(boneIndices);
glEnableVertexAttribArray(boneWeights);
my vertex and index buffers
GLfloat vertices[13*6] =
{-0.5*size, -0.5*size, 0, 0,1, 1,1,1,1, -1,-1, 0,0,
-0.5*size, 0.5*size, 0, 0,0, 1,1,1,1, -1,-1, 0,0,
0.5*size, 0.5*size, 0, 1,0, 1,1,1,1, -1,-1, 0,0,
-0.5*size, -0.5*size, 0, 0,1, 1,1,1,1, -1,-1, 0,0,
0.5*size, 0.5*size, 0, 1,0, 1,1,1,1, -1,-1, 0,0,
0.5*size, -0.5*size, 0, 1,1, 1,1,1,1, -1,-1, 0,0 };
GLushort indices[]= {0,1,2, 3,4,5};
I am expecting the indices to be -1 in the shader; but they are not.
After days of frustration, I finally found the problem by myself.
The culprit was the int() constructor, which was returning 0 even when I specified -1.
The observed behavior is that it returns:
0 for -1
-1 for -2
-2 for -3, and so on...
I am not sure if this is a driver/hw bug, or if it is a consequence of the floating-point representation, where -1 arrives as something like -0.9999999: since int() truncates toward zero, -0.9999999 would become 0, -1.9999999 would become -1, and so on, which matches the pattern above.
Can anybody shed a little more light on this?
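One common workaround, assuming the attribute really is arriving just shy of the integer (e.g. -0.9999999 for -1.0): round to the nearest integer instead of truncating, for example

int index = int(floor(boneIndices.x + 0.5)); // round to nearest rather than truncate toward zero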
