LibGDX ShaderProgram does not compile on Android device

I am using LibGDX and have a simple fragment shader:
#ifdef GL_ES
#define mediump lowp
precision mediump float;
#else
#define lowp
#endif
uniform sampler2D u_texture;
uniform vec2 iResolution;
uniform float iGlobalTime;
uniform float glow;
void main(void)
{
    vec2 uv = gl_FragCoord.xy / iResolution.xy;
    //uv.y = 1-uv.y; // THIS LINE EVOKES AN ERROR!!!
    uv.y += (sin((uv.x + (iGlobalTime * 0.2)) * 10.0) * 0.02) + 0.02;
    vec4 color = vec4(glow, glow, glow, 1);
    vec4 texColor = texture2D(u_texture, uv) * color;
    gl_FragColor = texColor;
}
It works well on the PC, but on an Android device the shader does not compile. If I comment out that line (the commented one in the example), everything works fine.
How I use it:
mesh = new Mesh(true, 4, 6, VertexAttribute.Position(), VertexAttribute.ColorUnpacked(), VertexAttribute.TexCoords(0));
mesh.setVertices(new float[]
{0.5f, 0.5f, 0, 1, 1, 1, 1, 0, 1,
0.5f, -0.5f, 0, 1, 1, 1, 1, 1, 1,
-0.5f, -0.5f, 0, 1, 1, 1, 1, 1, 0,
-0.5f, 0.5f, 0, 1, 1, 1, 1, 0, 0});
mesh.setIndices(new short[] {0, 1, 2, 2, 3, 0});
this.texture = texture;
shader = new ShaderProgram(Gdx.files.internal("shaders/default.vertex"),
Gdx.files.internal("shaders/glowwave.fragment"));
Render method:
texture.bind();
shader.begin();
shader.setUniformMatrix("u_worldView", localCam.projection);
shader.setUniformi("u_texture", 0);
shader.setUniformf("iGlobalTime", time);
shader.setUniformf("iResolution", new Vector2(Gdx.graphics.getWidth(),Gdx.graphics.getHeight()+15));
shader.setUniformf("glow", glow);
mesh.render(shader, GL20.GL_TRIANGLES);
shader.end();
What could the reason be? It seems to me the problem is in the preprocessor, although I could be wrong.

ERROR: 0:17: '-' : Wrong operand types. No operation '-' exists that takes a left-hand operand of type 'const int' and a right operand of type 'float' (and there is no acceptable conversion)
ERROR: 0:17: 'assign' : cannot convert from 'int' to 'float'
ERROR: 2 compilation errors. No code generated.
Try this: use 1.0 rather than 1. GLSL ES does not implicitly convert int to float, so 1 - uv.y is rejected on the device, while desktop GLSL compilers are typically more lenient about it.
uv.y = 1.0 - uv.y; // this was the line that evoked the error
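For reference, a sketch of the relevant shader lines with the explicit float literal in place (the surrounding lines are unchanged from the original shader):
vec2 uv = gl_FragCoord.xy / iResolution.xy;
uv.y = 1.0 - uv.y; // both operands are now float, so GLSL ES accepts the subtraction
uv.y += (sin((uv.x + (iGlobalTime * 0.2)) * 10.0) * 0.02) + 0.02;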

Related

VBO, Shader C++ Android Confusion

I'm having trouble understanding where/how to set up buffers for a native Android application in VS 2015. I apologize if this isn't the best way to ask a question. I appreciate any help/insight.
This is what I have so far:
(in engine_init_display)
GLint vShaderLength = vertex_shader.length();
const GLchar* vcode = vertex_shader.c_str();
GLint fShaderLength = fragment_shader.length();
const GLchar* fcode = fragment_shader.c_str();
GLuint vs = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vs, 1, &vcode, NULL);
glCompileShader(vs);
GLuint fs = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fs, 1, &fcode, NULL);
glCompileShader(fs);
shader_programme = glCreateProgram();
glAttachShader(shader_programme, fs);
glAttachShader(shader_programme, vs);
glLinkProgram(shader_programme);
GLint pos_id = glGetAttribLocation(shader_programme, "position");
//Set vertex data
glUseProgram(shader_programme);
glVertexAttribPointer(pos_id, 0, GL_FLOAT, GL_FALSE, 0, 0);
glVertexAttribPointer(pos_id, //GLuint
3, //GLint size
GL_FLOAT, //GLenum type
GL_FALSE, //GLboolean
(sizeof(float) * 5), //GLsizei stride
points //const GLvoid *pointer
);
glEnableVertexAttribArray(pos_id);
(in engine_draw_frame)
glClearColor(1.0f, 0.41f, 1.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, indices);
eglSwapBuffers(engine->display, engine->surface);
With this, I get a pink (clear colour) background. I'm not sure what I'm doing wrong.
Here are my vertex data and shaders
float points[] =
{
-0.2f, 0.6f, 0.0f,
0.0f, 1.0f,
0.5f, 0.5f, 0.0f,
1.0f, 1.0f,
-0.5f, -0.5f, 0.0f,
0.0f, 0.0f,
0.5f, -0.5f, 0.0f,
1.0f, 0.0f
};
unsigned short indices[] =
{
0, 2, 1, 2, 3, 1
};
std::string vertex_shader =
"#version 300 es \n"
"in vec3 position; \n"
"void main () { \n"
" gl_Position = vec4 (position, 1.0); \n"
"} \n";
std::string fragment_shader =
"#version 300 es \n"
"precision highp float; \n"
"out vec4 frag_colour; \n"
"void main () { \n"
" frag_colour = vec4 (0.5, 0.0, 0.5, 1.0); \n"
"} \n";
OK, I figured it out. There isn't anything wrong with my shaders or vertex array. The problem was that I didn't tell EGL to create an OpenGL ES 2 context using EGL_CONTEXT_CLIENT_VERSION.
Check the Khronos EGL specification, page 43 of the PDF, for more info.
Sample from specification:
EGLContext eglCreateContext(EGLDisplay dpy,
EGLConfig config, EGLContext share_context,
const EGLint *attrib_list);
If attrib_list is left NULL, the default is an OpenGL ES 1.x context, and ES 2.0 shaders will not work in that context.
So, what you need to do is create an attribute list. Something along the lines of:
EGLint contextAttributes[] =
{
    EGL_CONTEXT_CLIENT_VERSION, 2,
    EGL_NONE
};
and pass that to eglCreateContext:
p_context = eglCreateContext(display, config, NULL, contextAttributes);
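For completeness, a minimal sketch of the same setup with an error check, assuming display and config are the handles already obtained in engine_init_display (the EGL_RENDERABLE_TYPE entry is an assumption not shown in the original code, but an ES 2.0 context also needs an ES2-capable EGLConfig):
// Assumes <EGL/egl.h> is already included, as in the native sample.
// Config attributes: ask for a window-surface config capable of ES 2.0 rendering.
const EGLint configAttributes[] =
{
    EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
    EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
    EGL_NONE
};
// Context attributes: request an ES 2.0 context; without this, EGL defaults to ES 1.x.
const EGLint contextAttributes[] =
{
    EGL_CONTEXT_CLIENT_VERSION, 2,
    EGL_NONE
};
EGLContext context = eglCreateContext(display, config, EGL_NO_CONTEXT, contextAttributes);
if (context == EGL_NO_CONTEXT)
{
    // eglGetError() reports why creation failed (e.g. EGL_BAD_CONFIG).
}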
Basically, I was so unsure of my ability with vertex buffers that I focused on them for a long time.

Draw an image-based texture in OpenGL on Android (Wear)

I am drawing an image-based texture using OpenGL on Android, but only part of the image is drawn.
My code:
@Override
public void onGlContextCreated() {
super.onGlContextCreated();
shaders = new ShadersDla();
float[] vts = { // x, y, s, t.
    -1,  1,  1, 1,
    -1,  1,  0, 0,
     1, -1,  1, 1,
     1,  1,  1, 0
};
// AllocateDirect prevents the GC moving this memory.
vtBuffer = ByteBuffer.allocateDirect(vts.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
vtBuffer.put(vts);
}
@Override
public void onGlSurfaceCreated(int width, int height) {
super.onGlSurfaceCreated(width, height);
float aspectRatio = (float) width / height;
float dist = .001f;
Matrix.frustumM(projectionMatrix, 0,
-aspectRatio * dist, aspectRatio * dist, // Left, right.
-dist, dist, // Bottom, top.
dist, 100); // Near, far.
makeTexture();
}
Shader
private static final String VERTEX_SHADER =
// Pass in the modelview matrix as a constant.
"uniform mat4 u_mvpMatrix; \n"
// Pass in the position and texture coordinates per vertex.
+ "attribute vec4 a_position; \n"
+ "attribute vec2 a_texCoord; \n"
// Varyings are sent on to the fragment shader.
+ "varying vec2 v_texCoord; \n"
+ "void main() { \n"
// Transform the vertex coordinate into clip coordinates.
+ " gl_Position = u_mvpMatrix * a_position; \n"
// Pass through the texture coordinate.
+ " v_texCoord = a_texCoord; \n"
+ "} \n";
I need some help with this. Kindly guide me to an easy way; I'm new to Android and OpenGL.
Change the vertex/texture-coordinate array to:
float[] vts = { // x, y, s, t.
    -1.0f,  1.0f, 0, 0,
     1.0f,  1.0f, 1, 0,
    -1.0f, -1.0f, 0, 1,
     1.0f, -1.0f, 1, 1
};
The original array lists the (-1, 1) corner twice and never supplies (-1, -1), so only part of the quad is covered; the array above gives all four corners with matching texture coordinates.

OpenGL ES 3.0, cannot draw textured quad

I am trying to modify a sample from the OpenGL ES 3.0 programming guide book, but I cannot figure out why I can't draw a textured quad.
Here is the main drawing function (extracted from the book, with minor modifications):
void DrawTexturedQuad(ESContext *esContext)
{
UserData *userData = static_cast<UserData *>(esContext->userData);
GLfloat vVertices[] =
{
-0.5f, 0.5f, 0.0f, // Position 0
0.0f, 0.0f, // TexCoord 0
-0.5f, -0.5f, 0.0f, // Position 1
0.0f, 1.0f, // TexCoord 1
0.5f, -0.5f, 0.0f, // Position 2
1.0f, 1.0f, // TexCoord 2
0.5f, 0.5f, 0.0f, // Position 3
1.0f, 0.0f // TexCoord 3
};
GLushort indices[] = { 0, 1, 2, 0, 2, 3 };
// Use the program object
glUseProgram ( userData->programObjectMultiTexture );
// Load the MVP matrix
glUniformMatrix4fv(userData->mvpLoc, 1, GL_FALSE,(GLfloat *) &userData->grid.mvpMatrix.m[0][0]);
#define LOCATION_VERT (7)
#define LOCATION_TEXT (8)
// Load the vertex position
glVertexAttribPointer ( LOCATION_VERT, 3, GL_FLOAT,
GL_FALSE, 5 * sizeof ( GLfloat ), vVertices );
// Load the texture coordinate
glVertexAttribPointer ( LOCATION_TEXT, 2, GL_FLOAT,
GL_FALSE, 5 * sizeof ( GLfloat ), &vVertices[3] );
glEnableVertexAttribArray ( LOCATION_VERT );
glEnableVertexAttribArray ( LOCATION_TEXT );
// Bind the base map
glActiveTexture ( GL_TEXTURE0 );
glBindTexture ( GL_TEXTURE_2D, userData->baseMapTexId );
// Set the base map sampler to texture unit to 0
glUniform1i ( userData->baseMapLoc, 0 );
// Bind the light map
glActiveTexture ( GL_TEXTURE1 );
glBindTexture ( GL_TEXTURE_2D, userData->lightMapTexId );
// Set the light map sampler to texture unit 1
glUniform1i ( userData->lightMapLoc, 1 );
glDrawElements ( GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, indices );
glDisableVertexAttribArray ( LOCATION_VERT );
glDisableVertexAttribArray ( LOCATION_TEXT );
}
The vertex shader:
#version 300 es
uniform mat4 u_mvpMatrix;
layout(location = 7) in vec3 a_position;
layout(location = 8) in vec2 a_texCoord;
out vec2 v_texCoord;
void main()
{
//gl_Position = vec4(a_position, 1.f);
gl_Position = u_mvpMatrix * vec4(a_position, 1.f);
v_texCoord = a_texCoord;
}
the Fragment shader:
#version 300 es
precision mediump float;
in vec2 v_texCoord;
layout(location = 0) out vec4 outColor;
uniform sampler2D s_baseMap;
uniform sampler2D s_lightMap;
void main()
{
vec4 baseColor;
vec4 lightColor;
baseColor = texture( s_baseMap, v_texCoord );
lightColor = texture( s_lightMap, v_texCoord );
outColor = baseColor * (lightColor + 0.25);
//outColor = vec4(1.f, 1.f, 1.f, 1.f);
}
I did three tests, but the results still look wrong, although the setup and shaders look right to me.
1st test (top window of the picture). The code is as shown above.
2nd test (middle window of the picture). I bypassed the samplers and hard-coded the color in the fragment shader, like so:
outColor = vec4(1.f, 1.f, 1.f, 1.f);
This proved that my matrices are correct, since I see a white quad properly transformed on the screen.
3rd test (bottom window). I bypassed the matrix transform to test whether the samplers are correct. I do see the textured quad rendered, but with no transformation, like so:
// this is so odd, I can see the textures!!!!
// why the transform can mess things up?
gl_Position = vec4(a_position, 1.f);
v_texCoord = a_texCoord;
Anyway, if anyone can spot the issue, I'd really appreciate it. I am using the Mali ARM emulator and there is no way to debug the shader, so I have no idea what is going on inside. The setup looks correct, but the pieces don't seem to work together for some reason.

OpenGL ES 2.0 Failing to correctly assign the color attribute

I'm struggling a bit to apply the color to my geometry. When I specify it directly in the vertex shader ("varColor = vec4(1.0, 0.5, 0.4, 1.0);"), everything is OK. But if I use the color values from the "vColor" attribute, everything gets messed up.
(Added some screenshots to show what I mean)
Can someone help me to figure out what am I doing wrong, or point me in the right direction? Thanks.
Using "varColor = vec4(1.0, 0.5, 0.4, 1.0);"
Using "varColor = vColor"
Vertex shader:
precision mediump float;
uniform mat4 modelViewProjectionMatrix;
attribute vec4 vPosition;
attribute vec2 vTexCoord;
attribute vec4 vColor;
varying lowp vec4 varColor;
varying mediump vec2 varTexCoord;
void main()
{
gl_Position = modelViewProjectionMatrix * vPosition;
varTexCoord = vTexCoord;
// varColor = vColor;
varColor = vec4(1.0, 0.5, 0.4, 1.0);
}
Fragment shader:
precision mediump float;
uniform sampler2D Texture0;
varying vec4 varColor;
varying vec2 varTexCoord;
void main()
{
gl_FragColor = texture2D(Texture0, varTexCoord) * varColor;
}
After the shader is linked, I'm binding my attributes like this:
mMatrixMVP = glGetUniformLocation(mProgramId, "modelViewProjectionMatrix");
glBindAttribLocation(mProgramId, 0, "vPosition");
glBindAttribLocation(mProgramId, 1, "vTexCoord");
glBindAttribLocation(mProgramId, 2, "vColor");
mTexture = glGetUniformLocation(mProgramId, "Texture0");
glUniform1i(mTexture, 0);
Structure that holds my vertex information:
struct Vertex
{
float xyz[3];
float st[2];
unsigned char color[4]; // All assigned to value of 255
};
When rendering, after the vertex buffer is bound, I'm setting the vertex attributes like this:
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (GLvoid*) offsetof(Vertex, xyz));
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex), (GLvoid*) offsetof(Vertex, st));
glEnableVertexAttribArray(2);
glVertexAttribPointer(2, 4, GL_UNSIGNED_BYTE, GL_TRUE, sizeof(Vertex), (GLvoid*) offsetof(Vertex, color));
glActiveTexture(GL_TEXTURE0);
pTexture->Bind(); // Just a "glBindTexture(GL_TEXTURE_2D, mTextureId);"
glUniform1i(mpCurrentShader->GetTexture(), 0);
After this I bind the index buffer and call glDrawElements.
Then I call glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0), disable all attributes with glDisableVertexAttribArray, and finally call glBindBuffer(GL_ARRAY_BUFFER, 0).
You need to make your glBindAttribLocation() calls before linking the shader program.
If you link a program without specifying the location of attributes with either glBindAttribLocation(), or with layout qualifiers in the shader code, the attribute locations will be assigned automatically when the shader program is linked. If you call glBindAttribLocation() after linking the program, the new locations will only take effect if you link the shader program again.
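A minimal sketch of an order that works, using the names from the question (the vs and fs shader handles are assumptions, standing in for whatever compiled shader objects the program attaches; assumes <GLES2/gl2.h> is included):
// Bind attribute locations BEFORE linking; they only take effect at link time.
glBindAttribLocation(mProgramId, 0, "vPosition");
glBindAttribLocation(mProgramId, 1, "vTexCoord");
glBindAttribLocation(mProgramId, 2, "vColor");
glAttachShader(mProgramId, vs); // assumed vertex shader handle
glAttachShader(mProgramId, fs); // assumed fragment shader handle
glLinkProgram(mProgramId);
// Uniform locations are only valid after a successful link, and glUniform*
// calls require the program to be current.
glUseProgram(mProgramId);
mMatrixMVP = glGetUniformLocation(mProgramId, "modelViewProjectionMatrix");
mTexture = glGetUniformLocation(mProgramId, "Texture0");
glUniform1i(mTexture, 0);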

GLSL - Wrong data in vertex attribute

I have already lost two days trying to figure out this issue, to no avail.
I have written a COLLADA animation renderer using OpenGL ES 2.0 for Android, with a shader doing the skinning. The code is almost complete and runs just fine on my HTC Desire S.
But when I run the same code on a Trident set-top box with a PowerVR chipset, my geometry is not displayed. After a day of debugging, I found out that this happens because I am getting values != -1 as bone matrix indices in the shader.
I verified that the value is == -1 on my phone, but != -1 on the set-top box.
What could possibly be wrong?
Please save me from this big trouble.
Sorry for not putting up the code.
Here is the vertex shader. I am expecting both components of boneIndices to be -1, but that is not the case on PowerVR.
attribute vec4 vPosition;
attribute vec2 vTexCoord;
attribute vec2 boneIndices;
attribute vec2 boneWeights;
uniform mat4 boneMatrices[BNCNT];
uniform mat4 modelMatrix;
uniform mat4 viewMatrix;
uniform mat4 projectionMatrix;
varying mediump vec2 fTexCoord;
varying mediump vec3 col;
void main(){
vec4 tempPosition = vPosition;
int index = int(boneIndices.x);
col = vec3(1.0, 0.0, 0.0);
if(index >= 0){
col.y = 1.0;
tempPosition = (boneMatrices[index] * vPosition) * boneWeights.x;
}
index = int(boneIndices.y);
if(index >= 0){
col.z = 1.0;
tempPosition = (boneMatrices[index] * vPosition) * boneWeights.y + tempPosition;
}
gl_Position = projectionMatrix * viewMatrix * modelMatrix * tempPosition;
fTexCoord = vTexCoord;
}
Setting up the attribute pointers:
glVertexAttribPointer(position, 3, GL_FLOAT, GL_FALSE, 13*sizeof(GLfloat), 0);
glVertexAttribPointer(texCoord, 2, GL_FLOAT, GL_FALSE, 13*sizeof(GLfloat), (GLvoid*)(3*sizeof(GLfloat)));
glVertexAttribPointer(boneIndices, 2, GL_FLOAT, GL_FALSE, 13*sizeof(GLfloat), (GLvoid*)(9*sizeof(GLfloat)));
glVertexAttribPointer(boneWeights, 2, GL_FLOAT, GL_FALSE, 13*sizeof(GLfloat), (GLvoid*)(11*sizeof(GLfloat)));
glEnableVertexAttribArray(position);
glEnableVertexAttribArray(texCoord);
glEnableVertexAttribArray(boneIndices);
glEnableVertexAttribArray(boneWeights);
My vertex and index buffers:
GLfloat vertices[13*6] =
{-0.5*size, -0.5*size, 0, 0,1, 1,1,1,1, -1,-1, 0,0,
-0.5*size, 0.5*size, 0, 0,0, 1,1,1,1, -1,-1, 0,0,
0.5*size, 0.5*size, 0, 1,0, 1,1,1,1, -1,-1, 0,0,
-0.5*size, -0.5*size, 0, 0,1, 1,1,1,1, -1,-1, 0,0,
0.5*size, 0.5*size, 0, 1,0, 1,1,1,1, -1,-1, 0,0,
0.5*size, -0.5*size, 0, 1,1, 1,1,1,1, -1,-1, 0,0 };
GLushort indices[]= {0,1,2, 3,4,5};
I am expecting the indices to be -1 in the shader, but they are not.
After days of frustration, I finally found the problem by myself.
The culprit was the "int()" conversion, which was returning 0 even when I passed -1.
The observed behavior is that it returns 0 for -1, -1 for -2, -2 for -3, and so on.
I am not sure whether this is a driver/hardware bug, or whether it comes from the floating-point representation: if -1 arrives in the shader as something like -0.9999999, int() truncates toward zero and yields 0.
Can anybody shed a little more light on this?
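If that is what is happening, one workaround (a sketch, not from the original post) is to round to the nearest integer rather than truncate, so a slightly-off -0.9999 still maps to -1:
int index = int(floor(boneIndices.x + 0.5)); // round to nearest: -0.9999 -> -1, 0.0001 -> 0
if (index >= 0) {
    tempPosition = (boneMatrices[index] * vPosition) * boneWeights.x; // same skinning path as before
}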
