OpenGL ES GLSL optimization - android

I'm developing a game which uses glsl shader to produce terrain based on the vertex "height". This works very very fast on desktop but quite slow on android tablet. This is of course natural. I'm still a noob with GLSL and OpenGL. This is the result from the shader:
lowp setting doesn't affect that much for performance. I have now tuned this and frame rate is ~20. Can someone give ideas how to improve this shader code:
Fragment shader:
#ifdef GL_ES
#define LOWP lowp
precision mediump float;
#else
#define LOWP
#endif
varying float EyeVertexZ;
//varying LOWP float Explored;
varying LOWP vec2 v_texCoords;
uniform LOWP float u_offset;
uniform sampler2D u_texture;
uniform sampler2D u_texture2;
uniform sampler2D u_texture3;
uniform vec2 resolution;

// Compile-time constants hoisted out of main(). Declaring these inside the
// shader body forces some mobile GLSL compilers to rebuild them per fragment;
// `const` globals let the compiler fold them once.
const LOWP vec4 kColorGrass = vec4(0.631, 0.69, 0.49, 1.0);
const float kBumpAmount = 0.2;
const vec3 kLightColor = vec3(1.0);
const vec3 kAttenuation = vec3(0.5);
const vec3 kAmbientColor = vec3(1.0);
const float kAmbientIntensity = 0.3;
// flat "no bump" normal used to damp the bump map
const LOWP vec3 kBaseNormal = vec3(0.5, 0.5, 1.0);

// Terrain shading: grass color by default, bump-lit water below height 0.67,
// then two height-based blends against the terrain texture (u_texture2).
void main() {
    float height = EyeVertexZ;
    // Fetch the terrain texture exactly once; the original sampled the same
    // texel twice (once per blend step below).
    LOWP vec4 terrainTex = texture2D(u_texture2, v_texCoords);
    LOWP vec4 color = kColorGrass;
    // if height under 0.67 --> water
    if (height < 0.67)
    {
        vec2 coords = v_texCoords.st;
        coords.x -= u_offset;
        // bump-map texture. The original's useNormals/useShadow/yInvert flags
        // were compile-time constants (true/true/false), so the dead ternary
        // branches are folded away here.
        LOWP vec3 nColor = texture2D(u_texture, coords).rgb;
        // water color, deep water darker
        color = vec4(0.3, 0.4 + height*0.33, 0.6 + height*0.33, 1.0);
        vec3 light = vec3(resolution.x, 0.0, 0.5);
        // lower the intensity of the bump map
        nColor = mix(kBaseNormal, nColor, kBumpAmount);
        // normals need to be converted to [-1.0, 1.0] range and normalized
        LOWP vec3 normal = normalize(nColor * 2.0 - 1.0);
        // simple distance calculation
        LOWP vec3 deltaPos = vec3((light.xy - gl_FragCoord.xy) / resolution.xy, light.z);
        LOWP vec3 lightDir = normalize(deltaPos);
        LOWP float lambert = clamp(dot(normal, lightDir), 0.0, 1.0);
        // falloff; length() == sqrt(dot(v,v)) but clearer and maps directly
        // to the hardware instruction
        LOWP float d = length(deltaPos);
        LOWP float att = 1.0 / (kAttenuation.x + kAttenuation.y*d + kAttenuation.z*d*d);
        LOWP vec3 result = (kAmbientColor * kAmbientIntensity) + (kLightColor * lambert) * att;
        result *= color.rgb;
        // the original multiplied by a constant-white v_color, which is a no-op
        color = vec4(result, color.a);
    }
    // shoreline blend: factor ramps 0..1 over height 0.65..0.67, else 0
    float a = max(height - 0.65, 0.0);
    color = mix(color, terrainTex, a < 0.02 ? a*50.0 : 0.0);
    // grass fade-in just above the waterline
    if (height > 0.67 && height <= 0.70)
    {
        a = height - 0.67;
        color = mix(terrainTex, color, a*33.0);
    }
    gl_FragColor = color; //mix(color, hex, hex.a);
}
Vertex shader
#ifdef GL_ES
#define LOWP lowp
precision mediump float;
#else
#define LOWP
#endif
attribute vec4 a_position;
attribute vec2 a_texCoords;
uniform mat4 u_worldView;
// Unused varyings `noise` and `Explored` removed: the fragment shader never
// reads them, and declared varyings can still consume interpolator slots on
// mobile GPUs.
varying float EyeVertexZ;
// LOWP to match the fragment shader's declaration of the same varying —
// mismatched precision across stages can fail to link on some ES drivers.
varying LOWP vec2 v_texCoords;

// Transforms the vertex and forwards the raw model-space z, which the
// fragment shader thresholds as terrain "height".
void main() {
    gl_Position = u_worldView * a_position;
    EyeVertexZ = a_position.z;
    v_texCoords = a_texCoords;
}

Related

why this shader crashes

precision highp float;
uniform sampler2D uTexture;
varying vec2 vCoordinate;
uniform float uAlpha;
varying vec4 vColor;   // declared but never read in this shader

// Samples the texture and applies uAlpha, then overwrites every channel
// with 1.0 — so the final output is always opaque white (debug output).
void main(){
    vec4 texel = texture2D(uTexture, vCoordinate);
    texel.a *= uAlpha;
    texel = vec4(1.0, 1.0, 1.0, 1.0);
    gl_FragColor = texel;
    // gl_FragColor = vec4(1.0,0.0,0.0,1.0);
}
It crashes when I'm drawing a plane, but if I use this fragment shader on a sphere it works well.

How to remove horizontal stripe artifact from GLSL shader?

I'm working on a custom photo-with-depth-effect view. I am passing touch coordinates to the GLSurfaceView renderer to "change perspective". But there are horizontal stripes between the texture and its mirror when doing this.
My fragment shader code:
#ifdef GL_ES
precision highp float;
#endif
varying vec2 texcoordVarying;
uniform sampler2D texture;
uniform sampler2D depth;
uniform float time;
uniform vec2 touch;
uniform vec2 limit;
uniform vec4 resolution;

// Triangle-wave wrap into [0,1]: folds v back at every integer boundary so
// the texture repeats with alternating mirroring.
vec2 mirrored(vec2 v) {
    vec2 wrapped = mod(v, 2.);
    vec2 folded = 2.0 - wrapped;
    return mix(wrapped, folded, step(1.0, wrapped));
}

// Parallax ("fake 3d") effect: the depth map shifts the sample position in
// the direction of the touch offset.
void main(void) {
    vec2 screenUv = 1.0 * gl_FragCoord.xy / resolution.xy;
    // re-center and scale by resolution.zw, then flip vertically
    vec2 centered = (screenUv - vec2(0.5)) * resolution.zw + vec2(0.5);
    centered.y = 1. - centered.y;
    // depth sample (re-centered around 0) drives the per-pixel offset
    float parallax = texture2D(depth, mirrored(centered)).r - 0.5;
    vec2 shifted = centered + parallax * touch / limit;
    gl_FragColor = texture2D(texture, mirrored(shifted));
}
I am passing coordinates like this:
queueEvent {
renderer.touchTargetX = event.rawX / renderer.width
renderer.touchTargetY = event.rawY / renderer.height
}
requestRender()
If your wrap mode is GL_REPEAT, try using GL_CLAMP_TO_EDGE.

Lighting in Android, OpenGL ES 3.0

This is my first question here
The thing is I'm programming an app with OpenGL ES in Android Studio, and I've encountered some problems.
The lights seem to work on the emulator, but when building the APK and installing it on an actual device (with Android 5.0, with opengl es version 3.0 and support for glsl es 3.00), it looks weird:
Emulator, Android Studio
Emulator
Phone:
Phone
It looks like the lighting is not done per fragment on the phone or something; I don't know what's happening.
Here are the shaders:
#version 300 es
uniform mat4 M;
uniform mat4 V;
uniform mat4 P;
uniform int boolGUI;
uniform float opacity;
uniform float blackness;
in vec3 vPosition;
in vec2 vUV;
in vec3 vNormal;
//Lights
uniform mat4 lMat;
// NOTE(review): forwarding whole mat4s (frLMat, frV) as varyings is costly —
// a fragment shader can read `lMat`/`V` as uniforms directly. Kept here
// because the paired fragment shader declares them as `in`.
out mat4 frLMat;
out vec3 frColor;
out vec3 frNormalCam;
out vec3 frEyeCam;
out vec2 oUV;
out float z;
flat out int frBoolGUI;
flat out float frOpacity;
flat out float frBlackness;
out vec3 frPos;
out mat4 frV;

// Transforms the vertex and forwards positions/normals in camera space plus
// the per-draw flags to the fragment stage.
void main() {
    mat4 uMVPMatrix = P * V * M;
    vec4 vPos = vec4(vPosition, 1.0);
    gl_Position = uMVPMatrix * vPos;
    frColor = vec3(1.0, 1.0, 1.0);
    // The original computed a local `light` vector here, immediately
    // overwrote it, and never read it after `frLightCam` was commented out;
    // that dead code has been removed.
    frNormalCam = ( V * M * vec4(normalize(vNormal), 0)).xyz;
    frEyeCam = (V*vec4(0,-3,2.5,0)).xyz;
    oUV = vUV.xy;
    z = vPos.z;
    frBoolGUI = boolGUI;
    frOpacity = opacity;
    frBlackness = blackness;
    frPos = (M*vPos).xyz;
    frV = V;
    frLMat=lMat;
}
And the fragment shader:
#version 300 es
precision mediump float;
in vec3 frColor;
in vec3 frNormalCam;
in vec3 frEyeCam;
in vec3 frPos;
in vec2 oUV;
in float z;
flat in int frBoolGUI;
flat in float frOpacity;
flat in float frBlackness;
out vec4 FragColor;
in mat4 frV;
in mat4 frLMat;
uniform sampler2D myTextureSampler;

// Single-light diffuse+specular shading with a gamma/blackness post step.
// GUI fragments (frBoolGUI == 1) bypass lighting entirely.
void main() {
    // Sample the texture exactly once; the original fetched the same texel
    // twice (rgb and then w).
    vec4 texel = texture( myTextureSampler, oUV );
    float att = 0.0;
    // Only light 0 is evaluated — the original loop over 4 lights is disabled.
    int i=0;
    if(frLMat[i][3]>0.0) {
        vec3 li=frLMat[i].xyz;
        vec3 lightVec = -frPos.xyz+li;
        vec3 light = normalize(lightVec)*0.6;
        vec3 frLightCam = (frV * vec4(light,0)).xyz;
        // distance falloff, clamped to a 0.4 floor
        float lightAtt = 0.6/(length(lightVec))+0.4;
        float ambient = 0.40;
        vec3 E = normalize(frEyeCam);
        // NOTE(review): frNormalCam is interpolated but never re-normalized
        // here; `normalize(frNormalCam)` may fix device-dependent lighting —
        // confirm before changing behavior.
        vec3 R = reflect(-frLightCam,frNormalCam);
        float cosAlpha = clamp( dot( E,R ), 0.0,1.0 );
        float spec = pow(cosAlpha,4.0)*5.0;
        att += ((spec + clamp( dot(frNormalCam,frLightCam)*(1.0-ambient), 0.0,1.0 ))*lightAtt+ambient)*frLMat[i].w;
    }
    FragColor.xyz = texel.rgb * att;
    FragColor.w = texel.w;
    // Only the second of the two original gamma assignments (1.0/0.8) ever
    // took effect; the dead vec3(1.0/0.85) initializer has been removed.
    vec3 gamma = vec3(1.0/0.8);
    FragColor.xyz = pow(FragColor.xyz,gamma)*(frBlackness+vec3(0.05))+vec3(0.06);
    FragColor.a = frOpacity;
    if (frBoolGUI==1) FragColor = texel;
}
(Please excuse the clumsy code, im only testing right now).
Any help would be appreciated

Game runs fine on newer devices but not on older

My games runs fine on newer devices (eg..droid turbo, galaxy note 3) but on my motorola xoom there are problems, such as :
1) My shaderProgram causes the stage that is drawn inside of it to not be rendered
2) Touch inputs don't work every time using InputProcessor (only senses 90% of the time)
These are the problems I currently notice. Could this be because it is an older OS version (4.1.2)? Not that old. Are there known bugs with libgdx's ShaderProgram? Even then, the touch input not sensing every click is very strange. Thanks for your time and help! My rendering code for the ShaderProgram is all correct, so no need to show that. And for touch input, I am just using the touchDown method from InputProcessor.
EDIT :
Error produced by shaderProgram
"(22) : error C1101: ambiguous overloaded function reference "smoothstep(mediump float, float, lowp float)"
Vertex Shader
#version 100
attribute vec4 a_position;
attribute vec4 a_color;
attribute vec2 a_texCoord0;
uniform mat4 u_worldView;
uniform vec2 u_lightPos;
uniform vec3 u_lightColor;
uniform vec4 u_spaceColor;
varying vec4 v_color;
varying vec2 v_texCoords;
varying vec2 v_lightPos;
varying vec3 v_lightColor;
varying vec2 v_position;
varying vec4 v_spaceColor;

// Pass-through vertex shader: transforms the position and forwards the
// per-vertex attributes and the light/space uniforms to the fragment stage.
void main() {
    gl_Position = u_worldView * a_position;
    v_position.xy = a_position.xy;
    v_texCoords = a_texCoord0;
    v_color = a_color;
    v_lightPos = u_lightPos;
    v_lightColor = u_lightColor;
    v_spaceColor = u_spaceColor;
}
Fragment Shader
#version 100
#ifdef GL_ES
precision lowp float;
#endif
varying vec4 v_color;
varying vec2 v_texCoords;
varying vec2 v_lightPos;
varying vec3 v_lightColor;
varying vec2 v_position;
varying vec4 v_spaceColor;
uniform sampler2D u_texture;

// Blends space color vs. textured color based on distance from four corner
// points. Fixes the Tegra/older-driver compile error
// "ambiguous overloaded function reference smoothstep(mediump float, float,
// lowp float)" by giving all smoothstep arguments an explicit, uniformly
// qualified float type.
void main() {
    for(int row = 0; row < 2; row++) {
        for(int col = 0; col < 2; col++) {
            float dist = distance(v_position, vec2(-1 + col, 1 - row));
            float delta = 0.1;
            // Assigning both edges to named floats first forces a single
            // precision for every argument, which resolves the overload
            // ambiguity on strict compilers.
            float edge0 = 100.0 - delta;
            float edge1 = 100.0;
            float alpha = smoothstep(edge0, edge1, dist);
            if(dist > 23.0){
                gl_FragColor = mix(v_spaceColor, v_color, alpha);
            }
            else{
                gl_FragColor = v_color * texture2D(u_texture, v_texCoords);
            }
            // NOTE(review): gl_FragColor is overwritten on every iteration,
            // so only row==1,col==1 survives — confirm whether the loop was
            // meant to accumulate instead of overwrite.
        }
    }
}

GL_BYTE normals not interpreted properly in Android shader

My shader works GREAT on iOS and Win32. The problem is Android. All devices really botch the normals if I send them as GL_BYTE. If I convert them to GL_FLOAT, everything's peachy.
glVertexAttribPointer(program->slots[Shaders::A_NORMAL_SLOT], 3, GL_FLOAT, GL_FALSE, 0, frame->normals);
glVertexAttribPointer(program->slots[Shaders::A_NORMAL_SLOT], 3, GL_BYTE, GL_TRUE, 0, frame->normals);
and YES, 'frame->normals' is of type float* in the first example, and of type char* in the second example. Like I said, this exact same code works great on iOS/Win32.
What's going on here? Is this some kind of Android bug that it won't accept byte normals?
My shader, Vertex:
uniform mat3 normal_matrix;
uniform mat4 mvp_matrix;
uniform vec3 u_lightpos;
uniform vec3 u_eyepos;
uniform vec4 u_color;
attribute vec4 a_vertex;
attribute vec3 a_normal;
attribute vec2 a_texcoord0;
attribute vec4 a_color;
varying vec2 v_texcoord0;
varying vec3 v_viewDir;
varying vec3 v_lightDir;
varying vec3 v_normal;
varying vec4 v_color;

// Transforms the vertex, rotates the normal into the lighting space, and
// forwards the remaining lighting inputs unchanged.
void main( void )
{
    gl_Position = mvp_matrix * a_vertex;
    v_normal = normalize(normal_matrix * a_normal);
    v_color = u_color * a_color;
    v_texcoord0 = a_texcoord0.xy;
    // eye and light positions are constant per draw call; forwarded as-is
    v_viewDir = u_eyepos;
    v_lightDir = u_lightpos;
}
Fragment:
#ifdef GL_ES
precision mediump float;
#endif
uniform vec4 u_specular;
uniform vec4 u_diffuse;
uniform float u_specularpower;
uniform sampler2D t_texture0;
varying vec2 v_texcoord0;
varying vec3 v_viewDir;
varying vec3 v_lightDir;
varying vec3 v_normal;
varying vec4 v_color;

// Per-fragment Phong shading: diffuse from N·L, specular from the reflection
// vector raised to u_specularpower.
void main( void )
{
    float nDotL = dot( v_normal, v_lightDir );
    // reflection of the light direction about the interpolated normal
    vec3 reflected = normalize( 2.0 * v_normal * nDotL - v_lightDir );
    vec4 base = v_color * texture2D( t_texture0, v_texcoord0 );
    vec4 diffuse = vec4( (u_diffuse * nDotL).xyz, 1.0 ) * base;
    float specFactor = pow( max( 0.0, dot( reflected, -v_viewDir ) ), u_specularpower );
    vec4 specular = u_specular * specFactor;
    // alpha comes from the vertex color, not from the summed lighting terms
    gl_FragColor = vec4( ( base + diffuse + specular ).xyz, v_color.w );
}

Categories

Resources