GLSL rippling effect, bad performance on Android

I needed to create a rippling effect for one sprite in my game. Here's the vertex shader:
attribute vec4 a_position; // just taking in the necessary attributes
attribute vec2 a_texCoord0;
uniform mat4 u_projTrans; // combined view and projection matrix
varying vec2 v_texCoords;

void main() {
    v_texCoords = a_texCoord0;
    gl_Position = u_projTrans * a_position; // as I said, it's a sprite, so no model matrix is needed
}
and here's the fragment shader:
#ifdef GL_ES
precision mediump float;
#endif
varying vec2 v_texCoords;
uniform sampler2D u_texture; // texture of the sprite
uniform float time;

void main()
{
    vec2 uv;
    if (time > 0.0) { // time is > 0.0 when I want the rippling effect applied
        vec2 cPos = -1.0 + 2.0 * v_texCoords.xy;   // remap tex coords to -1..1
        float cLength = length(cPos);              // distance from the center
        uv = v_texCoords.xy + (cPos / cLength) * cos(cLength * 12.0 - time * 4.0) * 0.03; // ripple offset
    }
    else {
        uv = v_texCoords.xy; // no ripple: use the plain tex coords
    }
    vec4 tex = texture2D(u_texture, uv); // sample the texture
    gl_FragColor = tex;
}
It all works fine, and performance is fine on PC, but on Android the performance is a lot worse. As you can see, the shaders are trivial, yet somehow they are expensive. The sprite I draw is about 2000-4000 px wide and 720 px high. Also, when I replace v_texCoords with a constant vector (for example vec2(1, 1)) in the cPos calculation (vec2 cPos = -1.0 + 2.0 * v_texCoords.xy;), performance improves heavily.
I don't really know what's so expensive there. If anyone has any advice, I'd be happy. Thanks in advance.
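(A note on the observation above: a fast constant coordinate but a slow computed one is the classic symptom of a dependent texture read, which is expensive on many mobile GPUs; the per-fragment if/else can also hurt on older tile-based hardware. As a sketch of one thing worth trying, here is a branch-free variant of the fragment shader; the strength uniform is hypothetical, set by the application to 0.0 when the effect is off and 1.0 when it is on:
#ifdef GL_ES
precision mediump float;
#endif
varying vec2 v_texCoords;
uniform sampler2D u_texture;
uniform float time;
uniform float strength; // hypothetical: 0.0 = ripple off, 1.0 = ripple on

void main()
{
    vec2 cPos = -1.0 + 2.0 * v_texCoords.xy;
    float cLength = length(cPos);
    // multiplying by strength replaces the if/else; the math is otherwise unchanged
    vec2 uv = v_texCoords.xy + strength * (cPos / cLength) * cos(cLength * 12.0 - time * 4.0) * 0.03;
    gl_FragColor = texture2D(u_texture, uv);
}
Whether this helps depends on the GPU; the dependent read itself remains, so binding a plain pass-through shader whenever the effect is inactive may be the bigger win.)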

Related

Refraction in OpenGL ES 2.0/3.0: large pixels in the texture

I'm trying to implement refraction in OpenGL ES 2.0/3.0, using the following shaders:
Vertex shader:
#version 300 es
precision lowp float;
uniform mat4 u_mvMatrix;
in vec4 a_position;
in vec3 a_normal;
...
out mediump vec2 v_refractCoord;
const mediump float eta = 0.95;

void main() {
    vec4 eyePositionModel = u_mvMatrix * a_position;
    // eye direction in model space
    mediump vec3 eyeDirectModel = normalize(a_position.xyz - eyePositionModel.xyz);
    // calculate refraction direction in model space
    mediump vec3 refractDirect = refract(eyeDirectModel, a_normal, eta);
    // project refraction
    refractDirect = (u_mvpMatrix * vec4(refractDirect, 0.0)).xyw;
    // map refraction direction to 2d coordinates
    v_refractCoord = 0.5 * (refractDirect.xy / refractDirect.z) + 0.5;
    ...
}
Fragment shader:
...
in mediump vec2 v_refractCoord;
uniform samplerCube s_texture; // skybox

void main() {
    outColor = texture(s_texture, vec3(v_refractCoord, 1.0));
}
Method for loading texture:
@JvmStatic
fun createTextureCubemap(context: Context, rowID: Int): Int {
    val input = context.resources.openRawResource(rowID)
    val bitmap = BitmapFactory.decodeStream(input)
    val textureId = IntArray(1)
    glGenTextures(1, textureId, 0)
    glBindTexture(GL_TEXTURE_CUBE_MAP, textureId[0])
    // the same bitmap is uploaded to all six cube faces
    GLUtils.texImage2D(GL_TEXTURE_CUBE_MAP_POSITIVE_X, 0, bitmap, 0)
    GLUtils.texImage2D(GL_TEXTURE_CUBE_MAP_NEGATIVE_X, 0, bitmap, 0)
    GLUtils.texImage2D(GL_TEXTURE_CUBE_MAP_POSITIVE_Y, 0, bitmap, 0)
    GLUtils.texImage2D(GL_TEXTURE_CUBE_MAP_NEGATIVE_Y, 0, bitmap, 0)
    GLUtils.texImage2D(GL_TEXTURE_CUBE_MAP_POSITIVE_Z, 0, bitmap, 0)
    GLUtils.texImage2D(GL_TEXTURE_CUBE_MAP_NEGATIVE_Z, 0, bitmap, 0)
    glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_MIN_FILTER, GL_NEAREST)
    glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_MAG_FILTER, GL_NEAREST)
    return textureId[0]
}
But the texture comes out with large, blocky pixels, like this:
What could be the reason for this? Maybe this is normal for a low-poly model? It seems that the texture is too close.
Note: the fewer the polygons, the worse the quality becomes.
Thanks in advance for any comment/answer!
Solution: On @Rabbid76's advice, I changed the normal data. It turned out that in Blender you need to set the object's shading to smooth (not flat); this increases the number of normals when exporting to the *.obj format: Why OBJ export writes face normals instead of vertex normals
Also, on @Rabbid76's advice, I changed the line to:
vec3 eyeDirectModel = normalize(- eyePositionModel.xyz);
As a result, the pixelation disappeared.
In addition, pixel artifacts may also appear when the refraction is calculated in the vertex shader, so I moved the calculation to the fragment shader. Here is the modified shader code:
Vertex shader:
#version 300 es
precision lowp float;
uniform mat4 u_mvpMatrix;
uniform mat4 u_mvMatrix;
in vec4 a_position;
in vec3 a_normal;
out vec3 v_normal;
out lowp float SpecularIntensity;
out vec3 v_eyeDirectModel;

float getSpecularIntensity(vec4 position, vec3 a_normal, vec3 eyeDirectModel) {
    float shininess = 30.0;
    vec3 lightPosition = vec3(-20.0, 0.0, 0.0);
    mediump vec3 LightDirModel = normalize(lightPosition - position.xyz);
    mediump vec3 halfVector = normalize(LightDirModel + eyeDirectModel);
    lowp float NdotH = max(dot(a_normal, halfVector), 0.0);
    return pow(NdotH, shininess);
}

void main() {
    v_normal = a_normal;
    vec4 eyePositionModel = u_mvMatrix * a_position;
    // eye direction in view space (the eye is at the origin there)
    vec3 eyeDirectModel = normalize(- eyePositionModel.xyz);
    // specular lighting
    SpecularIntensity = getSpecularIntensity(a_position, a_normal, eyeDirectModel);
    v_eyeDirectModel = eyeDirectModel;
    gl_Position = u_mvpMatrix * a_position;
}
Fragment shader:
#version 300 es
precision lowp float;
uniform mat4 u_mvpMatrix;
in vec3 v_normal;
in lowp float SpecularIntensity;
in vec3 v_eyeDirectModel;
out vec4 outColor;
uniform samplerCube s_texture; // skybox
const float eta = 0.65;

void main() {
    // calculate refraction direction
    vec3 refractDirect = refract(v_eyeDirectModel, normalize(v_normal), eta);
    // project refraction
    refractDirect = (u_mvpMatrix * vec4(refractDirect, 0.0)).xyw;
    // map refraction direction to 2d coordinates
    vec2 refractCoord = 0.5 * (refractDirect.xy / refractDirect.z) + 0.5;
    vec4 glassColor = texture(s_texture, vec3(refractCoord, 1.0));
    outColor = glassColor + SpecularIntensity;
    outColor.a = 0.8; // transparency
}
First of all, there is a mistake in the shader code. a_position.xyz - eyePositionModel.xyz does not make any sense, since a_position is the vertex coordinate in model space and eyePositionModel is the vertex coordinate in view space.
You have to compute the incident vector for refract in view space. That is the vector from the eye position to the vertex. Since the eye position in view space is (0, 0, 0), it is:
vec4 eyePositionView = u_mvMatrix * a_position;
// eye direction in view space
mediump vec3 eyeDirectView = normalize(- eyePositionView.xyz);
Furthermore, there is an issue with the normal vector attributes.
The problem is caused by the fact that the normal vectors are computed per face rather than individually for each vertex.
Note that the refraction direction (refractDirect) depends on the vertex coordinate (eyeDirectModel) and the normal vector (a_normal):
mediump vec3 refractDirect = refract(eyeDirectModel, a_normal, eta);
Since the normal vectors differ between adjacent faces, you can see a noticeable edge between the faces of the mesh.
If the normal vectors are computed per vertex, then adjacent faces share the vertex coordinates and the corresponding normal vectors. That causes a smooth transition from face to face.

Why is there an offset when I render this overlay?

I use the Vuforia SDK to render the video stream of my phone's camera on the screen.
So the texture is generated by the Vuforia library, not by me.
The shaders used to render this background are:
// Vertex Shader
attribute vec4 a_position;
attribute vec2 a_textureCoords;
varying vec2 v_textureCoords;
uniform mat4 u_projectionMatrix;

void main()
{
    gl_Position = u_projectionMatrix * a_position;
    v_textureCoords = a_textureCoords;
}

// Fragment Shader
varying highp vec2 v_textureCoords;
uniform sampler2D u_currentTexture;

void main()
{
    vec4 currentColor = texture2D(u_currentTexture, v_textureCoords);
    gl_FragColor = currentColor;
}
Now, I want an overlay in the upper-left corner of the screen:
I don't want this overlay to display only a pink texture, but rather a multiply blend of the pink texture and the background texture. Note that the textures do not have the same coordinates.
But for now, let's forget about the blending and just render the background texture in the shader program of the pink texture. So in the end, yes, one should see no difference between the background-only version and the background-with-overlay version.
As you can see (look at the painting and the top of the chair), there is a small offset...
The shaders used to render the overlay are:
// Vertex Shader
attribute vec4 a_position;
attribute vec2 a_currentTextureCoords;
varying vec2 v_currentTextureCoords;
void main()
{
gl_Position = a_position;
v_currentTextureCoords = a_currentTextureCoords;
}
// Fragment Shader
varying highp vec2 v_currentTextureCoords;
uniform sampler2D u_currentTexture;
uniform sampler2D u_backgroundTexture;
void main()
{
vec2 screenSize = vec2(1080.0, 1920.0);
vec2 cameraResolution = vec2(720.0, 1280.0);
vec2 texelSize = vec2(1.0 / screenSize.x, 1.0 / screenSize.y);
vec2 scaleFactor = vec2(cameraResolution.x / screenSize.x, cameraResolution.y / screenSize.y);
vec2 uv = gl_FragCoord.xy * texelSize * scaleFactor;
uv = vec2(scaleFactor.y - uv.y, scaleFactor.x - uv.x);
vec4 backgroundColor = texture2D(u_backgroundTexture, uv);
gl_FragColor = backgroundColor;
}
Are my calculations wrong?
Why do you need this line?
uv = vec2(scaleFactor.y - uv.y, scaleFactor.x - uv.x);
I'm not sure what arithmetic relationship the absolute texture coordinates have with the scale factor that would call for an addition or a subtraction ...
P.S. It's not related to your question, but your shaders will be shorter and easier to read if you just use the vector operations in the language. For example, replace:
vec2 scaleFactor = vec2(cameraResolution.x / screenSize.x, cameraResolution.y / screenSize.y);
... with ...
vec2 scaleFactor = cameraResolution / screenSize;
As long as the vector types are the same length, it will do exactly what you expect with a lot less typing ...
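For example, the body of the overlay fragment shader above could shrink to something like this (a sketch that keeps the original logic intact, including the swizzle/subtraction line being questioned):
vec2 screenSize = vec2(1080.0, 1920.0);
vec2 cameraResolution = vec2(720.0, 1280.0);
vec2 scaleFactor = cameraResolution / screenSize;
vec2 uv = gl_FragCoord.xy / screenSize * scaleFactor; // dividing by screenSize replaces texelSize
uv = scaleFactor.yx - uv.yx; // same as vec2(scaleFactor.y - uv.y, scaleFactor.x - uv.x)
gl_FragColor = texture2D(u_backgroundTexture, uv);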

OpenGL ES - How to implement the Glossy shader from Blender Cycles Render?

In Blender's Cycles render, there's a shader type called Glossy. I want to implement this glossy shader using OpenGL ES 2.0 on Android, and I'm wondering where I can find a GLSL implementation of it. Below is an image displaying the glossy reflection (see the reflection on the plane).
That effect is basically just a Gaussian blur filter. There is a good example of this for OpenGL ES 2.0 in the PowerVR SDK, in these files:
\Examples\Intermediate\Bloom\OGLES2\BlurVertShader.vsh:
// Blur filter kernel shader
//
// 0  1  2  3  4
// x--x--X--x--x    <- original filter kernel
//    y---X---y     <- filter kernel abusing the hardware texture filtering
//        |
//        texel center
//
// Using hardware texture filtering, the amount of samples can be
// reduced to three. To calculate the offset, use this formula:
// d = w1 / (w1 + w2), whereas w1 and w2 denote the filter kernel weights
attribute highp vec3 inVertex;
attribute mediump vec2 inTexCoord;
uniform mediump float TexelOffsetX;
uniform mediump float TexelOffsetY;
varying mediump vec2 TexCoord0;
varying mediump vec2 TexCoord1;
varying mediump vec2 TexCoord2;

void main()
{
    // Pass through vertex
    gl_Position = vec4(inVertex, 1.0);
    // Calculate texture offsets and pass through
    mediump vec2 offset = vec2(TexelOffsetX, TexelOffsetY);
    TexCoord0 = inTexCoord - offset;
    TexCoord1 = inTexCoord;
    TexCoord2 = inTexCoord + offset;
}
\Examples\Intermediate\Bloom\OGLES2\BlurFragShader.fsh:
uniform lowp sampler2D sTexture;
/*
Separated Gaussian 5x5 filter, first row: 1 5 6 5 1
*/
varying mediump vec2 TexCoord0;
varying mediump vec2 TexCoord1;
varying mediump vec2 TexCoord2;

void main()
{
    lowp vec3 color = texture2D(sTexture, TexCoord0).rgb * 0.333333;
    color = color + texture2D(sTexture, TexCoord1).rgb * 0.333333;
    color = color + texture2D(sTexture, TexCoord2).rgb * 0.333333;
    gl_FragColor.rgb = color;
}
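(To see where the three 0.333333 weights come from: the separated kernel row is 1 5 6 5 1, which sums to 18. Each outer pair of taps is merged into one bilinear fetch with weight (1 + 5) / 18 = 1/3, and the center tap keeps 6 / 18 = 1/3, so all three samples are weighted equally. The position of each merged fetch follows the d = w1 / (w1 + w2) formula from the vertex shader comment, i.e. the weighted point between the two texels being combined.)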

OpenGL Rendering issue on Samsung Devices

I have an application drawing some objects with OpenGL ES 2.0. The application fails to render on some Samsung devices. I tried debugging, and it seems to be a problem with the vertex and fragment shaders.
Here is my shader code:
Vertex Shader:
attribute vec3 Position;
attribute vec4 SourceColor;
varying vec4 DestinationColor;
uniform mat4 Projection;
uniform mat4 Modelview;
uniform mat4 CordTransform;
attribute float flag;
attribute float clubColorFlag;
attribute vec2 TexCoordIn; // New
varying vec2 TexCoordOut; // New
varying float flagS;
varying float flagClubColorS;

void main(void) {
    gl_Position = Projection * Modelview * vec4(Position, 1.0);
    flagS = flag;
    flagClubColorS = clubColorFlag;
    if (clubColorFlag == 1.0) {
        DestinationColor = vec4(0.190, 0.309, 0.309, 1.0);
    }
    else {
        DestinationColor = SourceColor;
    }
    TexCoordOut = TexCoordIn;
    gl_PointSize = 1.0;
}
Fragment Shader:
varying lowp vec4 DestinationColor;
varying lowp vec2 TexCoordOut; // New
uniform sampler2D Texture; // New
varying lowp float flagS;
varying lowp float flagClubColorS;

void main(void) {
    gl_FragColor = DestinationColor;
    if (flagS == 1.0) {
        gl_FragColor = DestinationColor;
    }
    else if (flagClubColorS == 1.0) {
        gl_FragColor = DestinationColor;
    }
    else if (flagS == 0.0) {
        gl_FragColor = DestinationColor * texture2D(Texture, TexCoordOut); // New
    }
}
I am not sure what the problem is, but I get -1 for the texture uniform's location if I comment out the if-else part in the fragment shader. In the other scenario, it is zero. Both shaders compile without any errors.
Is it related to precision? Please help me debug the issue.
I am answering my own question for people who refer to this later. I was able to solve the issue, more or less.
The problem was not in either of the shaders. The issue was that I was drawing objects with glDrawElements() using vertex and index buffers. I replaced this call with glDrawArrays() using only a vertex buffer, and everything worked fine.
I am still not sure of the exact cause, but it may help somebody struggling with a similar issue.
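(On the precision question asked above, as an aside rather than the confirmed cause: exact comparisons such as flagS == 1.0 on interpolated varyings are fragile, since the interpolated value is not guaranteed to be bit-exact on every GPU. A more robust sketch of the same branching uses thresholds:
// compare interpolated flags against a threshold instead of exact equality
if (flagS > 0.5) {
    gl_FragColor = DestinationColor;
} else if (flagClubColorS > 0.5) {
    gl_FragColor = DestinationColor;
} else {
    gl_FragColor = DestinationColor * texture2D(Texture, TexCoordOut);
}
This keeps the original behavior for flag values of exactly 0.0 and 1.0 while tolerating interpolation error.)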

OpenGL ES 2.0, messing with all things

I'm making a multi-platform game engine (open source in the future), and I was making a game based on it. When testing on a Samsung Galaxy S2 (I9100B), it runs perfectly, but when I try to run it on my other phone (a Samsung Galaxy S), things get messy.
Here is a screenshot when running on Galaxy S2:
And here is when I run on the Galaxy S:
I managed to reduce the number of triangles in the scene, but even with 50 triangles on the screen I got the same problem.
Disabling the lighting reduces the problem, but does not eliminate it. I thought it was a memory problem on my phone, so I tried on another Galaxy S, but the same problem occurred.
Does anyone know where I can start looking? Automatic GC is not frequent (about 2 times per 5 seconds).
Samsung Galaxy S2:
Android Version 2.3.4
Kernel: 2.6.35.7-I9100UHKI2-CL553601 se.infra#SEI-07 #2
Samsung Galaxy S:
Android Version 2.3.3
Kernel: 2.6.35.7-I9000BVJJW4-CL240848 pescio#bldhp-4 #28
Fragment Shader Code:
precision mediump float;
uniform sampler2D uSampler;
uniform float uIsPoint;
uniform float uEnableLight;
uniform float uDisableTexture;
varying vec4 vColor;
varying vec2 vTextureCoord;
varying vec4 vPosition;
uniform vec3 uPointLightingColor;
varying vec3 vColorWeight;

void main() {
    if (uIsPoint >= 2.0) {
        gl_FragColor = vec4(1.0, 1.0, 1.0, 1.0);
    } else {
        vec4 calcColor;
        if (uEnableLight >= 2.0)
            calcColor = vec4(vColor.rgb * vColorWeight, vColor.a);
        else
            calcColor = vColor;
        vec4 texColor = vec4(1.0, 1.0, 1.0, 1.0);
        if (uDisableTexture < 2.0)
            texColor = texture2D(uSampler, vTextureCoord);
        gl_FragColor = vec4(texColor.rgb * calcColor.rgb, texColor.a * calcColor.a);
    }
}
Vertex Shader Code:
// Attributes
uniform mat4 uMVMatrix; // model/view matrix
uniform mat4 uPMatrix;  // model/view/projection matrix
uniform mat3 uNMatrix;  // normal matrix
attribute vec3 aVertexPosition;
attribute vec4 aVertexColor;
attribute vec2 aTextureCoord;
attribute vec3 aNormal;
varying vec4 vColor;
varying vec2 vTextureCoord;
varying vec3 vNormal;
varying vec4 vPosition;

// Lighting
uniform vec3 uAmbientColor;
uniform vec3 uLightDir;
uniform vec3 uLightColor;
uniform vec3 uSpecLightColor;
uniform float uShine;
varying vec3 vColorWeight;

void main() {
    // Lighting
    vec3 normal = normalize(uNMatrix * aNormal);
    vec3 lightNorm = normalize(uLightDir);
    float lightWeight = max(dot(aNormal, lightNorm), 0.0);
    // note: gl_Position is read here before it is assigned below
    vec3 halfVec = normalize(uLightDir - gl_Position.xyz);
    float specWeight = pow(max(dot(normal, halfVec), 0.0), uShine);
    vColorWeight = uAmbientColor + (lightWeight * uLightColor) + (uSpecLightColor * specWeight);

    // Others
    vNormal = uNMatrix * aNormal;
    vPosition = uMVMatrix * vec4(aVertexPosition, 1.0);
    gl_Position = uPMatrix * vPosition;
    vColor = aVertexColor;
    gl_PointSize = 2.0;
    vTextureCoord = aTextureCoord;
}
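(An editorial aside, separate from the answers below: reading gl_Position in the halfVec line before it is assigned is undefined behavior and may well differ between GPUs. A sketch of the same main() reordered so the lighting uses the already-computed view-space position instead:
void main() {
    // compute positions first so the lighting below can use them
    vPosition = uMVMatrix * vec4(aVertexPosition, 1.0);
    gl_Position = uPMatrix * vPosition;

    // lighting
    vec3 normal = normalize(uNMatrix * aNormal);
    vec3 lightNorm = normalize(uLightDir);
    float lightWeight = max(dot(aNormal, lightNorm), 0.0);
    vec3 halfVec = normalize(uLightDir - vPosition.xyz); // vPosition is valid here
    float specWeight = pow(max(dot(normal, halfVec), 0.0), uShine);
    vColorWeight = uAmbientColor + (lightWeight * uLightColor) + (uSpecLightColor * specWeight);

    // others
    vNormal = uNMatrix * aNormal;
    vColor = aVertexColor;
    gl_PointSize = 2.0;
    vTextureCoord = aTextureCoord;
}
The rest of the math is kept as in the original.)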
Try increasing your depth buffer precision.
http://www.opengl.org/wiki/Depth_Buffer_Precision
This looks like a vertex ordering issue. The Galaxy S probably has back-face culling turned on by default, so it will remove all triangles that do not face the viewer.
Which triangles count as front-facing is controlled with glFrontFace().
You should try both clockwise and counter-clockwise to see if the culled and non-culled triangles switch places. If they do, you must either turn off back-face culling or make sure the vertex order is the same for all triangles.
