I have an mvpMatrix (calculated as model * view * projection using Matrix.multiply()) and I pass it to the shader; that works fine (the CPU-side path is sketched after the drawing code below for reference).
Now I'd like to move the multiplication from the CPU to the GPU (from Matrix.multiply() to the shader).
If I pass the model, view and projection matrices separately and multiply them in the shader, I see nothing.
I'm sure the matrices are not null and contain real values (if I multiply them on the CPU instead of the GPU it works), and the matrix uniform locations are >= 0 (they are found in the shader). What can be wrong?
Shaders:
private final String vertexShader =
"uniform mat4 u_mMatrix; \n" // Матрица модели
+ "uniform mat4 u_vMatrix; \n" // Матрица модели
+ "uniform mat4 u_pMatrix; \n" // Матрица модели
+ "attribute vec4 a_Position; \n" // Информация о положении вершин.
+ "attribute vec2 a_TexCoordinate; \n" // Per-vertex texture coordinate information we will pass in.
+ "varying vec2 v_TexCoordinate; \n" // This will be passed into the fragment shader.
+ "void main() \n" // Начало программы вершинного шейдера.
+ "{ \n"
+ " v_TexCoordinate = a_TexCoordinate; \n"
+ " mat4 mvMatrix = u_mMatrix * u_vMatrix;"
+ " mat4 mvpMatrix = mvMatrix * u_pMatrix;"
+ " gl_Position = mvpMatrix \n" // gl_Position специальные переменные используемые для хранения конечного положения.
+ " * a_Position; \n" // Умножаем вершины на матрицу для получения конечного положения
+ "} \n"; // в нормированных координатах экрана.
private final String fragmentShader =
"precision mediump float; \n" // Устанавливаем по умолчанию среднюю точность для переменных. Максимальная точность в фрагментном шейдере не нужна.
+ "uniform sampler2D u_Texture; \n" // The input texture.
+ "varying vec2 v_TexCoordinate; \n" // Interpolated texture coordinate per fragment.
+ "void main() \n" // Точка входа для фрагментного шейдера.
+ "{ \n"
+ " gl_FragColor = texture2D(u_Texture, v_TexCoordinate); \n" // Передаем значения цветов.
+ "}";
Binding:
mMatrixHandle = GLES20.glGetUniformLocation(programHandle, "u_mMatrix"); // = 0
vMatrixHandle = GLES20.glGetUniformLocation(programHandle, "u_vMatrix"); // = 2
pMatrixHandle = GLES20.glGetUniformLocation(programHandle, "u_pMatrix"); // = 3
Drawing:
GLES20.glUniformMatrix4fv(mMatrixHandle, 1, false, modelMatrix, 0);
GLES20.glUniformMatrix4fv(vMatrixHandle, 1, false, viewMatrix, 0);
GLES20.glUniformMatrix4fv(pMatrixHandle, 1, false, projectionMatrix, 0);
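For reference, the working CPU-side path described above would look roughly like this. It is only a sketch: it assumes android.opengl.Matrix, mirrors the multiplication order stated in the question, and mvpMatrixHandle is a placeholder name for the combined-matrix uniform location.
float[] mvMatrix = new float[16];
float[] mvpMatrix = new float[16];
// Matrix.multiplyMM computes result = lhs * rhs
Matrix.multiplyMM(mvMatrix, 0, modelMatrix, 0, viewMatrix, 0);       // model * view
Matrix.multiplyMM(mvpMatrix, 0, mvMatrix, 0, projectionMatrix, 0);   // (model * view) * projection
GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, mvpMatrix, 0);  // upload the single combined matrix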
Actually, my matrix multiplication order in the shader was wrong; the correct order is:
gl_Position = u_pMatrix * u_mMatrix * u_vMatrix * a_Position;
so the correct shader source is:
private final String vertexShader =
"uniform mat4 u_mMatrix; \n" // Матрица модели
+ "uniform mat4 u_vMatrix; \n" // Матрица модели
+ "uniform mat4 u_pMatrix; \n" // Матрица модели
+ "attribute vec4 a_Position; \n" // Информация о положении вершин.
+ "attribute vec2 a_TexCoordinate; \n" // Per-vertex texture coordinate information we will pass in.
+ "varying vec2 v_TexCoordinate; \n" // This will be passed into the fragment shader.
+ "void main() \n" // Начало программы вершинного шейдера.
+ "{ \n"
+ " v_TexCoordinate = a_TexCoordinate; \n"
+ " gl_Position = u_pMatrix * u_mMatrix * u_vMatrix \n" // gl_Position специальные переменные используемые для хранения конечного положения.
+ " * a_Position; \n" // Умножаем вершины на матрицу для получения конечного положения
+ "} \n"; // в нормированных координатах экрана.
Related
I am implementing an Android app with OpenGL and C++. I am rendering some triangles with different Y positions; all of them lie in the X-Z plane, and the camera uses an orthographic projection with a near value of 3, for example.
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LESS);
The orthographic projection is:
projectionMatrix = glm::ortho(-(width/2.f)/zoom, (width/2.f)/zoom, -(height/2.f)/zoom,(height/2.f)/zoom,near,far);
With my actual values this evaluates to:
projectionMatrix = glm::ortho(-100, 100, -197,197,3,50);
Sometimes the triangles seem to have Z-fighting even though they have different Y values; the camera looks at them from above.
vertexShader = "#version 300 es\n"
"precision highp float;\n"
"layout (location = 0) in vec3 position;\n"
"layout (location = 1) in vec3 normal;\n"
"layout (location = 2) in vec2 texCoord;\n"
"\n"
"uniform mat4 model;\n"
"uniform mat4 view;\n"
"uniform mat4 projection;\n"
"\n"
"out vec2 TexCoords;\n"
"out vec3 Normal;\n"
"out vec3 FragPos;\n"
"\n"
"void main(){\n"
" Normal = mat3(transpose(inverse(model))) * normal;\n"
" TexCoords = texCoord;\n"
" FragPos = vec3(model * vec4(position, 1.0));\n"
" gl_Position = projection * view * model * vec4(position, 1.0);\n"
"}";
fragmentShader = "#version 300 es\n"
"precision highp float;\n"
"\n"
"struct DirectLight{\n"
" vec3 Direction;"
" vec3 color;\n"
" float intensity;\n"
"};\n"
"out vec4 glFragColor;\n"
"\n"
"in vec2 TexCoords;\n"
"in vec3 Normal;\n"
"in vec3 FragPos;\n"
"\n"
"uniform int hasTexture;\n"
"uniform int hasSpecular;\n"
"uniform float alpha;\n"
"uniform vec3 ViewPos;\n"
"\n"
"uniform sampler2D map_diffuse;\n"
"uniform sampler2D map_specular;\n"
"\n"
"uniform vec3 diffuse;\n"
"uniform vec3 specular;\n"
"uniform float shininess;\n"
"uniform DirectLight Sun1;\n"
"vec3 calcLight(DirectLight light,vec3 diffColor,vec3 specColor){"
" vec3 normal = normalize(Normal);\n"
" vec3 lightDir = normalize(-light.Direction);\n"
" float diffFactor = max(dot(normal,lightDir), 0.0);\n"
" vec3 _diffuse = light.intensity * light.color * diffFactor * diffColor;\n"
" vec3 viewDir = normalize(ViewPos - FragPos);\n"
" vec3 halfwayDir = normalize(lightDir + viewDir);\n"
" float specFactor = pow(max(dot(normal, halfwayDir), 0.0), shininess);\n"
" vec3 _specular = specColor * light.intensity * light.color * specFactor;\n"
" return _diffuse + _specular ;\n"
"}"
"void main(){\n"
" vec4 diffColor;\n"
" vec4 specColor;\n"
" if(hasTexture == 1)\n"
" diffColor = texture(map_diffuse, TexCoords);\n"
" else\n"
" diffColor = vec4(diffuse,alpha);\n"
" if(hasSpecular == 1)\n"
" specColor = texture(map_specular, TexCoords);\n"
" else\n"
" specColor = vec4(specular,alpha);\n"
" vec4 _ambient = vec4(0.4,0.4,0.4,alpha) * diffColor;\n"
" vec4 result = vec4("
" calcLight(Sun1,diffColor.xyz,specColor.xyz),alpha)"
" + _ambient;"
" float gamma = 2.2;\n"
" result = pow(result, vec4(1.0/gamma));\n"
" glFragColor = result;\n"
"}";
This is my vertex shader:
public static final String vs_Face =
"uniform mat4 transMatrix;" +
"uniform mat4 scaleMatrix;" +
"uniform mat4 rotateZMatrix;" +
"attribute vec4 vPosition;" +
"attribute vec2 a_texCoord;" +
"varying vec2 v_texCoord;" +
"void main() {" +
" gl_Position = transMatrix * rotateZMatrix *scaleMatrix * vec4(vPosition.xyz,1.0) ;" +
" v_texCoord = a_texCoord.xy;" +
"}";
This is my fragment shader:
public static final String fs_Face =
"precision mediump float;" +
"varying vec2 v_texCoord;" +
"uniform sampler2D s_texture;" +
"void main() {" +
" gl_FragColor = texture2D( s_texture, v_texCoord );" +
"}";
This is my draw function:
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendFunc(GLES20.GL_ONE,GLES20.GL_ONE_MINUS_SRC_ALPHA);
//
GLES20.glUseProgram(mProgramMagic);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mGLCubeId[0]);
GLES20.glEnableVertexAttribArray(mPositionHandleFace);
GLES20.glVertexAttribPointer(mPositionHandleFace, 2, GLES20.GL_FLOAT, false, 0, 0);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mMagicTextureCoordinateId[0]);
GLES20.glEnableVertexAttribArray ( mTexCoordLocFace );
GLES20.glVertexAttribPointer(mTexCoordLocFace, 2, GLES20.GL_FLOAT, false, 4 * 2, 0);
//control matrix
GLES20.glUniformMatrix4fv(mTransHandle,1,false,transMatrix,0);
GLES20.glUniformMatrix4fv(mScaleHandle,1,false,scaleMatrix,0);
GLES20.glUniformMatrix4fv(mMvpMatrixHandle,1,false,mvpMatrix,0);
GLES20.glUniformMatrix4fv(mRotateZHandle,1,false,rotateZMatrix,0);
GLES20.glUniformMatrix4fv(mTransZeroMatrixHandle, 1, false, mTransZeroMatrix, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,mTextures[0]);
if(i>=mTotalCount)
{
i=i-mTotalCount;
}
// update texture
if(mPathList.size()-1>=i&&mTextureMap.containsKey(mPathList.get(i)))
{
GLUtils.texSubImage2D(GLES20.GL_TEXTURE_2D,0,0,0,mTextureMap.get(mPathList.get(i)));
}
GLES20.glUniform1i ( mSamplerLocFace, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandleFace);
GLES20.glDisableVertexAttribArray(mTexCoordLocFace);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,0);
If I don't apply the rotateZ matrix, i.e. gl_Position = vec4(vPosition.xyz, 1.0), the texture displays properly, but after I apply the rotateZMatrix its shape changes.
This is the picture after I rotate by 45 degrees; the rabbit ears and nose are the texture:
Picture
I don't know what I did wrong. I have tried multiple variations of this, but none of them seem to work.
I am doing YUV to RGB conversion using OpenGL shaders, but the output only shows green and pink colors. I am using ffmpeg to decode the movie. I am a beginner at this, so I have no idea how to solve it. ffmpeg gives me three YUV buffers, and I am directly assigning these buffers to three textures.
Here are the shaders I am using:
static const char* VERTEX_SHADER =
"attribute vec4 vPosition; \n"
"attribute vec2 a_texCoord; \n"
"varying vec2 tc; \n"
"uniform mat4 u_mvpMat; \n"
"void main() \n"
"{ \n"
" gl_Position = u_mvpMat * vPosition; \n"
" tc = a_texCoord; \n"
"} \n";
static const char* YUV_FRAG_SHADER =
"#ifdef GL_ES \n"
"precision highp float; \n"
"#endif \n"
"varying vec2 tc; \n"
"uniform sampler2D TextureY; \n"
"uniform sampler2D TextureU; \n"
"uniform sampler2D TextureV; \n"
"uniform float imageWidth; \n"
"uniform float imageHeight; \n"
"void main(void) \n"
"{ \n"
"float nx, ny; \n"
"vec3 yuv; \n"
"vec3 rgb; \n"
"nx = tc.x; \n"
"ny = tc.y; \n"
"yuv.x = texture2D(TextureY, tc).r; \n"
"yuv.y = texture2D(TextureU, vec2(nx/2.0, ny/2.0)).r - 0.5; \n"
"yuv.z = texture2D(TextureV, vec2(nx/2.0, ny/2.0)).r - 0.5; \n"
// Using BT.709 which is the standard for HDTV
"rgb = mat3( 1, 1, 1, \n"
"0, -0.18732, 1.8556, \n"
"1.57481, -0.46813, 0)*yuv;\n"
// BT.601, which is the standard for SDTV is provided as a reference
//"rgb = mat3( 1, 1, 1, \n"
// "0, -0.34413, 1.772, \n"
// "1.402, -0.71414, 0) * yuv; \n"
"gl_FragColor = vec4(rgb, 1.0); \n"
"} \n";
Output:
What am I doing wrong? Please help me out with this.
Thank You.
UPDATE:
While debugging the ffmpeg decoding, I found that the decoder gives output in the PIX_FMT_YUV420P format. Do I have to make some tweaks to get correct image colors?
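For reference, with PIX_FMT_YUV420P the U and V planes are only width/2 x height/2, so each plane is uploaded at its own size. Below is a minimal upload sketch, not the asker's actual code: it assumes tightly packed planes from ffmpeg and GL_LUMINANCE textures, and yTexId/uTexId/vTexId, yBuf/uBuf/vBuf, width and height are placeholder names.
GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1); // plane rows are not padded to 4 bytes

// Y plane: full resolution on unit 0
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yTexId);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
        width, height, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, yBuf);

// U plane: half width and height, on unit 1
GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, uTexId);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
        width / 2, height / 2, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, uBuf);

// V plane: half width and height, on unit 2
GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, vTexId);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
        width / 2, height / 2, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, vBuf);
Each sampler uniform (TextureY, TextureU, TextureV) then needs to be set with glUniform1i to the texture unit index it was bound to.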
I'm not sure about this transformation:
"rgb = mat3( 1, 1, 1, \n"
"0, -0.18732, 1.8556, \n"
"1.57481, -0.46813, 0)*yuv;\n"
While refreshing my memory on matrix * vector operations in GLSL using this page as a reference, I think you either need to transpose the coefficient matrix or move yuv to the front of the operation, i.e., yuv * mat3(...). Performing the operation as mat3(...) * yuv means:
r = y * 1 + u * 1 + v * 1
g = y * 0 + u * -0.18732 + v * 1.8556
b = y * 1.57481 + u * -0.46813 + v * 0
And these conversions are very incorrect.
As another reference, here's a small, complete, sample GLSL shader program that converts YUV -> RGB that may be of some guidance: http://www.fourcc.org/source/YUV420P-OpenGL-GLSLang.c
How can I pass a float value to the fragment shader?
This is my code on android:
int aUseTexture = GLES20.glGetAttribLocation(program, "uUseTexture");
GLES20.glUniform1f(aUseTexture, 1.0f);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
Here is my shader:
String verticesShader =
"uniform mat4 uScreen;\n" +
"attribute vec2 aPosition;\n" +
"attribute vec3 aColor;\n" +
"attribute vec2 aTexPos; \n" +
"varying vec2 vTexPos; \n" +
"varying vec3 vColor;\n" +
"void main() {\n" +
" vTexPos = aTexPos; \n" +
" gl_Position = uScreen * vec4(aPosition.xy, 0.0, 1.0);\n" +
" vColor = aColor;\n" +
"}";
// Our fragment shader. Just return vColor.
// If you look at this source and just said 'WTF?', remember
// that all the attributes are defined in the VERTEX shader and
// all the 'varying' vars are considered OUTPUT of vertex shader
// and INPUT of the fragment shader. Here we just use the color
// we received and add a alpha value of 1.
String fragmentShader =
"uniform float uUseTexture; \n" +
"uniform sampler2D uTexture;\n" +
"precision mediump float;\n"+
"varying vec2 vTexPos; \n" +
"varying vec3 vColor;\n" +
"void main(void)\n" +
"{\n" +
" if ( uUseTexture != 1.0 ) \n" +
" gl_FragColor = vec4(vColor.xyz, 1); \n" +
" else \n" +
" gl_FragColor = texture2D(uTexture, vTexPos); \n" +
//" gl_FragColor = vec4(vColor.xyz, 1);\n" +
"}";
You can see the if statement in the fragment shader; that is where I check the value: if I pass in 1.0 it should use the texture, otherwise the color.
You are probably using the wrong function call for a uniform variable. Try glGetUniformLocation() as follows:
int aUseTexture = GLES20.glGetUniformLocation(program, "uUseTexture");
Also, the floating-point test (uUseTexture != 1.0) may not always be reliable. You may want to use an integer type instead.
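A minimal sketch combining both suggestions (this assumes the fragment shader is changed to declare "uniform int uUseTexture;" and to test it with == 1):
int uUseTexture = GLES20.glGetUniformLocation(program, "uUseTexture"); // uniform, not attribute
GLES20.glUseProgram(program);        // the program must be active before setting uniforms
GLES20.glUniform1i(uUseTexture, 1);  // 1 = sample the texture, 0 = use the vertex color
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);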
As far as I know, you have to pass the value through the vertex shader before it can get to the fragment shader. For example, add "uniform float uUseTexture_in; \n" and "varying float uUseTexture; \n" at the top of the vertex shader, and in its main function add "uUseTexture = uUseTexture_in;". Then your shader should work; see the sketch below.
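A minimal sketch of that approach, keeping the attribute and varying names from the question (the renamed uniform uUseTexture_in is an assumption of this sketch):
String verticesShader =
    "uniform mat4 uScreen;\n" +
    "uniform float uUseTexture_in;\n" +      // set from Java with glUniform1f
    "attribute vec2 aPosition;\n" +
    "attribute vec3 aColor;\n" +
    "attribute vec2 aTexPos;\n" +
    "varying vec2 vTexPos;\n" +
    "varying vec3 vColor;\n" +
    "varying float uUseTexture;\n" +         // forwarded to the fragment shader
    "void main() {\n" +
    "  uUseTexture = uUseTexture_in;\n" +
    "  vTexPos = aTexPos;\n" +
    "  vColor = aColor;\n" +
    "  gl_Position = uScreen * vec4(aPosition.xy, 0.0, 1.0);\n" +
    "}";
String fragmentShader =
    "precision mediump float;\n" +           // precision first, before any float declarations
    "uniform sampler2D uTexture;\n" +
    "varying vec2 vTexPos;\n" +
    "varying vec3 vColor;\n" +
    "varying float uUseTexture;\n" +
    "void main(void) {\n" +
    "  if (uUseTexture != 1.0)\n" +
    "    gl_FragColor = vec4(vColor.xyz, 1.0);\n" +
    "  else\n" +
    "    gl_FragColor = texture2D(uTexture, vTexPos);\n" +
    "}";
The Java side would then look up "uUseTexture_in" with glGetUniformLocation (as noted in the other answer) and set it with glUniform1f.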
I'm trying to create a 3D scene: a textured Earth with clouds. I think I can create the clouds with a transparent sphere and a cloud texture on it (if I'm wrong, please correct me). So I have the code below:
ShaderProgram shader;
Mesh mesh, cloudMesh;
Texture texture, cloudTexture;
Matrix4 matrix = new Matrix4();
public void create() {
String vertexShader = "attribute vec4 a_position; \n"
+ "attribute vec4 a_color;\n"
+ "attribute vec2 a_texCoords;\n"
+ "uniform mat4 u_worldView;\n"
+ "varying vec4 v_color;"
+ "varying vec2 v_texCoords;"
+ "void main() \n"
+ "{ \n"
+ " v_color = vec4(1, 1, 1, 1); \n"
+ " v_texCoords = a_texCoords; \n"
+ " gl_Position = u_worldView * a_position; \n"
+ "} \n";
String fragmentShader = "#ifdef GL_ES\n"
+ "precision mediump float;\n"
+ "#endif\n"
+ "varying vec4 v_color;\n"
+ "varying vec2 v_texCoords;\n"
+ "uniform sampler2D u_texture;\n"
+ "void main() \n"
+ "{ \n"
+ " gl_FragColor = v_color * texture2D(u_texture, v_texCoords);\n"
+ "}";
shader = new ShaderProgram(vertexShader, fragmentShader);
if (shader.isCompiled() == false) {
System.exit(0);
}
mesh = ObjLoader.loadObj(Gdx.files.internal("objects/earth.obj").read());
cloudMesh = ObjLoader.loadObj(Gdx.files.internal("objects/cloud_sphere.obj").read());
texture = new Texture(Gdx.files.internal("images/earthmap.jpg"));
cloudTexture = new Texture(Gdx.files.internal("images/earth_clouds_map.jpg"));
}
public void render() {
angle += Gdx.graphics.getDeltaTime() * 30;
matrix.setToRotation(axis, angle);
Gdx.graphics.getGL20().glViewport(-10, 10, Gdx.graphics.getWidth(),
Gdx.graphics.getHeight());
Gdx.graphics.getGL20().glClearColor(0.2f, 0.2f, 0.2f, 1);
Gdx.graphics.getGL20().glClear(GL20.GL_COLOR_BUFFER_BIT);
Gdx.graphics.getGL20().glEnable(GL20.GL_TEXTURE_2D);
Gdx.graphics.getGL20().glEnable(GL20.GL_BLEND);
Gdx.graphics.getGL20().glBlendFunc(GL20.GL_SRC_ALPHA, GL20.GL_ONE_MINUS_SRC_ALPHA);
texture.bind();
shader.begin();
shader.setUniformMatrix("u_worldView", matrix);
shader.setUniformi("u_texture", 0);
mesh.render(shader, GL20.GL_TRIANGLE_STRIP);
shader.end();
//SHOULD I USE SHADER PROGRAM LIKE BELOW?
cloudTexture .bind();
shader.begin();
shader.setUniformMatrix("u_worldView", matrix);
shader.setUniformi("u_texture", 0);
cloudMesh.render(shader, GL20.GL_TRIANGLE_STRIP);
shader.end();
}
However, I don't see any textures on the spheres. Where is my mistake? If someone has any ideas, links or examples, it would be appreciated!
Before loading your textures you have to select a "slot" (texture unit) that each texture will be bound to:
Gdx.graphics.getGL20().glActiveTexture(GL20.GL_TEXTURE0);
texture = new Texture(Gdx.files.internal("images/earthmap.jpg"));
texture.bind();
Gdx.graphics.getGL20().glActiveTexture(GL20.GL_TEXTURE1);
cloudTexture = new Texture(Gdx.files.internal("images/earth_clouds_map.jpg"));
cloudTexture.bind();
In your render method you then bind the textures and tell the shader which "slot" to sample from:
shader.setUniformi("u_texture", 0);
...
...
shader.setUniformi("u_texture", 1);
that should be all...
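Putting it together, the render method could look roughly like this. It is only a sketch, assuming the fields from the question and the texture-unit setup above (GL_TRIANGLES is used on the assumption that the OBJ loader produces triangle meshes):
Gdx.graphics.getGL20().glActiveTexture(GL20.GL_TEXTURE0);
texture.bind();           // earth texture on unit 0
Gdx.graphics.getGL20().glActiveTexture(GL20.GL_TEXTURE1);
cloudTexture.bind();      // cloud texture on unit 1

shader.begin();
shader.setUniformMatrix("u_worldView", matrix);
shader.setUniformi("u_texture", 0);        // earth sphere samples unit 0
mesh.render(shader, GL20.GL_TRIANGLES);
shader.setUniformi("u_texture", 1);        // cloud sphere samples unit 1
cloudMesh.render(shader, GL20.GL_TRIANGLES);
shader.end();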