How to fill a polygon in Android using OpenGL ES 2.0?

I want to draw a polygon as shown in the images and fill it with color.
I have tried building the polygon out of triangles, but that did not help me. If anyone knows how, please help me out.
OpenGLProjectRenderer.java
public class OpenGLProjectRenderer implements Renderer {
List<Float> points = new ArrayList<Float>();
private static final String TAG = "Renderer";
private static final int POSITION_COMPONENT_COUNT = 2;
private static final int BYTES_PER_FLOAT = 4;
private FloatBuffer vertexData = ByteBuffer
.allocateDirect(20000 * BYTES_PER_FLOAT)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
private Context context;
private int program;
private static final String A_POSITION = "a_Position";
private int aPositionLocation;
private static final String U_COLOR = "u_Color";
private int uColorLocation;
private HashMap<Integer, ArrayList<Float>> lines = new HashMap<Integer, ArrayList<Float>>();
int position = 0;
public OpenGLProjectRenderer(Context context) {
this.context = context;
}
@Override
public void onDrawFrame(GL10 gl) {
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUniform4f(uColorLocation, 1.0f, 0.0f, 0.0f, 1.0f);
for (int p = 0; p < lines.size(); p++) {
vertexData.put(toFloatarray(lines.get(p)));
int vertices = (int) lines.get(p).size() / 2;
int b = vertices % 4 == 0 ? vertices-1 : vertices - 2;
Log.d(TAG,""+lines.size());
glDrawArrays(GLES20.GL_LINE_LOOP, 0, lines.size());
vertexData.clear();
}
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
gl.glViewport(0, 0, width, height);
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
String vertexShaderSource = TextResourceReader.readTextFileFromResource(
context, R.raw.simple_vertex_shader);
String fragmentShaderSource = TextResourceReader.readTextFileFromResource(
context, R.raw.simple_fragment_shader);
int vertexShader = ShaderHelper.compileVertexShader(vertexShaderSource);
int fragmentShader = ShaderHelper
.compileFragmentShader(fragmentShaderSource);
program = ShaderHelper.linkProgram(vertexShader, fragmentShader);
ShaderHelper.validateProgram(program);
glUseProgram(program);
uColorLocation = glGetUniformLocation(program, U_COLOR);
aPositionLocation = glGetAttribLocation(program, A_POSITION);
vertexData.position(0);
glVertexAttribPointer(aPositionLocation, POSITION_COMPONENT_COUNT,
GL_FLOAT, false, 0, vertexData);
glEnableVertexAttribArray(aPositionLocation);
}
ArrayList<Float> temp = new ArrayList<Float>();
public void handleTouchPress(float normalizedX, float normalizedY) {
Log.v(TAG + " handleTouchPress", points.size() + "");
temp.add(normalizedX);
temp.add(normalizedY);
lines.put(position, temp);
}
public void handleTouchDrag(float normalizedX, float normalizedY) {
Log.v(TAG + " handleTouchDrag", points.size() + "");
}
public float[] toFloatarray(List<Float> floatList) {
float[] floatArray = new float[floatList.size()];
int i = 0;
for (Float f : floatList) {
floatArray[i++] = (f != null ? f : Float.NaN);
}
return floatArray;
}
public void handleTouchUp(float normalizedX, float normalizedY) {
Log.v(TAG + " handleTouchUp", points.size() + "");
position++;
}}
Using the above code I am able to draw the polygon outline with GL_LINE_LOOP, but I am not able to fill the created polygon.

OpenGL ES 2.0 only supports triangles as its basic filled primitive. There are three ways of drawing polygons with triangles:
1) Triangles
2) Triangle strips
3) Triangle fans
In your case you can use a triangle fan to fill the polygon, provided you know a point inside it (for a convex polygon any interior point, such as the centroid, will do).
Here is an example of drawing a circle.
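Along the same lines, here is a minimal sketch of how one of the touch-drawn outlines from the question could be filled with GL_TRIANGLE_FAN. It reuses the renderer's existing fields (vertexData, aPositionLocation, uColorLocation, POSITION_COMPONENT_COUNT, toFloatarray); using the centroid as the fan center is an assumption that only fills convex outlines correctly:
// A fan needs the center point first, then the outline, then the first
// outline vertex again so the last triangle closes the shape.
private void drawFilledPolygon(List<Float> outline) {
    int vertexCount = outline.size() / 2;               // (x, y) pairs
    if (vertexCount < 3) {
        return;                                         // nothing to fill yet
    }
    // Use the centroid of the outline as the interior fan center.
    float cx = 0f, cy = 0f;
    for (int i = 0; i < vertexCount; i++) {
        cx += outline.get(2 * i);
        cy += outline.get(2 * i + 1);
    }
    cx /= vertexCount;
    cy /= vertexCount;

    vertexData.clear();
    vertexData.put(cx).put(cy);                         // fan center
    vertexData.put(toFloatarray(outline));              // outline vertices
    vertexData.put(outline.get(0)).put(outline.get(1)); // close the fan
    vertexData.position(0);

    glVertexAttribPointer(aPositionLocation, POSITION_COMPONENT_COUNT,
            GL_FLOAT, false, 0, vertexData);
    glUniform4f(uColorLocation, 0.0f, 0.5f, 1.0f, 1.0f); // fill color
    glDrawArrays(GLES20.GL_TRIANGLE_FAN, 0, vertexCount + 2);
}
Calling this from onDrawFrame for each entry in lines, before the GL_LINE_LOOP pass, would fill the interior; a concave outline would need real triangulation, for example ear clipping.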

Related

android opengl es 2 display after several rotate error

I am trying to draw lines connected to each other; let's say they form a rectangle, and at each corner I also have a point. When I tested the code on my Samsung Galaxy Note 4, after several touch rotations the whole rectangle collapsed into a single point at the center, and I could not figure out why. The code works fine in the emulator. I store the line and point data as array lists, which are used to create the Line and Point objects that are rendered.
public class GLRender implements GLSurfaceView.Renderer {
private final float[] mMVPMatrix = new float[16];
private final float[] mProjectionMatrix = new float[16];
private final float[] mViewMatrix = new float[16];
private float[] mRotationMatrix = new float[16];
private float[] mVRMatrix = new float[16];
private static ArrayList<Line> DrLine = new ArrayList<Line>();
private static ArrayList<Point> DrPoint = new ArrayList<Point>();
private float ratio;
private float Joint1;
private float Joint2;
private float x1, y1, z1, x2, y2, z2;
private float px, py, pz, ps;
private float[] nearN = new float[4];
private static ArrayList<ArrayList<String>> LineArray = new ArrayList<ArrayList<String>>();
private ArrayList<String> L = new ArrayList<String>();
private ArrayList<String> P = new ArrayList<String>();
private static ArrayList<ArrayList<String>> PointArray = new ArrayList<ArrayList<String>>();
private static ArrayList<String> EmptyLine = new ArrayList<String>();
private static ArrayList<String> EmptyPoint = new ArrayList<String>();
private static ArrayList<ArrayList<ArrayList<String>>> UndoL = new ArrayList<ArrayList<ArrayList<String>>>();
private static ArrayList<ArrayList<ArrayList<String>>> UndoP = new ArrayList<ArrayList<ArrayList<String>>>();
public GLRender(Activity activity) {
}
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
// Set the background frame color
//GLES20.glClearColor(0.53f, 0.53f, 0.53f, 1.0f);
GLES20.glClearColor(0.474f, 0.537f, 0.078f, 1.0f);
}
public void onDrawFrame(GL10 unused) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
Drawer();
Line XLine = new Line();
XLine.SetVerts(0.0f, 0.0f, 0.0f, 1.0f / 10, 0.0f, 0.0f);
XLine.SetColor(1.0f, 0.0f, 0.0f, 1.0f);
Line YLine = new Line();
YLine.SetVerts(0.0f, 0.0f, 0.0f, 0.0f, 1.0f / 10, 0.0f);
YLine.SetColor(0.0f, 1.0f, 0.0f, 1.0f);
Line ZLine = new Line();
ZLine.SetVerts(0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f / 10);
ZLine.SetColor(0.0f, 0.0f, 1.0f, 1.0f);
Matrix.setIdentityM(mViewMatrix, 0);
Matrix.setLookAtM(mViewMatrix, 0, 0, 0, 1.0f , 0f, 0f, 0f, 0f, 1.0f, 0.0f);
Matrix.setIdentityM(mRotationMatrix, 0);
Matrix.rotateM(mRotationMatrix, 0, mXAngle, 0, 1f, 0);
Matrix.rotateM(mRotationMatrix, 0, mYAngle, 1f, 0, 0);
Matrix.setIdentityM(mVRMatrix, 0);
Matrix.multiplyMM(mVRMatrix, 0, mViewMatrix, 0, mRotationMatrix, 0);
Matrix.setIdentityM(mMVPMatrix, 0);
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mVRMatrix, 0);
Matrix.scaleM(mMVPMatrix,0,mScale,mScale,mScale);
if (DrLine.size() > 0) {
for (Line LineX : DrLine) {
LineX.draw(mMVPMatrix);
}
}
if (DrPoint.size() > 0) {
for (Point PointX : DrPoint) {
PointX.draw(mMVPMatrix);
}
}
XLine.draw(mMVPMatrix);
YLine.draw(mMVPMatrix);
ZLine.draw(mMVPMatrix);
}
public void onSurfaceChanged(GL10 unused, int width, int height) {
GLES20.glViewport(0, 0, width, height);
ratio = (float) width / height;
//Matrix.frustumM(mProjectionMatrix, 0, -ratio*10, ratio*10, -10, 10, 1, 9);
Matrix.orthoM(mProjectionMatrix, 0, -ratio, ratio, -1.0f, 1.0f, -5.0f, 5.0f);
}
public static int loadShader(int type, String shaderCode){
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
private Float TempMaxPointHolder = 0.0f;
private Float MaxPointHolder = 0.0f;
public Float Scaler(){
Float TempScaler = 1.0f;
Iterator<ArrayList<String>> MaxPointFinder = PointArray.iterator();
while (MaxPointFinder.hasNext()){
ArrayList<String> MaxPoint = MaxPointFinder.next();
TempMaxPointHolder = Math.abs(Math.max(Math.max(Float.parseFloat(MaxPoint.get(1)),Float.parseFloat(MaxPoint.get(2))),Float.parseFloat(MaxPoint.get(3))));
if (TempMaxPointHolder > MaxPointHolder){
MaxPointHolder = TempMaxPointHolder;
}
}
TempScaler = 0.9f / MaxPointHolder;
return TempScaler;
}
public void Drawer(){
Float Scaler = Scaler();
Integer Lindex = 0;
ArrayList<Point> TempDrPoint = new ArrayList<Point>();
ArrayList<Line> TempDrLine = new ArrayList<Line>();
Iterator<ArrayList<String>> Litr = LineArray.iterator();
while (Litr.hasNext()) {
L = Litr.next();
Joint1 = Float.parseFloat(L.get(1));
Joint2 = Float.parseFloat(L.get(2));
Iterator<ArrayList<String>> PitrL = PointArray.iterator();
while (PitrL.hasNext()){
P = PitrL.next();
if (Float.parseFloat(P.get(0)) == Joint1) {
x1 = Float.parseFloat(P.get(1)) * Scaler;
y1 = Float.parseFloat(P.get(2)) * Scaler;
z1 = Float.parseFloat(P.get(3)) * Scaler;
}
if (Float.parseFloat(P.get(0)) == Joint2) {
x2 = Float.parseFloat(P.get(1)) * Scaler;
y2 = Float.parseFloat(P.get(2)) * Scaler;
z2 = Float.parseFloat(P.get(3)) * Scaler;
}
}
Line TempLine = new Line();
TempLine.SetVerts(x1, y1, z1, x2, y2, z2);
if (L.get(3) == "0") {
TempLine.SetColor(0.0f, 0.0f, 0.0f, 1.0f);
}
else if (L.get(3) == "1"){
TempLine.SetColor(0.66f, 0.73f, 0.21f, 1.0f);
}
TempDrLine.add(TempLine);
Lindex = Lindex + 1;
}
setDrLine(TempDrLine);
Integer Pindex = 0;
Iterator<ArrayList<String>> Pitr = PointArray.iterator();
while (Pitr.hasNext()){
P = Pitr.next();
px = Float.parseFloat(P.get(1)) * Scaler;
py = Float.parseFloat(P.get(2)) * Scaler;
pz = Float.parseFloat(P.get(3)) * Scaler;
ps = Float.parseFloat(P.get(4));
Point TempPoint = new Point();
TempPoint.SetPointVerts(px, py, pz);
if (ps == 0.0f) {
TempPoint.SetPointColor(0.65f, 0.37f, 0.11f, 1.0f);
}
else if (ps == 1.0f) {
TempPoint.SetPointColor(0.68f, 0.07f, 0.32f, 1.0f);
}
TempDrPoint.add(TempPoint);
Pindex = Pindex + 1;
}
setDrPoint(TempDrPoint);
}
public volatile float mXAngle;
public volatile float mYAngle;
public ArrayList<Line> getDrLine() {
return DrLine;
}
public ArrayList<Point> getDrPoint(){
return DrPoint;
}
public void setDrLine(ArrayList<Line> XDrLine) {
DrLine = XDrLine;
}
public void setDrPoint(ArrayList<Point> XDrPoint) {
DrPoint = XDrPoint;
}
public float getXAngle() {
return mXAngle;
}
public float getYAngle(){
return mYAngle;
}
public void setAngleX(float Xangle) {
mXAngle = Xangle;
}
public void setAngleY(float Yangle) {
mYAngle = Yangle;
}
public volatile float mScale = 1;
public volatile float mXFocus;
public volatile float mYFocus;
public void setZoom(float scale){
mScale = scale;
}
public void setFocus(float XFocus, float YFocus){
mXFocus = XFocus;
mYFocus = YFocus;
}
public float getmScale(){
return mScale;
}
public float getmXFocus(){
return mXFocus;
}
public float getmYFocus(){
return mYFocus;
}
public void setLineArray(ArrayList<ArrayList<String>> XLine){
LineArray = XLine;
}
public ArrayList<ArrayList<String>> getLineArray(){
return LineArray;
}
public void setUndoL(ArrayList<ArrayList<String>> UndoLine){
this.UndoL.add(new ArrayList<ArrayList<String>>(UndoLine));
}
public ArrayList<ArrayList<ArrayList<String>>> getUndoL() {
return UndoL;
}
public void setUndoP(ArrayList<ArrayList<String>> UndoPoint){
this.UndoP.add(new ArrayList<ArrayList<String>>(UndoPoint));
}
public ArrayList<ArrayList<ArrayList<String>>> getUndoP() {
return UndoP;
}
public void setPointArray(ArrayList<ArrayList<String>> XPoint){
PointArray = XPoint;
}
public ArrayList<ArrayList<String>> getPointArray(){
return PointArray;
}
}
public class Line {
private FloatBuffer VertexBuffer;
private final String VertexShaderCode =
// This matrix member variable provides a hook to manipulate
// the coordinates of the objects that use this vertex shader
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"void main() {" +
// the matrix must be included as a modifier of gl_Position
" gl_Position = uMVPMatrix * vPosition;" +
"}";
private final String FragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
"void main() {" +
" gl_FragColor = vColor;" +
"}";
protected int GlProgram;
protected int PositionHandle;
protected int ColorHandle;
protected int MVPMatrixHandle;
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
static float LineCoords[] = {
0.0f, 0.0f, 0.0f,
1.0f, 0.0f, 0.0f
};
private final int VertexCount = LineCoords.length / COORDS_PER_VERTEX;
private final int VertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex
// Set color with red, green, blue and alpha (opacity) values
float color[] = { 0.0f, 0.0f, 0.0f, 1.0f };
public Line() {
// initialize vertex byte buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(
// (number of coordinate values * 4 bytes per float)
LineCoords.length * 4);
// use the device hardware's native byte order
bb.order(ByteOrder.nativeOrder());
// create a floating point buffer from the ByteBuffer
VertexBuffer = bb.asFloatBuffer();
// add the coordinates to the FloatBuffer
VertexBuffer.put(LineCoords);
// set the buffer to read the first coordinate
VertexBuffer.position(0);
int vertexShader = GLRender.loadShader(GLES20.GL_VERTEX_SHADER, VertexShaderCode);
int fragmentShader = GLRender.loadShader(GLES20.GL_FRAGMENT_SHADER, FragmentShaderCode);
GlProgram = GLES20.glCreateProgram(); // create empty OpenGL ES Program
GLES20.glAttachShader(GlProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(GlProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(GlProgram); // creates OpenGL ES program executables
}
public void SetVerts(float v0, float v1, float v2, float v3, float v4, float v5) {
LineCoords[0] = v0;
LineCoords[1] = v1;
LineCoords[2] = v2;
LineCoords[3] = v3;
LineCoords[4] = v4;
LineCoords[5] = v5;
VertexBuffer.put(LineCoords);
// set the buffer to read the first coordinate
VertexBuffer.position(0);
}
public void SetColor(float red, float green, float blue, float alpha) {
color[0] = red;
color[1] = green;
color[2] = blue;
color[3] = alpha;
}
public void draw(float[] mvpMatrix) {
// Add program to OpenGL ES environment
GLES20.glUseProgram(GlProgram);
// get handle to vertex shader's vPosition member
PositionHandle = GLES20.glGetAttribLocation(GlProgram, "vPosition");
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(PositionHandle);
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(PositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
VertexStride, VertexBuffer);
// get handle to fragment shader's vColor member
ColorHandle = GLES20.glGetUniformLocation(GlProgram, "vColor");
// Set color for drawing the triangle
GLES20.glUniform4fv(ColorHandle, 1, color, 0);
// get handle to shape's transformation matrix
MVPMatrixHandle = GLES20.glGetUniformLocation(GlProgram, "uMVPMatrix");
//GLRender.checkGlError("glGetUniformLocation");
// Apply the projection and view transformation
GLES20.glUniformMatrix4fv(MVPMatrixHandle, 1, false, mvpMatrix, 0);
//GLRender.checkGlError("glUniformMatrix4fv");
GLES20.glLineWidth(5);
// Draw the triangle
GLES20.glDrawArrays(GLES20.GL_LINES, 0, VertexCount);
// Disable vertex array
GLES20.glDisableVertexAttribArray(PositionHandle);
}
}
public class GLSurface extends GLSurfaceView {
//public static GLRender xRender;
ScaleGestureDetector ScaleDetect;
MainActivity mMain;
OpenGL XOPL = mMain.xOpenGL;
public GLSurface(Context context, AttributeSet attrs){
super(context, attrs);
// Render the view only when there is a change in the drawing data
//setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
ScaleDetect = new ScaleGestureDetector(context, new ScaleDetectorListener());
}
private float mPreviousX;
private float mPreviousY;
float density = this.getResources().getDisplayMetrics().density;
private static final int MAX_CLICK_DURATION = 300;
private long pressStartTime;
@Override
public boolean onTouchEvent(MotionEvent e) {
ScaleDetect.onTouchEvent(e);
float x = e.getX();
float y = e.getY();
switch (e.getAction()) {
case MotionEvent.ACTION_MOVE:
if(!ScaleDetect.isInProgress()) {
float xMoveRange = x - mPreviousX;
float yMoveRange = y - mPreviousY;
float dx = 0.0f;
float dy = 0.0f;
float w = this.getWidth();
float h = this.getHeight();
if (Math.abs(xMoveRange) > w / 100 && Math.abs(yMoveRange) < h / 100){
dx = xMoveRange / density / 2.0f;
XOPL.xRender.setAngleX(XOPL.xRender.getXAngle() + dx);
}
if (Math.abs(xMoveRange) < w / 100 && Math.abs(yMoveRange) > h / 100){
dy = yMoveRange / density / 2.0f;
XOPL.xRender.setAngleY(XOPL.xRender.getYAngle() + dy);
}
XOPL.myGLView.requestRender();
}
break;
case MotionEvent.ACTION_DOWN:
pressStartTime = System.currentTimeMillis();
break;
case MotionEvent.ACTION_UP:
long pressDuration = System.currentTimeMillis() - pressStartTime;
if (pressDuration < MAX_CLICK_DURATION) {
float Nx = (x - Float.parseFloat(Double.toString(this.getWidth())) * 0.5f) / (Float.parseFloat(Double.toString(this.getWidth())) * 0.5f);
float Ny = (y - Float.parseFloat(Double.toString(this.getHeight())) * 0.5f) / (Float.parseFloat(Double.toString(this.getHeight())) * 0.5f);
float Nz = 1.0f;
XOPL.xRender.setNCoordinate(Nx, Ny, Nz);
XOPL.xRender.Selection();
XOPL.myGLView.requestRender();
}
break;
}
mPreviousX = x;
mPreviousY = y;
return true;
}
private float sizeCoef = 1;
public class ScaleDetectorListener extends ScaleGestureDetector.SimpleOnScaleGestureListener {
float scaleFocusX = 0;
float scaleFocusY = 0;
public boolean onScale(ScaleGestureDetector arg0) {
float scale = arg0.getScaleFactor() * sizeCoef;
sizeCoef = scale;
XOPL.xRender.setZoom(sizeCoef);
XOPL.myGLView.requestRender();
return true;
}
public boolean onScaleBegin(ScaleGestureDetector arg0) {
invalidate();
scaleFocusX = arg0.getFocusX();
scaleFocusY = arg0.getFocusY();
XOPL.xRender.setFocus(scaleFocusX,scaleFocusY);
return true;
}
public void onScaleEnd(ScaleGestureDetector arg0) {
scaleFocusX = 0;
scaleFocusY = 0;
XOPL.xRender.setFocus(scaleFocusX,scaleFocusY);
}
}
}
Not sure if this is the problem, but watch out for things like:
XOPL.xRender.setAngleX(XOPL.xRender.getXAngle() + dx)
... especially when using mediump precision in shaders. Generally I'd recommend changing the setAngle functions to exploit rotational symmetry and wrap the value around, so the absolute range used at the API level is ±Pi.
In your current code, if the user keeps swiping in one direction, eventually you'll run out of bits and everything will either stop rotating (at best) or fail with infinities or NaN results (at worst).
Note that using "-Pi to +Pi" is preferred over "0 to +2Pi", because the sign bit is free in most floating-point representations, so it preserves more dynamic precision.
Example code:
public float wrapRadians(float angle) {
    // If the angle is negative, ensure the value stays above minus pi
    if (angle < 0) {
        while (angle < -Math.PI) {
            angle += 2 * Math.PI;
        }
    // Else the angle is positive, so ensure the value stays below pi
    } else {
        while (angle > Math.PI) {
            angle -= 2 * Math.PI;
        }
    }
    return angle;
}

public void setXAngle(float XAngle) {
    mXAngle = wrapRadians(XAngle);
}

public void setYAngle(float YAngle) {
    mYAngle = wrapRadians(YAngle);
}
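For example, with this wrapping in place a swipe that has accumulated an angle of 4.0 rad would be stored as 4.0 - 2π ≈ -2.28 rad, so the value stays inside ±π no matter how long the user keeps rotating.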

Android OpenGL ES 2.0 - Texture black

I am using OpenGL ES 2.0 to create a simple rectangle, but I am not able to get the textures working. My renderer and other class files are below.
TextureRenderer.java
public class TextureRenderer implements Renderer {
private final Context context;
private final float[] projectionMatrix = new float[16];
private final float[] modelMatrix = new float[16];
private Table table;
private Mallet mallet;
private TextureShaderProgram textureProgram;
private ColorShaderProgram colorProgram;
private int texture;
public TextureRenderer(Context context) {
this.context = context;
}
@Override
public void onDrawFrame(GL10 gl) {
// TODO Auto-generated method stub
// Clear the rendering surface.
glClear(GL_COLOR_BUFFER_BIT);
// Draw the table.
textureProgram.useProgram();
textureProgram.setUniforms(projectionMatrix, texture);
table.bindData(textureProgram);
table.draw();
// Draw the mallets.
colorProgram.useProgram();
colorProgram.setUniforms(projectionMatrix);
mallet.bindData(colorProgram);
mallet.draw();
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
// TODO Auto-generated method stub
// Set the OpenGL viewport to fill the entire surface.
glViewport(0, 0, width, height);
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
// TODO Auto-generated method stub
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
table = new Table();
mallet = new Mallet();
textureProgram = new TextureShaderProgram(context);
colorProgram = new ColorShaderProgram(context);
texture = TextureHelper.loadTexture(context, R.drawable.air_hockey_surface);
Log.d("TAG",""+texture);
}}
Table.java
public class Table {
private static final int POSITION_COMPONENT_COUNT = 2;
private static final int TEXTURE_COORDINATES_COMPONENT_COUNT = 2;
private static final int STRIDE = (POSITION_COMPONENT_COUNT
+ TEXTURE_COORDINATES_COMPONENT_COUNT) * Constants.BYTES_PER_FLOAT;
private final VertexArray vertexArray;
private static final float[] VERTEX_DATA = {
// Order of coordinates: X, Y, S, T
// Triangle Fan
0f, 0f, 0.5f, 0.5f,
-0.5f, -0.8f, 0f, 0.9f,
0.5f, -0.8f, 1f, 0.9f,
0.5f, 0.8f, 1f, 0.1f,
-0.5f, 0.8f, 0f, 0.1f,
-0.5f, -0.8f, 0f, 0.9f };
public Table() {
vertexArray = new VertexArray(VERTEX_DATA);
}
public void bindData(TextureShaderProgram textureProgram) {
vertexArray.setVertexAttribPointer(0,
textureProgram.getPositionAttributeLocation(),
POSITION_COMPONENT_COUNT, STRIDE);
vertexArray.setVertexAttribPointer(POSITION_COMPONENT_COUNT,
textureProgram.getTextureCoordinatesAttributeLocation(),
TEXTURE_COORDINATES_COMPONENT_COUNT, STRIDE);
}
public void draw() {
glDrawArrays(GL_TRIANGLE_FAN, 0, 6);
}}
Mallet.java
public class Mallet {
private static final int POSITION_COMPONENT_COUNT = 2;
private static final int COLOR_COMPONENT_COUNT = 3;
private static final int STRIDE =
(POSITION_COMPONENT_COUNT + COLOR_COMPONENT_COUNT)
* Constants.BYTES_PER_FLOAT;
private static final float[] VERTEX_DATA = {
// Order of coordinates: X, Y, R, G, B
0f, -0.4f, 0f, 0f, 1f,
0f, 0.4f, 1f, 0f, 0f };
private final VertexArray vertexArray;
public Mallet() {
vertexArray = new VertexArray(VERTEX_DATA);
}
public void bindData(ColorShaderProgram colorProgram) {
vertexArray.setVertexAttribPointer(0,
colorProgram.getPositionAttributeLocation(),
POSITION_COMPONENT_COUNT, STRIDE);
vertexArray.setVertexAttribPointer(POSITION_COMPONENT_COUNT,
colorProgram.getColorAttributeLocation(),
COLOR_COMPONENT_COUNT, STRIDE);
}
public void draw() {
glDrawArrays(GL_POINTS, 0, 2);
}}
ShaderProgram.java
public class ShaderProgram {
// Uniform constants
protected static final String U_MATRIX = "u_Matrix";
protected static final String U_TEXTURE_UNIT = "u_TextureUnit";
// Attribute constants
protected static final String A_POSITION = "a_Position";
protected static final String A_COLOR = "a_Color";
protected static final String A_TEXTURE_COORDINATES = "a_TextureCoordinates";
// Shader program
protected final int program;
protected ShaderProgram(Context context, int vertexShaderResourceId,
int fragmentShaderResourceId) {
// Compile the shaders and link the program.
program = ShaderHelper.buildProgram(TextResourceReader
.readTextFileFromResource(context, vertexShaderResourceId),
TextResourceReader.readTextFileFromResource(context,
fragmentShaderResourceId));
}
public void useProgram() {
// Set the current OpenGL shader program to this program.
glUseProgram(program);
}}
ColorShaderProgram.java
public class ColorShaderProgram extends ShaderProgram {
// Uniform locations
private final int uMatrixLocation;
// Attribute locations
private final int aPositionLocation;
private final int aColorLocation;
public ColorShaderProgram(Context context) {
super(context, R.raw.simple_vertex_shader, R.raw.simple_fragment_shader);
// Retrieve uniform locations for the shader program.
uMatrixLocation = glGetUniformLocation(program, U_MATRIX);
// Retrieve attribute locations for the shader program.
aPositionLocation = glGetAttribLocation(program, A_POSITION);
aColorLocation = glGetAttribLocation(program, A_COLOR);
}
public void setUniforms(float[] matrix) {
// Pass the matrix into the shader program.
glUniformMatrix4fv(uMatrixLocation, 1, false, matrix, 0);
}
public int getPositionAttributeLocation() {
return aPositionLocation;
}
public int getColorAttributeLocation() {
return aColorLocation;
}}
TextureShaderProgram.java
public class TextureShaderProgram extends ShaderProgram {
// Uniform locations
private final int uMatrixLocation;
private final int uTextureUnitLocation;
// Attribute locations
private final int aPositionLocation;
private final int aTextureCoordinatesLocation;
public TextureShaderProgram(Context context) {
super(context, R.raw.texture_vertex_shader,
R.raw.texture_fragment_shader);
// Retrieve uniform locations for the shader program.
uMatrixLocation = glGetUniformLocation(program, U_MATRIX);
uTextureUnitLocation = glGetUniformLocation(program, U_TEXTURE_UNIT);
// Retrieve attribute locations for the shader program.
aPositionLocation = glGetAttribLocation(program, A_POSITION);
aTextureCoordinatesLocation = glGetAttribLocation(program,
A_TEXTURE_COORDINATES);
}
public void setUniforms(float[] matrix, int textureId) {
// Pass the matrix into the shader program.
glUniformMatrix4fv(uMatrixLocation, 1, false, matrix, 0);
// Set the active texture unit to texture unit 0.
glActiveTexture(GL_TEXTURE0);
// Bind the texture to this unit.
glBindTexture(GL_TEXTURE_2D, textureId);
// Tell the texture uniform sampler to use this texture in the shader by
// telling it to read from texture unit 0.
glUniform1i(uTextureUnitLocation, 0);
}
public int getPositionAttributeLocation() {
return aPositionLocation;
}
public int getTextureCoordinatesAttributeLocation() {
return aTextureCoordinatesLocation;
}}
TextureHelper.java
public class TextureHelper {
private static final String TAG = "TextureHelper";
public static int loadTexture(Context context, int resourceId) {
final int[] textureObjectIds = new int[1];
glGenTextures(1, textureObjectIds, 0);
if (textureObjectIds[0] == 0) {
if (LoggerConfig.ON) {
Log.w(TAG, "Could not generate a new OpenGL texture object.");
}
return 0;
}
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inScaled = false;
final Bitmap bitmap = BitmapFactory.decodeResource(
context.getResources(), resourceId, options);
if (bitmap == null) {
if (LoggerConfig.ON) {
Log.w(TAG, "Resource ID " + resourceId
+ " could not be decoded.");
}
glDeleteTextures(1, textureObjectIds, 0);
return 0;
}
glBindTexture(GL_TEXTURE_2D, textureObjectIds[0]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,
GL_LINEAR_MIPMAP_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
texImage2D(GL_TEXTURE_2D, 0, bitmap, 0);
bitmap.recycle();
glGenerateMipmap(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, 0); // unbind texture
return textureObjectIds[0];
}}
ShaderHelper.java
public class ShaderHelper {
private static final String TAG = "ShaderHelper";
public static int compileVertexShader(String shaderCode) {
return CompileShader(GL_VERTEX_SHADER, shaderCode);
}
public static int compileFragmentShader(String shaderCode) {
return CompileShader(GL_FRAGMENT_SHADER, shaderCode);
}
private static int CompileShader(int type, String shaderCode) {
final int shaderObjectId = glCreateShader(type);
if (shaderObjectId == 0) {
Log.w(TAG, "Could not create shader");
return 0;
}
// shader and associate it with shadercode
glShaderSource(shaderObjectId, shaderCode);
glCompileShader(shaderObjectId);
final int[] compileStatus = new int[1];
glGetShaderiv(shaderObjectId, GL_COMPILE_STATUS, compileStatus, 0);
Log.v(TAG, "Results of compiling source:" + "\n" + shaderCode + "\n:"
+ glGetShaderInfoLog(shaderObjectId));
if (compileStatus[0] == 0) { // If it failed, delete the shader object.
// glDeleteShader(shaderObjectId);
Log.w(TAG, "Compilation of shader failed.");
return 0;
}
return shaderObjectId;
}
public static int linkProgram(int vertexShaderId, int fragmentShaderId) {
final int programObjectId = glCreateProgram();
if (programObjectId == 0) {
Log.w(TAG, "couldn't craete prgram");
return 0;
}
glAttachShader(programObjectId, vertexShaderId);
glAttachShader(programObjectId, fragmentShaderId);
glLinkProgram(programObjectId);
final int[] linkStatus = new int[1];
glGetProgramiv(programObjectId, GL_LINK_STATUS, linkStatus, 0);
Log.v(TAG, "Results of linking program:\n"
+ glGetProgramInfoLog(programObjectId));
if (linkStatus[0] == 0) { // If it failed, delete the program object.
glDeleteProgram(programObjectId);
Log.w(TAG, "Linking of program failed.");
return 0;
}
return programObjectId;
}
public static boolean validateProgram(int programObjectId) {
glValidateProgram(programObjectId);
final int[] validateStatus = new int[1];
glGetProgramiv(programObjectId, GL_VALIDATE_STATUS, validateStatus, 0);
Log.v(TAG, "Results of validating program: " + validateStatus[0]
+ "\nLog:" + glGetProgramInfoLog(programObjectId));
return validateStatus[0] != 0;
}
public static int buildProgram(String vertexShaderSource,
String fragmentShaderSource) {
int program;
// Compile the shaders.
int vertexShader = compileVertexShader(vertexShaderSource);
int fragmentShader = compileFragmentShader(fragmentShaderSource);
// Link them into a shader program.
program = linkProgram(vertexShader, fragmentShader);
if (LoggerConfig.ON) {
validateProgram(program);
}
return program;
}}
VertexArray.java
public class VertexArray {
private final FloatBuffer floatBuffer;
public VertexArray(float[] vertexData) {
floatBuffer = ByteBuffer
.allocateDirect(vertexData.length * Constants.BYTES_PER_FLOAT)
.order(ByteOrder.nativeOrder()).asFloatBuffer().put(vertexData);
}
public void setVertexAttribPointer(int dataOffset, int attributeLocation,
int componentCount, int stride) {
floatBuffer.position(dataOffset);
glVertexAttribPointer(attributeLocation, componentCount, GL_FLOAT,
false, stride, floatBuffer);
glEnableVertexAttribArray(attributeLocation);
floatBuffer.position(0);
}}
I am referring to these two websites: Link1 & Link2. I just get a black screen whenever I run my app. The image is also a power of 2 (512*512), but I am not able to find the problem in this code. Please help if anyone knows about it.

Add png texture in GLSurfaceView.Renderer

I want to add another texture from a PNG image. It is more like a logo, so I want it centered, with the GLSurfaceView and all its effects and models showing in the background. Please see my nicely done illustration.
---------------------------
| GLSurfaceView * |
| |
| |
| |
| * ------------- |
| | | |
| | PNG | |
| | Texture | * |
| * | | |
| ------------- |
| * |
| |
| |
| * * |
---------------------------
LEGEND: * = some good effects :D (pweew! pweew!)
I am trying to make a live wallpaper using WallpaperService, which has an inner class that extends Engine; that class in turn has an inner class that extends GLSurfaceView. What I have so far is the GLSurfaceView, and it is working great!
-Added- FlashWallpaperService
public class FlashWallpaperService extends WallpaperService {
@Override
public Engine onCreateEngine() {
return new GLEngine();
}
public class GLEngine extends Engine {
private FlashGLSurfaceView flashGLSurfaceView;
@Override
public void onCreate(SurfaceHolder surfaceHolder) {
super.onCreate(surfaceHolder);
flashGLSurfaceView = new FlashGLSurfaceView(FlashWallpaperService.this);
}
@Override
public void onVisibilityChanged(boolean visible) {
super.onVisibilityChanged(visible);
if (visible) {
flashGLSurfaceView.onResume();
} else {
flashGLSurfaceView.onPause();
}
}
@Override
public void onDestroy() {
super.onDestroy();
flashGLSurfaceView.onDestroy();
}
class FlashGLSurfaceView extends GLSurfaceView {
private ActivityManager activityManager;
private ConfigurationInfo configurationInfo;
private boolean supportsEs2;
public FlashGLSurfaceView(Context context) {
super(context);
if (!isInEditMode()) {
activityManager = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
configurationInfo = activityManager.getDeviceConfigurationInfo();
supportsEs2 = configurationInfo.reqGlEsVersion >= 0x20000;
if (supportsEs2) {
// Request an OpenGL ES 2.0 compatible context.
this.setEGLContextClientVersion(2);
// Set the renderer to our demo renderer, defined below.
FlashSystemRenderer mRenderer = new FlashSystemRenderer(this);
this.setRenderer(mRenderer);
this.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
} else {
if (!isInEditMode()) throw new UnsupportedOperationException();
}
}
}
public FlashGLSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
this.setEGLContextClientVersion(2);
this.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
this.getHolder().setFormat(PixelFormat.TRANSLUCENT);
this.setZOrderOnTop(false);
if (!isInEditMode()) {
activityManager = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
configurationInfo = activityManager.getDeviceConfigurationInfo();
supportsEs2 = configurationInfo.reqGlEsVersion >= 0x20000;
if (supportsEs2) {
// Request an OpenGL ES 2.0 compatible context.
this.setEGLContextClientVersion(2);
// Set the renderer to our demo renderer, defined below.
FlashSystemRenderer mRenderer = new FlashSystemRenderer(this);
this.setRenderer(mRenderer);
this.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
} else {
if (!isInEditMode()) throw new UnsupportedOperationException();
}
}
}
@Override
public SurfaceHolder getHolder() {
return getSurfaceHolder();
}
public void onDestroy() {
super.onDetachedFromWindow();
}
}
}
}
-Added- Renderer class
public class FlashSystemRenderer implements GLSurfaceView.Renderer {
public float ratio;
public int mvpMatrixHandle;
public int mvMatrixHandle = -1;
public int positionHandle;
public int normalHandle;
public int textureCoordinateHandle;
public int programHandle;
public int miscHandle;
public int sizeX = 35;
public int sizeY = 70;
public float mTime;
private GLSurfaceView mGlSurfaceView;
/**
* Store the model matrix. This matrix is used to move models from object space (where each model can be thought
* of being located at the center of the universe) to world space.
*/
private float[] mModelMatrix = new float[16];
/**
* Store the view matrix. This can be thought of as our camera. This matrix transforms world space to eye space;
* it positions things relative to our eye.
*/
private float[] mViewMatrix = new float[16];
/** Store the projection matrix. This is used to project the scene onto a 2D viewport. */
private float[] mProjectionMatrix = new float[16];
/** Allocate storage for the final combined matrix. This will be passed into the shader program. */
private float[] mMVPMatrix = new float[16];
private float[] mTemporaryMatrix = new float[16];
private int timeHandle;
private long mStartTime;
private int frames;
private long startTime;
private boolean mStart;
private long timePassed;
private float dt;
private long t_current;
private long t_prev;
private float dt_prev = 1;
private ValueAnimator animator;
private Bitmap mBitmap;
private FlashSystem mFlashSystem;
private Context context;
private int resolutionHandle;
private int mWidth;
private int mHeight;
private int timesRepeated;
private float delta;
private ExecutorService mExecutor = Executors.newSingleThreadExecutor();
public FlashSystemRenderer(GLSurfaceView glSurfaceView) {
mGlSurfaceView = glSurfaceView;
context = glSurfaceView.getContext();
}
@Override
public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
// Use culling to remove back faces.
GLES20.glEnable(GLES20.GL_CULL_FACE);
GLES20.glFrontFace(GLES20.GL_CW);
// Enable depth testing
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
// Position the eye in front of the origin.
final float eyeX = 0.0f;
final float eyeY = 0.0f;
final float eyeZ = 0.0f;
// We are looking toward the distance
final float lookX = 0.0f;
final float lookY = 0.0f;
final float lookZ = 1.0f;
// Set our up vector. This is where our head would be pointing were we holding the camera.
final float upX = 0.0f;
final float upY = 1.0f;
final float upZ = 0.0f;
Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ);
final String vertexShader = RawResourceReader.readTextFileFromRawResource(context, R.raw.flash_vert);
final String fragmentShader = RawResourceReader.readTextFileFromRawResource(context, R.raw.flash_frag);
final int vertexShaderHandle = ShaderHelper.compileShader(GLES20.GL_VERTEX_SHADER, vertexShader);
final int fragmentShaderHandle = ShaderHelper.compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentShader);
programHandle = ShaderHelper.createAndLinkProgram(vertexShaderHandle, fragmentShaderHandle,
new String[]{"a_Position", "a_TexCoordinate", "a_TileXY"});
}
@Override
public void onSurfaceChanged(GL10 unused, int width, int height) {
// Set the OpenGL viewport to the same size as the surface.
GLES20.glViewport(0, 0, width, height);
mWidth = width;
mHeight = height;
// Create a new perspective projection matrix. The height will stay the same
// while the width will vary as per aspect ratio.
final float ratio = (float) width / height;
final float left = -ratio;
@SuppressWarnings("UnnecessaryLocalVariable")
final float right = ratio;
final float bottom = -1.0f;
final float top = 1.0f;
final float near = 1.0f;
final float far = 10.0f;
this.ratio = ratio;
Matrix.frustumM(mProjectionMatrix, 0, left, right, bottom, top, near, far);
mStartTime = System.currentTimeMillis();
mExecutor.execute(new FlashsGenerator(this));
}
@Override
public void onDrawFrame(GL10 gl10) {
logFrame();
drawGl();
if (mFlashSystem != null) {
mFlashSystem.render();
}
}
private void drawGl() {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
GLES20.glUseProgram(programHandle);
// Set program handles
mvpMatrixHandle = GLES20.glGetUniformLocation(programHandle, "u_MVPMatrix");
mvMatrixHandle = GLES20.glGetUniformLocation(programHandle, "u_MVMatrix");
timeHandle = GLES20.glGetUniformLocation(programHandle, "u_Time");
resolutionHandle = GLES20.glGetUniformLocation(programHandle, "u_Resolution");
positionHandle = GLES20.glGetAttribLocation(programHandle, "a_Position");
normalHandle = GLES20.glGetAttribLocation(programHandle, "a_Normal");
textureCoordinateHandle = GLES20.glGetAttribLocation(programHandle, "a_TexCoordinate");
miscHandle = GLES20.glGetAttribLocation(programHandle, "a_Misc");
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.translateM(mModelMatrix, 0, 0.0f, 0.0f, 5f);
Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
// Pass in the modelview matrix.
GLES20.glUniformMatrix4fv(mvMatrixHandle, 1, false, mMVPMatrix, 0);
Matrix.multiplyMM(mTemporaryMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
System.arraycopy(mTemporaryMatrix, 0, mMVPMatrix, 0, 16);
// Pass in the combined matrix.
GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, mMVPMatrix, 0);
// Pass in u_Time
GLES20.glUniform1f(timeHandle, (System.currentTimeMillis() - mStartTime) / 3500f);
// u_Resolution
GLES20.glUniform2f(resolutionHandle, mWidth, mHeight);
GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
GLES20.glEnable(GLES20.GL_BLEND);
}
public void logFrame() {
frames++;
timePassed = (System.nanoTime() - startTime) / 1_000_000;
if(timePassed >= 10_000) {
frames = 0;
startTime = System.nanoTime();
}
}
public void onTouchEvent() {
if (mStart) {
reset();
}
mStart = !mStart;
mStartTime = System.nanoTime();
}
private void reset() {
if (animator != null) {
animator.cancel();
}
mStartTime = 0;
dt = 0;
t_prev = 0;
}
public FlashSystem getFlashSystem() {
return mFlashSystem;
}
public void setFlashSystem(final FlashSystem flashSystem) {
mFlashSystem = flashSystem;
}
public void queue(Runnable runnable) {
mGlSurfaceView.queueEvent(runnable);
}
}
-Added- flash_vert.glsl
uniform mat4 u_MVPMatrix; // A constant representing the combined model/view/projection matrix.
uniform mat4 u_MVMatrix; // A constant representing the combined model/view matrix.
uniform float u_Time;
uniform vec2 u_Resolution;
attribute vec4 a_Position; //initial
attribute vec2 a_TexCoordinate;
attribute vec4 a_Misc; //initial
varying vec2 v_TexCoordinate;
varying float v_Radius;
#define RADIUS 3.5
float rand( vec2 co )
{
return fract(sin(dot(co.xy ,vec2(12.9898,78.233))) * 43758.5453);
}
void rotate( in float angle, inout vec2 vector )
{
mat2 rotationMatrix = mat2( cos( angle ), -sin( angle ),
sin( angle ), cos( angle ));
vector *= rotationMatrix;
}
void main()
{
// Transform the vertex into eye space.
//v_Position = vec3(u_MVMatrix * a_Position);
float aspect = u_Resolution.x / u_Resolution.y;
// Pass through the texture coordinate.
v_TexCoordinate = a_TexCoordinate;
vec2 centerPos = a_Position.xy;
float f = mix(1.0, a_Misc.t, u_Time);
centerPos *= mod(f, RADIUS);
float size = a_Misc.s;
size = mix(0.0, size, mod(f, RADIUS)/RADIUS);
vec2 relativePos = vec2(
(a_TexCoordinate.s - 0.5) * 2.0 * size,
(a_TexCoordinate.t - 0.5) * 2.0 * size
);
vec2 v = vec2(0.0, 1.0);
vec4 pos = vec4(
relativePos + centerPos,
0.0,
1.0
);
gl_Position = u_MVPMatrix * pos;
v_Radius = size * 2.5;
}
-Added- flash_frag.glsl
precision mediump float;
uniform sampler2D uTexture;
varying vec2 vTexPosition;
void main() {
gl_FragColor = texture2D(uTexture, vTexPosition);
}
How can I add the PNG texture to my existing renderer so they render together, with the PNG texture (which has transparency/alpha) on top of all the other objects? Everything I found from searching did not work; some of it renders, but it shows the PNG texture only.

Stuck with compute shader in Android 5.1.1 (Opengl 3.1)

I have written a very simple program to get values from a compute shader and render them directly to the screen.
I suspect that the shader storage buffer is not being bound to my VBO after I call all the necessary compute shader methods.
I am sharing the code; please see if there are any errors. I get no compilation errors, and the device I am using supports GL 3.1 (I have checked).
Also, in addition to all this, do we have to declare anything in the manifest?
This is my renderer code
public class MyRenderer implements Renderer{
Context context;
int ProgramId;
int ComputeShaderProgramId;
int aPositionLocation;
int aColorLocation;
int radiusLocation;
int gIndexBufferBinding;
float rad=0;
int[] vbo = new int[1];
int NUM_VERTS_H = 16;
int NUM_VERTS_V = 16;
int GROUP_SIZE_WIDTH = 8;
int GROUP_SIZE_HEIGHT = 8;
MyRenderer(Context context)
{
this.context = context;
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
GLES31.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
// TODO Auto-generated method stub
String VetrexShaderSource = ShaderHelper.readFile(context, R.raw.vertexshader);
String FragmentShaderSource = ShaderHelper.readFile(context, R.raw.fragmentshader);
String ComputeShaderSource = ShaderHelper.readFile(context, R.raw.hellocompute);
int vertexShader = ShaderHelper.compileShader(VetrexShaderSource,GLES31.GL_VERTEX_SHADER);
int fragmentShader = ShaderHelper.compileShader(FragmentShaderSource,GLES31.GL_FRAGMENT_SHADER);
int computeShader = ShaderHelper.compileShader(ComputeShaderSource,GLES31.GL_COMPUTE_SHADER);
ProgramId = ShaderHelper.createShaderProgram(vertexShader, fragmentShader);
GLES31.glUseProgram(ProgramId);
aPositionLocation = GLES31.glGetAttribLocation(ProgramId, "a_Position");
aColorLocation = GLES31.glGetAttribLocation(ProgramId, "aColorCoordinate");
ComputeShaderProgramId = ShaderHelper.createComputeShaderProgram(computeShader);
GLES31.glUseProgram(ComputeShaderProgramId);
gIndexBufferBinding = 1;
GLES31.glGenBuffers(1, vbo,0);
}
@Override
public void onDrawFrame(GL10 gl) {
GLES31.glClear(GLES31.GL_COLOR_BUFFER_BIT|GLES31.GL_DEPTH_BUFFER_BIT);
GLES31.glUseProgram(ComputeShaderProgramId);
GLES31.glBindBufferBase(GLES31.GL_SHADER_STORAGE_BUFFER, gIndexBufferBinding, vbo[0]);
// GLES31.glDispatchCompute(
// (NUM_VERTS_H % GROUP_SIZE_WIDTH + NUM_VERTS_H) / GROUP_SIZE_WIDTH,
// (NUM_VERTS_V % GROUP_SIZE_HEIGHT + NUM_VERTS_V) / GROUP_SIZE_HEIGHT,
// 1);
GLES31.glDispatchCompute(2,1,1);
GLES31.glBindBufferBase(GLES31.GL_SHADER_STORAGE_BUFFER, gIndexBufferBinding, 0);
GLES31.glMemoryBarrier(GLES31.GL_SHADER_STORAGE_BARRIER_BIT);
System.out.println("error "+GLES31.glGetError());
// Bind VBO
GLES31.glBindBuffer( GLES31.GL_ARRAY_BUFFER, vbo[0] );
// Bind Vertex and Fragment rendering shaders
GLES31.glUseProgram(ProgramId);
GLES31.glEnableVertexAttribArray(aPositionLocation);
// Draw points from VBO
GLES31.glDrawArrays(GLES31.GL_POINTS, 0, 6);
}
}
this is my compute shader (hellocompute)
#version 310 es
layout (local_size_x = 3) in;
layout (std430, binding = 1) buffer Output
{
float data[];
} outBuffer;
void main()
{
uint ident = gl_GlobalInvocationID.x;
outBuffer.data[ident] = float(ident)*0.2;
memoryBarrierShared();
barrier();
}
this is my vertexshader
attribute float a_Position;
void main()
{
gl_PointSize = 15.0;
gl_Position = vec4(a_Position,0.0,0.0,1.0);
}
this is my fragment shader
precision mediump float;
void main()
{
//gl_FragColor = vColorCoordinate;
gl_FragColor = vec4(1.0,1.0,0.0,1.0);
}
I should see some yellow dots at x = 0.2, 0.4, 0.6, 0.8, 1.0, but only one dot is visible at x = 0, since the attribute a_Position is taking its default zero value.
The new values should be reflected once the VBO is bound after the dispatch call.

Error passing vertices to shader

I'm beginning a simple 2D GLES 2.0 Android application.
For some strange reason I always get one point in the center of the screen instead of the vertex coordinates passed to the shader.
I'm clearly doing something wrong, but I can't figure out what.
P.S. I'm not using any projection matrices, because I need a standard quad for drawing; I tried a projection and it did not help.
public class TestActivity extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
GLSurfaceView glv = new GLSurfaceView(this);
glv.setEGLContextClientVersion(2);
SimpleRenderer renderer = new SimpleRenderer(this);
glv.setRenderer(renderer);
glv.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
setContentView(glv);
}
}
public class SimpleRenderer implements GLSurfaceView.Renderer {
private final float[] squareVertices = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
private FloatBuffer squareBuffer;
private final Context context;
private int text_program;
private int aPositionLocation2;
public SimpleRenderer(Context context) {
this.context = context;
squareBuffer = ByteBuffer.allocateDirect(squareVertices.length * 4).asFloatBuffer();
squareBuffer.put(squareVertices).position(0);
}
public void onDrawFrame(GL10 gl) {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(text_program);
glEnableVertexAttribArray(aPositionLocation2);
glVertexAttribPointer(aPositionLocation2, 2, GL_FLOAT, false, 0, squareBuffer);
glDrawArrays(GL_POINTS, 0, 4);
glDisableVertexAttribArray(aPositionLocation2);
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
glViewport(0, 0, width, height);
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
text_program = ShaderHelper.buildProgram(TextResourceReader.readTextFileFromResource(context, R.raw.texture_vertex_shader),
TextResourceReader.readTextFileFromResource(context, R.raw.texture_fragment_shader));
aPositionLocation2 = glGetAttribLocation(text_program, "a_Position");
glClearColor(0f, 0f, 0f, 0f);
}
}
public class ShaderHelper {
private static final String TAG = "ShaderHelper";
public static int compileVertexShader(String shaderCode) {
return compileShader(GL_VERTEX_SHADER, shaderCode);
}
public static int compileFragmentShader(String shaderCode) {
return compileShader(GL_FRAGMENT_SHADER, shaderCode);
}
private static int compileShader(int type, String shaderCode) {
final int shaderObjectId = glCreateShader(type);
if (shaderObjectId == 0) Log.w(TAG, "Shader not created!");
glShaderSource(shaderObjectId, shaderCode);
glCompileShader(shaderObjectId);
final int[] compileStatus = new int[1];
glGetShaderiv(shaderObjectId, GL_COMPILE_STATUS, compileStatus, 0);
Log.v(TAG, "Results of compiling source:" + "\n" + shaderCode + "\n:"
+ glGetShaderInfoLog(shaderObjectId));
if (compileStatus[0] == 0) {
// If it failed, delete the shader object.
glDeleteShader(shaderObjectId);
Log.w(TAG, "Compilation of shader failed.");
return 0;
}
return shaderObjectId;
}
public static int linkProgram(int vertexShaderId, int fragmentShaderId) {
final int programObjectId = glCreateProgram();
if (programObjectId == 0) {
Log.w(TAG, "Could not create new program");
return 0;
}
glAttachShader(programObjectId, vertexShaderId);
glAttachShader(programObjectId, fragmentShaderId);
glLinkProgram(programObjectId);
final int[] linkStatus = new int[1];
glGetProgramiv(programObjectId, GL_LINK_STATUS, linkStatus, 0);
Log.v(TAG, "Results of linking program:\n"
+ glGetProgramInfoLog(programObjectId));
if (linkStatus[0] == 0) {
// If it failed, delete the program object.
glDeleteProgram(programObjectId);
Log.w(TAG, "Linking of program failed.");
return 0;
}
return programObjectId;
}
public static boolean validateProgram(int programObjectId) {
glValidateProgram(programObjectId);
final int[] validateStatus = new int[1];
glGetProgramiv(programObjectId, GL_VALIDATE_STATUS, validateStatus, 0);
Log.v(TAG, "Results of validating program: " + validateStatus[0]
+ "\nLog:" + glGetProgramInfoLog(programObjectId));
return validateStatus[0] != 0;
}
public static int buildProgram(String vertexShaderSource,
String fragmentShaderSource) {
int program;
// Compile the shaders.
int vertexShader = compileVertexShader(vertexShaderSource);
int fragmentShader = compileFragmentShader(fragmentShaderSource);
// Link them into a shader program.
program = linkProgram(vertexShader, fragmentShader);
validateProgram(program);
return program;
}
}
vertex shader:
attribute vec4 a_Position;
void main()
{
gl_Position = a_Position;
gl_PointSize = 10.0;
}
fragment shader:
void main()
{
gl_FragColor = vec4(1.0,1.0,1.0,1.0);
}
The problem was indeed in passing the vertex data: OpenGL uses the native byte order (little-endian for the x86 emulator), but I had allocated the buffer with Java's default order (big-endian, I suppose), so corrupted float values were passed to the vertex shader. After specifying the byte order on the byte buffer, everything works fine.
squareBuffer = ByteBuffer.allocateDirect(squareVertices.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
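If it helps, a small helper keeps that allocation pattern in one place (a minimal sketch; the BufferUtils class and method name are made up here for illustration):
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

public final class BufferUtils {
    private BufferUtils() {}

    // Allocate a direct FloatBuffer in the platform's native byte order,
    // which is what OpenGL expects for client-side vertex data.
    public static FloatBuffer asNativeFloatBuffer(float[] data) {
        FloatBuffer buffer = ByteBuffer
                .allocateDirect(data.length * 4)   // 4 bytes per float
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer();
        buffer.put(data).position(0);
        return buffer;
    }
}
The renderer constructor would then be squareBuffer = BufferUtils.asNativeFloatBuffer(squareVertices);.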
