I have written a very simple program to get values from a compute shader and render them directly to the screen.
I suspect that the shader storage buffer is not being bound to my VBO after I call all the necessary compute shader methods.
I am sharing the code; please see if there are any errors. I get no compilation errors, and I have checked that the device I am using supports GL ES 3.1.
Also, in addition to all this, do we have to declare anything in the manifest?
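My assumption (not confirmed) is that the only entry needed is the OpenGL ES version requirement, something like this in AndroidManifest.xml:
<!-- assumption: request an OpenGL ES 3.1 capable device -->
<uses-feature android:glEsVersion="0x00030001" android:required="true" />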
This is my renderer code
public class MyRenderer implements Renderer{
Context context;
int ProgramId;
int ComputeShaderProgramId;
int aPositionLocation;
int aColorLocation;
int radiusLocation;
int gIndexBufferBinding;
float rad=0;
int[] vbo = new int[1];
int NUM_VERTS_H = 16;
int NUM_VERTS_V = 16;
int GROUP_SIZE_WIDTH = 8;
int GROUP_SIZE_HEIGHT = 8;
MyRenderer(Context context)
{
this.context = context;
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
GLES31.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
// TODO Auto-generated method stub
String VertexShaderSource = ShaderHelper.readFile(context, R.raw.vertexshader);
String FragmentShaderSource = ShaderHelper.readFile(context, R.raw.fragmentshader);
String ComputeShaderSource = ShaderHelper.readFile(context, R.raw.hellocompute);
int vertexShader = ShaderHelper.compileShader(VertexShaderSource,GLES31.GL_VERTEX_SHADER);
int fragmentShader = ShaderHelper.compileShader(FragmentShaderSource,GLES31.GL_FRAGMENT_SHADER);
int computeShader = ShaderHelper.compileShader(ComputeShaderSource,GLES31.GL_COMPUTE_SHADER);
ProgramId = ShaderHelper.createShaderProgram(vertexShader, fragmentShader);
GLES31.glUseProgram(ProgramId);
aPositionLocation = GLES31.glGetAttribLocation(ProgramId, "a_Position");
aColorLocation = GLES31.glGetAttribLocation(ProgramId, "aColorCoordinate");
ComputeShaderProgramId = ShaderHelper.createComputeShaderProgram(computeShader);
GLES31.glUseProgram(ComputeShaderProgramId);
gIndexBufferBinding = 1;
GLES31.glGenBuffers(1, vbo,0);
}
@Override
public void onDrawFrame(GL10 gl) {
GLES31.glClear(GLES31.GL_COLOR_BUFFER_BIT|GLES31.GL_DEPTH_BUFFER_BIT);
GLES31.glUseProgram(ComputeShaderProgramId);
GLES31.glBindBufferBase(GLES31.GL_SHADER_STORAGE_BUFFER, gIndexBufferBinding, vbo[0]);
// GLES31.glDispatchCompute(
// (NUM_VERTS_H % GROUP_SIZE_WIDTH + NUM_VERTS_H) / GROUP_SIZE_WIDTH,
// (NUM_VERTS_V % GROUP_SIZE_HEIGHT + NUM_VERTS_V) / GROUP_SIZE_HEIGHT,
// 1);
GLES31.glDispatchCompute(2,1,1);
GLES31.glBindBufferBase(GLES31.GL_SHADER_STORAGE_BUFFER, gIndexBufferBinding, 0);
GLES31.glMemoryBarrier(GLES31.GL_SHADER_STORAGE_BARRIER_BIT);
System.out.println("error "+GLES31.glGetError());
// Bind VBO
GLES31.glBindBuffer( GLES31.GL_ARRAY_BUFFER, vbo[0] );
// Bind Vertex and Fragment rendering shaders
GLES31.glUseProgram(ProgramId);
GLES31.glEnableVertexAttribArray(aPositionLocation);
// Draw points from VBO
GLES31.glDrawArrays(GLES31.GL_POINTS, 0, 6);
}
}
This is my compute shader (hellocompute):
#version 310 es
layout (local_size_x = 3) in;
layout (std430, binding = 1) buffer Output
{
float data[];
} outBuffer;
void main()
{
uint ident = gl_GlobalInvocationID.x;
outBuffer.data[ident] = float(ident)*0.2;
memoryBarrierShared();
barrier();
}
This is my vertex shader:
attribute float a_Position;
void main()
{
gl_PointSize = 15.0;
gl_Position = vec4(a_Position,0.0,0.0,1.0);
}
This is my fragment shader:
precision mediump float;
void main()
{
//gl_FragColor = vColorCoordinate;
gl_FragColor = vec4(1.0,1.0,0.0,1.0);
}
I should see some yellow dots at x = 0.2, 0.4, 0.6, 0.8, 1.0.
But only one dot is seen, at x = 0, because the attribute a_Position is falling back to its default value of zero.
The new values should show up once the VBO is bound after the dispatch call.
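Do I also have to allocate storage for the buffer and point the attribute at it before drawing? Here is a minimal sketch of what I suspect is missing (my assumption only, not a verified fix):
// onSurfaceChanged: give the generated buffer storage the compute shader can write into
GLES31.glGenBuffers(1, vbo, 0);
GLES31.glBindBuffer(GLES31.GL_SHADER_STORAGE_BUFFER, vbo[0]);
GLES31.glBufferData(GLES31.GL_SHADER_STORAGE_BUFFER,
        NUM_VERTS_H * 4,         // room for NUM_VERTS_H floats, 4 bytes each
        null,                    // no initial data; the compute shader fills it
        GLES31.GL_DYNAMIC_COPY);
GLES31.glBindBuffer(GLES31.GL_SHADER_STORAGE_BUFFER, 0);
// onDrawFrame, after binding the VBO: tell GL how to read a_Position out of it
GLES31.glBindBuffer(GLES31.GL_ARRAY_BUFFER, vbo[0]);
GLES31.glEnableVertexAttribArray(aPositionLocation);
GLES31.glVertexAttribPointer(aPositionLocation, 1, GLES31.GL_FLOAT, false, 0, 0);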
Related
I am new to OpenGL ES and I am trying to display a square in my Android app, but it shows only a black screen (no square) when I run the app. Can anyone tell me where I am going wrong and help me fix the issue?
MainActivity.java
public class MainActivity extends AppCompatActivity {
private GLSurfaceView myGLSurfaceView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
myGLSurfaceView = new GLSurfaceView(this);
// Check if the system supports OpenGL ES 2.0.
final ActivityManager activityManager = (ActivityManager) getSystemService(Context.ACTIVITY_SERVICE);
final ConfigurationInfo configurationInfo = activityManager.getDeviceConfigurationInfo();
final boolean supportsEs2 = configurationInfo.reqGlEsVersion >= 0x20000;
if (supportsEs2)
{
// Request an OpenGL ES 2.0 compatible context.
myGLSurfaceView.setEGLContextClientVersion(2);
// Set the renderer to our demo renderer, defined below.
myGLSurfaceView.setRenderer(new MyRenderer());
}
else
{
// This is where you could create an OpenGL ES 1.x compatible
// renderer if you wanted to support both ES 1 and ES 2.
return;
}
setContentView(myGLSurfaceView);
}
@Override
protected void onPause() {
super.onPause();
}
@Override
protected void onResume() {
super.onResume();
}
}
MyRenderer.java
class MyRenderer implements GLSurfaceView.Renderer {
private Square mySq;
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
mySq = new Square();
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
GLES20.glViewport(0, 0, width, height);
}
@Override
public void onDrawFrame(GL10 gl) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
mySq.draw();
}
public static int loadShader(int type, String shaderCode){
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
}
Square.java
public class Square {
private FloatBuffer vertexBuffer;
private ShortBuffer drawListBuffer;
private final int mProgram;
private int mPositionHandle;
private int mColorHandle;
// private final int vertexCount = squareCoords.length;
private final int vertexCount =4;
private final int vertexStride = Coords_per_vertex * 4;
static final int Coords_per_vertex = 3;
static float squareCoords[] = {
-0.5f, 0.5f, 0.0f, // top left
-0.5f, -0.5f, 0.0f, // bottom left
0.5f, -0.5f, 0.0f, // bottom right
0.5f, 0.5f, 0.0f }; // top right
float color[] = { 0.63671875f, 0.76953125f, 0.22265625f, 1.0f };
private short drawOrder[]={0,1,2,0,2,3};
private static final String vertexShaderCode = "attribute vec4 vPosition;" +
"void main() {" +
"gl_Position = vPosition;" +
"}";
private final String fragmentShaderCode = "precision mediump float;" +
"uniform vec4 vColor;" +
"void main() {" +
"gl_Color = vColor;" +
"}";
public Square(){
// initialize vertex byte buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(
// (# of coordinate values * 4 bytes per float)
squareCoords.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(squareCoords);
vertexBuffer.position(0);
// initialize byte buffer for the draw list
ByteBuffer dlb = ByteBuffer.allocateDirect(
// (# of coordinate values * 2 bytes per short)
drawOrder.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
int vertexShader = MyRenderer.loadShader(GLES20.GL_VERTEX_SHADER,vertexShaderCode);
int fragmentShader = MyRenderer.loadShader(GLES20.GL_FRAGMENT_SHADER,fragmentShaderCode);
// create empty OpenGL ES Program
mProgram = GLES20.glCreateProgram();
// add the vertex shader to program
GLES20.glAttachShader(mProgram, vertexShader);
// add the fragment shader to program
GLES20.glAttachShader(mProgram, fragmentShader);
// creates OpenGL ES program executables
GLES20.glLinkProgram(mProgram);
GLES20.glUseProgram(mProgram);
}
public void draw(){
MyRenderer myR = new MyRenderer();
mPositionHandle = GLES20.glGetAttribLocation(mProgram,"vPosition");
GLES20.glEnableVertexAttribArray(mPositionHandle);
GLES20.glVertexAttribPointer(mPositionHandle,Coords_per_vertex,GLES20.GL_FLOAT,false,vertexStride,vertexBuffer);
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
GLES20.glUniform4fv(mColorHandle,1,color,0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLES,0,vertexCount);
// Disable vertex array
// GLES20.glDisableVertexAttribArray(mPositionHandle);
}
}
I want to draw a polygon, as shown in the images, and fill it with color.
I have tried building the polygon out of triangles, but that has not helped me. If anyone knows how, please help me out.
OpenGLProjectRenderer.java
public class OpenGLProjectRenderer implements Renderer {
List<Float> points = new ArrayList<Float>();
private static final String TAG = "Renderer";
private static final int POSITION_COMPONENT_COUNT = 2;
private static final int BYTES_PER_FLOAT = 4;
private FloatBuffer vertexData = ByteBuffer
.allocateDirect(20000 * BYTES_PER_FLOAT)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
private Context context;
private int program;
private static final String A_POSITION = "a_Position";
private int aPositionLocation;
private static final String U_COLOR = "u_Color";
private int uColorLocation;
private HashMap<Integer, ArrayList<Float>> lines = new HashMap<Integer, ArrayList<Float>>();
int position = 0;
public OpenGLProjectRenderer(Context context) {
this.context = context;
}
@Override
public void onDrawFrame(GL10 gl) {
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUniform4f(uColorLocation, 1.0f, 0.0f, 0.0f, 1.0f);
for (int p = 0; p < lines.size(); p++) {
vertexData.put(toFloatarray(lines.get(p)));
int vertices = (int) lines.get(p).size() / 2;
int b = vertices % 4 == 0 ? vertices-1 : vertices - 2;
Log.d(TAG,""+lines.size());
glDrawArrays(GLES20.GL_LINE_LOOP, 0, lines.size());
vertexData.clear();
}
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
gl.glViewport(0, 0, width, height);
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
String vertexShaderSource = TextResourceReader.readTextFileFromResource(
context, R.raw.simple_vertex_shader);
String fragmentShaderSource = TextResourceReader.readTextFileFromResource(
context, R.raw.simple_fragment_shader);
int vertexShader = ShaderHelper.compileVertexShader(vertexShaderSource);
int fragmentShader = ShaderHelper
.compileFragmentShader(fragmentShaderSource);
program = ShaderHelper.linkProgram(vertexShader, fragmentShader);
ShaderHelper.validateProgram(program);
glUseProgram(program);
uColorLocation = glGetUniformLocation(program, U_COLOR);
aPositionLocation = glGetAttribLocation(program, A_POSITION);
vertexData.position(0);
glVertexAttribPointer(aPositionLocation, POSITION_COMPONENT_COUNT,
GL_FLOAT, false, 0, vertexData);
glEnableVertexAttribArray(aPositionLocation);
}
ArrayList<Float> temp = new ArrayList<Float>();
public void handleTouchPress(float normalizedX, float normalizedY) {
Log.v(TAG + " handleTouchPress", points.size() + "");
temp.add(normalizedX);
temp.add(normalizedY);
lines.put(position, temp);
}
public void handleTouchDrag(float normalizedX, float normalizedY) {
Log.v(TAG + " handleTouchDrag", points.size() + "");
}
public float[] toFloatarray(List<Float> floatList) {
float[] floatArray = new float[floatList.size()];
int i = 0;
for (Float f : floatList) {
floatArray[i++] = (f != null ? f : Float.NaN);
}
return floatArray;
}
public void handleTouchUp(float normalizedX, float normalizedY) {
Log.v(TAG + " handleTouchUp", points.size() + "");
position++;
}}
Using the above code I am able to draw a polygon with GL_LINE_LOOP, but I am not able to fill the created polygon.
OpenGL ES 2.0 supports only triangles as its basic filled primitive. There are three ways of drawing polygons using triangles:
1) Triangles
2) Triangle strips
3) Triangle fans
In your case you can try a triangle fan to draw the polygon, provided you know a point inside it.
Here is an example of drawing a circle.
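Below is a rough sketch of the triangle-fan idea (not the original circle example; centerX, centerY, radius and aPositionLocation stand in for values from your own code):
// Build the fan: centre vertex first, then the outline points in order,
// closing back at the first outline point.
int segments = 32;
float[] fan = new float[(segments + 2) * 2];
fan[0] = centerX;
fan[1] = centerY;
for (int i = 0; i <= segments; i++) {
    double angle = 2.0 * Math.PI * i / segments;
    fan[2 + i * 2] = centerX + radius * (float) Math.cos(angle);
    fan[2 + i * 2 + 1] = centerY + radius * (float) Math.sin(angle);
}
FloatBuffer fanBuffer = ByteBuffer.allocateDirect(fan.length * 4)
        .order(ByteOrder.nativeOrder())
        .asFloatBuffer();
fanBuffer.put(fan).position(0);
// Draw it filled instead of with GL_LINE_LOOP.
GLES20.glVertexAttribPointer(aPositionLocation, 2, GLES20.GL_FLOAT, false, 0, fanBuffer);
GLES20.glEnableVertexAttribArray(aPositionLocation);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_FAN, 0, segments + 2);
The same approach fills any convex polygon: use its centroid as the first vertex and its outline points, in order, as the rest.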
I want to add another texture from a PNG image; it is more like a logo, so I want it in the center, with the GLSurfaceView and all its effects and models showing in the background. Please see my nicely done illustration.
---------------------------
| GLSurfaceView * |
| |
| |
| |
| * ------------- |
| | | |
| | PNG | |
| | Texture | * |
| * | | |
| ------------- |
| * |
| |
| |
| * * |
---------------------------
LEGEND: * = some good effects :D (pweew! pweew!)
I am trying to make a live wallpaper using a WallpaperService, which has an inner class that extends Engine; that class in turn has an inner class that extends GLSurfaceView. What I have so far is the GLSurfaceView, and it is working great!
-Added- FlashWallpaperService
public class FlashWallpaperService extends WallpaperService {
@Override
public Engine onCreateEngine() {
return new GLEngine();
}
public class GLEngine extends Engine {
private FlashGLSurfaceView flashGLSurfaceView;
@Override
public void onCreate(SurfaceHolder surfaceHolder) {
super.onCreate(surfaceHolder);
flashGLSurfaceView = new FlashGLSurfaceView(FlashWallpaperService.this);
}
@Override
public void onVisibilityChanged(boolean visible) {
super.onVisibilityChanged(visible);
if (visible) {
flashGLSurfaceView.onResume();
} else {
flashGLSurfaceView.onPause();
}
}
@Override
public void onDestroy() {
super.onDestroy();
flashGLSurfaceView.onDestroy();
}
class FlashGLSurfaceView extends GLSurfaceView {
private ActivityManager activityManager;
private ConfigurationInfo configurationInfo;
private boolean supportsEs2;
public FlashGLSurfaceView(Context context) {
super(context);
if (!isInEditMode()) {
activityManager = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
configurationInfo = activityManager.getDeviceConfigurationInfo();
supportsEs2 = configurationInfo.reqGlEsVersion >= 0x20000;
if (supportsEs2) {
// Request an OpenGL ES 2.0 compatible context.
this.setEGLContextClientVersion(2);
// Set the renderer to our demo renderer, defined below.
FlashSystemRenderer mRenderer = new FlashSystemRenderer(this);
this.setRenderer(mRenderer);
this.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
} else {
if (!isInEditMode()) throw new UnsupportedOperationException();
}
}
}
public FlashGLSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
this.setEGLContextClientVersion(2);
this.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
this.getHolder().setFormat(PixelFormat.TRANSLUCENT);
this.setZOrderOnTop(false);
if (!isInEditMode()) {
activityManager = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
configurationInfo = activityManager.getDeviceConfigurationInfo();
supportsEs2 = configurationInfo.reqGlEsVersion >= 0x20000;
if (supportsEs2) {
// Request an OpenGL ES 2.0 compatible context.
this.setEGLContextClientVersion(2);
// Set the renderer to our demo renderer, defined below.
FlashSystemRenderer mRenderer = new FlashSystemRenderer(this);
this.setRenderer(mRenderer);
this.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
} else {
if (!isInEditMode()) throw new UnsupportedOperationException();
}
}
}
@Override
public SurfaceHolder getHolder() {
return getSurfaceHolder();
}
public void onDestroy() {
super.onDetachedFromWindow();
}
}
}
}
-Added- Renderer class
public class FlashSystemRenderer implements GLSurfaceView.Renderer {
public float ratio;
public int mvpMatrixHandle;
public int mvMatrixHandle = -1;
public int positionHandle;
public int normalHandle;
public int textureCoordinateHandle;
public int programHandle;
public int miscHandle;
public int sizeX = 35;
public int sizeY = 70;
public float mTime;
private GLSurfaceView mGlSurfaceView;
/**
* Store the model matrix. This matrix is used to move models from object space (where each model can be thought
* of being located at the center of the universe) to world space.
*/
private float[] mModelMatrix = new float[16];
/**
* Store the view matrix. This can be thought of as our camera. This matrix transforms world space to eye space;
* it positions things relative to our eye.
*/
private float[] mViewMatrix = new float[16];
/** Store the projection matrix. This is used to project the scene onto a 2D viewport. */
private float[] mProjectionMatrix = new float[16];
/** Allocate storage for the final combined matrix. This will be passed into the shader program. */
private float[] mMVPMatrix = new float[16];
private float[] mTemporaryMatrix = new float[16];
private int timeHandle;
private long mStartTime;
private int frames;
private long startTime;
private boolean mStart;
private long timePassed;
private float dt;
private long t_current;
private long t_prev;
private float dt_prev = 1;
private ValueAnimator animator;
private Bitmap mBitmap;
private FlashSystem mFlashSystem;
private Context context;
private int resolutionHandle;
private int mWidth;
private int mHeight;
private int timesRepeated;
private float delta;
private ExecutorService mExecutor = Executors.newSingleThreadExecutor();
public FlashSystemRenderer(GLSurfaceView glSurfaceView) {
mGlSurfaceView = glSurfaceView;
context = glSurfaceView.getContext();
}
@Override
public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
// Use culling to remove back faces.
GLES20.glEnable(GLES20.GL_CULL_FACE);
GLES20.glFrontFace(GLES20.GL_CW);
// Enable depth testing
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
// Position the eye in front of the origin.
final float eyeX = 0.0f;
final float eyeY = 0.0f;
final float eyeZ = 0.0f;
// We are looking toward the distance
final float lookX = 0.0f;
final float lookY = 0.0f;
final float lookZ = 1.0f;
// Set our up vector. This is where our head would be pointing were we holding the camera.
final float upX = 0.0f;
final float upY = 1.0f;
final float upZ = 0.0f;
Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ);
final String vertexShader = RawResourceReader.readTextFileFromRawResource(context, R.raw.flash_vert);
final String fragmentShader = RawResourceReader.readTextFileFromRawResource(context, R.raw.flash_frag);
final int vertexShaderHandle = ShaderHelper.compileShader(GLES20.GL_VERTEX_SHADER, vertexShader);
final int fragmentShaderHandle = ShaderHelper.compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentShader);
programHandle = ShaderHelper.createAndLinkProgram(vertexShaderHandle, fragmentShaderHandle,
new String[]{"a_Position", "a_TexCoordinate", "a_TileXY"});
}
@Override
public void onSurfaceChanged(GL10 unused, int width, int height) {
// Set the OpenGL viewport to the same size as the surface.
GLES20.glViewport(0, 0, width, height);
mWidth = width;
mHeight = height;
// Create a new perspective projection matrix. The height will stay the same
// while the width will vary as per aspect ratio.
final float ratio = (float) width / height;
final float left = -ratio;
@SuppressWarnings("UnnecessaryLocalVariable")
final float right = ratio;
final float bottom = -1.0f;
final float top = 1.0f;
final float near = 1.0f;
final float far = 10.0f;
this.ratio = ratio;
Matrix.frustumM(mProjectionMatrix, 0, left, right, bottom, top, near, far);
mStartTime = System.currentTimeMillis();
mExecutor.execute(new FlashsGenerator(this));
}
@Override
public void onDrawFrame(GL10 gl10) {
logFrame();
drawGl();
if (mFlashSystem != null) {
mFlashSystem.render();
}
}
private void drawGl() {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
GLES20.glUseProgram(programHandle);
// Set program handles
mvpMatrixHandle = GLES20.glGetUniformLocation(programHandle, "u_MVPMatrix");
mvMatrixHandle = GLES20.glGetUniformLocation(programHandle, "u_MVMatrix");
timeHandle = GLES20.glGetUniformLocation(programHandle, "u_Time");
resolutionHandle = GLES20.glGetUniformLocation(programHandle, "u_Resolution");
positionHandle = GLES20.glGetAttribLocation(programHandle, "a_Position");
normalHandle = GLES20.glGetAttribLocation(programHandle, "a_Normal");
textureCoordinateHandle = GLES20.glGetAttribLocation(programHandle, "a_TexCoordinate");
miscHandle = GLES20.glGetAttribLocation(programHandle, "a_Misc");
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.translateM(mModelMatrix, 0, 0.0f, 0.0f, 5f);
Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
// Pass in the modelview matrix.
GLES20.glUniformMatrix4fv(mvMatrixHandle, 1, false, mMVPMatrix, 0);
Matrix.multiplyMM(mTemporaryMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
System.arraycopy(mTemporaryMatrix, 0, mMVPMatrix, 0, 16);
// Pass in the combined matrix.
GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, mMVPMatrix, 0);
// Pass in u_Time
GLES20.glUniform1f(timeHandle, (System.currentTimeMillis() - mStartTime) / 3500f);
// u_Resolution
GLES20.glUniform2f(resolutionHandle, mWidth, mHeight);
GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
GLES20.glEnable(GLES20.GL_BLEND);
}
public void logFrame() {
frames++;
timePassed = (System.nanoTime() - startTime) / 1_000_000;
if(timePassed >= 10_000) {
frames = 0;
startTime = System.nanoTime();
}
}
public void onTouchEvent() {
if (mStart) {
reset();
}
mStart = !mStart;
mStartTime = System.nanoTime();
}
private void reset() {
if (animator != null) {
animator.cancel();
}
mStartTime = 0;
dt = 0;
t_prev = 0;
}
public FlashSystem getFlashSystem() {
return mFlashSystem;
}
public void setFlashSystem(final FlashSystem flashSystem) {
mFlashSystem = flashSystem;
}
public void queue(Runnable runnable) {
mGlSurfaceView.queueEvent(runnable);
}
}
-Added- flash_vert.glsl
uniform mat4 u_MVPMatrix; // A constant representing the combined model/view/projection matrix.
uniform mat4 u_MVMatrix; // A constant representing the combined model/view matrix.
uniform float u_Time;
uniform vec2 u_Resolution;
attribute vec4 a_Position; //initial
attribute vec2 a_TexCoordinate;
attribute vec4 a_Misc; //initial
varying vec2 v_TexCoordinate;
varying float v_Radius;
#define RADIUS 3.5
float rand( vec2 co )
{
return fract(sin(dot(co.xy ,vec2(12.9898,78.233))) * 43758.5453);
}
void rotate( in float angle, inout vec2 vector )
{
mat2 rotationMatrix = mat2( cos( angle ), -sin( angle ),
sin( angle ), cos( angle ));
vector *= rotationMatrix;
}
void main()
{
// Transform the vertex into eye space.
//v_Position = vec3(u_MVMatrix * a_Position);
float aspect = u_Resolution.x / u_Resolution.y;
// Pass through the texture coordinate.
v_TexCoordinate = a_TexCoordinate;
vec2 centerPos = a_Position.xy;
float f = mix(1.0, a_Misc.t, u_Time);
centerPos *= mod(f, RADIUS);
float size = a_Misc.s;
size = mix(0.0, size, mod(f, RADIUS)/RADIUS);
vec2 relativePos = vec2(
(a_TexCoordinate.s - 0.5) * 2.0 * size,
(a_TexCoordinate.t - 0.5) * 2.0 * size
);
vec2 v = vec2(0.0, 1.0);
vec4 pos = vec4(
relativePos + centerPos,
0.0,
1.0
);
gl_Position = u_MVPMatrix * pos;
v_Radius = size * 2.5;
}
-Added- flash_frag.glsl
precision mediump float;
uniform sampler2D uTexture;
varying vec2 vTexPosition;
void main() {
gl_FragColor = texture2D(uTexture, vTexPosition);
}
How can I add the PNG texture to my existing renderer and render them together, given that the PNG texture (with its transparency/alpha) should be on top of all the other objects? Everything I found from searching did not work; some of it does render, but it shows the PNG texture only.
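Is it something along these lines that I need? A minimal sketch of my assumption (R.drawable.logo is a placeholder name, and the actual quad drawing is left out):
// Load the PNG once, e.g. in onSurfaceCreated.
int[] logoTex = new int[1];
GLES20.glGenTextures(1, logoTex, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, logoTex[0]);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), R.drawable.logo);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
bitmap.recycle();
// At the end of onDrawFrame, draw a small centred quad with this texture,
// with blending on and the depth test off so it sits on top of everything else.
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
GLES20.glDisable(GLES20.GL_DEPTH_TEST);
// ... bind the quad's vertex/texcoord buffers, set the sampler uniform, glDrawArrays ...
GLES20.glEnable(GLES20.GL_DEPTH_TEST);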
I've been working with Google Cardboard for some research.
I ran into a problem when trying to open the camera in Google Cardboard: the aspect ratio. When I look at the camera preview through Google Cardboard, it looks thinner than in reality, as you can see in the images below...
Normal image:
Cardboard image:
And this is the code.
CardboardOverlayView.java
public class CardboardOverlayView extends LinearLayout {
private static final String TAG = CardboardOverlayView.class.getSimpleName();
private final CardboardOverlayEyeView mLeftView;
private final CardboardOverlayEyeView mRightView;
private AlphaAnimation mTextFadeAnimation;
public CardboardOverlayView(Context context, AttributeSet attrs) {
super(context, attrs);
setOrientation(HORIZONTAL);
LayoutParams params = new LayoutParams(
LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT, 1.0f);
params.setMargins(0, 0, 0, 0);
mLeftView = new CardboardOverlayEyeView(context, attrs);
mLeftView.setLayoutParams(params);
addView(mLeftView);
mRightView = new CardboardOverlayEyeView(context, attrs);
mRightView.setLayoutParams(params);
addView(mRightView);
// Set some reasonable defaults.
setDepthOffset(0.016f);
setColor(Color.rgb(150, 255, 180));
setVisibility(View.VISIBLE);
mTextFadeAnimation = new AlphaAnimation(1.0f, 0.0f);
mTextFadeAnimation.setDuration(5000);
}
public void show3DToast(String message) {
setText(message);
setTextAlpha(1f);
mTextFadeAnimation.setAnimationListener(new EndAnimationListener() {
@Override
public void onAnimationEnd(Animation animation) {
setTextAlpha(0f);
}
});
startAnimation(mTextFadeAnimation);
}
private abstract class EndAnimationListener implements Animation.AnimationListener {
@Override public void onAnimationRepeat(Animation animation) {}
@Override public void onAnimationStart(Animation animation) {}
}
private void setDepthOffset(float offset) {
mLeftView.setOffset(offset);
mRightView.setOffset(-offset);
}
private void setText(String text) {
mLeftView.setText(text);
mRightView.setText(text);
}
private void setTextAlpha(float alpha) {
mLeftView.setTextViewAlpha(alpha);
mRightView.setTextViewAlpha(alpha);
}
private void setColor(int color) {
mLeftView.setColor(color);
mRightView.setColor(color);
}
/**
* A simple view group containing some horizontally centered text underneath a horizontally
* centered image.
*
* This is a helper class for CardboardOverlayView.
*/
private class CardboardOverlayEyeView extends ViewGroup {
private final ImageView imageView;
private final TextView textView;
private float offset;
public CardboardOverlayEyeView(Context context, AttributeSet attrs) {
super(context, attrs);
imageView = new ImageView(context, attrs);
imageView.setScaleType(ImageView.ScaleType.CENTER_INSIDE);
imageView.setAdjustViewBounds(true); // Preserve aspect ratio.
addView(imageView);
textView = new TextView(context, attrs);
textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14.0f);
textView.setTypeface(textView.getTypeface(), Typeface.BOLD);
textView.setGravity(Gravity.CENTER);
textView.setShadowLayer(3.0f, 0.0f, 0.0f, Color.DKGRAY);
addView(textView);
}
public void setColor(int color) {
imageView.setColorFilter(color);
textView.setTextColor(color);
}
public void setText(String text) {
textView.setText(text);
}
public void setTextViewAlpha(float alpha) {
textView.setAlpha(alpha);
}
public void setOffset(float offset) {
this.offset = offset;
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
// Width and height of this ViewGroup.
final int width = right - left;
final int height = bottom - top;
// The size of the image, given as a fraction of the dimension of this ViewGroup. We multiply
// both width and height by this number to compute the image's bounding box. Inside the
// box, the image is horizontally and vertically centered.
final float imageSize = 0.12f;
// The fraction of this ViewGroup's height by which we shift the image off the ViewGroup's
// center. Positive values shift downwards, negative values shift upwards.
final float verticalImageOffset = -0.07f;
// Vertical position of the text, specified in fractions of this ViewGroup's height.
final float verticalTextPos = 0.52f;
// Layout ImageView
float imageMargin = (1.0f - imageSize) / 2.0f;
float leftMargin = (int) (width * (imageMargin + offset));
float topMargin = (int) (height * (imageMargin + verticalImageOffset));
imageView.layout(
(int) leftMargin, (int) topMargin,
(int) (leftMargin + width * imageSize), (int) (topMargin + height * imageSize));
// Layout TextView
leftMargin = offset * width;
topMargin = height * verticalTextPos;
textView.layout(
(int) leftMargin, (int) topMargin,
(int) (leftMargin + width), (int) (topMargin + height * (1.0f - verticalTextPos)));
}
}
}
MainActivity.java
public class MainActivity extends CardboardActivity implements CardboardView.StereoRenderer, OnFrameAvailableListener {
private static final String TAG = "VRCamMtMMainAc";
private static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
private Camera camera;
private final String vertexShaderCode =
"attribute vec4 position;" +
"attribute vec2 inputTextureCoordinate;" +
"varying vec2 textureCoordinate;" +
"void main()" +
"{" +
"gl_Position = position;" +
"textureCoordinate = inputTextureCoordinate;" +
"}";
private final String fragmentShaderCode =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;" +
"varying vec2 textureCoordinate; \n" +
"uniform samplerExternalOES s_texture; \n" +
"void main(void) {" +
" gl_FragColor = texture2D( s_texture, textureCoordinate );\n" +
//" gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);\n" +
"}";
private FloatBuffer vertexBuffer, textureVerticesBuffer, vertexBuffer2;
private ShortBuffer drawListBuffer, buf2;
private int mProgram;
private int mPositionHandle, mPositionHandle2;
private int mColorHandle;
private int mTextureCoordHandle;
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 2;
static float squareVertices[] = { // in counterclockwise order:
-1.0f, -1.0f, // 0.left - mid
1.0f, -1.0f, // 1. right - mid
-1.0f, 1.0f, // 2. left - top
1.0f, 1.0f, // 3. right - top
};
private short drawOrder[] = {0, 2, 1, 1, 2, 3}; // order to draw vertices
private short drawOrder2[] = {2, 0, 3, 3, 0, 1}; // order to draw vertices
static float textureVertices[] = {
0.0f, 1.0f, // A. left-bottom
1.0f, 1.0f, // B. right-bottom
0.0f, 0.0f, // C. left-top
1.0f, 0.0f // D. right-top
};
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex
private ByteBuffer indexBuffer; // Buffer for index-array
private int texture;
private CardboardOverlayView mOverlayView;
private CardboardView cardboardView;
private SurfaceTexture surface;
private float[] mView;
private float[] mCamera;
public void startCamera(int texture) {
surface = new SurfaceTexture(texture);
surface.setOnFrameAvailableListener(this);
camera = Camera.open();
Camera.Parameters params = camera.getParameters();
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
// Check what resolutions are supported by your camera
/*List<Camera.Size> sizes = params.getSupportedPictureSizes();
// Iterate through all available resolutions and choose one.
// The chosen resolution will be stored in mSize.
Camera.Size mSize = null;
for (Camera.Size size : sizes) {
Log.i(TAG, "Available resolution: " + size.width + "x" + size.height);
mSize = size;
}
params.setPictureSize(5312,2988);*/
camera.setParameters(params);
try {
camera.setPreviewTexture(surface);
camera.startPreview();
} catch (IOException ioe)
{
Log.i(TAG, "CAM LAUNCH FAILED");
}
}
static private int createTexture() {
int[] texture = new int[1];
GLES20.glGenTextures(1, texture, 0);
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture[0]);
GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
return texture[0];
}
private int loadGLShader(int type, String code) {
int shader = GLES20.glCreateShader(type);
GLES20.glShaderSource(shader, code);
GLES20.glCompileShader(shader);
// Get the compilation status.
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
// If the compilation failed, delete the shader.
if (compileStatus[0] == 0) {
Log.e(TAG, "Error compiling shader: " + GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
if (shader == 0) {
throw new RuntimeException("Error creating shader.");
}
return shader;
}
/**
* Checks if we've had an error inside of OpenGL ES, and if so what that error is.
*
* @param func
*/
private static void checkGLError(String func) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, func + ": glError " + error);
throw new RuntimeException(func + ": glError " + error);
}
}
/**
* Sets the view to our CardboardView and initializes the transformation matrices we will use
* to render our scene.
*
* @param savedInstanceState
*/
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
cardboardView = (CardboardView) findViewById(R.id.cardboard_view);
cardboardView.setRenderer(this);
setCardboardView(cardboardView);
mCamera = new float[16];
mView = new float[16];
mOverlayView = (CardboardOverlayView) findViewById(R.id.overlay);
//mOverlayView.show3DToast("Pull the magnet when you find an object.");
}
@Override
public void onRendererShutdown() {
Log.i(TAG, "onRendererShutdown");
}
@Override
public void onSurfaceChanged(int width, int height) {
Log.i(TAG, "onSurfaceChanged");
}
/**
* Creates the buffers we use to store information about the 3D world. OpenGL doesn't use Java
* arrays, but rather needs data in a format it can understand. Hence we use ByteBuffers.
*
* @param config The EGL configuration used when creating the surface.
*/
@Override
public void onSurfaceCreated(EGLConfig config) {
Log.i(TAG, "onSurfaceCreated");
GLES20.glClearColor(0.1f, 0.1f, 0.1f, 0.5f); // Dark background so text shows up well
ByteBuffer bb = ByteBuffer.allocateDirect(squareVertices.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(squareVertices);
vertexBuffer.position(0);
ByteBuffer dlb = ByteBuffer.allocateDirect(drawOrder.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
ByteBuffer bb2 = ByteBuffer.allocateDirect(textureVertices.length * 4);
bb2.order(ByteOrder.nativeOrder());
textureVerticesBuffer = bb2.asFloatBuffer();
textureVerticesBuffer.put(textureVertices);
textureVerticesBuffer.position(0);
int vertexShader = loadGLShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = loadGLShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
mProgram = GLES20.glCreateProgram(); // create empty OpenGL ES Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mProgram);
texture = createTexture();
startCamera(texture);
}
/**
* Prepares OpenGL ES before we draw a frame.
*
* @param headTransform The head transformation in the new frame.
*/
@Override
public void onNewFrame(HeadTransform headTransform) {
float[] mtx = new float[16];
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
surface.updateTexImage();
surface.getTransformMatrix(mtx);
}
@Override
public void onFrameAvailable(SurfaceTexture arg0) {
this.cardboardView.requestRender();
}
/**
* Draws a frame for an eye. The transformation for that eye (from the camera) is passed in as
* a parameter.
*
* @param transform The transformations to apply to render this eye.
*/
@Override
public void onDrawEye(EyeTransform transform) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
GLES20.glUseProgram(mProgram);
GLES20.glActiveTexture(GL_TEXTURE_EXTERNAL_OES);
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture);
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "position");
GLES20.glEnableVertexAttribArray(mPositionHandle);
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
false, vertexStride, vertexBuffer);
mTextureCoordHandle = GLES20.glGetAttribLocation(mProgram, "inputTextureCoordinate");
GLES20.glEnableVertexAttribArray(mTextureCoordHandle);
GLES20.glVertexAttribPointer(mTextureCoordHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
false, vertexStride, textureVerticesBuffer);
mColorHandle = GLES20.glGetAttribLocation(mProgram, "s_texture");
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length,
GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
GLES20.glDisableVertexAttribArray(mTextureCoordHandle);
Matrix.multiplyMM(mView, 0, transform.getEyeView(), 0, mCamera, 0);
}
@Override
public void onFinishFrame(Viewport viewport) {
}
@Override
public void onCardboardTrigger() {
}
}
activity_main.xml
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:id="#+id/ui_layout"
android:orientation="vertical"
android:layout_width="fill_parent"
android:layout_height="fill_parent" >
<com.google.vrtoolkit.cardboard.CardboardView
android:id="#+id/cardboard_view"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:layout_alignParentTop="true"
android:layout_alignParentLeft="true" />
<com.oculab.mehmettaha.vrcamera.CardboardOverlayView
android:id="#+id/overlay"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:layout_alignParentLeft="true"
android:layout_alignParentTop="true" />
</RelativeLayout>
So, how can I handle this problem? I've tried changing the camera resolution, but it didn't work.
Cheers!
The problem is the camera preview size. It expects to be displayed at or near full-screen size, but you're trying to show it at half the width with the same height. After trying this I found the aspect ratio much improved: https://stackoverflow.com/a/19592492/922339. It set the optimal preview size to 352 by 288 on my Nexus 5.
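A rough sketch of that idea (not the code from the linked answer; surfaceWidth and surfaceHeight are placeholders for the view's real dimensions): pick the supported preview size whose aspect ratio is closest to the full surface and apply it before starting the preview.
Camera.Parameters params = camera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
float target = (float) surfaceWidth / surfaceHeight;
Camera.Size best = sizes.get(0);
for (Camera.Size s : sizes) {
    float a = (float) s.width / s.height;
    float bestA = (float) best.width / best.height;
    if (Math.abs(a - target) < Math.abs(bestA - target)) {
        best = s;   // keep the size with the closest aspect ratio
    }
}
params.setPreviewSize(best.width, best.height);
camera.setParameters(params);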
You probably want the camera views to fill the screen, and don't mind if the preview is cropped on the left and right. So, when you set the texture coordinates (textureVertices), use left = 0.25 and right = 0.75.
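Applied to the question's textureVertices array, that suggestion might look like this (a sketch, assuming the preview texture is in its usual orientation):
static float textureVertices[] = {
    0.25f, 1.0f, // A. left-bottom
    0.75f, 1.0f, // B. right-bottom
    0.25f, 0.0f, // C. left-top
    0.75f, 0.0f  // D. right-top
};
This samples only the middle half of the camera texture horizontally, so each half-width eye view keeps roughly the original aspect ratio.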
I'm starting a simple 2D GLES 2.0 Android application.
For some strange reason I always get one point in the center of the screen instead of the vertex coordinates passed to the shader.
I'm clearly doing something wrong, but I can't figure out what.
P.S. I'm not using any projection matrices, because I need a standard quad for drawing. I tried a projection - it did not help.
public class TestActivity extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
GLSurfaceView glv = new GLSurfaceView(this);
glv.setEGLContextClientVersion(2);
SimpleRenderer renderer = new SimpleRenderer(this);
glv.setRenderer(renderer);
glv.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
setContentView(glv);
}
}
public class SimpleRenderer implements GLSurfaceView.Renderer {
private final float[] squareVertices = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
private FloatBuffer squareBuffer;
private final Context context;
private int text_program;
private int aPositionLocation2;
public SimpleRenderer(Context context) {
this.context = context;
squareBuffer = ByteBuffer.allocateDirect(squareVertices.length * 4).asFloatBuffer();
squareBuffer.put(squareVertices).position(0);
}
public void onDrawFrame(GL10 gl) {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(text_program);
glEnableVertexAttribArray(aPositionLocation2);
glVertexAttribPointer(aPositionLocation2, 2, GL_FLOAT, false, 0, squareBuffer);
glDrawArrays(GL_POINTS, 0, 4);
glDisableVertexAttribArray(aPositionLocation2);
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
glViewport(0, 0, width, height);
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
text_program = ShaderHelper.buildProgram(TextResourceReader.readTextFileFromResource(context, R.raw.texture_vertex_shader),
TextResourceReader.readTextFileFromResource(context, R.raw.texture_fragment_shader));
aPositionLocation2 = glGetAttribLocation(text_program, "a_Position");
glClearColor(0f, 0f, 0f, 0f);
}
}
public class ShaderHelper {
private static final String TAG = "ShaderHelper";
public static int compileVertexShader(String shaderCode) {
return compileShader(GL_VERTEX_SHADER, shaderCode);
}
public static int compileFragmentShader(String shaderCode) {
return compileShader(GL_FRAGMENT_SHADER, shaderCode);
}
private static int compileShader(int type, String shaderCode) {
final int shaderObjectId = glCreateShader(type);
if (shaderObjectId == 0) Log.w(TAG, "Shader not created!");
glShaderSource(shaderObjectId, shaderCode);
glCompileShader(shaderObjectId);
final int[] compileStatus = new int[1];
glGetShaderiv(shaderObjectId, GL_COMPILE_STATUS, compileStatus, 0);
Log.v(TAG, "Results of compiling source:" + "\n" + shaderCode + "\n:"
+ glGetShaderInfoLog(shaderObjectId));
if (compileStatus[0] == 0) {
// If it failed, delete the shader object.
glDeleteShader(shaderObjectId);
Log.w(TAG, "Compilation of shader failed.");
return 0;
}
return shaderObjectId;
}
public static int linkProgram(int vertexShaderId, int fragmentShaderId) {
final int programObjectId = glCreateProgram();
if (programObjectId == 0) {
Log.w(TAG, "Could not create new program");
return 0;
}
glAttachShader(programObjectId, vertexShaderId);
glAttachShader(programObjectId, fragmentShaderId);
glLinkProgram(programObjectId);
final int[] linkStatus = new int[1];
glGetProgramiv(programObjectId, GL_LINK_STATUS, linkStatus, 0);
Log.v(TAG, "Results of linking program:\n"
+ glGetProgramInfoLog(programObjectId));
if (linkStatus[0] == 0) {
// If it failed, delete the program object.
glDeleteProgram(programObjectId);
Log.w(TAG, "Linking of program failed.");
return 0;
}
return programObjectId;
}
public static boolean validateProgram(int programObjectId) {
glValidateProgram(programObjectId);
final int[] validateStatus = new int[1];
glGetProgramiv(programObjectId, GL_VALIDATE_STATUS, validateStatus, 0);
Log.v(TAG, "Results of validating program: " + validateStatus[0]
+ "\nLog:" + glGetProgramInfoLog(programObjectId));
return validateStatus[0] != 0;
}
public static int buildProgram(String vertexShaderSource,
String fragmentShaderSource) {
int program;
// Compile the shaders.
int vertexShader = compileVertexShader(vertexShaderSource);
int fragmentShader = compileFragmentShader(fragmentShaderSource);
// Link them into a shader program.
program = linkProgram(vertexShader, fragmentShader);
validateProgram(program);
return program;
}
}
vertex shader:
attribute vec4 a_Position;
void main()
{
gl_Position = a_Position;
gl_PointSize = 10.0;
}
fragment shader:
void main()
{
gl_FragColor = vec4(1.0,1.0,1.0,1.0);
}
The problem was indeed in passing the vertex data: OpenGL expects the buffer in native byte order (little-endian on the x86 emulator), but I allocated the buffer in Java, which defaults to big-endian byte order, so corrupted float values were passed to the vertex shader. After specifying the byte order on the byte buffer, everything works fine.
squareBuffer = ByteBuffer.allocateDirect(squareVertices.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();