Add png texture in GLSurfaceView.Renderer - android

I want to add another Texture from a PNG Image, it is more like a logo so I wanted it to be in center and in the background shows the GLSurfaceView with all its effects and models. Please see my nicely done illustrations.
---------------------------
| GLSurfaceView * |
| |
| |
| |
| * ------------- |
| | | |
| | PNG | |
| | Texture | * |
| * | | |
| ------------- |
| * |
| |
| |
| * * |
---------------------------
LEGEND: * = some good effects :D (pweew! pweew!)
I am trying to make a live wallpaper using a WallpaperService, which has an inner class that extends Engine; that class in turn has an inner class that extends GLSurfaceView. What I have so far is the GLSurfaceView, and it is working great!
-Added- FlashWallpaperService
public class FlashWallpaperService extends WallpaperService {
#Override
public Engine onCreateEngine() {
return new GLEngine();
}
public class GLEngine extends Engine {
private FlashGLSurfaceView flashGLSurfaceView;
#Override
public void onCreate(SurfaceHolder surfaceHolder) {
super.onCreate(surfaceHolder);
flashGLSurfaceView = new FlashGLSurfaceView(FlashWallpaperService.this);
}
#Override
public void onVisibilityChanged(boolean visible) {
super.onVisibilityChanged(visible);
if (visible) {
flashGLSurfaceView.onResume();
} else {
flashGLSurfaceView.onPause();
}
}
#Override
public void onDestroy() {
super.onDestroy();
flashGLSurfaceView.onDestroy();
}
class FlashGLSurfaceView extends GLSurfaceView {
private ActivityManager activityManager;
private ConfigurationInfo configurationInfo;
private boolean supportsEs2;
public FlashGLSurfaceView(Context context) {
super(context);
if (!isInEditMode()) {
activityManager = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
configurationInfo = activityManager.getDeviceConfigurationInfo();
supportsEs2 = configurationInfo.reqGlEsVersion >= 0x20000;
if (supportsEs2) {
// Request an OpenGL ES 2.0 compatible context.
this.setEGLContextClientVersion(2);
// Set the renderer to our demo renderer, defined below.
FlashSystemRenderer mRenderer = new FlashSystemRenderer(this);
this.setRenderer(mRenderer);
this.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
} else {
if (!isInEditMode()) throw new UnsupportedOperationException();
}
}
}
public FlashGLSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
this.setEGLContextClientVersion(2);
this.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
this.getHolder().setFormat(PixelFormat.TRANSLUCENT);
this.setZOrderOnTop(false);
if (!isInEditMode()) {
activityManager = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
configurationInfo = activityManager.getDeviceConfigurationInfo();
supportsEs2 = configurationInfo.reqGlEsVersion >= 0x20000;
if (supportsEs2) {
// Request an OpenGL ES 2.0 compatible context.
this.setEGLContextClientVersion(2);
// Set the renderer to our demo renderer, defined below.
FlashSystemRenderer mRenderer = new FlashSystemRenderer(this);
this.setRenderer(mRenderer);
this.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
} else {
if (!isInEditMode()) throw new UnsupportedOperationException();
}
}
}
#Override
public SurfaceHolder getHolder() {
return getSurfaceHolder();
}
public void onDestroy() {
super.onDetachedFromWindow();
}
}
}
}
-Added- Renderer class
public class FlashSystemRenderer implements GLSurfaceView.Renderer {
public float ratio;
public int mvpMatrixHandle;
public int mvMatrixHandle = -1;
public int positionHandle;
public int normalHandle;
public int textureCoordinateHandle;
public int programHandle;
public int miscHandle;
public int sizeX = 35;
public int sizeY = 70;
public float mTime;
private GLSurfaceView mGlSurfaceView;
/**
* Store the model matrix. This matrix is used to move models from object space (where each model can be thought
* of being located at the center of the universe) to world space.
*/
private float[] mModelMatrix = new float[16];
/**
* Store the view matrix. This can be thought of as our camera. This matrix transforms world space to eye space;
* it positions things relative to our eye.
*/
private float[] mViewMatrix = new float[16];
/** Store the projection matrix. This is used to project the scene onto a 2D viewport. */
private float[] mProjectionMatrix = new float[16];
/** Allocate storage for the final combined matrix. This will be passed into the shader program. */
private float[] mMVPMatrix = new float[16];
private float[] mTemporaryMatrix = new float[16];
private int timeHandle;
private long mStartTime;
private int frames;
private long startTime;
private boolean mStart;
private long timePassed;
private float dt;
private long t_current;
private long t_prev;
private float dt_prev = 1;
private ValueAnimator animator;
private Bitmap mBitmap;
private FlashSystem mFlashSystem;
private Context context;
private int resolutionHandle;
private int mWidth;
private int mHeight;
private int timesRepeated;
private float delta;
private ExecutorService mExecutor = Executors.newSingleThreadExecutor();
public FlashSystemRenderer(GLSurfaceView glSurfaceView) {
mGlSurfaceView = glSurfaceView;
context = glSurfaceView.getContext();
}
#Override
public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
// Use culling to remove back faces.
GLES20.glEnable(GLES20.GL_CULL_FACE);
GLES20.glFrontFace(GLES20.GL_CW);
// Enable depth testing
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
// Position the eye in front of the origin.
final float eyeX = 0.0f;
final float eyeY = 0.0f;
final float eyeZ = 0.0f;
// We are looking toward the distance
final float lookX = 0.0f;
final float lookY = 0.0f;
final float lookZ = 1.0f;
// Set our up vector. This is where our head would be pointing were we holding the camera.
final float upX = 0.0f;
final float upY = 1.0f;
final float upZ = 0.0f;
Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ);
final String vertexShader = RawResourceReader.readTextFileFromRawResource(context, R.raw.flash_vert);
final String fragmentShader = RawResourceReader.readTextFileFromRawResource(context, R.raw.flash_frag);
final int vertexShaderHandle = ShaderHelper.compileShader(GLES20.GL_VERTEX_SHADER, vertexShader);
final int fragmentShaderHandle = ShaderHelper.compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentShader);
programHandle = ShaderHelper.createAndLinkProgram(vertexShaderHandle, fragmentShaderHandle,
new String[]{"a_Position", "a_TexCoordinate", "a_TileXY"});
}
#Override
public void onSurfaceChanged(GL10 unused, int width, int height) {
// Set the OpenGL viewport to the same size as the surface.
GLES20.glViewport(0, 0, width, height);
mWidth = width;
mHeight = height;
// Create a new perspective projection matrix. The height will stay the same
// while the width will vary as per aspect ratio.
final float ratio = (float) width / height;
final float left = -ratio;
#SuppressWarnings("UnnecessaryLocalVariable")
final float right = ratio;
final float bottom = -1.0f;
final float top = 1.0f;
final float near = 1.0f;
final float far = 10.0f;
this.ratio = ratio;
Matrix.frustumM(mProjectionMatrix, 0, left, right, bottom, top, near, far);
mStartTime = System.currentTimeMillis();
mExecutor.execute(new FlashsGenerator(this));
}
#Override
public void onDrawFrame(GL10 gl10) {
logFrame();
drawGl();
if (mFlashSystem != null) {
mFlashSystem.render();
}
}
private void drawGl() {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
GLES20.glUseProgram(programHandle);
// Set program handles
mvpMatrixHandle = GLES20.glGetUniformLocation(programHandle, "u_MVPMatrix");
mvMatrixHandle = GLES20.glGetUniformLocation(programHandle, "u_MVMatrix");
timeHandle = GLES20.glGetUniformLocation(programHandle, "u_Time");
resolutionHandle = GLES20.glGetUniformLocation(programHandle, "u_Resolution");
positionHandle = GLES20.glGetAttribLocation(programHandle, "a_Position");
normalHandle = GLES20.glGetAttribLocation(programHandle, "a_Normal");
textureCoordinateHandle = GLES20.glGetAttribLocation(programHandle, "a_TexCoordinate");
miscHandle = GLES20.glGetAttribLocation(programHandle, "a_Misc");
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.translateM(mModelMatrix, 0, 0.0f, 0.0f, 5f);
Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
// Pass in the modelview matrix.
GLES20.glUniformMatrix4fv(mvMatrixHandle, 1, false, mMVPMatrix, 0);
Matrix.multiplyMM(mTemporaryMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
System.arraycopy(mTemporaryMatrix, 0, mMVPMatrix, 0, 16);
// Pass in the combined matrix.
GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, mMVPMatrix, 0);
// Pass in u_Time
GLES20.glUniform1f(timeHandle, (System.currentTimeMillis() - mStartTime) / 3500f);
// u_Resolution
GLES20.glUniform2f(resolutionHandle, mWidth, mHeight);
GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
GLES20.glEnable(GLES20.GL_BLEND);
}
public void logFrame() {
frames++;
timePassed = (System.nanoTime() - startTime) / 1_000_000;
if(timePassed >= 10_000) {
frames = 0;
startTime = System.nanoTime();
}
}
public void onTouchEvent() {
if (mStart) {
reset();
}
mStart = !mStart;
mStartTime = System.nanoTime();
}
private void reset() {
if (animator != null) {
animator.cancel();
}
mStartTime = 0;
dt = 0;
t_prev = 0;
}
public FlashSystem getFlashSystem() {
return mFlashSystem;
}
public void setFlashSystem(final FlashSystem flashSystem) {
mFlashSystem = flashSystem;
}
public void queue(Runnable runnable) {
mGlSurfaceView.queueEvent(runnable);
}
}
-Added- flash_vert.glsl
// flash_vert.glsl — per-particle vertex shader: each quad corner carries the
// particle centre in a_Position and per-particle size/speed data in a_Misc.
uniform mat4 u_MVPMatrix; // A constant representing the combined model/view/projection matrix.
uniform mat4 u_MVMatrix; // A constant representing the combined model/view matrix.
uniform float u_Time;
uniform vec2 u_Resolution;
attribute vec4 a_Position; //initial
attribute vec2 a_TexCoordinate;
attribute vec4 a_Misc; //initial
// NOTE(review): varyings must be declared with identical names in the fragment
// shader, or the program fails to link (the posted flash_frag.glsl uses
// "vTexPosition", which does not match).
varying vec2 v_TexCoordinate;
varying float v_Radius;
#define RADIUS 3.5
// Pseudo-random value in [0,1) from a 2D seed (currently unused in main()).
float rand( vec2 co )
{
return fract(sin(dot(co.xy ,vec2(12.9898,78.233))) * 43758.5453);
}
// Rotates `vector` in place by `angle` radians (currently unused in main()).
void rotate( in float angle, inout vec2 vector )
{
mat2 rotationMatrix = mat2( cos( angle ), -sin( angle ),
sin( angle ), cos( angle ));
vector *= rotationMatrix;
}
void main()
{
// Transform the vertex into eye space.
//v_Position = vec3(u_MVMatrix * a_Position);
// Viewport aspect ratio (computed but not used yet).
float aspect = u_Resolution.x / u_Resolution.y;
// Pass through the texture coordinate.
v_TexCoordinate = a_TexCoordinate;
// The particle centre drifts outward over time, wrapping at RADIUS.
vec2 centerPos = a_Position.xy;
float f = mix(1.0, a_Misc.t, u_Time);
centerPos *= mod(f, RADIUS);
// The particle grows from zero to its full size over one RADIUS cycle.
float size = a_Misc.s;
size = mix(0.0, size, mod(f, RADIUS)/RADIUS);
// Expand the quad corner around the centre: the texture coordinate acts as a
// [-1, 1] corner offset scaled by the particle size.
vec2 relativePos = vec2(
(a_TexCoordinate.s - 0.5) * 2.0 * size,
(a_TexCoordinate.t - 0.5) * 2.0 * size
);
vec2 v = vec2(0.0, 1.0); // currently unused
vec4 pos = vec4(
relativePos + centerPos,
0.0,
1.0
);
gl_Position = u_MVPMatrix * pos;
v_Radius = size * 2.5;
}
-Added- flash_frag.glsl
precision mediump float;

// Texture sampler bound by the renderer.
uniform sampler2D uTexture;

// BUG FIX: this varying was named "vTexPosition", which does not match the
// "v_TexCoordinate" declared in flash_vert.glsl — in GLSL ES 2.0 varyings are
// matched by name, so the program failed to link.
varying vec2 v_TexCoordinate;

void main() {
    gl_FragColor = texture2D(uTexture, v_TexCoordinate);
}
How can I add the PNG texture to my existing renderer so they render together, given that the PNG texture (which has transparency/alpha) should sit on top of all the other objects? Everything I found while searching either did not work at all or, when it did, rendered the PNG texture only.

Related

OpenGL ES Fatal signal 11 (SIGSEGV), code 1, fault addr 0x8 in tid 8367 (GLThread 3562)

I have a crash in my call to glDrawArrays when trying to render a .obj loaded using a library. I don't know what is happening, since it is my first time using OpenGL ES. My guess was that the number of triangles was wrong, but after trying glDrawArrays with 10 as the count parameter, I realised this might not be the problem.
The code of my renderer:
private class RocketArrowRenderer implements GLSurfaceView.Renderer {
private final int mBytesPerFloat = 4;
private Context mContext;
private FloatBuffer mVertices;
private int mMVPMatrixHandle;
private int mPositionHandle;
private float[] mModelMatrix = new float[16];
private int mColorHandle;
private float[] mMVPMatrix = new float[16];
private final int mStrideBytes = 3 * mBytesPerFloat;
private final int mPositionOffset = 0;
private final int mPositionDataSize = 3;
private final int mColorOffset = 3;
private final int mColorDataSize = 4;
private float[] mViewMatrix = new float[16];
final String vertexShader =
"uniform mat4 u_MVPMatrix; \n" // A constant representing the combined model/view/projection matrix.
+ "attribute vec4 a_Position; \n" // Per-vertex position information we will pass in.
+ "uniform vec4 a_Color; \n" // Per-vertex color information we will pass in.
+ "varying vec4 v_Color; \n" // This will be passed into the fragment shader.
+ "void main() \n" // The entry point for our vertex shader.
+ "{ \n"
+ " v_Color = a_Color; \n" // Pass the color through to the fragment shader.
// It will be interpolated across the triangle.
+ " gl_Position = u_MVPMatrix \n" // gl_Position is a special variable used to store the final position.
+ " * a_Position; \n" // Multiply the vertex by the matrix to get the final point in
+ "} \n"; // normalized screen coordinates.
final String fragmentShader =
"precision mediump float; \n" // Set the default precision to medium. We don't need as high of a
// precision in the fragment shader.
+ "varying vec4 v_Color; \n" // This is the color from the vertex shader interpolated across the
// triangle per fragment.
+ "void main() \n" // The entry point for our fragment shader.
+ "{ \n"
+ " gl_FragColor = v_Color; \n" // Pass the color directly through the pipeline.
+ "} \n";
final float eyeX = 0.0f;
final float eyeY = 0.0f;
final float eyeZ = 25.0f;
final float lookX = 0.0f;
final float lookY = 0.0f;
final float lookZ = 0.0f;
final float upX = 0.0f;
final float upY = 1.0f;
final float upZ = 0.0f;
private boolean mObjLoaded = false;
public RocketArrowRenderer(Context context)
{
mContext = context;
new Thread(new Runnable() {
public void run() {
Resources res = mContext.getResources();
InputStream inputStream = res.openRawResource(R.raw.falcon_heavy_obj);
Obj obj = null;
try {
obj = ObjUtils.convertToRenderable(ObjReader.read(inputStream));
} catch (IOException e) {
e.printStackTrace();
}
mVertices = ObjData.getVertices(obj);
mObjLoaded = true;
}
}).start();
}
#Override
public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
GLES20.glClearColor(0.5f, 0.5f, 0.5f, 0.5f);
Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ);
int vertexShaderHandle = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
if (vertexShaderHandle != 0)
{
GLES20.glShaderSource(vertexShaderHandle, vertexShader);
GLES20.glCompileShader(vertexShaderHandle);
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(vertexShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
if (compileStatus[0] == 0)
{
GLES20.glDeleteShader(vertexShaderHandle);
vertexShaderHandle = 0;
}
}
if (vertexShaderHandle == 0)
{
throw new RuntimeException("Error creating vertex shader.");
}
// Load in the vertex shader.
int fragmentShaderHandle = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
if(fragmentShader != null){
GLES20.glShaderSource(fragmentShaderHandle, fragmentShader);
GLES20.glCompileShader(fragmentShaderHandle);
final int[] compileStatus2 = new int[1];
GLES20.glGetShaderiv(vertexShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus2, 0);
if(compileStatus2[0] == 0){
GLES20.glDeleteShader(fragmentShaderHandle);
fragmentShaderHandle = 0;
}
}
if(fragmentShader == null){
throw new RuntimeException("Error creating fragment shader");
}
int programHandle = GLES20.glCreateProgram();
if (programHandle != 0)
{
GLES20.glAttachShader(programHandle, vertexShaderHandle);
GLES20.glAttachShader(programHandle, fragmentShaderHandle);
GLES20.glBindAttribLocation(programHandle, 0, "a_Position");
GLES20.glBindAttribLocation(programHandle, 1, "a_Color");
GLES20.glLinkProgram(programHandle);
final int[] linkStatus = new int[1];
GLES20.glGetProgramiv(programHandle, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] == 0)
{
GLES20.glDeleteProgram(programHandle);
programHandle = 0;
}
}
if (programHandle == 0) {
throw new RuntimeException("Error creating program.");
}
mMVPMatrixHandle = GLES20.glGetUniformLocation(programHandle, "u_MVPMatrix");
mPositionHandle = GLES20.glGetAttribLocation(programHandle, "a_Position");
mColorHandle = GLES20.glGetAttribLocation(programHandle, "a_Color");
GLES20.glUseProgram(programHandle);
}
private float[] mProjectionMatrix = new float[16];
#Override
public void onSurfaceChanged(GL10 glUnused, int width, int height)
{
GLES20.glViewport(0, 0, width, height);
final float ratio = (float) width / height;
final float left = -ratio;
final float right = ratio;
final float bottom = -1.0f;
final float top = 8.0f;
final float near = 1.0f;
final float far = 10.0f;
Matrix.frustumM(mProjectionMatrix, 0, left, right, bottom, top, near, far);
}
#Override
public void onDrawFrame(GL10 glUnused)
{
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
Matrix.setIdentityM(mModelMatrix, 0);
if(mObjLoaded){
draw();
}
}
private void draw() {
int numberOfTriangles = mVertices.position(0).remaining() / 3;
//mVertices.position(mPositionOffset);
GLES20.glVertexAttribPointer(mPositionHandle, mPositionDataSize, GLES20.GL_FLOAT, false,
0, 0);
GLES20.glEnableVertexAttribArray(mPositionHandle);
//and later, in draw
GLES20.glUniform4f(mColorHandle, 1.0f, 0.0f, 0.0f, 1.0f); //red!
Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, numberOfTriangles);
}
}
You need to pass the actual vertex buffer to glVertexAttribPointer instead of the integer 0 — with no VBO bound, the final argument is interpreted as a client-side pointer, so 0 dereferences a null pointer and crashes with SIGSEGV. Also make sure GLES20.glEnableVertexAttribArray(mPositionHandle); has been called before drawing:
GLES20.glVertexAttribPointer(mPositionHandle, mPositionDataSize, GLES20.GL_FLOAT, false,
0, mVertices);

android opengl es 2 display after several rotate error

I am trying to draw lines connected to each other. Let's say they are creating a
rectangle. At each corner also I have points. When I have tested code in my
Samsung Galaxy Note 4 after several rotations, using touch, all rectangle ended up
with a single point at the center. I could not figure out why this was happening.
Code works fine in emulator. Also I am storing line and point data as array list
in my code. These array lists are used to create line and point object classes to
render.
public class GLRender implements GLSurfaceView.Renderer {
private final float[] mMVPMatrix = new float[16];
private final float[] mProjectionMatrix = new float[16];
private final float[] mViewMatrix = new float[16];
private float[] mRotationMatrix = new float[16];
private float[] mVRMatrix = new float[16];
private static ArrayList<Line> DrLine = new ArrayList<Line>();
private static ArrayList<Point> DrPoint = new ArrayList<Point>();
private float ratio;
private float Joint1;
private float Joint2;
private float x1, y1, z1, x2, y2, z2;
private float px, py, pz, ps;
private float[] nearN = new float[4];
private static ArrayList<ArrayList<String>> LineArray = new ArrayList<ArrayList<String>>();
private ArrayList<String> L = new ArrayList<String>();
private ArrayList<String> P = new ArrayList<String>();
private static ArrayList<ArrayList<String>> PointArray = new ArrayList<ArrayList<String>>();
private static ArrayList<String> EmptyLine = new ArrayList<String>();
private static ArrayList<String> EmptyPoint = new ArrayList<String>();
private static ArrayList<ArrayList<ArrayList<String>>> UndoL = new ArrayList<ArrayList<ArrayList<String>>>();
private static ArrayList<ArrayList<ArrayList<String>>> UndoP = new ArrayList<ArrayList<ArrayList<String>>>();
// The Activity reference is not retained; the parameter exists only for API
// compatibility with callers.
public GLRender(Activity activity) {
}
// Renderer lifecycle: only the clear color is set here; shaders are compiled
// lazily by each Line/Point object via loadShader().
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
// Set the background frame color
//GLES20.glClearColor(0.53f, 0.53f, 0.53f, 1.0f);
GLES20.glClearColor(0.474f, 0.537f, 0.078f, 1.0f);
}
// Draws the scene: rebuilds the line/point lists from the string tables, then
// renders them plus three short RGB axis indicators at the origin.
//
// NOTE(review): every `new Line()` compiles and links a fresh shader program,
// so three programs are created (and leaked) on EVERY frame here, and Drawer()
// additionally rebuilds all scene objects per frame. This is very expensive and
// a likely culprit for device-specific rendering breakage — consider creating
// the axis lines once and caching the shader program.
public void onDrawFrame(GL10 unused) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
Drawer();
Line XLine = new Line();
XLine.SetVerts(0.0f, 0.0f, 0.0f, 1.0f / 10, 0.0f, 0.0f);
XLine.SetColor(1.0f, 0.0f, 0.0f, 1.0f);
Line YLine = new Line();
YLine.SetVerts(0.0f, 0.0f, 0.0f, 0.0f, 1.0f / 10, 0.0f);
YLine.SetColor(0.0f, 1.0f, 0.0f, 1.0f);
Line ZLine = new Line();
ZLine.SetVerts(0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f / 10);
ZLine.SetColor(0.0f, 0.0f, 1.0f, 1.0f);
// Compose MVP = projection * (view * rotation), then apply uniform zoom.
Matrix.setIdentityM(mViewMatrix, 0);
Matrix.setLookAtM(mViewMatrix, 0, 0, 0, 1.0f , 0f, 0f, 0f, 0f, 1.0f, 0.0f);
Matrix.setIdentityM(mRotationMatrix, 0);
// Horizontal swipe angle rotates about Y, vertical about X.
Matrix.rotateM(mRotationMatrix, 0, mXAngle, 0, 1f, 0);
Matrix.rotateM(mRotationMatrix, 0, mYAngle, 1f, 0, 0);
Matrix.setIdentityM(mVRMatrix, 0);
Matrix.multiplyMM(mVRMatrix, 0, mViewMatrix, 0, mRotationMatrix, 0);
Matrix.setIdentityM(mMVPMatrix, 0);
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mVRMatrix, 0);
Matrix.scaleM(mMVPMatrix,0,mScale,mScale,mScale);
if (DrLine.size() > 0) {
for (Line LineX : DrLine) {
LineX.draw(mMVPMatrix);
}
}
if (DrPoint.size() > 0) {
for (Point PointX : DrPoint) {
PointX.draw(mMVPMatrix);
}
}
XLine.draw(mMVPMatrix);
YLine.draw(mMVPMatrix);
ZLine.draw(mMVPMatrix);
}
// Orthographic projection spanning [-1, 1] vertically and [-ratio, ratio]
// horizontally, with a generous [-5, 5] depth range.
public void onSurfaceChanged(GL10 unused, int width, int height) {
GLES20.glViewport(0, 0, width, height);
ratio = (float) width / height;
//Matrix.frustumM(mProjectionMatrix, 0, -ratio*10, ratio*10, -10, 10, 1, 9);
Matrix.orthoM(mProjectionMatrix, 0, -ratio, ratio, -1.0f, 1.0f, -5.0f, 5.0f);
}
public static int loadShader(int type, String shaderCode){
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
// NOTE(review): GL_COMPILE_STATUS is never checked, so a failed compile
// silently returns a broken shader handle and the error surfaces only later,
// at link or draw time.
return shader;
}
private Float TempMaxPointHolder = 0.0f;
private Float MaxPointHolder = 0.0f;
/**
 * Computes a scale factor that fits the largest point coordinate into ~0.9 of
 * clip space.
 *
 * BUG FIXES: the original accumulated the maximum in a field that was never
 * reset, so the scale could only ever shrink over the app's lifetime (stale
 * points kept dominating), and it divided by zero (yielding Infinity) when
 * PointArray was empty.
 */
public Float Scaler(){
    float maxCoord = 0.0f;
    for (ArrayList<String> point : PointArray) {
        // Same semantics as before: abs of the max of the three coordinates.
        float candidate = Math.abs(Math.max(Math.max(Float.parseFloat(point.get(1)),
                Float.parseFloat(point.get(2))), Float.parseFloat(point.get(3))));
        if (candidate > maxCoord) {
            maxCoord = candidate;
        }
    }
    // Keep the legacy fields in sync in case other code still reads them.
    TempMaxPointHolder = maxCoord;
    MaxPointHolder = maxCoord;
    if (maxCoord == 0.0f) {
        return 1.0f; // no points yet (or all at the origin): avoid divide-by-zero
    }
    return 0.9f / maxCoord;
}
/**
 * Rebuilds the renderable line and point lists from the string-based
 * LineArray / PointArray tables; called once per frame from onDrawFrame.
 *
 * BUG FIX: line state was compared with `==` (`L.get(3) == "0"`), which tests
 * reference identity — values parsed/stored at runtime are not interned, so
 * the comparison silently failed and the line colors were never applied.
 */
public void Drawer(){
    Float Scaler = Scaler();
    ArrayList<Point> TempDrPoint = new ArrayList<Point>();
    ArrayList<Line> TempDrLine = new ArrayList<Line>();
    // Lines: each entry is [id, joint1, joint2, state]; joints are resolved
    // against PointArray to obtain endpoint coordinates.
    for (ArrayList<String> lineEntry : LineArray) {
        L = lineEntry;
        Joint1 = Float.parseFloat(L.get(1));
        Joint2 = Float.parseFloat(L.get(2));
        for (ArrayList<String> pointEntry : PointArray) {
            P = pointEntry;
            if (Float.parseFloat(P.get(0)) == Joint1) {
                x1 = Float.parseFloat(P.get(1)) * Scaler;
                y1 = Float.parseFloat(P.get(2)) * Scaler;
                z1 = Float.parseFloat(P.get(3)) * Scaler;
            }
            if (Float.parseFloat(P.get(0)) == Joint2) {
                x2 = Float.parseFloat(P.get(1)) * Scaler;
                y2 = Float.parseFloat(P.get(2)) * Scaler;
                z2 = Float.parseFloat(P.get(3)) * Scaler;
            }
        }
        Line TempLine = new Line();
        TempLine.SetVerts(x1, y1, z1, x2, y2, z2);
        if ("0".equals(L.get(3))) {
            TempLine.SetColor(0.0f, 0.0f, 0.0f, 1.0f);
        } else if ("1".equals(L.get(3))) {
            TempLine.SetColor(0.66f, 0.73f, 0.21f, 1.0f);
        }
        TempDrLine.add(TempLine);
    }
    setDrLine(TempDrLine);
    // Points: each entry is [id, x, y, z, state].
    for (ArrayList<String> pointEntry : PointArray) {
        P = pointEntry;
        px = Float.parseFloat(P.get(1)) * Scaler;
        py = Float.parseFloat(P.get(2)) * Scaler;
        pz = Float.parseFloat(P.get(3)) * Scaler;
        ps = Float.parseFloat(P.get(4));
        Point TempPoint = new Point();
        TempPoint.SetPointVerts(px, py, pz);
        if (ps == 0.0f) {
            TempPoint.SetPointColor(0.65f, 0.37f, 0.11f, 1.0f);
        } else if (ps == 1.0f) {
            TempPoint.SetPointColor(0.68f, 0.07f, 0.32f, 1.0f);
        }
        TempDrPoint.add(TempPoint);
    }
    setDrPoint(TempDrPoint);
}
// Rotation angles in degrees, written from the UI thread and read on the GL
// thread; volatile provides cross-thread visibility.
public volatile float mXAngle;
public volatile float mYAngle;
// NOTE(review): DrLine/DrPoint/LineArray/PointArray/UndoL/UndoP are static, so
// every GLRender instance shares them — verify this is intentional.
public ArrayList<Line> getDrLine() {
return DrLine;
}
public ArrayList<Point> getDrPoint(){
return DrPoint;
}
public void setDrLine(ArrayList<Line> XDrLine) {
DrLine = XDrLine;
}
public void setDrPoint(ArrayList<Point> XDrPoint) {
DrPoint = XDrPoint;
}
public float getXAngle() {
return mXAngle;
}
public float getYAngle(){
return mYAngle;
}
public void setAngleX(float Xangle) {
mXAngle = Xangle;
}
public void setAngleY(float Yangle) {
mYAngle = Yangle;
}
// Zoom factor and pinch focus, written by the gesture listener on the UI thread.
public volatile float mScale = 1;
public volatile float mXFocus;
public volatile float mYFocus;
public void setZoom(float scale){
mScale = scale;
}
public void setFocus(float XFocus, float YFocus){
mXFocus = XFocus;
mYFocus = YFocus;
}
public float getmScale(){
return mScale;
}
public float getmXFocus(){
return mXFocus;
}
public float getmYFocus(){
return mYFocus;
}
public void setLineArray(ArrayList<ArrayList<String>> XLine){
LineArray = XLine;
}
public ArrayList<ArrayList<String>> getLineArray(){
return LineArray;
}
// Pushes a deep-ish copy (outer list copied; inner lists shared) onto the undo
// stack. Note: UndoL/UndoP are static fields accessed via `this`.
public void setUndoL(ArrayList<ArrayList<String>> UndoLine){
this.UndoL.add(new ArrayList<ArrayList<String>>(UndoLine));
}
public ArrayList<ArrayList<ArrayList<String>>> getUndoL() {
return UndoL;
}
public void setUndoP(ArrayList<ArrayList<String>> UndoPoint){
this.UndoP.add(new ArrayList<ArrayList<String>>(UndoPoint));
}
public ArrayList<ArrayList<ArrayList<String>>> getUndoP() {
return UndoP;
}
public void setPointArray(ArrayList<ArrayList<String>> XPoint){
PointArray = XPoint;
}
public ArrayList<ArrayList<String>> getPointArray(){
return PointArray;
}
gives error contents_sample_state: [ agr({[3 ,38]=11, [3 ,7 ,38]=28}) ]
/**
 * A single GL line segment with its own vertex buffer and shader program.
 *
 * NOTE(review): the constructor compiles and links a shader program, so
 * creating Line objects every frame (as GLRender.onDrawFrame does) is very
 * expensive and leaks programs; consider sharing one static program.
 */
public class Line {

    private FloatBuffer VertexBuffer;

    private final String VertexShaderCode =
            // This matrix member variable provides a hook to manipulate
            // the coordinates of the objects that use this vertex shader
            "uniform mat4 uMVPMatrix;" +
            "attribute vec4 vPosition;" +
            "void main() {" +
            // the matrix must be included as a modifier of gl_Position
            " gl_Position = uMVPMatrix * vPosition;" +
            "}";

    private final String FragmentShaderCode =
            "precision mediump float;" +
            "uniform vec4 vColor;" +
            "void main() {" +
            " gl_FragColor = vColor;" +
            "}";

    protected int GlProgram;
    protected int PositionHandle;
    protected int ColorHandle;
    protected int MVPMatrixHandle;

    // number of coordinates per vertex in this array
    static final int COORDS_PER_VERTEX = 3;

    // BUG FIX: this array was `static`, i.e. shared by every Line instance —
    // SetVerts on one line mutated the template that seeded the next line's
    // buffer. Per-instance coordinates remove that hidden coupling.
    float LineCoords[] = {
            0.0f, 0.0f, 0.0f,
            1.0f, 0.0f, 0.0f
    };

    private final int VertexCount = LineCoords.length / COORDS_PER_VERTEX;
    private final int VertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex

    // Set color with red, green, blue and alpha (opacity) values
    float color[] = { 0.0f, 0.0f, 0.0f, 1.0f };

    public Line() {
        // initialize vertex byte buffer for shape coordinates
        // (number of coordinate values * 4 bytes per float)
        ByteBuffer bb = ByteBuffer.allocateDirect(LineCoords.length * 4);
        // use the device hardware's native byte order
        bb.order(ByteOrder.nativeOrder());
        VertexBuffer = bb.asFloatBuffer();
        VertexBuffer.put(LineCoords);
        // set the buffer to read the first coordinate
        VertexBuffer.position(0);
        int vertexShader = GLRender.loadShader(GLES20.GL_VERTEX_SHADER, VertexShaderCode);
        int fragmentShader = GLRender.loadShader(GLES20.GL_FRAGMENT_SHADER, FragmentShaderCode);
        GlProgram = GLES20.glCreateProgram();             // create empty OpenGL ES Program
        GLES20.glAttachShader(GlProgram, vertexShader);   // add the vertex shader to program
        GLES20.glAttachShader(GlProgram, fragmentShader); // add the fragment shader to program
        GLES20.glLinkProgram(GlProgram);                  // creates OpenGL ES program executables
    }

    /** Overwrites the two endpoints: (v0,v1,v2) -> (v3,v4,v5). */
    public void SetVerts(float v0, float v1, float v2, float v3, float v4, float v5) {
        LineCoords[0] = v0;
        LineCoords[1] = v1;
        LineCoords[2] = v2;
        LineCoords[3] = v3;
        LineCoords[4] = v4;
        LineCoords[5] = v5;
        VertexBuffer.position(0); // rewind defensively before writing
        VertexBuffer.put(LineCoords);
        // set the buffer to read the first coordinate
        VertexBuffer.position(0);
    }

    /** Sets the RGBA color uploaded as the vColor uniform at draw time. */
    public void SetColor(float red, float green, float blue, float alpha) {
        color[0] = red;
        color[1] = green;
        color[2] = blue;
        color[3] = alpha;
    }

    /** Draws the segment using the given combined model-view-projection matrix. */
    public void draw(float[] mvpMatrix) {
        // Add program to OpenGL ES environment
        GLES20.glUseProgram(GlProgram);
        // get handle to vertex shader's vPosition member
        PositionHandle = GLES20.glGetAttribLocation(GlProgram, "vPosition");
        // Enable a handle to the line vertices
        GLES20.glEnableVertexAttribArray(PositionHandle);
        // Client-side vertex array: the buffer itself is passed, no VBO bound.
        GLES20.glVertexAttribPointer(PositionHandle, COORDS_PER_VERTEX,
                GLES20.GL_FLOAT, false,
                VertexStride, VertexBuffer);
        // get handle to fragment shader's vColor member
        ColorHandle = GLES20.glGetUniformLocation(GlProgram, "vColor");
        GLES20.glUniform4fv(ColorHandle, 1, color, 0);
        // Apply the projection and view transformation
        MVPMatrixHandle = GLES20.glGetUniformLocation(GlProgram, "uMVPMatrix");
        GLES20.glUniformMatrix4fv(MVPMatrixHandle, 1, false, mvpMatrix, 0);
        GLES20.glLineWidth(5);
        GLES20.glDrawArrays(GLES20.GL_LINES, 0, VertexCount);
        // Disable vertex array
        GLES20.glDisableVertexAttribArray(PositionHandle);
    }
}
public class GLSurface extends GLSurfaceView {
//public static GLRender xRender;
ScaleGestureDetector ScaleDetect;
MainActivity mMain;
OpenGL XOPL = mMain.xOpenGL;
public GLSurface(Context context, AttributeSet attrs){
super(context, attrs);
// Render the view only when there is a change in the drawing data
//setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
ScaleDetect = new ScaleGestureDetector(context, new ScaleDetectorListener());
}
private float mPreviousX;
private float mPreviousY;
float density = this.getResources().getDisplayMetrics().density;
private static final int MAX_CLICK_DURATION = 300;
private long pressStartTime;
#Override
public boolean onTouchEvent(MotionEvent e) {
ScaleDetect.onTouchEvent(e);
float x = e.getX();
float y = e.getY();
switch (e.getAction()) {
case MotionEvent.ACTION_MOVE:
if(!ScaleDetect.isInProgress()) {
float xMoveRange = x - mPreviousX;
float yMoveRange = y - mPreviousY;
float dx = 0.0f;
float dy = 0.0f;
float w = this.getWidth();
float h = this.getHeight();
if (Math.abs(xMoveRange) > w / 100 && Math.abs(yMoveRange) < h / 100){
dx = xMoveRange / density / 2.0f;
XOPL.xRender.setAngleX(XOPL.xRender.getXAngle() + dx);
}
if (Math.abs(xMoveRange) < w / 100 && Math.abs(yMoveRange) > h / 100){
dy = yMoveRange / density / 2.0f;
XOPL.xRender.setAngleY(XOPL.xRender.getYAngle() + dy);
}
XOPL.myGLView.requestRender();
}
break;
case MotionEvent.ACTION_DOWN:
pressStartTime = System.currentTimeMillis();
break;
case MotionEvent.ACTION_UP:
long pressDuration = System.currentTimeMillis() - pressStartTime;
if (pressDuration < MAX_CLICK_DURATION) {
float Nx = (x - Float.parseFloat(Double.toString(this.getWidth())) * 0.5f) / (Float.parseFloat(Double.toString(this.getWidth())) * 0.5f);
float Ny = (y - Float.parseFloat(Double.toString(this.getHeight())) * 0.5f) / (Float.parseFloat(Double.toString(this.getHeight())) * 0.5f);
float Nz = 1.0f;
XOPL.xRender.setNCoordinate(Nx, Ny, Nz);
XOPL.xRender.Selection();
XOPL.myGLView.requestRender();
}
break;
}
mPreviousX = x;
mPreviousY = y;
return true;
}
private float sizeCoef = 1;
public class ScaleDetectorListener extends ScaleGestureDetector.SimpleOnScaleGestureListener {
float scaleFocusX = 0;
float scaleFocusY = 0;
public boolean onScale(ScaleGestureDetector arg0) {
float scale = arg0.getScaleFactor() * sizeCoef;
sizeCoef = scale;
XOPL.xRender.setZoom(sizeCoef);
XOPL.myGLView.requestRender();
return true;
}
public boolean onScaleBegin(ScaleGestureDetector arg0) {
invalidate();
scaleFocusX = arg0.getFocusX();
scaleFocusY = arg0.getFocusY();
XOPL.xRender.setFocus(scaleFocusX,scaleFocusY);
return true;
}
public void onScaleEnd(ScaleGestureDetector arg0) {
scaleFocusX = 0;
scaleFocusY = 0;
XOPL.xRender.setFocus(scaleFocusX,scaleFocusY);
}
}
}
Not sure if this is the problem, but watch out for things like:
XOPL.xRender.setAngleX(XOPL.xRender.getXAngle() + dx)
... especially when using mediump precision in shaders. Generally I'd recommend changing the setAngle functions to exploit rotational symmetry and wrap the value around, so you get an absolute range at the API level of ±Pi.
In your current code, if the user keeps swiping in one direction, eventually you'll run out of bits and everything will either stop rotating (at best), or fail with infinities or NaN results (at worst).
Note using "-Pi to +Pi" is preferred to using "0 to +2Pi", because the sign-bit is free in most floating point representations so preserves more dynamic precision.
Example code:
/**
 * Wraps an angle in radians into the range [-PI, PI].
 *
 * Fix: the original used {@code math.PI}, which does not compile in Java —
 * the constant lives on {@code java.lang.Math}, and Java is case-sensitive.
 * Declared static because the function is pure (call sites are unchanged).
 *
 * @param angle angle in radians, any finite value
 * @return the equivalent angle in [-PI, PI]
 */
public static float wrapRadians(float angle) {
    // If angle is negative ensure value is higher than minus pi
    if (angle < 0) {
        while (angle < -Math.PI) {
            angle += 2 * Math.PI;
        }
    // Else angle is positive so ensure value is less than pi
    } else {
        while (angle > Math.PI) {
            angle -= 2 * Math.PI;
        }
    }
    return angle;
}
/**
 * Stores the X rotation angle, wrapped into [-PI, PI].
 *
 * Fix: the original was declared {@code public float} but returned nothing,
 * which is a compile error — the method must be {@code void}.
 *
 * @param XAngle new X angle in radians (unwrapped)
 */
public void setXAngle(float XAngle) {
    mXAngle = wrapRadians(XAngle);
}
/**
 * Stores the Y rotation angle, wrapped into [-PI, PI].
 *
 * Fix: the original was declared {@code public float} but returned nothing,
 * which is a compile error — the method must be {@code void}.
 *
 * @param YAngle new Y angle in radians (unwrapped)
 */
public void setYAngle(float YAngle) {
    mYAngle = wrapRadians(YAngle);
}

How to fill polygon in android using opengl-es 2.0?

I want to draw polygon as shown in images and fill it with colors.
I have tried building the polygon from triangles, but that did not help me. If anyone knows how, please help me out.
OpenGLProjectRenderer.java
public class OpenGLProjectRenderer implements Renderer {
List<Float> points = new ArrayList<Float>();
private static final String TAG = "Renderer";
private static final int POSITION_COMPONENT_COUNT = 2;
private static final int BYTES_PER_FLOAT = 4;
private FloatBuffer vertexData = ByteBuffer
.allocateDirect(20000 * BYTES_PER_FLOAT)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
private Context context;
private int program;
private static final String A_POSITION = "a_Position";
private int aPositionLocation;
private static final String U_COLOR = "u_Color";
private int uColorLocation;
private HashMap<Integer, ArrayList<Float>> lines = new HashMap<Integer, ArrayList<Float>>();
int position = 0;
public OpenGLProjectRenderer(Context context) {
this.context = context;
}
#Override
public void onDrawFrame(GL10 gl) {
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUniform4f(uColorLocation, 1.0f, 0.0f, 0.0f, 1.0f);
for (int p = 0; p < lines.size(); p++) {
vertexData.put(toFloatarray(lines.get(p)));
int vertices = (int) lines.get(p).size() / 2;
int b = vertices % 4 == 0 ? vertices-1 : vertices - 2;
Log.d(TAG,""+lines.size());
glDrawArrays(GLES20.GL_LINE_LOOP, 0, lines.size());
vertexData.clear();
}
}
#Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
gl.glViewport(0, 0, width, height);
}
#Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
String vertexShaderSource = TextResourceReader.readTextFileFromResource(
context, R.raw.simple_vertex_shader);
String fragmentShaderSource = TextResourceReader.readTextFileFromResource(
context, R.raw.simple_fragment_shader);
int vertexShader = ShaderHelper.compileVertexShader(vertexShaderSource);
int fragmentShader = ShaderHelper
.compileFragmentShader(fragmentShaderSource);
program = ShaderHelper.linkProgram(vertexShader, fragmentShader);
ShaderHelper.validateProgram(program);
glUseProgram(program);
uColorLocation = glGetUniformLocation(program, U_COLOR);
aPositionLocation = glGetAttribLocation(program, A_POSITION);
vertexData.position(0);
glVertexAttribPointer(aPositionLocation, POSITION_COMPONENT_COUNT,
GL_FLOAT, false, 0, vertexData);
glEnableVertexAttribArray(aPositionLocation);
}
ArrayList<Float> temp = new ArrayList<Float>();
public void handleTouchPress(float normalizedX, float normalizedY) {
Log.v(TAG + " handleTouchPress", points.size() + "");
temp.add(normalizedX);
temp.add(normalizedY);
lines.put(position, temp);
}
public void handleTouchDrag(float normalizedX, float normalizedY) {
Log.v(TAG + " handleTouchDrag", points.size() + "");
}
public float[] toFloatarray(List<Float> floatList) {
float[] floatArray = new float[floatList.size()];
int i = 0;
for (Float f : floatList) {
floatArray[i++] = (f != null ? f : Float.NaN);
}
return floatArray;
}
public void handleTouchUp(float normalizedX, float normalizedY) {
Log.v(TAG + " handleTouchUp", points.size() + "");
position++;
}}
Using above code I am able to draw polygon using above code using GL_LINE_LOOP but not able to fill created polygon.
OpenGL ES 2.0 support drawing only triangles as a basic primitives. There are 3 ways of drawing polygons using triangles,
1) Triangles
2) Triangle Strips
3) Triangle Fan
In your case you can try a triangle fan to draw a polygon, provided you know a point inside the polygon.
Here is an example of drawing a circle.

Android camera scales aspect ratio on Google Cardboard

I' ve been working on Google Cardboard for some research.
I faced a problem when I was trying to open the camera on Google Cardboard. The problem is the aspect ratio. When looking through the Google Cardboard, the camera image appears thinner than in reality. As you can see in the images below...
Normal image:
Cardboard image:
And these are the codes.
CardboardOverlayView.java
// Two-pane overlay (one pane per eye) that shows an image and a toast-like
// text message on top of the CardboardView. The "#Override" tokens below are
// markdown-garbled "@Override" from the original paste and are preserved.
public class CardboardOverlayView extends LinearLayout {
private static final String TAG = CardboardOverlayView.class.getSimpleName();
// One eye view per half of the screen, laid out side by side.
private final CardboardOverlayEyeView mLeftView;
private final CardboardOverlayEyeView mRightView;
private AlphaAnimation mTextFadeAnimation;
public CardboardOverlayView(Context context, AttributeSet attrs) {
super(context, attrs);
setOrientation(HORIZONTAL);
// Equal weight so each eye view takes exactly half the width.
LayoutParams params = new LayoutParams(
LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT, 1.0f);
params.setMargins(0, 0, 0, 0);
mLeftView = new CardboardOverlayEyeView(context, attrs);
mLeftView.setLayoutParams(params);
addView(mLeftView);
mRightView = new CardboardOverlayEyeView(context, attrs);
mRightView.setLayoutParams(params);
addView(mRightView);
// Set some reasonable defaults.
setDepthOffset(0.016f);
setColor(Color.rgb(150, 255, 180));
setVisibility(View.VISIBLE);
// Fade the text out over 5 seconds each time a toast is shown.
mTextFadeAnimation = new AlphaAnimation(1.0f, 0.0f);
mTextFadeAnimation.setDuration(5000);
}
// Shows a message in both eyes and fades it out; safe to call repeatedly.
public void show3DToast(String message) {
setText(message);
setTextAlpha(1f);
mTextFadeAnimation.setAnimationListener(new EndAnimationListener() {
#Override
public void onAnimationEnd(Animation animation) {
// Hide the text once the fade completes.
setTextAlpha(0f);
}
});
startAnimation(mTextFadeAnimation);
}
// Convenience base class so callers only implement onAnimationEnd.
private abstract class EndAnimationListener implements Animation.AnimationListener {
#Override public void onAnimationRepeat(Animation animation) {}
#Override public void onAnimationStart(Animation animation) {}
}
// Opposite horizontal offsets per eye create the stereo depth illusion.
private void setDepthOffset(float offset) {
mLeftView.setOffset(offset);
mRightView.setOffset(-offset);
}
private void setText(String text) {
mLeftView.setText(text);
mRightView.setText(text);
}
private void setTextAlpha(float alpha) {
mLeftView.setTextViewAlpha(alpha);
mRightView.setTextViewAlpha(alpha);
}
private void setColor(int color) {
mLeftView.setColor(color);
mRightView.setColor(color);
}
/**
* A simple view group containing some horizontally centered text underneath a horizontally
* centered image.
*
* This is a helper class for CardboardOverlayView.
*/
private class CardboardOverlayEyeView extends ViewGroup {
private final ImageView imageView;
private final TextView textView;
// Horizontal stereo offset as a fraction of the view's width.
private float offset;
public CardboardOverlayEyeView(Context context, AttributeSet attrs) {
super(context, attrs);
imageView = new ImageView(context, attrs);
imageView.setScaleType(ImageView.ScaleType.CENTER_INSIDE);
imageView.setAdjustViewBounds(true); // Preserve aspect ratio.
addView(imageView);
textView = new TextView(context, attrs);
textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14.0f);
textView.setTypeface(textView.getTypeface(), Typeface.BOLD);
textView.setGravity(Gravity.CENTER);
textView.setShadowLayer(3.0f, 0.0f, 0.0f, Color.DKGRAY);
addView(textView);
}
// Tints both the image and the text with the same color.
public void setColor(int color) {
imageView.setColorFilter(color);
textView.setTextColor(color);
}
public void setText(String text) {
textView.setText(text);
}
public void setTextViewAlpha(float alpha) {
textView.setAlpha(alpha);
}
public void setOffset(float offset) {
this.offset = offset;
}
#Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
// Width and height of this ViewGroup.
final int width = right - left;
final int height = bottom - top;
// The size of the image, given as a fraction of the dimension as a ViewGroup. We multiply
// both width and heading with this number to compute the image's bounding box. Inside the
// box, the image is the horizontally and vertically centered.
final float imageSize = 0.12f;
// The fraction of this ViewGroup's height by which we shift the image off the ViewGroup's
// center. Positive values shift downwards, negative values shift upwards.
final float verticalImageOffset = -0.07f;
// Vertical position of the text, specified in fractions of this ViewGroup's height.
final float verticalTextPos = 0.52f;
// Layout ImageView
float imageMargin = (1.0f - imageSize) / 2.0f;
float leftMargin = (int) (width * (imageMargin + offset));
float topMargin = (int) (height * (imageMargin + verticalImageOffset));
imageView.layout(
(int) leftMargin, (int) topMargin,
(int) (leftMargin + width * imageSize), (int) (topMargin + height * imageSize));
// Layout TextView
leftMargin = offset * width;
topMargin = height * verticalTextPos;
textView.layout(
(int) leftMargin, (int) topMargin,
(int) (leftMargin + width), (int) (topMargin + height * (1.0f - verticalTextPos)));
}
}
}
MainActivity.java
public class MainActivity extends CardboardActivity implements CardboardView.StereoRenderer, OnFrameAvailableListener {
private static final String TAG = "VRCamMtMMainAc";
private static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
private Camera camera;
private final String vertexShaderCode =
"attribute vec4 position;" +
"attribute vec2 inputTextureCoordinate;" +
"varying vec2 textureCoordinate;" +
"void main()" +
"{" +
"gl_Position = position;" +
"textureCoordinate = inputTextureCoordinate;" +
"}";
private final String fragmentShaderCode =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;" +
"varying vec2 textureCoordinate; \n" +
"uniform samplerExternalOES s_texture; \n" +
"void main(void) {" +
" gl_FragColor = texture2D( s_texture, textureCoordinate );\n" +
//" gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);\n" +
"}";
private FloatBuffer vertexBuffer, textureVerticesBuffer, vertexBuffer2;
private ShortBuffer drawListBuffer, buf2;
private int mProgram;
private int mPositionHandle, mPositionHandle2;
private int mColorHandle;
private int mTextureCoordHandle;
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 2;
static float squareVertices[] = { // in counterclockwise order:
-1.0f, -1.0f, // 0.left - mid
1.0f, -1.0f, // 1. right - mid
-1.0f, 1.0f, // 2. left - top
1.0f, 1.0f, // 3. right - top
};
private short drawOrder[] = {0, 2, 1, 1, 2, 3}; // order to draw vertices
private short drawOrder2[] = {2, 0, 3, 3, 0, 1}; // order to draw vertices
static float textureVertices[] = {
0.0f, 1.0f, // A. left-bottom
1.0f, 1.0f, // B. right-bottom
0.0f, 0.0f, // C. left-top
1.0f, 0.0f // D. right-top
};
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex
private ByteBuffer indexBuffer; // Buffer for index-array
private int texture;
private CardboardOverlayView mOverlayView;
private CardboardView cardboardView;
private SurfaceTexture surface;
private float[] mView;
private float[] mCamera;
public void startCamera(int texture) {
surface = new SurfaceTexture(texture);
surface.setOnFrameAvailableListener(this);
camera = Camera.open();
Camera.Parameters params = camera.getParameters();
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
// Check what resolutions are supported by your camera
/*List<Camera.Size> sizes = params.getSupportedPictureSizes();
// Iterate through all available resolutions and choose one.
// The chosen resolution will be stored in mSize.
Camera.Size mSize = null;
for (Camera.Size size : sizes) {
Log.i(TAG, "Available resolution: " + size.width + "x" + size.height);
mSize = size;
}
params.setPictureSize(5312,2988);*/
camera.setParameters(params);
try {
camera.setPreviewTexture(surface);
camera.startPreview();
} catch (IOException ioe)
{
Log.i(TAG, "CAM LAUNCH FAILED");
}
}
static private int createTexture() {
int[] texture = new int[1];
GLES20.glGenTextures(1, texture, 0);
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture[0]);
GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
return texture[0];
}
private int loadGLShader(int type, String code) {
int shader = GLES20.glCreateShader(type);
GLES20.glShaderSource(shader, code);
GLES20.glCompileShader(shader);
// Get the compilation status.
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
// If the compilation failed, delete the shader.
if (compileStatus[0] == 0) {
Log.e(TAG, "Error compiling shader: " + GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
if (shader == 0) {
throw new RuntimeException("Error creating shader.");
}
return shader;
}
/**
* Checks if we've had an error inside of OpenGL ES, and if so what that error is.
*
* #param func
*/
private static void checkGLError(String func) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, func + ": glError " + error);
throw new RuntimeException(func + ": glError " + error);
}
}
/**
* Sets the view to our CardboardView and initializes the transformation matrices we will use
* to render our scene.
*
* #param savedInstanceState
*/
#Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
cardboardView = (CardboardView) findViewById(R.id.cardboard_view);
cardboardView.setRenderer(this);
setCardboardView(cardboardView);
mCamera = new float[16];
mView = new float[16];
mOverlayView = (CardboardOverlayView) findViewById(R.id.overlay);
//mOverlayView.show3DToast("Pull the magnet when you find an object.");
}
#Override
public void onRendererShutdown() {
Log.i(TAG, "onRendererShutdown");
}
#Override
public void onSurfaceChanged(int width, int height) {
Log.i(TAG, "onSurfaceChanged");
}
/**
* Creates the buffers we use to store information about the 3D world. OpenGL doesn't use Java
* arrays, but rather needs data in a format it can understand. Hence we use ByteBuffers.
*
* #param config The EGL configuration used when creating the surface.
*/
#Override
public void onSurfaceCreated(EGLConfig config) {
Log.i(TAG, "onSurfaceCreated");
GLES20.glClearColor(0.1f, 0.1f, 0.1f, 0.5f); // Dark background so text shows up well
ByteBuffer bb = ByteBuffer.allocateDirect(squareVertices.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(squareVertices);
vertexBuffer.position(0);
ByteBuffer dlb = ByteBuffer.allocateDirect(drawOrder.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
ByteBuffer bb2 = ByteBuffer.allocateDirect(textureVertices.length * 4);
bb2.order(ByteOrder.nativeOrder());
textureVerticesBuffer = bb2.asFloatBuffer();
textureVerticesBuffer.put(textureVertices);
textureVerticesBuffer.position(0);
int vertexShader = loadGLShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = loadGLShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
mProgram = GLES20.glCreateProgram(); // create empty OpenGL ES Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mProgram);
texture = createTexture();
startCamera(texture);
}
/**
* Prepares OpenGL ES before we draw a frame.
*
* #param headTransform The head transformation in the new frame.
*/
#Override
public void onNewFrame(HeadTransform headTransform) {
float[] mtx = new float[16];
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
surface.updateTexImage();
surface.getTransformMatrix(mtx);
}
#Override
public void onFrameAvailable(SurfaceTexture arg0) {
this.cardboardView.requestRender();
}
/**
* Draws a frame for an eye. The transformation for that eye (from the camera) is passed in as
* a parameter.
*
* #param transform The transformations to apply to render this eye.
*/
#Override
public void onDrawEye(EyeTransform transform) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
GLES20.glUseProgram(mProgram);
GLES20.glActiveTexture(GL_TEXTURE_EXTERNAL_OES);
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture);
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "position");
GLES20.glEnableVertexAttribArray(mPositionHandle);
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
false, vertexStride, vertexBuffer);
mTextureCoordHandle = GLES20.glGetAttribLocation(mProgram, "inputTextureCoordinate");
GLES20.glEnableVertexAttribArray(mTextureCoordHandle);
GLES20.glVertexAttribPointer(mTextureCoordHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
false, vertexStride, textureVerticesBuffer);
mColorHandle = GLES20.glGetAttribLocation(mProgram, "s_texture");
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length,
GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
GLES20.glDisableVertexAttribArray(mTextureCoordHandle);
Matrix.multiplyMM(mView, 0, transform.getEyeView(), 0, mCamera, 0);
}
#Override
public void onFinishFrame(Viewport viewport) {
}
#Override
public void onCardboardTrigger() {
}
}
activity_main.xml
<!-- Root layout: the CardboardView fills the screen and the overlay view is
     stacked on top of it (RelativeLayout draws children in order). -->
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:id="#+id/ui_layout"
android:orientation="vertical"
android:layout_width="fill_parent"
android:layout_height="fill_parent" >
<!-- Full-screen stereo GL surface rendering the camera preview. -->
<com.google.vrtoolkit.cardboard.CardboardView
android:id="#+id/cardboard_view"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:layout_alignParentTop="true"
android:layout_alignParentLeft="true" />
<!-- Per-eye image/text overlay drawn above the CardboardView. -->
<com.oculab.mehmettaha.vrcamera.CardboardOverlayView
android:id="#+id/overlay"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:layout_alignParentLeft="true"
android:layout_alignParentTop="true" />
</RelativeLayout>
Hence how can I handle this problem. I' ve tried to change camera resolution, but it didn' t work.
Cheers!
The problem is the camera preview size. Its expecting to be at or near full screen size but you're trying to make it half the width with the same height. After trying this I found the aspect much improved: https://stackoverflow.com/a/19592492/922339. It set the optimal preview size to 352 by 288 on my Nexus 5
You probably want the camera views to fill the screen, and don't mind if the preview is cropped on the left and right. So, when you set the texture coordinates (textureVertices), use left=0.25 and right=0.75.

How to zoom properly in OpenGL ES 2

I've a 3d-model in OpenGL ES in Android. I've already implemented swipe-gestures for translating, rotating and zooming into the model. Everything but the zooming works fine. I'm not sure what I'm missing or what I have to change but I'm not able to zoom into my model.
The model is a building. What I'd like to do is to zoom into the different floors of the building. But no matter how I change my implementation, I'm not able to do this.
Either the building disappears when I zoom in or the zoom has a limitation so that I can't zoom into it further....
First of all I decreased the field of view by modifying the Matrix:
frustumM(matrix, 0, -ratio/zoom, ratio/zoom, -1/zoom, 1/zoom, nearPlane, farPlane).
Someone told me, that this is not the correct approach and I should modify the eyeZ value like:
eyeZ = -1.0/zoom
The first approach is working, but I'd like to know what my mistake with the second approach is, because it has the issues I mentioned in the beginning.
My renderer-class is the following:
// Renderer for a 3D building model with swipe rotation, translation and
// pinch zoom. Zoom is applied by dividing eyeZ by mZoomLevel in setLookAtM
// (approach 3 of the answer below) — which is why the model can leave the
// viewing volume when zooming in. The "#Override" tokens are markdown-garbled
// "@Override" from the original paste and are preserved byte-for-byte.
public class MyGLRenderer implements GLSurfaceView.Renderer {
// Model/view/projection matrices (column-major, android.opengl.Matrix).
private float[] mModelMatrix = new float[16];
private final float[] mMVMatrix = new float[16];
private final float[] mProjectionMatrix = new float[16];
private final float[] mViewMatrix = new float[16];
// Frustum near/far planes; objects outside [near, far] are clipped.
private float nearPlaneDistance = 1f;
private float farPlaneDistance = 200f;
private float modelRatio = 1.0f;
private int offset = 0;
// Eye (camera) position for setLookAtM; eyeZ is divided by mZoomLevel.
private float eyeX = 0;
private float eyeY = 0;
private float eyeZ = -1;
// Look-at target and up vector.
private float centerX = 0f;
private float centerY = 0f;
private float centerZ = 0f;
private float upX = 0f;
private float upY = 1.0f;
private float upZ = 0.0f;
private float mZoomLevel = 1f;
private float defaultRotationX = 100.0f; //building otherwise on the wrong side
private float defaultRotationZ = 180.0f; //building otherwise on the wrong side
private float rotationX = defaultRotationX;
private float rotationY = 0.0f;
private float rotationZ = defaultRotationZ;
private float translateX = 0.0f;
private float translateY = 0.0f;
private float translateZ = 0.0f;
private float scaleFactor = 20.0f; //no matter what scale factor -> it's not possible to zoom into the building...
private float ratio;
private float width;
private float height;
private List<IDrawableObject> drawableObjects;
public Model3D model3d;
public MyGLRenderer(Model3D model3d) {
this.model3d = model3d;
getModelScale();
}
// Computes a uniform scale that fits the model's larger dimension into 2 units.
private void getModelScale() {
float highestValue = (model3d.width > model3d.height) ? model3d.width
: model3d.height;
modelRatio = 2f / highestValue;
}
#Override
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
// Set the background frame color
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
GLES20.glDisable(GLES20.GL_CULL_FACE);
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
drawableObjects = ... ; //to much detail, basically getting triangles
}
#Override
public void onDrawFrame(GL10 unused) {
float[] mMVPMatrix = new float[16];
// Draw background color
Matrix.setIdentityM(mModelMatrix, 0);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
// model is in origin-solution too big
Matrix.scaleM(mModelMatrix, 0, modelRatio * scaleFactor, modelRatio
* scaleFactor, modelRatio * scaleFactor);
Matrix.translateM(mModelMatrix, 0, translateX, translateY, translateZ);
rotateModel(mModelMatrix, rotationX, rotationY, rotationZ, true);
// Set the camera position (View matrix)
// NOTE(review): zoom here moves the eye toward the target (eyeZ / zoom);
// once the eye crosses the near plane the model is clipped away, which is
// likely the "building disappears" symptom described in the question.
Matrix.setLookAtM(mViewMatrix, offset, eyeX, eyeY, eyeZ / mZoomLevel,
centerX, centerY, centerZ, upX, upY, upZ);
// combine the model with the view matrix
Matrix.multiplyMM(mMVMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
// this projection matrix is applied to object coordinates
// in the onDrawFrame() method
// NOTE(review): bottom/top are passed as (1, -1), i.e. vertically flipped —
// presumably intentional, matching the defaultRotation* "wrong side" hacks;
// confirm before changing.
Matrix.frustumM(mProjectionMatrix, 0, -ratio, ratio, 1, -1,
nearPlaneDistance, farPlaneDistance);
// Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVMatrix, 0);
for (IDrawableObject d : drawableObjects) {
d.draw(mMVPMatrix);
}
}
// Applies X/Y/Z rotations (null = skip axis), optionally about the model center.
private void rotateModel(float[] mModelMatrix, Float x, Float y, Float z,
boolean rotateAroundCenter) {
// translation for rotating the model around its center
if (rotateAroundCenter) {
Matrix.translateM(mModelMatrix, 0, (model3d.width / 2f), 0,
(model3d.height / 2f));
}
if (x != null) {
Matrix.rotateM(mModelMatrix, 0, x, 1.0f, 0.0f, 0.0f);
}
if (y != null) {
Matrix.rotateM(mModelMatrix, 0, y, 0.0f, 1.0f, 0.0f);
}
if (z != null) {
Matrix.rotateM(mModelMatrix, 0, z, 0.0f, 0.0f, 1.0f);
}
// translation back to the origin
if (rotateAroundCenter) {
Matrix.translateM(mModelMatrix, 0, -(model3d.width / 2f), 0,
-(model3d.height / 2f));
}
}
#Override
public void onSurfaceChanged(GL10 unused, int width, int height) {
// Adjust the viewport based on geometry changes,
// such as screen rotation
GLES20.glViewport(0, 0, width, height);
this.width = width;
this.height = height;
ratio = (float) width / height;
}
// Compiles a shader of the given type; note: no compile-status check here.
public static int loadShader(int type, String shaderCode) {
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
// NOTE(review): lastMFPS (and TAG below) are not declared in this excerpt —
// presumably fields elided from the paste; verify against the full file.
public int getFPS() {
return lastMFPS;
}
public static void checkGlError(String glOperation) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, glOperation + ": glError " + error);
throw new RuntimeException(glOperation + ": glError " + error);
}
}
public void setZoom(float zoom) {
this.mZoomLevel = zoom;
}
public void setDistance(float distance) {
eyeZ = distance;
}
public float getDistance() {
return eyeZ;
}
public float getRotationX() {
return rotationX;
}
// Rotation setters add the caller's angle on top of the default offsets.
public void setRotationX(float rotationX) {
this.rotationX = defaultRotationX + rotationX;
}
public float getRotationY() {
return rotationY;
}
public void setRotationY(float rotationY) {
this.rotationY = rotationY;
}
public float getRotationZ() {
return rotationZ;
}
public void setRotationZ(float rotationZ) {
this.rotationZ = defaultRotationZ + rotationZ;
}
public float getFarPlane() {
return farPlaneDistance;
}
public float getNearPlane() {
return nearPlaneDistance;
}
public void addTranslation(float mPosX, float mPosY) {
this.translateX = mPosX;
this.translateY = mPosY;
}
public void downPressed() {
translateX -= 10;
}
public void upPressed() {
translateX += 10;
}
// Converts raw touch deltas to normalized translation (Y axis inverted).
public void actionMoved(float mPosX, float mPosY) {
float translationX = (mPosX / width);
float translationY = -(mPosY / height);
addTranslation(translationX, translationY);
}
public float getmZoomLevel() {
return mZoomLevel;
}
public void setmZoomLevel(float mZoomLevel) {
this.mZoomLevel = mZoomLevel;
}
public float getWidth() {
return width;
}
public float getHeight() {
return height;
}
// Null components are left unchanged; X and Z are negated by convention here.
public void setTranslation(Float x, Float y, Float z) {
if (x != null) {
this.translateX = -x;
}
if (y != null) {
this.translateY = y;
}
if (z != null) {
this.translateZ = -z;
}
}
public void setRotation(Float x, Float y, Float z) {
if (x != null) {
this.rotationX = defaultRotationX + x;
}
if (y != null) {
this.rotationY = y;
}
if (z != null) {
this.rotationZ = defaultRotationZ + z;
}
}
public void setScale(float scale) {
this.mZoomLevel = scale;
}
public float getDefaultRotationX() {
return defaultRotationX;
}
}
Do you see any mistake I'm currently doing? You can also have a look into the github repository: https://github.com/Dalanie/OpenGL-ES/tree/master/buildingGL
First you must define what you mean by "zoom". In the scenario of a perspective projection, there are a few possibilities:
You change the field of view. This is analogous to the zoom of cameras, where zooming results in changing the focal width.
You just scale the model before the projection.
You change the distance of the model (nearer to the camera to zoom in, farther away to zoom out)
Variant 1 is what you did by changing the frustum. In my opinion, that is the most intuitive effect — at least to someone who is used to cameras. Also note that this has the same effect as upscaling some sub-rectangle of the 2D projected image to fill the whole screen.
Changing eyeZ is approach 3. But now you must be careful not to move the object out of the viewing volume (which seems to be the issue you are describing). Ideally you would modify the frustum here, too — keeping the field of view while moving the near/far planes so that the object always stays in between. Note that this requires changing all 6 values of the frustum; what stays the same should be the ratios left/near, right/near, top/near and bottom/near, to keep the FOV/aspect you had before.

Categories

Resources