I am trying to develop a mobile Cardboard application that renders 3D objects on top of a camera view (a kind of AR).
I used this project and tried to render a simple cube in the camera:
https://github.com/Sveder/CardboardPassthrough/
I can't get it working: the background is always black, or the app crashes.
I would be very grateful for any help or suggestions.
Thanks
This is what I have, based on the original CardboardPassthrough.
Here is the code that works for the cubes:
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.os.Bundle;
import android.os.Vibrator;
import android.util.Log;
import android.view.KeyEvent;
import com.google.vrtoolkit.cardboard.*;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
public class Card extends CardboardActivity implements CardboardView.StereoRenderer, SurfaceTexture.OnFrameAvailableListener {
private static final float CAMERA_Z = 0.01f;
private static final float TIME_DELTA = 0.3f;
private static final float YAW_LIMIT = 0.12f;
private static final float PITCH_LIMIT = 0.12f;
//---------------------------------------------------
private int intCurrentI = -1;
private int intCurrentI1 = -1;
//---------------------------------------------------
// We keep the light positioned just above the user.
private final float[] mLightPosInWorldSpace = new float[]{0.0f, 2.0f, 0.0f, 1.0f};
private final float[] mLightPosInEyeSpace = new float[4];
private static final int COORDS_PER_VERTEX = 3;
private final WorldLayoutData DATA = new WorldLayoutData();
private FloatBuffer mCubeVertices;
private FloatBuffer mCubeColors;
private FloatBuffer mCubeFoundColors;
private FloatBuffer mCubeNormals;
private int mGlProgram;
private int mPositionParam;
private int mNormalParam;
private int mColorParam;
private int mModelViewProjectionParam;
private int mLightPosParam;
private int mModelViewParam;
private int mModelParam;
private int mIsFloorParam;
private float[] mModelCube;
private float[] mCamera;
private float[] mView;
private float[] mHeadView;
private float[] mModelViewProjection;
private float[] mModelView;
private float[] mModelCube2;
private float[] mModelFloor;
private float mObjectDistance = 12f;
private float mFloorDepth = 20f;
private Vibrator mVibrator;
private CardboardOverlayView mOverlayView;
private SurfaceTexture surface;
private Camera camera;
private static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
private final String vertexShaderCode =
"attribute vec4 position;" +
"attribute vec2 inputTextureCoordinate;" +
"varying vec2 textureCoordinate;" +
"void main()" +
"{" +
"gl_Position = position;" +
"textureCoordinate = inputTextureCoordinate;" +
"}";
private final String fragmentShaderCode =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;" +
"varying vec2 textureCoordinate; \n" +
"uniform samplerExternalOES s_texture; \n" +
"void main(void) {" +
" gl_FragColor = texture2D( s_texture, textureCoordinate );\n" +
//" gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);\n" +
"}";
private int texture;
private CardboardView cardboardView;
/**
* Converts a raw text file, saved as a resource, into an OpenGL ES shader
*
* @param type The type of shader we will be creating.
* @param resId The resource ID of the raw text file about to be turned into a shader.
* @return
*/
private int loadGLShader(int type, int resId) {
String code = readRawTextFile(resId);
int shader = GLES20.glCreateShader(type);
GLES20.glShaderSource(shader, code);
GLES20.glCompileShader(shader);
// Get the compilation status.
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
// If the compilation failed, delete the shader.
if (compileStatus[0] == 0) {
GLES20.glDeleteShader(shader);
shader = 0;
}
if (shader == 0) {
throw new RuntimeException("Error creating shader.");
}
return shader;
}
/**
* Checks if we've had an error inside of OpenGL ES, and if so what that error is.
*
* @param func
*/
private static void checkGLError(String func) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
throw new RuntimeException(func + ": glError " + error);
}
}
/**
* Sets the view to our CardboardView and initializes the transformation matrices we will use
* to render our scene.
*
* @param savedInstanceState
*/
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.common_ui);
cardboardView = (CardboardView) findViewById(R.id.cardboard_view);
/*********************/
cardboardView.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
/*********************/
cardboardView.setRenderer(this);
setCardboardView(cardboardView);
/* 2014-10-16 */
mModelCube2 = new float[16];
/* 2014-10-16 */
mModelCube = new float[16];
mCamera = new float[16];
mView = new float[16];
mModelViewProjection = new float[16];
mModelView = new float[16];
mModelFloor = new float[16];
mHeadView = new float[16];
mVibrator = (Vibrator) getSystemService(Context.VIBRATOR_SERVICE);
mOverlayView = (CardboardOverlayView) findViewById(R.id.overlay);
mOverlayView.show3DToast("VR-Test");
}
@Override
public void onRendererShutdown() {
}
@Override
public void onSurfaceChanged(int width, int height) {
}
/**
* Creates the buffers we use to store information about the 3D world. OpenGL doesn't use Java
* arrays, but rather needs data in a format it can understand. Hence we use ByteBuffers.
*
* @param config The EGL configuration used when creating the surface.
*/
@Override
public void onSurfaceCreated(EGLConfig config) {
GLES20.glClearColor(0.1f, 0.1f, 0.1f, 0.5f); // Dark background so text shows up well
ByteBuffer bbVertices = ByteBuffer.allocateDirect(DATA.CUBE_COORDS.length * 4);
bbVertices.order(ByteOrder.nativeOrder());
mCubeVertices = bbVertices.asFloatBuffer();
mCubeVertices.put(DATA.CUBE_COORDS);
mCubeVertices.position(0);
ByteBuffer bbColors = ByteBuffer.allocateDirect(DATA.CUBE_COLORS.length * 4);
bbColors.order(ByteOrder.nativeOrder());
mCubeColors = bbColors.asFloatBuffer();
mCubeColors.put(DATA.CUBE_COLORS);
mCubeColors.position(0);
ByteBuffer bbFoundColors = ByteBuffer.allocateDirect(DATA.CUBE_FOUND_COLORS.length * 4);
bbFoundColors.order(ByteOrder.nativeOrder());
mCubeFoundColors = bbFoundColors.asFloatBuffer();
mCubeFoundColors.put(DATA.CUBE_FOUND_COLORS);
mCubeFoundColors.position(0);
ByteBuffer bbNormals = ByteBuffer.allocateDirect(DATA.CUBE_NORMALS.length * 4);
bbNormals.order(ByteOrder.nativeOrder());
mCubeNormals = bbNormals.asFloatBuffer();
mCubeNormals.put(DATA.CUBE_NORMALS);
mCubeNormals.position(0);
int vertexShader = loadGLShader(GLES20.GL_VERTEX_SHADER, R.raw.light_vertex);
int gridShader = loadGLShader(GLES20.GL_FRAGMENT_SHADER, R.raw.grid_fragment);
mGlProgram = GLES20.glCreateProgram();
GLES20.glAttachShader(mGlProgram, vertexShader);
GLES20.glAttachShader(mGlProgram, gridShader);
GLES20.glLinkProgram(mGlProgram);
texture = createTexture();
startCamera(texture);
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
// Object first appears directly in front of user
Matrix.setIdentityM(mModelCube, 0);
Matrix.translateM(mModelCube, 0, 0, 0, -mObjectDistance);
Matrix.setIdentityM(mModelCube2, 0);
Matrix.translateM(mModelCube2, 0, -10.0f, -10.0f, -mObjectDistance - 12.0f);
Matrix.setIdentityM(mModelFloor, 0);
Matrix.translateM(mModelFloor, 0, 0, -mFloorDepth, 0); // Floor appears below user
checkGLError("onSurfaceCreated");
}
/**
* Converts a raw text file into a string.
*
* @param resId The resource ID of the raw text file about to be turned into a shader.
* @return
*/
private String readRawTextFile(int resId) {
InputStream inputStream = getResources().openRawResource(resId);
try {
BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
StringBuilder sb = new StringBuilder();
String line;
while ((line = reader.readLine()) != null) {
sb.append(line).append("\n");
}
reader.close();
return sb.toString();
} catch (IOException e) {
e.printStackTrace();
}
return "";
}
/**
* Prepares OpenGL ES before we draw a frame.
*
* @param headTransform The head transformation in the new frame.
*/
@Override
public void onNewFrame(HeadTransform headTransform) {
GLES20.glUseProgram(mGlProgram);
mModelViewProjectionParam = GLES20.glGetUniformLocation(mGlProgram, "u_MVP");
mLightPosParam = GLES20.glGetUniformLocation(mGlProgram, "u_LightPos");
mModelViewParam = GLES20.glGetUniformLocation(mGlProgram, "u_MVMatrix");
mModelParam = GLES20.glGetUniformLocation(mGlProgram, "u_Model");
mIsFloorParam = GLES20.glGetUniformLocation(mGlProgram, "u_IsFloor");
// Build the Model part of the ModelView matrix.
Matrix.rotateM(mModelCube, 0, TIME_DELTA, 0.5f, 0.5f, 1.0f);
Matrix.rotateM(mModelCube2, 0, TIME_DELTA, 0.5f, 0.5f, 1.0f);
//--------------------------------------
// Build the camera matrix and apply it to the ModelView.
Matrix.setLookAtM(mCamera, 0, 0.0f, 0.0f, CAMERA_Z, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f);
headTransform.getHeadView(mHeadView, 0);
checkGLError("onReadyToDraw");
}
/**
* Draws a frame for an eye. The transformation for that eye (from the camera) is passed in as
* a parameter.
*
* @param transform The transformations to apply to render this eye.
*/
@Override
public void onDrawEye(EyeTransform transform) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
mPositionParam = GLES20.glGetAttribLocation(mGlProgram, "a_Position");
mNormalParam = GLES20.glGetAttribLocation(mGlProgram, "a_Normal");
mColorParam = GLES20.glGetAttribLocation(mGlProgram, "a_Color");
GLES20.glEnableVertexAttribArray(mPositionParam);
GLES20.glEnableVertexAttribArray(mNormalParam);
GLES20.glEnableVertexAttribArray(mColorParam);
checkGLError("mColorParam");
// Apply the eye transformation to the camera.
Matrix.multiplyMM(mView, 0, transform.getEyeView(), 0, mCamera, 0);
// Set the position of the light
Matrix.multiplyMV(mLightPosInEyeSpace, 0, mView, 0, mLightPosInWorldSpace, 0);
GLES20.glUniform3f(mLightPosParam, mLightPosInEyeSpace[0], mLightPosInEyeSpace[1],
mLightPosInEyeSpace[2]);
// Build the ModelView and ModelViewProjection matrices
// for calculating cube position and light.
Matrix.multiplyMM(mModelView, 0, mView, 0, mModelCube, 0);
Matrix.multiplyMM(mModelViewProjection, 0, transform.getPerspective(), 0,
mModelView, 0);
drawCube(1);
//--------------------------------------
Matrix.multiplyMM(mModelView, 0, mView, 0, mModelCube2, 0);
Matrix.multiplyMM(mModelViewProjection, 0, transform.getPerspective(), 0,
mModelView, 0);
drawCube(0);
//--------------------------------------
}
@Override
public void onFinishFrame(Viewport viewport) {
}
public void drawCube(int i1) {
// This is not the floor!
GLES20.glUniform1f(mIsFloorParam, 0f);
// Set the Model in the shader, used to calculate lighting
if (i1 == 1) {
GLES20.glUniformMatrix4fv(mModelParam, 1, false, mModelCube, 0);
} else if (i1 == 0) {
//--2014-10-16 ??--------------------------------
GLES20.glUniformMatrix4fv(mModelParam, 1, false, mModelCube2, 0);
//-------------------------------------------------
}
// Set the ModelView in the shader, used to calculate lighting
GLES20.glUniformMatrix4fv(mModelViewParam, 1, false, mModelView, 0);
// Set the position of the cube
GLES20.glVertexAttribPointer(mPositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
false, 0, mCubeVertices);
// Set the ModelViewProjection matrix in the shader.
GLES20.glUniformMatrix4fv(mModelViewProjectionParam, 1, false, mModelViewProjection, 0);
// Set the normal positions of the cube, again for shading
GLES20.glVertexAttribPointer(mNormalParam, 3, GLES20.GL_FLOAT,
false, 0, mCubeNormals);
if (isLookingAtObject(i1)) {
GLES20.glVertexAttribPointer(mColorParam, 4, GLES20.GL_FLOAT, false,
0, mCubeFoundColors);
if (i1 == 1)
intCurrentI1 = i1;
else
intCurrentI1 = -1;
intCurrentI = i1;
System.out.println("drawCube->intCurrentI2:" + intCurrentI);
} else {
GLES20.glVertexAttribPointer(mColorParam, 4, GLES20.GL_FLOAT, false,
0, mCubeColors);
intCurrentI = -1;
}
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 36);
checkGLError("Drawing cube");
if (intCurrentI1 != -1)
intCurrentI = intCurrentI1;
System.out.println("drawCube_out_if->intCurrentI4:" + intCurrentI);
}
private boolean isLookingAtObject(int i1) {
float[] initVec = {0, 0, 0, 1.0f};
float[] objPositionVec = new float[4];
System.out.println("isLookingAtObject1->i1:" + i1);
// Convert object space to camera space. Use the headView from onNewFrame.
if (i1 == 1) {
Matrix.multiplyMM(mModelView, 0, mHeadView, 0, mModelCube, 0);
Matrix.multiplyMV(objPositionVec, 0, mModelView, 0, initVec, 0);
intCurrentI = i1;
} else if (i1 == 0) {
Matrix.multiplyMM(mModelView, 0, mHeadView, 0, mModelCube2, 0);
Matrix.multiplyMV(objPositionVec, 0, mModelView, 0, initVec, 0);
intCurrentI = i1;
}
float pitch = (float) Math.atan2(objPositionVec[1], -objPositionVec[2]);
float yaw = (float) Math.atan2(objPositionVec[0], -objPositionVec[2]);
boolean bool1 = (Math.abs(pitch) < PITCH_LIMIT) && (Math.abs(yaw) < YAW_LIMIT);
return bool1;
}
public void startCamera(int texture) {
surface = new SurfaceTexture(texture);
surface.setOnFrameAvailableListener(this);
camera = Camera.open();
try {
camera.setPreviewTexture(surface);
camera.startPreview();
} catch (IOException ioe) {
Log.w("MainActivity", "CAM LAUNCH FAILED");
}
}
static private int createTexture() {
int[] texture = new int[1];
GLES20.glGenTextures(1, texture, 0);
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture[0]);
GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
return texture[0];
}
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
this.cardboardView.requestRender();
}
}
Edit 11.07.16:
I cut the vertices in half to see possible vertices behind them.
But I think the problem is the vertex and fragment shaders.
The cube only shows up when I'm using these vertex and fragment shaders:
simple_fragment.shader
precision mediump float;
varying vec4 v_Color;
void main() {
gl_FragColor = v_Color;
}
light_vertex.shader
uniform mat4 u_MVP;
uniform mat4 u_MVMatrix;
uniform mat4 u_Model;
uniform vec3 u_LightPos;
uniform float u_IsFloor;
attribute vec4 a_Position;
attribute vec4 a_Color;
attribute vec3 a_Normal;
varying vec4 v_Color;
varying vec3 v_Grid;
varying float v_isFloor;
void main()
{
vec3 modelVertex = vec3(u_Model * a_Position);
v_Grid = modelVertex;
vec3 modelViewVertex = vec3(u_MVMatrix * a_Position);
vec3 modelViewNormal = vec3(u_MVMatrix * vec4(a_Normal, 0.0));
float distance = length(u_LightPos - modelViewVertex);
vec3 lightVector = normalize(u_LightPos - modelViewVertex);
float diffuse = max(dot(modelViewNormal, lightVector), 0.5 );
diffuse = diffuse * (1.0 / (1.0 + (0.00001 * distance * distance)));
v_Color = a_Color * diffuse;
gl_Position = u_MVP * a_Position;
v_isFloor = u_IsFloor;
}
and the camera preview only shows up when I'm using these shaders:
vertex.shader
attribute vec4 position;
attribute vec2 inputTextureCoordinate;
varying vec2 textureCoordinate;
void main()
{
gl_Position = position;
textureCoordinate = inputTextureCoordinate;
}
fragment.shader
#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 textureCoordinate;
varying vec4 v_Color;
uniform samplerExternalOES s_texture;
void main(void) {
gl_FragColor = texture2D( s_texture, textureCoordinate );
}
I don't know how to fix the shaders.
I would suggest that you disable the depth test (drop the glEnable(GL_DEPTH_TEST) call) so that the background does not occlude objects drawn after it, and then switch between the two shader programs using:
GLES20.glUseProgram();
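In outline, the per-eye drawing then becomes something like this (a rough sketch only; mCameraProgram and mCubeProgram are the two linked programs from the example below):
// Depth testing stays off so the full-screen camera quad never hides the cube.
GLES20.glDisable(GLES20.GL_DEPTH_TEST);
GLES20.glUseProgram(mCameraProgram); // first pass: draw the camera quad
// ... set the quad's attributes and draw it ...
GLES20.glUseProgram(mCubeProgram); // second pass: draw the cube on top
// ... set the cube's attributes/uniforms and draw it ...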
For your example this might be:
@Override
public void onSurfaceCreated(EGLConfig config) {
Log.i(TAG, "onSurfaceCreated");
GLES20.glClearColor(0.1f, 0.1f, 0.1f, 0.5f); // Dark background so text shows up well
ByteBuffer bb = ByteBuffer.allocateDirect(squareVertices.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(squareVertices);
vertexBuffer.position(0);
ByteBuffer dlb = ByteBuffer.allocateDirect(drawOrder.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
ByteBuffer bb2 = ByteBuffer.allocateDirect(textureVertices.length * 4);
bb2.order(ByteOrder.nativeOrder());
textureVerticesBuffer = bb2.asFloatBuffer();
textureVerticesBuffer.put(textureVertices);
textureVerticesBuffer.position(0);
//Cube
ByteBuffer bbVertices = ByteBuffer.allocateDirect(DATA.CUBE_COORDS.length * 4);
bbVertices.order(ByteOrder.nativeOrder());
mCubeVertices = bbVertices.asFloatBuffer();
mCubeVertices.put(DATA.CUBE_COORDS);
mCubeVertices.position(0);
ByteBuffer bbColors = ByteBuffer.allocateDirect(DATA.CUBE_COLORS.length * 4);
bbColors.order(ByteOrder.nativeOrder());
mCubeColors = bbColors.asFloatBuffer();
mCubeColors.put(DATA.CUBE_COLORS);
mCubeColors.position(0);
ByteBuffer bbFoundColors = ByteBuffer.allocateDirect(DATA.CUBE_FOUND_COLORS.length * 4);
bbFoundColors.order(ByteOrder.nativeOrder());
mCubeFoundColors = bbFoundColors.asFloatBuffer();
mCubeFoundColors.put(DATA.CUBE_FOUND_COLORS);
mCubeFoundColors.position(0);
ByteBuffer bbNormals = ByteBuffer.allocateDirect(DATA.CUBE_NORMALS.length * 4);
bbNormals.order(ByteOrder.nativeOrder());
mCubeNormals = bbNormals.asFloatBuffer();
mCubeNormals.put(DATA.CUBE_NORMALS);
mCubeNormals.position(0);
int vertexShader = loadGLShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = loadGLShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
mCameraProgram = GLES20.glCreateProgram(); // create empty OpenGL ES Program
GLES20.glAttachShader(mCameraProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(mCameraProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mCameraProgram);
vertexShader = loadGLShader(GLES20.GL_VERTEX_SHADER, R.raw.light_vertex);
fragmentShader = loadGLShader(GLES20.GL_FRAGMENT_SHADER, R.raw.grid_fragment);
mCubeProgram = GLES20.glCreateProgram(); // create empty OpenGL ES Program
GLES20.glAttachShader(mCubeProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(mCubeProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mCubeProgram);
texture = createTexture();
startCamera(texture);
Matrix.setIdentityM(mModelCube, 0);
Matrix.translateM(mModelCube, 0, 0, 0, -mObjectDistance);
checkGLError("onSurfaceCreated");
}
@Override
public void onNewFrame(HeadTransform headTransform) {
GLES20.glUseProgram(mCubeProgram);
mModelViewProjectionParam = GLES20.glGetUniformLocation(mCubeProgram, "u_MVP");
mLightPosParam = GLES20.glGetUniformLocation(mCubeProgram, "u_LightPos");
mModelViewParam = GLES20.glGetUniformLocation(mCubeProgram, "u_MVMatrix");
mModelParam = GLES20.glGetUniformLocation(mCubeProgram, "u_Model");
mIsFloorParam = GLES20.glGetUniformLocation(mCubeProgram, "u_IsFloor");
// Build the Model part of the ModelView matrix.
Matrix.rotateM(mModelCube, 0, TIME_DELTA, 0.5f, 0.5f, 1.0f);
// Build the camera matrix and apply it to the ModelView.
Matrix.setLookAtM(mCamera, 0, 0.0f, 0.0f, CAMERA_Z, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f);
headTransform.getHeadView(mHeadView, 0);
GLES20.glUseProgram(mCameraProgram);
float[] mtx = new float[16];
//GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
surface.updateTexImage();
surface.getTransformMatrix(mtx);
}
@Override
public void onDrawEye(EyeTransform transform) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
//Camera
GLES20.glUseProgram(mCameraProgram);
GLES20.glActiveTexture(GL_TEXTURE_EXTERNAL_OES);
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture);
mPositionHandle = GLES20.glGetAttribLocation(mCameraProgram, "position");
GLES20.glEnableVertexAttribArray(mPositionHandle);
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
false, vertexStride, vertexBuffer);
mTextureCoordHandle = GLES20.glGetAttribLocation(mCameraProgram, "inputTextureCoordinate");
GLES20.glEnableVertexAttribArray(mTextureCoordHandle);
GLES20.glVertexAttribPointer(mTextureCoordHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
false, vertexStride, textureVerticesBuffer);
mColorHandle = GLES20.glGetAttribLocation(mCameraProgram, "s_texture");
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length,
GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
GLES20.glDisableVertexAttribArray(mTextureCoordHandle);
//cube
GLES20.glUseProgram(mCubeProgram);
mPositionParam = GLES20.glGetAttribLocation(mCubeProgram, "a_Position");
mNormalParam = GLES20.glGetAttribLocation(mCubeProgram, "a_Normal");
mColorParam = GLES20.glGetAttribLocation(mCubeProgram, "a_Color");
GLES20.glEnableVertexAttribArray(mPositionParam);
GLES20.glEnableVertexAttribArray(mNormalParam);
GLES20.glEnableVertexAttribArray(mColorParam);
// Set the position of the light
Matrix.multiplyMV(mLightPosInEyeSpace, 0, mView, 0, mLightPosInWorldSpace, 0);
GLES20.glUniform3f(mLightPosParam, mLightPosInEyeSpace[0], mLightPosInEyeSpace[1],
mLightPosInEyeSpace[2]);
Matrix.multiplyMM(mModelView, 0, mView, 0, mModelCube, 0);
Matrix.multiplyMM(mModelViewProjection, 0, transform.getPerspective(), 0,
mModelView, 0);
drawCube(1);
Matrix.multiplyMM(mView, 0, transform.getEyeView(), 0, mCamera, 0);
Just an open suggestion: I developed an AR project for a university assignment a couple of months ago. In my case I used a tool called Vuforia and integrated it with Unity so it would run on mobile devices. You can get your app to work on both Android and iOS devices. The latest releases of both Unity and Vuforia help a lot with AR development, since AR is currently at the height of its hype.
Depending on the work you need your AR project to perform, and on your experience with Unity, the learning curve grows. In my case, I augmented the construction of a roof for a Neolithic site. I also used a third-party tool called MakeHuman, together with Blender, to create a walking human being. In the whole project, I didn't need to touch a line of code at all :)
Hope this helps.
Related
I'm new to developing Android games using OpenGL ES 2.0.
I recently made a simple game where players must touch hidden numbers, in order, within 3 seconds to win a level.
Each level contains two more numbers than its level number (for example, level 5 has 5 + 2 numbers).
The game works, but not as well as I expected, in two respects:
1 - The textures shown in my game are low in quality compared with the same images shown in my Image Viewer application.
This is a screenshot of my game:
[game screenshot]
And this is a screenshot of my Image Viewer application:
[Image Viewer screenshot]
This is my Texture Class:
package com.theNumbers.openGLES;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLES20;
import android.opengl.GLUtils;
public class OpenGLESTexture {
private final Context mActivityContext;
private final FloatBuffer mCubeTextureCoordinates;
private int mTextureUniformHandle;
private int mTextureCoordinateHandle;
private final int mTextureCoordinateDataSize = 2;
private int mTextureDataHandle;
private final String vertexShaderCode =
"attribute vec2 a_TexCoordinate;" +
"varying vec2 v_TexCoordinate;" +
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"void main() {" +
" gl_Position = uMVPMatrix * vPosition;" +
" v_TexCoordinate = a_TexCoordinate;" +
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform sampler2D u_Texture;" +
"varying vec2 v_TexCoordinate;" +
"void main() {" +
" gl_FragColor = texture2D(u_Texture, v_TexCoordinate);" +
"}";
private final int shaderProgram;
private final FloatBuffer vertexBuffer;
private final ShortBuffer drawListBuffer;
private int mPositionHandle;
private int mMVPMatrixHandle;
static final int COORDS_PER_VERTEX = 2;
private float[] spriteCoords = new float[8];
private short drawOrder[] = { 0, 1, 2, 0, 2, 3 };
private final int vertexStride = COORDS_PER_VERTEX * 4;
private final float[] mTextureSize = new float[2];
public OpenGLESTexture(final Context activityContext, final int resourceId, final short width, final short height) {
mActivityContext = activityContext;
mTextureSize[0] = width;
mTextureSize[1] = height;
spriteCoords[0] = width / 2f;
spriteCoords[1] = height / 2f;
spriteCoords[2] = -width / 2f;
spriteCoords[3] = height / 2f;
spriteCoords[4] = -width / 2f;
spriteCoords[5] = -height / 2f;
spriteCoords[6] = width / 2f;
spriteCoords[7] = -height / 2f;
ByteBuffer bb = ByteBuffer.allocateDirect(spriteCoords.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(spriteCoords);
vertexBuffer.position(0);
final float[] cubeTextureCoordinateData =
{
0.0f, 0.0f,
1.0f, 0.0f,
1.0f, 1.0f,
0.0f, 1.0f
};
mCubeTextureCoordinates = ByteBuffer.allocateDirect(cubeTextureCoordinateData.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
mCubeTextureCoordinates.put(cubeTextureCoordinateData).position(0);
ByteBuffer dlb = ByteBuffer.allocateDirect(spriteCoords.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
int vertexShader = OpenGLESRenderer.loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = OpenGLESRenderer.loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
shaderProgram = GLES20.glCreateProgram();
GLES20.glAttachShader(shaderProgram, vertexShader);
GLES20.glAttachShader(shaderProgram, fragmentShader);
GLES20.glBindAttribLocation(shaderProgram, 0, "a_TexCoordinate");
GLES20.glLinkProgram(shaderProgram);
mTextureDataHandle = loadTexture(mActivityContext, resourceId);
}
public void draw(float[] mvpMatrix)
{
GLES20.glUseProgram(shaderProgram);
mPositionHandle = GLES20.glGetAttribLocation(shaderProgram, "vPosition");
GLES20.glEnableVertexAttribArray(mPositionHandle);
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
mTextureUniformHandle = GLES20.glGetUniformLocation(shaderProgram, "u_Texture"); // u_Texture is a uniform, not an attribute
mTextureCoordinateHandle = GLES20.glGetAttribLocation(shaderProgram, "a_TexCoordinate");
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);
GLES20.glUniform1i(mTextureUniformHandle, 0);
GLES20.glUniform2fv(GLES20.glGetUniformLocation(shaderProgram, "u_TextureSize"), 1, mTextureSize, 0);//
mCubeTextureCoordinates.position(0);
GLES20.glVertexAttribPointer(mTextureCoordinateHandle, mTextureCoordinateDataSize, GLES20.GL_FLOAT, false, 0, mCubeTextureCoordinates);
GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
mMVPMatrixHandle = GLES20.glGetUniformLocation(shaderProgram, "uMVPMatrix");
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length, GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
public static int loadTexture(final Context context, final int resourceId)
{
final int[] textureHandle = new int[1];
GLES20.glGenTextures(1, textureHandle, 0);
if (textureHandle[0] != 0)
{
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inScaled = false;
final Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), resourceId, options);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
bitmap.recycle();
GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
}
if (textureHandle[0] == 0)
{
throw new RuntimeException("Error loading texture.");
}
return textureHandle[0];
}
}
And this is my Renderer Class:
package com.theNumbers.openGLES;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import com.theNumbers.game.AssetsManager;
import android.content.Context;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
public class OpenGLESRenderer implements GLSurfaceView.Renderer {
private final Context mActivityContext;
private final float[] mMVPMatrix = new float[16];
private final float[] mProjMatrix = new float[16];
private final float[] mVMatrix = new float[16];
private float[] mRotationMatrix = new float[16];
private boolean onCreate = true;
public static float[] mStaticMVPMatrix = new float[16];
public volatile float mAngle;
public OpenGLESRenderer(final Context context) {
mActivityContext = context;
}
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ONE_MINUS_SRC_ALPHA);
AssetsManager.loadAssets(mActivityContext);
}
public void onDrawFrame(GL10 unused) {
GLES20.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
Matrix.setLookAtM(mVMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
Matrix.setRotateM(mRotationMatrix, 0, mAngle, 0, 0, -1.0f);
Matrix.multiplyMM(mMVPMatrix, 0, mRotationMatrix, 0, mMVPMatrix, 0);
AssetsManager.equalizeMatrices(mMVPMatrix);
if (onCreate) {
onCreate = false;
AssetsManager.load(mActivityContext);
AssetsManager.save(mActivityContext);
AssetsManager.createGUI();
AssetsManager.updateLines();
AssetsManager.mDoesFileExist = false;
}
AssetsManager.updateTime();
AssetsManager.drawGUI();
AssetsManager.drawRooms(mActivityContext);
AssetsManager.mIsTouched = false;
}
public void onSurfaceChanged(GL10 unused, int width, int height) {
float aspectRatio = (float) width / height;
GLES20.glViewport(0, 0, width, height);
Matrix.frustumM(mProjMatrix, 0, -aspectRatio * 960, aspectRatio * 960, -960, 960, 3, 7);
AssetsManager.updateLines();
}
public static int loadShader(int type, String shaderCode) {
int shader = GLES20.glCreateShader(type);
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
}
You can get the complete source code and the APK file here.
2 - Others can easily copy the textures I used in my game.
Just rename the .apk file to .zip and open it to see all of my textures.
How can I solve these two issues that I explained above?
I'm using OpenGL on Android to visualize a large point-cloud dataset stored in an array. As a first step, I wanted to show a few points on the screen to check that the pipeline is correct. I used a VBO to render them, but only one point shows up in the middle of the screen. I have implemented the code below, which I found on the internet here.
Here is my PointRenderer class:
public class PointRenderer implements GLSurfaceView.Renderer {
private float[] mModelMatrix = new float[16];
private float[] mViewMatrix = new float[16];
private float[] mProjectionMatrix = new float[16];
private float[] mMVPMatrix = new float[16];
private int mMVPMatrixHandle;
private int mPositionHandle;
float[] vertices = {
0.0f, 0.5f, 0.0f,
-100.5f, -100.5f, 0.0f,
100.5f, -100.5f, 0.0f
};
FloatBuffer vertexBuf;
@Override
public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
vertexBuf = ByteBuffer.allocateDirect(vertices.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
vertexBuf.put(vertices).position(0);
// Set the background clear color to gray.
GLES20.glClearColor(0.5f, 0.5f, 0.5f, 0.5f);
float eyeX = 0.0f;
float eyeY = 0.0f;
float eyeZ = 0.0f;
float centerX = 0.0f;
float centerY = 0.0f;
float centerZ = -5.0f;
float upX = 0.0f;
float upY = 1.0f;
float upZ = 0.0f;
// Set the view matrix. This matrix can be said to represent the camera position.
// NOTE: In OpenGL 1, a ModelView matrix is used, which is a combination of a model and
// view matrix. In OpenGL 2, we can keep track of these matrices separately if we choose.
Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, centerX, centerY, centerZ, upX, upY, upZ);
final String vertexShader =
"uniform mat4 u_MVPMatrix; \n"
+ "attribute vec4 a_Position; \n"
+ "void main() \n"
+ "{ \n"
+ " gl_Position = u_MVPMatrix \n"
+ " * a_Position; \n"
+ " gl_PointSize = 10.0; \n"
+ "} \n";
final String fragmentShader =
"precision mediump float; \n"
+ "void main() \n"
+ "{ \n"
+ " gl_FragColor = vec4(1.0, \n"
+ " 1.0, 1.0, 1.0); \n"
+ "} \n";
// Load in the vertex shader.
int vertexShaderHandle = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
if (vertexShaderHandle != 0) {
// Pass in the shader source.
GLES20.glShaderSource(vertexShaderHandle, vertexShader);
// Compile the shader.
GLES20.glCompileShader(vertexShaderHandle);
// Get the compilation status.
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(vertexShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
// If the compilation failed, delete the shader.
if (compileStatus[0] == 0) {
GLES20.glDeleteShader(vertexShaderHandle);
vertexShaderHandle = 0;
}
}
if (vertexShaderHandle == 0) {
throw new RuntimeException("Error creating vertex shader.");
}
// Load in the fragment shader shader.
int fragmentShaderHandle = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
if (fragmentShaderHandle != 0) {
// Pass in the shader source.
GLES20.glShaderSource(fragmentShaderHandle, fragmentShader);
// Compile the shader.
GLES20.glCompileShader(fragmentShaderHandle);
// Get the compilation status.
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(fragmentShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
// If the compilation failed, delete the shader.
if (compileStatus[0] == 0) {
GLES20.glDeleteShader(fragmentShaderHandle);
fragmentShaderHandle = 0;
}
}
if (fragmentShaderHandle == 0) {
throw new RuntimeException("Error creating fragment shader.");
}
// Create a program object and store the handle to it.
int programHandle = GLES20.glCreateProgram();
if (programHandle != 0) {
// Bind the vertex shader to the program.
GLES20.glAttachShader(programHandle, vertexShaderHandle);
// Bind the fragment shader to the program.
GLES20.glAttachShader(programHandle, fragmentShaderHandle);
// Bind attributes
GLES20.glBindAttribLocation(programHandle, 0, "a_Position");
// Link the two shaders together into a program.
GLES20.glLinkProgram(programHandle);
// Get the link status.
final int[] linkStatus = new int[1];
GLES20.glGetProgramiv(programHandle, GLES20.GL_LINK_STATUS, linkStatus, 0);
// If the link failed, delete the program.
if (linkStatus[0] == 0) {
GLES20.glDeleteProgram(programHandle);
programHandle = 0;
}
}
if (programHandle == 0) {
throw new RuntimeException("Error creating program.");
}
// Set program handles. These will later be used to pass in values to the program.
mMVPMatrixHandle = GLES20.glGetUniformLocation(programHandle, "u_MVPMatrix");
mPositionHandle = GLES20.glGetAttribLocation(programHandle, "a_Position");
// Tell OpenGL to use this program when rendering.
GLES20.glUseProgram(programHandle);
}
@Override
public void onSurfaceChanged(GL10 glUnused, int width, int height) {
// Set the OpenGL viewport to the same size as the surface.
GLES20.glViewport(0, 0, width, height);
// Create a new perspective projection matrix. The height will stay the same
// while the width will vary as per aspect ratio.
final float ratio = (float) width / height;
final float left = -ratio;
final float right = ratio;
final float bottom = -1.0f;
final float top = 1.0f;
final float near = 1.0f;
final float far = 100.0f;
Matrix.frustumM(mProjectionMatrix, 0, left, right, bottom, top, near, far);
}
@Override
public void onDrawFrame(GL10 glUnused) {
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
Matrix.setIdentityM(mModelMatrix, 0);
//Push to the distance - note this will have no effect on a point size
Matrix.translateM(mModelMatrix, 0, 0.0f, 0.0f, -5.0f);
Matrix.multiplyMV(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
Matrix.multiplyMV(mMVPMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
//Send the vertex
GLES20.glVertexAttribPointer(mPositionHandle, 3, GLES20.GL_FLOAT, false, 0, vertexBuf);
GLES20.glEnableVertexAttribArray(mPositionHandle);
//Draw the point
GLES20.glDrawArrays(GLES20.GL_POINTS, 0, vertices.length/3);
}
}
I suspect the coordinates for two of your points are outside of the visible frustum. Try replacing the 100.5 and -100.5 with something smaller and see if that helps.
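For instance, with the frustum set up in onSurfaceChanged (near 1, far 100) and the model translated to z = -5, coordinates on the order of one unit stay inside the visible volume. A quick test might look like this (the values are only an illustration, not the ones your point cloud ultimately needs):
float[] vertices = {
    0.0f,  0.5f, 0.0f,   // top
   -0.5f, -0.5f, 0.0f,   // bottom left, was -100.5f
    0.5f, -0.5f, 0.0f    // bottom right, was 100.5f
};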
You have to enable both the vertex attribute and the texture coordinate attribute:
GLES20.glEnableVertexAttribArray(mVertexAttrib)
GLES20.glEnableVertexAttribArray(mTexCoordAttrib)
I have a crash in my call to glDrawArrays when trying to render a .obj file loaded with a library. I don't know what is happening, since this is my first time using OpenGL ES. My guess was that the number of triangles was wrong, but after trying glDrawArrays with 10 as the count parameter, I realised this might not be the problem.
The code of my renderer:
private class RocketArrowRenderer implements GLSurfaceView.Renderer {
private final int mBytesPerFloat = 4;
private Context mContext;
private FloatBuffer mVertices;
private int mMVPMatrixHandle;
private int mPositionHandle;
private float[] mModelMatrix = new float[16];
private int mColorHandle;
private float[] mMVPMatrix = new float[16];
private final int mStrideBytes = 3 * mBytesPerFloat;
private final int mPositionOffset = 0;
private final int mPositionDataSize = 3;
private final int mColorOffset = 3;
private final int mColorDataSize = 4;
private float[] mViewMatrix = new float[16];
final String vertexShader =
"uniform mat4 u_MVPMatrix; \n" // A constant representing the combined model/view/projection matrix.
+ "attribute vec4 a_Position; \n" // Per-vertex position information we will pass in.
+ "uniform vec4 a_Color; \n" // Per-vertex color information we will pass in.
+ "varying vec4 v_Color; \n" // This will be passed into the fragment shader.
+ "void main() \n" // The entry point for our vertex shader.
+ "{ \n"
+ " v_Color = a_Color; \n" // Pass the color through to the fragment shader.
// It will be interpolated across the triangle.
+ " gl_Position = u_MVPMatrix \n" // gl_Position is a special variable used to store the final position.
+ " * a_Position; \n" // Multiply the vertex by the matrix to get the final point in
+ "} \n"; // normalized screen coordinates.
final String fragmentShader =
"precision mediump float; \n" // Set the default precision to medium. We don't need as high of a
// precision in the fragment shader.
+ "varying vec4 v_Color; \n" // This is the color from the vertex shader interpolated across the
// triangle per fragment.
+ "void main() \n" // The entry point for our fragment shader.
+ "{ \n"
+ " gl_FragColor = v_Color; \n" // Pass the color directly through the pipeline.
+ "} \n";
final float eyeX = 0.0f;
final float eyeY = 0.0f;
final float eyeZ = 25.0f;
final float lookX = 0.0f;
final float lookY = 0.0f;
final float lookZ = 0.0f;
final float upX = 0.0f;
final float upY = 1.0f;
final float upZ = 0.0f;
private boolean mObjLoaded = false;
public RocketArrowRenderer(Context context)
{
mContext = context;
new Thread(new Runnable() {
public void run() {
Resources res = mContext.getResources();
InputStream inputStream = res.openRawResource(R.raw.falcon_heavy_obj);
Obj obj = null;
try {
obj = ObjUtils.convertToRenderable(ObjReader.read(inputStream));
} catch (IOException e) {
e.printStackTrace();
}
mVertices = ObjData.getVertices(obj);
mObjLoaded = true;
}
}).start();
}
@Override
public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
GLES20.glClearColor(0.5f, 0.5f, 0.5f, 0.5f);
Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ);
int vertexShaderHandle = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
if (vertexShaderHandle != 0)
{
GLES20.glShaderSource(vertexShaderHandle, vertexShader);
GLES20.glCompileShader(vertexShaderHandle);
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(vertexShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
if (compileStatus[0] == 0)
{
GLES20.glDeleteShader(vertexShaderHandle);
vertexShaderHandle = 0;
}
}
if (vertexShaderHandle == 0)
{
throw new RuntimeException("Error creating vertex shader.");
}
// Load in the vertex shader.
int fragmentShaderHandle = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
if (fragmentShaderHandle != 0) {
GLES20.glShaderSource(fragmentShaderHandle, fragmentShader);
GLES20.glCompileShader(fragmentShaderHandle);
final int[] compileStatus2 = new int[1];
GLES20.glGetShaderiv(fragmentShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus2, 0);
if(compileStatus2[0] == 0){
GLES20.glDeleteShader(fragmentShaderHandle);
fragmentShaderHandle = 0;
}
}
if (fragmentShaderHandle == 0) {
throw new RuntimeException("Error creating fragment shader");
}
int programHandle = GLES20.glCreateProgram();
if (programHandle != 0)
{
GLES20.glAttachShader(programHandle, vertexShaderHandle);
GLES20.glAttachShader(programHandle, fragmentShaderHandle);
GLES20.glBindAttribLocation(programHandle, 0, "a_Position");
GLES20.glBindAttribLocation(programHandle, 1, "a_Color");
GLES20.glLinkProgram(programHandle);
final int[] linkStatus = new int[1];
GLES20.glGetProgramiv(programHandle, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] == 0)
{
GLES20.glDeleteProgram(programHandle);
programHandle = 0;
}
}
if (programHandle == 0) {
throw new RuntimeException("Error creating program.");
}
mMVPMatrixHandle = GLES20.glGetUniformLocation(programHandle, "u_MVPMatrix");
mPositionHandle = GLES20.glGetAttribLocation(programHandle, "a_Position");
mColorHandle = GLES20.glGetAttribLocation(programHandle, "a_Color");
GLES20.glUseProgram(programHandle);
}
private float[] mProjectionMatrix = new float[16];
@Override
public void onSurfaceChanged(GL10 glUnused, int width, int height)
{
GLES20.glViewport(0, 0, width, height);
final float ratio = (float) width / height;
final float left = -ratio;
final float right = ratio;
final float bottom = -1.0f;
final float top = 8.0f;
final float near = 1.0f;
final float far = 10.0f;
Matrix.frustumM(mProjectionMatrix, 0, left, right, bottom, top, near, far);
}
@Override
public void onDrawFrame(GL10 glUnused)
{
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
Matrix.setIdentityM(mModelMatrix, 0);
if(mObjLoaded){
draw();
}
}
private void draw() {
int numberOfTriangles = mVertices.position(0).remaining() / 3;
//mVertices.position(mPositionOffset);
GLES20.glVertexAttribPointer(mPositionHandle, mPositionDataSize, GLES20.GL_FLOAT, false,
0, 0);
GLES20.glEnableVertexAttribArray(mPositionHandle);
//and later, in draw
GLES20.glUniform4f(mColorHandle, 1.0f, 0.0f, 0.0f, 1.0f); //red!
Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, numberOfTriangles);
}
}
You need to call GLES20.glEnableVertexAttribArray(mPositionHandle);
before using
GLES20.glVertexAttribPointer(mPositionHandle, mPositionDataSize, GLES20.GL_FLOAT, false,
0, 0);
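Applied to the draw() method above, the reordered calls might look like the sketch below. Note that it also passes the mVertices FloatBuffer itself instead of the integer offset 0, since this code never binds a VBO, so a plain offset has nothing to point into:
// Enable the attribute first, then point it at the client-side vertex buffer.
GLES20.glEnableVertexAttribArray(mPositionHandle);
mVertices.position(0);
GLES20.glVertexAttribPointer(mPositionHandle, mPositionDataSize, GLES20.GL_FLOAT,
        false, 0, mVertices);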
I'm trying to build a 3D picture viewer like the one in the Google Cardboard demo. It reads an image from disk and displays it slightly differently for each eye to achieve a 3D effect.
As a first step, I'm trying to get the same image to show up for both eyes. Due to other limitations, I'm using the Google Cardboard Android SDK instead of the Unity SDK.
I modified code based on a wonderful Cardboard camera see-through project (
https://github.com/Sveder/CardboardPassthrough
How to show 2 camera preview side by side? [For cardboard apps]
), which streams camera data to Cardboard.
Anyway, after modifying the code, I can't get the image to show up, though OpenGL and Android don't report any errors. My basic idea is as follows:
Use Android's BitmapFactory to read an image stored in res/ into a Bitmap.
For each frame, use GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, img, 0) to upload that image to the texture bound on the currently active texture unit, GLES20.GL_TEXTURE0.
Display that image.
So in onDrawEye, I set the current active texture unit to GL_TEXTURE0 and then bind my image texture to GL_TEXTURE_2D.
onDrawEye Code:
public void onDrawEye(EyeTransform transform) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
// Bind the texture to this unit.
bindBitmap2D(texture);
checkGLError("bind bit map 2d");
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "position");
GLES20.glEnableVertexAttribArray(mPositionHandle);
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
false, vertexStride, vertexBuffer);
mTextureCoordHandle = GLES20.glGetAttribLocation(mProgram, "inputTextureCoordinate");
GLES20.glEnableVertexAttribArray(mTextureCoordHandle);
GLES20.glVertexAttribPointer(mTextureCoordHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
false, vertexStride, textureVerticesBuffer);
mColorHandle = GLES20.glGetAttribLocation(mProgram, "s_texture");
}
function bindBitmap2D:
public void bindBitmap2D(int texture) {
//must be GL_TEXTUREi
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
checkGLError("glActiveTexture");
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture);
checkGLError("glbindtexture");
// Set filtering
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
Bitmap img = BitmapFactory.decodeResource(getResources(), R.drawable.ic_launcher);
//convert bitmap to texture and bind the texture to GL_TEXTURE_2D
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, img, 0);
}
My fragment shader:
private final String fragmentShaderCode2D =
"precision mediump float;\n" +
"varying vec2 textureCoordinate; \n" +
"uniform sampler2D s_texture; \n" +
"void main(void) {" +
" gl_FragColor = texture2D( s_texture, textureCoordinate );\n" +
"}";
Below is my full code:
package com.sveder.cardboardpassthrough;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.SurfaceTexture;
import android.graphics.SurfaceTexture.OnFrameAvailableListener;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import android.os.Bundle;
import android.util.Log;
import com.google.vrtoolkit.cardboard.*;
import javax.microedition.khronos.egl.EGLConfig;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
public class MainActivity extends CardboardActivity
implements CardboardView.StereoRenderer, OnFrameAvailableListener {
private static final String TAG = "MainActivity";
private final String vertexShaderCode =
"attribute vec4 position;" +
"attribute vec2 inputTextureCoordinate;" +
"varying vec2 textureCoordinate;" +
"void main()" +
"{" +
"gl_Position = position;" +
"textureCoordinate = inputTextureCoordinate;" +
"}";
//TODO vec4 maybe? sampler 2d?
// pandora box
private final String fragmentShaderCode2D =
"precision mediump float;\n" +
"varying vec2 textureCoordinate; \n" +
"uniform sampler2D s_texture; \n" +
"void main(void) {" +
" gl_FragColor = texture2D( s_texture, textureCoordinate );\n" +
"}";
private FloatBuffer vertexBuffer, textureVerticesBuffer, vertexBuffer2;
private ShortBuffer drawListBuffer, buf2;
private int mProgram;
private int mPositionHandle, mPositionHandle2;
private int mColorHandle;
private int mTextureCoordHandle;
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 2;
static float squareVertices[] = { // in counterclockwise order:
-1.0f, -1.0f, // 0.left - mid
1.0f, -1.0f, // 1. right - mid
-1.0f, 1.0f, // 2. left - top
1.0f, 1.0f, // 3. right - top
};
private short drawOrder[] = {0, 2, 1, 1, 2, 3}; // order to draw vertices
static float textureVertices[] = {
0.0f, 1.0f, // A. left-bottom
1.0f, 1.0f, // B. right-bottom
0.0f, 0.0f, // C. left-top
1.0f, 0.0f // D. right-top
};
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex
private ByteBuffer indexBuffer; // Buffer for index-array
private int texture;
private CardboardOverlayView mOverlayView;
private CardboardView cardboardView;
private SurfaceTexture surface;
private float[] mView;
public void bindBitmap2D(int texture) {
//must be GL_TEXTUREi
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
checkGLError("glActiveTexture");
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture);
checkGLError("glbindtexture");
// Set filtering
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
Bitmap img = BitmapFactory.decodeResource(getResources(), R.drawable.ic_launcher);
//convert bitmap to texture and bind the texture to GL_TEXTURE_2D
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, img, 0);
}
private int createTexture2D() {
int[] texture = new int[1];
GLES20.glGenTextures(1, texture, 0);
return texture[0];
}
/**
* Compiles an OpenGL ES shader from the given source code.
*
* @param type The type of shader we will be creating.
* @param code The shader source code.
* @return The shader handle.
*/
private int loadGLShader(int type, String code) {
int shader = GLES20.glCreateShader(type);
GLES20.glShaderSource(shader, code);
GLES20.glCompileShader(shader);
// Get the compilation status.
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
// If the compilation failed, delete the shader.
if (compileStatus[0] == 0) {
Log.e(TAG, "Error compiling shader: " + GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
if (shader == 0) {
throw new RuntimeException("Error creating shader.");
}
return shader;
}
private static void checkGLError(String func) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, func + ": glError " + error);
throw new RuntimeException(func + ": glError " + error);
}
}
/**
* Sets the view to our CardboardView and initializes the transformation matrices we will use
* to render our scene.
*
* @param savedInstanceState
*/
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.common_ui);
cardboardView = (CardboardView) findViewById(R.id.cardboard_view);
cardboardView.setRenderer(this);
setCardboardView(cardboardView);
mView = new float[16];
mOverlayView = (CardboardOverlayView) findViewById(R.id.overlay);
mOverlayView.show3DToast("test");
}
@Override
public void onRendererShutdown() {
Log.i(TAG, "onRendererShutdown");
}
@Override
public void onSurfaceChanged(int width, int height) {
Log.i(TAG, "onSurfaceChanged");
}
/**
* Creates the buffers we use to store information about the 3D world. OpenGL doesn't use Java
* arrays, but rather needs data in a format it can understand. Hence we use ByteBuffers.
*
* @param config The EGL configuration used when creating the surface.
*/
@Override
public void onSurfaceCreated(EGLConfig config) {
Log.i(TAG, "onSurfaceCreated");
GLES20.glClearColor(0.1f, 0.1f, 0.1f, 0.5f); // Dark background so text shows up well
ByteBuffer bb = ByteBuffer.allocateDirect(squareVertices.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(squareVertices);
vertexBuffer.position(0);
ByteBuffer dlb = ByteBuffer.allocateDirect(drawOrder.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
ByteBuffer bb2 = ByteBuffer.allocateDirect(textureVertices.length * 4);
bb2.order(ByteOrder.nativeOrder());
textureVerticesBuffer = bb2.asFloatBuffer();
textureVerticesBuffer.put(textureVertices);
textureVerticesBuffer.position(0);
// use shader to process and load images
int vertexShader = loadGLShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = loadGLShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode2D);
mProgram = GLES20.glCreateProgram(); // create empty OpenGL ES Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mProgram);
Log.d(TAG, "start binding picture");
texture = createTexture2D();
System.out.println("texture id: " + texture);
bindBitmap2D(texture);
checkGLError("onsurfacecreated");
}
/**
* Prepares OpenGL ES before we draw a frame.
*
* @param headTransform The head transformation in the new frame.
*/
@Override
public void onNewFrame(HeadTransform headTransform) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
bindBitmap2D(texture);
GLES20.glUseProgram(mProgram);
}
@Override
public void onFrameAvailable(SurfaceTexture arg0) {
this.cardboardView.requestRender();
}
/**
* Draws a frame for an eye. The transformation for that eye (from the camera) is passed in as
* a parameter.
*
* @param transform The transformations to apply to render this eye.
*/
@Override
public void onDrawEye(EyeTransform transform) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
// Bind the texture to this unit.
bindBitmap2D(texture);
checkGLError("bind bit map 2d");
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "position");
GLES20.glEnableVertexAttribArray(mPositionHandle);
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
false, vertexStride, vertexBuffer);
mTextureCoordHandle = GLES20.glGetAttribLocation(mProgram, "inputTextureCoordinate");
GLES20.glEnableVertexAttribArray(mTextureCoordHandle);
GLES20.glVertexAttribPointer(mTextureCoordHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
false, vertexStride, textureVerticesBuffer);
mColorHandle = GLES20.glGetAttribLocation(mProgram, "s_texture");
}
@Override
public void onFinishFrame(Viewport viewport) {
}
}
Again, I'm not seeing any errors. The screen is just black without showing anything.
I am trying to learn OpenGL ES 2.0 on Android in depth. I am trying to draw a simple point at the center of the screen, but somehow the point is not showing.
public class MyRenderer implements GLSurfaceView.Renderer {
Context context;
private int mProgram;
private final float[] mViewMatrix=new float[16];
private float[] mProjectionMatrix=new float[16];
private final float[] mPointModelMatrix=new float[16];
private final float[] mMVPMatrix=new float[16];
private final float[] mPointPosInModelSpace = new float[] {0.0f, 0.0f, 0.0f, 1.0f};
private final float[] mPointPosInWorldSpace = new float[4];
private final float[] mPointPosInEyeSpace = new float[4];
private int pointMVPMatrixHandle;
private int pointPositionHandle;
public MyRenderer(Context context){
this.context=context;
}
public void onDrawFrame(GL10 arg0) {
// TODO Auto-generated method stub
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
Matrix.setIdentityM(mPointModelMatrix, 0);
Matrix.translateM(mPointModelMatrix, 0, 0.0f, 0.0f, -3.0f);
Matrix.multiplyMV(mPointPosInWorldSpace, 0, mPointModelMatrix, 0, mPointPosInModelSpace, 0);
Matrix.multiplyMV(mPointPosInEyeSpace, 0, mViewMatrix, 0, mPointPosInWorldSpace, 0);
GLES20.glUseProgram(mProgram);
drawPoint();
}
private void drawPoint(){
pointMVPMatrixHandle=GLES20.glGetUniformLocation(mProgram, "u_MVPMatrix");
pointPositionHandle=GLES20.glGetAttribLocation(mProgram, "a_position");
GLES20.glVertexAttrib3f(pointPositionHandle, mPointPosInEyeSpace[0], mPointPosInEyeSpace[1], mPointPosInEyeSpace[2]);
GLES20.glDisableVertexAttribArray(pointPositionHandle);
Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mPointModelMatrix, 0);
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(pointMVPMatrixHandle, 1, false, mMVPMatrix, 0);
// Draw the point.
GLES20.glDrawArrays(GLES20.GL_POINTS, 0, 1);
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
// TODO Auto-generated method stub
GLES20.glViewport(0, 0, width, height);
final float ratio=(float)width/height;
Log.d("Ratio is", " "+ratio);
Log.d("Width is"," "+width+" and "+height);
final float left = -ratio;
final float right = ratio;
final float bottom= -1.0f;
final float top = 1.0f;
final float near = 1.0f;
final float far = 10.0f;
Matrix.frustumM(mProjectionMatrix, 0, left, right, bottom, top, near, far);
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
// TODO Auto-generated method stub
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
GLES20.glEnable(GLES20.GL_CULL_FACE);
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
float eyeX=0.0f;
float eyeY=0.0f;
float eyeZ=-0.5f;
float centerX=0.0f;
float centerY=0.0f;
float centerZ=-5.0f;
float upX=0.0f;
float upY=1.0f;
float upZ=0.0f;
Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, centerX, centerY, centerZ, upX, upY, upZ);
final String vertexShader=this.getVertexShader();
final String fragmentShader=this.getFragmentShader();
final int vertexShaderHandle=ShaderHelper.compileShader(GLES20.GL_VERTEX_SHADER, vertexShader);
final int fragmentShaderHandle=ShaderHelper.compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentShader);
mProgram=ShaderHelper.createAndLinkProgram(vertexShaderHandle, fragmentShaderHandle, new String[]{"a_position"});
}
private String getVertexShader(){
final String vertexShader="uniform mat4 u_MVPMatrix; \n"
+ "attribute vec4 a_Position; \n"
+ "void main() \n"
+ "{ \n"
+ " gl_Position = u_MVPMatrix \n"
+ " * a_Position; \n"
+ " gl_PointSize = 10.0; \n"
+ "} \n";
return vertexShader;
}
private String getFragmentShader(){
final String fragmentShader="precision mediump float; \n"
+ "void main() \n"
+ "{ \n"
+ " gl_FragColor = vec4(1.0, \n"
+ " 1.0, 1.0, 1.0); \n"
+ "} \n";
return fragmentShader;
}
}
I am pretty sure that I am pointing both the eye and the point in the negative Z direction (further from the viewer). The point should show up, since the point size in the vertex shader is set to 10.0, but somehow, no luck.
Note: ShaderHelper is a class with the static methods compileShader and createAndLinkProgram, which contain the code for compiling the shaders and checking for errors. (No errors in the program.)
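(For context, ShaderHelper itself is not shown in the question; a minimal sketch of what its two static methods might look like is below. The bodies here are illustrative, not the asker's actual code.)
import android.opengl.GLES20;
// Hypothetical sketch of the ShaderHelper described above.
public class ShaderHelper {
    // Compiles a single shader of the given type (GL_VERTEX_SHADER or GL_FRAGMENT_SHADER).
    public static int compileShader(int type, String source) {
        int handle = GLES20.glCreateShader(type);
        GLES20.glShaderSource(handle, source);
        GLES20.glCompileShader(handle);
        int[] status = new int[1];
        GLES20.glGetShaderiv(handle, GLES20.GL_COMPILE_STATUS, status, 0);
        if (status[0] == 0) {
            String log = GLES20.glGetShaderInfoLog(handle);
            GLES20.glDeleteShader(handle);
            throw new RuntimeException("Shader compile failed: " + log);
        }
        return handle;
    }
    // Links the two shaders into a program, binding the given attribute names
    // to locations 0, 1, ... before linking.
    public static int createAndLinkProgram(int vertexHandle, int fragmentHandle, String[] attributes) {
        int program = GLES20.glCreateProgram();
        GLES20.glAttachShader(program, vertexHandle);
        GLES20.glAttachShader(program, fragmentHandle);
        if (attributes != null) {
            for (int i = 0; i < attributes.length; i++) {
                GLES20.glBindAttribLocation(program, i, attributes[i]);
            }
        }
        GLES20.glLinkProgram(program);
        int[] status = new int[1];
        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, status, 0);
        if (status[0] == 0) {
            String log = GLES20.glGetProgramInfoLog(program);
            GLES20.glDeleteProgram(program);
            throw new RuntimeException("Program link failed: " + log);
        }
        return program;
    }
}
Note that whatever attribute names are passed to createAndLinkProgram in this style get bound verbatim, and GLSL identifiers are case-sensitive, so they must match the names declared in the vertex shader exactly.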
Here is a class I built based on your code for displaying the point. Since you will probably use more than one point eventually, it is better to push the points into a vertex array, as shown below.
package point.example.point;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
public class PointRenderer implements GLSurfaceView.Renderer
{
private float[] mModelMatrix = new float[16];
private float[] mViewMatrix = new float[16];
private float[] mProjectionMatrix = new float[16];
private float[] mMVPMatrix = new float[16];
private int mMVPMatrixHandle;
private int mPositionHandle;
float[] vertices = {
0.0f,0.0f,0.0f
};
FloatBuffer vertexBuf;
@Override
public void onSurfaceCreated(GL10 glUnused, EGLConfig config)
{
vertexBuf = ByteBuffer.allocateDirect(vertices.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
vertexBuf.put(vertices).position(0);
// Set the background clear color to gray.
GLES20.glClearColor(0.5f, 0.5f, 0.5f, 0.5f);
float eyeX=0.0f;
float eyeY=0.0f;
float eyeZ=0.0f;
float centerX=0.0f;
float centerY=0.0f;
float centerZ=-5.0f;
float upX=0.0f;
float upY=1.0f;
float upZ=0.0f;
// Set the view matrix. This matrix can be said to represent the camera position.
// NOTE: In OpenGL 1, a ModelView matrix is used, which is a combination of a model and
// view matrix. In OpenGL 2, we can keep track of these matrices separately if we choose.
Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, centerX, centerY, centerZ, upX, upY, upZ);
final String vertexShader =
"uniform mat4 u_MVPMatrix; \n"
+ "attribute vec4 a_Position; \n"
+ "void main() \n"
+ "{ \n"
+ " gl_Position = u_MVPMatrix \n"
+ " * a_Position; \n"
+ " gl_PointSize = 10.0; \n"
+ "} \n";
final String fragmentShader =
"precision mediump float; \n"
+ "void main() \n"
+ "{ \n"
+ " gl_FragColor = vec4(1.0, \n"
+ " 1.0, 1.0, 1.0); \n"
+ "} \n";
// Load in the vertex shader.
int vertexShaderHandle = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
if (vertexShaderHandle != 0)
{
// Pass in the shader source.
GLES20.glShaderSource(vertexShaderHandle, vertexShader);
// Compile the shader.
GLES20.glCompileShader(vertexShaderHandle);
// Get the compilation status.
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(vertexShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
// If the compilation failed, delete the shader.
if (compileStatus[0] == 0)
{
GLES20.glDeleteShader(vertexShaderHandle);
vertexShaderHandle = 0;
}
}
if (vertexShaderHandle == 0)
{
throw new RuntimeException("Error creating vertex shader.");
}
// Load in the fragment shader.
int fragmentShaderHandle = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
if (fragmentShaderHandle != 0)
{
// Pass in the shader source.
GLES20.glShaderSource(fragmentShaderHandle, fragmentShader);
// Compile the shader.
GLES20.glCompileShader(fragmentShaderHandle);
// Get the compilation status.
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(fragmentShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
// If the compilation failed, delete the shader.
if (compileStatus[0] == 0)
{
GLES20.glDeleteShader(fragmentShaderHandle);
fragmentShaderHandle = 0;
}
}
if (fragmentShaderHandle == 0)
{
throw new RuntimeException("Error creating fragment shader.");
}
// Create a program object and store the handle to it.
int programHandle = GLES20.glCreateProgram();
if (programHandle != 0)
{
// Bind the vertex shader to the program.
GLES20.glAttachShader(programHandle, vertexShaderHandle);
// Bind the fragment shader to the program.
GLES20.glAttachShader(programHandle, fragmentShaderHandle);
// Bind attributes
GLES20.glBindAttribLocation(programHandle, 0, "a_Position");
// Link the two shaders together into a program.
GLES20.glLinkProgram(programHandle);
// Get the link status.
final int[] linkStatus = new int[1];
GLES20.glGetProgramiv(programHandle, GLES20.GL_LINK_STATUS, linkStatus, 0);
// If the link failed, delete the program.
if (linkStatus[0] == 0)
{
GLES20.glDeleteProgram(programHandle);
programHandle = 0;
}
}
if (programHandle == 0)
{
throw new RuntimeException("Error creating program.");
}
// Set program handles. These will later be used to pass in values to the program.
mMVPMatrixHandle = GLES20.glGetUniformLocation(programHandle, "u_MVPMatrix");
mPositionHandle = GLES20.glGetAttribLocation(programHandle, "a_Position");
// Tell OpenGL to use this program when rendering.
GLES20.glUseProgram(programHandle);
}
@Override
public void onSurfaceChanged(GL10 glUnused, int width, int height)
{
// Set the OpenGL viewport to the same size as the surface.
GLES20.glViewport(0, 0, width, height);
// Create a new perspective projection matrix. The height will stay the same
// while the width will vary as per aspect ratio.
final float ratio = (float) width / height;
final float left = -ratio;
final float right = ratio;
final float bottom = -1.0f;
final float top = 1.0f;
final float near = 1.0f;
final float far = 100.0f;
Matrix.frustumM(mProjectionMatrix, 0, left, right, bottom, top, near, far);
}
@Override
public void onDrawFrame(GL10 glUnused)
{
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
Matrix.setIdentityM(mModelMatrix, 0);
//Push to the distance - note this will have no effect on a point size
Matrix.translateM(mModelMatrix, 0, 0.0f, 0.0f, -5.0f);
// Combine model, view and projection into one MVP matrix (multiplyMM, not multiplyMV, since both operands are 4x4 matrices).
Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
//Send the vertex
GLES20.glVertexAttribPointer(mPositionHandle, 3, GLES20.GL_FLOAT, false, 0, vertexBuf);
GLES20.glEnableVertexAttribArray(mPositionHandle);
//Draw the point
GLES20.glDrawArrays(GLES20.GL_POINTS, 0, 1);
}
}
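To try the renderer out, a minimal host Activity along the following lines should work; the class name and wiring here are illustrative and not part of the original answer.
import android.app.Activity;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
// Illustrative host Activity: creates a GLSurfaceView, requests an ES 2.0 context,
// and installs the PointRenderer from the answer above.
public class PointActivity extends Activity {
    private GLSurfaceView glSurfaceView;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        glSurfaceView = new GLSurfaceView(this);
        glSurfaceView.setEGLContextClientVersion(2); // required before any GLES20 calls in the renderer
        glSurfaceView.setRenderer(new PointRenderer());
        setContentView(glSurfaceView);
    }
    @Override
    protected void onResume() {
        super.onResume();
        glSurfaceView.onResume();
    }
    @Override
    protected void onPause() {
        super.onPause();
        glSurfaceView.onPause();
    }
}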