I created an Android app using Vuforia 6. In this app I have 26 different target images and 26 different 3D objects (one object per target). That works perfectly, but now I need to rotate the 3D object on user touch.
Here is my renderer code:
public class ImageTargetRendererAndroid implements GLSurfaceView.Renderer, SampleAppRendererControl
{
private static final String LOGTAG = "ImageTargetRenderer";
private SampleApplicationSession vuforiaAppSession;
private ImageTargetsAndroid mActivity;
private SampleAppRenderer mSampleAppRenderer;
private Vector<Texture> mTextures;
private int normalHandle;
private int shaderProgramID;
private int vertexHandle;
private int textureCoordHandle;
private int mvpMatrixHandle;
private int texSampler2DHandle;
private ArrayList<AndroidModel> mModel = new ArrayList<AndroidModel>();
//private Teapot mTeapot;
private float kBuildingScale = 12.0f;
private SampleApplication3DModel mBuildingsModel;
private Renderer mRenderer;
boolean mIsActive = false;
private boolean mModelIsLoaded = false;
private static final float OBJECT_SCALE_FLOAT = 3.0f;
ArrayList<String> modelNames = new ArrayList<String>();
private Product mProduct;
private String PackagePath = "";
private String Package3DPath = "";
AndroidModel curModel;
public ImageTargetRendererAndroid(ImageTargetsAndroid activity, SampleApplicationSession session, Product product)
{
mProduct = product;
PackagePath = Globals.getPackagePath(mProduct.getName());
Package3DPath = PackagePath + Constants.SERVER_RESPONSE_TAGS_FOLDER_NAME_3D+ "/";
if(modelNames.size() <= 0){
ArrayList<String> names = Globals.getModelNames(Package3DPath);
for(String name : names){
modelNames.add(name);
}
}
// modelNames.add("A");
// modelNames.add("B");
mActivity = activity;
vuforiaAppSession = session;
// SampleAppRenderer used to encapsulate the use of RenderingPrimitives setting
// the device mode AR/VR and stereo mode
mSampleAppRenderer = new SampleAppRenderer(this, mActivity, Device.MODE.MODE_AR, false, 10f , 5000f);
}
// Called to draw the current frame.
@Override
public void onDrawFrame(GL10 gl)
{
if (!mIsActive)
return;
// Call our function to render content from SampleAppRenderer class
mSampleAppRenderer.render();
}
public void setActive(boolean active)
{
mIsActive = active;
if(mIsActive)
mSampleAppRenderer.configureVideoBackground();
}
// Called when the surface is created or recreated.
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config)
{
Log.d(LOGTAG, "GLRenderer.onSurfaceCreated");
// Call Vuforia function to (re)initialize rendering after first use
// or after OpenGL ES context was lost (e.g. after onPause/onResume):
vuforiaAppSession.onSurfaceCreated();
mSampleAppRenderer.onSurfaceCreated();
}
// Called when the surface changed size.
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
Log.d(LOGTAG, "GLRenderer.onSurfaceChanged");
// Call Vuforia function to handle render surface size changes:
vuforiaAppSession.onSurfaceChanged(width, height);
// RenderingPrimitives to be updated when some rendering change is done
mSampleAppRenderer.onConfigurationChanged(mIsActive);
initRendering();
}
// Function for initializing the renderer.
private void initRendering()
{
String storage = Environment.getExternalStorageDirectory() + "/";
ArrayList<String> objs = Globals.getModelObjects(Package3DPath);
// for(String obj : objs){
// //modelNames.add(Package3DPath + obj);
// mModel.add(new AndroidModel(mActivity,Package3DPath + "/" + obj));
// }
if(modelNames.size() <= 0){
ArrayList<String> names = Globals.getModelNames(Package3DPath);
for(String name : names){
modelNames.add(name);
}
}
for(String modelName : modelNames){
//modelNames.add(Package3DPath + obj);
mModel.add(new AndroidModel(mActivity,Package3DPath + modelName + "." + Constants.FILE_TYPE_OBJ,modelName));
}
// mModel.add(new AndroidModel(mActivity,storage + "A.obj"));
// mModel.add(new AndroidModel(mActivity,storage + "B.obj"));
mRenderer = Renderer.getInstance();
GLES20.glClearColor(0.0f, 0.0f, 0.0f, Vuforia.requiresAlpha() ? 0.0f : 1.0f);
for (Texture t : mTextures)
{
GLES20.glGenTextures(1, t.mTextureID, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, t.mTextureID[0]);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, t.mWidth, t.mHeight, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, t.mData);
}
shaderProgramID = SampleUtils.createProgramFromShaderSrc( CubeShaders.CUBE_MESH_VERTEX_SHADER, CubeShaders.CUBE_MESH_FRAGMENT_SHADER);
vertexHandle = GLES20.glGetAttribLocation(shaderProgramID, "vertexPosition");
normalHandle = GLES20.glGetAttribLocation(shaderProgramID, "vertexNormal");
textureCoordHandle = GLES20.glGetAttribLocation(shaderProgramID, "vertexTexCoord");
mvpMatrixHandle = GLES20.glGetUniformLocation(shaderProgramID, "modelViewProjectionMatrix");
texSampler2DHandle = GLES20.glGetUniformLocation(shaderProgramID, "texSampler2D");
if(!mModelIsLoaded) {
//mTeapot = new Teapot();
// try {
// mBuildingsModel = new SampleApplication3DModel();
// mBuildingsModel.loadModel(mActivity.getResources().getAssets(), "ImageTargets/Buildings.txt");
// mModelIsLoaded = true;
// } catch (IOException e) {
// Log.e(LOGTAG, "Unable to load buildings");
// }
// Hide the Loading Dialog
mActivity.loadingDialogHandler.sendEmptyMessage(LoadingDialogHandler.HIDE_LOADING_DIALOG);
}
}
public void updateConfiguration()
{
mSampleAppRenderer.onConfigurationChanged(mIsActive);
}
// The render function called from SampleAppRendering by using RenderingPrimitives views.
// The state is owned by SampleAppRenderer which is controlling it's lifecycle.
// State should not be cached outside this method.
public void renderFrame(State state, float[] projectionMatrix)
{
// Renders video background replacing Renderer.DrawVideoBackground()
mSampleAppRenderer.renderVideoBackground();
// state = mRenderer.begin();
// mRenderer.drawVideoBackground();
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
// handle face culling, we need to detect if we are using reflection
// to determine the direction of the culling
GLES20.glEnable(GLES20.GL_CULL_FACE);
GLES20.glCullFace(GLES20.GL_BACK);
// Did we find any trackables this frame?
int a = state.getNumTrackableResults();
for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++) {
TrackableResult result = state.getTrackableResult(tIdx);
Trackable trackable = result.getTrackable();
Matrix44F modelViewMatrix_Vuforia = Tool.convertPose2GLMatrix(result.getPose());
float[] modelViewMatrix = modelViewMatrix_Vuforia.getData();
// int textureIndex = trackable.getName().equalsIgnoreCase("stones") ? 0 : 1;
// textureIndex = trackable.getName().equalsIgnoreCase("tarmac") ? 2 : textureIndex;
// deal with the modelview and projection matrices
float[] modelViewProjection = new float[16];
String targetdata = ((String) trackable.getUserData()).replace("Current Dataset : ","");
int modelIndex = getTargetIndex(targetdata);
int textureIndex = modelIndex;
curModel = mModel.get(modelIndex);
if (!mActivity.isExtendedTrackingActive()) {
GLES20.glVertexAttribPointer(vertexHandle, 3, GLES20.GL_FLOAT, false, 0, curModel.getVertices());
GLES20.glVertexAttribPointer(normalHandle, 3, GLES20.GL_FLOAT, false, 0, curModel.getNormals());
GLES20.glVertexAttribPointer(textureCoordHandle, 2, GLES20.GL_FLOAT, false, 0, curModel.getTexCoords());
GLES20.glEnableVertexAttribArray(vertexHandle);
GLES20.glEnableVertexAttribArray(normalHandle);
GLES20.glEnableVertexAttribArray(textureCoordHandle);
// activate texture 0, bind it, and pass to shader
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures.get(textureIndex).mTextureID[0]);
GLES20.glUniform1i(texSampler2DHandle, 0);
// pass the model view matrix to the shader
GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, modelViewProjection, 0);
// finally draw the teapot
GLES20.glDrawElements(GLES20.GL_TRIANGLES, curModel.getNumObjectIndex(), GLES20.GL_UNSIGNED_SHORT, curModel.getIndices());
// disable the enabled arrays
GLES20.glDisableVertexAttribArray(vertexHandle);
GLES20.glDisableVertexAttribArray(normalHandle);
GLES20.glDisableVertexAttribArray(textureCoordHandle);
} else {
GLES20.glDisable(GLES20.GL_CULL_FACE);
GLES20.glVertexAttribPointer(vertexHandle, 3, GLES20.GL_FLOAT, false, 0, mBuildingsModel.getVertices());
GLES20.glVertexAttribPointer(normalHandle, 3, GLES20.GL_FLOAT, false, 0, mBuildingsModel.getNormals());
GLES20.glVertexAttribPointer(textureCoordHandle, 2, GLES20.GL_FLOAT, false, 0, mBuildingsModel.getTexCoords());
GLES20.glEnableVertexAttribArray(vertexHandle);
GLES20.glEnableVertexAttribArray(normalHandle);
GLES20.glEnableVertexAttribArray(textureCoordHandle);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures.get(3).mTextureID[0]);
GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, modelViewProjection, 0);
GLES20.glUniform1i(texSampler2DHandle, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, mBuildingsModel.getNumObjectVertex());
SampleUtils.checkGLError("Renderer DrawBuildings");
Matrix.rotateM(modelViewMatrix, 0, 90.0f, 1.0f, 0, 0);
Matrix.scaleM(modelViewMatrix, 0, kBuildingScale, kBuildingScale, kBuildingScale);
}
Matrix.multiplyMM(modelViewProjection, 0, projectionMatrix, 0, modelViewMatrix, 0);
// activate the shader program and bind the vertex/normal/tex coords
GLES20.glUseProgram(shaderProgramID);
if (!mActivity.isExtendedTrackingActive()) {
GLES20.glVertexAttribPointer(vertexHandle, 3, GLES20.GL_FLOAT, false, 0, curModel.getVertices());
GLES20.glVertexAttribPointer(normalHandle, 3, GLES20.GL_FLOAT, false, 0, curModel.getNormals());
GLES20.glVertexAttribPointer(textureCoordHandle, 2, GLES20.GL_FLOAT, false, 0, curModel.getTexCoords());
GLES20.glEnableVertexAttribArray(vertexHandle);
GLES20.glEnableVertexAttribArray(normalHandle);
GLES20.glEnableVertexAttribArray(textureCoordHandle);
// activate texture 0, bind it, and pass to shader
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures.get(textureIndex).mTextureID[0]);
GLES20.glUniform1i(texSampler2DHandle, 0);
// pass the model view matrix to the shader
GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, modelViewProjection, 0);
// finally draw the teapot
GLES20.glDrawElements(GLES20.GL_TRIANGLES, curModel.getNumObjectIndex(), GLES20.GL_UNSIGNED_SHORT, curModel.getIndices());
// disable the enabled arrays
GLES20.glDisableVertexAttribArray(vertexHandle);
GLES20.glDisableVertexAttribArray(normalHandle);
GLES20.glDisableVertexAttribArray(textureCoordHandle);
} else {
GLES20.glDisable(GLES20.GL_CULL_FACE);
GLES20.glVertexAttribPointer(vertexHandle, 3, GLES20.GL_FLOAT, false, 0, mBuildingsModel.getVertices());
GLES20.glVertexAttribPointer(textureCoordHandle, 2, GLES20.GL_FLOAT, false, 0, mBuildingsModel.getTexCoords());
GLES20.glEnableVertexAttribArray(vertexHandle);
GLES20.glEnableVertexAttribArray(textureCoordHandle);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures.get(3).mTextureID[0]);
GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, modelViewProjection, 0);
GLES20.glUniform1i(texSampler2DHandle, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, mBuildingsModel.getNumObjectVertex());
SampleUtils.checkGLError("Renderer DrawBuildings");
}
SampleUtils.checkGLError("Render Frame");
}
GLES20.glDisable(GLES20.GL_DEPTH_TEST);
mRenderer.end();
}
private void printUserData(Trackable trackable)
{
String userData = (String) trackable.getUserData();
Log.d(LOGTAG, "UserData:Retreived User Data \"" + userData + "\"");
}
public void setTextures(Vector<Texture> textures)
{
mTextures = textures;
}
private int getTargetIndex(String name){
for(int i = 0 ; i < modelNames.size() ; i++){
if(modelNames.get(i).equals(name)){
return i;
}
}
return -1;
}
}
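Since the question is specifically about rotating the model on user touch, here is a minimal sketch of one common approach (not part of the Vuforia sample code): accumulate drag deltas from the activity's onTouchEvent and fold them into the pose matrix inside renderFrame(), the same accumulated-rotation pattern the learnopengles touch lessons use. The field names (mDeltaX, mDeltaY, mCurrentRotation, mAccumulatedRotation, mTemporaryMatrix) are assumptions for this sketch.
// Sketch only: mDeltaX/mDeltaY are volatile floats written from the activity's
// onTouchEvent (screen-space drag distances); mCurrentRotation, mAccumulatedRotation
// and mTemporaryMatrix are float[16] fields, with mAccumulatedRotation set to
// identity once in initRendering().
// Inside renderFrame(), after modelViewMatrix is read from the pose and before
// modelViewProjection is built:
Matrix.setIdentityM(mCurrentRotation, 0);
Matrix.rotateM(mCurrentRotation, 0, mDeltaX, 0.0f, 1.0f, 0.0f); // horizontal drag -> yaw
Matrix.rotateM(mCurrentRotation, 0, mDeltaY, 1.0f, 0.0f, 0.0f); // vertical drag -> pitch
mDeltaX = 0.0f;
mDeltaY = 0.0f;
// Accumulate this frame's rotation into the running rotation.
Matrix.multiplyMM(mTemporaryMatrix, 0, mCurrentRotation, 0, mAccumulatedRotation, 0);
System.arraycopy(mTemporaryMatrix, 0, mAccumulatedRotation, 0, 16);
// Apply the accumulated rotation on top of the trackable's pose.
Matrix.multiplyMM(mTemporaryMatrix, 0, modelViewMatrix, 0, mAccumulatedRotation, 0);
System.arraycopy(mTemporaryMatrix, 0, modelViewMatrix, 0, 16);
// The existing Matrix.multiplyMM(modelViewProjection, 0, projectionMatrix, 0,
// modelViewMatrix, 0) call then builds the MVP from the rotated pose.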
I created a 3D object for Android using the tutorial from learnopengles, and I built the cube from lesson six of that tutorial (texture filtering). After that I wanted to replace the cube with my own object (a strawberry I modeled). I want my object to display in the view, so I parse my object (a .obj file) in my renderer class, but the view shows random triangles instead of the model.
This is my parsing code:
public ObjLoader(Context mActivityContext) {
FileReader fr;
String str;
ArrayList<Float> tempModelVertices = new ArrayList<Float>();
ArrayList<Float> tempTextureVertices = new ArrayList<Float>();
ArrayList<Float> tempNormalVertices = new ArrayList<Float>();
ArrayList<Integer> facesM = new ArrayList<Integer>();
ArrayList<Integer> facesT = new ArrayList<Integer>();
ArrayList<Integer> facesN = new ArrayList<Integer>();
try {
fr = new FileReader(new File("model/straw_obj"));
BufferedReader br = new BufferedReader(fr);
while((str = br.readLine())!=null){
if(str.startsWith("f")){
String[] strAr = str.replaceAll("f", "").trim().split(" ");
for(String s : strAr){
String[] cornerAr = s.split("/");
facesM.add(Integer.parseInt(cornerAr[0].trim())-1);
facesT.add(Integer.parseInt(cornerAr[1].trim())-1);
facesN.add(Integer.parseInt(cornerAr[2].trim())-1);
}
}
else if(str.startsWith("vt")){
String[] strAr = str.replaceAll("vt", "").trim().split(" ");
tempTextureVertices.add(Float.valueOf(strAr[0].trim()));
tempTextureVertices.add(-1*Float.valueOf(strAr[1].trim()));
}
else if(str.startsWith("vn")){
String[] strAr = str.replaceAll("vn", "").trim().split(" ");
tempNormalVertices.add(Float.valueOf(strAr[0].trim()));
tempNormalVertices.add(Float.valueOf(strAr[1].trim()));
tempNormalVertices.add(Float.valueOf(strAr[2].trim()));
}
else if(str.startsWith("v")){
String[] strAr = str.replaceAll("v", "").trim().split(" ");
tempModelVertices.add(Float.valueOf(strAr[0].trim()));
tempModelVertices.add(Float.valueOf(strAr[1].trim()));
tempModelVertices.add(Float.valueOf(strAr[2].trim()));
}
}
//Log.v(LOG_TAG, "v :"+ String.valueOf(v) + "vt :"+ String.valueOf(vt) + "vn :"+ String.valueOf(vn) + "f :"+ String.valueOf(f));
} catch (IOException e) {
// TODO Auto-generated catch block
Log.v(TAG, "error");
}
Log.v(TAG, "vt " + String.valueOf(tempTextureVertices.size()) + " vn " + String.valueOf(tempNormalVertices.size()) + " v " + String.valueOf(tempModelVertices.size()));
ModelPositionData = new float[facesM.size()];
ModelTextureCoordinateData = new float[facesT.size()];
ModelNormalData = new float[facesN.size()];
for(int i=0; i<facesM.size(); i++){
ModelPositionData[i] = tempModelVertices.get(facesM.get(i));
}
for(int i=0; i<facesT.size(); i++){
ModelTextureCoordinateData[i] = tempTextureVertices.get(facesT.get(i));
}
for(int i=0; i<facesN.size(); i++){
ModelNormalData[i] = tempNormalVertices.get(facesN.get(i));
}
}
And this is how I create the GLSurfaceView renderer:
public class TesterRenderer implements GLSurfaceView.Renderer{
private static final String TAG = "TesterRenderer";
private final Context mActivityContext;
/**
* Store the model matrix. This matrix is used to move models from object space (where each model can be thought
* of being located at the center of the universe) to world space.
*/
private float[] mModelMatrix = new float[16];
/**
* Store the view matrix. This can be thought of as our camera. This matrix transforms world space to eye space;
* it positions things relative to our eye.
*/
private float[] mViewMatrix = new float[16];
/** Store the projection matrix. This is used to project the scene onto a 2D viewport. */
private float[] mProjectionMatrix = new float[16];
/** Allocate storage for the final combined matrix. This will be passed into the shader program. */
private float[] mMVPMatrix = new float[16];
/** Store the accumulated rotation. */
private final float[] mAccumulatedRotation = new float[16];
/** Store the current rotation. */
private final float[] mCurrentRotation = new float[16];
/** A temporary matrix. */
private float[] mTemporaryMatrix = new float[16];
/**
* Stores a copy of the model matrix specifically for the light position.
*/
private float[] mLightModelMatrix = new float[16];
/** Store our model data in a float buffer. */
private final FloatBuffer mModelPositions;
private final FloatBuffer mModelNormals;
private final FloatBuffer mModelTextureCoordinates;
// private final FloatBuffer mModelTextureCoordinatesForPlane;
/** This will be used to pass in the transformation matrix. */
private int mMVPMatrixHandle;
/** This will be used to pass in the modelview matrix. */
private int mMVMatrixHandle;
/** This will be used to pass in the light position. */
private int mLightPosHandle;
/** This will be used to pass in the texture. */
private int mTextureUniformHandle;
/** This will be used to pass in model position information. */
private int mPositionHandle;
/** This will be used to pass in model normal information. */
private int mNormalHandle;
/** This will be used to pass in model texture coordinate information. */
private int mTextureCoordinateHandle;
/** How many bytes per float. */
private final int mBytesPerFloat = 4;
/** Size of the position data in elements. */
private final int mPositionDataSize = 3;
/** Size of the normal data in elements. */
private final int mNormalDataSize = 3;
/** Size of the texture coordinate data in elements. */
private final int mTextureCoordinateDataSize = 2;
/** Used to hold a light centered on the origin in model space. We need a 4th coordinate so we can get translations to work when
* we multiply this by our transformation matrices. */
private final float[] mLightPosInModelSpace = new float[] {0.0f, 0.0f, 0.0f, 1.0f};
/** Used to hold the current position of the light in world space (after transformation via model matrix). */
private final float[] mLightPosInWorldSpace = new float[4];
/** Used to hold the transformed position of the light in eye space (after transformation via modelview matrix) */
private final float[] mLightPosInEyeSpace = new float[4];
/** This is a handle to our cube shading program. */
private int mProgramHandle;
/** This is a handle to our light point program. */
private int mPointProgramHandle;
/** These are handles to our texture data. */
private int mTextureDataHandle;
// private int mGrassDataHandle;
/** Temporary place to save the min and mag filter, in case the activity was restarted. */
private int mQueuedMinFilter;
private int mQueuedMagFilter;
// These still work without volatile, but refreshes are not guaranteed to happen.
public volatile float mDeltaX;
public volatile float mDeltaY;
public TesterRenderer(final Context activityContext)
{
mActivityContext = activityContext;
ObjLoader obj = new ObjLoader(mActivityContext);
mModelPositions = ByteBuffer.allocateDirect(obj.ModelPositionData.length * mBytesPerFloat)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mModelPositions.put(obj.ModelPositionData).position(0);
mModelNormals = ByteBuffer.allocateDirect(obj.ModelNormalData.length * mBytesPerFloat)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mModelNormals.put(obj.ModelNormalData).position(0);
mModelTextureCoordinates = ByteBuffer.allocateDirect(obj.ModelTextureCoordinateData.length * mBytesPerFloat)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mModelTextureCoordinates.put(obj.ModelTextureCoordinateData).position(0);
}
@Override
public void onSurfaceCreated(GL10 glUnused, EGLConfig config)
{
// Set the background clear color to black.
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
// Use culling to remove back faces.
GLES20.glEnable(GLES20.GL_CULL_FACE);
// Enable depth testing
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
// The below glEnable() call is a holdover from OpenGL ES 1, and is not needed in OpenGL ES 2.
// Enable texture mapping
// GLES20.glEnable(GLES20.GL_TEXTURE_2D);
// Position the eye in front of the origin.
final float eyeX = 0.0f;
final float eyeY = 0.0f;
final float eyeZ = -0.5f;
// We are looking toward the distance
final float lookX = 0.0f;
final float lookY = 0.0f;
final float lookZ = -5.0f;
// Set our up vector. This is where our head would be pointing were we holding the camera.
final float upX = 0.0f;
final float upY = 1.0f;
final float upZ = 0.0f;
// Set the view matrix. This matrix can be said to represent the camera position.
// NOTE: In OpenGL 1, a ModelView matrix is used, which is a combination of a model and
// view matrix. In OpenGL 2, we can keep track of these matrices separately if we choose.
Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ);
final String vertexShader = RawResourceReader.readTextFileFromRawResource(mActivityContext, R.raw.per_pixel_vertex_shader_tex_and_light);
final String fragmentShader = RawResourceReader.readTextFileFromRawResource(mActivityContext, R.raw.per_pixel_fragment_shader_tex_and_light);
final int vertexShaderHandle = ShaderHelper.compileShader(GLES20.GL_VERTEX_SHADER, vertexShader);
final int fragmentShaderHandle = ShaderHelper.compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentShader);
mProgramHandle = ShaderHelper.createAndLinkProgram(vertexShaderHandle, fragmentShaderHandle,
new String[] {"a_Position", "a_Normal", "a_TexCoordinate"});
// Define a simple shader program for our point.
final String pointVertexShader = RawResourceReader.readTextFileFromRawResource(mActivityContext, R.raw.point_vertex_shader);
final String pointFragmentShader = RawResourceReader.readTextFileFromRawResource(mActivityContext, R.raw.point_fragment_shader);
final int pointVertexShaderHandle = ShaderHelper.compileShader(GLES20.GL_VERTEX_SHADER, pointVertexShader);
final int pointFragmentShaderHandle = ShaderHelper.compileShader(GLES20.GL_FRAGMENT_SHADER, pointFragmentShader);
mPointProgramHandle = ShaderHelper.createAndLinkProgram(pointVertexShaderHandle, pointFragmentShaderHandle,
new String[] {"a_Position"});
// Load the texture
mTextureDataHandle = TextureHelper.loadTexture(mActivityContext, R.drawable.strawberry_texture);
GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
// mGrassDataHandle = TextureHelper.loadTexture(mActivityContext, R.drawable.noisy_grass_public_domain);
// GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
if (mQueuedMinFilter != 0)
{
setMinFilter(mQueuedMinFilter);
}
if (mQueuedMagFilter != 0)
{
setMagFilter(mQueuedMagFilter);
}
// Initialize the accumulated rotation matrix
Matrix.setIdentityM(mAccumulatedRotation, 0);
}
@Override
public void onSurfaceChanged(GL10 glUnused, int width, int height)
{
// Set the OpenGL viewport to the same size as the surface.
GLES20.glViewport(0, 0, width, height);
// Create a new perspective projection matrix. The height will stay the same
// while the width will vary as per aspect ratio.
final float ratio = (float) width / height;
final float left = -ratio;
final float right = ratio;
final float bottom = -1.0f;
final float top = 1.0f;
final float near = 1.0f;
final float far = 1000.0f;
Matrix.frustumM(mProjectionMatrix, 0, left, right, bottom, top, near, far);
}
@Override
public void onDrawFrame(GL10 glUnused)
{
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
// Do a complete rotation every 10 seconds.
long time = SystemClock.uptimeMillis() % 10000L;
long slowTime = SystemClock.uptimeMillis() % 100000L;
float angleInDegrees = (360.0f / 10000.0f) * ((int) time);
float slowAngleInDegrees = (360.0f / 100000.0f) * ((int) slowTime);
// Set our per-vertex lighting program.
GLES20.glUseProgram(mProgramHandle);
// Set program handles for cube drawing.
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_MVPMatrix");
mMVMatrixHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_MVMatrix");
mLightPosHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_LightPos");
mTextureUniformHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_Texture");
mPositionHandle = GLES20.glGetAttribLocation(mProgramHandle, "a_Position");
mNormalHandle = GLES20.glGetAttribLocation(mProgramHandle, "a_Normal");
mTextureCoordinateHandle = GLES20.glGetAttribLocation(mProgramHandle, "a_TexCoordinate");
// Calculate position of the light. Rotate and then push into the distance.
Matrix.setIdentityM(mLightModelMatrix, 0);
Matrix.translateM(mLightModelMatrix, 0, 0.0f, 0.0f, -2.0f);
Matrix.rotateM(mLightModelMatrix, 0, angleInDegrees, 0.0f, 1.0f, 0.0f);
Matrix.translateM(mLightModelMatrix, 0, 0.0f, 0.0f, 3.5f);
Matrix.multiplyMV(mLightPosInWorldSpace, 0, mLightModelMatrix, 0, mLightPosInModelSpace, 0);
Matrix.multiplyMV(mLightPosInEyeSpace, 0, mViewMatrix, 0, mLightPosInWorldSpace, 0);
// Draw a cube.
// Translate the cube into the screen.
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.translateM(mModelMatrix, 0, 0.0f, 0.0f, -7.0f);
// Set a matrix that contains the current rotation.
Matrix.setIdentityM(mCurrentRotation, 0);
Matrix.rotateM(mCurrentRotation, 0, mDeltaX, 0.0f, 1.0f, 0.0f);
Matrix.rotateM(mCurrentRotation, 0, mDeltaY, 1.0f, 0.0f, 0.0f);
mDeltaX = 0.0f;
mDeltaY = 0.0f;
// Multiply the current rotation by the accumulated rotation, and then set the accumulated rotation to the result.
Matrix.multiplyMM(mTemporaryMatrix, 0, mCurrentRotation, 0, mAccumulatedRotation, 0);
System.arraycopy(mTemporaryMatrix, 0, mAccumulatedRotation, 0, 16);
// Rotate the cube taking the overall rotation into account.
Matrix.multiplyMM(mTemporaryMatrix, 0, mModelMatrix, 0, mAccumulatedRotation, 0);
System.arraycopy(mTemporaryMatrix, 0, mModelMatrix, 0, 16);
// Set the active texture unit to texture unit 0.
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
// Bind the texture to this unit.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);
// Tell the texture uniform sampler to use this texture in the shader by binding to texture unit 0.
GLES20.glUniform1i(mTextureUniformHandle, 0);
// Pass in the texture coordinate information
GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
mModelTextureCoordinates.position(0);
GLES20.glVertexAttribPointer(mTextureCoordinateHandle, mTextureCoordinateDataSize, GLES20.GL_FLOAT, false,
0, mModelTextureCoordinates);
drawModel();
// Draw a plane
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.translateM(mModelMatrix, 0, 0.0f, -2.0f, -5.0f);
Matrix.scaleM(mModelMatrix, 0, 25.0f, 1.0f, 25.0f);
Matrix.rotateM(mModelMatrix, 0, slowAngleInDegrees, 0.0f, 1.0f, 0.0f);
// Set the active texture unit to texture unit 0.
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
// Bind the texture to this unit.
//GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mGrassDataHandle);
// Tell the texture uniform sampler to use this texture in the shader by binding to texture unit 0.
GLES20.glUniform1i(mTextureUniformHandle, 0);
// Pass in the texture coordinate information
GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
drawModel();
GLES20.glUseProgram(mPointProgramHandle);
drawLight();
}
public void setMinFilter(final int filter)
{
if (mTextureDataHandle != 0)
{
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, filter);
// GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mGrassDataHandle);
// GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, filter);
}
else
{
mQueuedMinFilter = filter;
}
}
public void setMagFilter(final int filter)
{
if (mTextureDataHandle != 0)
{
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, filter);
// GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mGrassDataHandle);
// GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, filter);
}
else
{
mQueuedMagFilter = filter;
}
}
private void drawModel()
{
// Pass in the position information
GLES20.glEnableVertexAttribArray(mPositionHandle);
mModelPositions.position(0);
GLES20.glVertexAttribPointer(mPositionHandle, mPositionDataSize, GLES20.GL_FLOAT, false,
0, mModelPositions);
// Pass in the normal information
GLES20.glEnableVertexAttribArray(mNormalHandle);
mModelNormals.position(0);
GLES20.glVertexAttribPointer(mNormalHandle, mNormalDataSize, GLES20.GL_FLOAT, false,
0, mModelNormals);
// This multiplies the view matrix by the model matrix, and stores the result in the MVP matrix
// (which currently contains model * view).
Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
// Pass in the modelview matrix.
GLES20.glUniformMatrix4fv(mMVMatrixHandle, 1, false, mMVPMatrix, 0);
// This multiplies the modelview matrix by the projection matrix, and stores the result in the MVP matrix
// (which now contains model * view * projection).
Matrix.multiplyMM(mTemporaryMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
System.arraycopy(mTemporaryMatrix, 0, mMVPMatrix, 0, 16);
// Pass in the combined matrix.
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
// Pass in the light position in eye space.
GLES20.glUniform3f(mLightPosHandle, mLightPosInEyeSpace[0], mLightPosInEyeSpace[1], mLightPosInEyeSpace[2]);
// Draw the cube.
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 36);
}
/**
* Draws a point representing the position of the light.
*/
private void drawLight()
{
final int pointMVPMatrixHandle = GLES20.glGetUniformLocation(mPointProgramHandle, "u_MVPMatrix");
final int pointPositionHandle = GLES20.glGetAttribLocation(mPointProgramHandle, "a_Position");
// Pass in the position.
GLES20.glVertexAttrib3f(pointPositionHandle, mLightPosInModelSpace[0], mLightPosInModelSpace[1], mLightPosInModelSpace[2]);
// Since we are not using a buffer object, disable vertex arrays for this attribute.
GLES20.glDisableVertexAttribArray(pointPositionHandle);
// Pass in the transformation matrix.
Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mLightModelMatrix, 0);
Matrix.multiplyMM(mTemporaryMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
System.arraycopy(mTemporaryMatrix, 0, mMVPMatrix, 0, 16);
GLES20.glUniformMatrix4fv(pointMVPMatrixHandle, 1, false, mMVPMatrix, 0);
// Draw the point.
GLES20.glDrawArrays(GLES20.GL_POINTS, 0, 1);
}
}
Can someone help me fix this?
It looks like there is a problem with the way you reorder the coordinates based on the indices in the faces:
for(int i=0; i<facesM.size(); i++){
ModelPositionData[i] = tempModelVertices.get(facesM.get(i));
}
Each position consists of 3 coordinates. This loop copies only one value per position, though. It should look something like this:
for(int i=0; i<facesM.size(); i++){
ModelPositionData[3 * i ] = tempModelVertices.get(3 * facesM.get(i) );
ModelPositionData[3 * i + 1] = tempModelVertices.get(3 * facesM.get(i) + 1);
ModelPositionData[3 * i + 2] = tempModelVertices.get(3 * facesM.get(i) + 2);
}
You will also need to adjust the allocation accordingly:
ModelPositionData = new float[3 * facesM.size()];
and make the equivalent changes for the normals and texture coordinates.
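For completeness, the equivalent changes for the normals (3 components each) and the texture coordinates (2 components each) would look roughly like this:
ModelNormalData = new float[3 * facesN.size()];
ModelTextureCoordinateData = new float[2 * facesT.size()];
for (int i = 0; i < facesN.size(); i++) {
    ModelNormalData[3 * i]     = tempNormalVertices.get(3 * facesN.get(i));
    ModelNormalData[3 * i + 1] = tempNormalVertices.get(3 * facesN.get(i) + 1);
    ModelNormalData[3 * i + 2] = tempNormalVertices.get(3 * facesN.get(i) + 2);
}
for (int i = 0; i < facesT.size(); i++) {
    ModelTextureCoordinateData[2 * i]     = tempTextureVertices.get(2 * facesT.get(i));
    ModelTextureCoordinateData[2 * i + 1] = tempTextureVertices.get(2 * facesT.get(i) + 1);
}
It is also worth checking that drawModel() no longer draws a hard-coded 36 vertices (the cube's count) but the number of vertices actually loaded from the .obj file, i.e. ModelPositionData.length / 3.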
I want to rotate the object around the Z axis, so I am using the code below, but it does not rotate in place at its position; the rotation just makes it move back and then come near again. I think something is wrong with the values in GLU.gluLookAt(gl, 0, 0, 10, 0, 0, 0, 0, 1, 0). Please help me set the correct values so that the rotation works properly.
gl.glTranslatef(mOrigin.x, mOrigin.y, mOrigin.z);
gl.glRotatef(mRotate.x, 1f, 0f, 0f);
gl.glRotatef(mRotate.y, 0f, 1f, 0f);
gl.glRotatef(mRotate.z, 0f, 0f, 1f);
private class Renderer implements GLSurfaceView.Renderer {
public Renderer() {
setEGLConfigChooser(8, 8, 8, 8, 16, 0);
getHolder().setFormat(PixelFormat.TRANSLUCENT);
setZOrderOnTop(true);
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
gl.glClearColor(0.0f,0.0f,0.0f, 0.0f);
gl.glEnable(GL10.GL_DEPTH_TEST);
gl.glDepthFunc(GL10.GL_LEQUAL);
gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_NICEST);
gl.glEnable(GL10.GL_TEXTURE_2D);
gl.glShadeModel(GL10.GL_SMOOTH);
}
public void onSurfaceChanged(GL10 gl, int w, int h) {
mViewWidth = (float)w;
mViewHeight = (float)h;
gl.glViewport(0,0,w,h);
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadIdentity();
GLU.gluPerspective(gl, 45, mViewWidth/mViewHeight, 0.1f, 100f);
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
}
public void onDrawFrame(GL10 gl) {
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glPushMatrix();
gl.glDisable(GL10.GL_DITHER);
GLU.gluLookAt(gl, 0, 0, 10, 0, 0, 0, 0, 1, 0);
//draw_model
gl.glPushMatrix();
if(mOrigin != null && mRotate != null) {
gl.glTranslatef(mOrigin.x, mOrigin.y, mOrigin.z);
gl.glRotatef(mRotate.x, 1f, 0f, 0f);
gl.glRotatef(mRotate.y, 0f, 1f, 0f);
gl.glRotatef(mRotate.z, 0f, 0f, 1f);
}
if(mModel != null) {
mModel.draw(gl, mContext);
if(!RendererView.textureFileName.equals(""))
mModel.bindTextures(mContext, gl);
}
gl.glPopMatrix();
gl.glPopMatrix();
if(isPictureTake) {
w = getWidth();
h = getHeight();
b = new int[w*(y+h)];
bt = new int[w*h];
IntBuffer ib = IntBuffer.wrap(b);
ib.position(0);
gl.glReadPixels(0, 0, w, h, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, ib);
createBitmapFromGLSurface(mContext);
isPictureTake = false;
}
}
}
ObjLoader.java
package com.amplimesh.models;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.ArrayList;
import java.util.StringTokenizer;
import javax.microedition.khronos.opengles.GL10;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLUtils;
import com.amplimesh.renderer.RendererView;
import com.amplimesh.util.Point3;
/**
* Object loader; draws the texture and object.
* @author Ajay
*/
public class ObjModel {
/**
* It fills the texture into the mesh.
* @param context
* @param gl
*/
public void bindTextures(Context context, GL10 gl) {
Bitmap bitmap;
try {
InputStream is = context.getAssets().open("textures/"+RendererView.textureFileName);
bitmap = BitmapFactory.decodeStream(is);
if(bitmap != null) {
// generate one texture pointer
gl.glGenTextures(1, mTextures, 0);
// ...and bind it to our array
gl.glBindTexture(GL10.GL_TEXTURE_2D, mTextures[0]);
// create nearest filtered texture
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
//Different possible texture parameters, e.g. GL10.GL_CLAMP_TO_EDGE
//gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_REPEAT);
//gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_REPEAT);
// Use Android GLUtils to specify a two-dimensional texture image from our bitmap
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
// Clean up
bitmap.recycle();
}
} catch (java.io.IOException e) {
return;
}
}
/**
* It draws the object.
* @param gl
*/
public void draw(GL10 gl, Context mContext) {
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
gl.glEnableClientState(GL10.GL_NORMAL_ARRAY);
for (Model model : mModels) {
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, model.v);
if (model.vt != null && mTextures != null) {
gl.glBindTexture(GL10.GL_TEXTURE_2D, mTextures[0]);
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, model.vt);
}
if (model.vn != null) {
gl.glNormalPointer(GL10.GL_FLOAT, 0, model.vn);
}
gl.glDrawArrays(GL10.GL_TRIANGLES, 0, model.v_size);
}
gl.glDisableClientState(GL10.GL_NORMAL_ARRAY);
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
}
/**
* It loads the object from a stream.
* @param is
* @param texture_name
* @return
* @throws IOException
*/
public static ObjModel loadFromStream(InputStream is, String texture_name) throws IOException {
ObjModel obj = ObjLoader.loadFromStream(is);
return obj;
}
private Model mModels[];
private int mTextures[] = new int[1];
/**
* It reads the obj file.
* @author Ajay
*/
private static class ObjLoader {
public static ObjModel loadFromStream(InputStream is) throws IOException {
BufferedReader reader = new BufferedReader(new InputStreamReader(is));
ObjModel obj = new ObjModel();
ArrayList<Point3> v = new ArrayList<Point3>();
ArrayList<Point3> vt = new ArrayList<Point3>();
ArrayList<Point3> vn = new ArrayList<Point3>();
ArrayList<Face> f = new ArrayList<Face>();
ArrayList<Model> o = new ArrayList<Model>();
boolean o_pending=false;
while(reader.ready()) {
String line = reader.readLine();
if (line == null)
break;
StringTokenizer tok = new StringTokenizer(line);
String cmd = tok.nextToken();
if (cmd.equals("o")) {
if (o_pending) {
Model model = new Model();
model.fill(f, vt.size() > 0, vn.size() > 0);
o.add(model);
}
else {
o_pending=true;
}
}
else
if (cmd.equals("v")) {
v.add(read_point(tok));
}
else
if (cmd.equals("vn")) {
vn.add(read_point(tok));
}
else
if (cmd.equals("vt")) {
vt.add(read_point(tok));
}
else
if (cmd.equals("f")) {
if (tok.countTokens() != 3)
continue;
Face face = new Face(3);
while (tok.hasMoreTokens()) {
StringTokenizer face_tok = new StringTokenizer(tok.nextToken(), "/");
int v_idx = -1;
int vt_idx = -1;
int vn_idx = -1;
v_idx = Integer.parseInt(face_tok.nextToken());
if (face_tok.hasMoreTokens()) vt_idx = Integer.parseInt(face_tok.nextToken());
if (face_tok.hasMoreTokens()) vn_idx = Integer.parseInt(face_tok.nextToken());
//Log.v("objmodel", "face: "+v_idx+"/"+vt_idx+"/"+vn_idx);
face.addVertex(
v.get(v_idx-1),
vt_idx == -1 ? null : vt.get(vt_idx-1),
vn_idx == -1 ? null : vn.get(vn_idx-1)
);
}
f.add(face);
}
}
if (o_pending) {
Model model = new Model();
model.fill(f, vt.size() > 0, vn.size() > 0);
o.add(model);
}
obj.mModels = new Model[o.size()];
o.toArray(obj.mModels);
return obj;
}
private static Point3 read_point(StringTokenizer tok) {
Point3 ret = new Point3();
if (tok.hasMoreTokens()) {
ret.x = Float.parseFloat(tok.nextToken());
if (tok.hasMoreTokens()) {
ret.y = Float.parseFloat(tok.nextToken());
if (tok.hasMoreTokens()) {
ret.z = Float.parseFloat(tok.nextToken());
}
}
}
return ret;
}
}
private static class Face {
Point3 v[];
Point3 vt[];
Point3 vn[];
int size;
int count;
public Face(int size) {
this.size = size;
this.count = 0;
this.v = new Point3[size];
this.vt = new Point3[size];
this.vn = new Point3[size];
}
public boolean addVertex(Point3 v, Point3 vt, Point3 vn) {
if (count >= size)
return false;
this.v[count] = v;
this.vt[count] = vt;
this.vn[count] = vn;
count++;
return true;
}
public void pushOnto(FloatBuffer v_buffer, FloatBuffer vt_buffer, FloatBuffer vn_buffer) {
int i;
for (i=0; i<size; i++) {
v_buffer.put(v[i].x); v_buffer.put(v[i].y); v_buffer.put(v[i].z);
if (vt_buffer != null && vt[i] != null) {
vt_buffer.put(vt[i].x); vt_buffer.put(vt[i].y);
}
if (vn_buffer != null && vn[i] != null) {
vn_buffer.put(vn[i].x); vn_buffer.put(vn[i].y); vn_buffer.put(vn[i].z);
}
}
}
}
/**
* It holds the vertex buffer, vertex normals and texture.
* @author Ajay
*/
private static class Model {
public FloatBuffer v;
public FloatBuffer vt;
public FloatBuffer vn;
public int v_size;
public void fill(ArrayList<Face> faces, boolean has_tex, boolean has_normals) {
int f_len = faces.size();
this.v_size = f_len * 3;
ByteBuffer tBuf = ByteBuffer.allocateDirect(this.v_size*3 * 4);
tBuf.order(ByteOrder.nativeOrder());
this.v = tBuf.asFloatBuffer();
if (has_tex) {
ByteBuffer vtBuf = ByteBuffer.allocateDirect(this.v_size*3 * 4);
vtBuf.order(ByteOrder.nativeOrder());
this.vt = vtBuf.asFloatBuffer();
}
if (has_normals) {
ByteBuffer vnBuf = ByteBuffer.allocateDirect(this.v_size*3 * 4);
vnBuf.order(ByteOrder.nativeOrder());
this.vn = vnBuf.asFloatBuffer();
}
int i;
for (i=0; i < f_len; i++) {
Face face = faces.get(i);
face.pushOnto(this.v, this.vt, this.vn);
}
this.v.rewind();
if (this.vt != null)
this.vt.rewind();
if (this.vn != null)
this.vn.rewind();
}
}
}
To rotate an object at a specific position around an axis, you first need to translate the object's center to the origin. In your example, you have an object at (mOrigin) which may be relative to another translation (your camera's position).
Instead of translating your object to its final position and then rotating, you need to translate it to (0,0,0), rotate and then translate to the final position.
In the simplest case this would be:
gl.glRotatef (mRotate.x, 1f, 0f, 0f);
gl.glRotatef (mRotate.y, 0f, 1f, 0f);
gl.glRotatef (mRotate.z, 0f, 0f, 1f);
gl.glTranslatef (mOrigin.x, mOrigin.y, mOrigin.z);
In the more complicated case, where your object is relative to the camera, you would have to do something like this:
gl.glTranslatef (-Camera.x, -Camera.y, -Camera.z);
gl.glRotatef (mRotate.x, 1f, 0f, 0f);
gl.glRotatef (mRotate.y, 0f, 1f, 0f);
gl.glRotatef (mRotate.z, 0f, 0f, 1f);
gl.glTranslatef (Camera.x + mOrigin.x, Camera.y + mOrigin.y, Camera.z + mOrigin.z);
The rotation component of the gluLookAt call seems fine, so I don't see any problem there that would cause issues.
First, from the link you provided, it seems to me that you want to rotate about the Y axis, although you mentioned rotation about the Z axis; these give different results. https://www.dropbox.com/s/ozt7beo4gz5q293/demo2__A.avi
Second, what values are you using in the mRotate vector? Are they in degrees or radians?
Third, are you pushing or popping the matrix stack? If so, how? Or are you feeding your own matrix data into OpenGL?
Also, have you made sure to call glLoadIdentity() to reset the modelview matrix to identity during your draw calls?
To be honest, there are many areas that could cause this issue. To narrow down the problem, do the following in your draw function:
Stop using gluLookAt.
Remove all push and pop calls.
First, make sure to call
glLoadIdentity();
Translate your camera behind the object (in your gluLookAt this is set to 0, 0, 10):
glTranslatef(0.0f, 0.0f, -10.0f);
Then do a simple rotation around the Z axis:
glRotatef(mRotate.z, 0.0f, 0.0f, 1.0f);
EDIT: As far as I can tell, you call onDrawFrame to draw things on the scene, and that call is where you need to make these changes to figure out where your problem is.
Instead of this:
public void onDrawFrame(GL10 gl) {
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glPushMatrix();
gl.glDisable(GL10.GL_DITHER);
GLU.gluLookAt(gl, 0, 0, 10, 0, 0, 0, 0, 1, 0);
//draw_model
gl.glPushMatrix();
if(mOrigin != null && mRotate != null) {
gl.glTranslatef(mOrigin.x, mOrigin.y, mOrigin.z);
gl.glRotatef(mRotate.x, 1f, 0f, 0f);
gl.glRotatef(mRotate.y, 0f, 1f, 0f);
gl.glRotatef(mRotate.z, 0f, 0f, 1f);
}
...
gl.glPopMatrix();
gl.glPopMatrix();
}
try something simpler like this:
public void onDrawFrame(GL10 gl) {
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glDisable(GL10.GL_DITHER);
gl.glMatrixMode(GL10.GL_MODELVIEW); //making sure OpenGL currently in model view
gl.glLoadIdentity(); //clear the model view matrix to identity matrix
gl.glTranslatef(0.0f, 0.0f, -10.0f); //move 10 unit backwards so as if camera moves backwards.
gl.glRotatef(90.0f, 0.0f, 0.0f, 1.0f); // rotate 90 degrees around the Z axis.
//draw_model
...
//gl.glPopMatrix();
//gl.glPopMatrix();
}
By doing this you reduce the possible problem areas a lot and can start figuring out the issue. If the above code works, the remaining possible errors are narrowed down considerably.
It could be that:
you are setting the matrix mode to something other than modelview somewhere else and not setting it back in your draw call,
you are not resetting the modelview matrix to identity in your draw call,
or your origin and rotation values are not correct.
BTW: I'm not an expert in Java, but in C++ and Objective-C we cannot write 1f to set a float value; we must use 1.0f, otherwise the compiler complains. (In Java, 1f is valid.)
I have a problem converting the Android camera preview from YUV format to RGB. The purpose of the conversion is to apply some effects. I am trying to convert in a fragment shader because conversion in native code is slow (about 14 fps). The reference I used is http://jyrom.tistory.com/m/post/view/id/187. I tried to port this code to the Android platform, but the result is black-green rectangles, although I can make out some shapes in the output. Could you please help me resolve this issue? I believe this is a common problem: applying effects to the camera preview. I am also giving a link to my project for testing: https://dl.dropbox.com/u/12829395/application/FilterGL/FilterGL.zip.
Thank you.
UPDATED:
This is my onPreviewFrame method:
public void onPreviewFrame(byte[] data, Camera camera) {
yBuffer.put(data);
yBuffer.position(0);
System.arraycopy(data, U_INDEX, uData, 0, LENGTH_4 * 2);
uBuffer.put(uData);
uBuffer.position(0);
System.arraycopy(data, V_INDEX, vData, 0, LENGTH_4);
vBuffer.put(vData);
vBuffer.position(0);
}
This is how I bind the byte arrays to OpenGL textures in the onDrawFrame method:
GLES20.glUniform1i(yTexture, 1);
GLES20.glTexImage2D( GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
320, 240, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, yBuffer);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glUniform1i(uTexture, 2);
GLES20.glTexImage2D( GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
160, 120, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, uBuffer);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glUniform1i(vTexture, 3);
GLES20.glTexImage2D( GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
160, 120, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, vBuffer);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
And this is my fragment shader code:
#ifdef GL_ES
precision highp float;
#endif
varying vec2 v_texCoord;
uniform sampler2D y_texture;
uniform sampler2D u_texture;
uniform sampler2D v_texture;
void main()
{
float nx,ny,r,g,b,y,u,v;
nx=v_texCoord.x;
ny=v_texCoord.y;
y=texture2D(y_texture,v_texCoord).r;
u=texture2D(u_texture,v_texCoord).r;
v=texture2D(v_texture,v_texCoord).r;
y=1.1643*(y-0.0625);
u=u-0.5;
v=v-0.5;
r=y+1.5958*v;
g=y-0.39173*u-0.81290*v;
b=y+2.017*u;
gl_FragColor = vec4(r,g,b,1.0);
}
I don't know if you have already solved your problem.
I used your code and solved it this way.
public class MyRenderer implements Renderer{
public static final int recWidth = Costanti.recWidth;
public static final int recHeight = Costanti.recHeight;
private static final int U_INDEX = recWidth*recHeight;
private static final int V_INDEX = recWidth*recHeight*5/4;
private static final int LENGTH = recWidth*recHeight;
private static final int LENGTH_4 = recWidth*recHeight/4;
private int previewFrameWidth = 256;
private int previewFrameHeight = 256;
private int[] yTextureNames;
private int[] uTextureNames;
private int[] vTextureNames;
private MainActivity activity;
private FloatBuffer mVertices;
private ShortBuffer mIndices;
private int mProgramObject;
private int mPositionLoc;
private int mTexCoordLoc;
private int yTexture;
private int uTexture;
private int vTexture;
private final float[] mVerticesData = { -1.f, 1.f, 0.0f, // Position 0
0.0f, 0.0f, // TexCoord 0
-1.f, -1.f, 0.0f, // Position 1
0.0f, 1.0f, // TexCoord 1
1.f, -1.f, 0.0f, // Position 2
1.0f, 1.0f, // TexCoord 2
1.f, 1.f, 0.0f, // Position 3
1.0f, 0.0f // TexCoord 3
};
private final short[] mIndicesData = { 0, 1, 2, 0, 2, 3 };
private ByteBuffer yBuffer;
private ByteBuffer uBuffer;
private ByteBuffer vBuffer;
private IntBuffer frameBuffer;
private IntBuffer renderBuffer;
private IntBuffer parameterBufferWidth;
private IntBuffer parameterBufferHeigth;
byte[] ydata = new byte[LENGTH];
byte[] uData = new byte[LENGTH_4];
byte[] vData = new byte[LENGTH_4];
public MyRenderer(MainActivity activity) {
this.activity = activity;
mVertices = ByteBuffer.allocateDirect(mVerticesData.length * 4)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mVertices.put(mVerticesData).position(0);
mIndices = ByteBuffer.allocateDirect(mIndicesData.length * 2)
.order(ByteOrder.nativeOrder()).asShortBuffer();
mIndices.put(mIndicesData).position(0);
yBuffer = MyGraphUtils.makeByteBuffer(LENGTH);
uBuffer = MyGraphUtils.makeByteBuffer(LENGTH_4/* * 2*/);
vBuffer = MyGraphUtils.makeByteBuffer(LENGTH_4);
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
GLES20.glActiveTexture(GLES20.GL_ACTIVE_TEXTURE);
GLES20.glViewport(0, 0, width, height);
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
Log.d("debug", "on surface created");
// Define a simple shader program for our point.
final String vShaderStr = readTextFileFromRawResource(activity, R.raw.v_simple);
final String fShaderStr = readTextFileFromRawResource(activity, R.raw.f_convert);
frameBuffer = IntBuffer.allocate(1);
renderBuffer= IntBuffer.allocate(1);
GLES20.glEnable(GLES20.GL_TEXTURE_2D);
GLES20.glGenFramebuffers(1, frameBuffer);
GLES20.glGenRenderbuffers(1, renderBuffer);
GLES20.glActiveTexture(GLES20.GL_ACTIVE_TEXTURE);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer.get(0));
GLES20.glClear(0);
GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER, renderBuffer.get(0));
GLES20.glRenderbufferStorage(GLES20.GL_RENDERBUFFER, GLES20.GL_DEPTH_COMPONENT16,
320, 240);
parameterBufferHeigth = IntBuffer.allocate(1);
parameterBufferWidth = IntBuffer.allocate(1);
GLES20.glGetRenderbufferParameteriv(GLES20.GL_RENDERBUFFER, GLES20.GL_RENDERBUFFER_WIDTH, parameterBufferWidth);
GLES20.glGetRenderbufferParameteriv(GLES20.GL_RENDERBUFFER, GLES20.GL_RENDERBUFFER_HEIGHT, parameterBufferHeigth);
GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_RENDERBUFFER, renderBuffer.get(0));
if (GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER)!=GLES20.GL_FRAMEBUFFER_COMPLETE){
Log.d("debug", "gl frame buffer status != frame buffer complete");
}
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
GLES20.glClear(0);
mProgramObject = loadProgram(vShaderStr, fShaderStr);
// Get the attribute locations
mPositionLoc = GLES20.glGetAttribLocation(mProgramObject, "a_position");
mTexCoordLoc = GLES20.glGetAttribLocation(mProgramObject, "a_texCoord");
GLES20.glEnable(GLES20.GL_TEXTURE_2D);
yTexture = GLES20.glGetUniformLocation(mProgramObject, "y_texture");
yTextureNames = new int[1];
GLES20.glGenTextures(1, yTextureNames, 0);
int yTextureName = yTextureNames[0];
GLES20.glEnable(GLES20.GL_TEXTURE_2D);
uTexture = GLES20.glGetUniformLocation(mProgramObject, "u_texture");
uTextureNames = new int[1];
GLES20.glGenTextures(1, uTextureNames, 0);
int uTextureName = uTextureNames[0];
GLES20.glEnable(GLES20.GL_TEXTURE_2D);
vTexture = GLES20.glGetUniformLocation(mProgramObject, "v_texture");
vTextureNames = new int[1];
GLES20.glGenTextures(1, vTextureNames, 0);
int vTextureName = vTextureNames[0];
GLES20.glClearColor(1.0f, 0.0f, 0.0f, 0.0f);
}
@Override
public final void onDrawFrame(GL10 gl) {
Log.d("debug", "on Draw frame");
// Clear the color buffer
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
// Use the program object
GLES20.glUseProgram(mProgramObject);
// Load the vertex position
mVertices.position(0);
GLES20.glVertexAttribPointer(mPositionLoc, 3, GLES20.GL_FLOAT, false, 5*4, mVertices);
// Load the texture coordinate
mVertices.position(3);
GLES20.glVertexAttribPointer(mTexCoordLoc, 2, GLES20.GL_FLOAT, false, 5*4, mVertices);
GLES20.glEnableVertexAttribArray(mPositionLoc);
GLES20.glEnableVertexAttribArray(mTexCoordLoc);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yTextureNames[0]);
GLES20.glTexImage2D( GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
320, 240, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, yBuffer);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yTextureNames[0]);
GLES20.glUniform1i(yTexture, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, uTextureNames[0]);
GLES20.glTexImage2D( GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
160, 120, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, uBuffer);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glActiveTexture(GLES20.GL_TEXTURE1+2);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, uTextureNames[0]);
GLES20.glUniform1i(uTexture, 2);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, vTextureNames[0]);
GLES20.glTexImage2D( GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
160, 120, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, vBuffer);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glActiveTexture(GLES20.GL_TEXTURE1+1);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, vTextureNames[0]);
GLES20.glUniform1i(vTexture, 1);
GLES20.glDrawElements(GLES20.GL_TRIANGLES, 6, GLES20.GL_UNSIGNED_SHORT, mIndices);
}
public void setPreviewFrameSize(int realWidth, int realHeight) {
previewFrameHeight = realHeight;
previewFrameWidth = realWidth;
}
public static String readTextFileFromRawResource(final Context context, final int resourceId) {
final InputStream inputStream = context.getResources().openRawResource(resourceId);
final InputStreamReader inputStreamReader = new InputStreamReader(inputStream);
final BufferedReader bufferedReader = new BufferedReader(inputStreamReader);
String nextLine;
final StringBuilder body = new StringBuilder();
try {
while ((nextLine = bufferedReader.readLine()) != null) {
body.append(nextLine);
body.append('\n');
}
} catch (IOException e) {
return null;
}
return body.toString();
}
public static int loadShader(int type, String shaderSrc) {
int shader;
int[] compiled = new int[1];
// Create the shader object
shader = GLES20.glCreateShader(type);
if (shader == 0) {
return 0;
}
// Load the shader source
GLES20.glShaderSource(shader, shaderSrc);
// Compile the shader
GLES20.glCompileShader(shader);
// Check the compile status
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
Log.e("ESShader", GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
return 0;
}
return shader;
}
public static int loadProgram(String vertShaderSrc, String fragShaderSrc) {
int vertexShader;
int fragmentShader;
int programObject;
int[] linked = new int[1];
// Load the vertex/fragment shaders
vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertShaderSrc);
if (vertexShader == 0) {
return 0;
}
fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragShaderSrc);
if (fragmentShader == 0) {
GLES20.glDeleteShader(vertexShader);
return 0;
}
// Create the program object
programObject = GLES20.glCreateProgram();
if (programObject == 0) {
return 0;
}
GLES20.glAttachShader(programObject, vertexShader);
GLES20.glAttachShader(programObject, fragmentShader);
// Link the program
GLES20.glLinkProgram(programObject);
// Check the link status
GLES20.glGetProgramiv(programObject, GLES20.GL_LINK_STATUS, linked, 0);
if (linked[0] == 0) {
Log.e("ESShader", "Error linking program:");
Log.e("ESShader", GLES20.glGetProgramInfoLog(programObject));
GLES20.glDeleteProgram(programObject);
return 0;
}
// Free up no longer needed shader resources
GLES20.glDeleteShader(vertexShader);
GLES20.glDeleteShader(fragmentShader);
return programObject;
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
System.arraycopy(data, 0, ydata, 0, LENGTH);
yBuffer.put(ydata);
yBuffer.position(0);
System.arraycopy(data, U_INDEX, uData, 0, LENGTH_4);
uBuffer.put(uData);
uBuffer.position(0);
System.arraycopy(data, V_INDEX, vData, 0, LENGTH_4);
vBuffer.put(vData);
vBuffer.position(0);
}
}
Not sure if you have already fixed this problem. My answer:
By default the Camera preview output is NV21, but your YUV-to-RGB fragment shader assumes planar YV12.
You will have to call
setPreviewFormat(ImageFormat.YV12);, or maybe use a different shader.
There are 3 textures; make sure you call
GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, muTextureName);
before any call to glTexImage2D or glTexSubImage2D.
You can also call glTexImage2D once and then glTexSubImage2D on every frame.
The size of U and V is the same, at least for YV12, so
System.arraycopy(data, U_INDEX, uData, 0, LENGTH_4 * 2);
should be
System.arraycopy(data, U_INDEX, uData, 0, LENGTH_4);
change the size accordingly in the code.
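Very roughly (muTextureName and vTexture are the names from the code above; width, height and vBuffer are placeholders), the per-frame binding for the V plane on texture unit 2 would look like:
// Select unit 2 and bind the V-plane texture to it.
GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, muTextureName);
// The sampler uniform must point at the same unit the texture is bound to.
GLES20.glUniform1i(vTexture, 2);
// Update only the pixel data; glTexImage2D was called once at setup time.
GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0,
        width / 2, height / 2, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, vBuffer);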
For the fastest and most optimized way, just use the common GL extension:
//Fragment Shader
#extension GL_OES_EGL_image_external : require
uniform samplerExternalOES u_Texture;
Then, in Java:
surfaceTexture = new SurfaceTexture(textureIDs[0]);
try {
someCamera.setPreviewTexture(surfaceTexture);
} catch (IOException t) {
Log.e(TAG, "Cannot set preview texture target!");
}
someCamera.startPreview();
private static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
Then, on the Java GL thread:
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureIDs[0]);
GLES20.glUniform1i(uTextureHandle, 0);
The color conversion is already done for you.
You can do whatever you want right in the fragment shader.
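One thing the snippets above skip is how textureIDs[0] is created. A rough sketch, using GLES11Ext.GL_TEXTURE_EXTERNAL_OES from android.opengl.GLES11Ext rather than the hard-coded 0x8D65 (variable names are placeholders, not the asker's code):
int[] textureIDs = new int[1];
GLES20.glGenTextures(1, textureIDs, 0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureIDs[0]);
// External textures only support linear/nearest filtering and clamp-to-edge wrapping.
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
SurfaceTexture surfaceTexture = new SurfaceTexture(textureIDs[0]);
Remember to call surfaceTexture.updateTexImage() on the GL thread before each draw so the latest camera frame is attached to the texture.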
Hope that saves you some time in your research.
I finally made your project display camera previews.
I found 2 problems:
1. Before binding a texture and changing its parameters, you have to call GLES20.glActiveTexture(...) for the texture unit you are about to modify.
2. The more important and hidden problem is that
GLES20.glTexImage2D() does not work with widths and heights that are not powers of 2.
After allocating the texture with a power-of-two size, for example 1024x1024, you should upload the frame data with GLES20.glTexSubImage2D() (a sketch follows).
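A sketch of that workaround (frameWidth, frameHeight and texBuffer are placeholders, not from the project):
// Allocate a power-of-two texture once, with no data.
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
        1024, 1024, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, null);
// Every frame, copy the real preview-sized data into the top-left corner.
GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0,
        frameWidth, frameHeight, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, texBuffer);
// Texture coordinates then have to be scaled by frameWidth/1024 and frameHeight/1024
// so only the filled region is sampled.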
Good luck!
I applied the solution from How to render Android's YUV-NV21 camera image on the background in libgdx with OpenGLES 2.0 in real-time? to the project shared in the question and got a working project. If you are, like me, searching for tutorial code that does YUV-to-RGB conversion in a fragment shader, you can simply follow these steps to get a working example.
Download the project https://dl.dropbox.com/u/12829395/application/FilterGL/FilterGL.zip and unzip.
Replace the files GLRenderer.java and res/raw/f_convert.glsl entirely with the code shared below.
Open the project in Eclipse, or import the project to Android Studio.
The main issues with the code in the question are:
Without GLES20.glActiveTexture(GLES20.GL_TEXTURE1);, the yBuffer is not passed to GL.
The YUV data is in YUV-NV21 format, and u_texture and v_texture weren't passed and handled correctly in the shader. Refer to this post for more information.
Now the corrected code: please replace GLRenderer.java with
package com.filtergl.shader;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView.Renderer;
import android.util.Log;
public class GLRenderer
implements Renderer, PreviewCallback {
private static final int LENGTH = 76800;
private static final int LENGTH_2 = 38400;
private ActivityFilterGL activity;
private FloatBuffer mVertices;
private ShortBuffer mIndices;
private int previewFrameWidth = 256;
private int previewFrameHeight = 256;
private int mProgramObject;
private int mPositionLoc;
private int mTexCoordLoc;
// private int mSamplerLoc;
private int yTexture;
private int uTexture;
private int vTexture;
private final float[] mVerticesData = { -1.f, 1.f, 0.0f, // Position 0
0.0f, 0.0f, // TexCoord 0
-1.f, -1.f, 0.0f, // Position 1
0.0f, 1.0f, // TexCoord 1
1.f, -1.f, 0.0f, // Position 2
1.0f, 1.0f, // TexCoord 2
1.f, 1.f, 0.0f, // Position 3
1.0f, 0.0f // TexCoord 3
};
private final short[] mIndicesData = { 0, 1, 2, 0, 2, 3 };
private ByteBuffer frameData = null;
private ByteBuffer yBuffer;
private ByteBuffer uBuffer;
public GLRenderer(ActivityFilterGL activity) {
this.activity = activity;
mVertices = ByteBuffer.allocateDirect(mVerticesData.length * 4)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mVertices.put(mVerticesData).position(0);
mIndices = ByteBuffer.allocateDirect(mIndicesData.length * 2)
.order(ByteOrder.nativeOrder()).asShortBuffer();
mIndices.put(mIndicesData).position(0);
yBuffer = GraphicsUtil.makeByteBuffer(LENGTH);
uBuffer = GraphicsUtil.makeByteBuffer(LENGTH_2);
}
@Override
public final void onDrawFrame(GL10 gl) {
// Clear the color buffer
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
// Use the program object
GLES20.glUseProgram(mProgramObject);
// Load the vertex position
mVertices.position(0);
GLES20.glVertexAttribPointer(mPositionLoc, 3, GLES20.GL_FLOAT, false, 5 * 4, mVertices);
// Load the texture coordinate
mVertices.position(3);
GLES20.glVertexAttribPointer(mTexCoordLoc, 2, GLES20.GL_FLOAT, false, 5 * 4, mVertices);
GLES20.glEnableVertexAttribArray(mPositionLoc);
GLES20.glEnableVertexAttribArray(mTexCoordLoc);
GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
GLES20.glUniform1i(yTexture, 1);
GLES20.glTexImage2D( GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
320, 240, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, yBuffer);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
GLES20.glUniform1i(uTexture, 2);
GLES20.glTexImage2D( GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE_ALPHA,
160, 120, 0, GLES20.GL_LUMINANCE_ALPHA, GLES20.GL_UNSIGNED_BYTE, uBuffer);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glDrawElements(GLES20.GL_TRIANGLES, 6, GLES20.GL_UNSIGNED_SHORT, mIndices);
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
GLES20.glViewport(0, 0, width, height);
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
// Define a simple shader program for our point.
final String vShaderStr = readTextFileFromRawResource(activity, R.raw.v_simple);
final String fShaderStr = readTextFileFromRawResource(activity, R.raw.f_convert);
// Load the shaders and get a linked program object
mProgramObject = loadProgram(vShaderStr, fShaderStr);
// Get the attribute locations
mPositionLoc = GLES20.glGetAttribLocation(mProgramObject, "a_position");
mTexCoordLoc = GLES20.glGetAttribLocation(mProgramObject, "a_texCoord");
GLES20.glEnable(GLES20.GL_TEXTURE_2D);
yTexture = GLES20.glGetUniformLocation(mProgramObject, "y_texture");
int[] yTextureNames = new int[1];
GLES20.glGenTextures(1, yTextureNames, 0);
int yTextureName = yTextureNames[0];
GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yTextureName);
GLES20.glEnable(GLES20.GL_TEXTURE_2D);
uTexture = GLES20.glGetUniformLocation(mProgramObject, "u_texture");
int[] uTextureNames = new int[1];
GLES20.glGenTextures(1, uTextureNames, 0);
int uTextureName = uTextureNames[0];
GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, uTextureName);
// Set the clear color (red with zero alpha).
GLES20.glClearColor(1.0f, 0.0f, 0.0f, 0.0f);
}
public void setPreviewFrameSize(int realWidth, int realHeight) {
previewFrameHeight = realHeight;
previewFrameWidth = realWidth;
// frameData = GraphicsUtil.makeByteBuffer(previewFrameHeight * previewFrameWidth * 3);
}
public static String readTextFileFromRawResource(final Context context, final int resourceId) {
final InputStream inputStream = context.getResources().openRawResource(resourceId);
final InputStreamReader inputStreamReader = new InputStreamReader(inputStream);
final BufferedReader bufferedReader = new BufferedReader(inputStreamReader);
String nextLine;
final StringBuilder body = new StringBuilder();
try {
while ((nextLine = bufferedReader.readLine()) != null) {
body.append(nextLine);
body.append('\n');
}
} catch (IOException e) {
return null;
}
return body.toString();
}
public static int loadShader(int type, String shaderSrc) {
int shader;
int[] compiled = new int[1];
// Create the shader object
shader = GLES20.glCreateShader(type);
if (shader == 0) {
return 0;
}
// Load the shader source
GLES20.glShaderSource(shader, shaderSrc);
// Compile the shader
GLES20.glCompileShader(shader);
// Check the compile status
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
Log.e("ESShader", GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
return 0;
}
return shader;
}
public static int loadProgram(String vertShaderSrc, String fragShaderSrc) {
int vertexShader;
int fragmentShader;
int programObject;
int[] linked = new int[1];
// Load the vertex/fragment shaders
vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertShaderSrc);
if (vertexShader == 0) {
return 0;
}
fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragShaderSrc);
if (fragmentShader == 0) {
GLES20.glDeleteShader(vertexShader);
return 0;
}
// Create the program object
programObject = GLES20.glCreateProgram();
if (programObject == 0) {
return 0;
}
GLES20.glAttachShader(programObject, vertexShader);
GLES20.glAttachShader(programObject, fragmentShader);
// Link the program
GLES20.glLinkProgram(programObject);
// Check the link status
GLES20.glGetProgramiv(programObject, GLES20.GL_LINK_STATUS, linked, 0);
if (linked[0] == 0) {
Log.e("ESShader", "Error linking program:");
Log.e("ESShader", GLES20.glGetProgramInfoLog(programObject));
GLES20.glDeleteProgram(programObject);
return 0;
}
// Free up no longer needed shader resources
GLES20.glDeleteShader(vertexShader);
GLES20.glDeleteShader(fragmentShader);
return programObject;
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
yBuffer.put(data, 0, LENGTH);
yBuffer.position(0);
uBuffer.put(data, LENGTH, LENGTH/2);
uBuffer.position(0);
}
}
and replace f_convert.glsl with
#ifdef GL_ES
precision highp float;
#endif
varying vec2 v_texCoord;
uniform sampler2D y_texture;
uniform sampler2D u_texture;
void main()
{
float r, g, b, y, u, v;
//We had put the Y values of each pixel to the R,G,B components by
//GL_LUMINANCE, that's why we're pulling it from the R component,
//we could also use G or B
y = texture2D(y_texture, v_texCoord).r;
//We had put the U and V values of each pixel to the A and R,G,B
//components of the texture respectively using GL_LUMINANCE_ALPHA.
//Since U,V bytes are interleaved in the texture, this is probably
//the fastest way to use them in the shader
u = texture2D(u_texture, v_texCoord).a - 0.5;
v = texture2D(u_texture, v_texCoord).r - 0.5;
//The numbers are just YUV to RGB conversion constants
r = y + 1.13983*v;
g = y - 0.39465*u - 0.58060*v;
b = y + 2.03211*u;
gl_FragColor = vec4(r,g,b,1.0);
}
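Note that GLRenderer.java above also relies on GraphicsUtil.makeByteBuffer, which is not one of the replaced files. A minimal helper along these lines (my assumption about what that utility does, not the original source) makes the listing compile:
// Assumes: import java.nio.ByteBuffer; import java.nio.ByteOrder;
public class GraphicsUtil {
    // Direct, native-ordered buffer so GLES can read the pixel data without an extra copy.
    public static ByteBuffer makeByteBuffer(int size) {
        ByteBuffer buffer = ByteBuffer.allocateDirect(size);
        buffer.order(ByteOrder.nativeOrder());
        return buffer;
    }
}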
I have checked the textures individually, and they work fine.
Each texture is 128x128 pixels. I am working on a Motorola MILESTONE phone
and have tested all the basic examples (from line to cubemap), but got stuck while multitexturing
a rectangle.
The shader works well if I leave out either one of the textures, but the problem starts with two textures for multitexturing.
private float[] myRotateMatrix = new float[16];
private float[] myViewMatrix = new float[16];
private float[] myProjectionMatrix = new float[16];
private float[] myMVPMatrix = new float[16];
private int aPositionLocation;
private int uMVPLocation;
private int aTextureCoordLocation;
private FloatBuffer rectangleVFB;
private ShortBuffer rectangleISB;
private FloatBuffer textureCFB;
private int program;
private int textureId1;
private int textureId2;
private int uSampler1Location;
private int uSampler2Location;
private void initShapes() {
float[] rectangleVFA = {-1,-1,0, 1,-1,0, 1,1,0, -1,1,0};
short[] rectangleISA = {0,1,2, 0,3,2};
float[] textureCFA = {0,0, 1,0, 1,1, 0,1};
ByteBuffer rectangleVBB = ByteBuffer.allocateDirect(rectangleVFA.length * 4);
rectangleVBB.order(ByteOrder.nativeOrder());
rectangleVFB = rectangleVBB.asFloatBuffer();
rectangleVFB.put(rectangleVFA);
rectangleVFB.position(0);
ByteBuffer rectangleIBB = ByteBuffer.allocateDirect(rectangleISA.length * 2);
rectangleIBB.order(ByteOrder.nativeOrder());
rectangleISB = rectangleIBB.asShortBuffer();
rectangleISB.put(rectangleISA);
rectangleISB.position(0);
ByteBuffer textureCBB = ByteBuffer.allocateDirect(textureCFA.length * 4);
textureCBB.order(ByteOrder.nativeOrder());
textureCFB = textureCBB.asFloatBuffer();
textureCFB.put(textureCFA);
textureCFB.position(0);
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
GLES20.glViewport(0, 0, width, height);
checkError("glViewport");
float ratio = (float) width / height;
Matrix.setLookAtM(myViewMatrix, 0, 0, 0, 6, 0, 0, 0, 0, 1, 0);
Matrix.frustumM(myProjectionMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
aPositionLocation = GLES20.glGetAttribLocation(program, "aPosition");
checkError("glGetAttribLocation");
uMVPLocation = GLES20.glGetUniformLocation(program, "uMVP");
checkError("glGetUniformLocation");
aTextureCoordLocation = GLES20.glGetAttribLocation(program, "aTextureCoord");
checkError("glGetAttribLocation");
uSampler1Location = GLES20.glGetUniformLocation(program, "uSampler1");
checkError("glGetUniformLocation");
uSampler2Location = GLES20.glGetUniformLocation(program, "uSampler2");
checkError("glGetUniformLocation");
int[] textures = new int[2];
GLES20.glGenTextures(2, textures, 0);
checkError("glGenTextures");
textureId1 = textures[0];
textureId2 = textures[1];
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId1);
checkError("glBindTexture");
InputStream is1 = context.getResources().openRawResource(R.drawable.brick1);
Bitmap img1;
try {
img1 = BitmapFactory.decodeStream(is1);
}finally {
try {
is1.close();
}catch (IOException ioe) {
}
}
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, img1, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId2);
checkError("glBindTexture");
InputStream is2 = context.getResources().openRawResource(R.drawable.brick2);
Bitmap img2;
try {
img2 = BitmapFactory.decodeStream(is2);
}finally {
try {
is2.close();
}catch (IOException ioe) {
}
}
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, img2, 0);
GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
}
public void onDrawFrame(GL10 gl) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
checkError("glClear");
GLES20.glUseProgram(program);
checkError("glUseProgram");
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
checkError("glActiveTexture");
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId1);
checkError("glBindTexture");
GLES20.glUniform1i(uSampler1Location, 0);
checkError("glUniform1i");
GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
checkError("glActiveTexture");
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId2);
checkError("glBindTexture");
GLES20.glUniform1i(uSampler2Location, 1);
checkError("glUniform1i");
Matrix.setIdentityM(myRotateMatrix, 0);
Matrix.rotateM(myRotateMatrix, 0, touchX, 0, 1, 0);
Matrix.rotateM(myRotateMatrix, 0, touchY, 1, 0, 0);
Matrix.multiplyMM(myMVPMatrix, 0, myViewMatrix, 0, myRotateMatrix, 0);
Matrix.multiplyMM(myMVPMatrix, 0, myProjectionMatrix, 0, myMVPMatrix, 0);
GLES20.glVertexAttribPointer(aPositionLocation, 3, GLES20.GL_FLOAT, false, 12, rectangleVFB);
checkError("glVertexAttribPointer");
GLES20.glEnableVertexAttribArray(aPositionLocation);
checkError("glEnableVertexAttribArray");
GLES20.glVertexAttribPointer(aTextureCoordLocation, 2, GLES20.GL_FLOAT, false, 8, textureCFB);
checkError("glVertexAttribPointer");
GLES20.glEnableVertexAttribArray(aTextureCoordLocation);
checkError("glEnableVertexAttribArray");
GLES20.glUniformMatrix4fv(uMVPLocation, 1, false, myMVPMatrix, 0);
checkError("glUniformMatrix4fv");
GLES20.glDrawElements(GLES20.GL_TRIANGLES, 6, GLES20.GL_UNSIGNED_SHORT, rectangleISB);
checkError("glDrawElements");
}
In OpenGL ES 2.0, texture parameters belong to the texture object, so after every call to glBindTexture (and before texImage2D) the parameters for that texture have to be specified separately.
So, if there are 2 textures (as in multitexturing), you need the 4 glTexParameter calls for texture 1 and again for texture 2, 8 in total.
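A minimal sketch of that fix, applied to the loading code from the question (same IDs, with the parameter block repeated per texture):
// Texture 1: bind, set its own parameters, then upload the bitmap.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId1);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, img1, 0);
// Texture 2: bind, set its own parameters, then upload the bitmap.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId2);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, img2, 0);
Without this, the second texture keeps the default GL_NEAREST_MIPMAP_LINEAR minification filter with no mipmaps, so it is incomplete and typically samples as black, which is why only one texture seemed to work.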