I am new to OpenGL and ARCore and I am using the Google ARCore sample as a base to create my application, with OpenGL ES 2.0. I am able to do pinch zoom (two fingers) using android.view.ScaleGestureDetector.SimpleOnScaleGestureListener, and by using the RotateGestureDetector class from this library I get the rotation in degrees, which works well with my 3D object.
However, while rotating the 3D object it gets scaled too. I want to stop scaling while the user is rotating. How can I achieve this? Or how can I pass scaling and rotation to separate methods so each updates its own matrix? I do not want to use any third-party 3D library for this. (One possible gating approach is sketched after the gesture listeners below.)
Please help me with this. Below is my code; please point out where I am going wrong.
ScaleGesture
private class CustomScaleGesture extends ScaleGestureDetector.SimpleOnScaleGestureListener {
@Override
public boolean onScale(ScaleGestureDetector detector) {
DebugHelper.log("detector.getScaleFactor(): " + detector.getScaleFactor() + " scaleFactor = " + scaleFactor);
scaleFactor *= detector.getScaleFactor();
DebugHelper.log("final scaleFactor: " + scaleFactor);
return true;
}
}
RotationGesture
private class RotateListener extends RotateGestureDetector.SimpleOnRotateGestureListener {
@Override
public boolean onRotate(RotateGestureDetector detector) {
DebugHelper.log("RotateListener called..");
mRotationDegrees -= detector.getRotationDegreesDelta();
DebugHelper.log("RotateListener: " + mRotationDegrees);
return true;
}
}
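For reference, a minimal sketch of the gating approach mentioned above. It assumes the rotation library's SimpleOnRotateGestureListener exposes onRotateBegin/onRotateEnd callbacks (as in the common multitouch gesture-detector libraries), and isRotating is a hypothetical new field:
private boolean isRotating = false; // hypothetical flag: true while a rotation gesture is active

private class RotateListener extends RotateGestureDetector.SimpleOnRotateGestureListener {
    @Override
    public boolean onRotateBegin(RotateGestureDetector detector) {
        isRotating = true; // suspend scaling while the rotation is in progress
        return true;
    }

    @Override
    public boolean onRotate(RotateGestureDetector detector) {
        mRotationDegrees -= detector.getRotationDegreesDelta();
        return true;
    }

    @Override
    public void onRotateEnd(RotateGestureDetector detector) {
        isRotating = false; // allow scaling again
    }
}

private class CustomScaleGesture extends ScaleGestureDetector.SimpleOnScaleGestureListener {
    @Override
    public boolean onScale(ScaleGestureDetector detector) {
        if (!isRotating) { // ignore pinch events while rotating
            scaleFactor *= detector.getScaleFactor();
        }
        return true;
    }
}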
MainActivity
public class MyARActivity extends BaseActivity<MyActivityArBinding> implements GLSurfaceView.Renderer {
//AR Variables
private int mWidth;
private int mHeight;
private boolean capturePicture = false;
private boolean installRequested;
private boolean moving;
float[] projmtx = new float[16];
float[] viewmtx = new float[16];
private Session session;
private Snackbar messageSnackbar;
private DisplayRotationHelper displayRotationHelper;
private final BackgroundRenderer backgroundRenderer = new BackgroundRenderer();
private ObjectRenderer virtualObject;
private ObjectRenderer virtualObjectShadow;
private final PlaneRenderer planeRenderer = new PlaneRenderer();
private PointCloudRenderer pointCloud = new PointCloudRenderer();
// Temporary matrix allocated here to reduce number of allocations for each frame.
private float[] anchorMatrix = new float[16];
// Tap handling and UI.
private ArrayBlockingQueue<MotionEvent> queuedSingleTaps = new ArrayBlockingQueue<>(16);
private ArrayList<Anchor> anchors = new ArrayList<>();
//load and manipulate obj
private SQLiteHelper sqlHelper;
private boolean isObjectChanged = false;
private String objectPath;
private List<CharacterModel> characterModelList = new ArrayList<>();
private boolean isFirstTimeLoad = true;
//Gestures
private float mRotationDegrees = 0.f;
private RotateGestureDetector mRotateDetector;
private float scaleFactor = 1.0f;
private ScaleGestureDetector scaleDetector;
private GestureDetector gestureDetector;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setHeaderVisible(false);
doDefaults();
}
private void doDefaults() {
binding.setPresenter(this);
sqlHelper = SQLiteHelper.getInstance(this);
load3DCharacters();
initAR();
}
@SuppressLint("ClickableViewAccessibility")
private void initAR() {
displayRotationHelper = new DisplayRotationHelper(this);
scaleDetector = new ScaleGestureDetector(this, new CustomScaleGesture());
mRotateDetector = new RotateGestureDetector(getApplicationContext(), new RotateListener());
// Set up tap listener.
gestureDetector = new GestureDetector(this, new GestureDetector.SimpleOnGestureListener() {
@Override
public boolean onSingleTapUp(MotionEvent e) {
if (anchors.size() <= 0) {
onSingleTap(e);
}
return true;
}
@Override
public boolean onDown(MotionEvent e) {
return true;
}
});
binding.surfaceView.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View v, MotionEvent event) {
DebugHelper.log("binding.surfaceView.setOnTouchListener called..");
mRotateDetector.onTouchEvent(event);
scaleDetector.onTouchEvent(event);
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
moving = true;
DebugHelper.log("ACTION_DOWN");
break;
case MotionEvent.ACTION_UP:
DebugHelper.log("ACTION_UP");
moving = false;
break;
case MotionEvent.ACTION_MOVE:
DebugHelper.log("ACTION_MOVE");
if (anchors.size() > 0) {
onSecondTouch(event);
}
break;
}
return gestureDetector.onTouchEvent(event);
}
});
// Set up renderer.
binding.surfaceView.setPreserveEGLContextOnPause(true);
binding.surfaceView.setEGLContextClientVersion(2);
binding.surfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0); // Alpha used for plane blending.
binding.surfaceView.setRenderer(this);
binding.surfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
installRequested = false;
}
private void onSecondTouch(MotionEvent e) {
Log.e("Second Touch", "Executed");
if (e.getPointerCount() > 1) {
scaleDetector.onTouchEvent(e);
} else {
queuedSingleTaps.offer(e);
}
}
private void onSingleTap(MotionEvent e) {
// Queue tap if there is space. Tap is lost if queue is full.
DebugHelper.log("onSingleTap()");
queuedSingleTaps.offer(e);
}
private void load3DCharacters() {
CharacterModel model = new CharacterModel();
model.setName("Cat");
model.setObjectPath("cat/cat.obj");
model.setScaleFactor(0.25f);
model.setResourceId(R.drawable.cat);
characterModelList.add(model);
model = new CharacterModel();
model.setName("Old Man");
model.setObjectPath("man/muro.obj");
model.setScaleFactor(0.0085f);
model.setResourceId(R.drawable.old_man);
characterModelList.add(model);
model = new CharacterModel();
model.setName("Bloodwing");
model.setObjectPath("bloodwing/bloodwing.obj");
model.setScaleFactor(0.0009f);
model.setResourceId(R.drawable.bat);
characterModelList.add(model);
}
private void loadObject(CharacterModel model) {
try {
this.objectPath = model.getObjectPath();
this.scaleFactor = model.getScaleFactor();
if (virtualObject == null) {
virtualObject = new ObjectRenderer(objectPath);
virtualObject.createOnGlThread(this);
virtualObject.setMaterialProperties(0.0f, 1.0f, 1.0f, 6.0f);
} else {
// Clear screen to notify driver it should not load any pixels from previous frame.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
isObjectChanged = true;
virtualObject.updateObjectPath(model.getObjectPath());
}
} catch (Exception ex) {
ex.printStackTrace();
}
}
@Override
public void onDrawFrame(GL10 gl) {
if (isObjectChanged) {
isObjectChanged = false;
try {
virtualObject.createOnGlThread(this);
virtualObject.setMaterialProperties(0.0f, 2.0f, 0.5f, 6.0f);
} catch (IOException e) {
e.printStackTrace();
}
return;
}
// Clear screen to notify driver it should not load any pixels from previous frame.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
if (session == null) {
return;
}
// Notify ARCore session that the view size changed so that the perspective matrix and
// the video background can be properly adjusted.
displayRotationHelper.updateSessionIfNeeded(session);
try {
session.setCameraTextureName(backgroundRenderer.getTextureId());
// Obtain the current frame from ARSession. When the configuration is set to
// UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
// camera framerate.
Frame frame = session.update();
Camera camera = frame.getCamera();
// Handle taps. Handling only one tap per frame, as taps are usually low frequency
// compared to frame rate.
MotionEvent tap = queuedSingleTaps.poll();
if (tap != null && camera.getTrackingState() == TrackingState.TRACKING) {
for (HitResult hit : frame.hitTest(tap)) {
// Check if any plane was hit, and if it was hit inside the plane polygon
Trackable trackable = hit.getTrackable();
// Creates an anchor if a plane or an oriented point was hit.
if ((trackable instanceof Plane && ((Plane) trackable).isPoseInPolygon(hit.getHitPose())) || (trackable instanceof Point && ((Point) trackable).getOrientationMode() == Point.OrientationMode.ESTIMATED_SURFACE_NORMAL)) {
// Hits are sorted by depth. Consider only closest hit on a plane or oriented point.
// Cap the number of objects created. This avoids overloading both the
// rendering system and ARCore.
//if (!isUpdate) {
DebugHelper.log("Anchor size = " + anchors.size());
if (anchors.size() >= 1) {
anchors.get(0).detach();
anchors.remove(0);
}
// Adding an Anchor tells ARCore that it should track this position in
// space. This anchor is created on the Plane to place the 3D model
// in the correct position relative both to the world and to the plane.
if (anchors.size() > 0) {
DebugHelper.log("anchor list has data");
for (Anchor anchor : anchors) {
anchor.detach();
anchors.remove(anchor);
}
}
Anchor anchor = hit.createAnchor();
if (anchor != null)
anchors.add(anchor);
else
DebugHelper.log("anchor is null");
//}
break;
}
}
}
// Draw background.
backgroundRenderer.draw(frame);
// If not tracking, don't draw 3d objects.
if (camera.getTrackingState() == TrackingState.PAUSED) {
return;
}
// Get projection matrix.
camera.getProjectionMatrix(projmtx, 0, 0.1f, 100.0f);
// Get camera matrix and draw.
camera.getViewMatrix(viewmtx, 0);
// Compute lighting from average intensity of the image.
final float lightIntensity = frame.getLightEstimate().getPixelIntensity();
// Visualize tracked points.
PointCloud pointCloud = frame.acquirePointCloud();
this.pointCloud.update(pointCloud);
if (!capturePicture)
this.pointCloud.draw(viewmtx, projmtx);
// Application is responsible for releasing the point cloud resources after
// using it.
pointCloud.release();
// Check if we detected at least one plane. If so, hide the loading message.
if (messageSnackbar != null) {
for (Plane plane : session.getAllTrackables(Plane.class)) {
if (plane.getType() == Plane.Type.HORIZONTAL_UPWARD_FACING && plane.getTrackingState() == TrackingState.TRACKING) {
hideLoadingMessage();
break;
}
}
}
// Visualize planes.
if (!capturePicture)
planeRenderer.drawPlanes(session.getAllTrackables(Plane.class), camera.getDisplayOrientedPose(), projmtx);
// Visualize anchors created by touch.
for (Anchor anchor : anchors) {
if (anchor.getTrackingState() != TrackingState.TRACKING) {
continue;
}
// Get the current pose of an Anchor in world space. The Anchor pose is updated
// during calls to session.update() as ARCore refines its estimate of the world.
anchor.getPose().toMatrix(anchorMatrix, 0);
// Update and draw the model and its shadow.
if (virtualObject != null) {
// Passing my scaleFactor and rotationDegrees to update the model matrix.
virtualObject.updateModelMatrix(anchorMatrix, scaleFactor, mRotationDegrees);
if (viewmtx != null && projmtx != null) {
virtualObject.draw(viewmtx, projmtx, lightIntensity);
}
}
}
if (capturePicture) {
capturePicture = false;
onSavePicture();
}
} catch (Throwable t) {
Log.e(TAG, "Exception on the OpenGL thread", t);
}
}
ObjectRenderer
public void draw(float[] cameraView, float[] cameraPerspective, float lightIntensity) {
try {
ShaderUtil.checkGLError(TAG, "Before draw");
Matrix.multiplyMM(mModelViewMatrix, 0, cameraView, 0, mModelMatrix, 0);
Matrix.multiplyMM(mModelViewProjectionMatrix, 0, cameraPerspective, 0, mModelViewMatrix, 0);
Matrix.setRotateM(mRotationMatrix, 0, MyARActivity.rotationDegrees, 0.0f, 1.0f, 0.0f); // rotation degrees passed into the rotation matrix
Matrix.multiplyMM(mFinalModelViewProjectionMatrix, 0, mModelViewProjectionMatrix, 0, mRotationMatrix, 0);
GLES20.glUseProgram(mProgram);
// Set the lighting environment properties.
Matrix.multiplyMV(mViewLightDirection, 0, mModelViewMatrix, 0, LIGHT_DIRECTION, 0);
normalizeVec3(mViewLightDirection);
GLES20.glUniform4f(mLightingParametersUniform, mViewLightDirection[0], mViewLightDirection[1], mViewLightDirection[2], lightIntensity);
// Set the object material properties.
GLES20.glUniform4f(mMaterialParametersUniform, mAmbient, mDiffuse, mSpecular, mSpecularPower);
// Set the ModelViewProjection matrix in the shader.
GLES20.glUniformMatrix4fv(mModelViewUniform, 1, false, mModelViewMatrix, 0);
// Upload only the final (rotated) MVP matrix; uploading the plain MVP first would just be overwritten.
GLES20.glUniformMatrix4fv(mModelViewProjectionUniform, 1, false, mFinalModelViewProjectionMatrix, 0);
if (mBlendMode != null) {
GLES20.glDepthMask(false);
GLES20.glEnable(GLES20.GL_BLEND);
switch (mBlendMode) {
case Shadow:
// Multiplicative blending function for Shadow.
GLES20.glBlendFunc(GLES20.GL_ZERO, GLES20.GL_ONE_MINUS_SRC_ALPHA);
break;
case Grid:
// Grid, additive blending function.
GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
break;
}
}
if (mObj != null && mObj.getNumMaterialGroups() > 0) {
//Start drawing data from each VAO
for (int i = 0; i < mObj.getNumMaterialGroups(); i++) {
// Attach the object texture.
GLES20.glUniform1i(mTextureUniform, 0);
GLES20.glBindTexture(GL_TEXTURE_2D, mTextures[i]);
GLES30.glBindVertexArray(vectorArrayObjectIds[i]);
GLES20.glDrawElements(GLES20.GL_TRIANGLES, mObj.getMaterialGroup(i).getNumFaces() * 3,
GLES20.GL_UNSIGNED_SHORT, 0);
GLES30.glBindVertexArray(0);
//Unbind texture
GLES20.glBindTexture(GL_TEXTURE_2D, 0);
}
}
if (mBlendMode != null) {
GLES20.glDisable(GLES20.GL_BLEND);
GLES20.glDepthMask(true);
}
ShaderUtil.checkGLError(TAG, "After draw");
} catch (Exception ex) {
ex.printStackTrace();
}
}
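For reference, updateModelMatrix(anchorMatrix, scaleFactor, mRotationDegrees) called from onDrawFrame above is assumed to compose the matrices roughly as sketched below (this is not the stock sample code, which only takes a scale factor). Both transforms act in model space, so the anchor's translation is untouched:
public void updateModelMatrix(float[] modelMatrix, float scaleFactor, float rotationDegrees) {
    // Start from the anchor pose supplied by ARCore.
    System.arraycopy(modelMatrix, 0, mModelMatrix, 0, 16);
    // Rotate about the model's up axis, then apply a uniform scale.
    Matrix.rotateM(mModelMatrix, 0, rotationDegrees, 0.0f, 1.0f, 0.0f);
    Matrix.scaleM(mModelMatrix, 0, scaleFactor, scaleFactor, scaleFactor);
}
If the rotation is baked into the model matrix like this, the extra rotation multiply in draw() above becomes redundant and can be removed.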
Related
I am new to OpenGL and ARCore and I am using the Google ARCore sample as a base to create my application. I am able to scale and zoom the 3D object, but I am not able to replace the current 3D object with another 3D object. I am already deleting the shaders before loading a new object.
Basically, I want to reload onSurfaceCreated(GL10 gl, EGLConfig config), but I do not know how to do it.
Here is my code:
public class MyARActivity extends BaseActivity<MyActivityArBinding> implements GLSurfaceView.Renderer {
private static final String TAG = DashboardActivity.class.getSimpleName();
//AR Variables
private int mWidth;
private int mHeight;
private boolean capturePicture = false;
private boolean installRequested;
private boolean moving;
float[] projmtx = new float[16];
float[] viewmtx = new float[16];
private Session session;
private GestureDetector gestureDetector;
private Snackbar messageSnackbar;
private DisplayRotationHelper displayRotationHelper;
private final BackgroundRenderer backgroundRenderer = new BackgroundRenderer();
private ObjectRenderer virtualObject;// = new ObjectRenderer();
private ObjectRenderer virtualObjectShadow;// = new ObjectRenderer();
private final PlaneRenderer planeRenderer = new PlaneRenderer();
private PointCloudRenderer pointCloud = new PointCloudRenderer();
private ScaleGestureDetector scaleGestureDetector;
private MyScaleGestureDetector_1 myScaleGestureDetector;
// Temporary matrix allocated here to reduce number of allocations for each frame.
private float[] anchorMatrix = new float[16];
// Tap handling and UI.
private ArrayBlockingQueue<MotionEvent> queuedSingleTaps = new ArrayBlockingQueue<>(16);
private ArrayList<Anchor> anchors = new ArrayList<>();
private int[] m3DCharacters = new int[]{R.drawable.cat, R.drawable.old_man, R.drawable.bat};
private SQLiteHelper sqlHelper;
private boolean isUpdate;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setHeaderVisible(false);
doDefaults();
}
private void doDefaults() {
binding.setPresenter(this);
sqlHelper = SQLiteHelper.getInstance(this);
initAR();
}
@SuppressLint("ClickableViewAccessibility")
private void initAR() {
displayRotationHelper = new DisplayRotationHelper(this);
myScaleGestureDetector = new MyScaleGestureDetector_1();
scaleGestureDetector = new ScaleGestureDetector(this, myScaleGestureDetector);
// Set up tap listener.
gestureDetector = new GestureDetector(this, new GestureDetector.SimpleOnGestureListener() {
@Override
public boolean onSingleTapUp(MotionEvent e) {
if (anchors.size() <= 0) {
onSingleTap(e);
}
return true;
}
@Override
public boolean onDown(MotionEvent e) {
return true;
}
});
binding.surfaceView.setOnTouchListener(
new View.OnTouchListener() {
@Override
public boolean onTouch(View v, MotionEvent event) {
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
moving = true;
DebugHelper.log("ACTION_DOWN");
break;
case MotionEvent.ACTION_UP:
DebugHelper.log("ACTION_UP");
moving = false;
DebugHelper.log("SF at ACTION_UP::::", String.valueOf(myScaleGestureDetector.getScaleFactor()));
break;
case MotionEvent.ACTION_MOVE:
if (anchors.size() > 0) {
onSecondTouch(event);
}
break;
}
return gestureDetector.onTouchEvent(event);
}
});
// Set up renderer.
binding.surfaceView.setPreserveEGLContextOnPause(true);
binding.surfaceView.setEGLContextClientVersion(2);
binding.surfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0); // Alpha used for plane blending.
binding.surfaceView.setRenderer(this);
binding.surfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
installRequested = false;
}
private void onSavePicture() {
setLoading(true, "Capturing image...");
Log.e("SavePicture Called", "Yes");
int pixelData[] = new int[mWidth * mHeight];
// Read the pixels from the current GL frame.
IntBuffer buf = IntBuffer.wrap(pixelData);
buf.position(0);
GLES20.glReadPixels(0, 0, mWidth, mHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
// Convert the pixel data from RGBA to what Android wants, ARGB.
int bitmapData[] = new int[pixelData.length];
for (int i = 0; i < mHeight; i++) {
for (int j = 0; j < mWidth; j++) {
int p = pixelData[i * mWidth + j];
int b = (p & 0x00ff0000) >> 16;
int r = (p & 0x000000ff) << 16;
int ga = p & 0xff00ff00;
bitmapData[(mHeight - i - 1) * mWidth + j] = ga | r | b;
}
}
// Create a bitmap.
Bitmap capturedBitmap = Bitmap.createBitmap(bitmapData, mWidth, mHeight, Bitmap.Config.ARGB_8888);
Bitmap waterBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.watermark_camar);
Bitmap waterMarkBitmap = ImageUtil.getInstance().createWaterMaskRightBottom(this, capturedBitmap, waterBitmap, 10, 5);
sqlHelper.saveToInternalStorage(this, waterMarkBitmap);
setLoading(false, "");
}
/**
* To capture the current AR Frame
*/
public void onCaptureClick() {
if (!ExternalPermissionHelper.hasExtStoragePermission(MyARActivity.this)) {
ExternalPermissionHelper.requestExtStoragePermission(MyARActivity.this);
return;
}
capturePicture = true;
}
/**
* To open and close the character selection horizontal list
*/
public void onCharClick() {
createCharacterScrollView();
}
/**
* To close the entire AR view
*/
public void closeScreen() {
finish();
}
private void createCharacterScrollView() {
try {
binding.LinearTopSlider.setVisibility(View.VISIBLE);
LayoutInflater inflater = LayoutInflater.from(this);
binding.charContainer.removeAllViews();
binding.horizontalChar.scrollTo(0, 0);
for (int i = 0; i < m3DCharacters.length; i++) {
View cell = inflater.inflate(R.layout.item_character, null);
ImageView imgGroup = cell.findViewById(R.id.imgChar);
View view = cell.findViewById(R.id.view);
//String name = "cartoon" + (i + 1);
imgGroup.setImageResource(m3DCharacters[i]); //getResourceId(name)
view.setVisibility(i < m3DCharacters.length - 1 ? View.VISIBLE : View.GONE);
binding.charContainer.addView(cell);
cell.setTag(i);
cell.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
final int position = (int) view.getTag();
switch (position) {
case 0:
loadObject(0.28f, 3.5f, 0.28f, "cat/cat.obj");
break;
case 1:
loadObject(0.0085f, 5f, 1f, "man/muro.obj");
break;
case 2:
loadObject(0.0009f, 2.75f, 0.25f, "bloodwing/bloodwing.obj");
break;
}
}
});
}
binding.charContainer.setVisibility(View.VISIBLE);
} catch (Exception e) {
e.printStackTrace();
}
}
private int getResourceId(String drawableName) {
return getResources().getIdentifier(drawableName, "drawable", getPackageName());
}
private void onSecondTouch(MotionEvent e) {
Log.e("Second Touch", "Executed");
if (e.getPointerCount() > 1) {
scaleGestureDetector.onTouchEvent(e);
} else {
queuedSingleTaps.offer(e);
}
}
@Override
protected void onResume() {
super.onResume();
if (session == null) {
Exception exception = null;
String message = null;
try {
switch (ArCoreApk.getInstance().requestInstall(this, !installRequested)) {
case INSTALL_REQUESTED:
installRequested = true;
return;
case INSTALLED:
break;
}
// ARCore requires camera permissions to operate. If we did not yet obtain runtime
// permission on Android M and above, now is a good time to ask the user for it.
if (!CameraPermissionHelper.hasCameraPermission(this)) {
CameraPermissionHelper.requestCameraPermission(this);
return;
}
session = new Session(/* context= */ this);
} catch (UnavailableArcoreNotInstalledException
| UnavailableUserDeclinedInstallationException e) {
message = "Please install ARCore";
exception = e;
} catch (UnavailableApkTooOldException e) {
message = "Please update ARCore";
exception = e;
} catch (UnavailableSdkTooOldException e) {
message = "Please update this app";
exception = e;
} catch (Exception e) {
message = "This device does not support AR";
exception = e;
}
if (message != null) {
showMsg(message);
Log.e(TAG, "Exception creating session", exception);
return;
}
// Create default config and check if supported.
Config config = new Config(session);
if (!session.isSupported(config)) {
showMsg("This device does not support AR");
}
session.configure(config);
}
showLoadingMessage();
// Note that order matters - see the note in onPause(), the reverse applies here.
session.resume();
binding.surfaceView.onResume();
displayRotationHelper.onResume();
}
@Override
public void onPause() {
super.onPause();
if (session != null) {
// Note that the order matters - GLSurfaceView is paused first so that it does not try
// to query the session. If Session is paused before GLSurfaceView, GLSurfaceView may
// still call session.update() and get a SessionPausedException.
displayRotationHelper.onPause();
binding.surfaceView.onPause();
session.pause();
}
}
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] results) {
switch (requestCode) {
case CameraPermissionHelper.CAMERA_PERMISSION_CODE:
if (!CameraPermissionHelper.hasCameraPermission(this)) {
showMsg("Camera permission is needed to run this application");
if (!CameraPermissionHelper.shouldShowRequestPermissionRationale(this)) {
// Permission denied with checking "Do not ask again".
CameraPermissionHelper.launchPermissionSettings(this);
}
finish();
}
break;
case ExternalPermissionHelper.EXT_STORAGE_PERMISSION_CODE:
if (!ExternalPermissionHelper.hasExtStoragePermission(this)) {
showMsg("External storage permission is needed to capture the photo");
if (!ExternalPermissionHelper.shouldShowRequestPermissionRationale(this)) {
// Permission denied with checking "Do not ask again".
ExternalPermissionHelper.launchPermissionSettings(this);
}
finish();
}
break;
}
}
@Override
public void onWindowFocusChanged(boolean hasFocus) {
super.onWindowFocusChanged(hasFocus);
if (hasFocus) {
// Standard Android full-screen functionality.
getWindow()
.getDecorView()
.setSystemUiVisibility(
View.SYSTEM_UI_FLAG_LAYOUT_STABLE
| View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
| View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_FULLSCREEN
| View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
}
private void onSingleTap(MotionEvent e) {
// Queue tap if there is space. Tap is lost if queue is full.
DebugHelper.log("onSingleTap()");
queuedSingleTaps.offer(e);
}
private void loadCatObject() {
try {
myScaleGestureDetector.setScaleFactor(0.25f);
myScaleGestureDetector.setMinValue(3.5f);
myScaleGestureDetector.setMaxValue(0.25f);
virtualObject = new ObjectRenderer("cat/cat.obj");
//virtualObject.createOnGlThread(this, "cat/cat.obj", "cat/cat.png");
virtualObject.createOnGlThread(this);
virtualObject.setMaterialProperties(0.0f, 1.0f, 1.0f, 6.0f);
} catch (IOException ex) {
ex.printStackTrace();
}
}
private void loadBloodwingObject() {
try {
myScaleGestureDetector.setScaleFactor(0.0009f);
myScaleGestureDetector.setMinValue(2.75f);
myScaleGestureDetector.setMaxValue(0.25f);
virtualObject = new ObjectRenderer("bloodwing/bloodwing.obj");
virtualObject.createOnGlThread(this);
//virtualObject.createOnGlThread(this, "bloodwing/bloodwing.obj", "bloodwing/bloodwing.jpg");
virtualObject.setMaterialProperties(0.0f, 1.0f, 1.0f, 6.0f);
} catch (IOException ex) {
ex.printStackTrace();
}
}
private void loadMan() {
try {
myScaleGestureDetector.setScaleFactor(0.0085f);
myScaleGestureDetector.setMinValue(5f);
myScaleGestureDetector.setMaxValue(1f);
virtualObject = new ObjectRenderer("man/muro.obj");
virtualObject.createOnGlThread(this);
virtualObject.setMaterialProperties(0.0f, 1.0f, 1.0f, 6.0f);
} catch (Exception ex) {
ex.printStackTrace();
finish();
}
}
private void loadObject(float scaleFactor, float minValue, float maxValue, String objectPath) {
try {
myScaleGestureDetector.setScaleFactor(scaleFactor);
myScaleGestureDetector.setMinValue(minValue);
myScaleGestureDetector.setMaxValue(maxValue);
if (virtualObject == null) {
virtualObject = new ObjectRenderer(objectPath);
virtualObject.createOnGlThread(this);
virtualObject.setMaterialProperties(0.0f, 1.0f, 1.0f, 6.0f);
} else {
isUpdate = true;
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
virtualObject.updateObjectPath(objectPath);
virtualObject.createOnGlThread(this);
}
} catch (Exception ex) {
ex.printStackTrace();
}
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
GLES20.glClearColor(0.1f, 0.1f, 0.1f, 1.0f);
// Create the texture and pass it to ARCore session to be filled during update().
backgroundRenderer.createOnGlThread(/*context=*/ this);
// Prepare the other rendering objects.
loadObject(0.40f, 3.5f, 0.28f, "cat/cat.obj");
try {
planeRenderer.createOnGlThread(/*context=*/ this, "trigrid.png");
} catch (IOException e) {
Log.e(TAG, "Failed to read plane texture");
}
pointCloud.createOnGlThread(/*context=*/ this);
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
displayRotationHelper.onSurfaceChanged(width, height);
GLES20.glViewport(0, 0, width, height);
mWidth = width;
mHeight = height;
}
@Override
public void onDrawFrame(GL10 gl) {
// Clear screen to notify driver it should not load any pixels from previous frame.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
if (session == null) {
return;
}
// Notify ARCore session that the view size changed so that the perspective matrix and
// the video background can be properly adjusted.
displayRotationHelper.updateSessionIfNeeded(session);
try {
session.setCameraTextureName(backgroundRenderer.getTextureId());
// Obtain the current frame from ARSession. When the configuration is set to
// UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
// camera framerate.
Frame frame = session.update();
Camera camera = frame.getCamera();
// Handle taps. Handling only one tap per frame, as taps are usually low frequency
// compared to frame rate.
MotionEvent tap = queuedSingleTaps.poll();
if (tap != null && camera.getTrackingState() == TrackingState.TRACKING) {
for (HitResult hit : frame.hitTest(tap)) {
// Check if any plane was hit, and if it was hit inside the plane polygon
Trackable trackable = hit.getTrackable();
// Creates an anchor if a plane or an oriented point was hit.
if ((trackable instanceof Plane && ((Plane) trackable).isPoseInPolygon(hit.getHitPose()))
|| (trackable instanceof Point
&& ((Point) trackable).getOrientationMode()
== Point.OrientationMode.ESTIMATED_SURFACE_NORMAL)) {
// Hits are sorted by depth. Consider only closest hit on a plane or oriented point.
// Cap the number of objects created. This avoids overloading both the
// rendering system and ARCore.
//if (!isUpdate) {
DebugHelper.log("Anchor size = " + anchors.size());
if (anchors.size() >= 1) {
anchors.get(0).detach();
anchors.remove(0);
}
// Adding an Anchor tells ARCore that it should track this position in
// space. This anchor is created on the Plane to place the 3D model
// in the correct position relative both to the world and to the plane.
if (isUpdate) {
if (anchors.size() > 0) {
DebugHelper.log("anchor list has data");
for (Anchor anchor : anchors) {
anchor.detach();
anchors.remove(anchor);
}
}
}
Anchor anchor = hit.createAnchor();
if (anchor != null)
anchors.add(anchor);
else
DebugHelper.log("anchor is null");
//}
break;
}
}
}
// Draw background.
backgroundRenderer.draw(frame);
// If not tracking, don't draw 3d objects.
if (camera.getTrackingState() == TrackingState.PAUSED) {
return;
}
// Get projection matrix.
camera.getProjectionMatrix(projmtx, 0, 0.1f, 100.0f);
// Get camera matrix and draw.
camera.getViewMatrix(viewmtx, 0);
// Compute lighting from average intensity of the image.
final float lightIntensity = frame.getLightEstimate().getPixelIntensity();
// Visualize tracked points.
PointCloud pointCloud = frame.acquirePointCloud();
this.pointCloud.update(pointCloud);
if (!capturePicture)
this.pointCloud.draw(viewmtx, projmtx);
// Application is responsible for releasing the point cloud resources after
// using it.
pointCloud.release();
// Check if we detected at least one plane. If so, hide the loading message.
if (messageSnackbar != null) {
for (Plane plane : session.getAllTrackables(Plane.class)) {
if (plane.getType() == Plane.Type.HORIZONTAL_UPWARD_FACING && plane.getTrackingState() == TrackingState.TRACKING) {
hideLoadingMessage();
break;
}
}
}
// Visualize planes.
if (!capturePicture)
planeRenderer.drawPlanes(session.getAllTrackables(Plane.class), camera.getDisplayOrientedPose(), projmtx);
// Visualize anchors created by touch.
for (Anchor anchor : anchors) {
if (anchor.getTrackingState() != TrackingState.TRACKING) {
continue;
}
// Get the current pose of an Anchor in world space. The Anchor pose is updated
// during calls to session.update() as ARCore refines its estimate of the world.
anchor.getPose().toMatrix(anchorMatrix, 0);
// Update and draw the model and its shadow.
if (virtualObject != null) {
virtualObject.updateModelMatrix(anchorMatrix, myScaleGestureDetector.getScaleFactor());
if (viewmtx != null && projmtx != null) {
virtualObject.draw(viewmtx, projmtx, lightIntensity);
}
}
if (virtualObjectShadow != null) {
virtualObjectShadow.updateModelMatrix(anchorMatrix, myScaleGestureDetector.getScaleFactor());
if (viewmtx != null && projmtx != null)
virtualObjectShadow.draw(viewmtx, projmtx, lightIntensity);
}
}
if (capturePicture) {
capturePicture = false;
onSavePicture();
}
} catch (Throwable t) {
// Avoid crashing the application due to unhandled exceptions.
Log.e(TAG, "Exception on the OpenGL thread", t);
}
}
private void setLoading(boolean isLoading, String message) {
binding.setLoading(isLoading);
binding.setLoadingMessage(message);
}
private void showSnackbarMessage(String message, boolean finishOnDismiss) {
messageSnackbar =
Snackbar.make(
MyARActivity.this.findViewById(android.R.id.content),
message,
Snackbar.LENGTH_INDEFINITE);
messageSnackbar.getView().setBackgroundColor(0xbf323232);
if (finishOnDismiss) {
messageSnackbar.setAction(
"Dismiss",
new View.OnClickListener() {
@Override
public void onClick(View v) {
messageSnackbar.dismiss();
}
});
messageSnackbar.addCallback(
new BaseTransientBottomBar.BaseCallback<Snackbar>() {
@Override
public void onDismissed(Snackbar transientBottomBar, int event) {
super.onDismissed(transientBottomBar, event);
finish();
}
});
}
messageSnackbar.show();
}
private void showLoadingMessage() {
runOnUiThread(
new Runnable() {
@Override
public void run() {
showSnackbarMessage("Searching for surfaces...", false);
}
});
}
private void hideLoadingMessage() {
runOnUiThread(
new Runnable() {
@Override
public void run() {
if (messageSnackbar != null) {
messageSnackbar.dismiss();
}
messageSnackbar = null;
}
});
}
}
In my ObjectRenderer and BackgroundRenderer I reset the state.
Here is my reset code:
BackgroundRenderer.java
public void resetData() {
GLES20.glDeleteShader(vertexShader);
GLES20.glDeleteShader(fragmentShader);
GLES20.glDeleteProgram(quadProgram);
textureId = -1;
}
ObjectRenderer.java
public void updateObjectPath(String objectPath) {
try {
GLES20.glDeleteShader(vertexShader);
GLES20.glDeleteShader(fragmentShader);
GLES20.glDeleteProgram(mProgram);
reset();
this.OBJ_PATH = objectPath;
}
catch (Exception e) {
e.printStackTrace();
}
}
private void reset() {
this.OBJ_PATH = null;
this.mObj = null;
this.mModelMatrix = null;
this.mModelViewMatrix = null;
this.mModelViewProjectionMatrix = null;
this.mViewLightDirection = null;
this.mTextures = null;
this.vectorArrayObjectIds = null;
this.mMaterialParametersUniform = -1;
this.mModelViewProjectionUniform = -1;
this.mVerticesBaseAddress = -1;
this.mTexCoordsBaseAddress = -1;
this.mNormalAttribute = -1;
this.mNormalsBaseAddress = -1;
this.mIndexBufferId = -1;
this.mVertexBufferId = -1;
this.mBlendMode = null;
this.mProgram = -1;
this.mLightingParametersUniform = -1;
this.mIndexCount = -1;
this.mModelViewUniform = -1;
this.mPositionAttribute = -1;
this.mTexCoordAttribute = -1;
this.vertexShader = -1;
this.fragmentShader = -1;
this.mAmbient = 0.3f;
this.mDiffuse = 1.0f;
this.mSpecular = 1.0f;
this.mSpecularPower = 6.0f;
this.mModelMatrix = new float[16];
this.mModelViewMatrix = new float[16];
this.mModelViewProjectionMatrix = new float[16];
this.mViewLightDirection = new float[4];
}
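One caveat with reset() as written: nulling the Java fields does not free the GPU-side objects. A sketch of what could run on the GL thread before the handles are dropped, assuming the IDs were created with glGenTextures/glGenBuffers/glGenVertexArrays (field names follow the code above):
// Free GPU-side objects before discarding the Java handles (GL thread only).
if (mTextures != null) {
    GLES20.glDeleteTextures(mTextures.length, mTextures, 0);
}
GLES20.glDeleteBuffers(2, new int[]{mVertexBufferId, mIndexBufferId}, 0);
if (vectorArrayObjectIds != null) {
    GLES30.glDeleteVertexArrays(vectorArrayObjectIds.length, vectorArrayObjectIds, 0);
}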
Can anyone guide me on this?
Thanks in advance.
You need to pass the new object name and its texture to the onDrawFrame method, because GL objects can only be created on the GL thread.
Also, note that you're using GL version 2:
glSurfaceView.setEGLContextClientVersion(2);
@Override
public void onDrawFrame(GL10 gl) {
if (isObjChanged) {
isObjChanged = false;
try {
virtualObject.createOnGlThread(getContext(), objName, textureName);
virtualObject.setMaterialProperties(0.0f, 2.0f, 0.5f, 6.0f);
} catch (IOException e) {
e.printStackTrace();
}
return;
}
// Clear screen to notify driver it should not load any pixels from previous frame.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
if (session == null) {
return;
}
// Notify ARCore session that the view size changed so that the perspective matrix and
// the video background can be properly adjusted.
displayRotationHelper.updateSessionIfNeeded(session);
try {
session.setCameraTextureName(backgroundRenderer.getTextureId());
// Obtain the current frame from ARSession. When the configuration is set to
// UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
// camera framerate.
Frame frame = session.update();
Camera camera = frame.getCamera();
// Handle taps. Handling only one tap per frame, as taps are usually low frequency
// compared to frame rate.
MotionEvent tap = tapHelper.poll();
if (tap != null && camera.getTrackingState() == TrackingState.TRACKING) {
for (HitResult hit : frame.hitTest(tap)) {
// Check if any plane was hit, and if it was hit inside the plane polygon
Trackable trackable = hit.getTrackable();
// Creates an anchor if a plane or an oriented point was hit.
if ((trackable instanceof Plane && ((Plane) trackable).isPoseInPolygon(hit.getHitPose()))
|| (trackable instanceof Point
&& ((Point) trackable).getOrientationMode()
== Point.OrientationMode.ESTIMATED_SURFACE_NORMAL)) {
// Hits are sorted by depth. Consider only closest hit on a plane or oriented point.
// Cap the number of objects created. This avoids overloading both the
// rendering system and ARCore.
if (anchors.size() >= 1) {
anchors.get(0).detach();
anchors.remove(0);
}
// Adding an Anchor tells ARCore that it should track this position in
// space. This anchor is created on the Plane to place the 3D model
// in the correct position relative both to the world and to the plane.
anchors.add(hit.createAnchor());
break;
}
}
}
// Draw background.
backgroundRenderer.draw(frame);
// If not tracking, don't draw 3d objects.
if (camera.getTrackingState() == TrackingState.PAUSED) {
return;
}
// Get projection matrix.
float[] projmtx = new float[16];
camera.getProjectionMatrix(projmtx, 0, 0.1f, 100.0f);
// Get camera matrix and draw.
float[] viewmtx = new float[16];
camera.getViewMatrix(viewmtx, 0);
// Compute lighting from average intensity of the image.
// The first three components are color scaling factors.
// The last one is the average pixel intensity in gamma space.
final float[] colorCorrectionRgba = new float[4];
frame.getLightEstimate().getColorCorrection(colorCorrectionRgba, 0);
// Visualize tracked points.
PointCloud pointCloud = frame.acquirePointCloud();
pointCloudRenderer.update(pointCloud);
pointCloudRenderer.draw(viewmtx, projmtx);
// Application is responsible for releasing the point cloud resources after
// using it.
pointCloud.release();
// Check if we detected at least one plane. If so, hide the loading message.
if (messageSnackbarHelper.isShowing()) {
for (Plane plane : session.getAllTrackables(Plane.class)) {
if (plane.getType() == com.google.ar.core.Plane.Type.HORIZONTAL_UPWARD_FACING
&& plane.getTrackingState() == TrackingState.TRACKING) {
messageSnackbarHelper.hide(getActivity());
break;
}
}
}
// Visualize planes.
planeRenderer.drawPlanes(
session.getAllTrackables(Plane.class), camera.getDisplayOrientedPose(), projmtx);
// Visualize anchors created by touch.
for (Anchor anchor : anchors) {
if (anchor.getTrackingState() != TrackingState.TRACKING) {
continue;
}
// Get the current pose of an Anchor in world space. The Anchor pose is updated
// during calls to session.update() as ARCore refines its estimate of the world.
anchor.getPose().toMatrix(anchorMatrix, 0);
// Update and draw the model and its shadow.
virtualObject.updateModelMatrix(anchorMatrix, scaleFactor);
virtualObject.draw(viewmtx, projmtx, colorCorrectionRgba);
}
} catch (Throwable t) {
// Avoid crashing the application due to unhandled exceptions.
Log.e(TAG, "Exception on the OpenGL thread", t);
}
}
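For completeness, a minimal sketch of the UI-thread side, assuming isObjChanged, objName and textureName are the fields read by onDrawFrame above; only state changes here, all GL work stays on the GL thread:
// Called from the UI thread, e.g. when the user picks a new character.
public void changeObject(String newObjName, String newTextureName) {
    objName = newObjName;
    textureName = newTextureName;
    isObjChanged = true; // onDrawFrame recreates the object on the GL thread
}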
OpenGL ES defines the following 6 error values:
1280 GL_INVALID_ENUM
1281 GL_INVALID_VALUE
1282 GL_INVALID_OPERATION
1283 GL_STACK_OVERFLOW
1284 GL_STACK_UNDERFLOW
1285 GL_OUT_OF_MEMORY
You got a GL_INVALID_ENUM error, which means you passed an unsupported enum value to a GL function.
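To locate the call that raises it, one common technique is to drain glGetError() after each suspect GL call; a minimal sketch:
// Logs and clears all pending GL errors; call after a suspect GL call.
// glGetError() returns one error at a time, so loop until GL_NO_ERROR.
public static void checkGlError(String tag, String label) {
    int error;
    while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
        Log.e(tag, label + ": glError 0x" + Integer.toHexString(error));
    }
}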
I am trying to load the latest ADF and localize against it. If the device is localized, that status is displayed in a TextView.
Once localized, clicking the save button shows an EditText and changes the button text to "Save Object"; clicking it again toasts the entered text.
I am using a Rajawali 3D surface view to render the video.
But on running it my screen is black; only the TextView and button are shown.
The localization TextView is also blank.
Here is my MainActivity code
public class MainActivity extends Activity {
private static final String TAG = MainActivity.class.getSimpleName();
private static final double UPDATE_INTERVAL_MS = 100.0;
private final Object mSharedLock = new Object();
// private static final String sTimestampFormat = "Timestamp: %f";
private static final int SECS_TO_MILLISECS = 1000;
private double mPreviousPoseTimeStamp;
private double mTimeToNextUpdate = UPDATE_INTERVAL_MS;
private TextView mUuidTextView;
private TextView mRelocalizationTextView;
private boolean mIsRelocalized;
private EditText mObjectValueEditText;
private Button mSaveButton;
private boolean mIsLocationSave = false;
private boolean mIsSavePoseData = false;
private static final int INVALID_TEXTURE_ID = 0;
private RajawaliSurfaceView mSurfaceView;
private VideoRenderer mRenderer;
// For all current Tango devices, color camera is in the camera id 0.
private static final int COLOR_CAMERA_ID = 0;
private TangoCameraIntrinsics mIntrinsics;
private Tango mTango;
private TangoConfig mConfig;
private boolean mIsConnected = false;
private double mCameraPoseTimestamp = 0;
// NOTE: Naming indicates which thread is in charge of updating this variable
private int mConnectedTextureIdGlThread = INVALID_TEXTURE_ID;
private AtomicBoolean mIsFrameAvailableTangoThread = new AtomicBoolean(false);
private double mRgbTimestampGlThread;
private int mColorCameraToDisplayAndroidRotation = 0;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
startActivityForResult(
Tango.getRequestPermissionIntent(Tango.PERMISSIONTYPE_ADF_LOAD_SAVE),
Tango.TANGO_INTENT_ACTIVITYCODE);
mSurfaceView = (RajawaliSurfaceView) findViewById(R.id.surfaceview);
mObjectValueEditText = (EditText) findViewById(R.id.location_edittext);
mSaveButton = (Button) findViewById(R.id.save_button);
mRenderer = new VideoRenderer(this);
onClickSaveButton();
// Set-up a dummy OpenGL renderer associated with this surface view
DisplayManager displayManager = (DisplayManager) getSystemService(DISPLAY_SERVICE);
if (displayManager != null) {
displayManager.registerDisplayListener(new DisplayManager.DisplayListener() {
@Override
public void onDisplayAdded(int displayId) {}
@Override
public void onDisplayChanged(int displayId) {
synchronized (this) {
Display display = getWindowManager().getDefaultDisplay();
Camera.CameraInfo colorCameraInfo = new Camera.CameraInfo();
Camera.getCameraInfo(COLOR_CAMERA_ID, colorCameraInfo);
mColorCameraToDisplayAndroidRotation =
getColorCameraToDisplayAndroidRotation(display.getRotation(),
colorCameraInfo.orientation);
mRenderer.updateColorCameraTextureUv(mColorCameraToDisplayAndroidRotation);
}
}
@Override
public void onDisplayRemoved(int displayId) {}
}, null);
}
setupRenderer();
}
private void onClickSaveButton() {
mSaveButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (mIsLocationSave == false) {
mIsLocationSave = true;
mObjectValueEditText.setVisibility(View.VISIBLE);
mSaveButton.setText(getResources().getString(R.string.save_object));
} else {
mIsLocationSave = false;
String text=mObjectValueEditText.getText().toString();
Toast.makeText(getApplicationContext(),text,Toast.LENGTH_SHORT).show();
mObjectValueEditText.setVisibility(View.INVISIBLE);
//TODO:Add helper logic
mIsSavePoseData=true;
mSaveButton.setText(getResources().getString(R.string.save));
}
}
});
}
@Override
protected void onResume() {
super.onResume();
mSurfaceView.onResume();
// Set render mode to RENDERMODE_CONTINUOUSLY to force getting onDraw callbacks until the
// Tango service is properly set-up and we start getting onFrameAvailable callbacks.
mSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
// Initialize Tango Service as a normal Android Service, since we call mTango.disconnect()
// in onPause, this will unbind Tango Service, so every time when onResume gets called, we
// should create a new Tango object.
mTango = new Tango(MainActivity.this, new Runnable() {
// Pass in a Runnable to be called from UI thread when Tango is ready, this Runnable
// will be running on a new thread.
// When Tango is ready, we can call Tango functions safely here only when there is no UI
// thread changes involved.
@Override
public void run() {
// Synchronize against disconnecting while the service is being used in
// the OpenGL thread or in the UI thread.
synchronized (MainActivity.this) {
try {
TangoSupport.initialize();
mConfig = setupTangoConfig(mTango);
mTango.connect(mConfig);
startupTango();
mIsConnected = true;
} catch (TangoOutOfDateException e) {
Log.e(TAG, getString(R.string.exception_out_of_date), e);
} catch (TangoErrorException e) {
Log.e(TAG, getString(R.string.exception_tango_error), e);
} catch (TangoInvalidException e) {
Log.e(TAG, getString(R.string.exception_tango_invalid), e);
}
runOnUiThread(new Runnable() {
@Override
public void run() {
synchronized (MainActivity.this) {
setupTextViewsAndButtons(mTango, true,
true);
}
}
});
}
}
});
}
@Override
protected void onPause() {
super.onPause();
mSurfaceView.onPause();
mIsRelocalized = false;
// Synchronize against disconnecting while the service is being used in the OpenGL
// thread or in the UI thread.
// NOTE: DO NOT lock against this same object in the Tango callback thread.
// Tango.disconnect will block here until all Tango callback calls are finished.
// If you lock against this object in a Tango callback thread it will cause a deadlock.
synchronized (this) {
try {
mIsConnected = false;
mTango.disconnectCamera(TangoCameraIntrinsics.TANGO_CAMERA_COLOR);
// We need to invalidate the connected texture ID so that we cause a
// re-connection in the OpenGL thread after resume
mConnectedTextureIdGlThread = INVALID_TEXTURE_ID;
mTango.disconnect();
mIsConnected = false;
} catch (TangoErrorException e) {
Log.e(TAG, getString(R.string.exception_tango_error), e);
}
}
}
/**
* Sets up the tango configuration object. Make sure mTango object is initialized before
* making this call.
*/
private TangoConfig setupTangoConfig(Tango tango) {
// Create a new Tango Configuration and enable the Camera API
TangoConfig config = tango.getConfig(TangoConfig.CONFIG_TYPE_DEFAULT);
config.putBoolean(TangoConfig.KEY_BOOLEAN_COLORCAMERA, true);
config.putBoolean(TangoConfig.KEY_BOOLEAN_DRIFT_CORRECTION, true);
config.putBoolean(TangoConfig.KEY_BOOLEAN_LOWLATENCYIMUINTEGRATION, true);
config.putBoolean(TangoConfig.KEY_BOOLEAN_LEARNINGMODE, false);
config.putBoolean(TangoConfig.KEY_BOOLEAN_MOTIONTRACKING, true);
// Tango service should automatically attempt to recover when it enters an invalid state.
config.putBoolean(TangoConfig.KEY_BOOLEAN_AUTORECOVERY, true);
// Check for Load ADF/Constant Space relocalization mode.
ArrayList<String> fullUuidList;
// Returns a list of ADFs with their UUIDs.
fullUuidList = tango.listAreaDescriptions();
// Load the latest ADF if ADFs are found.
if (fullUuidList.size() > 0) {
config.putString(TangoConfig.KEY_STRING_AREADESCRIPTION,
fullUuidList.get(fullUuidList.size() - 1));
}
return config;
}
/**
* Set up the callback listeners for the Tango service and obtain other parameters required
* after Tango connection.
* Listen to updates from the RGB camera.
*/
private void startupTango() {
// Lock configuration and connect to Tango
// Select coordinate frame pair
ArrayList<TangoCoordinateFramePair> framePairs = new ArrayList<TangoCoordinateFramePair>();
framePairs.add(new TangoCoordinateFramePair(
TangoPoseData.COORDINATE_FRAME_START_OF_SERVICE,
TangoPoseData.COORDINATE_FRAME_DEVICE));
framePairs.add(new TangoCoordinateFramePair(
TangoPoseData.COORDINATE_FRAME_AREA_DESCRIPTION,
TangoPoseData.COORDINATE_FRAME_DEVICE));
framePairs.add(new TangoCoordinateFramePair(
TangoPoseData.COORDINATE_FRAME_AREA_DESCRIPTION,
TangoPoseData.COORDINATE_FRAME_START_OF_SERVICE));
// Listen for new Tango data
mTango.connectListener(framePairs, new Tango.OnTangoUpdateListener() {
@Override
public void onPoseAvailable(final TangoPoseData pose) {
// Make sure to have atomic access to Tango Data so that UI loop doesn't interfere
// while Pose call back is updating the data.
synchronized (mSharedLock) {
// Check for Device wrt ADF pose, Device wrt Start of Service pose, Start of
// Service wrt ADF pose (This pose determines if the device is relocalized or
// not).
if (pose.baseFrame == TangoPoseData.COORDINATE_FRAME_AREA_DESCRIPTION
&& pose.targetFrame == TangoPoseData
.COORDINATE_FRAME_START_OF_SERVICE) {
if (pose.statusCode == TangoPoseData.POSE_VALID) {
mIsRelocalized = true;
} else {
mIsRelocalized = false;
}
}
if (mIsSavePoseData && mIsRelocalized) {
mIsSavePoseData = false;
float[] translations = pose.getTranslationAsFloats();
Log.i("translation", translations[0] + " " + translations[1] + " " + translations[2]);
}
}
final double deltaTime = (pose.timestamp - mPreviousPoseTimeStamp) *
SECS_TO_MILLISECS;
mPreviousPoseTimeStamp = pose.timestamp;
mTimeToNextUpdate -= deltaTime;
if (mTimeToNextUpdate < 0.0) {
mTimeToNextUpdate = UPDATE_INTERVAL_MS;
runOnUiThread(new Runnable() {
@Override
public void run() {
synchronized (mSharedLock) {
mRelocalizationTextView.setText(mIsRelocalized ?
getString(R.string.localized) :
getString(R.string.not_localized));
}
}
});
}
}
//Deprecated method
@Override
public void onXyzIjAvailable(TangoXyzIjData xyzIj) {
// We are not using onXyzIjAvailable for this app.
}
@Override
public void onPointCloudAvailable(final TangoPointCloudData pointCloudData) {
}
@Override
public void onTangoEvent(final TangoEvent event) {
}
@Override
public void onFrameAvailable(int cameraId) {
Log.d(TAG, "onFrameAvailable");
if (cameraId == TangoCameraIntrinsics.TANGO_CAMERA_COLOR) {
// Now that we are receiving onFrameAvailable callbacks, we can switch
// to RENDERMODE_WHEN_DIRTY to drive the render loop from this callback.
// This will result on a frame rate of approximately 30FPS, in synchrony with
// the RGB camera driver.
// If you need to render at a higher rate (i.e.: if you want to render complex
// animations smoothly) you can use RENDERMODE_CONTINUOUSLY throughout the
// application lifecycle.
if (mSurfaceView.getRenderMode() != GLSurfaceView.RENDERMODE_WHEN_DIRTY) {
mSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
mSurfaceView.requestRender();
}
}
});
}
/**
* Sets Texts views to display statistics of Poses being received. This also sets the buttons
* used in the UI. Please note that this needs to be called after TangoService and Config
* objects are initialized since we use them for the SDK related stuff like version number
* etc.
*/
private void setupTextViewsAndButtons(Tango tango, boolean isLearningMode, boolean isLoadAdf) {
mRelocalizationTextView = (TextView) findViewById(R.id.relocalization_textview);
mUuidTextView = (TextView) findViewById(R.id.adf_uuid_textview);
if (isLoadAdf) {
ArrayList<String> fullUuidList;
// Returns a list of ADFs with their UUIDs
fullUuidList = tango.listAreaDescriptions();
if (fullUuidList.size() == 0) {
mUuidTextView.setText(R.string.no_uuid);
} else {
mUuidTextView.setText(getString(R.string.number_of_adfs) + fullUuidList.size()
+ getString(R.string.latest_adf_is)
+ fullUuidList.get(fullUuidList.size() - 1));
}
}
}
/**
* Here is where you would set-up your rendering logic. We're replacing it with a minimalistic,
* dummy example using a standard GLSurfaceView and a basic renderer, for illustration purposes
* only.
*/
private void setupRenderer() {
mRenderer.getCurrentScene().registerFrameCallback(new ASceneFrameCallback() {
@Override
public void onPreFrame(long sceneTime, double deltaTime) {
// NOTE: This is called from the OpenGL render thread, after all the renderer
// onRender callbacks have run and before scene objects are rendered
// into the scene.
// Prevent concurrent access to {#code mIsFrameAvailableTangoThread} from the Tango
// callback thread and service disconnection from an onPause event.
try {
synchronized (MainActivity.this) {
// Don't execute any tango API actions if we're not connected to the service
if (!mIsConnected) {
return;
}
// Set-up scene camera projection to match RGB camera intrinsics.
if (!mRenderer.isSceneCameraConfigured()) {
mRenderer.setProjectionMatrix(
projectionMatrixFromCameraIntrinsics(mIntrinsics,
mColorCameraToDisplayAndroidRotation));
}
// Connect the camera texture to the OpenGL Texture if necessary
// NOTE: When the OpenGL context is recycled, Rajawali may re-generate the
// texture with a different ID.
if (mConnectedTextureIdGlThread != mRenderer.getTextureId()) {
mTango.connectTextureId(TangoCameraIntrinsics.TANGO_CAMERA_COLOR,
mRenderer.getTextureId());
mConnectedTextureIdGlThread = mRenderer.getTextureId();
Log.d(TAG, "connected to texture id: " + mRenderer.getTextureId());
}
if (mIsFrameAvailableTangoThread.compareAndSet(true, false)) {
mRgbTimestampGlThread =
mTango.updateTexture(TangoCameraIntrinsics.TANGO_CAMERA_COLOR);
}
// If a new RGB frame has been rendered, update the camera pose to match.
if (mRgbTimestampGlThread > mCameraPoseTimestamp) {
// Calculate the camera color pose at the camera frame update time in
// OpenGL engine.
//
// When drift correction mode is enabled in config file, we must query
// the device with respect to Area Description pose in order to use the
// drift corrected pose.
//
// Note that if you don't want to use the drift corrected pose, the
// normal device with respect to start of service pose is available.
TangoPoseData lastFramePose = TangoSupport.getPoseAtTime(
mRgbTimestampGlThread,
TangoPoseData.COORDINATE_FRAME_AREA_DESCRIPTION,
TangoPoseData.COORDINATE_FRAME_CAMERA_COLOR,
TangoSupport.TANGO_SUPPORT_ENGINE_OPENGL,
mColorCameraToDisplayAndroidRotation);
if (lastFramePose.statusCode == TangoPoseData.POSE_VALID) {
// Update the camera pose from the renderer
mRenderer.updateRenderCameraPose(lastFramePose);
mCameraPoseTimestamp = lastFramePose.timestamp;
} else {
// When the pose status is not valid, it indicates the tracking has
// been lost. In this case, we simply stop rendering.
//
// This is also the place to display UI to suggest the user walk
// to recover tracking.
Log.w(TAG, "Can't get device pose at time: " +
mRgbTimestampGlThread);
}
}
}
// Avoid crashing the application due to unhandled exceptions
} catch (TangoErrorException e) {
Log.e(TAG, "Tango API call error within the OpenGL render thread", e);
} catch (Throwable t) {
Log.e(TAG, "Exception on the OpenGL thread", t);
}
}
@Override
public void onPreDraw(long sceneTime, double deltaTime) {
}
@Override
public void onPostFrame(long sceneTime, double deltaTime) {
}
@Override
public boolean callPreFrame() {
return true;
}
});
mSurfaceView.setSurfaceRenderer(mRenderer);
}
private static int getColorCameraToDisplayAndroidRotation(int displayRotation,
int cameraRotation) {
int cameraRotationNormalized = 0;
switch (cameraRotation) {
case 90:
cameraRotationNormalized = 1;
break;
case 180:
cameraRotationNormalized = 2;
break;
case 270:
cameraRotationNormalized = 3;
break;
default:
cameraRotationNormalized = 0;
break;
}
int ret = displayRotation - cameraRotationNormalized;
if (ret < 0) {
ret += 4;
}
return ret;
}
/**
* Use Tango camera intrinsics to calculate the projection matrix for the Rajawali scene.
*/
private static float[] projectionMatrixFromCameraIntrinsics(TangoCameraIntrinsics intrinsics,
int rotation) {
// Adjust camera intrinsics according to rotation
float cx = (float) intrinsics.cx;
float cy = (float) intrinsics.cy;
float width = (float) intrinsics.width;
float height = (float) intrinsics.height;
float fx = (float) intrinsics.fx;
float fy = (float) intrinsics.fy;
switch (rotation) {
case Surface.ROTATION_90:
cx = (float) intrinsics.cy;
cy = (float) intrinsics.width - (float) intrinsics.cx;
width = (float) intrinsics.height;
height = (float) intrinsics.width;
fx = (float) intrinsics.fy;
fy = (float) intrinsics.fx;
break;
case Surface.ROTATION_180:
cx = (float) intrinsics.width - cx;
cy = (float) intrinsics.height - cy;
break;
case Surface.ROTATION_270:
cx = (float) intrinsics.height - (float) intrinsics.cy;
cy = (float) intrinsics.cx;
width = (float) intrinsics.height;
height = (float) intrinsics.width;
fx = (float) intrinsics.fy;
fy = (float) intrinsics.fx;
break;
default:
break;
}
// Uses frustumM to create a projection matrix taking into account calibrated camera
// intrinsic parameter.
// Reference: http://ksimek.github.io/2013/06/03/calibrated_cameras_in_opengl/
float near = 0.1f;
float far = 100;
float xScale = near / fx;
float yScale = near / fy;
float xOffset = (cx - (width / 2.0f)) * xScale;
// Color camera's coordinates has y pointing downwards so we negate this term.
float yOffset = -(cy - (height / 2.0f)) * yScale;
float m[] = new float[16];
Matrix.frustumM(m, 0,
xScale * (float) -width / 2.0f - xOffset,
xScale * (float) width / 2.0f - xOffset,
yScale * (float) -height / 2.0f - yOffset,
yScale * (float) height / 2.0f - yOffset,
near, far);
return m;
}
}
VideoRenderer.java code
// Extends RajawaliRenderer
/**
* Renderer that implements a basic augmented reality scene using Rajawali.
* It creates a scene with a background quad taking the whole screen, where the color camera contents are rendered.
*/
public class VideoRenderer extends RajawaliRenderer {
private static final String TAG = VideoRenderer.class.getSimpleName();
private float[] textureCoords0 = new float[]{0.0F, 0.0F, 1.0F, 0.0F, 1.0F, 1.0F, 0.0F, 1.0F};
private float[] textureCoords270 = new float[]{0.0F, 1.0F, 0.0F, 0.0F, 1.0F, 0.0F, 1.0F, 1.0F};
private float[] textureCoords180 = new float[]{1.0F, 1.0F, 0.0F, 1.0F, 0.0F, 0.0F, 1.0F, 0.0F};
private float[] textureCoords90 = new float[]{1.0F, 0.0F, 1.0F, 1.0F, 0.0F, 1.0F, 0.0F, 0.0F};
// Rajawali texture used to render the Tango color camera.
private ATexture mTangoCameraTexture;
// Keeps track of whether the scene camera has been configured.
private boolean mSceneCameraConfigured;
private ScreenQuad mBackgroundQuad = new ScreenQuad();
public VideoRenderer(Context context) {
super(context);
}
@Override
protected void initScene() {
// Create a quad covering the whole background and assign a texture to it where the
// Tango color camera contents will be rendered.
Material tangoCameraMaterial = new Material();
tangoCameraMaterial.setColorInfluence(0);
mBackgroundQuad.getGeometry().setTextureCoords(textureCoords0);
// We need to use Rajawali's {@code StreamingTexture} since it sets up the texture
// for GL_TEXTURE_EXTERNAL_OES rendering.
mTangoCameraTexture =
new StreamingTexture("camera", (StreamingTexture.ISurfaceListener) null);
try {
tangoCameraMaterial.addTexture(mTangoCameraTexture);
mBackgroundQuad.setMaterial(tangoCameraMaterial);
} catch (ATexture.TextureException e) {
Log.e(TAG, "Exception creating texture for RGB camera contents", e);
}
getCurrentScene().addChildAt(mBackgroundQuad, 0);
// Add a directional light in an arbitrary direction.
DirectionalLight light = new DirectionalLight(1, 0.2, -1);
light.setColor(1, 1, 1);
light.setPower(0.8f);
light.setPosition(3, 2, 4);
getCurrentScene().addLight(light);
}
/**
* Updates the background texture's UV coordinates when the device orientation changes,
* i.e. between landscape and portrait mode.
*/
public void updateColorCameraTextureUv(int rotation) {
switch (rotation) {
case Surface.ROTATION_90:
mBackgroundQuad.getGeometry().setTextureCoords(textureCoords90);
break;
case Surface.ROTATION_180:
mBackgroundQuad.getGeometry().setTextureCoords(textureCoords180);
break;
case Surface.ROTATION_270:
mBackgroundQuad.getGeometry().setTextureCoords(textureCoords270);
break;
default:
mBackgroundQuad.getGeometry().setTextureCoords(textureCoords0);
break;
}
}
public void updateRenderCameraPose(TangoPoseData cameraPose) {
float[] rotation = cameraPose.getRotationAsFloats();
float[] translation = cameraPose.getTranslationAsFloats();
Quaternion quaternion = new Quaternion(rotation[3], rotation[0], rotation[1], rotation[2]);
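// Conjugating the quaternion is needed because Rajawali uses a left-handed
// convention for quaternions.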
getCurrentCamera().setRotation(quaternion.conjugate());
getCurrentCamera().setPosition(translation[0], translation[1], translation[2]);
}
/**
* It returns the ID currently assigned to the texture where the Tango color camera contents
* should be rendered.
*/
public int getTextureId() {
return mTangoCameraTexture == null ? -1 : mTangoCameraTexture.getTextureId();
}
/**
* We need to override this method to mark the camera for re-configuration (set proper
* projection matrix) since it will be reset by Rajawali on surface changes.
*/
@Override
public void onRenderSurfaceSizeChanged(GL10 gl, int width, int height) {
super.onRenderSurfaceSizeChanged(gl, width, height);
mSceneCameraConfigured = false;
}
public boolean isSceneCameraConfigured() {
return mSceneCameraConfigured;
}
/**
* Sets the projection matrix for the scene camera to match the parameters of the color camera.
*/
public void setProjectionMatrix(float[] matrixFloats) {
getCurrentCamera().setProjectionMatrix(new Matrix4(matrixFloats));
}
@Override
public void onOffsetsChanged(float xOffset, float yOffset,
float xOffsetStep, float yOffsetStep,
int xPixelOffset, int yPixelOffset) {
}
@Override
public void onTouchEvent(MotionEvent event) {
}
}
I have an animated background for all screens in the application. I created that animated background using a SurfaceView and a thread.
Here is my SurfaceView class.
public class FloatingCirclesPanel extends SurfaceView implements
SurfaceHolder.Callback {
private static final String TAG = FloatingCirclesPanel.class
.getSimpleName();
public MainThread thread;
private Circle circle_white1;
private Circle circle_blue1;
private Circle circle_white2;
private Circle circle_blue2;
private Bitmap scaled;
Canvas canvas = null;
private Display mdisp;
private Context context;
private RotateAnimation anim;
private Matrix matrix;
@SuppressLint("NewApi")
public FloatingCirclesPanel(Context context) {
super(context);
this.context = context;
matrix = new Matrix();
// adding the callback (this) to the surface holder to intercept events
getHolder().addCallback(this);
// create droid and load bitmap
circle_white1 = new Circle(BitmapFactory.decodeResource(getResources(),
R.drawable.circle), 50, 50);
mdisp = ((Activity) getContext()).getWindowManager()
.getDefaultDisplay();
Point mdispSize = new Point();
mdisp.getSize(mdispSize);
int maxX = mdispSize.x;
int maxY = mdispSize.y;
Bitmap b = scaleDown(BitmapFactory.decodeResource(getResources(),
R.drawable.circle_img), 300, true);
Bitmap b1 = scaleDown(BitmapFactory.decodeResource(getResources(),
R.drawable.circle_img), 130, true);
Bitmap b2 = scaleDown(
BitmapFactory.decodeResource(getResources(), R.drawable.circle),
100, true);
// droid1 = new Droid(BitmapFactory.decodeResource(getResources(),
// R.drawable.icon), 150, 150);
if (b != null) {
circle_blue1 = new Circle(b, 500, 500);
} else {
circle_blue1 = new Circle(BitmapFactory.decodeResource(
getResources(), R.drawable.circle_img), 500, 500);
}
if (b1 != null) {
circle_white2 = new Circle(b1, 800, 800);
} else {
circle_white2 = new Circle(BitmapFactory.decodeResource(
getResources(), R.drawable.circle), 800, 800);
}
if (b2 != null) {
circle_blue2 = new Circle(b2, 1500, 1500);
} else {
circle_blue2 = new Circle(BitmapFactory.decodeResource(
getResources(), R.drawable.circle_img), 1500, 1500);
}
// droid3 = new Droid(BitmapFactory.decodeResource(getResources(),
// R.drawable.icon), 150, 150);
// create the game loop thread
thread = new MainThread(getHolder(), this, context);
// make the GamePanel focusable so it can handle events
setFocusable(true);
}
public FloatingCirclesPanel(Context context, AttributeSet attrs,
int defStyle) {
super(context, attrs, defStyle);
// TODO Auto-generated constructor stub
}
public FloatingCirclesPanel(Context context, AttributeSet attrs) {
super(context, attrs);
// TODO Auto-generated constructor stub
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
}
public static Bitmap scaleDown(Bitmap realImage, float maxImageSize,
boolean filter) {
float ratio = Math.min((float) maxImageSize / realImage.getWidth(),
(float) maxImageSize / realImage.getHeight());
int width = Math.round((float) ratio * realImage.getWidth());
int height = Math.round((float) ratio * realImage.getHeight());
Bitmap newBitmap = Bitmap.createScaledBitmap(realImage, width, height,
filter);
return newBitmap;
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
// at this point the surface is created and
// we can safely start the game loop
if (thread != null && (thread.getState() == Thread.State.NEW)) {
thread.setRunning(true); // original line
thread.start(); // original line
}
// after a pause, start the thread again
else if (thread != null && thread.getState() == Thread.State.TERMINATED) {
thread = new MainThread(getHolder(), this, context);
thread.setRunning(true);
thread.start(); // start a new thread
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
Log.d(TAG, "Surface is being destroyed");
// tell the thread to shut down and wait for it to finish
// this is a clean shutdown
boolean retry = true;
while (retry) {
try {
thread.interrupt();
retry = false;
thread = null;
} catch (Exception e) {
// try again shutting down the thread
}
}
Log.d(TAG, "Thread was shut down cleanly");
}
public void render(Canvas canvas) {
if (canvas != null) {
Rect dest = new Rect(0, 0, getWidth(), getHeight());
Paint paint = new Paint(Paint.FILTER_BITMAP_FLAG);
paint.setFilterBitmap(true);
paint.setDither(true);
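// NOTE: decoding the background bitmap here happens on every frame, which is
// expensive; caching the decoded bitmap in a field would avoid the repeated
// allocation.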
Bitmap bitmap = BitmapFactory.decodeResource(getResources(),
R.drawable.splashscreen);
canvas.drawBitmap(bitmap, null, dest, paint);
// canvas.drawColor(Color.BLUE);
circle_white1.draw(canvas);
circle_blue1.draw(canvas);
circle_white2.draw(canvas);
circle_blue2.draw(canvas);
}
}
private void createAnimation(Canvas canvas) {
anim = new RotateAnimation(0, 360, getWidth() / 2, getHeight() / 2);
anim.setRepeatMode(Animation.RESTART);
anim.setRepeatCount(Animation.INFINITE);
anim.setDuration(10000L);
((Activity) context).runOnUiThread(new Runnable() {
@Override
public void run() {
startAnimation(anim);
}
});
}
/**
* This is the game update method. It iterates through all the objects and
* calls their update method if they have one, or calls the engine's specific
* update method.
*/
public void update() {
// check collision with right wall if heading right
if (circle_white1.getSpeed().getxDirection() == Speed.DIRECTION_RIGHT
&& circle_white1.getX() + circle_white1.getBitmap().getWidth()
/ 2 >= getWidth()) {
circle_white1.getSpeed().toggleXDirection();
}
// check collision with left wall if heading left
if (circle_white1.getSpeed().getxDirection() == Speed.DIRECTION_LEFT
&& circle_white1.getX() - circle_white1.getBitmap().getWidth()
/ 2 <= 0) {
circle_white1.getSpeed().toggleXDirection();
}
// check collision with bottom wall if heading down
if (circle_white1.getSpeed().getyDirection() == Speed.DIRECTION_DOWN
&& circle_white1.getY() + circle_white1.getBitmap().getHeight()
/ 2 >= getHeight()) {
circle_white1.getSpeed().toggleYDirection();
}
// check collision with top wall if heading up
if (circle_white1.getSpeed().getyDirection() == Speed.DIRECTION_UP
&& circle_white1.getY() - circle_white1.getBitmap().getHeight()
/ 2 <= 0) {
circle_white1.getSpeed().toggleYDirection();
}
// Update the lone droid
circle_white1.update();
if (circle_blue1.getSpeed().getxDirection() == Speed.DIRECTION_RIGHT
&& circle_blue1.getX() + circle_blue1.getBitmap().getWidth()
/ 2 >= getWidth()) {
circle_blue1.getSpeed().toggleXDirection();
}
// check collision with left wall if heading left
if (circle_blue1.getSpeed().getxDirection() == Speed.DIRECTION_LEFT
&& circle_blue1.getX() - circle_blue1.getBitmap().getWidth()
/ 2 <= 0) {
circle_blue1.getSpeed().toggleXDirection();
}
// check collision with bottom wall if heading down
if (circle_blue1.getSpeed().getyDirection() == Speed.DIRECTION_DOWN
&& circle_blue1.getY() + circle_blue1.getBitmap().getHeight()
/ 2 >= getHeight()) {
circle_blue1.getSpeed().toggleYDirection();
}
// check collision with top wall if heading up
if (circle_blue1.getSpeed().getyDirection() == Speed.DIRECTION_UP
&& circle_blue1.getY() - circle_blue1.getBitmap().getHeight()
/ 2 <= 0) {
circle_blue1.getSpeed().toggleYDirection();
}
// Update the lone droid1
circle_blue1.update();
if (circle_white2.getSpeed().getxDirection() == Speed.DIRECTION_RIGHT
&& circle_white2.getX() + circle_white2.getBitmap().getWidth()
/ 2 >= getWidth()) {
circle_white2.getSpeed().toggleXDirection();
}
// check collision with left wall if heading left
if (circle_white2.getSpeed().getxDirection() == Speed.DIRECTION_LEFT
&& circle_white2.getX() - circle_white2.getBitmap().getWidth()
/ 2 <= 0) {
circle_white2.getSpeed().toggleXDirection();
}
// check collision with bottom wall if heading down
if (circle_white2.getSpeed().getyDirection() == Speed.DIRECTION_DOWN
&& circle_white2.getY() + circle_white2.getBitmap().getHeight()
/ 2 >= getHeight()) {
circle_white2.getSpeed().toggleYDirection();
}
// check collision with top wall if heading up
if (circle_white2.getSpeed().getyDirection() == Speed.DIRECTION_UP
&& circle_white2.getY() - circle_white2.getBitmap().getHeight()
/ 2 <= 0) {
circle_white2.getSpeed().toggleYDirection();
}
// Update the lone droid2
circle_white2.update();
if (circle_blue2.getSpeed().getxDirection() == Speed.DIRECTION_RIGHT
&& circle_blue2.getX() + circle_blue2.getBitmap().getWidth()
/ 2 >= getWidth()) {
circle_blue2.getSpeed().toggleXDirection();
}
// check collision with left wall if heading left
if (circle_blue2.getSpeed().getxDirection() == Speed.DIRECTION_LEFT
&& circle_blue2.getX() - circle_blue2.getBitmap().getWidth()
/ 2 <= 0) {
circle_blue2.getSpeed().toggleXDirection();
}
// check collision with bottom wall if heading down
if (circle_blue2.getSpeed().getyDirection() == Speed.DIRECTION_DOWN
&& circle_blue2.getY() + circle_blue2.getBitmap().getHeight()
/ 2 >= getHeight()) {
circle_blue2.getSpeed().toggleYDirection();
}
// check collision with top wall if heading up
if (circle_blue2.getSpeed().getyDirection() == Speed.DIRECTION_UP
&& circle_blue2.getY() - circle_blue2.getBitmap().getHeight()
/ 2 <= 0) {
circle_blue2.getSpeed().toggleYDirection();
}
// Update the lone droid3
circle_blue2.update();
}
}
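As an aside, the four wall-collision blocks in update() differ only in which circle they act on; a small helper like this sketch (reusing only the Circle/Speed methods already used above) removes the duplication:
// Sketch: bounce any circle off the panel edges (same logic as update() above).
private void bounceOffWalls(Circle c) {
    Speed s = c.getSpeed();
    int halfW = c.getBitmap().getWidth() / 2;
    int halfH = c.getBitmap().getHeight() / 2;
    if ((s.getxDirection() == Speed.DIRECTION_RIGHT && c.getX() + halfW >= getWidth())
            || (s.getxDirection() == Speed.DIRECTION_LEFT && c.getX() - halfW <= 0)) {
        s.toggleXDirection();
    }
    if ((s.getyDirection() == Speed.DIRECTION_DOWN && c.getY() + halfH >= getHeight())
            || (s.getyDirection() == Speed.DIRECTION_UP && c.getY() - halfH <= 0)) {
        s.toggleYDirection();
    }
    c.update();
}
// update() then reduces to four calls:
// bounceOffWalls(circle_white1); bounceOffWalls(circle_blue1); ...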
I am adding that SurfaceView to my activity like below:
@Override
protected void onStart() {
super.onStart();
View childView = getLayoutInflater().inflate(R.layout.main, null);
circleAnimation = new FloatingCirclesPanel(this);
setContentView(circleAnimation);
final ImageView image = (ImageView) childView.findViewById(R.id.logout);
...
}
Now the problem is that the app runs slowly while navigating through screens in the app.
If I add that SurfaceView code in onStart/onResume, all the views are rebuilt twice, causing performance issues.
How can I improve performance?
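One mitigation to sketch here (an assumption, not from the original post): create the panel once in onCreate() instead of re-creating it in onStart()/onResume(); the surfaceCreated()/surfaceDestroyed() callbacks above already restart MainThread as the surface comes and goes.
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Create the animated panel once per activity; the SurfaceHolder
    // callbacks handle the thread lifecycle across pause/resume.
    circleAnimation = new FloatingCirclesPanel(this);
    setContentView(circleAnimation);
}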
I've been battling with this feature for a couple of days now...
It seems that the camera is ignoring (?) the focus areas I've defined. Here are snippets of the code:
Focusing:
protected void focusOnTouch(MotionEvent event) {
if (camera != null) {
Rect rect = calculateFocusArea(event.getX(), event.getY());
Parameters parameters = camera.getParameters();
parameters.setFocusMode(Parameters.FOCUS_MODE_AUTO);
parameters.setFocusAreas(Lists.newArrayList(new Camera.Area(rect, 500)));
camera.setParameters(parameters);
camera.autoFocus(this);
}
}
Focus area calculation:
private Rect calculateFocusArea(float x, float y) {
int left = clamp(Float.valueOf((x / getSurfaceView().getWidth()) * 2000 - 1000).intValue(), focusAreaSize);
int top = clamp(Float.valueOf((y / getSurfaceView().getHeight()) * 2000 - 1000).intValue(), focusAreaSize);
return new Rect(left, top, left + focusAreaSize, top + focusAreaSize);
}
A couple of log events from Camera.AutoFocusCallback#onAutoFocus:
Log.d(TAG, String.format("Auto focus success=%s. Focus mode: '%s'. Focused on: %s",
focused,
camera.getParameters().getFocusMode(),
camera.getParameters().getFocusAreas().get(0).rect.toString()));
08-27 11:19:42.240: DEBUG/MyCameraActivity(26268): Auto focus success=true. Focus mode: 'auto'. Focused on: Rect(-109, 643 - -13, 739)
08-27 11:19:55.514: DEBUG/MyCameraActivity(26268): Auto focus success=true. Focus mode: 'auto'. Focused on: Rect(20, 457 - 116, 553)
08-27 11:19:58.037: DEBUG/MyCameraActivity(26268): Auto focus success=true. Focus mode: 'auto'. Focused on: Rect(-159, 536 - -63, 632)
08-27 11:20:00.129: DEBUG/MyCameraActivity(26268): Auto focus success=true. Focus mode: 'auto'. Focused on: Rect(-28, 577 - 68, 673)
Visually it looks like the focus succeeds on the logged area, but then it suddenly loses focus and refocuses on the center (0, 0), or on whatever takes up the bigger part of the SurfaceView.
The focusAreaSize used in the calculation is about 210px (96dp).
Testing on HTC One where Camera.getParameters().getMaxNumFocusAreas() is 1.
Initial focus mode (before first tap) is set to FOCUS_MODE_CONTINUOUS_PICTURE.
Am I doing something wrong here?
Tinkering with Camera.Area rectangle size or weight doesn't show any noticeable effect.
My problem was much simpler :)
All I had to do was cancel the previously requested autofocus. Basically, the correct order of actions is this:
protected void focusOnTouch(MotionEvent event) {
if (camera != null) {
camera.cancelAutoFocus();
Rect focusRect = calculateTapArea(event.getX(), event.getY(), 1f);
Rect meteringRect = calculateTapArea(event.getX(), event.getY(), 1.5f);
Parameters parameters = camera.getParameters();
parameters.setFocusMode(Parameters.FOCUS_MODE_AUTO);
parameters.setFocusAreas(Lists.newArrayList(new Camera.Area(focusRect, 1000)));
if (meteringAreaSupported) {
parameters.setMeteringAreas(Lists.newArrayList(new Camera.Area(meteringRect, 1000)));
}
camera.setParameters(parameters);
camera.autoFocus(this);
}
}
Update
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
...
Parameters p = camera.getParameters();
if (p.getMaxNumMeteringAreas() > 0) {
this.meteringAreaSupported = true;
}
...
}
/**
* Convert touch position x:y to {@link Camera.Area} position -1000:-1000 to 1000:1000.
*/
private Rect calculateTapArea(float x, float y, float coefficient) {
int areaSize = Float.valueOf(focusAreaSize * coefficient).intValue();
int left = clamp((int) x - areaSize / 2, 0, getSurfaceView().getWidth() - areaSize);
int top = clamp((int) y - areaSize / 2, 0, getSurfaceView().getHeight() - areaSize);
RectF rectF = new RectF(left, top, left + areaSize, top + areaSize);
matrix.mapRect(rectF);
return new Rect(Math.round(rectF.left), Math.round(rectF.top), Math.round(rectF.right), Math.round(rectF.bottom));
}
private int clamp(int x, int min, int max) {
if (x > max) {
return max;
}
if (x < min) {
return min;
}
return x;
}
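For completeness, one way to drive focusOnTouch from the preview surface (a sketch; getSurfaceView() returning the camera preview view is an assumption based on the snippets above):
getSurfaceView().setOnTouchListener(new View.OnTouchListener() {
    @Override
    public boolean onTouch(View v, MotionEvent event) {
        if (event.getAction() == MotionEvent.ACTION_DOWN) {
            // Cancels any in-flight autofocus, then refocuses on the tap.
            focusOnTouch(event);
        }
        return true;
    }
});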
Besides setting:
parameters.setFocusMode(Parameters.FOCUS_MODE_AUTO);
you need to set:
parameters.setFocusMode(Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
if you want real 'live' auto-focus. It is also good to check the available focus modes first:
List<String> focusModes = parameters.getSupportedFocusModes();
Log.d(TAG, "focusModes=" + focusModes);
if (focusModes.contains(Parameters.FOCUS_MODE_CONTINUOUS_PICTURE))
parameters.setFocusMode(Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
On a Samsung S6 you must set this with a little delay (~500 ms) after starting the camera preview.
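A sketch of that delayed call (the Handler approach and the exact 500 ms value are assumptions; `camera` is the open Camera instance):
// Apply the continuous-picture focus mode shortly after the preview starts.
new Handler(Looper.getMainLooper()).postDelayed(new Runnable() {
    @Override
    public void run() {
        Camera.Parameters p = camera.getParameters();
        if (p.getSupportedFocusModes()
                .contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
            p.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
            camera.setParameters(p);
        }
    }
}, 500);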
I had this problem today :/
And after hours of struggling, I found the solution!
It's strange, but it appears that setting the focus mode to "macro" right before setting the focus areas solved the problem ;)
params.setFocusMode(Camera.Parameters.FOCUS_MODE_MACRO);
params.setFocusAreas(focusAreas);
mCamera.setParameters(params);
I have a Galaxy S3 with Android 4.1.2.
I hope this works for you too :)
Use FOCUS_MODE_FIXED:
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
mCamera = Camera.open(mCameraId);
} else {
mCamera = Camera.open();
}
cameraParams = mCamera.getParameters();
// set the focus mode
cameraParams.setFocusMode(Camera.Parameters.FOCUS_MODE_FIXED);
// set Camera parameters
mCamera.setParameters(cameraParams);
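As with the other focus modes, it may be safer to confirm support before applying it, e.g.:
// Only apply the fixed focus mode if the device reports it.
if (cameraParams.getSupportedFocusModes()
        .contains(Camera.Parameters.FOCUS_MODE_FIXED)) {
    cameraParams.setFocusMode(Camera.Parameters.FOCUS_MODE_FIXED);
    mCamera.setParameters(cameraParams);
}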
Hi, try the code below; copy it and adapt it for yourself.
public class CameraActivity extends AppCompatActivity implements Camera.AutoFocusCallback {
private Camera camera;
private FrameLayout fl_camera_preview;
...
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView( R.layout.camera_activity );
//this View, is lens camera
fl_camera_preview = findViewById( R.id.fl_camera_preview );
Button someButtonCapturePicture = findViewById(R.id.someButtonCapturePicture);
pictureCall = getPictureCallback();
//check camera access
if ( getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA) ) {
if ( safeCameraOpen(0) ) {
cameraPreview = new CameraPreview( this, camera );
fl_camera_preview.addView( cameraPreview );
someButtonCapturePicture.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
camera.takePicture(null, null, pictureCall);
}
});
} else {
Log.w(TAG, "getCameraInstance: Camera is not available (in use or does not exist)." );
}
}
}
private boolean safeCameraOpen(int id) {
boolean qOpened = false;
try {
camera = Camera.open( id );
// set some parameters
Camera.Parameters par = camera.getParameters();
List<Camera.Size> supportedPreviewSizes = par.getSupportedPreviewSizes();
for ( Camera.Size cs : supportedPreviewSizes ) {
if ( cs.height == 720 ) {
par.setPictureSize(cs.width, cs.height);
par.setPreviewSize(cs.width, cs.height);
break;
}
}
camera.setParameters(par);
qOpened = ( camera != null );
} catch (Exception e) {
Log.e(TAG, "safeCameraOpen: failed to open Camera");
e.printStackTrace();
}
return qOpened;
}
public void touchFocusCamera( final Rect touchFocusRect ) {
//Convert touch coordinates (view width/height) to the -1000..+1000 range
final Rect targetFocusRect = new Rect(
touchFocusRect.left * 2000/fl_camera_preview.getWidth() - 1000,
touchFocusRect.top * 2000/fl_camera_preview.getHeight() - 1000,
touchFocusRect.right * 2000/fl_camera_preview.getWidth() - 1000,
touchFocusRect.bottom * 2000/fl_camera_preview.getHeight() - 1000);
final List<Camera.Area> focusList = new ArrayList<Camera.Area>();
Camera.Area focusArea = new Camera.Area(targetFocusRect, 1000);
focusList.add(focusArea);
Camera.Parameters para = camera.getParameters();
List<String> supportedFocusModes = para.getSupportedFocusModes();
if ( supportedFocusModes != null &&
supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO) ) {
try {
para.setFocusAreas(focusList);
para.setMeteringAreas(focusList);
camera.setParameters(para);
camera.autoFocus( CameraActivity.this );
} catch (Exception e) {
Log.e(TAG, "handleFocus: " + e.getMessage() );
}
}
}
@Override
public void onAutoFocus(boolean success, Camera camera) {
if ( success ) {
camera.cancelAutoFocus();
}
float focusDistances[] = new float[3];
camera.getParameters().getFocusDistances(focusDistances);
}
/**
* Builds the picture callback used when a photo is taken.
* @return picture callback
*/
private Camera.PictureCallback getPictureCallback() {
Camera.PictureCallback picture = new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
Log.i(TAG, "onPictureTaken: size bytes photo: " + data.length );
}
};
return picture;
}
...
}
//And SurfaceView with Callback
public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback {
private static final String TAG = "CameraPreview";
SurfaceHolder holder;
Camera camera;
public CameraPreview( Context context, Camera _camera ) {
super(context);
camera = _camera;
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
holder = getHolder();
holder.addCallback(this);
// deprecated setting, but required on Android versions prior to 3.0
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, now tell the camera where to draw the preview.
try {
camera.setPreviewDisplay(holder);
camera.startPreview();
} catch (IOException e) {
Log.d(TAG, "Error setting camera preview: " + e.getMessage());
}
}
@Override
public boolean onTouchEvent(MotionEvent event) {
if( event.getAction() == MotionEvent.ACTION_DOWN ) {
// Get the pointer's current position
float x = event.getX();
float y = event.getY();
float touchMajor = event.getTouchMajor();
float touchMinor = event.getTouchMinor();
Rect touchRect = new Rect(
(int)(x - touchMajor/2),
(int)(y - touchMinor/2),
(int)(x + touchMajor/2),
(int)(y + touchMinor/2));
((CameraActivity)getContext())
.touchFocusCamera( touchRect );
}
return true;
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
// If your preview can change or rotate, take care of those events here.
// Make sure to stop the preview before resizing or reformatting it.
if (this.holder.getSurface() == null) {
// preview surface does not exist
return;
}
// stop preview before making changes
try {
camera.stopPreview();
} catch (Exception e) {
// ignore: tried to stop a non-existent preview
}
// set preview size and make any resize, rotate or
// reformatting changes here
// start preview with new settings
try {
camera.setPreviewDisplay(this.holder);
camera.startPreview();
} catch (Exception e) {
Log.d(TAG, "Error starting camera preview: " + e.getMessage());
}
}
...
}
I want to control the box with 2 fingers, like below:
I have basic MouseJoint implementation:
public class MyMouseJoint{
OrthographicCamera cam;
World world;
Body groundBody ;
public MouseJoint mouseJoint = null;
Body hitBody = null;
Vector2 target = new Vector2();
Vector3 testPoint = new Vector3();
QueryCallback callback = new QueryCallback() {
@Override
public boolean reportFixture (Fixture fixture) {
// if the hit fixture's body is the ground body we ignore it
if (fixture.getBody() == groundBody) return true;
// if the hit point is inside the fixture of the body
// we report it
if (fixture.testPoint(testPoint.x, testPoint.y)) {
hitBody = fixture.getBody();
return false;
} else
return true;
}
};
public MyMouseJoint(OrthographicCamera cam, World world, Body groundBody){
this.cam=cam;
this.world=world;
this.groundBody = groundBody;
}
//USE THIS FUNCTION IN touchDown
public void createMouseJoint(float x, float y){
// translate the mouse coordinates to world coordinates
testPoint.set(x, y, 0);
cam.unproject(testPoint);
// ask the world which bodies are within the given
// bounding box around the mouse pointer
hitBody = null;
world.QueryAABB(callback, testPoint.x - 0.1f, testPoint.y - 0.1f, testPoint.x + 0.1f, testPoint.y + 0.1f);
if (hitBody != null) {
MouseJointDef def = new MouseJointDef();
def.bodyA = groundBody;
def.bodyB = hitBody;
def.collideConnected = true;
def.target.set(testPoint.x, testPoint.y);
def.maxForce = 10000.0f * hitBody.getMass();
def.frequencyHz=100;
def.dampingRatio=0;
mouseJoint = (MouseJoint)world.createJoint(def);
hitBody.setAwake(true);
}
}
//USE THIS FUNCTION IN touchDragged
public void dragMouseJoint(float x, float y){
if (mouseJoint != null) {
cam.unproject(testPoint.set(x, y, 0));
mouseJoint.setTarget(target.set(testPoint.x, testPoint.y));
}
}
//USE THIS FUNCTION IN touchUp
public void releaseMouseJoint(){
if (mouseJoint != null) {
world.destroyJoint(mouseJoint);
mouseJoint = null;
}
}
}
How can I modify this class to use 2 fingers?
What you can do is create an array of Body and index hitBody[] by the pointer of your touch. You can change the above code to the following.
In the following functions, the variable pointer is the pointer index of your current touch.
public class MyMouseJoint{
OrthographicCamera cam;
World world;
Body groundBody ;
public MouseJoint mouseJoint[] = new MouseJoint[2];
Body hitBody[] = new Body[2];
Vector2 target = new Vector2();
Vector3 testPoint = new Vector3();
Body tempBody;
QueryCallback callback = new QueryCallback() {
@Override
public boolean reportFixture (Fixture fixture) {
// if the hit fixture's body is the ground body we ignore it
if (fixture.getBody() == groundBody) return true;
// if the hit point is inside the fixture of the body
// we report it
if (fixture.testPoint(testPoint.x, testPoint.y)) {
tempBody = fixture.getBody();
return false;
} else
return true;
}
};
public MyMouseJoint(OrthographicCamera cam, World world, Body groundBody){
this.cam=cam;
this.world=world;
this.groundBody = groundBody;
}
//USE THIS FUNCTION IN touchDown
public void createMouseJoint(float x, float y,int pointer){
// translate the mouse coordinates to world coordinates
testPoint.set(x, y, 0);
cam.unproject(testPoint);
// ask the world which bodies are within the given
// bounding box around the mouse pointer
hitBody[pointer] = null;
tempBody = null; // reset, so a miss doesn't reuse the previous query's hit
world.QueryAABB(callback, testPoint.x - 0.1f, testPoint.y - 0.1f, testPoint.x + 0.1f, testPoint.y + 0.1f);
hitBody[pointer] = tempBody;
if (hitBody[pointer] != null) {
MouseJointDef def = new MouseJointDef();
def.bodyA = groundBody;
def.bodyB = hitBody[pointer];
def.collideConnected = true;
def.target.set(testPoint.x, testPoint.y);
def.maxForce = 10000.0f * hitBody[pointer].getMass();
def.frequencyHz=100;
def.dampingRatio=0;
mouseJoint[pointer] = (MouseJoint)world.createJoint(def);
hitBody[pointer].setAwake(true);
}
}
//USE THIS FUNCTION IN touchDragged
public void dragMouseJoint(float x, float y,int pointer){
if (mouseJoint[pointer] != null) {
cam.unproject(testPoint.set(x, y, 0));
mouseJoint[pointer].setTarget(target.set(testPoint.x, testPoint.y));
}
}
//USE THIS FUNCTION IN touchUp
public void releaseMouseJoint(int pointer){
if (mouseJoint[pointer] != null) {
world.destroyJoint(mouseJoint[pointer]);
mouseJoint[pointer] = null;
}
}
}
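A minimal way to wire these methods up in libGDX (a sketch; TwoFingerInput is a hypothetical name, and pointers beyond the first two are ignored to match the two-element arrays above):
public class TwoFingerInput extends InputAdapter {
    private final MyMouseJoint joints;
    public TwoFingerInput(MyMouseJoint joints) { this.joints = joints; }
    @Override
    public boolean touchDown(int screenX, int screenY, int pointer, int button) {
        if (pointer < 2) joints.createMouseJoint(screenX, screenY, pointer);
        return true;
    }
    @Override
    public boolean touchDragged(int screenX, int screenY, int pointer) {
        if (pointer < 2) joints.dragMouseJoint(screenX, screenY, pointer);
        return true;
    }
    @Override
    public boolean touchUp(int screenX, int screenY, int pointer, int button) {
        if (pointer < 2) joints.releaseMouseJoint(pointer);
        return true;
    }
}
// Register it, e.g. in create():
// Gdx.input.setInputProcessor(new TwoFingerInput(myMouseJoint));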