Integrate Compass With Maps - Android

I'm working with Android Maps and I need to add compass functionality: when I click a compass button, it shows a separate compass.
Can anyone point me in the right direction or refer me to a tutorial? I need to add the compass separately.
Thanks in advance for reading and replying.

The scenario in my case is to use the compass separately: when the compass button is clicked, it will show
the distance from the current location (my location) to the destination location, and
a compass with its head pointing toward north.
To achieve all of this I used the following approach; I hope it helps anyone who has the same scenario.
Here is the code snippet of my class. It computes the distance between two points around the globe and shows an image as a compass with its head pointing toward north.
public class MapController extends MapActivity implements SensorEventListener {
private static final double Radius = 6371.0; // Earth's mean radius in km, used by CalculationByDistance()
public static GeoPoint Mypoint, MapPoint;
double km, meter;
public static int kmInDec, meterInDec;
private SensorManager mSensorManager;
private Sensor mAccelerometer, mField;
private float[] mGravity, mGeomagnetic; // latest accelerometer / magnetometer readings
private ImageView compasView; // the compass needle image
private TextView Distance; // the distance read-out
Location myLoc, mapLoc;
double myLat, myLong, mapLat, mapLong;
public static double lat, lng;
public static double mylat = Double.parseDouble(Constants.DEVICE_OBJ.lat); // e.g. 31.567615
public static double mylon = Double.parseDouble(Constants.DEVICE_OBJ.lng); // e.g. 74.360962
com.google.android.maps.MapController mapController;
public static boolean enabled = false;
@Override
protected void onCreate(Bundle icicle) {
super.onCreate(icicle);
setContentView(R.layout.map_layout);
Log.v(Constants.TAG, "In Map Controller");
fetchUI();
}
public void fetchUI() {
// Bind the views (these IDs are placeholders for whatever map_layout actually defines)
compasView = (ImageView) findViewById(R.id.compass_image);
Distance = (TextView) findViewById(R.id.distance_text);
myLoc = new Location("Test");
mapLoc = new Location("Test");
mSensorManager = (SensorManager) getSystemService(SENSOR_SERVICE);
mAccelerometer = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
mField = mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD);
myLat = 31.567615; // hard-coded current location; in production this should come from a location provider
myLong = 74.360962;
mapLat = (MapPoint.getLatitudeE6()) / 1E6;
mapLong = (MapPoint.getLongitudeE6()) / 1E6;
myLoc.setLatitude(31.567615);
myLoc.setLongitude(74.360962);
mapLoc.setLatitude(mapLat);
mapLoc.setLongitude(mapLong);
}
@Override
protected void onResume() {
super.onResume();
mSensorManager.registerListener(this, mAccelerometer, SensorManager.SENSOR_DELAY_UI);
mSensorManager.registerListener(this, mField, SensorManager.SENSOR_DELAY_UI);
}
@Override
protected void onPause() {
super.onPause();
mSensorManager.unregisterListener(this);
}
@Override
protected boolean isRouteDisplayed() {
return false;
}
// Method to calculate the distance between two geo points around the globe
public double CalculationByDistance(GeoPoint StartP, GeoPoint EndP) {
double lat1 = StartP.getLatitudeE6() / 1E6;
double lat2 = EndP.getLatitudeE6() / 1E6;
double lon1 = StartP.getLongitudeE6() / 1E6;
double lon2 = EndP.getLongitudeE6() / 1E6;
double dLat = Math.toRadians(lat2 - lat1);
double dLon = Math.toRadians(lon2 - lon1);
double a = Math.sin(dLat / 2) * Math.sin(dLat / 2) +
Math.cos(Math.toRadians(lat1)) * Math.cos(Math.toRadians(lat2)) *
Math.sin(dLon / 2) * Math.sin(dLon / 2);
double c = 2 * Math.asin(Math.sqrt(a));
double valueResult = Radius * c; // great-circle distance in kilometers
km = valueResult;
kmInDec = (int) Math.floor(km); // whole kilometers
meter = (km - kmInDec) * 1000; // remainder of the last kilometer, in meters
meterInDec = (int) Math.round(meter);
Log.i("Radius Value", "" + valueResult + " KM " + kmInDec + " Meter " + meterInDec);
return Radius * c;
}
@Override
public void onAccuracyChanged(Sensor sensor, int accuracy) {
// not used
}
@Override
public void onSensorChanged(SensorEvent event) {
// Combine the latest accelerometer and magnetometer readings into a heading;
// reading event.values[0] directly is only meaningful for the deprecated
// TYPE_ORIENTATION sensor, not for the two sensors registered above.
if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) mGravity = event.values.clone();
else if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD) mGeomagnetic = event.values.clone();
if (myLoc == null || mGravity == null || mGeomagnetic == null) return;
float[] rotation = new float[9];
if (!SensorManager.getRotationMatrix(rotation, null, mGravity, mGeomagnetic)) return;
float[] orientation = new float[3];
SensorManager.getOrientation(rotation, orientation);
float azimuth = (float) Math.toDegrees(orientation[0]); // degrees east of magnetic north
if (azimuth < 0) azimuth += 360;
float baseAzimuth = azimuth;
GeomagneticField geoField = new GeomagneticField(Double
.valueOf(myLoc.getLatitude()).floatValue(), Double
.valueOf(myLoc.getLongitude()).floatValue(),
Double.valueOf(myLoc.getAltitude()).floatValue(),
System.currentTimeMillis());
azimuth += geoField.getDeclination(); // true heading = magnetic heading + declination
// Store the bearingTo in the bearTo variable
float bearTo = myLoc.bearingTo(mapLoc);
// If the bearTo is smaller than 0, add 360 to get the rotation clockwise.
if (bearTo < 0) {
bearTo = bearTo + 360;
}
//This is where we choose to point it
float direction = bearTo - azimuth;
// If the direction is smaller than 0, add 360 to get the rotation clockwise.
if (direction < 0) {
direction = direction + 360;
}
rotateImageView(compasView, R.drawable.app_icon, direction);
// Work out a cardinal-direction label from the raw azimuth
String bearingText = "N";
if ((360 >= baseAzimuth && baseAzimuth >= 337.5) || (0 <= baseAzimuth && baseAzimuth <= 22.5))
bearingText = "N";
else if (baseAzimuth > 22.5 && baseAzimuth < 67.5) bearingText = "NE";
else if (baseAzimuth >= 67.5 && baseAzimuth <= 112.5) bearingText = "E";
else if (baseAzimuth > 112.5 && baseAzimuth < 157.5) bearingText = "SE";
else if (baseAzimuth >= 157.5 && baseAzimuth <= 202.5) bearingText = "S";
else if (baseAzimuth > 202.5 && baseAzimuth < 247.5) bearingText = "SW";
else if (baseAzimuth >= 247.5 && baseAzimuth <= 292.5) bearingText = "W";
else if (baseAzimuth > 292.5 && baseAzimuth < 337.5) bearingText = "NW";
else bearingText = "?";
Distance.setText(kmInDec + " km " + meterInDec + " m");
}
// Rotates the given ImageView to show the compass heading
private void rotateImageView(ImageView imageView, int drawable, float rotate) {
// Decode the drawable into a bitmap
Bitmap bitmapOrg = BitmapFactory.decodeResource(getResources(),
drawable);
// Get the width/height of the drawable
int width = bitmapOrg.getWidth(), height = bitmapOrg.getHeight();
// Initialize a new Matrix
Matrix matrix = new Matrix();
// Normalize and rotate about the image center
rotate = rotate % 360;
matrix.postRotate(rotate, width / 2f, height / 2f);
// Create the rotated bitmap and set it on the ImageView
Bitmap rotatedBitmap = Bitmap.createBitmap(bitmapOrg, 0, 0, width, height, matrix, true);
imageView.setImageDrawable(new BitmapDrawable(getResources(), rotatedBitmap));
imageView.setScaleType(ScaleType.CENTER);
}
}
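As an aside, if you only need the distance between the two points, the framework can compute it directly; a minimal sketch using android.location.Location:
// Alternative to the hand-rolled haversine above; results[0] is the
// distance in meters along the WGS84 ellipsoid.
float[] results = new float[1];
Location.distanceBetween(myLoc.getLatitude(), myLoc.getLongitude(),
mapLoc.getLatitude(), mapLoc.getLongitude(), results);
float meters = results[0];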

Use this guide:
Google Maps Tutorial
Search for "compass", and try launching a new activity tailored to your needs when the compass (an overlay) is pressed.
Anyway, the tutorial is very comprehensive; I believe it will cover most of your needs =).
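For example, here is a minimal sketch of that idea; CompassActivity, compassButton, and the extra keys are hypothetical names you would replace with your own:
// Launch a separate compass screen when the compass button/overlay is tapped.
compassButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent(MapController.this, CompassActivity.class);
// Pass the destination so the compass screen can compute distance and bearing.
intent.putExtra("dest_lat", mapLat);
intent.putExtra("dest_lng", mapLong);
startActivity(intent);
}
});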

Related

ARCore OpenGL ES Moving Object using onScroll

I'm attempting to add support for moving objects around with single-finger scrolling (see Google AR Stickers for an example). I'm using native ARCore/OpenGL, based originally on the ARCore examples. When you slide one finger, I want to move the object in 3D space along the X and Z axes.
I can easily create the movement using translation; however, it performs the movement relative to the original camera orientation. If you physically move the phone/camera a few lateral steps, the finger movements no longer match what a user would expect.
So I changed my code to map the swiped distanceX and distanceY onto the X and Z coordinates, depending on the angle between the original camera starting point and the current camera point.
The issue I'm running into is determining the angle through which the camera has moved. I've been looking at the values from the camera view matrix:
camera.getViewMatrix(viewmtx, 0);
But the X, Y, and Z coordinates always read 0. I'm assuming this is because it always makes the camera the origin? Does anyone know a way to calculate the angle of rotation of the camera from a 3D object using the ARCore/OpenGL ES libraries? The red angle in my illustration (from a top-down perspective, drawing not shown here) is what I'm trying to get.
Here's my code for your reference:
// Handle Gestures - Single touch for Translation and Placement
mGestureDetector = new GestureDetector(this, new GestureDetector.SimpleOnGestureListener()
{
@Override
public boolean onSingleTapUp(MotionEvent e)
{
onSingleTap(e);
return true;
}
@Override
public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY)
{
if (e2.getPointerCount() == 1 && ...)
{
double angle = findCameraAngleFromOrigin();
double speed = 0.005d;
if (angle / 90d < 1) //Quadrant 1
{
double transX = -(distanceY * (angle / 90d)) + (distanceX * ((90d - angle) / 90d));
double transY = (distanceY * ((90d - angle) / 90d)) + (distanceX * (angle / 90d));
mTranslationX.set(mTranslationX.size() - 1, (float) (mTranslationX.get(mTranslationX.size() - 1) + (transX * -speed)));
mTranslationZ.set(mTranslationZ.size() - 1, (float) (mTranslationZ.get(mTranslationZ.size() - 1) + (transY * -speed)));
}
else if (angle / 90d < 2) //Quadrant 2
{
angle -= 90d;
double transX = (distanceX * (angle / 90d)) + (distanceY * ((90d - angle) / 90d));
double transY = (-distanceX * ((90d - angle) / 90d)) + (distanceY * (angle / 90d));
mTranslationX.set(mTranslationX.size() - 1, (float) (mTranslationX.get(mTranslationX.size() - 1) + (transX * speed)));
mTranslationZ.set(mTranslationZ.size() - 1, (float) (mTranslationZ.get(mTranslationZ.size() - 1) + (transY * speed)));
}
else if (angle / 90d < 3) //Quadrant 3
{
angle -= 180d;
double transX = (distanceY * (angle / 90d)) + (-distanceX * ((90d - angle) / 90d));
double transY = (-distanceY * ((90d - angle) / 90d)) + (-distanceX * (angle / 90d));
mTranslationX.set(mTranslationX.size() - 1, (float) (mTranslationX.get(mTranslationX.size() - 1) + (transX * -speed)));
mTranslationZ.set(mTranslationZ.size() - 1, (float) (mTranslationZ.get(mTranslationZ.size() - 1) + (transY * -speed)));
}
else //Quadrant 4
{
angle -= 270d;
double transX = (-distanceX * (angle / 90d)) + (-distanceY * ((90d - angle) / 90d));
double transY = (distanceX * ((90d - angle) / 90d)) + (-distanceY * (angle / 90d));
mTranslationX.set(mTranslationX.size() - 1, (float) (mTranslationX.get(mTranslationX.size() - 1) + (transX * speed)));
mTranslationZ.set(mTranslationZ.size() - 1, (float) (mTranslationZ.get(mTranslationZ.size() - 1) + (transY * speed)));
}
return true;
}
return false;
}
});
EDIT: Updated code
I wasn't able to figure out how to find the angle from the camera positions, but I was able to at least find the difference in the phone's rotation, which is closer. I accomplished it with the code below. I'm still not happy with the results, so I'll post updates when I find a more effective solution.
private double getDegree(double value1, double value2)
{
double firstAngle = value1 * 90;
double secondAngle = value2 * 90;
if (secondAngle >= 0 && firstAngle >= 0)
{
Log.d(TAG, "FIRST QUADRANT");
return firstAngle; // first quadrant
}
else if (secondAngle < 0 && firstAngle >= 0)
{
Log.d(TAG, "SECOND QUADRANT");
return 90 + (90 - firstAngle); //second quadrant
}
else if (secondAngle < 0 && firstAngle < 0)
{
Log.d(TAG, "THIRD QUADRANT");
return 180 - firstAngle; //third quadrant
}
else
{
Log.d(TAG, "FOURTH QUADRANT");
return 270 + (90 + firstAngle); //fourth quadrant
}
}
private double findCameraAngleFromOrigin()
{
double angle = getDegree(mCurrentCameraMatrix[2], mCurrentCameraMatrix[0]) - getDegree(mOriginCameraMatrix[2], mOriginCameraMatrix[0]);
if (angle < 0)
return angle + 360;
return angle;
}
@Override
public void onDrawFrame(GL10 gl)
{
...
//When creating a new object
Anchor anchor = hit.createAnchor();
mAnchors.add(anchor);
camera.getDisplayOrientedPose().toMatrix( mOriginCameraMatrix, 0);
//During each draw frame
camera.getDisplayOrientedPose().toMatrix( mCurrentCameraMatrix, 0);
int ac = 0;
for (Anchor anchor : mAnchors)
{
if (anchor.getTrackingState() != TrackingState.TRACKING)
{
continue;
}
// Get the current pose of an Anchor in world space. The Anchor pose is updated
// during calls to session.update() as ARCore refines its estimate of the world.
anchor.getPose().toMatrix(mAnchorMatrix, 0);
// Update and draw the model
if (mModelSet)
{
if (mScaleFactors.size() <= ac)
{
mScaleFactors.add(1.0f);
}
if (mRotationThetas.size() <= ac)
{
mRotationThetas.add(0.0f);
}
if (mTranslationX.size() <= ac)
{
mTranslationX.add(viewmtx[3]);
}
if (mTranslationZ.size() <= ac)
{
mTranslationZ.add(viewmtx[11]);
}
translateMatrix(mTranslationX.get(ac), 0, mTranslationZ.get(ac));
rotateYAxisMatrix(mRotationThetas.get(ac));
ObjectRenderer virtualObject = mVirtualObjects.get(mAnchorReferences.get(ac));
virtualObject.updateModelMatrix(mAnchorMatrix, mScaleFactors.get(ac));
virtualObject.draw(viewmtx, projmtx, lightIntensity);
}
ac++;
}
}
This is kind of hard to track, so I'll probably just post the entire class once I feel more comfortable with the implementation and have cleaned up the code.
public class HelloArActivity extends AppCompatActivity implements GLSurfaceView.Renderer {
private static final String TAG = HelloArActivity.class.getSimpleName();
// Rendering. The Renderers are created here, and initialized when the GL surface is created.
private GLSurfaceView surfaceView;
private boolean installRequested;
private Session session;
private GestureDetector gestureDetector;
private Snackbar messageSnackbar;
private DisplayRotationHelper displayRotationHelper;
private final BackgroundRenderer backgroundRenderer = new BackgroundRenderer();
private final ObjectRenderer virtualObject = new ObjectRenderer();
private final ObjectRenderer virtualObjectShadow = new ObjectRenderer();
private final PlaneRenderer planeRenderer = new PlaneRenderer();
private final PointCloudRenderer pointCloud = new PointCloudRenderer();
private int mCurrent = -1;
private final List<ObjectRenderer> mVirtualObjects = new ArrayList<ObjectRenderer>();
// Temporary matrix allocated here to reduce number of allocations for each frame.
private final float[] anchorMatrix = new float[16];
//Rotation, Moving, & Scaling
private final List<Float> mRotationThetas = new ArrayList<Float>();
private GestureDetector mGestureDetector;
private ScaleGestureDetector mScaleDetector;
private RotationGestureDetector mRotationDetector;
private final List<Float> mScaleFactors = new ArrayList<Float>();
private final List<Float> mTranslationX = new ArrayList<Float>();
private final List<Float> mTranslationZ = new ArrayList<Float>();
private final float[] mOriginCameraMatrix = new float[16];
private final float[] mCurrentCameraMatrix = new float[16];
private boolean mModelSet = false;
// Tap handling and UI.
private final ArrayBlockingQueue<MotionEvent> mQueuedSingleTaps = new ArrayBlockingQueue<>(16);
private final ArrayList<Anchor> mAnchors = new ArrayList<>();
// Tap handling and UI.
private final ArrayBlockingQueue<MotionEvent> queuedSingleTaps = new ArrayBlockingQueue<>(16);
private final List<Integer> mAnchorReferences = new ArrayList<Integer>();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
surfaceView = findViewById(R.id.surfaceview);
displayRotationHelper = new DisplayRotationHelper(/*context=*/ this);
//Handle Gestures - Multitouch for Scaling and Rotation
mScaleDetector = new ScaleGestureDetector(this, new ScaleGestureDetector.SimpleOnScaleGestureListener() {
@Override
public boolean onScale(ScaleGestureDetector detector) {
if (mScaleFactors.size() > 0) {
mScaleFactors.set(mScaleFactors.size() - 1, Math.max(0.1f, Math.min(detector.getScaleFactor() * mScaleFactors.get(mScaleFactors.size() - 1), 5.0f)));
return true;
}
return false;
}
});
mRotationDetector = new RotationGestureDetector(this, new RotationGestureDetector.OnRotationGestureListener() {
@Override
public void OnRotation(RotationGestureDetector rotationDetector) {
if (mRotationThetas.size() > 0) {
mRotationThetas.set(mRotationThetas.size() - 1, (mRotationThetas.get(mRotationThetas.size() - 1) + (rotationDetector.getAngle() * -0.001f)));
}
}
});
// Set up tap listener.
gestureDetector =
new GestureDetector(
this,
new GestureDetector.SimpleOnGestureListener() {
@Override
public boolean onSingleTapUp(MotionEvent e) {
onSingleTap(e);
return true;
}
@Override
public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
if (e2.getPointerCount() == 1 && mTranslationX.size() > 0 && mTranslationZ.size() > 0) {
double angle = findCameraAngleFromOrigin();
double speed = 0.001d;
if (angle / 90d < 1) //Quadrant 1
{
double transX = -(distanceY * (angle / 90d)) + (distanceX * ((90d - angle) / 90d));
double transY = (distanceY * ((90d - angle) / 90d)) + (distanceX * (angle / 90d));
// showSnackbarMessage("ANGLE: " + angle + ", distanceX: " + distanceX + ", distanceY: " + distanceY, false);
mTranslationX.set(mTranslationX.size() - 1, (float) (mTranslationX.get(mTranslationX.size() - 1) + (transX * -speed)));
mTranslationZ.set(mTranslationZ.size() - 1, (float) (mTranslationZ.get(mTranslationZ.size() - 1) + (transY * -speed)));
} else if (angle / 90d < 2) //Quadrant 2
{
angle -= 90d;
double transX = (distanceX * (angle / 90d)) + (distanceY * ((90d - angle) / 90d));
double transY = (-distanceX * ((90d - angle) / 90d)) + (distanceY * (angle / 90d));
// showSnackbarMessage("ANGLE: " + angle + ", distanceX: " + distanceX + ", distanceY: " + distanceY, false);
mTranslationX.set(mTranslationX.size() - 1, (float) (mTranslationX.get(mTranslationX.size() - 1) + (transX * speed)));
mTranslationZ.set(mTranslationZ.size() - 1, (float) (mTranslationZ.get(mTranslationZ.size() - 1) + (transY * speed)));
} else if (angle / 90d < 3) //Quadrant 3
{
angle -= 180d;
double transX = (distanceY * (angle / 90d)) + (-distanceX * ((90d - angle) / 90d));
double transY = (-distanceY * ((90d - angle) / 90d)) + (-distanceX * (angle / 90d));
// showSnackbarMessage("ANGLE: " + angle + ", distanceX: " + distanceX + ", distanceY: " + distanceY, false);
mTranslationX.set(mTranslationX.size() - 1, (float) (mTranslationX.get(mTranslationX.size() - 1) + (transX * -speed)));
mTranslationZ.set(mTranslationZ.size() - 1, (float) (mTranslationZ.get(mTranslationZ.size() - 1) + (transY * -speed)));
} else //Quadrant 4
{
angle -= 270d;
double transX = (-distanceX * (angle / 90d)) + (-distanceY * ((90d - angle) / 90d));
double transY = (distanceX * ((90d - angle) / 90d)) + (-distanceY * (angle / 90d));
// showSnackbarMessage("ANGLE: " + angle + ", distanceX: " + distanceX + ", distanceY: " + distanceY, false);
mTranslationX.set(mTranslationX.size() - 1, (float) (mTranslationX.get(mTranslationX.size() - 1) + (transX * speed)));
mTranslationZ.set(mTranslationZ.size() - 1, (float) (mTranslationZ.get(mTranslationZ.size() - 1) + (transY * speed)));
}
return true;
}
return false;
}
@Override
public boolean onDown(MotionEvent e) {
return true;
}
});
surfaceView.setOnTouchListener(
new View.OnTouchListener() {
@Override
public boolean onTouch(View v, MotionEvent event) {
boolean retVal = mScaleDetector.onTouchEvent(event);
if (retVal)
mRotationDetector.onTouchEvent(event);
// Dispatch to the tap/scroll detector once and combine the results
// (the original dispatched the same event to it twice).
return gestureDetector.onTouchEvent(event) || retVal;
}
});
// Set up renderer.
surfaceView.setPreserveEGLContextOnPause(true);
surfaceView.setEGLContextClientVersion(2);
surfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0); // Alpha used for plane blending.
surfaceView.setRenderer(this);
surfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
installRequested = false;
}
private double findCameraAngleFromOrigin() {
double angle = getDegree(mCurrentCameraMatrix[2], mCurrentCameraMatrix[0]) -
getDegree(mOriginCameraMatrix[2], mOriginCameraMatrix[0]);
if (angle < 0)
return angle + 360;
return angle;
}
private double getDegree(double value1, double value2) {
double firstAngle = value1 * 90;
double secondAngle = value2 * 90;
if (secondAngle >= 0 && firstAngle >= 0) {
return firstAngle; // first quadrant
} else if (secondAngle < 0 && firstAngle >= 0) {
return 90 + (90 - firstAngle); //second quadrant
} else if (secondAngle < 0 && firstAngle < 0) {
return 180 - firstAngle; //third quadrant
} else {
return 270 + (90 + firstAngle); //fourth quadrant
}
}
@Override
protected void onResume() {
super.onResume();
if (session == null) {
Exception exception = null;
String message = null;
try {
switch (ArCoreApk.getInstance().requestInstall(this, !installRequested)) {
case INSTALL_REQUESTED:
installRequested = true;
return;
case INSTALLED:
break;
}
// ARCore requires camera permissions to operate. If we did not yet obtain runtime
// permission on Android M and above, now is a good time to ask the user for it.
if (!CameraPermissionHelper.hasCameraPermission(this)) {
CameraPermissionHelper.requestCameraPermission(this);
return;
}
session = new Session(/* context= */ this);
} catch (UnavailableArcoreNotInstalledException
| UnavailableUserDeclinedInstallationException e) {
message = "Please install ARCore";
exception = e;
} catch (UnavailableApkTooOldException e) {
message = "Please update ARCore";
exception = e;
} catch (UnavailableSdkTooOldException e) {
message = "Please update this app";
exception = e;
} catch (Exception e) {
message = "This device does not support AR";
exception = e;
}
if (message != null) {
showSnackbarMessage(message, true);
Log.e(TAG, "Exception creating session", exception);
return;
}
// Create default config and check if supported.
Config config = new Config(session);
if (!session.isSupported(config)) {
showSnackbarMessage("This device does not support AR", true);
}
session.configure(config);
}
showLoadingMessage();
// Note that order matters - see the note in onPause(), the reverse applies here.
session.resume();
surfaceView.onResume();
displayRotationHelper.onResume();
}
@Override
public void onPause() {
super.onPause();
if (session != null) {
// Note that the order matters - GLSurfaceView is paused first so that it does not try
// to query the session. If Session is paused before GLSurfaceView, GLSurfaceView may
// still call session.update() and get a SessionPausedException.
displayRotationHelper.onPause();
surfaceView.onPause();
session.pause();
}
}
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] results) {
if (!CameraPermissionHelper.hasCameraPermission(this)) {
Toast.makeText(this, "Camera permission is needed to run this application", Toast.LENGTH_LONG)
.show();
if (!CameraPermissionHelper.shouldShowRequestPermissionRationale(this)) {
// Permission denied with checking "Do not ask again".
CameraPermissionHelper.launchPermissionSettings(this);
}
finish();
}
}
@Override
public void onWindowFocusChanged(boolean hasFocus) {
super.onWindowFocusChanged(hasFocus);
if (hasFocus) {
// Standard Android full-screen functionality.
getWindow()
.getDecorView()
.setSystemUiVisibility(
View.SYSTEM_UI_FLAG_LAYOUT_STABLE
| View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
| View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_FULLSCREEN
| View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
}
private void onSingleTap(MotionEvent e) {
// Queue tap if there is space. Tap is lost if queue is full.
queuedSingleTaps.offer(e);
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
GLES20.glClearColor(0.1f, 0.1f, 0.1f, 1.0f);
// Create the texture and pass it to ARCore session to be filled during update().
backgroundRenderer.createOnGlThread(/*context=*/ this);
// Prepare the other rendering objects.
try {
virtualObject.createOnGlThread(/*context=*/ this, "andy.obj", "andy.png");
virtualObject.setMaterialProperties(0.0f, 3.5f, 1.0f, 6.0f);
virtualObjectShadow.createOnGlThread(/*context=*/ this, "andy_shadow.obj", "andy_shadow.png");
virtualObjectShadow.setBlendMode(BlendMode.Shadow);
virtualObjectShadow.setMaterialProperties(1.0f, 0.0f, 0.0f, 1.0f);
} catch (IOException e) {
Log.e(TAG, "Failed to read obj file");
}
try {
planeRenderer.createOnGlThread(/*context=*/ this, "trigrid.png");
} catch (IOException e) {
Log.e(TAG, "Failed to read plane texture");
}
pointCloud.createOnGlThread(/*context=*/ this);
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
displayRotationHelper.onSurfaceChanged(width, height);
GLES20.glViewport(0, 0, width, height);
mVirtualObjects.add(virtualObject);
}
@Override
public void onDrawFrame(GL10 gl) {
// Clear screen to notify driver it should not load any pixels from previous frame.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
if (session == null) {
return;
}
// Notify ARCore session that the view size changed so that the perspective matrix and
// the video background can be properly adjusted.
displayRotationHelper.updateSessionIfNeeded(session);
try {
session.setCameraTextureName(backgroundRenderer.getTextureId());
// Obtain the current frame from ARSession. When the configuration is set to
// UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
// camera framerate.
Frame frame = session.update();
Camera camera = frame.getCamera();
// Handle taps. Handling only one tap per frame, as taps are usually low frequency
// compared to frame rate.
MotionEvent tap = queuedSingleTaps.poll();
if (tap != null && camera.getTrackingState() == TrackingState.TRACKING) {
for (HitResult hit : frame.hitTest(tap)) {
// Check if any plane was hit, and if it was hit inside the plane polygon
Trackable trackable = hit.getTrackable();
// Creates an anchor if a plane or an oriented point was hit.
if ((trackable instanceof Plane && ((Plane) trackable).isPoseInPolygon(hit.getHitPose()))
|| (trackable instanceof Point
&& ((Point) trackable).getOrientationMode()
== OrientationMode.ESTIMATED_SURFACE_NORMAL)) {
// Hits are sorted by depth. Consider only closest hit on a plane or oriented point.
// Cap the number of objects created. This avoids overloading both the
// rendering system and ARCore.
if (mAnchors.size() >= 20)
{
//Alert the user that the maximum has been reached; post to the main
//looper because this runs on the GL (worker) thread. (The original
//constructed a Handler subclass but never sent it a message.)
new Handler(Looper.getMainLooper()).post(new Runnable()
{
@Override
public void run()
{
Toast.makeText(HelloArActivity.this,
"You've reached the maximum!", Toast.LENGTH_LONG).show();
}
});
// Alternatively, you can start detaching, however, a revision to the
// mAnchorReferences and mScalingFactors should be made!
// mAnchors.get(0).detach();
// mAnchors.remove(0);
}
else
{
// Adding an Anchor tells ARCore that it should track this position in
// space. This anchor is created on the Plane to place the 3d model
// in the correct position relative both to the world and to the plane.
mScaleFactors.add(1.0f);
Anchor anchor = hit.createAnchor();
mAnchors.add(anchor);
mAnchorReferences.add(mCurrent);
camera.getDisplayOrientedPose().toMatrix(mOriginCameraMatrix, 0);
}
break;
}
}
}
// Draw background.
backgroundRenderer.draw(frame);
// If not tracking, don't draw 3d objects.
if (camera.getTrackingState() == TrackingState.PAUSED) {
return;
}
// Get projection matrix.
float[] projmtx = new float[16];
camera.getProjectionMatrix(projmtx, 0, 0.1f, 100.0f);
// Get camera matrix and draw.
float[] viewmtx = new float[16];
camera.getViewMatrix(viewmtx, 0);
// Compute lighting from average intensity of the image.
final float lightIntensity = frame.getLightEstimate().getPixelIntensity();
// Visualize tracked points.
PointCloud pointCloud = frame.acquirePointCloud();
this.pointCloud.update(pointCloud);
this.pointCloud.draw(viewmtx, projmtx);
// Application is responsible for releasing the point cloud resources after
// using it.
pointCloud.release();
// Check if we detected at least one plane. If so, hide the loading message.
if (messageSnackbar != null) {
for (Plane plane : session.getAllTrackables(Plane.class)) {
if (plane.getType() == com.google.ar.core.Plane.Type.HORIZONTAL_UPWARD_FACING
&& plane.getTrackingState() == TrackingState.TRACKING) {
hideLoadingMessage();
break;
}
}
}
camera.getDisplayOrientedPose().toMatrix(mCurrentCameraMatrix, 0);
// Visualize planes.
planeRenderer.drawPlanes(
session.getAllTrackables(Plane.class), camera.getDisplayOrientedPose(), projmtx);
// Visualize anchors created by touch.
int ac = 0;
for (Anchor anchor : mAnchors) {
if (anchor.getTrackingState() != TrackingState.TRACKING) {
continue;
}
// Get the current pose of an Anchor in world space. The Anchor pose is updated
// during calls to session.update() as ARCore refines its estimate of the world.
anchor.getPose().toMatrix(anchorMatrix, 0);
if (mScaleFactors.size() <= ac) {
mScaleFactors.add(1.0f);
}
if (mRotationThetas.size() <= ac) {
mRotationThetas.add(0.0f);
}
if (mTranslationX.size() <= ac) {
mTranslationX.add(viewmtx[3]);
}
if (mTranslationZ.size() <= ac) {
mTranslationZ.add(viewmtx[11]);
}
translateMatrix(mTranslationX.get(ac), 0, mTranslationZ.get(ac));
rotateYAxisMatrix(mRotationThetas.get(ac));
// Update and draw the model and its shadow.
ObjectRenderer objectRenderer = mVirtualObjects.get(mAnchorReferences.get(ac));
objectRenderer.updateModelMatrix(anchorMatrix, mScaleFactors.get(ac));
objectRenderer.draw(viewmtx, projmtx, lightIntensity);
}
} catch (Throwable t) {
// Avoid crashing the application due to unhandled exceptions.
Log.e(TAG, "Exception on the OpenGL thread", t);
}
}
private void rotateYAxisMatrix(float rotationTheta) {
if (rotationTheta != 0.0f) {
anchorMatrix[0] = (float) Math.cos(rotationTheta);
anchorMatrix[2] = (float) Math.sin(rotationTheta);
anchorMatrix[5] = 1;
anchorMatrix[8] = -(float) Math.sin(rotationTheta);
anchorMatrix[10] = (float) Math.cos(rotationTheta);
anchorMatrix[15] = 1;
}
}
private void translateMatrix(float xDistance, float yDistance, float zDistance) {
Matrix.translateM(anchorMatrix, 0, xDistance, yDistance, zDistance);
}
}

Check if LatLng is inside polygon

I'm drawing a polygon over a city in order to check whether the current position is inside the polygon, which I'm doing with the code below:
ArrayList<LatLng> polyLoc = new ArrayList<LatLng>();
polyLoc.add(new LatLng(24.643932, 46.297718));
polyLoc.add(new LatLng(24.695098, 46.555897));
polyLoc.add(new LatLng(24.921971, 46.476246));
polyLoc.add(new LatLng(25.147185, 46.366383));
polyLoc.add(new LatLng(25.155886, 47.249409));
polyLoc.add(new LatLng(24.929444, 47.346913));
polyLoc.add(new LatLng(24.691355, 47.106587));
polyLoc.add(new LatLng(24.449060, 47.219197));
polyLoc.add(new LatLng(24.293947, 46.973377));
polyLoc.add(new LatLng(24.641436, 46.299092));
And I check whether the current position is inside the polygon this way:
if (hasPermission() && gpsTracker.canGetLocation()) {
if (isPointInPolygon(new LatLng(gpsTracker.getLatitude(), gpsTracker.getLongitude()), polyLoc)) {
cash.setVisibility(View.VISIBLE);
cashIcon.setVisibility(View.VISIBLE);
} else {
cash.setVisibility(View.GONE);
cashIcon.setVisibility(View.GONE);
}
Log.d(TAG, "reservationDialog: " + gpsTracker.getLatitude() + gpsTracker.getLongitude());
}
Here is my isPointInPolygon method:
private boolean isPointInPolygon(LatLng tap, ArrayList<LatLng> vertices) {
int intersectCount = 0;
for (int j = 0; j < vertices.size() - 1; j++) {
if (rayCastIntersect(tap, vertices.get(j), vertices.get(j + 1))) {
intersectCount++;
}
}
return ((intersectCount % 2) == 1); // odd = inside, even = outside;
}
private boolean rayCastIntersect(LatLng tap, LatLng vertA, LatLng vertB) {
double aY = vertA.latitude;
double bY = vertB.latitude;
double aX = vertA.longitude;
double bX = vertB.longitude;
double pY = tap.latitude;
double pX = tap.longitude;
if ((aY > pY && bY > pY) || (aY < pY && bY < pY)
|| (aX < pX && bX < pX)) {
return false; // a and b can't both be above or below pt.y, and a or
// b must be east of pt.x
}
double m = (aY - bY) / (aX - bX); // Rise over run
double bee = (-aX) * m + aY; // y = mx + b
double x = (pY - bee) / m; // algebra is neat!
return x > pX;
}
I don't know why it's not working. What have I missed here?
I don't know why your code didn't work.
But you can use PolyUtil.containsLocation(new LatLng(latitude, longitude), polyLoc, true).
It returns false if the position is outside the polygon.
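For example, a short sketch (PolyUtil comes from the Google Maps Android API utility library, the com.google.maps.android:android-maps-utils dependency; polyLoc is the vertex list from the question):
boolean inside = PolyUtil.containsLocation(
new LatLng(gpsTracker.getLatitude(), gpsTracker.getLongitude()),
polyLoc,
true); // geodesic = true: treat edges as great-circle segments
cash.setVisibility(inside ? View.VISIBLE : View.GONE);
cashIcon.setVisibility(inside ? View.VISIBLE : View.GONE);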
In my project, I do this calculation on the server, using the geolib library. If you need to calculate it in your app, you can take the logic from the library.
Library:
https://www.npmjs.com/package/geolib
Git: https://github.com/manuelbieh/Geolib

Maps, test if current location is on or near polyline

I'm using the Google Directions API to draw a polyline for a route. Does anyone have an example of checking whether the current location is on or near a polyline? I'm trying to determine whether the user's current location is within x meters of that line; if not, I'll make a new request and redraw the route.
Cheers!
Here is my solution: just add the bdccGeoDistanceAlgorithm class below to your project and use the bdccGeoDistanceCheckWithRadius method to check whether your current location is on or near the polyline (a polyline here is just a list of LatLng points).
The method computes the distance internally, so you can easily adapt it to return the distance as well.
Class bdccGeoDistanceAlgorithm:
import com.google.android.gms.maps.model.LatLng;
import java.util.List;
public class bdccGeoDistanceAlgorithm {
// returns true if point is within radius meters of the polyline poly
public static boolean bdccGeoDistanceCheckWithRadius(List<LatLng> poly, LatLng point, int radius)
{
int i;
bdccGeo p = new bdccGeo(point.latitude,point.longitude);
for(i=0; i < (poly.size()-1) ; i++)
{
LatLng p1 = poly.get(i);
bdccGeo l1 = new bdccGeo(p1.latitude,p1.longitude);
LatLng p2 = poly.get(i+1);
bdccGeo l2 = new bdccGeo(p2.latitude,p2.longitude);
double distance = p.function_distanceToLineSegMtrs(l1, l2);
if(distance < radius)
return true;
}
return false;
}
// object
public static class bdccGeo
{
public double lat;
public double lng;
public double x;
public double y;
public double z;
public bdccGeo(double lat, double lon) {
this.lat = lat;
this.lng = lon; // assign the parameter (the original self-assigned the field)
double theta = (lon * Math.PI / 180.0);
double rlat = function_bdccGeoGeocentricLatitude(lat * Math.PI / 180.0);
double c = Math.cos(rlat);
this.x = c * Math.cos(theta);
this.y = c * Math.sin(theta);
this.z = Math.sin(rlat);
}
//returns in meters the minimum of the perpendicular distance of this point from the line segment geo1-geo2
//and the distance from this point to the line segment ends in geo1 and geo2
public double function_distanceToLineSegMtrs(bdccGeo geo1,bdccGeo geo2)
{
//point on unit sphere above origin and normal to plane of geo1,geo2
//could be either side of the plane
bdccGeo p2 = geo1.function_crossNormalize(geo2);
// intersection of GC normal to geo1/geo2 passing through p with GC geo1/geo2
bdccGeo ip = function_bdccGeoGetIntersection(geo1,geo2,this,p2);
//need to check that ip or its antipode is between p1 and p2
double d = geo1.function_distance(geo2);
double d1p = geo1.function_distance(ip);
double d2p = geo2.function_distance(ip);
if ((d >= d1p) && (d >= d2p))
return function_bdccGeoRadiansToMeters(this.function_distance(ip));
else
{
ip = ip.function_antipode();
d1p = geo1.function_distance(ip);
d2p = geo2.function_distance(ip);
}
if ((d >= d1p) && (d >= d2p))
return function_bdccGeoRadiansToMeters(this.function_distance(ip));
else
return function_bdccGeoRadiansToMeters(Math.min(geo1.function_distance(this),geo2.function_distance(this)));
}
// More Maths
public bdccGeo function_crossNormalize(bdccGeo b)
{
double x = (this.y * b.z) - (this.z * b.y);
double y = (this.z * b.x) - (this.x * b.z);
double z = (this.x * b.y) - (this.y * b.x);
double L = Math.sqrt((x * x) + (y * y) + (z * z));
bdccGeo r = new bdccGeo(0,0);
r.x = x / L;
r.y = y / L;
r.z = z / L;
return r;
}
// Returns the two antipodal points of intersection of two great
// circles defined by the arcs geo1 to geo2 and
// geo3 to geo4. Returns a point as a Geo, use .antipode to get the other point
public bdccGeo function_bdccGeoGetIntersection(bdccGeo geo1,bdccGeo geo2, bdccGeo geo3,bdccGeo geo4)
{
bdccGeo geoCross1 = geo1.function_crossNormalize(geo2);
bdccGeo geoCross2 = geo3.function_crossNormalize(geo4);
return geoCross1.function_crossNormalize(geoCross2);
}
public double function_distance(bdccGeo v2)
{
return Math.atan2(v2.function_crossLength(this), v2.function_dot(this));
}
//More Maths
public double function_crossLength(bdccGeo b)
{
double x = (this.y * b.z) - (this.z * b.y);
double y = (this.z * b.x) - (this.x * b.z);
double z = (this.x * b.y) - (this.y * b.x);
return Math.sqrt((x * x) + (y * y) + (z * z));
}
//Maths
public double function_dot(bdccGeo b)
{
return ((this.x * b.x) + (this.y * b.y) + (this.z * b.z));
}
//from Radians to Meters
public double function_bdccGeoRadiansToMeters(double rad)
{
return rad * 6378137.0; // WGS84 Equatorial Radius in Meters
}
// point on opposite side of the world to this point
public bdccGeo function_antipode()
{
return this.function_scale(-1.0);
}
//More Maths
public bdccGeo function_scale(double s)
{
bdccGeo r = new bdccGeo(0,0);
r.x = this.x * s;
r.y = this.y * s;
r.z = this.z * s;
return r;
}
// Convert from geographic to geocentric latitude (radians).
public double function_bdccGeoGeocentricLatitude(double geographicLatitude)
{
double flattening = 1.0 / 298.257223563;//WGS84
double f = (1.0 - flattening) * (1.0 - flattening);
return Math.atan((Math.tan(geographicLatitude) * f));
}
}
}
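For example, a usage sketch (the points and the 50 m radius are made up; in practice the list would come from decoding the Directions API polyline):
List<LatLng> routePoints = new ArrayList<>();
routePoints.add(new LatLng(51.5007, -0.1246));
routePoints.add(new LatLng(51.5033, -0.1195));
LatLng current = new LatLng(51.5020, -0.1220);
boolean onRoute = bdccGeoDistanceAlgorithm.bdccGeoDistanceCheckWithRadius(routePoints, current, 50);
if (!onRoute) {
// Off the route: request new directions and redraw the polyline here.
}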

Compass: Rotate image towards a particular location

I have a requirement to rotate image arrow to particular location. Here is my current code:
public void onSensorChanged(SensorEvent event) {
float azimuth = event.values[0];
Location currentLoc = getHelper().getCurrentLocation();
if(currentLoc == null || cardinal == null || cardinal.getLocation() == null) {
return;
}
azimuth = (float) Math.toDegrees(azimuth);
GeomagneticField geoField = new GeomagneticField(
(float) currentLoc.getLatitude(),
(float) currentLoc.getLongitude(),
(float) currentLoc.getAltitude(),
System.currentTimeMillis());
azimuth += geoField.getDeclination(); // converts magnetic north into true north
float bearing = currentLoc.bearingTo(cardinal.getLocation()); // (it's already in degrees)
float direction = azimuth - bearing;
direction = -direction;
if(compass != null) {
compass.setDirection(direction);
}
}
And here is my custom imageview:
public void onDraw(Canvas canvas) {
int height = this.getHeight();
int width = this.getWidth();
canvas.rotate(direction, width / 2, height / 2);
super.onDraw(canvas);
}
And here is the arrow image that I am using (image not shown here).
This code does not work at all. Can anybody point out where I am going wrong?
I found the solution myself. I am posting the answer for anyone looking for the same thing. Here is the updated code:
float azimuth = event.values[0];
Location currentLoc = getHelper().getCurrentLocation();
if(currentLoc == null || cardinal == null || cardinal.getLocation() == null) {
return;
}
try {
GeomagneticField geoField = new GeomagneticField(
Double.valueOf(currentLoc.getLatitude()).floatValue(),
Double.valueOf(currentLoc.getLongitude()).floatValue(),
Double.valueOf(currentLoc.getAltitude()).floatValue(),
System.currentTimeMillis() );
float myBearing = currentLoc.bearingTo(cardinal.getLocation());
azimuth += geoField.getDeclination();
azimuth = (myBearing - azimuth) * -1;
azimuth = normalizeDegree(azimuth);
// compass.updateValues(-azimuth);
} catch (Exception e) {
e.printStackTrace();
}
if(compass != null) {
compass.setDirection(-azimuth);
}
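Note that the snippet calls a normalizeDegree helper that isn't shown above. A likely reconstruction (an assumption on my part) maps the bearing from [-180, 180] into [0, 360):
// Hypothetical reconstruction of the helper referenced above:
// maps a value in [-180, 180] to [0, 360).
private float normalizeDegree(float value) {
if (value >= 0.0f && value <= 180.0f) {
return value;
}
return 180 + (180 + value);
}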

Android Geofencing (Polygon)

How do you create a polygon geofence from multiple geo locations (lat/long values)? Also, how do you track whether the user is entering or exiting this geofence region on Android?
A geofence is simply an array of lat/long points that form a polygon. Once you have a list of lat/long points, you can use a point-inside-polygon check to see if a location is within the polygon.
This is code I have used in my own projects to perform point-in-polygon checks for very large concave polygons (20K+ vertices):
public class PolygonTest
{
class LatLng
{
double Latitude;
double Longitude;
LatLng(double lat, double lon)
{
Latitude = lat;
Longitude = lon;
}
}
boolean PointIsInRegion(double x, double y, LatLng[] thePath)
{
int crossings = 0;
LatLng point = new LatLng (x, y);
int count = thePath.length;
// for each edge
for (int i = 0; i < count; i++)
{
LatLng a = thePath[i];
int j = i + 1;
if (j >= count)
{
j = 0;
}
LatLng b = thePath[j];
if (RayCrossesSegment(point, a, b))
{
crossings++;
}
}
// odd number of crossings?
return (crossings % 2 == 1);
}
boolean RayCrossesSegment(LatLng point, LatLng a, LatLng b)
{
double px = point.Longitude;
double py = point.Latitude;
double ax = a.Longitude;
double ay = a.Latitude;
double bx = b.Longitude;
double by = b.Latitude;
if (ay > by)
{
ax = b.Longitude;
ay = b.Latitude;
bx = a.Longitude;
by = a.Latitude;
}
// alter longitude to cater for 180 degree crossings
if (px < 0) { px += 360; };
if (ax < 0) { ax += 360; };
if (bx < 0) { bx += 360; };
if (py == ay || py == by) py += 0.00000001;
if ((py > by || py < ay) || (px > Math.max(ax, bx))) return false;
if (px < Math.min(ax, bx)) return true;
double red = (ax != bx) ? ((by - ay) / (bx - ax)) : Double.MAX_VALUE;
double blue = (ax != px) ? ((py - ay) / (px - ax)) : Double.MAX_VALUE;
return (blue >= red);
}
}
In terms of program flow, you will want a background service that receives location updates and then performs this check against your lat/long polygon data to see whether the location is inside.
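For example, entry and exit transitions can be detected by comparing each new fix against the previous containment result; a sketch (the listener wiring and names are illustrative):
// Fire enter/exit events when the containment state changes between fixes.
private boolean wasInside = false;

void onLocationUpdate(double lat, double lon, LatLng[] fencePath) {
boolean isInside = new PolygonTest().PointIsInRegion(lat, lon, fencePath);
if (isInside && !wasInside) {
// entered the geofence region
} else if (!isInside && wasInside) {
// exited the geofence region
}
wasInside = isInside;
}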
In case people are still looking for a polygon geofencing check, you can also do this with the PolyUtil.containsLocation method from the Google Maps Android API utility library (see the PolyUtil example earlier on this page).
