I want to detect whether the device is moving or stationary, because I have a situation where I want to execute a task only when the device is not moving.
Right now I have implemented something like the code below using the accelerometer, but it does not report that the device is moving when I slide it across a flat surface such as a table.
@Override
public void onSensorChanged(SensorEvent sensorEvent) {
double alpha = 0.6;
double[] gravity = new double[3];
gravity[0] = 0;
gravity[1] = 0;
gravity[2] = 0;
gravity[0] = alpha * gravity[0] + (1 - alpha) * sensorEvent.values[0];
gravity[1] = alpha * gravity[1] + (1 - alpha) * sensorEvent.values[1];
gravity[2] = alpha * gravity[2] + (1 - alpha) * sensorEvent.values[2];
double[] linear_acceleration = new double[3];
linear_acceleration[0] = sensorEvent.values[0] - gravity[0];
linear_acceleration[1] = sensorEvent.values[1] - gravity[1];
linear_acceleration[2] = sensorEvent.values[2] - gravity[2];
float magnitude = 0.0f;
magnitude = (float)Math.sqrt(linear_acceleration[0]*linear_acceleration[0]+linear_acceleration[1]*linear_acceleration[1]+linear_acceleration[2]*linear_acceleration[2]);
magnitude = Math.abs(magnitude);
Log.e("Magnitude",magnitude+"");
if(magnitude>0.2){
Log.e("walking","Yes");
}else{
Log.e("walking","No");
}
}
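Note that gravity above is re-created and zeroed on every call, so the low-pass filter never converges and linear_acceleration still contains most of the gravity component. A minimal sketch with the estimate kept as a field, assuming that was the intent:

// Keep the gravity estimate between sensor events so the low-pass filter can converge.
private final double[] gravity = new double[3];

@Override
public void onSensorChanged(SensorEvent sensorEvent) {
    final double alpha = 0.8; // closer to 1 = smoother gravity estimate, slower to adapt
    gravity[0] = alpha * gravity[0] + (1 - alpha) * sensorEvent.values[0];
    gravity[1] = alpha * gravity[1] + (1 - alpha) * sensorEvent.values[1];
    gravity[2] = alpha * gravity[2] + (1 - alpha) * sensorEvent.values[2];

    double ax = sensorEvent.values[0] - gravity[0];
    double ay = sensorEvent.values[1] - gravity[1];
    double az = sensorEvent.values[2] - gravity[2];

    double magnitude = Math.sqrt(ax * ax + ay * ay + az * az);
    // 0.2 is only a starting point; tune the threshold on the target device.
    Log.e("moving", magnitude > 0.2 ? "Yes" : "No");
}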
If you are looking for an example, there is one below.
This code is for walking detection; it averages samples to get a smoother value.
// initialize
private SensorManager sensorMan;
private Sensor accelerometer;
private float[] mGravity;
private double mAccel;
private double mAccelCurrent;
private double mAccelLast;
private boolean sensorRegistered = false;
// onCreate
sensorMan = (SensorManager) context.getSystemService(Context.SENSOR_SERVICE);
accelerometer = sensorMan.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
mAccel = 0.00f;
mAccelCurrent = SensorManager.GRAVITY_EARTH;
mAccelLast = SensorManager.GRAVITY_EARTH;
sensorMan.registerListener(this, accelerometer,
SensorManager.SENSOR_DELAY_NORMAL);
sensorRegistered = true;
// onSensorChanged
private int hitCount = 0;
private double hitSum = 0;
private double hitResult = 0;
private final int SAMPLE_SIZE = 50; // change this sample size as needed; a higher value is more precise but slower to report
private final double THRESHOLD = 0.2; // change this threshold as needed; a higher value requires more pronounced movement
@Override
public void onSensorChanged(SensorEvent event) {
if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
mGravity = event.values.clone();
// Shake detection
double x = mGravity[0];
double y = mGravity[1];
double z = mGravity[2];
mAccelLast = mAccelCurrent;
mAccelCurrent = Math.sqrt(x * x + y * y + z * z);
double delta = mAccelCurrent - mAccelLast;
mAccel = mAccel * 0.9f + delta;
if (hitCount <= SAMPLE_SIZE) {
hitCount++;
hitSum += Math.abs(mAccel);
} else {
hitResult = hitSum / SAMPLE_SIZE;
Log.d(TAG, String.valueOf(hitResult));
if (hitResult > THRESHOLD) {
Log.d(TAG, "Walking");
} else {
Log.d(TAG, "Stop Walking");
}
hitCount = 0;
hitSum = 0;
hitResult = 0;
}
}
}
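The listener should also be released once the measurement is no longer needed, for example in onPause(); a minimal sketch using the same fields as above:

// onPause (or wherever measurement should stop)
if (sensorRegistered) {
    sensorMan.unregisterListener(this);
    sensorRegistered = false;
}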
I'm attempting to add moving objects around with single-finger scrolling. (See Google AR Stickers for an example.) I'm using native ARCore/OpenGL, originally based on the ARCore examples. When you slide one finger, I want to move the object in 3D space along the X and Z axes.
I can easily create the movement using translation; however, it performs the movement relative to the original camera orientation. If you physically move the phone/camera a few lateral steps, the finger movements no longer match what a user would expect.
So I changed my code and mapped the swiped finger distanceX and distanceY to the X and Z coordinates depending on the angle between the original camera starting point and the current camera position.
The issue that I'm running into is determining the angle at which the camera has been moved. I've been looking at the value from the Camera View matrix:
camera.getViewMatrix(viewmtx, 0);
But the X, Y, and Z coordinates always say 0. I'm assuming this is because it's always making the camera the origin? Does anyone know of a way to calculate the angle of rotation of a camera from a 3D object using the ARCore/OpenGL ES libraries? The red angle in the illustration below (from top-down perspective) is what I'm trying to get. Sorry for the crude drawing:
Here's my code for your reference:
// Handle Gestures - Single touch for Translation and Placement
mGestureDetector = new GestureDetector(this, new GestureDetector.SimpleOnGestureListener()
{
@Override
public boolean onSingleTapUp(MotionEvent e)
{
onSingleTap(e);
return true;
}
@Override
public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY)
{
if (e2.getPointerCount() == 1 && ...)
{
double angle = findCameraAngleFromOrigin();
double speed = 0.005d;
if (angle / 90d < 1) //Quadrant 1
{
double transX = -(distanceY * (angle / 90d)) + (distanceX * ((90d - angle) / 90d));
double transY = (distanceY * ((90d - angle) / 90d)) + (distanceX * (angle / 90d));
mTranslationX.set(mTranslationX.size() - 1, (float) (mTranslationX.get(mTranslationX.size() - 1) + (transX * -speed)));
mTranslationZ.set(mTranslationZ.size() - 1, (float) (mTranslationZ.get(mTranslationZ.size() - 1) + (transY * -speed)));
}
else if (angle / 90d < 2) //Quadrant 2
{
angle -= 90d;
double transX = (distanceX * (angle / 90d)) + (distanceY * ((90d - angle) / 90d));
double transY = (-distanceX * ((90d - angle) / 90d)) + (distanceY * (angle / 90d));
mTranslationX.set(mTranslationX.size() - 1, (float) (mTranslationX.get(mTranslationX.size() - 1) + (transX * speed)));
mTranslationZ.set(mTranslationZ.size() - 1, (float) (mTranslationZ.get(mTranslationZ.size() - 1) + (transY * speed)));
}
else if (angle / 90d < 3) //Quadrant 3
{
angle -= 180d;
double transX = (distanceY * (angle / 90d)) + (-distanceX * ((90d - angle) / 90d));
double transY = (-distanceY * ((90d - angle) / 90d)) + (-distanceX * (angle / 90d));
mTranslationX.set(mTranslationX.size() - 1, (float) (mTranslationX.get(mTranslationX.size() - 1) + (transX * -speed)));
mTranslationZ.set(mTranslationZ.size() - 1, (float) (mTranslationZ.get(mTranslationZ.size() - 1) + (transY * -speed)));
}
else //Quadrant 4
{
angle -= 270d;
double transX = (-distanceX * (angle / 90d)) + (-distanceY * ((90d - angle) / 90d));
double transY = (distanceX * ((90d - angle) / 90d)) + (-distanceY * (angle / 90d));
mTranslationX.set(mTranslationX.size() - 1, (float) (mTranslationX.get(mTranslationX.size() - 1) + (transX * speed)));
mTranslationZ.set(mTranslationZ.size() - 1, (float) (mTranslationZ.get(mTranslationZ.size() - 1) + (transY * speed)));
}
return true;
}
return false;
}
});
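Incidentally, the four quadrant branches above amount to (approximately) rotating the 2D swipe vector by the camera angle, so they could be collapsed into a single rotation. A sketch only; `last` stands for the last list index and `angle` is the degree value returned by findCameraAngleFromOrigin():

// Sketch: rotate the swipe vector (distanceX, distanceY) by the camera angle so the
// drag is applied in world X/Z regardless of where the camera has moved to.
double rad = Math.toRadians(angle);
double worldDx = distanceX * Math.cos(rad) - distanceY * Math.sin(rad);
double worldDz = distanceX * Math.sin(rad) + distanceY * Math.cos(rad);
int last = mTranslationX.size() - 1;
mTranslationX.set(last, (float) (mTranslationX.get(last) - worldDx * speed));
mTranslationZ.set(last, (float) (mTranslationZ.get(last) - worldDz * speed));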
EDIT: Updated code
I wasn't able to figure out how to find the distance from the camera points, but I was at least able to find the difference in the phone's rotation, which is closer. I accomplished it with the code below. I'm still not happy with the results, so I'll post updates later when I find a more effective solution.
private double getDegree(double value1, double value2)
{
double firstAngle = value1 * 90;
double secondAngle = value2 * 90;
if (secondAngle >= 0 && firstAngle >= 0)
{
Log.d(TAG, "FIRST QUADRANT");
return firstAngle; // first quadrant
}
else if (secondAngle < 0 && firstAngle >= 0)
{
Log.d(TAG, "SECOND QUADRANT");
return 90 + (90 - firstAngle); //second quadrant
}
else if (secondAngle < 0 && firstAngle < 0)
{
Log.d(TAG, "THIRD QUADRANT");
return 180 - firstAngle; //third quadrant
}
else
{
Log.d(TAG, "FOURTH QUADRANT");
return 270 + (90 + firstAngle); //fourth quadrant
}
}
private double findCameraAngleFromOrigin()
{
double angle = getDegree(mCurrentCameraMatrix[2], mCurrentCameraMatrix[0]) - getDegree(mOriginCameraMatrix[2], mOriginCameraMatrix[0]);
if (angle < 0)
return angle + 360;
return angle;
}
@Override
public void onDrawFrame(GL10 gl)
{
...
//When creating a new object
Anchor anchor = hit.createAnchor();
mAnchors.add(anchor);
camera.getDisplayOrientedPose().toMatrix( mOriginCameraMatrix, 0);
//During each draw frame
camera.getDisplayOrientedPose().toMatrix( mCurrentCameraMatrix, 0);
int ac = 0;
for (Anchor anchor : mAnchors)
{
if (anchor.getTrackingState() != TrackingState.TRACKING)
{
continue;
}
// Get the current pose of an Anchor in world space. The Anchor pose is updated
// during calls to session.update() as ARCore refines its estimate of the world.
anchor.getPose().toMatrix(mAnchorMatrix, 0);
// Update and draw the model
if (mModelSet)
{
if (mScaleFactors.size() <= ac)
{
mScaleFactors.add(1.0f);
}
if (mRotationThetas.size() <= ac)
{
mRotationThetas.add(0.0f);
}
if (mTranslationX.size() <= ac)
{
mTranslationX.add(viewmtx[3]);
}
if (mTranslationZ.size() <= ac)
{
mTranslationZ.add(viewmtx[11]);
}
translateMatrix(mTranslationX.get(ac), 0, mTranslationZ.get(ac));
rotateYAxisMatrix(mRotationThetas.get(ac));
ObjectRenderer virtualObject = mVirtualObjects.get(mAnchorReferences.get(ac));
virtualObject.updateModelMatrix(mAnchorMatrix, mScaleFactors.get(ac));
virtualObject.draw(viewmtx, projmtx, lightIntensity);
}
ac++;
}
}
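For the record, one way to get the angle the camera has swept around the object since the anchor was placed, without reading the pose matrix columns, is to compare the anchor and camera translations in world space. A sketch only, not wired into the code above; `originPose` is assumed to be a Pose captured from camera.getPose() when the anchor is created:

// Sketch: top-down (yaw) angle from the object to the camera, in degrees.
Pose cameraPose = camera.getPose();
Pose objectPose = anchor.getPose();
double currentYaw = Math.toDegrees(Math.atan2(
        cameraPose.tx() - objectPose.tx(),
        cameraPose.tz() - objectPose.tz()));
double originYaw = Math.toDegrees(Math.atan2(
        originPose.tx() - objectPose.tx(),
        originPose.tz() - objectPose.tz()));
double sweptAngle = currentYaw - originYaw; // how far the camera has moved around the object
if (sweptAngle < 0) {
    sweptAngle += 360d; // normalize to [0, 360)
}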
This is kind of hard to track, so here is the class as it currently stands; I'll post a cleaned-up version once I'm more comfortable with the implementation.
public class HelloArActivity extends AppCompatActivity implements GLSurfaceView.Renderer {
private static final String TAG = HelloArActivity.class.getSimpleName();
// Rendering. The Renderers are created here, and initialized when the GL surface is created.
private GLSurfaceView surfaceView;
private boolean installRequested;
private Session session;
private GestureDetector gestureDetector;
private Snackbar messageSnackbar;
private DisplayRotationHelper displayRotationHelper;
private final BackgroundRenderer backgroundRenderer = new BackgroundRenderer();
private final ObjectRenderer virtualObject = new ObjectRenderer();
private final ObjectRenderer virtualObjectShadow = new ObjectRenderer();
private final PlaneRenderer planeRenderer = new PlaneRenderer();
private final PointCloudRenderer pointCloud = new PointCloudRenderer();
private int mCurrent = -1;
private final List<ObjectRenderer> mVirtualObjects = new ArrayList<ObjectRenderer>();
// Temporary matrix allocated here to reduce number of allocations for each frame.
private final float[] anchorMatrix = new float[16];
//Rotation, Moving, & Scaling
private final List<Float> mRotationThetas = new ArrayList<Float>();
private GestureDetector mGestureDetector;
private ScaleGestureDetector mScaleDetector;
private RotationGestureDetector mRotationDetector;
private final List<Float> mScaleFactors = new ArrayList<Float>();
private final List<Float> mTranslationX = new ArrayList<Float>();
private final List<Float> mTranslationZ = new ArrayList<Float>();
private final float[] mOriginCameraMatrix = new float[16];
private final float[] mCurrentCameraMatrix = new float[16];
private boolean mModelSet = false;
// Tap handling and UI.
private final ArrayBlockingQueue<MotionEvent> queuedSingleTaps = new ArrayBlockingQueue<>(16);
private final ArrayList<Anchor> mAnchors = new ArrayList<>();
private final List<Integer> mAnchorReferences = new ArrayList<Integer>();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
surfaceView = findViewById(R.id.surfaceview);
displayRotationHelper = new DisplayRotationHelper(/*context=*/ this);
//Handle Gestures - Multitouch for Scaling and Rotation
mScaleDetector = new ScaleGestureDetector(this, new ScaleGestureDetector.SimpleOnScaleGestureListener() {
@Override
public boolean onScale(ScaleGestureDetector detector) {
if (mScaleFactors.size() > 0) {
mScaleFactors.set(mScaleFactors.size() - 1, Math.max(0.1f, Math.min(detector.getScaleFactor() * mScaleFactors.get(mScaleFactors.size() - 1), 5.0f)));
return true;
}
return false;
}
});
mRotationDetector = new RotationGestureDetector(this, new RotationGestureDetector.OnRotationGestureListener() {
@Override
public void OnRotation(RotationGestureDetector rotationDetector) {
if (mRotationThetas.size() > 0) {
mRotationThetas.set(mRotationThetas.size() - 1, (mRotationThetas.get(mRotationThetas.size() - 1) + (rotationDetector.getAngle() * -0.001f)));
}
}
});
// Set up tap listener.
gestureDetector =
new GestureDetector(
this,
new GestureDetector.SimpleOnGestureListener() {
@Override
public boolean onSingleTapUp(MotionEvent e) {
onSingleTap(e);
return true;
}
@Override
public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
if (e2.getPointerCount() == 1 && mTranslationX.size() > 0 && mTranslationZ.size() > 0) {
double angle = findCameraAngleFromOrigin();
double speed = 0.001d;
if (angle / 90d < 1) //Quadrant 1
{
double transX = -(distanceY * (angle / 90d)) + (distanceX * ((90d - angle) / 90d));
double transY = (distanceY * ((90d - angle) / 90d)) + (distanceX * (angle / 90d));
// showSnackbarMessage("ANGLE: " + angle + ", distanceX: " + distanceX + ", distanceY: " + distanceY, false);
mTranslationX.set(mTranslationX.size() - 1, (float) (mTranslationX.get(mTranslationX.size() - 1) + (transX * -speed)));
mTranslationZ.set(mTranslationZ.size() - 1, (float) (mTranslationZ.get(mTranslationZ.size() - 1) + (transY * -speed)));
} else if (angle / 90d < 2) //Quadrant 2
{
angle -= 90d;
double transX = (distanceX * (angle / 90d)) + (distanceY * ((90d - angle) / 90d));
double transY = (-distanceX * ((90d - angle) / 90d)) + (distanceY * (angle / 90d));
// showSnackbarMessage("ANGLE: " + angle + ", distanceX: " + distanceX + ", distanceY: " + distanceY, false);
mTranslationX.set(mTranslationX.size() - 1, (float) (mTranslationX.get(mTranslationX.size() - 1) + (transX * speed)));
mTranslationZ.set(mTranslationZ.size() - 1, (float) (mTranslationZ.get(mTranslationZ.size() - 1) + (transY * speed)));
} else if (angle / 90d < 3) //Quadrant 3
{
angle -= 180d;
double transX = (distanceY * (angle / 90d)) + (-distanceX * ((90d - angle) / 90d));
double transY = (-distanceY * ((90d - angle) / 90d)) + (-distanceX * (angle / 90d));
// showSnackbarMessage("ANGLE: " + angle + ", distanceX: " + distanceX + ", distanceY: " + distanceY, false);
mTranslationX.set(mTranslationX.size() - 1, (float) (mTranslationX.get(mTranslationX.size() - 1) + (transX * -speed)));
mTranslationZ.set(mTranslationZ.size() - 1, (float) (mTranslationZ.get(mTranslationZ.size() - 1) + (transY * -speed)));
} else //Quadrant 4
{
angle -= 270d;
double transX = (-distanceX * (angle / 90d)) + (-distanceY * ((90d - angle) / 90d));
double transY = (distanceX * ((90d - angle) / 90d)) + (-distanceY * (angle / 90d));
// showSnackbarMessage("ANGLE: " + angle + ", distanceX: " + distanceX + ", distanceY: " + distanceY, false);
mTranslationX.set(mTranslationX.size() - 1, (float) (mTranslationX.get(mTranslationX.size() - 1) + (transX * speed)));
mTranslationZ.set(mTranslationZ.size() - 1, (float) (mTranslationZ.get(mTranslationZ.size() - 1) + (transY * speed)));
}
return true;
}
return false;
}
@Override
public boolean onDown(MotionEvent e) {
return true;
}
});
surfaceView.setOnTouchListener(
new View.OnTouchListener() {
@Override
public boolean onTouch(View v, MotionEvent event) {
boolean retVal = mScaleDetector.onTouchEvent(event);
if (retVal)
mRotationDetector.onTouchEvent(event);
// Dispatch to the tap/scroll detector exactly once per event.
return gestureDetector.onTouchEvent(event) || retVal;
}
});
// Set up renderer.
surfaceView.setPreserveEGLContextOnPause(true);
surfaceView.setEGLContextClientVersion(2);
surfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0); // Alpha used for plane blending.
surfaceView.setRenderer(this);
surfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
installRequested = false;
}
private double findCameraAngleFromOrigin() {
double angle = getDegree(mCurrentCameraMatrix[2], mCurrentCameraMatrix[0]) -
getDegree(mOriginCameraMatrix[2], mOriginCameraMatrix[0]);
if (angle < 0)
return angle + 360;
return angle;
}
private double getDegree(double value1, double value2) {
double firstAngle = value1 * 90;
double secondAngle = value2 * 90;
if (secondAngle >= 0 && firstAngle >= 0) {
return firstAngle; // first quadrant
} else if (secondAngle < 0 && firstAngle >= 0) {
return 90 + (90 - firstAngle); //second quadrant
} else if (secondAngle < 0 && firstAngle < 0) {
return 180 - firstAngle; //third quadrant
} else {
return 270 + (90 + firstAngle); //fourth quadrant
}
}
@Override
protected void onResume() {
super.onResume();
if (session == null) {
Exception exception = null;
String message = null;
try {
switch (ArCoreApk.getInstance().requestInstall(this, !installRequested)) {
case INSTALL_REQUESTED:
installRequested = true;
return;
case INSTALLED:
break;
}
// ARCore requires camera permissions to operate. If we did not yet obtain runtime
// permission on Android M and above, now is a good time to ask the user for it.
if (!CameraPermissionHelper.hasCameraPermission(this)) {
CameraPermissionHelper.requestCameraPermission(this);
return;
}
session = new Session(/* context= */ this);
} catch (UnavailableArcoreNotInstalledException
| UnavailableUserDeclinedInstallationException e) {
message = "Please install ARCore";
exception = e;
} catch (UnavailableApkTooOldException e) {
message = "Please update ARCore";
exception = e;
} catch (UnavailableSdkTooOldException e) {
message = "Please update this app";
exception = e;
} catch (Exception e) {
message = "This device does not support AR";
exception = e;
}
if (message != null) {
showSnackbarMessage(message, true);
Log.e(TAG, "Exception creating session", exception);
return;
}
// Create default config and check if supported.
Config config = new Config(session);
if (!session.isSupported(config)) {
showSnackbarMessage("This device does not support AR", true);
}
session.configure(config);
}
showLoadingMessage();
// Note that order matters - see the note in onPause(), the reverse applies here.
session.resume();
surfaceView.onResume();
displayRotationHelper.onResume();
}
@Override
public void onPause() {
super.onPause();
if (session != null) {
// Note that the order matters - GLSurfaceView is paused first so that it does not try
// to query the session. If Session is paused before GLSurfaceView, GLSurfaceView may
// still call session.update() and get a SessionPausedException.
displayRotationHelper.onPause();
surfaceView.onPause();
session.pause();
}
}
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] results) {
if (!CameraPermissionHelper.hasCameraPermission(this)) {
Toast.makeText(this, "Camera permission is needed to run this application", Toast.LENGTH_LONG)
.show();
if (!CameraPermissionHelper.shouldShowRequestPermissionRationale(this)) {
// Permission denied with checking "Do not ask again".
CameraPermissionHelper.launchPermissionSettings(this);
}
finish();
}
}
@Override
public void onWindowFocusChanged(boolean hasFocus) {
super.onWindowFocusChanged(hasFocus);
if (hasFocus) {
// Standard Android full-screen functionality.
getWindow()
.getDecorView()
.setSystemUiVisibility(
View.SYSTEM_UI_FLAG_LAYOUT_STABLE
| View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
| View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_FULLSCREEN
| View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
}
private void onSingleTap(MotionEvent e) {
// Queue tap if there is space. Tap is lost if queue is full.
queuedSingleTaps.offer(e);
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
GLES20.glClearColor(0.1f, 0.1f, 0.1f, 1.0f);
// Create the texture and pass it to ARCore session to be filled during update().
backgroundRenderer.createOnGlThread(/*context=*/ this);
// Prepare the other rendering objects.
try {
virtualObject.createOnGlThread(/*context=*/ this, "andy.obj", "andy.png");
virtualObject.setMaterialProperties(0.0f, 3.5f, 1.0f, 6.0f);
virtualObjectShadow.createOnGlThread(/*context=*/ this, "andy_shadow.obj", "andy_shadow.png");
virtualObjectShadow.setBlendMode(BlendMode.Shadow);
virtualObjectShadow.setMaterialProperties(1.0f, 0.0f, 0.0f, 1.0f);
} catch (IOException e) {
Log.e(TAG, "Failed to read obj file");
}
try {
planeRenderer.createOnGlThread(/*context=*/ this, "trigrid.png");
} catch (IOException e) {
Log.e(TAG, "Failed to read plane texture");
}
pointCloud.createOnGlThread(/*context=*/ this);
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
displayRotationHelper.onSurfaceChanged(width, height);
GLES20.glViewport(0, 0, width, height);
mVirtualObjects.add(virtualObject);
}
@Override
public void onDrawFrame(GL10 gl) {
// Clear screen to notify driver it should not load any pixels from previous frame.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
if (session == null) {
return;
}
// Notify ARCore session that the view size changed so that the perspective matrix and
// the video background can be properly adjusted.
displayRotationHelper.updateSessionIfNeeded(session);
try {
session.setCameraTextureName(backgroundRenderer.getTextureId());
// Obtain the current frame from ARSession. When the configuration is set to
// UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
// camera framerate.
Frame frame = session.update();
Camera camera = frame.getCamera();
// Handle taps. Handling only one tap per frame, as taps are usually low frequency
// compared to frame rate.
MotionEvent tap = queuedSingleTaps.poll();
if (tap != null && camera.getTrackingState() == TrackingState.TRACKING) {
for (HitResult hit : frame.hitTest(tap)) {
// Check if any plane was hit, and if it was hit inside the plane polygon
Trackable trackable = hit.getTrackable();
// Creates an anchor if a plane or an oriented point was hit.
if ((trackable instanceof Plane && ((Plane) trackable).isPoseInPolygon(hit.getHitPose()))
|| (trackable instanceof Point
&& ((Point) trackable).getOrientationMode()
== OrientationMode.ESTIMATED_SURFACE_NORMAL)) {
// Hits are sorted by depth. Consider only closest hit on a plane or oriented point.
// Cap the number of objects created. This avoids overloading both the
// rendering system and ARCore.
if (mAnchors.size() >= 20)
{
//Alert the user that the maximum has been reached, must call from
//Handler as this is a UI action being done on a worker thread.
new Handler(Looper.getMainLooper()).post(new Runnable()
{
@Override
public void run()
{
Toast.makeText(HelloArActivity.this,
"You've reached the maximum!", Toast.LENGTH_LONG).show();
}
});
// Alternatively, you can start detaching, however, a revision to the
// mAnchorReferences and mScalingFactors should be made!
// mAnchors.get(0).detach();
// mAnchors.remove(0);
}
else
{
// Adding an Anchor tells ARCore that it should track this position in
// space. This anchor is created on the Plane to place the 3d model
// in the correct position relative both to the world and to the plane.
mScaleFactors.add(1.0f);
Anchor anchor = hit.createAnchor();
mAnchors.add(anchor);
mAnchorReferences.add(mCurrent);
camera.getDisplayOrientedPose().toMatrix(mOriginCameraMatrix, 0);
}
break;
}
}
}
// Draw background.
backgroundRenderer.draw(frame);
// If not tracking, don't draw 3d objects.
if (camera.getTrackingState() == TrackingState.PAUSED) {
return;
}
// Get projection matrix.
float[] projmtx = new float[16];
camera.getProjectionMatrix(projmtx, 0, 0.1f, 100.0f);
// Get camera matrix and draw.
float[] viewmtx = new float[16];
camera.getViewMatrix(viewmtx, 0);
// Compute lighting from average intensity of the image.
final float lightIntensity = frame.getLightEstimate().getPixelIntensity();
// Visualize tracked points.
PointCloud pointCloud = frame.acquirePointCloud();
this.pointCloud.update(pointCloud);
this.pointCloud.draw(viewmtx, projmtx);
// Application is responsible for releasing the point cloud resources after
// using it.
pointCloud.release();
// Check if we detected at least one plane. If so, hide the loading message.
if (messageSnackbar != null) {
for (Plane plane : session.getAllTrackables(Plane.class)) {
if (plane.getType() == com.google.ar.core.Plane.Type.HORIZONTAL_UPWARD_FACING
&& plane.getTrackingState() == TrackingState.TRACKING) {
hideLoadingMessage();
break;
}
}
}
camera.getDisplayOrientedPose().toMatrix(mCurrentCameraMatrix, 0);
// Visualize planes.
planeRenderer.drawPlanes(
session.getAllTrackables(Plane.class), camera.getDisplayOrientedPose(), projmtx);
// Visualize anchors created by touch.
int ac = 0;
for (Anchor anchor : mAnchors) {
if (anchor.getTrackingState() != TrackingState.TRACKING) {
continue;
}
// Get the current pose of an Anchor in world space. The Anchor pose is updated
// during calls to session.update() as ARCore refines its estimate of the world.
anchor.getPose().toMatrix(anchorMatrix, 0);
if (mScaleFactors.size() <= ac) {
mScaleFactors.add(1.0f);
}
if (mRotationThetas.size() <= ac) {
mRotationThetas.add(0.0f);
}
if (mTranslationX.size() <= ac) {
mTranslationX.add(viewmtx[3]);
}
if (mTranslationZ.size() <= ac) {
mTranslationZ.add(viewmtx[11]);
}
translateMatrix(mTranslationX.get(ac), 0, mTranslationZ.get(ac));
rotateYAxisMatrix(mRotationThetas.get(ac));
// Update and draw the model and its shadow.
ObjectRenderer virtualObject = mVirtualObjects.get(mAnchorReferences.get(ac));
virtualObject.updateModelMatrix(anchorMatrix, mScaleFactors.get(ac));
virtualObject.draw(viewmtx, projmtx, lightIntensity);
}
} catch (Throwable t) {
// Avoid crashing the application due to unhandled exceptions.
Log.e(TAG, "Exception on the OpenGL thread", t);
}
}
private void rotateYAxisMatrix(float rotationTheta) {
if (rotationTheta != 0.0f) {
anchorMatrix[0] = (float) Math.cos(rotationTheta);
anchorMatrix[2] = (float) Math.sin(rotationTheta);
anchorMatrix[5] = 1;
anchorMatrix[8] = -(float) Math.sin(rotationTheta);
anchorMatrix[10] = (float) Math.cos(rotationTheta);
anchorMatrix[15] = 1;
}
}
private void translateMatrix(float xDistance, float yDistance, float zDistance) {
Matrix.translateM(anchorMatrix, 0, xDistance, yDistance, zDistance);
}
I am developing an augmented reality application with the Rajawali library. My problem is as follows.
I want to draw a surface over the camera view. When I tried the latest version of Rajawali, it didn't work; after many days I found out that the latest version no longer supports drawing over the camera, while Rajawali v0.9 works fine. So the following question applies to v0.9.
When I register a SensorEventListener, in onSensorChanged() I get three values representing the device's orientation, but they are very noisy and unstable. I have tried implementing a low-pass filter, but it is still noisy.
Finally I found this question, but on v0.9 the
getCamera().setOrientation(quaternion)
call did not work, and I don't know why.
Now I don't know what to do next :(
Here is my code; it works very well in my project, and I hope it helps you.
// The code snippet of my renderer class
@Override
public void onRender(final long elapsedTime, final double deltaTime) {
mHeadTracker.getLastHeadView(mHeadTransform.getHeadView(), 0);
android.opengl.Matrix.invertM(mHeadTransform.getHeadView(), 0, mHeadTransform.getHeadView(), 0);
Quaternion q = mHeadTransform.getQuaternion(mHeadTransform.getHeadView(), 0);
getCurrentCamera().setOrientation(q);
super.onRender(elapsedTime, deltaTime);
}
// The code snippet of HeadTransform class
private static Quaternion sQuaternion = new Quaternion();
public Quaternion getQuaternion(float[] quaternion, int offset) {
if (offset + 4 > quaternion.length) {
throw new IllegalArgumentException(
"Not enough space to write the result");
}
float[] m = this.mHeadView;
float t = m[0] + m[5] + m[10];
float x;
float y;
float z;
float w;
float s;
if (t >= 0.0F) {
s = (float) Math.sqrt(t + 1.0F);
w = 0.5F * s;
s = 0.5F / s;
x = (m[9] - m[6]) * s;
y = (m[2] - m[8]) * s;
z = (m[4] - m[1]) * s;
} else {
if ((m[0] > m[5]) && (m[0] > m[10])) {
s = (float) Math.sqrt(1.0F + m[0] - m[5] - m[10]);
x = s * 0.5F;
s = 0.5F / s;
y = (m[4] + m[1]) * s;
z = (m[2] + m[8]) * s;
w = (m[9] - m[6]) * s;
} else {
if (m[5] > m[10]) {
s = (float) Math.sqrt(1.0F + m[5] - m[0] - m[10]);
y = s * 0.5F;
s = 0.5F / s;
x = (m[4] + m[1]) * s;
z = (m[9] + m[6]) * s;
w = (m[2] - m[8]) * s;
} else {
s = (float) Math.sqrt(1.0F + m[10] - m[0] - m[5]);
z = s * 0.5F;
s = 0.5F / s;
x = (m[2] + m[8]) * s;
y = (m[9] + m[6]) * s;
w = (m[4] - m[1]) * s;
}
}
}
quaternion[(offset + 0)] = x;
quaternion[(offset + 1)] = y;
quaternion[(offset + 2)] = z;
quaternion[(offset + 3)] = w;
Log.d("facevr", x + "," + y + "," + z + "," + w);
return sQuaternion.setAll(w, x, y, z);
}
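If the head-tracker matrix above is not available, a similar quaternion can be taken directly from Android's fused rotation-vector sensor, which is already far more stable than raw accelerometer readings. A sketch only; the field names are illustrative and the quaternion would be applied to the camera from onRender() as above:

// Sketch: derive the camera orientation from Sensor.TYPE_ROTATION_VECTOR.
private final float[] mQuat = new float[4];               // [w, x, y, z]
private final Quaternion mOrientation = new Quaternion();

@Override
public void onSensorChanged(SensorEvent event) {
    if (event.sensor.getType() == Sensor.TYPE_ROTATION_VECTOR) {
        // Converts the rotation vector into a unit quaternion, w first.
        SensorManager.getQuaternionFromVector(mQuat, event.values);
        // Rajawali's Quaternion.setAll(w, x, y, z) matches this ordering.
        mOrientation.setAll(mQuat[0], mQuat[1], mQuat[2], mQuat[3]);
    }
}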
I have drawn a Cubic Curve on canvas using
myPath.cubicTo(10, 10, w, h/2, 10, h-10);
I have four ImageViews on that screen, and I want to move those ImageViews along the drawn curve when I drag them with touch.
I have referred the links :
Move Image on Curve Path
Move object on Curve
Move imageview on curve
What I get from those is an animation that moves the image along the curve over a duration defined by t.
But I want to move the ImageView with touch, constrained to the curve.
Following is my screen:
So, I want all the (x, y) coordinates of the curve, to keep the ImageView moving on that curve only.
Otherwise, I want an equation for the curve so that I can interpolate the x value for the touched y value.
I have googled a lot but didn't succeed.
Any advice or guidance will help me a lot.
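For reference, one way to get concrete (x, y) points on the Path without re-deriving the cubic Bézier math is android.graphics.PathMeasure, which samples the curve by distance along its length. A minimal sketch (the helper name is illustrative):

// Sketch: sample the Path with PathMeasure and return the curve point whose y is
// closest to the touched y, so an ImageView can be pinned to the curve while dragging.
// Assumes android.graphics.Path, PathMeasure and PointF.
private PointF pointOnCurveForY(Path myPath, float touchedY) {
    PathMeasure measure = new PathMeasure(myPath, false);
    float length = measure.getLength();
    float[] pos = new float[2];
    PointF best = new PointF();
    float bestDiff = Float.MAX_VALUE;
    // Walk the curve in 1px steps along its length and keep the closest match.
    for (float d = 0; d <= length; d += 1f) {
        measure.getPosTan(d, pos, null);
        float diff = Math.abs(pos[1] - touchedY);
        if (diff < bestDiff) {
            bestDiff = diff;
            best.set(pos[0], pos[1]);
        }
    }
    return best;
}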
Approach
I would suggest a different approach than using a Bézier curve, as you would need to reproduce its math in order to get the positions.
By using simple trigonometry you can achieve the same visual result but in addition have full control of the positions.
Trigonometry
For example:
THIS ONLINE DEMO produces this result (a simplified version for the sake of the demo):
Define an array with the circles' angle positions instead of their x and y positions. You can filter the angles later if needed (e.g. only show angles between -90 and 90 degrees).
Using angles will make sure they stay ordered when moved.
var balls = [-90, -45, 0, 45]; // example "positions"
To replace the Bezier curve you can do this instead:
/// some setup variables
var xCenter = -80, /// X center of circle
yCenter = canvas.height * 0.5, /// Y center of circle
radius = 220, /// radius of circle
delta = 0, /// vertical mouse offset used to rotate the circles
x, y; /// to calculate line position
/// draw the circle (its center is off-canvas, so only an arc is visible)
ctx.arc(xCenter, yCenter, radius, 0, 2 * Math.PI);
ctx.stroke();
Now we can use a Y value from mouse move/touch etc. to move the circles around:
/// for demo, mousemove - adapt as needed for touch
canvas.onmousemove = function(e) {
/// get Y position which is used as delta to angle
var rect = canvas.getBoundingClientRect();
delta = e.clientY - rect.top;
/// render the circles in new positions
render();
}
The rendering iterates through the balls array and render them in their angle + delta:
for(var i = 0, angle; i < balls.length; i++) {
angle = balls[i];
pos = getPosfromAngle(angle);
/// draw circles etc. here
}
The magic function is this:
function getPosfromAngle(a) {
/// get angle from circle and add delta
var angle = Math.atan2(delta - yCenter, radius) + a * Math.PI / 180;
return [xCenter + radius * Math.cos(angle),
yCenter + radius * Math.sin(angle)];
}
radius is used as a pseudo position. You can replace this with an actual X position, but that is frankly not needed.
In this demo, to keep it simple, I have only attached mouse move. Move the mouse over the canvas to see the effect.
As this is demo code it isn't structured optimally (e.g. separate rendering of the background and the circles).
Feel free to adapt and modify it to suit your needs.
This is the code I have used to achieve this functionality, and it works perfectly for your requirement:
public class YourActivity extends Activity {
private class ButtonInfo {
public Button btnObj;
public PointF OrigPos;
public double origAngle;
public double currentAngle;
public double minAngle;
public double maxAngle;
boolean isOnClick = false;
}
private int height;
private double radius;
private PointF centerPoint;
private final int NUM_BUTTONS = 4;
private final int FIRST_INDEX = 0;
private final int SECOND_INDEX = 1;
private final int THIRD_INDEX = 2;
private final int FORTH_INDEX = 3;
private final String FIRST_TAG = "FIRST_BUTTON";
private final String SECOND_TAG = "SECOND_BUTTON";
private final String THIRD_TAG = "THIRD_BUTTON";
private final String FORTH_TAG = "FORTH_BUTTON";
private boolean animInProgress = false;
private int currentButton = -1;
private ButtonInfo[] buttonInfoArray = new ButtonInfo[NUM_BUTTONS];
private int curveImageResource = -1;
private RelativeLayout parentContainer;
private int slop;
private boolean initFlag = false;
private int touchDownY = -1;
private int touchDownX = -1;
private int animCount;
private Context context;
@SuppressWarnings("deprecation")
@SuppressLint("NewApi")
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
overridePendingTransition(R.anim.fadeinleft, R.anim.fadeoutleft);
// hide action bar in view
this.requestWindowFeature(Window.FEATURE_NO_TITLE);
Thread.setDefaultUncaughtExceptionHandler(
new MyDefaultExceptionHandler(this, getLocalClassName()));
setContentView(R.layout.your_layout);
context = this;
final ImageView curve_image = (ImageView) findViewById(R.id.imageView1);
parentContainer = (RelativeLayout) findViewById(R.id.llView);
// Set buttons on their location
for (int i = 0; i < NUM_BUTTONS; i++) {
buttonInfoArray[i] = new ButtonInfo();
}
Button img1 = (Button) findViewById(R.id.button_option1);
Button img2 = (Button) findViewById(R.id.button_option2);
Button img3 = (Button) findViewById(R.id.button_option3);
Button img4 = (Button) findViewById(R.id.button_option4);
//1st button
buttonInfoArray[FIRST_INDEX].btnObj = (Button) this
.findViewById(R.id.setting_button_option);
buttonInfoArray[FIRST_INDEX].btnObj.setTag(FIRST_TAG);
// 2nd button
buttonInfoArray[SECOND_INDEX].btnObj = (Button) this
.findViewById(R.id.scanning_button_option);
buttonInfoArray[SECOND_INDEX].btnObj.setTag(SECOND_TAG);
// 3rd button
buttonInfoArray[THIRD_INDEX].btnObj = (Button) this
.findViewById(R.id.manual_button_option);
buttonInfoArray[THIRD_INDEX].btnObj.setTag(THIRD_TAG);
// 4th button
buttonInfoArray[FORTH_INDEX].btnObj = (Button) this
.findViewById(R.id.logout_button_option);
buttonInfoArray[FORTH_INDEX].btnObj.setTag(FORTH_TAG);
for (ButtonInfo currentButtonInfo : buttonInfoArray) {
currentButtonInfo.btnObj.setClickable(false);
}
for (ButtonInfo currentButtonInfo : buttonInfoArray) {
currentButtonInfo.btnObj.bringToFront();
}
DisplayMetrics metrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metrics);
ViewTreeObserver vtoLayout = parentContainer.getViewTreeObserver();
vtoLayout.addOnGlobalLayoutListener(new OnGlobalLayoutListener() {
@Override
public void onGlobalLayout() {
if (initFlag == true)
return;
centerPoint = new PointF(0, (parentContainer.getHeight()) / 2);
curve_image.setImageResource(curveImageResource);
ViewTreeObserver vtoCurveImage = curve_image
.getViewTreeObserver();
vtoCurveImage
.addOnGlobalLayoutListener(new OnGlobalLayoutListener() {
@Override
public void onGlobalLayout() {
if (initFlag == true)
return;
ViewConfiguration vc = ViewConfiguration.get(parentContainer
.getContext());
slop = vc.getScaledTouchSlop();
parentContainer.setOnTouchListener(tlobj);
height = curve_image.getMeasuredHeight();
curve_image.getMeasuredWidth();
radius = (height / 2);
double angleDiff = Math.PI / (NUM_BUTTONS + 1);
double initialAngle = (Math.PI / 2 - angleDiff);
for (ButtonInfo currentButtonInfo : buttonInfoArray) {
currentButtonInfo.origAngle = initialAngle;
initialAngle -= angleDiff;
}
double tempCurrentAngle;
double maxAngle = (-1 * Math.PI / 2);
tempCurrentAngle = maxAngle;
for (int i = NUM_BUTTONS - 1; i >= 0; i--) {
buttonInfoArray[i].maxAngle = tempCurrentAngle;
int buttonHeight = buttonInfoArray[i].btnObj
.getHeight();
if (buttonHeight < 30) {
buttonHeight = 80;
}
tempCurrentAngle = findNextMaxAngle(
tempCurrentAngle,
(buttonHeight + 5));
}
double minAngle = (Math.PI / 2);
tempCurrentAngle = minAngle;
for (int i = 0; i < NUM_BUTTONS; i++) {
buttonInfoArray[i].minAngle = tempCurrentAngle;
int buttonHeight = buttonInfoArray[i].btnObj
.getHeight();
if (buttonHeight < 30) {
buttonHeight = 80;
}
tempCurrentAngle = findNextMinAngle(
tempCurrentAngle, (buttonHeight + 5));
}
for (ButtonInfo currentButtonInfo : buttonInfoArray) {
PointF newPos = getPointByAngle(currentButtonInfo.origAngle);
currentButtonInfo.OrigPos = newPos;
currentButtonInfo.currentAngle = currentButtonInfo.origAngle;
setTranslationX(
currentButtonInfo.btnObj,
(int) currentButtonInfo.OrigPos.x - 50);
setTranslationY(
currentButtonInfo.btnObj,
(int) currentButtonInfo.OrigPos.y - 50);
currentButtonInfo.btnObj.requestLayout();
}
initFlag = true;
}
});
}
});
}
/**
* Find next max angle
* @param inputAngle
* @param yDist
* @return
*/
private double findNextMaxAngle(double inputAngle, int yDist) {
float initYPos = (float) (centerPoint.y - (Math.sin(inputAngle) * radius));
float finalYPos = initYPos - yDist;
float finalXPos = getXPos(finalYPos);
double newAngle = getNewAngle(new PointF(finalXPos, finalYPos));
return newAngle;
}
/**
* Find next min angle
* @param inputAngle
* @param yDist
* @return
*/
private double findNextMinAngle(double inputAngle, int yDist) {
float initYPos = (int) (centerPoint.y - (Math.sin(inputAngle) * radius));
float finalYPos = initYPos + yDist;
float finalXPos = getXPos(finalYPos);
double newAngle = getNewAngle(new PointF(finalXPos, finalYPos));
return newAngle;
}
/**
* Apply reset transformation when the user releases touch
* @param buttonInfoObj
*/
public void applyResetAnimation(final ButtonInfo buttonInfoObj) {
ValueAnimator animator = ValueAnimator.ofFloat(0, 1); // values from 0
// to 1
animator.setDuration(1000); // 1 second duration from 0 to 1
animator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(ValueAnimator animation) {
float value = ((Float) (animation.getAnimatedValue()))
.floatValue();
// Set translation of your view here. Position can be calculated
// out of value. This code should move the view in a half
// circle.
double effectiveAngle = buttonInfoObj.origAngle
+ ((buttonInfoObj.currentAngle - buttonInfoObj.origAngle) * (1.0 - value));
PointF newPos = getPointByAngle(effectiveAngle);
setTranslationX(buttonInfoObj.btnObj, newPos.x - 50);
setTranslationY(buttonInfoObj.btnObj, newPos.y - 50);
}
});
animator.addListener(new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(Animator animation) {
animCount++;
if (animCount == NUM_BUTTONS) {
animCount = 0;
currentButton = -1;
animInProgress = false;
for (ButtonInfo currentButtonInfo : buttonInfoArray) {
setTranslationX(currentButtonInfo.btnObj,
currentButtonInfo.OrigPos.x - 50);
setTranslationY(currentButtonInfo.btnObj,
currentButtonInfo.OrigPos.y - 50);
currentButtonInfo.isOnClick = false;
currentButtonInfo.currentAngle = currentButtonInfo.origAngle;
currentButtonInfo.btnObj.setPressed(false);
currentButtonInfo.btnObj.requestLayout();
}
}
}
});
animator.start();
}
/**
* On Touch start animation
*/
private OnTouchListener tlobj = new OnTouchListener() {
@Override
public boolean onTouch(View v, MotionEvent motionEvent) {
switch (MotionEventCompat.getActionMasked(motionEvent)) {
case MotionEvent.ACTION_MOVE:
if (currentButton < 0) {
return false;
}
if (animInProgress == true) {
return true;
}
float delta_y = motionEvent.getRawY() - touchDownY;
float delta_x = motionEvent.getRawX() - touchDownX;
updateButtonPos(new PointF((int) delta_x, (int) delta_y));
if (Math.abs(delta_x) > slop || Math.abs(delta_y) > slop) {
buttonInfoArray[currentButton].isOnClick = false;
parentContainer.requestDisallowInterceptTouchEvent(true);
}
return true;
case MotionEvent.ACTION_UP:
animCount = 0;
if (currentButton < 0) {
return false;
}
if(animInProgress == true) {
return true;
}
animInProgress = true;
for (ButtonInfo currentButtonInfo : buttonInfoArray) {
applyResetAnimation(currentButtonInfo);
if (currentButtonInfo.isOnClick) {
// TODO onClick code
String currentTag = (String) currentButtonInfo.btnObj.getTag();
if(currentTag.equalsIgnoreCase(FIRST_TAG)) {
//handle first button click
} else if(currentTag.equalsIgnoreCase(SECOND_TAG)) {
//handle second button click
} else if(currentTag.equalsIgnoreCase(THIRD_TAG)) {
//handle third button click
} else if(currentTag.equalsIgnoreCase(FORTH_TAG)) {
//handle forth button click
}
}
}
return true;
case MotionEvent.ACTION_DOWN:
if (currentButton >= 0) {
return false;
}
if (animInProgress == true) {
return true;
}
animCount = 0;
int buttonIndex = 0;
for (buttonIndex = 0; buttonIndex < NUM_BUTTONS; buttonIndex++) {
final ButtonInfo currentButtonInfo = buttonInfoArray[buttonIndex];
if (isRectHit(currentButtonInfo.btnObj, motionEvent,
currentButtonInfo.OrigPos)) {
currentButton = buttonIndex;
touchDownX = (int) motionEvent.getRawX();
touchDownY = (int) motionEvent.getRawY();
currentButtonInfo.isOnClick = true;
currentButtonInfo.btnObj.setPressed(true);
break;
}
}
if (buttonIndex == NUM_BUTTONS) {
currentButton = -1;
}
break;
default:
break;
}
return false;
}
};
/**
* Get the X position on the circle for a given Y
* @param yPos
* @return
*/
public float getXPos(float yPos) {
float xPos = (float) (centerPoint.x
+ Math.sqrt((radius * radius)
- ((yPos - centerPoint.y) * (yPos - centerPoint.y))));
return xPos;
}
/**
* Get the Y position on the circle based on X
* @param xPos
* @param isPositive
* @return
*/
public float getYPos(float xPos, boolean isPositive) {
if (isPositive)
return (float) (centerPoint.y - Math.sqrt((radius * radius)
- ((xPos - centerPoint.x) * (xPos - centerPoint.x))));
else
return (float) (centerPoint.y + Math.sqrt((radius * radius)
- ((xPos - centerPoint.x) * (xPos - centerPoint.x))));
}
/**
* Get the new angle for a given point
* @param newPoint
* @return
*/
private double getNewAngle(PointF newPoint) {
double deltaY = newPoint.y - centerPoint.y;
double deltaX = newPoint.x - centerPoint.x;
double newPointAngle = Math.atan(-1.0 * deltaY / deltaX);
return newPointAngle;
}
/**
* Get the point on the circle for a given angle
* @param angle
* @return
*/
private PointF getPointByAngle(double angle) {
PointF newPos;
double newX = centerPoint.x + Math.cos(angle) * radius;
double newY = (centerPoint.y) - (Math.sin(angle) * radius);
newPos = new PointF((int) newX, (int) newY);
return newPos;
}
/**
* Set new location for passed button
* @param currentButtonIndex
* @param effectiveDelta
* @param percentageCompleted
* @return
*/
private double updateControl(int currentButtonIndex, PointF effectiveDelta,
double percentageCompleted) {
PointF newPos = new PointF();
StringBuilder s1 = new StringBuilder();
double maxAngleForCurrentButton = buttonInfoArray[currentButtonIndex].maxAngle;
double minAngleForCurrentButton = buttonInfoArray[currentButtonIndex].minAngle;
double targetAngleForCurrentButton;
if (effectiveDelta.y > 0) {
targetAngleForCurrentButton = maxAngleForCurrentButton;
} else {
targetAngleForCurrentButton = minAngleForCurrentButton;
}
if (percentageCompleted == -1) {
boolean isYDisplacement = effectiveDelta.y > effectiveDelta.x ? true
: false;
isYDisplacement = true;
if (isYDisplacement) {
float newY = buttonInfoArray[currentButtonIndex].OrigPos.y
+ effectiveDelta.y;
if (newY > (centerPoint.y) + (int) radius) {
newY = (centerPoint.y) + (int) radius;
} else if (newY < (centerPoint.y) - (int) radius) {
newY = (centerPoint.y) - (int) radius;
}
float newX = getXPos(newY);
newPos = new PointF(newX, newY);
s1.append("isYDisplacement true : ");
}
} else {
double effectiveAngle = buttonInfoArray[currentButtonIndex].origAngle
+ ((targetAngleForCurrentButton - buttonInfoArray[currentButtonIndex].origAngle) * percentageCompleted);
newPos = getPointByAngle(effectiveAngle);
s1.append("percentage completed : " + percentageCompleted + " : "
+ effectiveAngle);
}
double newAngle = getNewAngle(newPos);
// For angle, reverse condition, because in 1st quarter, it is +ve, in
// 4th quarter, it is -ve.
if (newAngle < maxAngleForCurrentButton) {
newAngle = maxAngleForCurrentButton;
newPos = getPointByAngle(newAngle);
s1.append("max angle : " + newAngle);
}
if (newAngle > minAngleForCurrentButton) {
newAngle = minAngleForCurrentButton;
newPos = getPointByAngle(newAngle);
s1.append("min angle : " + newAngle);
}
setTranslationX(buttonInfoArray[currentButtonIndex].btnObj,
newPos.x - 50);
setTranslationY(buttonInfoArray[currentButtonIndex].btnObj,
newPos.y - 50);
return newAngle;
}
/**
* Set button Position
* @param deltaPoint
*/
public void updateButtonPos(PointF deltaPoint) {
for (int buttonIndex = 0; buttonIndex < NUM_BUTTONS; buttonIndex++) {
if (currentButton == buttonIndex) {
buttonInfoArray[buttonIndex].currentAngle = updateControl(
buttonIndex, deltaPoint, -1);
double targetAngleForCurrentButton;
if (deltaPoint.y > 0) {
targetAngleForCurrentButton = buttonInfoArray[buttonIndex].maxAngle;
} else {
targetAngleForCurrentButton = buttonInfoArray[buttonIndex].minAngle;
}
double percentageCompleted = (1.0 * (buttonInfoArray[buttonIndex].currentAngle - buttonInfoArray[buttonIndex].origAngle))
/ (targetAngleForCurrentButton - buttonInfoArray[buttonIndex].origAngle);
for (int innerButtonIndex = 0; innerButtonIndex < NUM_BUTTONS; innerButtonIndex++) {
if (innerButtonIndex == buttonIndex)
continue;
buttonInfoArray[innerButtonIndex].currentAngle = updateControl(
innerButtonIndex, deltaPoint, percentageCompleted);
}
break;
}
}
}
/**
* Find whether the touch is inside the button's rectangle or not
* @param v
* @param rect
*/
private static void getHitRect(View v, Rect rect) {
rect.left = (int) com.nineoldandroids.view.ViewHelper.getX(v);
rect.top = (int) com.nineoldandroids.view.ViewHelper.getY(v);
rect.right = rect.left + v.getWidth();
rect.bottom = rect.top + v.getHeight();
}
private boolean isRectHit(View viewObj, MotionEvent motionEvent,
PointF viewOrigPos) {
Rect outRect = new Rect();
int x = (int) motionEvent.getX();
int y = (int) motionEvent.getY();
getHitRect(viewObj, outRect);
if (outRect.contains(x, y)) {
return true;
} else {
return false;
}
}
/**
* On Finish update transition
*/
@Override
public void finish() {
super.finish();
overridePendingTransition(R.anim.activityfinishin, R.anim.activityfinishout);
}
/**
* On Native Back Pressed
*/
@Override
public void onBackPressed() {
super.onBackPressed();
finish();
}
}
How can we convert an int value to an angle?
int speed = remoteService.getSpeed();
I am getting the speed value from a remote service and I want to convert it to an angle.
How can I do this? Any ideas?
public void getGenginePos(int state,float force, double AOD){
double AODrad=(AOD*0.017444);
switch(state){
case BOAT_IDLE:
//System.out.println("Before Vx = " + vx + ", vy = " + vy + ", f = " + force + ", AOD = " + AOD);
vx = (float)(force * Math.cos(AODrad));
px = px + (vx * dt);
vy = (float) (force * Math.sin(AODrad));
//System.out.println("After Vx = " + vx + ", vy = " + vy);
py = py - (vy * dt);
break;
case BOAT_ACCEL:
temp = force *dt;
vx = (float) (force * Math.cos(AODrad) + temp);//(force * dt));
vy = (float) (force * Math.sin(AODrad) + temp);//(force * dt));
px = px + (vx * dt);
py = py - (vy * dt);
break;
case BOAT_DECEL:
temp = force *dt;
vx = (float) (force * Math.cos(AODrad) - temp);//(force * dt));
vy = (float) (force * Math.sin(AODrad) - temp);//(force * dt));
px = px + (vx * dt);
py = py - (vy * dt);
break;
default: break;
}
}
public void setMeterPos(int rpx,int rpy,int epx,int epy){
RefX= rpx;
RefY= rpy;
EndX = epx;
EndY = epy;
screenwidth=BoatRider.screenWidth;
screenheight=BoatRider.screenHeight;
}
public void setArrowEndX(int x){
EndX = x;
}
public void setArrowEndY(int y){
EndY = y;
}
public float getArrowEndX(){
return EndX;
}
public float getArrowEndY(){
return EndY;
}
public void getGMeterArrowPos(double AOD,float radius){
double AODrad=(AOD*0.017444);
vx=(float)(radius*Math.cos(AODrad));
vy=(float)(radius*Math.sin(AODrad));
float height=screenheight-RefY;
EndX = vx+RefX;
EndY=screenheight-(vy+height);
}
public float getBorderEndX(){
return EndX;
}
public float getBorderEndY(){
return EndY;
}
public void getGBoatBorderPos(double AOD,float radius,float boderRefX,float boderRefY){
double AODrad=(AOD*0.017444);
vx=(float)(radius*Math.cos(AODrad));
vy=(float)(radius*Math.sin(AODrad));
float height=screenheight-boderRefY;
EndX = vx+boderRefX;
EndY=screenheight-(vy+height);
}
}
Any help will be very useful.
int speed = 90;
double degrees = speed;
double angle = degrees * 2 * Math.PI / 360.0;
Or you can use
int speed = 30;
double degrees = speed;
double radians = Math.toRadians(degrees);
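If the speed has to drive a gauge needle, a plain linear mapping from the speed range onto the needle's sweep is usually the missing step; the resulting degrees can then be converted to radians as above. A minimal sketch (the ranges are assumptions, and the getGMeterArrowPos() call refers to the method in the question):

// Sketch: map a speed in [0, maxSpeed] onto a needle sweep of [startDeg, endDeg].
private double speedToAngleDegrees(int speed) {
    final double maxSpeed = 180.0;   // assumed full-scale speed
    final double startDeg = -135.0;  // assumed needle angle at speed 0
    final double endDeg = 135.0;     // assumed needle angle at full scale
    double clamped = Math.max(0, Math.min(speed, maxSpeed));
    return startDeg + (clamped / maxSpeed) * (endDeg - startDeg);
}

// Usage with the question's meter code:
// getGMeterArrowPos(speedToAngleDegrees(remoteService.getSpeed()), radius);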