Android device orientation without a geomagnetic sensor

I need to get the device orientation. As far as I know, the usual approach uses the TYPE_ACCELEROMETER and TYPE_MAGNETIC_FIELD sensors. My problem is that SensorManager.getDefaultSensor returns null for the geomagnetic sensor. It returns null for the TYPE_ORIENTATION sensor too.
manager = (SensorManager) getSystemService(SENSOR_SERVICE);
Sensor sensorAcc = manager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER), //normal object
sensorMagn = manager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD); //null
orientationListener = new OrientationSensorListener();
manager.registerListener(orientationListener, sensorAcc, 10);
manager.registerListener(orientationListener, sensorMagn, 10);
I need another way to get the device orientation.

Orientation can be decomposed into three Euler angles: pitch, roll and azimuth.
With accelerometer data alone, you cannot compute your azimuth, nor the sign of your pitch.
You can try something like this to learn what you can about your pitch and roll:
private final float[] mMagnet = new float[3]; // magnetic field vector
private final float[] mAcceleration = new float[3]; // accelerometer vector
private final float[] mAccMagOrientation = new float[3]; // orientation angles from mAcceleration and mMagnet
private float[] mRotationMatrix = new float[9]; // accelerometer and magnetometer based rotation matrix
public void onSensorChanged(SensorEvent event) {
switch (event.sensor.getType()) {
case Sensor.TYPE_ACCELEROMETER:
System.arraycopy(event.values, 0, mAcceleration, 0, 3); // save data
calculateAccMagOrientation(); // then calculate new orientation
break;
case Sensor.TYPE_MAGNETIC_FIELD:
System.arraycopy(event.values, 0, mMagnet, 0, 3); // save data
break;
default: break;
}
}
public void calculateAccMagOrientation() {
if (SensorManager.getRotationMatrix(mRotationMatrix, null, mAcceleration, mMagnet))
SensorManager.getOrientation(mRotationMatrix, mAccMagOrientation);
else { // most likely there is no magnetometer data
double gx, gy, gz;
gx = mAcceleration[0] / 9.81f;
gy = mAcceleration[1] / 9.81f;
gz = mAcceleration[2] / 9.81f;
// http://theccontinuum.com/2012/09/24/arduino-imu-pitch-roll-from-accelerometer/
float pitch = (float) -Math.atan(gy / Math.sqrt(gx * gx + gz * gz));
float roll = (float) -Math.atan(gx / Math.sqrt(gy * gy + gz * gz));
float azimuth = 0; // Impossible to guess
mAccMagOrientation[0] = azimuth;
mAccMagOrientation[1] = pitch;
mAccMagOrientation[2] = roll;
mRotationMatrix = getRotationMatrixFromOrientation(mAccMagOrientation);
}
}
public static float[] getRotationMatrixFromOrientation(float[] o) {
float[] xM = new float[9];
float[] yM = new float[9];
float[] zM = new float[9];
float sinX = (float) Math.sin(o[1]);
float cosX = (float) Math.cos(o[1]);
float sinY = (float) Math.sin(o[2]);
float cosY = (float) Math.cos(o[2]);
float sinZ = (float) Math.sin(o[0]);
float cosZ = (float) Math.cos(o[0]);
// rotation about x-axis (pitch)
xM[0] = 1.0f;xM[1] = 0.0f;xM[2] = 0.0f;
xM[3] = 0.0f;xM[4] = cosX;xM[5] = sinX;
xM[6] = 0.0f;xM[7] =-sinX;xM[8] = cosX;
// rotation about y-axis (roll)
yM[0] = cosY;yM[1] = 0.0f;yM[2] = sinY;
yM[3] = 0.0f;yM[4] = 1.0f;yM[5] = 0.0f;
yM[6] =-sinY;yM[7] = 0.0f;yM[8] = cosY;
// rotation about z-axis (azimuth)
zM[0] = cosZ;zM[1] = sinZ;zM[2] = 0.0f;
zM[3] =-sinZ;zM[4] = cosZ;zM[5] = 0.0f;
zM[6] = 0.0f;zM[7] = 0.0f;zM[8] = 1.0f;
// rotation order is y, x, z (roll, pitch, azimuth)
float[] resultMatrix = matrixMultiplication(xM, yM);
resultMatrix = matrixMultiplication(zM, resultMatrix);
return resultMatrix;
}
public static float[] matrixMultiplication(float[] A, float[] B) {
float[] result = new float[9];
result[0] = A[0] * B[0] + A[1] * B[3] + A[2] * B[6];
result[1] = A[0] * B[1] + A[1] * B[4] + A[2] * B[7];
result[2] = A[0] * B[2] + A[1] * B[5] + A[2] * B[8];
result[3] = A[3] * B[0] + A[4] * B[3] + A[5] * B[6];
result[4] = A[3] * B[1] + A[4] * B[4] + A[5] * B[7];
result[5] = A[3] * B[2] + A[4] * B[5] + A[5] * B[8];
result[6] = A[6] * B[0] + A[7] * B[3] + A[8] * B[6];
result[7] = A[6] * B[1] + A[7] * B[4] + A[8] * B[7];
result[8] = A[6] * B[2] + A[7] * B[5] + A[8] * B[8];
return result;
}

To get the angle of rotation when the device is not flat, implement OrientationEventListener; onOrientationChanged() gives you the device orientation in degrees. See https://developer.android.com/reference/android/view/OrientationEventListener.html for details. A minimal sketch follows.
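As an illustration only (the listener variable and log tag are my own names, and this assumes you are inside an Activity), such a listener could look like this:

OrientationEventListener listener =
        new OrientationEventListener(this, SensorManager.SENSOR_DELAY_NORMAL) {
    @Override
    public void onOrientationChanged(int orientation) {
        // ORIENTATION_UNKNOWN is reported when the device is too flat to tell
        if (orientation == OrientationEventListener.ORIENTATION_UNKNOWN) return;
        // orientation is 0..359 degrees from the device's natural position
        Log.d("Orientation", "degrees from natural position: " + orientation);
    }
};
if (listener.canDetectOrientation()) listener.enable(); // call disable() in onPause()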

I did something like this:
public class MainActivity extends AppCompatActivity
{
SensorManager sensorManager;
Sensor sensor;
ImageView imageViewProtractorPointer;
/////////////////////////////////////
///////////// onResume //////////////
/////////////////////////////////////
@Override
protected void onResume()
{
super.onResume();
// register sensor listener again if return to application
if(sensor !=null) sensorManager.registerListener(sensorListener,sensor,SensorManager.SENSOR_DELAY_NORMAL);
}
/////////////////////////////////////
///////////// onCreate //////////////
/////////////////////////////////////
@Override
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
imageViewProtractorPointer = (ImageView)findViewById(R.id.imageView2);
// get the SensorManager
sensorManager = (SensorManager)getSystemService(Context.SENSOR_SERVICE);
// get the Sensor ACCELEROMETER
sensor = sensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
}
/////////////////////////////////////
///////////// onPause ///////////////
/////////////////////////////////////
@Override
protected void onPause()
{
// Unregister the sensor listener to prevent battery drain if not in use
super.onPause();
if(sensor !=null) sensorManager.unregisterListener(sensorListener);
}
/////////////////////////////////////////////
/////////// SensorEventListener /////////////
/////////////////////////////////////////////
SensorEventListener sensorListener = new SensorEventListener()
{
@Override
public void onSensorChanged(SensorEvent sensorEvent)
{
// I will use values from 0 to 9, without decimals
int x = (int)sensorEvent.values[0];
int y = (int)sensorEvent.values[1];
int angle = 0;
if(y>=0 && x<=0) angle = x*10;
if(x<=0 && y<=0) angle = (y*10)-90;
if(x>=0 && y<=0) angle = (-x*10)-180;
if(x>=0 && y>=0) angle = (-y*10)-270;
imageViewProtractorPointer.setRotation((float)angle);
}
@Override
public void onAccuracyChanged(Sensor sensor, int i){}
};
}
If you want to understand my if statements, see this image.
For my use I lock the screen in portrait mode, and I use two images to show the angle on screen; this is my screenshot.
I still have to make it a little better; I just haven't had enough time.
I hope this helps. If you need the full code, let me know.

You have to add some permissions to the manifest. The docs state:
the default sensor matching the requested type and wakeUp properties if one exists and the application has the necessary permissions, or null otherwise
I know it sounds counterintuitive, but apparently the permissions you need are:
<uses-permission android:name="android.permission.ACCESS_COARSE_LOCATION"/>
<uses-permission android:name="android.permission.ACCESS_FINE_LOCATION"/>
<uses-permission android:name="android.permission.ACCESS_MOCK_LOCATION"/>
<uses-permission android:name="android.permission.INTERNET"/>
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE"/>
(or a sub-set of those).
See here: manifest.xml when using sensors

https://developer.android.com/guide/topics/sensors/sensors_position.html
https://developer.android.com/guide/topics/sensors/sensors_motion.html
https://developer.android.com/guide/topics/sensors/sensors_overview.html
Which sensor for rotating android phone?

You can try the GEOMAGNETIC_ROTATION_VECTOR like this:
private SensorManager mSensorManager;
private Sensor mSensor;
...
mSensorManager = (SensorManager) getSystemService(Context.SENSOR_SERVICE);
mSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_GEOMAGNETIC_ROTATION_VECTOR);
And compute the sensor info with this:
...
// Rotation matrix based on current readings from accelerometer and magnetometer.
final float[] rotationMatrix = new float[9];
SensorManager.getRotationMatrix(rotationMatrix, null, accelerometerReading, magnetometerReading);
// Express the updated rotation matrix as three orientation angles.
final float[] orientationAngles = new float[3];
SensorManager.getOrientation(rotationMatrix, orientationAngles);
Extracted from the Android docs: https://developer.android.com/guide/topics/sensors/sensors_position.html
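The snippet above assumes accelerometerReading and magnetometerReading have already been filled in; a minimal sketch of the callbacks that would populate them (field names follow the docs) looks like this:

private final float[] accelerometerReading = new float[3];
private final float[] magnetometerReading = new float[3];

@Override
public void onSensorChanged(SensorEvent event) {
    if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
        System.arraycopy(event.values, 0, accelerometerReading, 0, 3);
    } else if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD) {
        System.arraycopy(event.values, 0, magnetometerReading, 0, 3);
    }
}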
Add the proper permissions in the manifest.
Hope this helps.

For anyone who is still confused by this problem: say you want to get the orientation of your phone (the azimuth, pitch and roll), but sometimes the magnetic field is unstable, so the orientation you get is also unstable. The answers above may help you get the pitch and roll angles, but you still cannot get the azimuth angle; they said it is impossible. Then you become desperate. So, what should you do to solve this problem?
If you only care about the orientation and you don't care about where north is, here is my suggestion: try this sensor; it worked great in my case:
TYPE_GAME_ROTATION_VECTOR
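A minimal sketch of how that might look, assuming an Activity that implements SensorEventListener (keep in mind the azimuth you get is relative to an arbitrary reference, not magnetic north):

// e.g. in onResume()
Sensor gameRotation = sensorManager.getDefaultSensor(Sensor.TYPE_GAME_ROTATION_VECTOR);
if (gameRotation != null)
    sensorManager.registerListener(this, gameRotation, SensorManager.SENSOR_DELAY_GAME);

@Override
public void onSensorChanged(SensorEvent event) {
    if (event.sensor.getType() != Sensor.TYPE_GAME_ROTATION_VECTOR) return;
    float[] rotationMatrix = new float[9];
    float[] angles = new float[3]; // azimuth (arbitrary zero), pitch, roll in radians
    SensorManager.getRotationMatrixFromVector(rotationMatrix, event.values);
    SensorManager.getOrientation(rotationMatrix, angles);
}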

Related

How to get smooth orientation data in android

I have an app which uses orientation data and works very well using the pre-API-8 method of a Sensor.TYPE_ORIENTATION sensor. Smoothing that data was relatively easy.
I am trying to update the code to avoid this deprecated approach. The new standard approach is to replace the single Sensor.TYPE_ORIENTATION with a Sensor.TYPE_ACCELEROMETER and Sensor.TYPE_MAGNETIC_FIELD combination. As that data is received, it is sent (via SensorManager.getRotationMatrix()) to SensorManager.getOrientation(). This (theoretically) returns the same information as Sensor.TYPE_ORIENTATION did (apart from different units and axis orientation).
However, this approach seems to generate data which is much more jittery (ie noisy) than the deprecated method (which still works). So, if you compare the same information on the same device, the deprecated method provides much less noisy data than the current method.
How do I get the actual same (less noisy) data that the deprecated method used to provide?
To make my question a little clearer: I have read various answers on this subject, and I have tried all sorts of filters: a simple KF / IIR low-pass as you suggest, and median filters between 5 and 19 points, but so far I have yet to get anywhere close to the smoothness of the data the phone supplies via TYPE_ORIENTATION.
Apply a low-pass filter to your sensor output.
This is my low-pass filter method:
private static final float ALPHA = 0.5f;
//lower alpha should equal smoother movement
...
private float[] applyLowPassFilter(float[] input, float[] output) {
if ( output == null ) return input;
for ( int i=0; i<input.length; i++ ) {
output[i] = output[i] + ALPHA * (input[i] - output[i]);
}
return output;
}
Apply it like so:
float[] mGravity;
float[] mGeomagnetic;
@Override
public void onSensorChanged(SensorEvent event) {
if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER)
mGravity = applyLowPassFilter(event.values.clone(), mGravity);
if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD)
mGeomagnetic = applyLowPassFilter(event.values.clone(), mGeomagnetic);
if (mGravity != null && mGeomagnetic != null) {
float R[] = new float[9];
float I[] = new float[9];
boolean success = SensorManager.getRotationMatrix(R, I, mGravity, mGeomagnetic);
if (success) {
float orientation[] = new float[3];
SensorManager.getOrientation(R, orientation);
azimuth = -orientation[0];
invalidate();
}
}
}
This is obviously code for a compass, remove what you don't need.
Also, take a look at this SE question How to implement low pass filter using java
It turns out that there is another, not particularly documented, way to get orientation data. Hidden in the list of sensor types is TYPE_ROTATION_VECTOR. So, set one up:
Sensor mRotationVectorSensor = sensorManager.getDefaultSensor(Sensor.TYPE_ROTATION_VECTOR);
sensorManager.registerListener(this, mRotationVectorSensor, SensorManager.SENSOR_DELAY_GAME);
Then:
@Override
public void onSensorChanged(SensorEvent event) {
final int eventType = event.sensor.getType();
if (eventType != Sensor.TYPE_ROTATION_VECTOR) return;
long timeNow = System.nanoTime();
float mOrientationData[] = new float[3];
calcOrientation(mOrientationData, event.values.clone());
// Do what you want with mOrientationData
}
The key mechanism is going from the incoming rotation data to an orientation vector via a rotation matrix. The slightly frustrating thing is that the orientation vector comes from quaternion data in the first place, but I can't see how to get the quaternion delivered directly. (If you ever wondered how quaternions relate to orientation and rotation information, and why they are used, see here.)
private void calcOrientation(float[] orientation, float[] incomingValues) {
// Get the quaternion
float[] quatF = new float[4];
SensorManager.getQuaternionFromVector(quatF, incomingValues);
// Get the rotation matrix
//
// This is a variant on the code presented in
// http://www.euclideanspace.com/maths/geometry/rotations/conversions/quaternionToMatrix/
// which has been altered for scaling and (I think) a different axis arrangement. It
// tells you the rotation required to get between the phone's axis
// system and the earth's.
//
// Phone axis system:
// https://developer.android.com/guide/topics/sensors/sensors_overview.html#sensors-coords
//
// Earth axis system:
// https://developer.android.com/reference/android/hardware/SensorManager.html#getRotationMatrix(float[], float[], float[], float[])
//
// Background information:
// https://en.wikipedia.org/wiki/Rotation_matrix
//
float[][] rotMatF = new float[3][3];
rotMatF[0][0] = quatF[1]*quatF[1] + quatF[0]*quatF[0] - 0.5f;
rotMatF[0][1] = quatF[1]*quatF[2] - quatF[3]*quatF[0];
rotMatF[0][2] = quatF[1]*quatF[3] + quatF[2]*quatF[0];
rotMatF[1][0] = quatF[1]*quatF[2] + quatF[3]*quatF[0];
rotMatF[1][1] = quatF[2]*quatF[2] + quatF[0]*quatF[0] - 0.5f;
rotMatF[1][2] = quatF[2]*quatF[3] - quatF[1]*quatF[0];
rotMatF[2][0] = quatF[1]*quatF[3] - quatF[2]*quatF[0];
rotMatF[2][1] = quatF[2]*quatF[3] + quatF[1]*quatF[0];
rotMatF[2][2] = quatF[3]*quatF[3] + quatF[0]*quatF[0] - 0.5f;
// Get the orientation of the phone from the rotation matrix
//
// There is some discussion of this at
// http://stackoverflow.com/questions/30279065/how-to-get-the-euler-angles-from-the-rotation-vector-sensor-type-rotation-vecto
// in particular equation 451.
//
final float rad2deg = (float)(180.0 / Math.PI);
orientation[0] = (float)Math.atan2(-rotMatF[1][0], rotMatF[0][0]) * rad2deg;
orientation[1] = (float)Math.atan2(-rotMatF[2][1], rotMatF[2][2]) * rad2deg;
orientation[2] = (float)Math.asin ( rotMatF[2][0]) * rad2deg;
if (orientation[0] < 0) orientation[0] += 360;
}
This seems to give data very similar in feel (I haven't run numeric tests) to the old TYPE_ORIENTATION data: it was usable for motion control of the device with marginal filtering.
There is also helpful information here, and a possible alternative solution here.
Here's what worked for me, using SensorManager.SENSOR_DELAY_GAME for a fast update rate:
@Override
protected void onResume() {
super.onResume();
sensor_manager.registerListener(this, sensor_manager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER), SensorManager.SENSOR_DELAY_GAME);
sensor_manager.registerListener(this, sensor_manager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD), SensorManager.SENSOR_DELAY_GAME);
}
MOVING AVERAGE
(less efficient)
private float[] gravity;
private float[] geomagnetic;
private float azimuth;
private float pitch;
private float roll;
@Override
public void onSensorChanged(SensorEvent event) {
if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER)
gravity = moving_average_gravity(event.values);
if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD)
geomagnetic = moving_average_geomagnetic(event.values);
if (gravity != null && geomagnetic != null) {
float R[] = new float[9];
float I[] = new float[9];
boolean success = SensorManager.getRotationMatrix(R, I, gravity, geomagnetic);
if (success) {
float orientation[] = new float[3];
SensorManager.getOrientation(R, orientation);
azimuth = (float) Math.toDegrees(orientation[0]);
pitch = (float) Math.toDegrees(orientation[1]);
roll = (float) Math.toDegrees(orientation[2]);
//if(roll>-46F && roll<46F)view.setTranslationX((roll/45F)*max_translation); //tilt from -45° to 45° to x-translate a view positioned centrally in a layout, from 0 - max_translation
Log.i("TAG","azimuth: "+azimuth+" | pitch: "+pitch+" | roll: "+roll);
}
}
}
private ArrayList<Float[]> moving_gravity;
private ArrayList<Float[]> moving_geomagnetic;
private static final float moving_average_size=12;//change
private float[] moving_average_gravity(float[] gravity) {
if(moving_gravity ==null){
moving_gravity =new ArrayList<>();
for (int i = 0; i < moving_average_size; i++) {
moving_gravity.add(new Float[]{0F,0F,0F});
}return new float[]{0F,0F,0F};
}
moving_gravity.remove(0);
moving_gravity.add(new Float[]{gravity[0],gravity[1],gravity[2]});
return moving_average(moving_gravity);
}
private float[] moving_average_geomagnetic(float[] geomagnetic) {
if(moving_geomagnetic ==null){
this.moving_geomagnetic =new ArrayList<>();
for (int i = 0; i < moving_average_size; i++) {
moving_geomagnetic.add(new Float[]{0F,0F,0F});
}return new float[]{0F,0F,0F};
}
moving_geomagnetic.remove(0);
moving_geomagnetic.add(new Float[]{geomagnetic[0],geomagnetic[1],geomagnetic[2]});
return moving_average(moving_geomagnetic);
}
private float[] moving_average(ArrayList<Float[]> moving_values){
float[] moving_average =new float[]{0F,0F,0F};
for (int i = 0; i < moving_average_size; i++) {
moving_average[0]+= moving_values.get(i)[0];
moving_average[1]+= moving_values.get(i)[1];
moving_average[2]+= moving_values.get(i)[2];
}
moving_average[0]= moving_average[0]/moving_average_size;
moving_average[1]= moving_average[1]/moving_average_size;
moving_average[2]= moving_average[2]/moving_average_size;
return moving_average;
}
LOW PASS FILTER
(more efficient)
private float[] gravity;
private float[] geomagnetic;
private float azimuth;
private float pitch;
private float roll;
@Override
public void onSensorChanged(SensorEvent event) {
if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER)
gravity = LPF(event.values.clone(), gravity);
if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD)
geomagnetic = LPF(event.values.clone(), geomagnetic);
if (gravity != null && geomagnetic != null) {
float R[] = new float[9];
float I[] = new float[9];
boolean success = SensorManager.getRotationMatrix(R, I, gravity, geomagnetic);
if (success) {
float orientation[] = new float[3];
SensorManager.getOrientation(R, orientation);
azimuth = (float) Math.toDegrees(orientation[0]);
pitch = (float) Math.toDegrees(orientation[1]);
roll = (float) Math.toDegrees(orientation[2]);
//if(roll>-46F && roll<46F)view.setTranslationX((roll/45F)*max_translation); //tilt from -45° to 45° to x-translate a view positioned centrally in a layout, from 0 - max_translation
Log.i("TAG","azimuth: "+azimuth+" | pitch: "+pitch+" | roll: "+roll);
}
}
}
private static final float ALPHA = 1/16F;//adjust sensitivity
private float[] LPF(float[] input, float[] output) {
if ( output == null ) return input;
for ( int i=0; i<input.length; i++ ) {
output[i] = output[i] + ALPHA * (input[i] - output[i]);
}return output;
}
N.B.
A moving average over 12 values is used instead, as per here.
A low-pass filter with ALPHA = 1/16 (0.0625) is used instead, as per here.
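If you want to pick ALPHA less arbitrarily, a rough rule of thumb (my own assumption, using the standard first-order RC filter relation and assuming SENSOR_DELAY_GAME delivers roughly 50 Hz) is:

static float alphaFor(float sampleRateHz, float cutoffHz) {
    float dt = 1f / sampleRateHz;                          // seconds per sample
    float rc = (float) (1.0 / (2.0 * Math.PI * cutoffHz)); // filter time constant
    return dt / (rc + dt);
}
// e.g. alphaFor(50f, 0.5f) ~= 0.059, in the same ballpark as the 1/16F above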

Gyroscope Issues with Device Orientation

I'm getting pitch and roll data from my device's gyroscope using this tutorial: http://www.thousand-thoughts.com/2012/03/android-sensor-fusion-tutorial/
All the readings are extremely accurate (there's a filter applied to the code in the tutorial to eliminate gyro drift). Unfortunately, the code only works when my device is placed flat on a surface that is parallel to the ground. The ideal position for my app to work would be with the top of the device pointing straight up (i.e., the device is perpendicular to the ground with the screen facing the user). Whenever I orient my device in this position, the pitch values go to +90 degrees (as expected). What I would like to do is set this position as the 0-degree point (or initial position) for my device, so that the pitch readings are 0 degrees when my device is oriented upright (in portrait mode) with the screen facing the user.
I asked the author of the tutorial for help with this issue and he responded:
"If you want to have the upright position as the initial one, you will have to rotate your frame of reference accordingly. The simplest way would be to rotate the resulting rotation matrix by -90 degrees about the x-axis. But you have to be careful about at which point in the algorithm to apply this rotation. Always remember that rotations are not commutative operations. To be more specific on this, I would have to review the code again, since I haven’t worked with it for a while now."
I'm really really confused and stumped as to how to rotate my frame of reference. I guess the bottom line is that I have no idea how to rotate the matrix by -90 degrees about the x-axis. If someone could help me out with this part, it would be fantastic. Here's my code in case anyone would like to refer to it:
public class AttitudeDisplayIndicator extends SherlockActivity implements SensorEventListener {
private SensorManager mSensorManager = null;
// angular speeds from gyro
private float[] gyro = new float[3];
// rotation matrix from gyro data
private float[] gyroMatrix = new float[9];
// orientation angles from gyro matrix
private float[] gyroOrientation = new float[3];
// magnetic field vector
private float[] magnet = new float[3];
// accelerometer vector
private float[] accel = new float[3];
// orientation angles from accel and magnet
private float[] accMagOrientation = new float[3];
// final orientation angles from sensor fusion
private float[] fusedOrientation = new float[3];
// accelerometer and magnetometer based rotation matrix
private float[] rotationMatrix = new float[9];
public static final float EPSILON = 0.000000001f;
private static final float NS2S = 1.0f / 1000000000.0f;
private float timestamp;
private boolean initState = true;
public static final int TIME_CONSTANT = 30;
public static final float FILTER_COEFFICIENT = 0.98f;
private Timer fuseTimer = new Timer();
// The following members are only for displaying the sensor output.
public Handler mHandler;
DecimalFormat d = new DecimalFormat("#.##");
//ADI background image.
private ImageView adiBackground;
//ADI axes.
private ImageView adiAxes;
//ADI frame.
private ImageView adiFrame;
//Layout.
private RelativeLayout layout;
//Pitch and Roll TextViews.
private TextView pitchAngleText;
private TextView bankAngleText;
//Instantaneous output values from sensors as the device moves.
public static double pitch;
public static double roll;
//Matrix for rotating the ADI (roll).
Matrix mMatrix = new Matrix();
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_attitude_display_indicator);
gyroOrientation[0] = 0.0f;
gyroOrientation[1] = 0.0f;
gyroOrientation[2] = 0.0f;
// initialise gyroMatrix with identity matrix
gyroMatrix[0] = 1.0f; gyroMatrix[1] = 0.0f; gyroMatrix[2] = 0.0f;
gyroMatrix[3] = 0.0f; gyroMatrix[4] = 1.0f; gyroMatrix[5] = 0.0f;
gyroMatrix[6] = 0.0f; gyroMatrix[7] = 0.0f; gyroMatrix[8] = 1.0f;
// get sensorManager and initialise sensor listeners
mSensorManager = (SensorManager) this.getSystemService(SENSOR_SERVICE);
initListeners();
// wait for one second until gyroscope and magnetometer/accelerometer
// data is initialised, then schedule the complementary filter task
fuseTimer.scheduleAtFixedRate(new calculateFusedOrientationTask(),
1000, TIME_CONSTANT);
mHandler = new Handler();
adiBackground = (ImageView) findViewById(R.id.adi_background);
adiFrame = (ImageView) findViewById(R.id.adi_frame);
adiAxes = (ImageView) findViewById(R.id.adi_axes);
layout = (RelativeLayout) findViewById(R.id.adi_layout);
new Color();
layout.setBackgroundColor(Color.rgb(150, 150, 150));
pitchAngleText = (TextView) findViewById(R.id.pitch_angle_text);
bankAngleText = (TextView) findViewById(R.id.bank_angle_text);
}
// This function registers sensor listeners for the accelerometer, magnetometer and gyroscope.
public void initListeners(){
mSensorManager.registerListener(this,
mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER),
SensorManager.SENSOR_DELAY_FASTEST);
mSensorManager.registerListener(this,
mSensorManager.getDefaultSensor(Sensor.TYPE_GYROSCOPE),
SensorManager.SENSOR_DELAY_FASTEST);
mSensorManager.registerListener(this,
mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD),
SensorManager.SENSOR_DELAY_FASTEST);
}
@Override
public void onAccuracyChanged(Sensor sensor, int accuracy) {
}
@Override
public void onSensorChanged(SensorEvent event) {
switch(event.sensor.getType()) {
case Sensor.TYPE_ACCELEROMETER:
// copy new accelerometer data into accel array and calculate orientation
System.arraycopy(event.values, 0, accel, 0, 3);
calculateAccMagOrientation();
break;
case Sensor.TYPE_GYROSCOPE:
// process gyro data
gyroFunction(event);
break;
case Sensor.TYPE_MAGNETIC_FIELD:
// copy new magnetometer data into magnet array
System.arraycopy(event.values, 0, magnet, 0, 3);
break;
}
}
// calculates orientation angles from accelerometer and magnetometer output
public void calculateAccMagOrientation() {
if(SensorManager.getRotationMatrix(rotationMatrix, null, accel, magnet)) {
SensorManager.getOrientation(rotationMatrix, accMagOrientation);
}
}
// This function is borrowed from the Android reference
// at http://developer.android.com/reference/android/hardware/SensorEvent.html#values
// It calculates a rotation vector from the gyroscope angular speed values.
private void getRotationVectorFromGyro(float[] gyroValues,
float[] deltaRotationVector,
float timeFactor)
{
float[] normValues = new float[3];
// Calculate the angular speed of the sample
float omegaMagnitude =
(float)Math.sqrt(gyroValues[0] * gyroValues[0] +
gyroValues[1] * gyroValues[1] +
gyroValues[2] * gyroValues[2]);
// Normalize the rotation vector if it's big enough to get the axis
if(omegaMagnitude > EPSILON) {
normValues[0] = gyroValues[0] / omegaMagnitude;
normValues[1] = gyroValues[1] / omegaMagnitude;
normValues[2] = gyroValues[2] / omegaMagnitude;
}
// Integrate around this axis with the angular speed by the timestep
// in order to get a delta rotation from this sample over the timestep
// We will convert this axis-angle representation of the delta rotation
// into a quaternion before turning it into the rotation matrix.
float thetaOverTwo = omegaMagnitude * timeFactor;
float sinThetaOverTwo = (float)Math.sin(thetaOverTwo);
float cosThetaOverTwo = (float)Math.cos(thetaOverTwo);
deltaRotationVector[0] = sinThetaOverTwo * normValues[0];
deltaRotationVector[1] = sinThetaOverTwo * normValues[1];
deltaRotationVector[2] = sinThetaOverTwo * normValues[2];
deltaRotationVector[3] = cosThetaOverTwo;
}
// This function performs the integration of the gyroscope data.
// It writes the gyroscope based orientation into gyroOrientation.
public void gyroFunction(SensorEvent event) {
// don't start until first accelerometer/magnetometer orientation has been acquired
if (accMagOrientation == null)
return;
// initialisation of the gyroscope based rotation matrix
if(initState) {
float[] initMatrix = new float[9];
initMatrix = getRotationMatrixFromOrientation(accMagOrientation);
float[] test = new float[3];
SensorManager.getOrientation(initMatrix, test);
gyroMatrix = matrixMultiplication(gyroMatrix, initMatrix);
initState = false;
}
// copy the new gyro values into the gyro array
// convert the raw gyro data into a rotation vector
float[] deltaVector = new float[4];
if(timestamp != 0) {
final float dT = (event.timestamp - timestamp) * NS2S;
System.arraycopy(event.values, 0, gyro, 0, 3);
getRotationVectorFromGyro(gyro, deltaVector, dT / 2.0f);
}
// measurement done, save current time for next interval
timestamp = event.timestamp;
// convert rotation vector into rotation matrix
float[] deltaMatrix = new float[9];
SensorManager.getRotationMatrixFromVector(deltaMatrix, deltaVector);
// apply the new rotation interval on the gyroscope based rotation matrix
gyroMatrix = matrixMultiplication(gyroMatrix, deltaMatrix);
// get the gyroscope based orientation from the rotation matrix
SensorManager.getOrientation(gyroMatrix, gyroOrientation);
}
private float[] getRotationMatrixFromOrientation(float[] o) {
float[] xM = new float[9];
float[] yM = new float[9];
float[] zM = new float[9];
float sinX = (float)Math.sin(o[1]);
float cosX = (float)Math.cos(o[1]);
float sinY = (float)Math.sin(o[2]);
float cosY = (float)Math.cos(o[2]);
float sinZ = (float)Math.sin(o[0]);
float cosZ = (float)Math.cos(o[0]);
// rotation about x-axis (pitch)
xM[0] = 1.0f; xM[1] = 0.0f; xM[2] = 0.0f;
xM[3] = 0.0f; xM[4] = cosX; xM[5] = sinX;
xM[6] = 0.0f; xM[7] = -sinX; xM[8] = cosX;
// rotation about y-axis (roll)
yM[0] = cosY; yM[1] = 0.0f; yM[2] = sinY;
yM[3] = 0.0f; yM[4] = 1.0f; yM[5] = 0.0f;
yM[6] = -sinY; yM[7] = 0.0f; yM[8] = cosY;
// rotation about z-axis (azimuth)
zM[0] = cosZ; zM[1] = sinZ; zM[2] = 0.0f;
zM[3] = -sinZ; zM[4] = cosZ; zM[5] = 0.0f;
zM[6] = 0.0f; zM[7] = 0.0f; zM[8] = 1.0f;
// rotation order is y, x, z (roll, pitch, azimuth)
float[] resultMatrix = matrixMultiplication(xM, yM);
resultMatrix = matrixMultiplication(zM, resultMatrix);
return resultMatrix;
}
private float[] matrixMultiplication(float[] A, float[] B) {
float[] result = new float[9];
result[0] = A[0] * B[0] + A[1] * B[3] + A[2] * B[6];
result[1] = A[0] * B[1] + A[1] * B[4] + A[2] * B[7];
result[2] = A[0] * B[2] + A[1] * B[5] + A[2] * B[8];
result[3] = A[3] * B[0] + A[4] * B[3] + A[5] * B[6];
result[4] = A[3] * B[1] + A[4] * B[4] + A[5] * B[7];
result[5] = A[3] * B[2] + A[4] * B[5] + A[5] * B[8];
result[6] = A[6] * B[0] + A[7] * B[3] + A[8] * B[6];
result[7] = A[6] * B[1] + A[7] * B[4] + A[8] * B[7];
result[8] = A[6] * B[2] + A[7] * B[5] + A[8] * B[8];
return result;
}
class calculateFusedOrientationTask extends TimerTask {
public void run() {
float oneMinusCoeff = 1.0f - FILTER_COEFFICIENT;
/*
* Fix for 179° <--> -179° transition problem:
* Check whether one of the two orientation angles (gyro or accMag) is negative while the other one is positive.
* If so, add 360° (2 * math.PI) to the negative value, perform the sensor fusion, and remove the 360° from the result
* if it is greater than 180°. This stabilizes the output in positive-to-negative-transition cases.
*/
// azimuth
if (gyroOrientation[0] < -0.5 * Math.PI && accMagOrientation[0] > 0.0) {
fusedOrientation[0] = (float) (FILTER_COEFFICIENT * (gyroOrientation[0] + 2.0 * Math.PI) + oneMinusCoeff * accMagOrientation[0]);
fusedOrientation[0] -= (fusedOrientation[0] > Math.PI) ? 2.0 * Math.PI : 0;
}
else if (accMagOrientation[0] < -0.5 * Math.PI && gyroOrientation[0] > 0.0) {
fusedOrientation[0] = (float) (FILTER_COEFFICIENT * gyroOrientation[0] + oneMinusCoeff * (accMagOrientation[0] + 2.0 * Math.PI));
fusedOrientation[0] -= (fusedOrientation[0] > Math.PI)? 2.0 * Math.PI : 0;
}
else {
fusedOrientation[0] = FILTER_COEFFICIENT * gyroOrientation[0] + oneMinusCoeff * accMagOrientation[0];
}
// pitch
if (gyroOrientation[1] < -0.5 * Math.PI && accMagOrientation[1] > 0.0) {
fusedOrientation[1] = (float) (FILTER_COEFFICIENT * (gyroOrientation[1] + 2.0 * Math.PI) + oneMinusCoeff * accMagOrientation[1]);
fusedOrientation[1] -= (fusedOrientation[1] > Math.PI) ? 2.0 * Math.PI : 0;
}
else if (accMagOrientation[1] < -0.5 * Math.PI && gyroOrientation[1] > 0.0) {
fusedOrientation[1] = (float) (FILTER_COEFFICIENT * gyroOrientation[1] + oneMinusCoeff * (accMagOrientation[1] + 2.0 * Math.PI));
fusedOrientation[1] -= (fusedOrientation[1] > Math.PI)? 2.0 * Math.PI : 0;
}
else {
fusedOrientation[1] = FILTER_COEFFICIENT * gyroOrientation[1] + oneMinusCoeff * accMagOrientation[1];
}
// roll
if (gyroOrientation[2] < -0.5 * Math.PI && accMagOrientation[2] > 0.0) {
fusedOrientation[2] = (float) (FILTER_COEFFICIENT * (gyroOrientation[2] + 2.0 * Math.PI) + oneMinusCoeff * accMagOrientation[2]);
fusedOrientation[2] -= (fusedOrientation[2] > Math.PI) ? 2.0 * Math.PI : 0;
}
else if (accMagOrientation[2] < -0.5 * Math.PI && gyroOrientation[2] > 0.0) {
fusedOrientation[2] = (float) (FILTER_COEFFICIENT * gyroOrientation[2] + oneMinusCoeff * (accMagOrientation[2] + 2.0 * Math.PI));
fusedOrientation[2] -= (fusedOrientation[2] > Math.PI)? 2.0 * Math.PI : 0;
}
else {
fusedOrientation[2] = FILTER_COEFFICIENT * gyroOrientation[2] + oneMinusCoeff * accMagOrientation[2];
}
// overwrite gyro matrix and orientation with fused orientation
// to compensate for gyro drift
gyroMatrix = getRotationMatrixFromOrientation(fusedOrientation);
System.arraycopy(fusedOrientation, 0, gyroOrientation, 0, 3);
// update sensor output in GUI
mHandler.post(updateOrientationDisplayTask);
}
}
Thanks in advance for your help!
The theory...
I'm not really sure about the format in which your "frame of reference" matrix is represented, but typically rotations are done with matrix multiplication.
Basically, you would take your "frame of reference matrix" and multiply it by a 90 degrees rotation matrix.
Such a matrix can be found on Wikipedia:
Three-dimensional rotation matrices
Since your angle is 90 degrees, your sines and cosines would resolve to 1's or 0's which you can plug directly into the matrix instead of computing the sines and cosines. For example, a matrix that would rotate 90 degrees counter-clockwise about the x axis would look like this:
1 0 0
0 0 1
0 -1 0
Also, please note that matrices like these operate on row vectors of x y z coordinates.
So for example, if you have a point in space that is at (2,5,7) and you would like to rotate it using the above matrix, you would have to do the following operation:
          |1  0  0|
|2 5 7| * |0  0  1|
          |0 -1  0|
which gives [2 -7 5].
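A quick sketch that reproduces this worked example in code (row vector times matrix):

float[] v = {2f, 5f, 7f};
float[][] m = {{1, 0, 0}, {0, 0, 1}, {0, -1, 0}};
float[] out = new float[3];
for (int col = 0; col < 3; col++)
    for (int row = 0; row < 3; row++)
        out[col] += v[row] * m[row][col]; // out ends up as {2, -7, 5}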
...applied to your code
I have glanced quickly at your code and it seems like the modification you need to make involves the output of calculateAccMagOrientation() because it is used to initialize the orientation of the device.
1: public void calculateAccMagOrientation() {
2: if(SensorManager.getRotationMatrix(rotationMatrix, null, accel, magnet)) {
3: SensorManager.getOrientation(rotationMatrix, accMagOrientation);
4: }
5: }
At line 2 in the above snippet is where you get your initial rotationMatrix. Try multiplying rotationMatrix by a hand crafted 90 degrees rotation matrix before calling getOrientation at line 3. I think this will effectively re-align your reference orientation:
public void calculateAccMagOrientation() {
if(SensorManager.getRotationMatrix(rotationMatrix, null, accel, magnet)) {
rotationMatrix = matrixMultiplication(rotationMatrix, my90DegRotationMatrix);
SensorManager.getOrientation(rotationMatrix, accMagOrientation);
}
}
Please note that depending on how the angles work in Android, you might need to use a 90 degrees clockwise rotation matrix instead of a counter-clockwise.
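For illustration, here is the counter-clockwise matrix from the theory section written out in the row-major float[9] layout the rest of the code uses (my90DegRotationMatrix is just the suggested name from above; for the clockwise version, flip the signs of the two non-zero off-diagonal entries):

float[] my90DegRotationMatrix = new float[] {
    1f,  0f, 0f,
    0f,  0f, 1f,
    0f, -1f, 0f
};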
Alternative solution
It just occurred to me, maybe you could also simply subtract 90 from the final pitch result before displaying it?

Using Android gyroscope instead of accelerometer. I find lots of bits and pieces, but no complete code

The Sensor Fusion video looks great, but there's no code:
http://www.youtube.com/watch?v=C7JQ7Rpwn2k&feature=player_detailpage#t=1315s
Here is my code which just uses accelerometer and compass. I also use a Kalman filter on the 3 orientation values, but that's too much code to show here. Ultimately, this works ok, but the result is either too jittery or too laggy depending on what I do with the results and how low I make the filtering factors.
/** Just accelerometer and magnetic sensors */
public abstract class SensorsListener2
implements
SensorEventListener
{
/** The lower this is, the greater the preference which is given to previous values. (slows change) */
private static final float accelFilteringFactor = 0.1f;
private static final float magFilteringFactor = 0.01f;
public abstract boolean getIsLandscape();
@Override
public void onSensorChanged(SensorEvent event) {
Sensor sensor = event.sensor;
int type = sensor.getType();
switch (type) {
case Sensor.TYPE_MAGNETIC_FIELD:
mags[0] = event.values[0] * magFilteringFactor + mags[0] * (1.0f - magFilteringFactor);
mags[1] = event.values[1] * magFilteringFactor + mags[1] * (1.0f - magFilteringFactor);
mags[2] = event.values[2] * magFilteringFactor + mags[2] * (1.0f - magFilteringFactor);
isReady = true;
break;
case Sensor.TYPE_ACCELEROMETER:
accels[0] = event.values[0] * accelFilteringFactor + accels[0] * (1.0f - accelFilteringFactor);
accels[1] = event.values[1] * accelFilteringFactor + accels[1] * (1.0f - accelFilteringFactor);
accels[2] = event.values[2] * accelFilteringFactor + accels[2] * (1.0f - accelFilteringFactor);
break;
default:
return;
}
if(mags != null && accels != null && isReady) {
isReady = false;
SensorManager.getRotationMatrix(rot, inclination, accels, mags);
boolean isLandscape = getIsLandscape();
if(isLandscape) {
outR = rot;
} else {
// Remap the coordinates to work in portrait mode.
SensorManager.remapCoordinateSystem(rot, SensorManager.AXIS_X, SensorManager.AXIS_Z, outR);
}
SensorManager.getOrientation(outR, values);
double x180pi = 180.0 / Math.PI;
float azimuth = (float)(values[0] * x180pi);
float pitch = (float)(values[1] * x180pi);
float roll = (float)(values[2] * x180pi);
// In landscape mode swap pitch and roll and invert the pitch.
if(isLandscape) {
float tmp = pitch;
pitch = -roll;
roll = -tmp;
azimuth = 180 - azimuth;
} else {
pitch = -pitch - 90;
azimuth = 90 - azimuth;
}
onOrientationChanged(azimuth,pitch,roll);
}
}
private float[] mags = new float[3];
private float[] accels = new float[3];
private boolean isReady;
private float[] rot = new float[9];
private float[] outR = new float[9];
private float[] inclination = new float[9];
private float[] values = new float[3];
/**
Azimuth: angle between the magnetic north direction and the Y axis, around the Z axis (0 to 359). 0=North, 90=East, 180=South, 270=West
Pitch: rotation around X axis (-180 to 180), with positive values when the z-axis moves toward the y-axis.
Roll: rotation around Y axis (-90 to 90), with positive values when the x-axis moves toward the z-axis.
*/
public abstract void onOrientationChanged(float azimuth, float pitch, float roll);
}
I tried to figure out how to add gyroscope data, but I am just not doing it right. The google doc at http://developer.android.com/reference/android/hardware/SensorEvent.html shows some code to get a delta matrix from the gyroscope data. The idea seems to be that I'd crank down the filters for the accelerometer and magnetic sensors so that they were really stable. That would keep track of the long term orientation.
Then, I'd keep a history of the most recent N delta matrices from the gyroscope. Each time I got a new one I'd drop off the oldest one and multiply them all together to get a final matrix which I would multiply against the stable matrix returned by the accelerometer and magnetic sensors.
This doesn't seem to work. Or, at least, my implementation of it does not work. The result is far more jittery than just the accelerometer. Increasing the size of the gyroscope history actually increases the jitter which makes me think that I'm not calculating the right values from the gyroscope.
public abstract class SensorsListener3
implements
SensorEventListener
{
/** The lower this is, the greater the preference which is given to previous values. (slows change) */
private static final float kFilteringFactor = 0.001f;
private static final float magKFilteringFactor = 0.001f;
public abstract boolean getIsLandscape();
@Override
public void onSensorChanged(SensorEvent event) {
Sensor sensor = event.sensor;
int type = sensor.getType();
switch (type) {
case Sensor.TYPE_MAGNETIC_FIELD:
mags[0] = event.values[0] * magKFilteringFactor + mags[0] * (1.0f - magKFilteringFactor);
mags[1] = event.values[1] * magKFilteringFactor + mags[1] * (1.0f - magKFilteringFactor);
mags[2] = event.values[2] * magKFilteringFactor + mags[2] * (1.0f - magKFilteringFactor);
isReady = true;
break;
case Sensor.TYPE_ACCELEROMETER:
accels[0] = event.values[0] * kFilteringFactor + accels[0] * (1.0f - kFilteringFactor);
accels[1] = event.values[1] * kFilteringFactor + accels[1] * (1.0f - kFilteringFactor);
accels[2] = event.values[2] * kFilteringFactor + accels[2] * (1.0f - kFilteringFactor);
break;
case Sensor.TYPE_GYROSCOPE:
gyroscopeSensorChanged(event);
break;
default:
return;
}
if(mags != null && accels != null && isReady) {
isReady = false;
SensorManager.getRotationMatrix(rot, inclination, accels, mags);
boolean isLandscape = getIsLandscape();
if(isLandscape) {
outR = rot;
} else {
// Remap the coordinates to work in portrait mode.
SensorManager.remapCoordinateSystem(rot, SensorManager.AXIS_X, SensorManager.AXIS_Z, outR);
}
if(gyroUpdateTime!=0) {
matrixHistory.mult(matrixTmp,matrixResult);
outR = matrixResult;
}
SensorManager.getOrientation(outR, values);
double x180pi = 180.0 / Math.PI;
float azimuth = (float)(values[0] * x180pi);
float pitch = (float)(values[1] * x180pi);
float roll = (float)(values[2] * x180pi);
// In landscape mode swap pitch and roll and invert the pitch.
if(isLandscape) {
float tmp = pitch;
pitch = -roll;
roll = -tmp;
azimuth = 180 - azimuth;
} else {
pitch = -pitch - 90;
azimuth = 90 - azimuth;
}
onOrientationChanged(azimuth,pitch,roll);
}
}
private void gyroscopeSensorChanged(SensorEvent event) {
// This timestep's delta rotation to be multiplied by the current rotation
// after computing it from the gyro sample data.
if(gyroUpdateTime != 0) {
final float dT = (event.timestamp - gyroUpdateTime) * NS2S;
// Axis of the rotation sample, not normalized yet.
float axisX = event.values[0];
float axisY = event.values[1];
float axisZ = event.values[2];
// Calculate the angular speed of the sample
float omegaMagnitude = (float)Math.sqrt(axisX*axisX + axisY*axisY + axisZ*axisZ);
// Normalize the rotation vector if it's big enough to get the axis
if(omegaMagnitude > EPSILON) {
axisX /= omegaMagnitude;
axisY /= omegaMagnitude;
axisZ /= omegaMagnitude;
}
// Integrate around this axis with the angular speed by the timestep
// in order to get a delta rotation from this sample over the timestep
// We will convert this axis-angle representation of the delta rotation
// into a quaternion before turning it into the rotation matrix.
float thetaOverTwo = omegaMagnitude * dT / 2.0f;
float sinThetaOverTwo = (float)Math.sin(thetaOverTwo);
float cosThetaOverTwo = (float)Math.cos(thetaOverTwo);
deltaRotationVector[0] = sinThetaOverTwo * axisX;
deltaRotationVector[1] = sinThetaOverTwo * axisY;
deltaRotationVector[2] = sinThetaOverTwo * axisZ;
deltaRotationVector[3] = cosThetaOverTwo;
}
gyroUpdateTime = event.timestamp;
SensorManager.getRotationMatrixFromVector(deltaRotationMatrix, deltaRotationVector);
// User code should concatenate the delta rotation we computed with the current rotation
// in order to get the updated rotation.
// rotationCurrent = rotationCurrent * deltaRotationMatrix;
matrixHistory.add(deltaRotationMatrix);
}
private float[] mags = new float[3];
private float[] accels = new float[3];
private boolean isReady;
private float[] rot = new float[9];
private float[] outR = new float[9];
private float[] inclination = new float[9];
private float[] values = new float[3];
// gyroscope stuff
private long gyroUpdateTime = 0;
private static final float NS2S = 1.0f / 1000000000.0f;
private float[] deltaRotationMatrix = new float[9];
private final float[] deltaRotationVector = new float[4];
//TODO: I have no idea how small this value should be.
private static final float EPSILON = 0.000001f;
private float[] matrixMult = new float[9];
private MatrixHistory matrixHistory = new MatrixHistory(100);
private float[] matrixTmp = new float[9];
private float[] matrixResult = new float[9];
/**
Azimuth: angle between the magnetic north direction and the Y axis, around the Z axis (0 to 359). 0=North, 90=East, 180=South, 270=West
Pitch: rotation around X axis (-180 to 180), with positive values when the z-axis moves toward the y-axis.
Roll: rotation around Y axis (-90 to 90), with positive values when the x-axis moves toward the z-axis.
*/
public abstract void onOrientationChanged(float azimuth, float pitch, float roll);
}
public class MatrixHistory
{
public MatrixHistory(int size) {
vals = new float[size][];
}
public void add(float[] val) {
synchronized(vals) {
vals[ix] = val;
ix = (ix + 1) % vals.length;
if(ix==0)
full = true;
}
}
public void mult(float[] tmp, float[] output) {
synchronized(vals) {
if(full) {
for(int i=0; i<vals.length; ++i) {
if(i==0) {
System.arraycopy(vals[i],0,output,0,vals[i].length);
} else {
MathUtils.multiplyMatrix3x3(output,vals[i],tmp);
System.arraycopy(tmp,0,output,0,tmp.length);
}
}
} else {
if(ix==0)
return;
for(int i=0; i<ix; ++i) {
if(i==0) {
System.arraycopy(vals[i],0,output,0,vals[i].length);
} else {
MathUtils.multiplyMatrix3x3(output,vals[i],tmp);
System.arraycopy(tmp,0,output,0,tmp.length);
}
}
}
}
}
private int ix = 0;
private boolean full = false;
private float[][] vals;
}
The second block of code contains my changes from the first block of code which add the gyroscope to the mix.
Specifically, the filtering factor for accel is made smaller (making the value more stable). The MatrixHistory class keeps track of the last 100 gyroscope deltaRotationMatrix values which are calculated in the gyroscopeSensorChanged method.
I've seen many questions on this site on this topic. They've helped me get to this point, but I cannot figure out what to do next. I really wish the Sensor Fusion guy had just posted some code somewhere. He obviously had it all put together.
Well, +1 to you for even knowing what a Kalman filter is. If you'd like, I'll edit this post and give you the code I wrote a couple years ago to do what you're trying to do.
But first, I'll tell you why you don't need it.
Modern implementations of the Android sensor stack use Sensor Fusion, as Stan mentioned above. This just means that all of the available data -- accel, mag, gyro -- is collected together in one algorithm, and then all the outputs are read back out in the form of Android sensors.
Edit: I just stumbled on this superb Google Tech Talk on the subject: Sensor Fusion on Android Devices: A Revolution in Motion Processing. Well worth the 45 minutes to watch it if you're interested in the topic.
In essence, Sensor Fusion is a black box. I've looked into the source code of the Android implementation, and it's a big Kalman filter written in C++. Some pretty good code in there, far more sophisticated than any filter I ever wrote, and probably more sophisticated than what you're writing. Remember, these guys are doing this for a living.
I also know that at least one chipset manufacturer has their own sensor fusion implementation. The manufacturer of the device then chooses between the Android and the vendor implementation based on their own criteria.
Finally, as Stan mentioned above, Invensense has their own sensor fusion implementation at the chip level.
Anyway, what it all boils down to is that the built-in sensor fusion in your device is likely to be superior to anything you or I could cobble together. So what you really want to do is to access that.
In Android, there are both physical and virtual sensors. The virtual sensors are the ones that are synthesized from the available physical sensors. The best-known example is TYPE_ORIENTATION which takes accelerometer and magnetometer and creates roll/pitch/heading output. (By the way, you should not use this sensor; it has too many limitations.)
But the important thing is that newer versions of Android contain these two new virtual sensors:
TYPE_GRAVITY is the accelerometer input with the effect of motion filtered out
TYPE_LINEAR_ACCELERATION is the accelerometer with the gravity component filtered out.
These two virtual sensors are synthesized through a combination of accelerometer input and gyro input.
Another notable sensor is TYPE_ROTATION_VECTOR which is a Quaternion synthesized from accelerometer, magnetometer, and gyro. It represents the full 3-d orientation of the device with the effects of linear acceleration filtered out.
However, Quaternions are a little bit abstract for most people, and since you're likely working with 3-d transformations anyway, your best approach is to combine TYPE_GRAVITY and TYPE_MAGNETIC_FIELD via SensorManager.getRotationMatrix().
One more point: if you're working with a device running an older version of Android, you need to detect that you're not receiving TYPE_GRAVITY events and use TYPE_ACCELEROMETER instead. Theoretically, this would be a place to use your own Kalman filter, but if your device doesn't have sensor fusion built in, it probably doesn't have gyros either.
Anyway, here's some sample code to show how I do it.
// Requires 1.5 or above
class Foo extends Activity implements SensorEventListener {
static final String TAG = "Foo"; // log tag (defined here for completeness)
static final float DEG = (float) (180.0 / Math.PI); // radians-to-degrees factor used below
SensorManager sensorManager;
float[] gData = new float[3]; // Gravity or accelerometer
float[] mData = new float[3]; // Magnetometer
float[] orientation = new float[3];
float[] Rmat = new float[9];
float[] R2 = new float[9];
float[] Imat = new float[9];
boolean haveGrav = false;
boolean haveAccel = false;
boolean haveMag = false;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Get the sensor manager from system services
sensorManager = (SensorManager) getSystemService(Context.SENSOR_SERVICE);
}
@Override
protected void onResume() {
super.onResume();
// Register our listeners
Sensor gsensor = sensorManager.getDefaultSensor(Sensor.TYPE_GRAVITY);
Sensor asensor = sensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
Sensor msensor = sensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD);
sensorManager.registerListener(this, gsensor, SensorManager.SENSOR_DELAY_GAME);
sensorManager.registerListener(this, asensor, SensorManager.SENSOR_DELAY_GAME);
sensorManager.registerListener(this, msensor, SensorManager.SENSOR_DELAY_GAME);
}
public void onSensorChanged(SensorEvent event) {
float[] data;
switch( event.sensor.getType() ) {
case Sensor.TYPE_GRAVITY:
gData[0] = event.values[0];
gData[1] = event.values[1];
gData[2] = event.values[2];
haveGrav = true;
break;
case Sensor.TYPE_ACCELEROMETER:
if (haveGrav) break; // don't need it, we have better
gData[0] = event.values[0];
gData[1] = event.values[1];
gData[2] = event.values[2];
haveAccel = true;
break;
case Sensor.TYPE_MAGNETIC_FIELD:
mData[0] = event.values[0];
mData[1] = event.values[1];
mData[2] = event.values[2];
haveMag = true;
break;
default:
return;
}
if ((haveGrav || haveAccel) && haveMag) {
SensorManager.getRotationMatrix(Rmat, Imat, gData, mData);
SensorManager.remapCoordinateSystem(Rmat,
SensorManager.AXIS_Y, SensorManager.AXIS_MINUS_X, R2);
// Orientation isn't as useful as a rotation matrix, but
// we'll show it here anyway.
SensorManager.getOrientation(R2, orientation);
float incl = SensorManager.getInclination(Imat);
Log.d(TAG, "mh: " + (int)(orientation[0]*DEG));
Log.d(TAG, "pitch: " + (int)(orientation[1]*DEG));
Log.d(TAG, "roll: " + (int)(orientation[2]*DEG));
Log.d(TAG, "yaw: " + (int)(orientation[0]*DEG));
Log.d(TAG, "inclination: " + (int)(incl*DEG));
}
}
@Override
public void onAccuracyChanged(Sensor sensor, int accuracy) { /* required by the interface; unused */ }
}
Hmmm; if you happen to have a Quaternion library handy, it's probably simpler just to receive TYPE_ROTATION_VECTOR and convert that to an array.
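A sketch of that idea, inside an onSensorChanged() that receives TYPE_ROTATION_VECTOR events:

float[] quat = new float[4];
SensorManager.getQuaternionFromVector(quat, event.values);
// quat[0] is the scalar (w) component, quat[1..3] are x, y, z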
To the question where to find complete code, here's a default implementation on Android jelly bean: https://android.googlesource.com/platform/frameworks/base/+/jb-release/services/sensorservice/
Start by checking the fusion.cpp/h.
It uses Modified Rodrigues Parameters (close to Euler angles) instead of quaternions. In addition to orientation the Kalman filter estimates gyro drift. For measurement updates it uses magnetometer and, a bit incorrectly, acceleration (specific force).
To make use of the code you should either be a wizard or know the basics of INS and KF. Many parameters have to be fine-tuned for the filter to work. As Edward aptly put it, these guys are doing this for a living.
At least on Google's Galaxy Nexus, this default implementation is left unused and is overridden by InvenSense's proprietary system.

Compute rotation matrix using the magnetic field

The getRotationMatrix documentation gives the signature public static boolean getRotationMatrix (float[] R, float[] I, float[] gravity, float[] geomagnetic).
How can I calculate the float[] gravity argument?
I found a code sample that calculates the orientation using both the accelerometer and the magnetic field:
boolean success = SensorManager.getRotationMatrix(
matrixR,
matrixI,
valuesAccelerometer,
valuesMagneticField);
if(success){
SensorManager.getOrientation(matrixR, matrixValues);
double azimuth = Math.toDegrees(matrixValues[0]);
double pitch = Math.toDegrees(matrixValues[1]);
double roll = Math.toDegrees(matrixValues[2]);
readingAzimuth.setText("Azimuth: " + String.valueOf(azimuth));
readingPitch.setText("Pitch: " + String.valueOf(pitch));
readingRoll.setText("Roll: "+String.valueOf(roll));
}
My questions are:
Is the orientation value the rotation matrix value?
If not, how can I adapt this code to get the rotation matrix value using the magnetic field?
To get the rotation matrix I use this code:
public void onSensorChanged(SensorEvent sensorEvent) {
if (timestamp != 0) {
final double dT = (sensorEvent.timestamp - timestamp) * NS2S;
double magneticX = sensorEvent.values[0];
double magneticY = sensorEvent.values[1];
double magneticZ = sensorEvent.values[2];
double omegaMagnitude =Math.sqrt(magneticX*magneticX + magneticY*magneticY + magneticZ*magneticZ);
if (omegaMagnitude > EPSILON) {
magneticX /= omegaMagnitude;
magneticY /= omegaMagnitude;
magneticZ /= omegaMagnitude;
}
double thetaOverTwo = omegaMagnitude * dT / 2.0f;
double sinThetaOverTwo =Math.sin(thetaOverTwo);
double cosThetaOverTwo = Math.cos(thetaOverTwo);
deltaRotationVector[0] = (double) (sinThetaOverTwo * magneticX);
deltaRotationVector[1] = (double) (sinThetaOverTwo * magneticY);
deltaRotationVector[2] = (double) (sinThetaOverTwo * magneticZ);
deltaRotationVector[3] = cosThetaOverTwo;
}
double[] deltaRotationMatrix = new double[9];
SensorManager.getRotationMatrixFromVector(deltaRotationMatrix, deltaRotationVector);
}
But the problem is that getRotationMatrixFromVector is reported as undefined for SensorManager (likely because the arrays here are double[] while the API expects float[]). Any idea?
Orientation is not a rotation matrix, as it only provides angles relative to magnetic north. You can obtain the rotation matrix (direction cosine matrix) that will help you transform coordinates from your device frame to the Earth frame this way:
[rotation matrix image omitted from the original answer], with
φ = azimuth (radians)
θ = pitch (radians)
ψ = roll (radians)
I know that this is an old thread, but in case it helps: for Android, I think the 3x3 rotation matrix is actually given by a variation of the accepted answer. To be specific, in Android the rotation matrix is
( cos φ cos ψ - sin φ sin ψ sin θ)    sin φ cos θ    ( cos φ sin ψ + sin φ cos ψ sin θ)
-(sin φ cos ψ + cos φ sin ψ sin θ)    cos φ cos θ    (-sin φ sin ψ + cos φ cos ψ sin θ)
 -sin ψ cos θ                         -sin θ           cos ψ cos θ
where
φ = azimuth
θ = pitch
ψ = roll
which corresponds to the 3x3 Android rotation matrix R[0] to R[8] (matrixR in the question) via
R[0] R[1] R[2]
R[3] R[4] R[5]
R[6] R[7] R[8]
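As a sanity check (a sketch, using the same relations SensorManager.getOrientation() itself applies to this layout), the three angles can be recovered from matrixR like this:

float azimuth = (float) Math.atan2(matrixR[1], matrixR[4]);  // phi
float pitch   = (float) Math.asin(-matrixR[7]);              // theta
float roll    = (float) Math.atan2(-matrixR[6], matrixR[8]); // psi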

Transforming accelerometer's data from device's coordinates to real world coordinates

I'm really sorry if this is a very basic question, but I have no choice but to ask it: how do you translate the accelerometer data from device coordinates to real-world coordinates?
I mean, assuming that the accelerometer gives me something like (Ax,Ay,Az) in device coordinates, what transformation should I apply to turn the values into (Ax',Ay',Az') in real-world coordinates, so I can use the acceleration vector in real-world coordinates to calculate whether the device is accelerating north, east, south-west, etc.?
I have been working on this issue for the past few days. At first I thought it shouldn't be difficult, but after searching dozens of pages I haven't come up with anything functional.
By the way, here is some code with what I've implemented so far:
private SensorEventListener mSensorEventListener = new SensorEventListener() {
    public void onAccuracyChanged(Sensor sensor, int accuracy) {
    }

    public void onSensorChanged(SensorEvent event) {
        switch (event.sensor.getType()) {
            case Sensor.TYPE_ACCELEROMETER:
                accelerometervalues = event.values.clone();
                AX.setText(accelerometervalues[0] + "");
                AY.setText(accelerometervalues[1] + "");
                AZ.setText(accelerometervalues[2] + "");
                break;
            case Sensor.TYPE_ORIENTATION:
                orientationvalues = event.values.clone();
                azimuth.setText(orientationvalues[0] + "");
                pitch.setText(orientationvalues[1] + "");
                roll.setText(orientationvalues[2] + "");
                break;
            case Sensor.TYPE_MAGNETIC_FIELD:
                geomagneticmatrix = event.values.clone();
                TAX.setText(geomagneticmatrix[0] + "");
                TAY.setText(geomagneticmatrix[1] + "");
                TAZ.setText(geomagneticmatrix[2] + "");
                break;
        }
        if (geomagneticmatrix != null && accelerometervalues != null) {
            float[] R = new float[16];
            float[] I = new float[16];
            SensorManager.getRotationMatrix(R, I, accelerometervalues, geomagneticmatrix);
            // What should I do here to transform the components of accelerometervalues into real world acceleration components??
        }
    }
};
I have:
A vector of accelerations in native coordinates in accelerometervalues.
A vector of magnetic field values in geomagneticmatrix.
Azimuth, pitch and roll in orientationvalues.
Rotation matrix R.
Inclination matrix I.
I think all the necessary information is there: azimuth, pitch and roll should describe how the device's coordinate system is rotated relative to the real-world coordinate system, and I believe R can even be used to obtain a true-north vector in the device's coordinates.
It seems to me that obtaining the acceleration values in real-world coordinates is just one mathematical transformation away from those data. I just can't figure it out.
Thanks in advance.
Edited:
I have tried directly multiplying the components of accelerometervalues by the rotation matrix R (trueaccel = accel * R), but it didn't work:
trueacceleration[0]= accelerometervalues[0]*R[0]+accelerometervalues[1]*R[1]+accelerometervalues[2]*R[2];
trueacceleration[1]= accelerometervalues[0]*R[1]+accelerometervalues[1]*R[4]+accelerometervalues[2]*R[7];
trueacceleration[2]= accelerometervalues[0]*R[2]+accelerometervalues[1]*R[5]+accelerometervalues[2]*R[8];
I have also tried multiplying accelerometervalues by the inclination matrix I, and multiplying by both R and I (trueaccel = accel * R * I), and that didn't work either. Neither does calling remapCoordinateSystem() and then multiplying in any of the previous forms.
Does anybody have an idea of what I am doing wrong?
OK, I have worked this out mathematically myself, so please bear with me.
If you want to translate an acceleration vector accelerometervalues into an acceleration vector trueacceleration expressed in real-world coordinates, once you have azimuth, pitch and roll stored in an orientationvalues vector, just do the following:
trueacceleration[0] = (float) (accelerometervalues[0] * (Math.cos(orientationvalues[2]) * Math.cos(orientationvalues[0]) + Math.sin(orientationvalues[2]) * Math.sin(orientationvalues[1]) * Math.sin(orientationvalues[0]))
        + accelerometervalues[1] * (Math.cos(orientationvalues[1]) * Math.sin(orientationvalues[0]))
        + accelerometervalues[2] * (-Math.sin(orientationvalues[2]) * Math.cos(orientationvalues[0]) + Math.cos(orientationvalues[2]) * Math.sin(orientationvalues[1]) * Math.sin(orientationvalues[0])));
trueacceleration[1] = (float) (accelerometervalues[0] * (-Math.cos(orientationvalues[2]) * Math.sin(orientationvalues[0]) + Math.sin(orientationvalues[2]) * Math.sin(orientationvalues[1]) * Math.cos(orientationvalues[0]))
        + accelerometervalues[1] * (Math.cos(orientationvalues[1]) * Math.cos(orientationvalues[0]))
        + accelerometervalues[2] * (Math.sin(orientationvalues[2]) * Math.sin(orientationvalues[0]) + Math.cos(orientationvalues[2]) * Math.sin(orientationvalues[1]) * Math.cos(orientationvalues[0])));
trueacceleration[2] = (float) (accelerometervalues[0] * (Math.sin(orientationvalues[2]) * Math.cos(orientationvalues[1]))
        + accelerometervalues[1] * (-Math.sin(orientationvalues[1]))
        + accelerometervalues[2] * (Math.cos(orientationvalues[2]) * Math.cos(orientationvalues[1])));
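For readability, the same computation can be packaged as a small helper with the sines and cosines computed once (the method name worldAcceleration is mine; the math is identical to the three lines above):
private static float[] worldAcceleration(float[] a, float[] o) {
    double sinAz = Math.sin(o[0]), cosAz = Math.cos(o[0]); // azimuth
    double sinP = Math.sin(o[1]), cosP = Math.cos(o[1]);   // pitch
    double sinR = Math.sin(o[2]), cosR = Math.cos(o[2]);   // roll
    float[] world = new float[3];
    world[0] = (float) (a[0] * (cosR * cosAz + sinR * sinP * sinAz)
            + a[1] * (cosP * sinAz)
            + a[2] * (-sinR * cosAz + cosR * sinP * sinAz));
    world[1] = (float) (a[0] * (-cosR * sinAz + sinR * sinP * cosAz)
            + a[1] * (cosP * cosAz)
            + a[2] * (sinR * sinAz + cosR * sinP * cosAz));
    world[2] = (float) (a[0] * (sinR * cosP)
            + a[1] * (-sinP)
            + a[2] * (cosR * cosP));
    return world;
}
Called as worldAcceleration(accelerometervalues, orientationvalues).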
Try this, it's working for me:
private float[] gravityValues = null;
private float[] magneticValues = null;
private SensorManager mSensorManager = null;

private void registerSensorListener(Context context) {
    mSensorManager = (SensorManager) context.getSystemService(SENSOR_SERVICE);
    mSensorManager.registerListener(this,
            mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER),
            SensorManager.SENSOR_DELAY_FASTEST);
    mSensorManager.registerListener(this,
            mSensorManager.getDefaultSensor(Sensor.TYPE_GYROSCOPE),
            SensorManager.SENSOR_DELAY_FASTEST);
    mSensorManager.registerListener(this,
            mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD),
            SensorManager.SENSOR_DELAY_FASTEST);
    mSensorManager.registerListener(this,
            mSensorManager.getDefaultSensor(Sensor.TYPE_GRAVITY),
            SensorManager.SENSOR_DELAY_FASTEST);
}
@Override
public void onSensorChanged(SensorEvent event) {
    if ((gravityValues != null) && (magneticValues != null)
            && (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER)) {
        float[] deviceRelativeAcceleration = new float[4];
        deviceRelativeAcceleration[0] = event.values[0];
        deviceRelativeAcceleration[1] = event.values[1];
        deviceRelativeAcceleration[2] = event.values[2];
        deviceRelativeAcceleration[3] = 0;
        Log.d("Raw Acceleration::", "Values: (" + event.values[0] + ", " + event.values[1] + ", " + event.values[2] + ")");
        // Change the device relative acceleration values to earth relative values
        // X axis -> East
        // Y axis -> North Pole
        // Z axis -> Sky
        float[] R = new float[16], I = new float[16], earthAcc = new float[16];
        SensorManager.getRotationMatrix(R, I, gravityValues, magneticValues);
        float[] inv = new float[16];
        android.opengl.Matrix.invertM(inv, 0, R, 0);
        android.opengl.Matrix.multiplyMV(earthAcc, 0, inv, 0, deviceRelativeAcceleration, 0);
        Log.d("Earth Acceleration", "Values: (" + earthAcc[0] + ", " + earthAcc[1] + ", " + earthAcc[2] + ")");
    } else if (event.sensor.getType() == Sensor.TYPE_GRAVITY) {
        gravityValues = event.values;
    } else if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD) {
        magneticValues = event.values;
    }
}
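A side note on the invertM call above (my observation, not part of the original answer): the SensorManager documentation describes the R returned by getRotationMatrix as transforming a vector from the device coordinate system into the world coordinate system, and since a rotation matrix is orthonormal its inverse is simply its transpose. So depending on which direction you actually need, the inversion could be replaced with the cheaper
android.opengl.Matrix.transposeM(inv, 0, R, 0);
or R could be applied directly. It is worth verifying against a known orientation: with the device lying flat and still, the earth-frame Z component should read roughly +9.81.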
You need to know the reference coordinate system that gives you the orientation of your device within 'real' world coordinates. Without that information, it looks impossible to transform your data into anything useful.
For example, does your device have some kind of 'directional' sensor that would help make sense of the accelerometer data (a gyro and compass, for example)?
I am dealing with the same problem. Since you have the R[] matrix, what you can do is multiply it by your acceleration vector, and voilà:
float[] resultVec = new float[4];
Matrix.multiplyMV(resultVec, 0, R, 0, accelerometervalues, 0);
PS: accelerometervalues must be a 4-field vector; just set the last field to 0. R must likewise be a 4x4 matrix (a float[16]) for multiplyMV.
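Putting the pieces together, a minimal self-contained sketch of this approach (the variable names are mine) might look like:
float[] R = new float[16];     // 4x4, as android.opengl.Matrix expects
float[] accel4 = new float[4]; // device acceleration, padded to 4 fields
float[] world4 = new float[4]; // result in world coordinates
if (SensorManager.getRotationMatrix(R, null, accelerometervalues, geomagneticmatrix)) {
    System.arraycopy(accelerometervalues, 0, accel4, 0, 3);
    accel4[3] = 0; // a direction, not a position
    android.opengl.Matrix.multiplyMV(world4, 0, R, 0, accel4, 0);
}
When passed a float[16], getRotationMatrix fills in a proper 4x4 matrix, so no manual padding of R is needed.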
This is what I used to map accelerometer data from the local (device) frame of reference to the Earth frame of reference, to remove the orientation dependency. In the Earth frame the Z axis points towards the sky and should read roughly 9.81 m/s^2. One phenomenon I couldn't understand: when I put the phone on a revolving chair, in any orientation, and rotate it at constant speed, the X-Earth and Y-Earth values show the rotation with a 90-degree phase shift and oscillate like sine/cosine waves, which I assume correspond to the North and East axes.
public void onSensorChanged(SensorEvent event) {
    switch (event.sensor.getType()) {
        case Sensor.TYPE_ACCELEROMETER:
            System.arraycopy(event.values, 0, accel, 0, 3);
            // To get a quaternion representation of the accelerometer data
            SensorManager.getQuaternionFromVector(quatA, event.values);
            q1.w = quatA[0]; q1.x = quatA[1]; q1.y = quatA[2]; q1.z = quatA[3];
            break;
        case Sensor.TYPE_ROTATION_VECTOR:
            SensorManager.getRotationMatrixFromVector(rotationMatrix1, event.values);
            System.arraycopy(event.values, 0, rotationVector, 0, 3);
            SensorManager.getQuaternionFromVector(quat, event.values);
            q2.w = quat[0]; q2.x = quat[1]; q2.y = quat[2]; q2.z = quat[3];
            rotationMatrix2 = getRotationMatrixFromQuaternion(q2);
            rotationResult = matrixMultiplication(accel, rotationMatrix2);
            // You can use rotationMatrix1 or rotationMatrix2
            // Accel data rotated into the earth frame of reference:
            // rotationResult[0], rotationResult[1], rotationResult[2]
            break;
    }
}
private float[] getRotationMatrixFromQuaternion(Quaternion q22) {
    float[] q = new float[4];
    float[] result = new float[9];
    q[0] = q22.w;
    q[1] = q22.x;
    q[2] = q22.y;
    q[3] = q22.z;
    result[0] = q[0] * q[0] + q[1] * q[1] - q[2] * q[2] - q[3] * q[3];
    result[1] = 2 * (q[1] * q[2] - q[0] * q[3]);
    result[2] = 2 * (q[1] * q[3] + q[0] * q[2]);
    result[3] = 2 * (q[1] * q[2] + q[0] * q[3]);
    result[4] = q[0] * q[0] - q[1] * q[1] + q[2] * q[2] - q[3] * q[3];
    result[5] = 2 * (q[2] * q[3] - q[0] * q[1]);
    result[6] = 2 * (q[1] * q[3] - q[0] * q[2]);
    result[7] = 2 * (q[2] * q[3] + q[0] * q[1]);
    result[8] = q[0] * q[0] - q[1] * q[1] - q[2] * q[2] + q[3] * q[3];
    return result;
}
private float[] matrixMultiplication(float[] A, float[] B) {
    // Despite the name, this multiplies the 3x3 row-major matrix B
    // by the 3-component vector A: result = B * A.
    float[] result = new float[3];
    result[0] = A[0] * B[0] + A[1] * B[1] + A[2] * B[2];
    result[1] = A[0] * B[3] + A[1] * B[4] + A[2] * B[5];
    result[2] = A[0] * B[6] + A[1] * B[7] + A[2] * B[8];
    return result;
}
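As a quick sanity check of these two helpers: the identity quaternion (w = 1, x = y = z = 0) should yield the identity matrix and leave any vector unchanged. The Quaternion class itself is not shown in this answer, so the stand-in below with public w/x/y/z fields is an assumption:
// Hypothetical stand-in for the Quaternion type used above.
class Quaternion {
    float w, x, y, z;
}

Quaternion identity = new Quaternion();
identity.w = 1; // x, y, z default to 0
float[] m = getRotationMatrixFromQuaternion(identity);
// m should be {1,0,0, 0,1,0, 0,0,1}
float[] v = matrixMultiplication(new float[] {1, 2, 3}, m);
// v should be {1, 2, 3}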
