2D values from Quaternion - Android

I'm working on an AR application on Android with the Epson Moverio BT-200.
I have a quaternion whose values are updated by my sensor fusion algorithm.
In my application I'm trying to move a 2D item by changing its left and top margins when I move my head.
I'd like to know how I can extract, from the quaternion values, only the "horizontal" and "vertical" movements.
I could extract the pitch and roll values from the quaternion, but I have read that Euler angles suffer from several problems. Could I do this working only with quaternions?

This is my current code. I solved the problem by using quaternions throughout the algorithm and extracting the Euler angles from the rotation matrix only at the end.
This is the algorithm that reads the values from the sensors:
private static final float NS2S = 1.0f / 1000000000.0f;
private final Quaternion deltaQuaternion = new Quaternion();
private Quaternion quaternionGyroscope = new Quaternion();
private Quaternion quaternionRotationVector = new Quaternion();
private long timestamp;
private static final double EPSILON = 0.1f;
private double gyroscopeRotationVelocity = 0;
private boolean positionInitialised = false;
private int panicCounter;
private static final float DIRECT_INTERPOLATION_WEIGHT = 0.005f;
private static final float OUTLIER_THRESHOLD = 0.85f;
private static final float OUTLIER_PANIC_THRESHOLD = 0.65f;
private static final int PANIC_THRESHOLD = 60;
@Override
public void onSensorChanged(SensorEvent event) {
if (event.sensor.getType() == Sensor.TYPE_ROTATION_VECTOR) {
// Process rotation vector (just save it)
float[] q = new float[4];
// Calculate angle. Starting with API_18, Android will provide this value as event.values[3], but if not, we have to calculate it manually.
SensorManager.getQuaternionFromVector(q, event.values);
// Store in quaternion
quaternionRotationVector.setXYZW(q[1], q[2], q[3], -q[0]);
if (!positionInitialised) {
// Override
quaternionGyroscope.set(quaternionRotationVector);
positionInitialised = true;
}
} else if (event.sensor.getType() == Sensor.TYPE_GYROSCOPE) {
// Process Gyroscope and perform fusion
// This timestep's delta rotation to be multiplied by the current rotation
// after computing it from the gyro sample data.
if (timestamp != 0) {
final float dT = (event.timestamp - timestamp) * NS2S;
// Axis of the rotation sample, not normalized yet.
float axisX = event.values[0];
float axisY = event.values[1];
float axisZ = event.values[2];
// Calculate the angular speed of the sample
gyroscopeRotationVelocity = Math.sqrt(axisX * axisX + axisY * axisY + axisZ * axisZ);
// Normalize the rotation vector if it's big enough to get the axis
if (gyroscopeRotationVelocity > EPSILON) {
axisX /= gyroscopeRotationVelocity;
axisY /= gyroscopeRotationVelocity;
axisZ /= gyroscopeRotationVelocity;
}
// Integrate around this axis with the angular speed by the timestep
// in order to get a delta rotation from this sample over the timestep
// We will convert this axis-angle representation of the delta rotation
// into a quaternion before turning it into the rotation matrix.
double thetaOverTwo = gyroscopeRotationVelocity * dT / 2.0f;
double sinThetaOverTwo = Math.sin(thetaOverTwo);
double cosThetaOverTwo = Math.cos(thetaOverTwo);
deltaQuaternion.setX((float) (sinThetaOverTwo * axisX));
deltaQuaternion.setY((float) (sinThetaOverTwo * axisY));
deltaQuaternion.setZ((float) (sinThetaOverTwo * axisZ));
deltaQuaternion.setW(-(float) cosThetaOverTwo);
// Move current gyro orientation
deltaQuaternion.multiplyByQuat(quaternionGyroscope, quaternionGyroscope);
// Calculate dot-product to calculate whether the two orientation sensors have diverged
// (if the dot-product is closer to 0 than to 1), because it should be close to 1 if both are the same.
float dotProd = quaternionGyroscope.dotProduct(quaternionRotationVector);
// If they have diverged, rely on gyroscope only (this happens on some devices when the rotation vector "jumps").
if (Math.abs(dotProd) < OUTLIER_THRESHOLD) {
// Increase panic counter
if (Math.abs(dotProd) < OUTLIER_PANIC_THRESHOLD) {
panicCounter++;
}
// Directly use Gyro
setOrientationQuaternionAndMatrix(quaternionGyroscope);
} else {
// Both are nearly saying the same. Perform normal fusion.
// Interpolate with a fixed weight between the two absolute quaternions obtained from gyro and rotation vector sensors
// The weight should be quite low, so the rotation vector corrects the gyro only slowly, and the output keeps responsive.
Quaternion interpolate = new Quaternion();
quaternionGyroscope.slerp(quaternionRotationVector, interpolate, DIRECT_INTERPOLATION_WEIGHT);
// Use the interpolated value between gyro and rotationVector
setOrientationQuaternionAndMatrix(interpolate);
// Override current gyroscope-orientation
quaternionGyroscope.copyVec4(interpolate);
// Reset the panic counter because both sensors are saying the same again
panicCounter = 0;
}
if (panicCounter > PANIC_THRESHOLD) {
Log.d("Rotation Vector",
"Panic counter is bigger than threshold; this indicates a Gyroscope failure. Panic reset is imminent.");
if (gyroscopeRotationVelocity < 3) {
Log.d("Rotation Vector",
"Performing Panic-reset. Resetting orientation to rotation-vector value.");
// Manually set position to whatever rotation vector says.
setOrientationQuaternionAndMatrix(quaternionRotationVector);
// Override current gyroscope-orientation with corrected value
quaternionGyroscope.copyVec4(quaternionRotationVector);
panicCounter = 0;
} else {
Log.d("Rotation Vector",
String.format(
"Panic reset delayed due to ongoing motion (user is still shaking the device). Gyroscope Velocity: %.2f > 3",
gyroscopeRotationVelocity));
}
}
}
timestamp = event.timestamp;
}
}
private void setOrientationQuaternionAndMatrix(Quaternion quaternion) {
Quaternion correctedQuat = quaternion.clone();
// We inverted w in the deltaQuaternion, because currentOrientationQuaternion required it.
// Before converting it back to matrix representation, we need to revert this process
correctedQuat.w(-correctedQuat.w());
synchronized (syncToken) {
// Use gyro only
currentOrientationQuaternion.copyVec4(quaternion);
// Set the rotation matrix as well to have both representations
SensorManager.getRotationMatrixFromVector(currentOrientationRotationMatrix.matrix, correctedQuat.ToArray());
}
}
And this is how I extract the Euler angle rotation values:
/**
* @return the current rotation of the device as Euler angles
*/
public EulerAngles getEulerAngles() {
float[] angles = new float[3];
float[] remappedOrientationMatrix = new float[16];
SensorManager.remapCoordinateSystem(currentOrientationRotationMatrix.getMatrix(), SensorManager.AXIS_X,
SensorManager.AXIS_Z, remappedOrientationMatrix);
SensorManager.getOrientation(remappedOrientationMatrix, angles);
return new EulerAngles(angles[0], angles[1], angles[2]);
}
I solved my problem with this solution. Now it won't be difficult to move my 2D object with these sensor values; a sketch of that last step follows below. Sorry for the length of my answer, but I hope it can be useful for someone :)
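As a minimal sketch of that last step (the getYaw()/getPitch() accessors on EulerAngles, the FrameLayout parent, and the PIXELS_PER_RADIAN constant are my own assumptions, not part of the code above):
// Hedged sketch: map head yaw/pitch (radians) to the 2D item's margins.
// PIXELS_PER_RADIAN is a tuning constant; the accessors are hypothetical.
private static final float PIXELS_PER_RADIAN = 500f;

private void updateItemPosition(View itemView) {
    EulerAngles angles = getEulerAngles();
    FrameLayout.LayoutParams lp =
            (FrameLayout.LayoutParams) itemView.getLayoutParams();
    // Yaw (azimuth) moves the item horizontally, pitch vertically.
    lp.leftMargin = (int) (-angles.getYaw() * PIXELS_PER_RADIAN);
    lp.topMargin = (int) (-angles.getPitch() * PIXELS_PER_RADIAN);
    itemView.setLayoutParams(lp);
}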

Related

Detect whether device is near to or away from ear - Android code

I need to know whether the device is near to the ear or not by using sensors.
I tried using the proximity sensor; I want to combine the accelerometer and gyroscope sensors to determine more reliably whether the device is near to or far from the ear.
Code for proximity:
@Override
public void onSensorChanged(SensorEvent event) {
float distance = event.values[0];
if (event.sensor.getType() == Sensor.TYPE_PROXIMITY) {
if (distance < mProximity.getMaximumRange()) {
iv.setText("Near");
} else {
iv.setText("far");
}
}
}
This is what I got from the Android documentation. I am sure you can dig deeper to find answers to your problem, but this should be enough to get you started. You can also do some research on position sensors in Android; the documentation is quite useful.
// Create a constant to convert nanoseconds to seconds.
private static final float NS2S = 1.0f / 1000000000.0f;
private final float[] deltaRotationVector = new float[4];
private long timestamp;
private static final float EPSILON = 0.000001f; // smallest rotation rate worth normalizing
public void onSensorChanged(SensorEvent event) {
// This timestep's delta rotation to be multiplied by the current rotation
// after computing it from the gyro sample data.
if (timestamp != 0) {
final float dT = (event.timestamp - timestamp) * NS2S;
// Axis of the rotation sample, not normalized yet.
float axisX = event.values[0];
float axisY = event.values[1];
float axisZ = event.values[2];
// Calculate the angular speed of the sample
float omegaMagnitude = (float) Math.sqrt(axisX*axisX + axisY*axisY + axisZ*axisZ);
// Normalize the rotation vector if it's big enough to get the axis
// (that is, EPSILON should represent your maximum allowable margin of error)
if (omegaMagnitude > EPSILON) {
axisX /= omegaMagnitude;
axisY /= omegaMagnitude;
axisZ /= omegaMagnitude;
}
// Integrate around this axis with the angular speed by the timestep
// in order to get a delta rotation from this sample over the timestep
// We will convert this axis-angle representation of the delta rotation
// into a quaternion before turning it into the rotation matrix.
float thetaOverTwo = omegaMagnitude * dT / 2.0f;
float sinThetaOverTwo = (float) Math.sin(thetaOverTwo);
float cosThetaOverTwo = (float) Math.cos(thetaOverTwo);
deltaRotationVector[0] = sinThetaOverTwo * axisX;
deltaRotationVector[1] = sinThetaOverTwo * axisY;
deltaRotationVector[2] = sinThetaOverTwo * axisZ;
deltaRotationVector[3] = cosThetaOverTwo;
}
timestamp = event.timestamp;
float[] deltaRotationMatrix = new float[9];
SensorManager.getRotationMatrixFromVector(deltaRotationMatrix,
deltaRotationVector);
// User code should concatenate the delta rotation we computed with the current rotation
// in order to get the updated rotation.
// rotationCurrent = rotationCurrent * deltaRotationMatrix;
}
}
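If the goal is simply near-ear detection, one option is to combine the proximity reading with the gravity sensor, since the phone is held roughly upright at the ear. A hedged sketch (the thresholds are assumptions you would tune per device):
// Sketch: combine proximity with gravity to guess an "at ear" posture.
private boolean isNear = false;
private boolean isUpright = false;

@Override
public void onSensorChanged(SensorEvent event) {
    switch (event.sensor.getType()) {
        case Sensor.TYPE_PROXIMITY:
            isNear = event.values[0] < event.sensor.getMaximumRange();
            break;
        case Sensor.TYPE_GRAVITY:
            // Held to the ear, the phone is roughly vertical, so most
            // of gravity (~9.81 m/s^2) falls on the Y axis.
            isUpright = Math.abs(event.values[1]) > 6f;
            break;
    }
    if (isNear && isUpright) {
        // Likely at the ear.
    }
}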

How to update the azimuth with the rotation matrix from gyroscope?

Suppose I have my current orientation as (azimuth, pitch, roll). Now I wish to update my orientation with the gyroscope. According to the code given on the Android developer site, I can obtain the so-called deltaRotationMatrix as follows:
// Create a constant to convert nanoseconds to seconds.
private static final float NS2S = 1.0f / 1000000000.0f;
private final float[] deltaRotationVector = new float[4];
private long timestamp;
private static final float EPSILON = 0.000001f; // smallest rotation rate worth normalizing
public void onSensorChanged(SensorEvent event) {
// This timestep's delta rotation to be multiplied by the current rotation
// after computing it from the gyro sample data.
if (timestamp != 0) {
final float dT = (event.timestamp - timestamp) * NS2S;
// Axis of the rotation sample, not normalized yet.
float axisX = event.values[0];
float axisY = event.values[1];
float axisZ = event.values[2];
// Calculate the angular speed of the sample
float omegaMagnitude = (float) Math.sqrt(axisX*axisX + axisY*axisY + axisZ*axisZ);
// Normalize the rotation vector if it's big enough to get the axis
// (that is, EPSILON should represent your maximum allowable margin of error)
if (omegaMagnitude > EPSILON) {
axisX /= omegaMagnitude;
axisY /= omegaMagnitude;
axisZ /= omegaMagnitude;
}
// Integrate around this axis with the angular speed by the timestep
// in order to get a delta rotation from this sample over the timestep
// We will convert this axis-angle representation of the delta rotation
// into a quaternion before turning it into the rotation matrix.
float thetaOverTwo = omegaMagnitude * dT / 2.0f;
float sinThetaOverTwo = (float) Math.sin(thetaOverTwo);
float cosThetaOverTwo = (float) Math.cos(thetaOverTwo);
deltaRotationVector[0] = sinThetaOverTwo * axisX;
deltaRotationVector[1] = sinThetaOverTwo * axisY;
deltaRotationVector[2] = sinThetaOverTwo * axisZ;
deltaRotationVector[3] = cosThetaOverTwo;
}
timestamp = event.timestamp;
float[] deltaRotationMatrix = new float[9];
SensorManager.getRotationMatrixFromVector(deltaRotationMatrix, deltaRotationVector);
// User code should concatenate the delta rotation we computed with the current rotation
// in order to get the updated rotation.
// rotationCurrent = rotationCurrent * deltaRotationMatrix;
}
}
How should I proceed with this snippet so as to update my orientation?
You just need to multiply the deltaRotationMatrix by the currentRotationMatrix and then call SensorManager.getOrientation(). You will need to implement a matrix multiplication method (a sketch follows after the snippet below). You will also need an initial currentRotationMatrix: you can use the acceleration and magnetic field sensors with SensorManager.getRotationMatrix() and SensorManager.getOrientation() to obtain it, or alternatively use TYPE_ROTATION_VECTOR.
currentRotationMatrix = matrixMultiplication(
currentRotationMatrix,
deltaRotationMatrix);
SensorManager.getOrientation(currentRotationMatrix,
gyroscopeOrientation);
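A minimal sketch of such a matrix multiplication method for the 9-element row-major matrices that SensorManager uses (the method name matches the call above; the body is my own sketch, not from the original answer):
// Multiplies two 3x3 row-major matrices: result = a * b.
private float[] matrixMultiplication(float[] a, float[] b) {
    float[] result = new float[9];
    for (int row = 0; row < 3; row++) {
        for (int col = 0; col < 3; col++) {
            float sum = 0f;
            for (int k = 0; k < 3; k++) {
                sum += a[row * 3 + k] * b[k * 3 + col];
            }
            result[row * 3 + col] = sum;
        }
    }
    return result;
}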
Unfortunately, what you will find is that even the TYPE_GYROSCOPE sensor, which is supposed to be calibrated for drift, doesn't do a very good job, and the sensor quickly drifts out of alignment with the device. Frustrating.
I have a GitHub repo with all of this implemented here
And a working project on the Play Store here

Android getOrientation Azimuth gets polluted when phone is tilted

I'm having a really annoying problem with an AR view acting as a compass. When I hold the phone in portrait (so that the screen is pointing at my face), I call remapCoordinateSystem so that the pitch is 0 in portrait. The azimuth (compass functionality) is then perfect, but as soon as I tilt the phone the azimuth gets ruined: if I bend forward the azimuth increases, and if I bend backwards it decreases.
I use two sensors to get the readings: Sensor.TYPE_MAGNETIC_FIELD and Sensor.TYPE_GRAVITY.
I use a low-pass filter, which is pretty basic: it's implemented with an alpha constant and applied directly to the values read from the sensors.
Here is my code:
float[] rotationMatrix = new float[9];
SensorManager.getRotationMatrix(rotationMatrix, null, gravitymeterValues,
magnetometerValues);
float[] remappedRotationMatrix = new float[9];
SensorManager.remapCoordinateSystem(rotationMatrix, SensorManager.AXIS_X,
SensorManager.AXIS_Z, remappedRotationMatrix);
float results[] = new float[3];
SensorManager.getOrientation(remappedRotationMatrix, results);
float azimuth = (float) (results[0] * 180 / Math.PI);
if (azimuth < 0) {
azimuth += 360;
}
float pitch = (float) (results[1] * 180 / Math.PI);
float roll = (float) (results[2] * 180 / Math.PI);
As you can see, there is no magic here. I call this piece of code when the gravitymeterValues and magnetometerValues are ready to be used.
My question is how do I stop the azimuth from going crazy when I tilt the phone?
I checked a free app on the Google Play Store, Compass, and it hasn't solved this problem either, but I hope there is a solution.
I have 2 solutions in mind:
Make the AR view work only within tightly constrained pitch angles; right now I have something like pitch >= -5 && pitch <= 30. If this isn't fulfilled, the user is shown a screen asking them to rotate the phone to portrait.
Somehow use the pitch to suppress the azimuth. This seems like a pretty device-specific solution, though, but of course I'm open to suggestions.
I can also add that I've been searching for a couple of hours for a decent solution, and I haven't found anything better than 2) above.
Thanks in advance!
For the complete code, see https://github.com/hoananguyen/dsensor
Keep a history and average it out. I do not know the correct interpretation of pitch and roll, so the following code is for the azimuth only.
Class members
private List<float[]> mRotHist = new ArrayList<float[]>();
private int mRotHistIndex;
// Change the value so that the azimuth is stable and fit your requirement
private int mHistoryMaxLength = 40;
float[] mGravity;
float[] mMagnetic;
float[] mRotationMatrix = new float[9];
// the direction of the back camera, only valid if the device is tilted up by
// at least 25 degrees.
private float mFacing = Float.NAN;
public static final float TWENTY_FIVE_DEGREE_IN_RADIAN = 0.436332313f;
public static final float ONE_FIFTY_FIVE_DEGREE_IN_RADIAN = 2.7052603f;
onSensorChanged
@Override
public void onSensorChanged(SensorEvent event)
{
if (event.sensor.getType() == Sensor.TYPE_GRAVITY)
{
mGravity = event.values.clone();
}
else
{
mMagnetic = event.values.clone();
}
if (mGravity != null && mMagnetic != null)
{
if (SensorManager.getRotationMatrix(mRotationMatrix, null, mGravity, mMagnetic))
{
// inclination is the degree of tilt by the device independent of orientation (portrait or landscape)
// if less than 25 or more than 155 degrees the device is considered lying flat
float inclination = (float) Math.acos(mRotationMatrix[8]);
if (inclination < TWENTY_FIVE_DEGREE_IN_RADIAN
|| inclination > ONE_FIFTY_FIVE_DEGREE_IN_RADIAN)
{
// mFacing is undefined, so we need to clear the history
clearRotHist();
mFacing = Float.NaN;
}
else
{
setRotHist();
// mFacing = azimuth is in radian
mFacing = findFacing();
}
}
}
}
private void clearRotHist()
{
if (DEBUG) {Log.d(TAG, "clearRotHist()");}
mRotHist.clear();
mRotHistIndex = 0;
}
private void setRotHist()
{
if (DEBUG) {Log.d(TAG, "setRotHist()");}
float[] hist = mRotationMatrix.clone();
if (mRotHist.size() == mHistoryMaxLength)
{
mRotHist.remove(mRotHistIndex);
}
mRotHist.add(mRotHistIndex++, hist);
mRotHistIndex %= mHistoryMaxLength;
}
private float findFacing()
{
if (DEBUG) {Log.d(TAG, "findFacing()");}
float[] averageRotHist = average(mRotHist);
return (float) Math.atan2(-averageRotHist[2], -averageRotHist[5]);
}
public float[] average(List<float[]> values)
{
float[] result = new float[9];
for (float[] value : values)
{
for (int i = 0; i < 9; i++)
{
result[i] += value[i];
}
}
for (int i = 0; i < 9; i++)
{
result[i] = result[i] / values.size();
}
return result;
}

Removing Noise from recorded accelerometer/gyroscope data

I have seen some answers about reducing the noise of, for example, the accelerometer x, y, z values while listening, but my problem is a bit different.
I have some recorded data already (in CSV files), and I would like to remove/reduce the noise afterwards, if that's possible.
Here is the data that was recorded:
X,Y,Z from gyroscope
Delta 0-3 from gyroscope, which was calculated in this way:
axisX = 0;
axisY = 0;
axisZ = 0;
// This timestep's delta rotation to be multiplied by the
// current rotation
// after computing it from the gyro sample data.
if (timestamp != 0) {
final float dT = (event.timestamp - timestamp) * NS2S;
// Axis of the rotation sample, not normalized yet.
axisX = event.values[0];
axisY = event.values[1];
axisZ = event.values[2];
// Calculate the angular speed of the sample
float omegaMagnitude = FloatMath.sqrt(axisX * axisX + axisY
* axisY + axisZ * axisZ);
// Normalize the rotation vector if it's big enough to get
// the axis (that is, EPSILON should represent your maximum
// allowable margin of error)
if (omegaMagnitude > 0.000000001f) {
axisX /= omegaMagnitude;
axisY /= omegaMagnitude;
axisZ /= omegaMagnitude;
}
// Integrate around this axis with the angular speed by the
// timestep in order to get a delta rotation from this
// sample over the timestep We will convert this axis-angle
// representation of the delta rotation into a quaternion
// before turning it into the rotation matrix.
float thetaOverTwo = omegaMagnitude * dT / 2.0f;
float sinThetaOverTwo = FloatMath.sin(thetaOverTwo);
float cosThetaOverTwo = FloatMath.cos(thetaOverTwo);
deltaRotationVector[0] = sinThetaOverTwo * axisX;
deltaRotationVector[1] = sinThetaOverTwo * axisY;
deltaRotationVector[2] = sinThetaOverTwo * axisZ;
deltaRotationVector[3] = cosThetaOverTwo;
}
timestamp = event.timestamp;
float[] deltaRotationMatrix = new float[9];
SensorManager.getRotationMatrixFromVector(deltaRotationMatrix,deltaRotationVector);
Pitch/roll/azimuth/inclination, which were calculated in this way:
// Calculation of the orientation through the
// magnetic-field and accelerometer sensors.
if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER)
mGravity = event.values;
if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD)
mGeomagnetic = event.values;
if (mGravity != null && mGeomagnetic != null) {
float R[] = new float[9];
float I[] = new float[9];
boolean success = SensorManager.getRotationMatrix(R, I, mGravity, mGeomagnetic);
if (success) {
float orientation[] = new float[3];
SensorManager.getOrientation(R, orientation);
// get the current orientation
// orientation consists of: azimuth, pitch and roll in radians
azimut = orientation[0] * (180 / (float) java.lang.Math.PI);
pitch = orientation[1] * (180 / (float) java.lang.Math.PI);
roll = orientation[2] * (180 / (float) java.lang.Math.PI);
inclination = SensorManager.getInclination(I) * (180 / (float) java.lang.Math.PI);
}
}
The X/Y/Z values from the accelerometer weren't written to the files.
So my question is:
Can I remove the noise from this data?
Thanks in advance.
I do not know if it is too late for you; I'm writing this in case you still need it.
You can implement some kind of filter with the data. A low-pass filter is typical; otherwise, try a complementary filter. Personally, I preferred a Kalman filter, although it is a bit computationally expensive.
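As a minimal sketch of the low-pass idea applied offline to already-recorded samples (the alpha value is an assumption you would tune; smaller values smooth more but lag more):
// Simple exponential moving average (low-pass) over recorded samples.
static float[] lowPass(float[] samples, float alpha) {
    float[] filtered = new float[samples.length];
    if (samples.length == 0) return filtered;
    filtered[0] = samples[0];
    for (int i = 1; i < samples.length; i++) {
        filtered[i] = filtered[i - 1] + alpha * (samples[i] - filtered[i - 1]);
    }
    return filtered;
}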
Since you don't have the accelerometer recorded, and if I understand correctly what you have is the orientation, I would recommend converting the Euler angles to a quaternion representation and using averaging to smooth the data. This is not regular averaging; see the link and the conversion sketch below.
You can implement a rolling-window filter by averaging, using this MATLAB code example:
https://stackoverflow.com/a/29315869/6589074
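A minimal sketch of the Euler-to-quaternion conversion step (this assumes the recorded angles are in radians and follow a Z-Y-X yaw/pitch/roll convention; verify the convention against how your angles were recorded):
// Converts yaw/pitch/roll (radians, Z-Y-X intrinsic convention assumed)
// to a unit quaternion [w, x, y, z].
static float[] eulerToQuaternion(float yaw, float pitch, float roll) {
    float cy = (float) Math.cos(yaw * 0.5f), sy = (float) Math.sin(yaw * 0.5f);
    float cp = (float) Math.cos(pitch * 0.5f), sp = (float) Math.sin(pitch * 0.5f);
    float cr = (float) Math.cos(roll * 0.5f), sr = (float) Math.sin(roll * 0.5f);
    return new float[] {
        cr * cp * cy + sr * sp * sy, // w
        sr * cp * cy - cr * sp * sy, // x
        cr * sp * cy + sr * cp * sy, // y
        cr * cp * sy - sr * sp * cy  // z
    };
}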
All the best,
Lev

Using Android gyroscope instead of accelerometer. I find lots of bits and pieces, but no complete code

The Sensor Fusion video looks great, but there's no code:
http://www.youtube.com/watch?v=C7JQ7Rpwn2k&feature=player_detailpage#t=1315s
Here is my code, which just uses the accelerometer and compass. I also use a Kalman filter on the 3 orientation values, but that's too much code to show here. Ultimately, this works OK, but the result is either too jittery or too laggy depending on what I do with the results and how low I make the filtering factors.
/** Just accelerometer and magnetic sensors */
public abstract class SensorsListener2
implements
SensorEventListener
{
/** The lower this is, the greater the preference which is given to previous values. (slows change) */
private static final float accelFilteringFactor = 0.1f;
private static final float magFilteringFactor = 0.01f;
public abstract boolean getIsLandscape();
@Override
public void onSensorChanged(SensorEvent event) {
Sensor sensor = event.sensor;
int type = sensor.getType();
switch (type) {
case Sensor.TYPE_MAGNETIC_FIELD:
mags[0] = event.values[0] * magFilteringFactor + mags[0] * (1.0f - magFilteringFactor);
mags[1] = event.values[1] * magFilteringFactor + mags[1] * (1.0f - magFilteringFactor);
mags[2] = event.values[2] * magFilteringFactor + mags[2] * (1.0f - magFilteringFactor);
isReady = true;
break;
case Sensor.TYPE_ACCELEROMETER:
accels[0] = event.values[0] * accelFilteringFactor + accels[0] * (1.0f - accelFilteringFactor);
accels[1] = event.values[1] * accelFilteringFactor + accels[1] * (1.0f - accelFilteringFactor);
accels[2] = event.values[2] * accelFilteringFactor + accels[2] * (1.0f - accelFilteringFactor);
break;
default:
return;
}
if(mags != null && accels != null && isReady) {
isReady = false;
SensorManager.getRotationMatrix(rot, inclination, accels, mags);
boolean isLandscape = getIsLandscape();
if(isLandscape) {
outR = rot;
} else {
// Remap the coordinates to work in portrait mode.
SensorManager.remapCoordinateSystem(rot, SensorManager.AXIS_X, SensorManager.AXIS_Z, outR);
}
SensorManager.getOrientation(outR, values);
double x180pi = 180.0 / Math.PI;
float azimuth = (float)(values[0] * x180pi);
float pitch = (float)(values[1] * x180pi);
float roll = (float)(values[2] * x180pi);
// In landscape mode swap pitch and roll and invert the pitch.
if(isLandscape) {
float tmp = pitch;
pitch = -roll;
roll = -tmp;
azimuth = 180 - azimuth;
} else {
pitch = -pitch - 90;
azimuth = 90 - azimuth;
}
onOrientationChanged(azimuth,pitch,roll);
}
}
private float[] mags = new float[3];
private float[] accels = new float[3];
private boolean isReady;
private float[] rot = new float[9];
private float[] outR = new float[9];
private float[] inclination = new float[9];
private float[] values = new float[3];
/**
Azimuth: angle between the magnetic north direction and the Y axis, around the Z axis (0 to 359). 0=North, 90=East, 180=South, 270=West
Pitch: rotation around X axis (-180 to 180), with positive values when the z-axis moves toward the y-axis.
Roll: rotation around Y axis (-90 to 90), with positive values when the x-axis moves toward the z-axis.
*/
public abstract void onOrientationChanged(float azimuth, float pitch, float roll);
}
I tried to figure out how to add gyroscope data, but I am just not doing it right. The Google doc at http://developer.android.com/reference/android/hardware/SensorEvent.html shows some code to get a delta matrix from the gyroscope data. The idea seems to be that I'd crank down the filters for the accelerometer and magnetic sensors so that they were really stable. That would keep track of the long-term orientation.
Then, I'd keep a history of the most recent N delta matrices from the gyroscope. Each time I got a new one I'd drop off the oldest one and multiply them all together to get a final matrix which I would multiply against the stable matrix returned by the accelerometer and magnetic sensors.
This doesn't seem to work. Or, at least, my implementation of it does not work. The result is far more jittery than with just the accelerometer. Increasing the size of the gyroscope history actually increases the jitter, which makes me think that I'm not calculating the right values from the gyroscope.
public abstract class SensorsListener3
implements
SensorEventListener
{
/** The lower this is, the greater the preference which is given to previous values. (slows change) */
private static final float kFilteringFactor = 0.001f;
private static final float magKFilteringFactor = 0.001f;
public abstract boolean getIsLandscape();
@Override
public void onSensorChanged(SensorEvent event) {
Sensor sensor = event.sensor;
int type = sensor.getType();
switch (type) {
case Sensor.TYPE_MAGNETIC_FIELD:
mags[0] = event.values[0] * magKFilteringFactor + mags[0] * (1.0f - magKFilteringFactor);
mags[1] = event.values[1] * magKFilteringFactor + mags[1] * (1.0f - magKFilteringFactor);
mags[2] = event.values[2] * magKFilteringFactor + mags[2] * (1.0f - magKFilteringFactor);
isReady = true;
break;
case Sensor.TYPE_ACCELEROMETER:
accels[0] = event.values[0] * kFilteringFactor + accels[0] * (1.0f - kFilteringFactor);
accels[1] = event.values[1] * kFilteringFactor + accels[1] * (1.0f - kFilteringFactor);
accels[2] = event.values[2] * kFilteringFactor + accels[2] * (1.0f - kFilteringFactor);
break;
case Sensor.TYPE_GYROSCOPE:
gyroscopeSensorChanged(event);
break;
default:
return;
}
if(mags != null && accels != null && isReady) {
isReady = false;
SensorManager.getRotationMatrix(rot, inclination, accels, mags);
boolean isLandscape = getIsLandscape();
if(isLandscape) {
outR = rot;
} else {
// Remap the coordinates to work in portrait mode.
SensorManager.remapCoordinateSystem(rot, SensorManager.AXIS_X, SensorManager.AXIS_Z, outR);
}
if(gyroUpdateTime!=0) {
matrixHistory.mult(matrixTmp,matrixResult);
outR = matrixResult;
}
SensorManager.getOrientation(outR, values);
double x180pi = 180.0 / Math.PI;
float azimuth = (float)(values[0] * x180pi);
float pitch = (float)(values[1] * x180pi);
float roll = (float)(values[2] * x180pi);
// In landscape mode swap pitch and roll and invert the pitch.
if(isLandscape) {
float tmp = pitch;
pitch = -roll;
roll = -tmp;
azimuth = 180 - azimuth;
} else {
pitch = -pitch - 90;
azimuth = 90 - azimuth;
}
onOrientationChanged(azimuth,pitch,roll);
}
}
private void gyroscopeSensorChanged(SensorEvent event) {
// This timestep's delta rotation to be multiplied by the current rotation
// after computing it from the gyro sample data.
if(gyroUpdateTime != 0) {
final float dT = (event.timestamp - gyroUpdateTime) * NS2S;
// Axis of the rotation sample, not normalized yet.
float axisX = event.values[0];
float axisY = event.values[1];
float axisZ = event.values[2];
// Calculate the angular speed of the sample
float omegaMagnitude = (float)Math.sqrt(axisX*axisX + axisY*axisY + axisZ*axisZ);
// Normalize the rotation vector if it's big enough to get the axis
if(omegaMagnitude > EPSILON) {
axisX /= omegaMagnitude;
axisY /= omegaMagnitude;
axisZ /= omegaMagnitude;
}
// Integrate around this axis with the angular speed by the timestep
// in order to get a delta rotation from this sample over the timestep
// We will convert this axis-angle representation of the delta rotation
// into a quaternion before turning it into the rotation matrix.
float thetaOverTwo = omegaMagnitude * dT / 2.0f;
float sinThetaOverTwo = (float)Math.sin(thetaOverTwo);
float cosThetaOverTwo = (float)Math.cos(thetaOverTwo);
deltaRotationVector[0] = sinThetaOverTwo * axisX;
deltaRotationVector[1] = sinThetaOverTwo * axisY;
deltaRotationVector[2] = sinThetaOverTwo * axisZ;
deltaRotationVector[3] = cosThetaOverTwo;
}
gyroUpdateTime = event.timestamp;
SensorManager.getRotationMatrixFromVector(deltaRotationMatrix, deltaRotationVector);
// User code should concatenate the delta rotation we computed with the current rotation
// in order to get the updated rotation.
// rotationCurrent = rotationCurrent * deltaRotationMatrix;
matrixHistory.add(deltaRotationMatrix);
}
private float[] mags = new float[3];
private float[] accels = new float[3];
private boolean isReady;
private float[] rot = new float[9];
private float[] outR = new float[9];
private float[] inclination = new float[9];
private float[] values = new float[3];
// gyroscope stuff
private long gyroUpdateTime = 0;
private static final float NS2S = 1.0f / 1000000000.0f;
private float[] deltaRotationMatrix = new float[9];
private final float[] deltaRotationVector = new float[4];
//TODO: I have no idea how small this value should be.
private static final float EPSILON = 0.000001f;
private float[] matrixMult = new float[9];
private MatrixHistory matrixHistory = new MatrixHistory(100);
private float[] matrixTmp = new float[9];
private float[] matrixResult = new float[9];
/**
Azimuth: angle between the magnetic north direction and the Y axis, around the Z axis (0 to 359). 0=North, 90=East, 180=South, 270=West
Pitch: rotation around X axis (-180 to 180), with positive values when the z-axis moves toward the y-axis.
Roll: rotation around Y axis (-90 to 90), with positive values when the x-axis moves toward the z-axis.
*/
public abstract void onOrientationChanged(float azimuth, float pitch, float roll);
}
public class MatrixHistory
{
public MatrixHistory(int size) {
vals = new float[size][];
}
public void add(float[] val) {
synchronized(vals) {
vals[ix] = val;
ix = (ix + 1) % vals.length;
if(ix==0)
full = true;
}
}
public void mult(float[] tmp, float[] output) {
synchronized(vals) {
if(full) {
for(int i=0; i<vals.length; ++i) {
if(i==0) {
System.arraycopy(vals[i],0,output,0,vals[i].length);
} else {
MathUtils.multiplyMatrix3x3(output,vals[i],tmp);
System.arraycopy(tmp,0,output,0,tmp.length);
}
}
} else {
if(ix==0)
return;
for(int i=0; i<ix; ++i) {
if(i==0) {
System.arraycopy(vals[i],0,output,0,vals[i].length);
} else {
MathUtils.multiplyMatrix3x3(output,vals[i],tmp);
System.arraycopy(tmp,0,output,0,tmp.length);
}
}
}
}
}
private int ix = 0;
private boolean full = false;
private float[][] vals;
}
The second block of code contains my changes from the first block, which add the gyroscope to the mix.
Specifically, the filtering factor for the accelerometer is made smaller (making the value more stable). The MatrixHistory class keeps track of the last 100 gyroscope deltaRotationMatrix values, which are calculated in the gyroscopeSensorChanged method.
I've seen many questions on this site on this topic. They've helped me get to this point, but I cannot figure out what to do next. I really wish the Sensor Fusion guy had just posted some code somewhere; he obviously had it all put together.
Well, +1 to you for even knowing what a Kalman filter is. If you'd like, I'll edit this post and give you the code I wrote a couple years ago to do what you're trying to do.
But first, I'll tell you why you don't need it.
Modern implementations of the Android sensor stack use Sensor Fusion, as Stan mentioned above. This just means that all of the available data -- accel, mag, gyro -- is collected together in one algorithm, and then all the outputs are read back out in the form of Android sensors.
Edit: I just stumbled on this superb Google Tech Talk on the subject: Sensor Fusion on Android Devices: A Revolution in Motion Processing. Well worth the 45 minutes to watch it if you're interested in the topic.
In essence, Sensor Fusion is a black box. I've looked into the source code of the Android implementation, and it's a big Kalman filter written in C++. Some pretty good code in there, far more sophisticated than any filter I ever wrote, and probably more sophisticated than what you're writing. Remember, these guys are doing this for a living.
I also know that at least one chipset manufacturer has their own sensor fusion implementation. The manufacturer of the device then chooses between the Android and the vendor implementation based on their own criteria.
Finally, as Stan mentioned above, Invensense has their own sensor fusion implementation at the chip level.
Anyway, what it all boils down to is that the built-in sensor fusion in your device is likely to be superior to anything you or I could cobble together. So what you really want to do is to access that.
In Android, there are both physical and virtual sensors. The virtual sensors are the ones that are synthesized from the available physical sensors. The best-known example is TYPE_ORIENTATION which takes accelerometer and magnetometer and creates roll/pitch/heading output. (By the way, you should not use this sensor; it has too many limitations.)
But the important thing is that newer versions of Android contain these two new virtual sensors:
TYPE_GRAVITY is the accelerometer input with the effect of motion filtered out
TYPE_LINEAR_ACCELERATION is the accelerometer with the gravity component filtered out.
These two virtual sensors are synthesized through a combination of accelerometer input and gyro input.
Another notable sensor is TYPE_ROTATION_VECTOR which is a Quaternion synthesized from accelerometer, magnetometer, and gyro. It represents the full 3-d orientation of the device with the effects of linear acceleration filtered out.
However, Quaternions are a little bit abstract for most people, and since you're likely working with 3-d transformations anyway, your best approach is to combine TYPE_GRAVITY and TYPE_MAGNETIC_FIELD via SensorManager.getRotationMatrix().
One more point: if you're working with a device running an older version of Android, you need to detect that you're not receiving TYPE_GRAVITY events and use TYPE_ACCELEROMETER instead. Theoretically, this would be a place to use your own Kalman filter, but if your device doesn't have sensor fusion built in, it probably doesn't have gyros either.
Anyway, here's some sample code to show how I do it.
// Requires 1.5 or above
class Foo extends Activity implements SensorEventListener {
static final String TAG = "Foo";
static final float DEG = (float) (180.0 / Math.PI); // radians to degrees
SensorManager sensorManager;
float[] gData = new float[3]; // Gravity or accelerometer
float[] mData = new float[3]; // Magnetometer
float[] orientation = new float[3];
float[] Rmat = new float[9];
float[] R2 = new float[9];
float[] Imat = new float[9];
boolean haveGrav = false;
boolean haveAccel = false;
boolean haveMag = false;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Get the sensor manager from system services
sensorManager =
(SensorManager)getSystemService(Context.SENSOR_SERVICE);
}
@Override
protected void onResume() {
super.onResume();
// Register our listeners
Sensor gsensor = sensorManager.getDefaultSensor(Sensor.TYPE_GRAVITY);
Sensor asensor = sensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
Sensor msensor = sensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD);
sensorManager.registerListener(this, gsensor, SensorManager.SENSOR_DELAY_GAME);
sensorManager.registerListener(this, asensor, SensorManager.SENSOR_DELAY_GAME);
sensorManager.registerListener(this, msensor, SensorManager.SENSOR_DELAY_GAME);
}
@Override
public void onSensorChanged(SensorEvent event) {
switch( event.sensor.getType() ) {
case Sensor.TYPE_GRAVITY:
gData[0] = event.values[0];
gData[1] = event.values[1];
gData[2] = event.values[2];
haveGrav = true;
break;
case Sensor.TYPE_ACCELEROMETER:
if (haveGrav) break; // don't need it, we have better
gData[0] = event.values[0];
gData[1] = event.values[1];
gData[2] = event.values[2];
haveAccel = true;
break;
case Sensor.TYPE_MAGNETIC_FIELD:
mData[0] = event.values[0];
mData[1] = event.values[1];
mData[2] = event.values[2];
haveMag = true;
break;
default:
return;
}
if ((haveGrav || haveAccel) && haveMag) {
SensorManager.getRotationMatrix(Rmat, Imat, gData, mData);
SensorManager.remapCoordinateSystem(Rmat,
SensorManager.AXIS_Y, SensorManager.AXIS_MINUS_X, R2);
// Orientation isn't as useful as a rotation matrix, but
// we'll show it here anyway.
SensorManager.getOrientation(R2, orientation);
float incl = SensorManager.getInclination(Imat);
Log.d(TAG, "mh: " + (int)(orientation[0]*DEG));
Log.d(TAG, "pitch: " + (int)(orientation[1]*DEG));
Log.d(TAG, "roll: " + (int)(orientation[2]*DEG));
Log.d(TAG, "yaw: " + (int)(orientation[0]*DEG));
Log.d(TAG, "inclination: " + (int)(incl*DEG));
}
}
@Override
public void onAccuracyChanged(Sensor sensor, int accuracy) {
// No-op; required by the SensorEventListener interface.
}
}
Hmmm; if you happen to have a Quaternion library handy, it's probably simpler just to receive TYPE_ROTATION_VECTOR and convert that to an array.
To the question of where to find complete code, here's the default implementation in Android Jelly Bean: https://android.googlesource.com/platform/frameworks/base/+/jb-release/services/sensorservice/
Start by checking fusion.cpp/h.
It uses Modified Rodrigues Parameters (closely related to Euler angles) instead of quaternions. In addition to orientation, the Kalman filter estimates gyro drift. For measurement updates it uses the magnetometer and, a bit incorrectly, acceleration (specific force).
To make use of the code you should either be a wizard or know the basics of INS and Kalman filtering. Many parameters have to be fine-tuned for the filter to work. As Edward adequately put it, these guys are doing this for a living.
At least in Google's Galaxy Nexus, this default implementation is left unused and is overridden by Invensense's proprietary system.
