camerasource.takePicture() save rotated images in some device - android

I am using the Vision API for face tracking, and I apply a mask based on the face position. When I take a picture with the front camera, I call cameraSource.takePicture() to save the image. On some devices (e.g. Samsung) the captured image is rotated, so the mask and the face appear in different positions. I used the Exif class to get the orientation of the saved image, but it always returns 0, so I am unable to rotate the image.
I am using following class to getOrientation and rotate image.
public class ExifUtils {
public Bitmap rotateBitmap(String src, Bitmap bitmap) {
try {
int orientation = getExifOrientation(src);
if (orientation == 1) {
return bitmap;
}
Matrix matrix = new Matrix();
switch (orientation) {
case 2:
matrix.setScale(-1, 1);
break;
case 3:
matrix.setRotate(180);
break;
case 4:
matrix.setRotate(180);
matrix.postScale(-1, 1);
break;
case 5:
matrix.setRotate(90);
matrix.postScale(-1, 1);
break;
case 6:
matrix.setRotate(90);
break;
case 7:
matrix.setRotate(-90);
matrix.postScale(-1, 1);
break;
case 8:
matrix.setRotate(-90);
break;
default:
return bitmap;
}
try {
Bitmap oriented = Bitmap.createBitmap(bitmap, 0, 0,
bitmap.getWidth(), bitmap.getHeight(), matrix, true);
bitmap.recycle();
return oriented;
} catch (OutOfMemoryError e) {
e.printStackTrace();
return bitmap;
}
} catch (IOException e) {
e.printStackTrace();
}
return bitmap;
}
private int getExifOrientation(String src) throws IOException {
int orientation = 1;
try {
if (Build.VERSION.SDK_INT >= 5) {
Class<?> exifClass = Class
.forName("android.media.ExifInterface");
Constructor<?> exifConstructor = exifClass
.getConstructor(new Class[]{String.class});
Object exifInstance = exifConstructor
.newInstance(new Object[]{src});
Method getAttributeInt = exifClass.getMethod("getAttributeInt",
new Class[]{String.class, int.class});
Field tagOrientationField = exifClass
.getField("TAG_ORIENTATION");
String tagOrientation = (String) tagOrientationField.get(null);
orientation = (Integer) getAttributeInt.invoke(exifInstance,
new Object[]{tagOrientation, 1});
}
} catch (ClassNotFoundException e) {
e.printStackTrace();
} catch (SecurityException e) {
e.printStackTrace();
} catch (NoSuchMethodException e) {
e.printStackTrace();
} catch (IllegalArgumentException e) {
e.printStackTrace();
} catch (Fragment.InstantiationException e) {
e.printStackTrace();
} catch (IllegalAccessException e) {
e.printStackTrace();
} catch (InvocationTargetException e) {
e.printStackTrace();
} catch (NoSuchFieldException e) {
e.printStackTrace();
} catch (java.lang.InstantiationException e) {
e.printStackTrace();
}
return orientation;
}
}
I found this issue with the Vision API — is there any solution?

I solved the problem myself: I read the orientation from the JPEG byte data and then rotate the image accordingly.
private CameraSource.PictureCallback mPicture = new CameraSource.PictureCallback() {
#Override
public void onPictureTaken(byte[] bytes) {
int orientation = Exif.getOrientation(bytes);
Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
switch(orientation) {
case 90:
bitmapPicture= rotateImage(bitmap, 90);
break;
case 180:
bitmapPicture= rotateImage(bitmap, 180);
break;
case 270:
bitmapPicture= rotateImage(bitmap, 270);
break;
case 0:
// if orientation is zero we don't need to rotate this
default:
break;
}
//write your code here to save bitmap
}
}
};
/**
 * Returns a copy of {@code source} rotated clockwise by {@code angle} degrees.
 */
public static Bitmap rotateImage(Bitmap source, float angle) {
final Matrix rotation = new Matrix();
rotation.postRotate(angle);
final int width = source.getWidth();
final int height = source.getHeight();
return Bitmap.createBitmap(source, 0, 0, width, height, rotation, true);
}
Below class is used to get orientation from byte[] data.
/**
 * Minimal JPEG/EXIF parser (adapted from AOSP camera code) that extracts the
 * orientation tag directly from an in-memory JPEG, without touching disk.
 */
public class Exif {
private static final String TAG = "CameraExif";
// Returns the degrees in clockwise. Values are 0, 90, 180, or 270.
public static int getOrientation(byte[] jpeg) {
if (jpeg == null) {
return 0;
}
int offset = 0;
int length = 0;
// Walk the JPEG marker segments until the EXIF APP1 segment is found.
// ISO/IEC 10918-1:1993(E)
while (offset + 3 < jpeg.length && (jpeg[offset++] & 0xFF) == 0xFF) {
int marker = jpeg[offset] & 0xFF;
// Check if the marker is a padding.
if (marker == 0xFF) {
continue;
}
offset++;
// Check if the marker is SOI or TEM.
if (marker == 0xD8 || marker == 0x01) {
continue;
}
// Check if the marker is EOI or SOS.
if (marker == 0xD9 || marker == 0xDA) {
break;
}
// Get the length and check if it is reasonable.
length = pack(jpeg, offset, 2, false);
if (length < 2 || offset + length > jpeg.length) {
Log.e(TAG, "Invalid length");
return 0;
}
// Break if the marker is EXIF in APP1.
// 0x45786966 is ASCII "Exif"; the following two zero bytes complete the header.
if (marker == 0xE1 && length >= 8 &&
pack(jpeg, offset + 2, 4, false) == 0x45786966 &&
pack(jpeg, offset + 6, 2, false) == 0) {
offset += 8;
length -= 8;
break;
}
// Skip other markers.
offset += length;
length = 0;
}
// Parse the TIFF structure inside the APP1 segment.
// JEITA CP-3451 Exif Version 2.2
if (length > 8) {
// Identify the byte order. "II" (0x4949) = little endian, "MM" (0x4D4D) = big endian.
int tag = pack(jpeg, offset, 4, false);
if (tag != 0x49492A00 && tag != 0x4D4D002A) {
Log.e(TAG, "Invalid byte order");
return 0;
}
boolean littleEndian = (tag == 0x49492A00);
// Get the offset and check if it is reasonable.
int count = pack(jpeg, offset + 4, 4, littleEndian) + 2;
if (count < 10 || count > length) {
Log.e(TAG, "Invalid offset");
return 0;
}
offset += count;
length -= count;
// Get the count and go through all the elements (12 bytes per IFD entry).
count = pack(jpeg, offset - 2, 2, littleEndian);
while (count-- > 0 && length >= 12) {
// Get the tag and check if it is orientation (0x0112).
tag = pack(jpeg, offset, 2, littleEndian);
if (tag == 0x0112) {
// We do not really care about type and count, do we?
int orientation = pack(jpeg, offset + 8, 2, littleEndian);
// Map the EXIF orientation code to clockwise degrees.
switch (orientation) {
case 1:
return 0;
case 3:
return 180;
case 6:
return 90;
case 8:
return 270;
}
Log.i(TAG, "Unsupported orientation");
return 0;
}
offset += 12;
length -= 12;
}
}
Log.i(TAG, "Orientation not found");
return 0;
}
// Reads an unsigned big- or little-endian integer of `length` bytes from `bytes`
// starting at `offset`.
private static int pack(byte[] bytes, int offset, int length,
boolean littleEndian) {
int step = 1;
if (littleEndian) {
// Read the bytes back-to-front so the accumulation loop below stays the same.
offset += length - 1;
step = -1;
}
int value = 0;
while (length-- > 0) {
value = (value << 8) | (bytes[offset] & 0xFF);
offset += step;
}
return value;
}
}

I have encountered similar problem with Samsung devices, ExifInterface doesn't seem to work correctly with images saved by them. In order to solve the problem I used code from Glide image library, it seems to handle checking original image rotation correctly.
Check out this link: Glide source
getOrientation method from there seems to do the job most of the time.

Sounds like an issue with Exif tags to me. Basically, modern cameras save images in the same orientation, but also save a tag that tells you, what the original orientation was.
You could use ExifInterface, which comes bundled with the Android framework. I prefer Alessandro Crugnola's Android-Exif-Interface library, which doesn't require you to keep file paths around.
How I used Android-Exif-Interface in my project:
// Read the orientation tag with Android-Exif-Interface (works on an InputStream,
// so no file path is needed) and build the matching rotation matrix.
ExifInterface exif = new ExifInterface();
Matrix matrix = new Matrix();
try {
exif.readExif(context.getContentResolver().openInputStream(fileUri), ExifInterface.Options.OPTION_ALL);
ExifTag tag = exif.getTag(ExifInterface.TAG_ORIENTATION);
// Default to 1 (ORIENTATION_NORMAL) when the tag has no usable value.
int orientation = tag.getValueAsInt(1);
switch (orientation) {
case 3: /* 180° */
matrix.postRotate(180);
break;
case 6: /* 90° */
matrix.postRotate(90);
break;
case 8: /* 270° */
matrix.postRotate(-90);
break;
}
} catch (IOException e) {
Log.i("INFO","expected behaviour: IOException");
// Not every picture comes from the phone; in that case we can't get EXIF
// tags anyway, since they aren't necessarily preserved when the image is
// transmitted over HTTP.
} catch(NullPointerException e){
Log.i("INFO","expected behaviour: NullPointerException");
// Same as above: getTag() returns null when the orientation tag is absent.
}

In many cases, onPictureTaken() receives a JPEG whose orientation tag is undefined. However, you can calculate the actual device orientation yourself — either by looking at the display rotation or by running an orientation listener — since Camera.takePicture returns a rotated byte array.

Related

Camera Source (Google Mobile Vision) returns rotated image on some devices

I have the open sourced code for the Google Mobile Vision - CameraSource and this is the method I call to click a photo : cameraSource.takePicture();
In the open sourced version of CameraSource.java, the method for determining screen orientation is the stock one:
/**
 * Stock Mobile Vision CameraSource method: combines the current display
 * rotation with the camera sensor's mounting orientation, then applies the
 * result to both the live preview and the captured JPEG.
 */
private void setRotation(Camera camera, Camera.Parameters parameters, int cameraId) {
WindowManager windowManager =
(WindowManager) mContext.getSystemService(Context.WINDOW_SERVICE);
int degrees = 0;
int rotation = windowManager.getDefaultDisplay().getRotation();
switch (rotation) {
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
default:
Log.e(TAG, "Bad rotation value: " + rotation);
}
CameraInfo cameraInfo = new CameraInfo();
Camera.getCameraInfo(cameraId, cameraInfo);
int angle;
int displayAngle;
if (cameraInfo.facing == CameraInfo.CAMERA_FACING_FRONT) {
angle = (cameraInfo.orientation + degrees) % 360;
displayAngle = (360 - angle) % 360; // compensate for it being mirrored
} else { // back-facing
angle = (cameraInfo.orientation - degrees + 360) % 360;
displayAngle = angle;
}
// This corresponds to the rotation constants in {@link Frame}.
mRotation = angle / 90;
camera.setDisplayOrientation(displayAngle);
// NOTE(review): per the Camera.Parameters docs, setRotation() may either
// rotate the JPEG pixels or merely set the EXIF orientation tag -- which of
// the two happens is device-dependent, which is why some devices (Samsung,
// Lenovo, Yuntab) appear to return differently rotated bitmaps.
parameters.setRotation(angle);
}
Here, the displayAngle and the angle are the same for Samsung, Lenovo and Yuntab H8. But the bitmap returned for backCamera is rotated differently in each of the device. I have to manually rotate the bitmap for each of the devices (Samsung : 90, Lenovo : 0 and Yuntab : 180)
My requirement is that onPictureTaken should return a bitmap which matches the current display orientation. I am looking into this, since a long time but yet have to figure a way to the solution. Here below is my onPicturetaken() (called after taking picture):
#Override
public void onPictureTaken(byte[] bytes) {
try {
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 2;
bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length, currentCameraId == 0 ? options : null);
}catch (Exception ex){
ex.printStackTrace();
Log.e("PictureTaken",ex.toString());
}
};
You should rotate the image when it is already saved in the device.
Then you can rotate it to match the position it was when the photo was taken.
Example code (It might need some cleanup and improvement, but it works...):
Method do calculate the rotation an image has:
/**
 * Reads the EXIF orientation tag of the JPEG at {@code path} and converts it
 * to the clockwise rotation (0, 90, 180 or 270 degrees) needed to display the
 * image upright. Returns 0 for non-JPEG files or when the tag cannot be read.
 */
private static int rotationNeeded(String path) {
File file = new File(path);
// Only JPEG files carry the EXIF data we can interpret here. The original
// used contains(".jpg"), which wrongly matched names like "a.jpg.png" and
// missed ".JPG"/".jpeg".
String name = file.getName().toLowerCase(java.util.Locale.ROOT);
if (!name.endsWith(".jpg") && !name.endsWith(".jpeg")) {
return 0;
}
try {
ExifInterface exif = new ExifInterface(file.getAbsolutePath());
int orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
switch (orientation) {
case ExifInterface.ORIENTATION_ROTATE_90:
return 90;
case ExifInterface.ORIENTATION_ROTATE_180:
return 180;
case ExifInterface.ORIENTATION_ROTATE_270:
return 270;
default:
return 0;
}
} catch (IOException e) {
// FileNotFoundException is a subclass of IOException, so this single
// catch replaces the two redundant catch blocks of the original.
e.printStackTrace();
return 0;
}
}
Apply the rotation needed to the image:
/**
 * Rewrites the image at {@code filePath} so that its pixels are physically
 * rotated to match the rotation indicated by its EXIF orientation tag.
 * Best-effort: any failure is logged and the file is left as it was.
 */
public static void rotateImage(String filePath) {
try {
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 2; // downsample to halve memory use
Bitmap bitmap = BitmapFactory.decodeFile(filePath, options);
// Check how far the image needs to be rotated, if at all.
int rotate = rotationNeeded(filePath);
if (rotate != 0) {
Matrix matrix = new Matrix();
matrix.postRotate(rotate);
Bitmap rotatedImage = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(),
bitmap.getHeight(), matrix, true);
bitmap.recycle();
bitmap = rotatedImage;
// Compress the rotated bitmap back over the original file.
// (The original referenced an undefined stream "bao" here and never
// actually wrote the rotated pixels.)
FileOutputStream fos = new FileOutputStream(filePath);
try {
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, fos);
fos.flush();
} finally {
fos.close();
}
bitmap.recycle();
}
} catch (Exception e) {
e.printStackTrace();
}
}

Take picture with drawable/paint on face using vision api

What I am trying?
I am trying to take picture with drawable/paint on face but, i am not able to get both on same picture.
What I have tried?
I have tried using CameraSource.takePicture but i am just getting face without any drawable/paint on it.
// Capture the raw camera JPEG and save it to disk (this gives the face only,
// WITHOUT the overlay drawables).
mCameraSource.takePicture(shutterCallback, new CameraSource.PictureCallback() {
@Override
public void onPictureTaken(byte[] bytes) {
try {
String mainpath = getExternalStorageDirectory() + separator + "TestXyz" + separator + "images" + separator;
File basePath = new File(mainpath);
if (!basePath.exists())
Log.d("CAPTURE_BASE_PATH", basePath.mkdirs() ? "Success": "Failed");
String path = mainpath + "photo_" + getPhotoTime() + ".jpg";
File captureFile = new File(path);
// Create the file once and log the outcome. The original called
// createNewFile() twice, so the logged result was always "Failed".
if (!captureFile.exists())
Log.d("CAPTURE_FILE_PATH", captureFile.createNewFile() ? "Success": "Failed");
FileOutputStream stream = new FileOutputStream(captureFile);
stream.write(bytes);
stream.flush();
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
});
I also tried using :
// This captures only the preview view's drawing cache, i.e. the drawable/paint
// overlay WITHOUT the camera frame underneath it.
mPreview.setDrawingCacheEnabled(true);
Bitmap drawingCache = mPreview.getDrawingCache();
try {
String mainpath = getExternalStorageDirectory() + separator + "TestXyz" + separator + "images" + separator;
File basePath = new File(mainpath);
if (!basePath.exists())
Log.d("CAPTURE_BASE_PATH", basePath.mkdirs() ? "Success": "Failed");
String path = mainpath + "photo_" + getPhotoTime() + ".jpg";
File captureFile = new File(path);
captureFile.createNewFile();
if (!captureFile.exists())
Log.d("CAPTURE_FILE_PATH", captureFile.createNewFile() ? "Success": "Failed");
FileOutputStream stream = new FileOutputStream(captureFile);
// PNG keeps the overlay's transparency intact.
drawingCache.compress(Bitmap.CompressFormat.PNG, 100, stream);
stream.flush();
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
in this case i am only getting what i draw on face. Here, mPreview is the CameraSourcePreview.
Just added capture button and added above code in this google example.
You are very close to achieve what you need :)
You have:
An image from the Camera of the face (First code snippet)
An image from the Canvas of the eyes overlay (Second code snippet)
What you need:
An image that has the face with the eyes overlay on top - A merged image.
How to merge?
To merge 2 images simply use a canvas, like so:
public Bitmap mergeBitmaps(Bitmap face, Bitmap overlay) {
// Create a new image with target size
int width = face.getWidth();
int height = face.getHeight();
Bitmap newBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
Rect faceRect = new Rect(0,0,width,height);
Rect overlayRect = new Rect(0,0,overlay.getWidth(),overlay.getHeight());
// Draw face and then overlay (Make sure rects are as needed)
Canvas canvas = new Canvas(newBitmap);
canvas.drawBitmap(face, faceRect, faceRect, null);
canvas.drawBitmap(overlay, overlayRect, faceRect, null);
return newBitmap
}
Then you can save the new image, as you are doing now.
Full code would look like:
// Capture the camera frame, grab the overlay, merge the two and save the result.
mCameraSource.takePicture(shutterCallback, new CameraSource.PictureCallback() {
@Override
public void onPictureTaken(byte[] bytes) {
// Generate the Face Bitmap from the raw JPEG.
BitmapFactory.Options options = new BitmapFactory.Options();
Bitmap face = BitmapFactory.decodeByteArray(bytes, 0, bytes.length, options);
// Generate the Eyes Overlay Bitmap from the preview's drawing cache.
mPreview.setDrawingCacheEnabled(true);
Bitmap overlay = mPreview.getDrawingCache();
// Generate the final merged image.
Bitmap result = mergeBitmaps(face, overlay);
// Save result image to file.
try {
String mainpath = getExternalStorageDirectory() + separator + "TestXyz" + separator + "images" + separator;
File basePath = new File(mainpath);
if (!basePath.exists())
Log.d("CAPTURE_BASE_PATH", basePath.mkdirs() ? "Success": "Failed");
String path = mainpath + "photo_" + getPhotoTime() + ".jpg";
File captureFile = new File(path);
// Create the file once and log the outcome. The original called
// createNewFile() twice, so the logged result was always "Failed".
if (!captureFile.exists())
Log.d("CAPTURE_FILE_PATH", captureFile.createNewFile() ? "Success": "Failed");
FileOutputStream stream = new FileOutputStream(captureFile);
result.compress(Bitmap.CompressFormat.PNG, 100, stream);
stream.flush();
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
});
Note that the above is just an example code.
You should probably move the merging and saving to a file to a background thread.
I am able to capture image with drawable/paint on it by below solution :
private void captureImage() {
mPreview.setDrawingCacheEnabled(true);
Bitmap drawingCache = mPreview.getDrawingCache();
mCameraSource.takePicture(shutterCallback, new CameraSource.PictureCallback() {
#Override
public void onPictureTaken(byte[] bytes) {
int orientation = Exif.getOrientation(bytes);
Bitmap temp = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
Bitmap picture = rotateImage(temp,orientation);
Bitmap overlay = Bitmap.createBitmap(mGraphicOverlay.getWidth(),mGraphicOverlay.getHeight(),picture.getConfig());
Canvas canvas = new Canvas(overlay);
Matrix matrix = new Matrix();
matrix.setScale((float)overlay.getWidth()/(float)picture.getWidth(),(float)overlay.getHeight()/(float)picture.getHeight());
// mirror by inverting scale and translating
matrix.preScale(-1, 1);
matrix.postTranslate(canvas.getWidth(), 0);
Paint paint = new Paint();
canvas.drawBitmap(picture,matrix,paint);
canvas.drawBitmap(drawingCache,0,0,paint);
try {
String mainpath = getExternalStorageDirectory() + separator + "MaskIt" + separator + "images" + separator;
File basePath = new File(mainpath);
if (!basePath.exists())
Log.d("CAPTURE_BASE_PATH", basePath.mkdirs() ? "Success": "Failed");
String path = mainpath + "photo_" + getPhotoTime() + ".jpg";
File captureFile = new File(path);
captureFile.createNewFile();
if (!captureFile.exists())
Log.d("CAPTURE_FILE_PATH", captureFile.createNewFile() ? "Success": "Failed");
FileOutputStream stream = new FileOutputStream(captureFile);
overlay.compress(Bitmap.CompressFormat.PNG, 100, stream);
stream.flush();
stream.close();
picture.recycle();
drawingCache.recycle();
mPreview.setDrawingCacheEnabled(false);
} catch (IOException e) {
e.printStackTrace();
}
}
});
}
Sometimes orientation issue also occurs on some devices. For that i used Exif class and rotateImage() function.
Exif Class (reference from here) :
/**
 * JPEG/EXIF parser (adapted from AOSP camera code) that extracts the
 * orientation tag directly from an in-memory JPEG.
 */
public class Exif {
private static final String TAG = "CameraExif";
// Returns the raw EXIF orientation code: 0 (normal/unknown), 3, 6 or 8 --
// matching the ExifInterface.ORIENTATION_ROTATE_* constants that
// rotateImage(Bitmap, int) below switches on. NOTE: unlike the upstream
// version of this helper, this does NOT return degrees.
public static int getOrientation(byte[] jpeg) {
if (jpeg == null) {
return 0;
}
int offset = 0;
int length = 0;
// Walk the JPEG marker segments until the EXIF APP1 segment is found.
// ISO/IEC 10918-1:1993(E)
while (offset + 3 < jpeg.length && (jpeg[offset++] & 0xFF) == 0xFF) {
int marker = jpeg[offset] & 0xFF;
// Check if the marker is a padding.
if (marker == 0xFF) {
continue;
}
offset++;
// Check if the marker is SOI or TEM.
if (marker == 0xD8 || marker == 0x01) {
continue;
}
// Check if the marker is EOI or SOS.
if (marker == 0xD9 || marker == 0xDA) {
break;
}
// Get the length and check if it is reasonable.
length = pack(jpeg, offset, 2, false);
if (length < 2 || offset + length > jpeg.length) {
Log.e(TAG, "Invalid length");
return 0;
}
// Break if the marker is EXIF in APP1.
// 0x45786966 is ASCII "Exif"; the following two zero bytes complete the header.
if (marker == 0xE1 && length >= 8 &&
pack(jpeg, offset + 2, 4, false) == 0x45786966 &&
pack(jpeg, offset + 6, 2, false) == 0) {
offset += 8;
length -= 8;
break;
}
// Skip other markers.
offset += length;
length = 0;
}
// Parse the TIFF structure inside the APP1 segment.
// JEITA CP-3451 Exif Version 2.2
if (length > 8) {
// Identify the byte order. "II" = little endian, "MM" = big endian.
int tag = pack(jpeg, offset, 4, false);
if (tag != 0x49492A00 && tag != 0x4D4D002A) {
Log.e(TAG, "Invalid byte order");
return 0;
}
boolean littleEndian = (tag == 0x49492A00);
// Get the offset and check if it is reasonable.
int count = pack(jpeg, offset + 4, 4, littleEndian) + 2;
if (count < 10 || count > length) {
Log.e(TAG, "Invalid offset");
return 0;
}
offset += count;
length -= count;
// Get the count and go through all the elements (12 bytes per IFD entry).
count = pack(jpeg, offset - 2, 2, littleEndian);
while (count-- > 0 && length >= 12) {
// Get the tag and check if it is orientation (0x0112).
tag = pack(jpeg, offset, 2, littleEndian);
if (tag == 0x0112) {
// We do not really care about type and count, do we?
int orientation = pack(jpeg, offset + 8, 2, littleEndian);
// Pass the EXIF code through unchanged (except 1 -> 0 for "normal").
switch (orientation) {
case 1:
return 0;
case 3:
return 3;
case 6:
return 6;
case 8:
return 8;
}
Log.i(TAG, "Unsupported orientation");
return 0;
}
offset += 12;
length -= 12;
}
}
Log.i(TAG, "Orientation not found");
return 0;
}
// Reads an unsigned big- or little-endian integer of `length` bytes from `bytes`
// starting at `offset`.
private static int pack(byte[] bytes, int offset, int length,
boolean littleEndian) {
int step = 1;
if (littleEndian) {
// Read the bytes back-to-front so the accumulation loop below stays the same.
offset += length - 1;
step = -1;
}
int value = 0;
while (length-- > 0) {
value = (value << 8) | (bytes[offset] & 0xFF);
offset += step;
}
return value;
}
}
rotateImage function :
/**
 * Transforms {@code bm} according to the given EXIF orientation constant
 * (ExifInterface.ORIENTATION_*). Returns the input bitmap when no transform
 * is needed or it cannot be applied; on success the input is recycled.
 */
private Bitmap rotateImage(Bitmap bm, int i) {
Matrix matrix = new Matrix();
switch (i) {
case ExifInterface.ORIENTATION_NORMAL:
return bm;
case ExifInterface.ORIENTATION_FLIP_HORIZONTAL:
matrix.setScale(-1, 1);
break;
case ExifInterface.ORIENTATION_ROTATE_180:
matrix.setRotate(180);
break;
case ExifInterface.ORIENTATION_FLIP_VERTICAL:
matrix.setRotate(180);
matrix.postScale(-1, 1);
break;
case ExifInterface.ORIENTATION_TRANSPOSE:
matrix.setRotate(90);
matrix.postScale(-1, 1);
break;
case ExifInterface.ORIENTATION_ROTATE_90:
matrix.setRotate(90);
break;
case ExifInterface.ORIENTATION_TRANSVERSE:
matrix.setRotate(-90);
matrix.postScale(-1, 1);
break;
case ExifInterface.ORIENTATION_ROTATE_270:
matrix.setRotate(-90);
break;
default:
return bm;
}
try {
Bitmap bmRotated = Bitmap.createBitmap(bm, 0, 0, bm.getWidth(), bm.getHeight(), matrix, true);
bm.recycle();
return bmRotated;
} catch (OutOfMemoryError e) {
e.printStackTrace();
// Fall back to the untransformed bitmap instead of returning null,
// which would cause an NPE in callers such as captureImage().
return bm;
}
}
You can achieve the effect that you want by breaking it into smaller steps.
Take the picture
Send the bitmap to Google Mobile Vision to detect the "landmarks" in the face and the probability that each eye is open
Paint the appropriate "eyes" onto your image
When using Google Mobile Vision's FaceDetector, you'll get back a SparseArray of Face objects (which may contain more than one face, or which may be empty). So you'll need to handle these cases. But you can loop through the SparseArray and find the Face object that you want to play with.
/**
 * Runs Mobile Vision face detection on {@code picture} and, when exactly one
 * face is found, returns a copy of the image with stylized eyes painted on.
 * Any other face count returns the input untouched.
 */
static Bitmap processFaces(Context context, Bitmap picture) {
// Create a "face detector" object, using the builder pattern
FaceDetector detector = new FaceDetector.Builder(context)
.setTrackingEnabled(false) // disable tracking to improve performance
.setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
.build();
// create a "Frame" object, again using a builder pattern (and passing in our picture)
Frame frame = new Frame.Builder().setBitmap(picture).build(); // build frame
// get a sparse array of face objects
SparseArray<Face> faces = detector.detect(frame); // detect the faces
// This example just deals with a single face for the sake of simplicity,
// but you can change this to deal with multiple faces.
if (faces.size() != 1) {
// Release the native detector on the early-exit path too; the original
// leaked it here.
detector.release();
return picture;
}
// make a mutable copy of the background image that we can modify
Bitmap bmOverlay = Bitmap.createBitmap(picture.getWidth(), picture.getHeight(), picture.getConfig());
Canvas canvas = new Canvas(bmOverlay);
canvas.drawBitmap(picture, 0, 0, null);
// get the Face object that we want to manipulate, and process it
Face face = faces.valueAt(0);
processFace(face, canvas);
detector.release();
return bmOverlay;
}
Once you've got a Face object, you can find the features that interest you like this
/**
 * Paints a stylized eye over each eye "landmark" detected on {@code face}.
 * An eye is treated as closed when its open-probability is below 0.6 (an
 * arbitrary threshold; tune between 0 and 1 as you please).
 */
private static void processFace(Face face, Canvas canvas) {
final float openThreshold = .6f;
boolean leftEyeClosed = face.getIsLeftEyeOpenProbability() < openThreshold;
boolean rightEyeClosed = face.getIsRightEyeOpenProbability() < openThreshold;
// Walk the face's landmarks (eyes, nose, ...); landmark.getPosition()
// yields the (x, y) coordinates of each feature.
for (Landmark landmark : face.getLandmarks()) {
int type = landmark.getType();
if (type != Landmark.LEFT_EYE && type != Landmark.RIGHT_EYE) {
continue;
}
boolean closed = (type == Landmark.LEFT_EYE) ? leftEyeClosed : rightEyeClosed;
overlayEyeBitmap(canvas, closed, landmark.getPosition().x, landmark.getPosition().y);
}
}
Then you can add your paint!
/**
 * Draws a cartoon eye (filled disc plus black outline) centered at (cx, cy).
 * Closed eyes are yellow with a horizontal lid line; open eyes are white
 * with a large off-center pupil. The radius is hardcoded for simplicity.
 */
private static void overlayEyeBitmap(Canvas canvas, boolean eyeClosed, float cx, float cy) {
final float radius = 40;
// Background disc: yellow when closed, white when open.
Paint fill = new Paint();
fill.setStyle(Paint.Style.FILL);
fill.setColor(eyeClosed ? Color.YELLOW : Color.WHITE);
canvas.drawCircle(cx, cy, radius, fill);
// Black outline around the eye.
Paint outline = new Paint();
outline.setColor(Color.BLACK);
outline.setStyle(Paint.Style.STROKE);
outline.setStrokeWidth(5);
canvas.drawCircle(cx, cy, radius, outline);
if (eyeClosed) {
// A single horizontal line stands in for the closed lid.
canvas.drawLine(cx - radius, cy, cx + radius, cy, outline);
} else {
// Big pupil, nudged down-left of center.
fill.setColor(Color.BLACK);
float pupilX = cx - 10;
float pupilY = cy + 10;
canvas.drawCircle(pupilX, pupilY, 25, fill);
}
}
In the snippet above, I just hardcoded the eye radii, to show proof of concept. You'll probably want to do some more flexible scaling, using some percentage of face.getWidth() to determine the appropriate values. But here's what this image processing can do:
Some more details about the Mobile Vision API are here, and Udacity's current Advanced Android course has a nice walkthrough of this stuff (taking a picture, sending it to Mobile Vision, and adding a bitmap onto it). The course is free, or you can just look at what they did on Github.

Android Picasso auto rotates image

I am using Picasso to load images from the web in my application. I have noticed that some images are shown rotated by 90degrees although when I open the image in my browser I see it correctly positioned. I assume that these images have EXIF data. Is there any way to instruct Picasso to ignore EXIF?
As we know, Picasso supports EXIF from local storage, this is done via Android inner Utils. Providing the same functionality can't be done easy due to ability to use custom Http loading libraries.
My solution is simple: we must override caching and apply Exif rotation before item is cached.
// Intercept image responses at the network layer and bake the EXIF rotation
// into the pixels before OkHttp caches them (Picasso ignores EXIF for web images).
OkHttpClient client = new OkHttpClient.Builder()
.addNetworkInterceptor(chain -> {
Response originalResponse = chain.proceed(chain.request());
// Buffer the whole body; processImage() rotates it when EXIF says so.
byte[] body = originalResponse.body().bytes();
ResponseBody newBody = ResponseBody
.create(originalResponse.body().contentType(), ImageUtils.processImage(body));
return originalResponse.newBuilder().body(newBody).build();
})
.cache(cache)
.build();
Here we add NetworkInterceptor that can transform request and response before it gets cached.
/**
 * Helpers that bake EXIF rotation into the pixel data of a downloaded image,
 * so consumers (e.g. Picasso's cache) no longer depend on the EXIF tag.
 */
public class ImageUtils {
/**
 * Returns {@code originalImg} rotated upright when its EXIF orientation is
 * non-zero; otherwise (or when the bytes cannot be decoded) the input array
 * is returned unchanged.
 */
public static byte[] processImage(byte[] originalImg) {
int orientation = Exif.getOrientation(originalImg);
if (orientation != 0) {
Bitmap bmp = BitmapFactory.decodeByteArray(originalImg, 0, originalImg.length);
if (bmp == null) {
// Not a decodable image (truncated body, unsupported format, ...):
// pass the bytes through instead of NPE-ing in rotateImage().
return originalImg;
}
ByteArrayOutputStream stream = new ByteArrayOutputStream();
// PNG keeps the re-encode lossless (at the cost of a larger payload).
rotateImage(orientation, bmp).compress(Bitmap.CompressFormat.PNG, 100, stream);
return stream.toByteArray();
}
return originalImg;
}
/**
 * Returns {@code bitmapSrc} rotated clockwise by {@code angle} degrees.
 */
private static Bitmap rotateImage(int angle, Bitmap bitmapSrc) {
Matrix matrix = new Matrix();
matrix.postRotate(angle);
return Bitmap.createBitmap(bitmapSrc, 0, 0,
bitmapSrc.getWidth(), bitmapSrc.getHeight(), matrix, true);
}
}
Exif transformation:
/**
 * Minimal JPEG/EXIF parser (adapted from AOSP camera code) that extracts the
 * orientation tag directly from an in-memory JPEG, without touching disk.
 */
public class Exif {
private static final String TAG = "Exif";
// Returns the degrees in clockwise. Values are 0, 90, 180, or 270.
public static int getOrientation(byte[] jpeg) {
if (jpeg == null) {
return 0;
}
int offset = 0;
int length = 0;
// Walk the JPEG marker segments until the EXIF APP1 segment is found.
// ISO/IEC 10918-1:1993(E)
while (offset + 3 < jpeg.length && (jpeg[offset++] & 0xFF) == 0xFF) {
int marker = jpeg[offset] & 0xFF;
// Check if the marker is a padding.
if (marker == 0xFF) {
continue;
}
offset++;
// Check if the marker is SOI or TEM.
if (marker == 0xD8 || marker == 0x01) {
continue;
}
// Check if the marker is EOI or SOS.
if (marker == 0xD9 || marker == 0xDA) {
break;
}
// Get the length and check if it is reasonable.
length = pack(jpeg, offset, 2, false);
if (length < 2 || offset + length > jpeg.length) {
Log.e(TAG, "Invalid length");
return 0;
}
// Break if the marker is EXIF in APP1.
// 0x45786966 is ASCII "Exif"; the following two zero bytes complete the header.
if (marker == 0xE1 && length >= 8 &&
pack(jpeg, offset + 2, 4, false) == 0x45786966 &&
pack(jpeg, offset + 6, 2, false) == 0) {
offset += 8;
length -= 8;
break;
}
// Skip other markers.
offset += length;
length = 0;
}
// Parse the TIFF structure inside the APP1 segment.
// JEITA CP-3451 Exif Version 2.2
if (length > 8) {
// Identify the byte order. "II" (0x4949) = little endian, "MM" (0x4D4D) = big endian.
int tag = pack(jpeg, offset, 4, false);
if (tag != 0x49492A00 && tag != 0x4D4D002A) {
Log.e(TAG, "Invalid byte order");
return 0;
}
boolean littleEndian = (tag == 0x49492A00);
// Get the offset and check if it is reasonable.
int count = pack(jpeg, offset + 4, 4, littleEndian) + 2;
if (count < 10 || count > length) {
Log.e(TAG, "Invalid offset");
return 0;
}
offset += count;
length -= count;
// Get the count and go through all the elements (12 bytes per IFD entry).
count = pack(jpeg, offset - 2, 2, littleEndian);
while (count-- > 0 && length >= 12) {
// Get the tag and check if it is orientation (0x0112).
tag = pack(jpeg, offset, 2, littleEndian);
if (tag == 0x0112) {
// We do not really care about type and count, do we?
int orientation = pack(jpeg, offset + 8, 2, littleEndian);
// Map the EXIF orientation code to clockwise degrees.
switch (orientation) {
case 1:
return 0;
case 3:
return 180;
case 6:
return 90;
case 8:
return 270;
}
Log.i(TAG, "Unsupported orientation");
return 0;
}
offset += 12;
length -= 12;
}
}
Log.i(TAG, "Orientation not found");
return 0;
}
// Reads an unsigned big- or little-endian integer of `length` bytes from `bytes`
// starting at `offset`.
private static int pack(byte[] bytes, int offset, int length,
boolean littleEndian) {
int step = 1;
if (littleEndian) {
// Read the bytes back-to-front so the accumulation loop below stays the same.
offset += length - 1;
step = -1;
}
int value = 0;
while (length-- > 0) {
value = (value << 8) | (bytes[offset] & 0xFF);
offset += step;
}
return value;
}
}
This solution is experimental and should be tested for leaks and probably improved. In most cases Samsung and iOS devices return a 90° rotation, and this solution works; other cases should also be tested.
Can you post the image you're using?
because as this thread said, exif orientation for images loaded from web is ignored(only content provider and local files).
I also try to display this image in picasso 2.5.2, the real orientation of the image is facing rightside(the bottom code in image is facing right). The exif orientation, is 90deg clockwise. Try open it in chrome(chrome is honoring exif rotation), the image will be faced down(bottom code in image is facing down).
based on #ph0en1x response this version use google exif library and kotlin: add this interceptor to okhttpclient used by picasso
// Picasso ignores EXIF for network images, so rotate them in the OkHttp layer
// (using the AndroidX Exif library) before they reach Picasso's cache.
addNetworkInterceptor {
val response = it.proceed(it.request())
val body = response.body
if (body?.contentType()?.type == "image") {
val bytes = body.bytes()
// Read the EXIF orientation and map it to clockwise degrees.
val degrees = bytes.inputStream().use { input ->
when (ExifInterface(input).getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL)) {
ExifInterface.ORIENTATION_ROTATE_270 -> 270
ExifInterface.ORIENTATION_ROTATE_180 -> 180
ExifInterface.ORIENTATION_ROTATE_90 -> 90
else -> 0
}
}
if (degrees != 0) {
// Re-encode the rotated bitmap and swap it into the response body.
val bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.size)
ByteArrayOutputStream().use { output ->
Bitmap.createBitmap(bitmap, 0, 0, bitmap.width, bitmap.height, Matrix().apply { postRotate(degrees.toFloat()) }, true)
.compress(Bitmap.CompressFormat.PNG, 100, output)
response.newBuilder().body(output.toByteArray().toResponseBody(body.contentType())).build()
}
} else
// The body was consumed above, so rebuild it from the buffered bytes.
response.newBuilder().body(bytes.toResponseBody(body.contentType())).build()
} else
response
}

Android camera/picture orientation issues with Samsung Galaxy S3, S4, S5

I am developing a camera application for Android API 16 to 21 which main and only purpose is to take portrait photo. I am able to take picture with several devices (Nexus 4, Nexus 5, HTC...) and have them correctly oriented (meaning that my preview equals the taken picture both in size and orientation).
However I have tested my application on several other devices and some of them are giving me alot of trouble: Samsung Galaxy S3/S4/S5.
On these three devices, the preview is correctly displayed, however the pictures returned by the method onPictureTaken(final byte[] jpeg, Camera camera) are always sideways.
This is the Bitmap created from byte[] jpeg and displayed in the ImageView to my user just before saving it to the disk:
And here is the image once saved on the disk:
As you can see the image is completly stretched in the preview and wrongly rotated once saved on the disk.
Here is my CameraPreview class (I obfuscated other methods since they had nothing to do with camera parameters):
public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback
{
    private SurfaceHolder surfaceHolder;
    private Camera camera;

    // Removed unnecessary code

    /**
     * Binds the camera to the newly created surface, applies the rotation
     * parameters and starts the preview.
     */
    public void surfaceCreated(SurfaceHolder holder)
    {
        try
        {
            // setPreviewDisplay declares IOException; the original snippet did
            // not handle it and therefore could not compile.
            camera.setPreviewDisplay(holder);
        }
        catch (java.io.IOException e)
        {
            e.printStackTrace();
            return; // No usable surface - do not start the preview.
        }
        setCameraParameters();
        camera.startPreview();
    }

    /**
     * Computes the clockwise rotation that compensates for the current display
     * rotation and asks the driver to rotate captured pictures accordingly
     * (Camera.Parameters#setRotation).
     *
     * NOTE(review): setRotation only *requests* rotation; some devices (several
     * Samsung models among them) rotate only the EXIF orientation flag rather
     * than the pixels, which is why saved images can still come out sideways
     * there. Callers should also honour the EXIF orientation when loading.
     */
    private void setCameraParameters()
    {
        Camera.Parameters parameters = camera.getParameters();
        Camera.CameraInfo info = new Camera.CameraInfo();
        // CAMERA_FACING_BACK (== 0) is used as a camera id here; it matches the
        // first back-facing camera on virtually all devices.
        Camera.getCameraInfo(Camera.CameraInfo.CAMERA_FACING_BACK, info);

        WindowManager windowManager = (WindowManager) getContext().getSystemService(Context.WINDOW_SERVICE);
        int rotation = windowManager.getDefaultDisplay().getRotation();
        int degrees = 0;
        switch (rotation)
        {
            case Surface.ROTATION_0:
                degrees = 0;
                break;
            case Surface.ROTATION_90:
                degrees = 90;
                break;
            case Surface.ROTATION_180:
                degrees = 180;
                break;
            case Surface.ROTATION_270:
                degrees = 270;
                break;
        }

        // Back-facing camera: picture rotation = sensor orientation - display rotation.
        int rotate = (info.orientation - degrees + 360) % 360;
        parameters.setRotation(rotate);

        // Save Parameters
        camera.setDisplayOrientation(90);
        camera.setParameters(parameters);
    }
}
How come this exact piece of code works for other devices but not Samsung's?
I tried to find answers on the following SO posts but nothing could help me so far:
this one and this other one.
EDIT
Implementing Joey Chong's answer does not change anything:
public void onPictureTaken(final byte[] data, Camera camera)
{
try
{
File pictureFile = new File(...);
Bitmap realImage = BitmapFactory.decodeByteArray(data, 0, data.length);
FileOutputStream fos = new FileOutputStream(pictureFile);
realImage.compress(Bitmap.CompressFormat.JPEG, 100, fos);
int orientation = -1;
ExifInterface exif = new ExifInterface(pictureFile.toString());
int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
switch (exifOrientation)
{
case ExifInterface.ORIENTATION_ROTATE_270:
orientation = 270;
break;
case ExifInterface.ORIENTATION_ROTATE_180:
orientation = 180;
break;
case ExifInterface.ORIENTATION_ROTATE_90:
orientation = 90;
break;
case ExifInterface.ORIENTATION_NORMAL:
orientation = 0;
break;
default:
break;
}
fos.close();
}
Here are the EXIF results I get for a working device:
Orientation: 0
And here the results for the S4:
Orientation: 0
It is because the phone still saves the image in landscape and records the rotation as 90 degrees in the metadata.
You can try check the exif, rotate the bitmap before put in image view. To check exif, use something like below:
int orientation = -1;
ExifInterface exif = new ExifInterface(imagePath);
int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_NORMAL);
switch (exifOrientation) {
case ExifInterface.ORIENTATION_ROTATE_270:
orientation = 270;
break;
case ExifInterface.ORIENTATION_ROTATE_180:
orientation = 180;
break;
case ExifInterface.ORIENTATION_ROTATE_90:
orientation = 90;
break;
case ExifInterface.ORIENTATION_NORMAL:
orientation = 0;
break;
default:
break;
}
I had a similar problem regarding the saved image.
I used something similar to what is described here https://github.com/googlesamples/android-vision/issues/124 by user kinghsumit (the comment from Sep 15, 2016).
I'll copy it here, just in case.
private CameraSource.PictureCallback mPicture = new CameraSource.PictureCallback() {
#Override
public void onPictureTaken(byte[] bytes) {
int orientation = Exif.getOrientation(bytes);
Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
switch(orientation) {
case 90:
bitmapPicture= rotateImage(bitmap, 90);
break;
case 180:
bitmapPicture= rotateImage(bitmap, 180);
break;
case 270:
bitmapPicture= rotateImage(bitmap, 270);
break;
case 0:
// if orientation is zero we don't need to rotate this
default:
break;
}
//write your code here to save bitmap
}
}
/**
 * Returns a copy of {@code source} rotated clockwise by {@code angle} degrees.
 * The input bitmap is left untouched (and not recycled).
 */
public static Bitmap rotateImage(Bitmap source, float angle) {
    final int width = source.getWidth();
    final int height = source.getHeight();
    final Matrix transform = new Matrix();
    transform.postRotate(angle);
    return Bitmap.createBitmap(source, 0, 0, width, height, transform, true);
}
Below class is used to get orientation from byte[] data.
/**
 * Minimal JPEG/EXIF parser that extracts the orientation tag directly from an
 * in-memory JPEG byte array - useful for CameraSource.PictureCallback, where
 * only a byte[] is available and writing to disk first would be wasteful.
 */
public class Exif {
    private static final String TAG = "CameraExif";

    // Returns the degrees in clockwise. Values are 0, 90, 180, or 270.
    public static int getOrientation(byte[] jpeg) {
        if (jpeg == null) {
            return 0;
        }

        int offset = 0;
        int length = 0;

        // Walk the JPEG marker segments (ISO/IEC 10918-1:1993(E)) looking for
        // the APP1/EXIF segment. Each marker is introduced by an 0xFF byte.
        while (offset + 3 < jpeg.length && (jpeg[offset++] & 0xFF) == 0xFF) {
            int marker = jpeg[offset] & 0xFF;

            // Check if the marker is a padding (fill byte before the real marker).
            if (marker == 0xFF) {
                continue;
            }
            offset++;

            // Check if the marker is SOI or TEM (stand-alone markers, no payload).
            if (marker == 0xD8 || marker == 0x01) {
                continue;
            }
            // Check if the marker is EOI or SOS - no EXIF data past this point.
            if (marker == 0xD9 || marker == 0xDA) {
                break;
            }

            // Get the length and check if it is reasonable.
            length = pack(jpeg, offset, 2, false);
            if (length < 2 || offset + length > jpeg.length) {
                Log.e(TAG, "Invalid length");
                return 0;
            }

            // Break if the marker is EXIF in APP1: 0x45786966 is ASCII "Exif",
            // followed by two NUL padding bytes.
            if (marker == 0xE1 && length >= 8 &&
                    pack(jpeg, offset + 2, 4, false) == 0x45786966 &&
                    pack(jpeg, offset + 6, 2, false) == 0) {
                offset += 8;
                length -= 8;
                break;
            }

            // Skip other markers.
            offset += length;
            length = 0;
        }

        // Parse the TIFF structure inside the EXIF payload (JEITA CP-3451 Exif
        // Version 2.2).
        if (length > 8) {
            // Identify the byte order: "II*\0" = little-endian, "MM\0*" = big-endian.
            int tag = pack(jpeg, offset, 4, false);
            if (tag != 0x49492A00 && tag != 0x4D4D002A) {
                Log.e(TAG, "Invalid byte order");
                return 0;
            }
            boolean littleEndian = (tag == 0x49492A00);

            // Get the offset to IFD0 and check if it is reasonable. The +2
            // lands 'offset' just past the 2-byte entry count read below.
            int count = pack(jpeg, offset + 4, 4, littleEndian) + 2;
            if (count < 10 || count > length) {
                Log.e(TAG, "Invalid offset");
                return 0;
            }
            offset += count;
            length -= count;

            // Get the count and go through all the 12-byte IFD entries.
            count = pack(jpeg, offset - 2, 2, littleEndian);
            while (count-- > 0 && length >= 12) {
                // Get the tag and check if it is orientation (tag id 0x0112).
                tag = pack(jpeg, offset, 2, littleEndian);
                if (tag == 0x0112) {
                    // We do not really care about type and count, do we?
                    int orientation = pack(jpeg, offset + 8, 2, littleEndian);
                    switch (orientation) {
                        case 1:
                            return 0;
                        case 3:
                            return 180;
                        case 6:
                            return 90;
                        case 8:
                            return 270;
                    }
                    // Mirrored orientations (2, 4, 5, 7) are reported as 0.
                    Log.i(TAG, "Unsupported orientation");
                    return 0;
                }
                offset += 12;
                length -= 12;
            }
        }

        Log.i(TAG, "Orientation not found");
        return 0;
    }

    /**
     * Packs {@code length} bytes starting at {@code offset} into an int,
     * honouring the requested byte order.
     */
    private static int pack(byte[] bytes, int offset, int length, boolean littleEndian) {
        int step = 1;
        if (littleEndian) {
            // Start at the most significant byte and walk backwards so the
            // big-endian accumulation below works for both byte orders.
            offset += length - 1;
            step = -1;
        }

        int value = 0;
        while (length-- > 0) {
            value = (value << 8) | (bytes[offset] & 0xFF);
            offset += step;
        }
        return value;
    }
}
It worked for me, except for the Nexus 5x, but that's because that device has a peculiar issue due to its construction.
I hope this helps you!
I used this AndroidCameraUtil.
It helped me a lot on this issue.
You can try to use Camera parameters to fix rotation issue.
Camera.Parameters parameters = camera.getParameters();
parameters.set("orientation", "portrait");
parameters.setRotation(90);
camera.setParameters(parameters);

How to rotate and flip bitmap in onPictureTaken

I'm finding in onPictureTaken that the bitmap saved is mirrored about the y-axis and rotated 90 degrees clockwise even though the camera preview was not. This is on my Nexus S that's running 2.3.6. The same program running on my Nexus 4 with 4.2 has the resulting bitmap mirrored about the y-axis and rotated 180 degrees clockwise.
This is the code I'm running in onPictureTaken:
#Override
public void onPictureTaken(final byte[] data, Camera camera) {
Bitmap picture = BitmapFactory.decodeByteArray(data, 0, data.length);
String path = MediaStore.Images.Media.insertImage(getContentResolver(), picture, "name" , "description");
Log.e("tag", "path: " + path); // prints something like "path: content://media/external/images/media/819"
try {
ExifInterface exif = new ExifInterface(path); // prints this error: "04-25 21:28:21.063: E/JHEAD(12201): can't open 'content://media/external/images/media/819'"
int orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
Log.e("tag", "exif orientation: " + orientation); // this is outputting orientation unknown
} catch (IOException e) {
e.printStackTrace();
}
}
Can anyone show me how I rectify this given that I seem to be getting different results from different devices? How do I detect the orientation of the resulting bitmap so that I know to rotate it either 90 or 180 degrees counterclockwise?
[EDIT]
I added some more information using the ExifInterface stuff I've been reading about, but that information doesn't seem to pan out...
I have put a lot of work into this and thought, I'd share my solution.
It is tested on a Motorola Devy, Samsung Xcover 1 and Samsung XCover 2.
As I work with a custom camera preview, the solution basically has two
parts.
1. Take care of the camera preview and set rotation of the preview according
to device rotation.
2. Once a picture is taken, that is the 'onPictureTaken' callback is invoked
rotate the picture by the correct angle, such that it shows what the preview just
showed.
1
/**
 * Configures the camera for the given surface size: binds the preview surface,
 * picks preview/picture sizes (via helpers defined elsewhere in this class)
 * and sets the display orientation to match the current device rotation.
 *
 * NOTE(review): errors from setPreviewDisplay are shown to the user but the
 * method then continues to configure parameters anyway - presumably on the
 * assumption that a later surfaceChanged will retry; confirm against callers.
 */
private void initPreview(int width, int height) {
    if (camera != null && holder.getSurface() != null) {
        try {
            camera.setPreviewDisplay(holder);
        } catch (Throwable t) {
            Log.e("PreviewDemo-surfaceCallback",
                    "Exception in setPreviewDisplay()", t);
            Toast.makeText(getContext(), t.getMessage(),
                    Toast.LENGTH_LONG).show();
        }
        try {
            Camera.Parameters parameters=camera.getParameters();
            Camera.Size size=getBestPreviewSize(width, height, parameters);
            Camera.Size pictureSize=getSmallestPictureSize(parameters);
            Display display = windowManager.getDefaultDisplay();
            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.FROYO) { // for 2.1 and before
                // Pre-Froyo has no setDisplayOrientation; use the vendor
                // "orientation" parameter instead (constants defined elsewhere).
                if (isPortrait(display)) {
                    parameters.set(CAMERA_PARAM_ORIENTATION, CAMERA_PARAM_PORTRAIT);
                } else {
                    parameters.set(CAMERA_PARAM_ORIENTATION, CAMERA_PARAM_LANDSCAPE);
                }
            } else { // for 2.2 and later
                // For each display rotation: force the preview size into
                // landscape order (width >= height, as drivers expect) and
                // rotate the on-screen preview to compensate.
                switch (display.getRotation()) {
                    case Surface.ROTATION_0: // This is display orientation
                        if (size.height > size.width) parameters.setPreviewSize(size.height, size.width);
                        else parameters.setPreviewSize(size.width, size.height);
                        camera.setDisplayOrientation(90);
                        break;
                    case Surface.ROTATION_90:
                        if (size.height > size.width) parameters.setPreviewSize(size.height, size.width);
                        else parameters.setPreviewSize(size.width, size.height);
                        camera.setDisplayOrientation(0);
                        break;
                    case Surface.ROTATION_180:
                        if (size.height > size.width) parameters.setPreviewSize(size.height, size.width);
                        else parameters.setPreviewSize(size.width, size.height);
                        camera.setDisplayOrientation(270);
                        break;
                    case Surface.ROTATION_270:
                        if (size.height > size.width) parameters.setPreviewSize(size.height, size.width);
                        else parameters.setPreviewSize(size.width, size.height);
                        camera.setDisplayOrientation(180);
                        break;
                }
            }
            parameters.setPictureSize(pictureSize.width, pictureSize.height);
            //parameters.setPictureFormat(ImageFormat.JPEG);
            camera.setParameters(parameters);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
Your 'surfaceChanged' method, in your camera preview (SurfaceView),
you should look like this:
/**
 * Reconfigures the running preview for the new surface dimensions:
 * stop -> reapply parameters for the current rotation -> restart.
 */
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
    stopPreview();
    initPreview(w, h);
    startPreview();
}
where
stopPreview:
/** Stops the live preview; a no-op when the camera has not been opened. */
private void stopPreview() {
    if (camera == null) {
        return;
    }
    camera.stopPreview();
}
startPreview:
/** Starts the live preview; a no-op when the camera has not been opened. */
private void startPreview() {
    if (camera == null) {
        return;
    }
    camera.startPreview();
}
2
In your 'onPictureTaken' callback rotate the picture, using the following code:
Display display = getWindowManager().getDefaultDisplay();
int rotation = 0;
switch (display.getRotation()) {
case Surface.ROTATION_0: // This is display orientation
rotation = 90;
break;
case Surface.ROTATION_90:
rotation = 0;
break;
case Surface.ROTATION_180:
rotation = 270;
break;
case Surface.ROTATION_270:
rotation = 180;
break;
}
Bitmap bitmap = BitmapTools.toBitmap(data);
bitmap = BitmapTools.rotate(bitmap, rotation);
BitmapTools.java
/** Small helpers for decoding and rotating JPEG data from the camera. */
public class BitmapTools {

    /** Decodes a complete JPEG byte array (e.g. from onPictureTaken) into a Bitmap. */
    public static Bitmap toBitmap(byte[] data) {
        return BitmapFactory.decodeByteArray(data, 0, data.length);
    }

    /** Returns a copy of {@code in} rotated clockwise by {@code angle} degrees. */
    public static Bitmap rotate(Bitmap in, int angle) {
        final int w = in.getWidth();
        final int h = in.getHeight();
        final Matrix rotation = new Matrix();
        rotation.postRotate(angle);
        return Bitmap.createBitmap(in, 0, 0, w, h, rotation, true);
    }
}
here you go check this out, save the picture and maybe this will work and remember if(bitmap.getWidth > bitmap.getHeight()) as another check
/**
 * Reads the EXIF orientation flag of the image at {@code imgPath} and maps it
 * to a clockwise rotation in degrees (0, 90, 180 or 270).
 *
 * Returns 0 for a missing/unknown orientation and on any read error, so
 * callers can apply the result unconditionally.
 */
public static int getExifRotation(String imgPath)
{
    try
    {
        ExifInterface exif = new ExifInterface(imgPath);
        // getAttributeInt already handles an absent or non-numeric tag by
        // returning the supplied default, so the original String + parseInt
        // round-trip (and its NumberFormatException risk) is unnecessary.
        int orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,
                ExifInterface.ORIENTATION_NORMAL);
        switch (orientation)
        {
            case ExifInterface.ORIENTATION_ROTATE_90:
                return 90;
            case ExifInterface.ORIENTATION_ROTATE_180:
                return 180;
            case ExifInterface.ORIENTATION_ROTATE_270:
                return 270;
            case ExifInterface.ORIENTATION_NORMAL:
            default:
                // Mirrored/undefined orientations are treated as "no rotation".
                return 0;
        }
    }
    catch (java.io.IOException ex)
    {
        // Unreadable file or corrupt EXIF header - treat as "no rotation".
        return 0;
    }
}
you must read about ExifInterface to resolve this issue.
I have this function in my application to check whether an image taken from the camera needs to be rotated or not.
if(ExifNeedsRotate(GetPathFromUri(context, selectedImage))){
// Rotate your bitmap using the Matrix
}
/**
 * Returns true when the image at {@code paramString} carries an EXIF
 * orientation of "6" (90 degrees clockwise), i.e. needs rotating before
 * display. Uses reflection so the code also loads on pre-API-5 devices,
 * where android.media.ExifInterface does not exist.
 *
 * NOTE(review): only orientation "6" is detected; values 3 (180) and
 * 8 (270), and the mirrored variants, all return false here - confirm that
 * callers only need the common portrait case.
 */
public static boolean ExifNeedsRotate(String paramString){
    if (android.os.Build.VERSION.SDK_INT >= 5){
        try
        {
            // Reflective equivalent of:
            //   new ExifInterface(paramString).getAttribute("Orientation")
            Class localClass = Class.forName("android.media.ExifInterface");
            Class[] arrayOfClass1 = new Class[1];
            arrayOfClass1[0] = String.class;
            Constructor localConstructor = localClass.getConstructor(arrayOfClass1);
            Class[] arrayOfClass2 = new Class[1];
            arrayOfClass2[0] = String.class;
            Method localMethod = localClass.getMethod("getAttribute", arrayOfClass2);
            Object[] arrayOfObject1 = new Object[1];
            arrayOfObject1[0] = paramString;
            Object localObject1 = localConstructor.newInstance(arrayOfObject1);
            Object[] arrayOfObject2 = new Object[1];
            arrayOfObject2[0] = "Orientation";
            Object localObject2 = localMethod.invoke(localObject1, arrayOfObject2);
            if (localObject2 != null){
                // getAttribute returns the raw tag value as a String;
                // "6" == ORIENTATION_ROTATE_90.
                boolean bool = localObject2.equals("6");
                if (bool)
                    return true;
            }
        }
        catch (Exception localException){
            // Any reflection or I/O failure: assume no rotation is needed.
            return false;
        }
    }
    return false;
}
Pass the path of the ImageUri as a input.
/**
 * Resolves a file:// or content:// Uri to a filesystem path.
 *
 * Returns null when the Uri cannot be resolved (no matching row, missing
 * "_data" column, or any provider failure) - callers must handle null.
 */
public static String GetPathFromUri(Context paramContext, Uri paramUri)
{
    String str;
    try
    {
        if (paramUri.toString().startsWith("file:")){
            // file:// Uris carry the filesystem path directly.
            str = paramUri.getPath();
        }
        else
        {
            str = null;
            String[] arrayOfString = new String[1];
            arrayOfString[0] = "_data";  // MediaStore.MediaColumns.DATA
            Cursor localCursor = paramContext.getContentResolver().query(paramUri, arrayOfString, null, null, null);
            if (localCursor != null)
            {
                try
                {
                    localCursor.moveToFirst();
                    int i = localCursor.getColumnIndex(arrayOfString[0]);
                    if ((localCursor.getCount() >= 1) && (localCursor.getColumnCount() >= i + 1))
                        str = localCursor.getString(i);
                }
                finally
                {
                    // The original code leaked the cursor when moveToFirst or
                    // getString threw; always close it.
                    localCursor.close();
                }
            }
        }
    }
    catch (Exception localException){
        str = null;
    }
    return str;
}
Instead of rotating the picture explicitly in the picture taken callback, you can configure the camera to have picture rotated for you when the picture is taken.
camera.SetDisplayOrientation(degrees) //sets the orientation in the preview
while
cameraParameters.SetRotation(degress) //rotates the actual captured image

Categories

Resources