Face detection finds my face, then after 3 seconds the circle disappears. It only happens on some phones, so I am unsure why. My code is pretty boilerplate:
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_detect);
detector = new FaceDetector.Builder(getApplicationContext())
.setTrackingEnabled(false)
.setProminentFaceOnly(true)
.setMode(FaceDetector.FAST_MODE)
.setMinFaceSize((float) 0.60)
.setLandmarkType(FaceDetector.ALL_LANDMARKS)
.setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
.build();
initViews();
}
private void initViews() {
imgTakePicture = (ImageView) findViewById(R.id.imgTakePic);
btnTakePicture = (Button) findViewById(R.id.btnTakePicture);
txtSampleDesc = (TextView) findViewById(R.id.txtSampleDescription);
txtTakenPicDesc = (TextView) findViewById(R.id.textView);
btnTakePicture.setOnClickListener(this);
imgTakePicture.setOnClickListener(this);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
Log.d(TAG, "onActivityResult: this is resyult");
if (requestCode == CAMERA_REQUEST && resultCode == RESULT_OK) {
launchMediaScanIntent();
try {
processCameraPicture();
} catch (Exception e) {
Toast.makeText(getApplicationContext(), "Failed to load Image", Toast.LENGTH_SHORT).show();
}
}
}
private void launchMediaScanIntent() {
Log.d(TAG, "launchMediaScanIntent: ");
Intent mediaScanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
mediaScanIntent.setData(imageUri);
this.sendBroadcast(mediaScanIntent);
}
private void startCamera() {
Log.d(TAG, "startCamera: ");
Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
Log.d(TAG, "startCamera: 2");
File photo = new File(Environment.getExternalStorageDirectory(), "/videoDIARY/ReferencePic/photo.jpg");
imageUri = Uri.fromFile(photo);
intent.putExtra(MediaStore.EXTRA_OUTPUT, imageUri);
startActivityForResult(intent, CAMERA_REQUEST);
}
EDIT: OK, I have worked out that this is all about device orientation. It works fine on all devices in landscape mode, but only on some devices in portrait mode. Still trying to work out why; I will update when I fix it!
OK, so it turned out to have nothing to do with face detection, and everything to do with how Android saves camera intent images. Basically some devices get the orientation confused, so you need to compare the width against the height to make sure the image was saved the right way up, and rotate it if not. Here is how I checked:
private Bitmap decodeBitmapUri(Context ctx, Uri uri) throws FileNotFoundException {
Log.d(TAG, "decodeBitmapUri: ");
//Toast.makeText(this, "1o" , Toast.LENGTH_LONG).show();
Log.d(TAG, "initViews1: face detector is ============================ " + detector.isOperational());
int targetW = 300;
int targetH = 300;
BitmapFactory.Options bmOptions = new BitmapFactory.Options();
bmOptions.inJustDecodeBounds = true;
bmOptions.inPreferredConfig=Bitmap.Config.RGB_565;
BitmapFactory.decodeStream(ctx.getContentResolver().openInputStream(uri), null, bmOptions);
android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
android.hardware.Camera.getCameraInfo(Camera.CameraInfo.CAMERA_FACING_FRONT, info);
int rotation = this.getWindowManager().getDefaultDisplay().getRotation();
int orientation = this.getResources().getConfiguration().orientation;
Log.d(TAG, "decodeBitmapUri: OREINTATION is ==================== " + orientation);
Log.d(TAG, "decodeBitmapUri: CAMERA ROTATION ========================= " + rotation);
//Camera.Size size = android.hardware.Camera.get
int photoW = bmOptions.outWidth;
Log.d(TAG, "decodeBitmapUri: width: " + photoW );
int photoH = bmOptions.outHeight;
Log.d(TAG, "decodeBitmapUri: height: " + photoH);
Log.d(TAG, "decodeBitmapUri: 4");
//Toast.makeText(this, "11" , Toast.LENGTH_LONG).show();
int scaleFactor = Math.min(photoW / targetW, photoH / targetH);
bmOptions.inJustDecodeBounds = false;
bmOptions.inSampleSize = scaleFactor;
/* this is because some phones default a camera Intent to landscape no matter how the phone is held,
 * so we check the device orientation, then check whether the width is greater than the height
 */
if(orientation == 1 && (photoW > photoH)){
return rotate(BitmapFactory.decodeStream(ctx.getContentResolver()
.openInputStream(uri), null, bmOptions));
}
return BitmapFactory.decodeStream(ctx.getContentResolver()
.openInputStream(uri), null, bmOptions);
}
public static Bitmap rotate(Bitmap bitmap){
int w = bitmap.getWidth();
int h = bitmap.getHeight();
Matrix mtx = new Matrix();
mtx.postRotate(270);
return Bitmap.createBitmap(bitmap, 0, 0, w, h, mtx, true);
}
Related
I capture a photo with the default camera by calling
Intent cameraIntent = new Intent(android.provider.MediaStore.ACTION_IMAGE_CAPTURE);
startActivityForResult(cameraIntent, ApplicationData.CAMERA_REQUEST);
Then I save the photo to the SD card, retrieve it, and set it on an ImageView:
OutputStream output;
Bitmap photo = (Bitmap) data.getExtras().get("data");
output = new FileOutputStream(file);
photo.compress(Bitmap.CompressFormat.PNG, 100, output);
output.flush();
output.close();
mThumbnaiImagelLayout.setVisibility(View.VISIBLE);
mImageThumbNail.setImageBitmap(photo);
What I want in the ImageView is exactly the same as the device's thumbnail image. For example:
But it becomes like this:
So my questions are:
1. How can I resize the captured image?
2. Is there any other way I can set the image on the ImageView without a Bitmap? Because when I set the ImageView with a Bitmap, it looks terrible, like this (the image's size is 512x512):
Please help me to solve this problem. Thank you!
Try this
android:scaleType="fitCenter"
If you use Bundle extras = data.getExtras(); in your onActivityResult(), it will return the thumbnail image, not the actual image.
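For contrast, that thumbnail-only path looks like this:
// returns a small preview-sized bitmap, not the full photo
Bitmap thumbnail = (Bitmap) data.getExtras().get("data");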
Here is the code I have used for capturing and saving a camera image, then displaying it in an ImageView.
Here is the method for launching the camera capture activity:
private static final int CAMERA_PHOTO = 111;
private Uri imageToUploadUri;
private void captureCameraImage() {
Intent chooserIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
File f = new File(Environment.getExternalStorageDirectory(), "POST_IMAGE.jpg");
chooserIntent.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(f));
imageToUploadUri = Uri.fromFile(f);
startActivityForResult(chooserIntent, CAMERA_PHOTO);
}
Then your onActivityResult() method should look like this:
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == CAMERA_PHOTO && resultCode == Activity.RESULT_OK) {
if(imageToUploadUri != null){
Uri selectedImage = imageToUploadUri;
getContentResolver().notifyChange(selectedImage, null);
Bitmap reducedSizeBitmap = getBitmap(imageToUploadUri.getPath());
if(reducedSizeBitmap != null){
imageview.setImageBitmap(reducedSizeBitmap);
}else{
Toast.makeText(this,"Error while capturing Image",Toast.LENGTH_LONG).show();
}
}else{
Toast.makeText(this,"Error while capturing Image",Toast.LENGTH_LONG).show();
}
}
}
Here is the getBitmap() method used in onActivityResult():
private Bitmap getBitmap(String path) {
Uri uri = Uri.fromFile(new File(path));
InputStream in = null;
try {
final int IMAGE_MAX_SIZE = 1200000; // 1.2MP
in = getContentResolver().openInputStream(uri);
// Decode image size
BitmapFactory.Options o = new BitmapFactory.Options();
o.inJustDecodeBounds = true;
BitmapFactory.decodeStream(in, null, o);
in.close();
int scale = 1;
while ((o.outWidth * o.outHeight) * (1 / Math.pow(scale, 2)) >
IMAGE_MAX_SIZE) {
scale++;
}
Log.d("", "scale = " + scale + ", orig-width: " + o.outWidth + ", orig-height: " + o.outHeight);
Bitmap b = null;
in = getContentResolver().openInputStream(uri);
if (scale > 1) {
scale--;
// scale to max possible inSampleSize that still yields an image
// larger than target
o = new BitmapFactory.Options();
o.inSampleSize = scale;
b = BitmapFactory.decodeStream(in, null, o);
// resize to desired dimensions
int height = b.getHeight();
int width = b.getWidth();
Log.d("", "1th scale operation dimenions - width: " + width + ", height: " + height);
double y = Math.sqrt(IMAGE_MAX_SIZE
/ (((double) width) / height));
double x = (y / height) * width;
Bitmap scaledBitmap = Bitmap.createScaledBitmap(b, (int) x,
(int) y, true);
b.recycle();
b = scaledBitmap;
System.gc();
} else {
b = BitmapFactory.decodeStream(in);
}
in.close();
Log.d("", "bitmap size - width: " + b.getWidth() + ", height: " +
b.getHeight());
return b;
} catch (IOException e) {
Log.e("", e.getMessage(), e);
return null;
}
}
I'm developing an application that displays a photo from the camera using the camera intent with the extra crop option. The code works fine on most devices, but when I tried to test it on my brand new Galaxy Note 3 it crashed and didn't run properly. Also, the image taken is still huge (almost 4 MB), which is far too large to display in the ImageView. Can anyone point out a way to avoid this?
Here is my code:
Intent intent = new Intent("android.media.action.IMAGE_CAPTURE");
file = getOutputMediaFile();
intent.putExtra("crop", "true");
intent.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(file));
intent.putExtra("outputFormat", Bitmap.CompressFormat.JPEG.toString());
intent.putExtra(MediaStore.EXTRA_SCREEN_ORIENTATION, ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
startActivityForResult(intent, ACTION_REQUEST_CAMERA);
And for onActivityResult:
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (resultCode == RESULT_OK) {
switch (requestCode) {
case ACTION_REQUEST_CAMERA:
if (data != null) {
try {
int inWidth = 0;
int inHeight = 0;
InputStream in = new FileInputStream(
file.getAbsolutePath());
// decode image size (decode metadata only, not the
// whole image)
BitmapFactory.Options options = new BitmapFactory.Options();
options.inJustDecodeBounds = true;
BitmapFactory.decodeStream(in, null, options);
in.close();
in = null;
// save width and height
inWidth = options.outWidth;
inHeight = options.outHeight;
// decode full image pre-resized
in = new FileInputStream(file.getAbsolutePath());
options = new BitmapFactory.Options();
// calc rough re-size (this is not an exact resize)
options.inSampleSize = Math.max(inWidth / 350, inHeight / 550);
// decode full image
Bitmap roughBitmap = BitmapFactory.decodeStream(in,
null, options);
// calc exact destination size
Matrix m = new Matrix();
RectF inRect = new RectF(0, 0, roughBitmap.getWidth(),
roughBitmap.getHeight());
RectF outRect = new RectF(0, 0, 700, 800);
m.setRectToRect(inRect, outRect,
Matrix.ScaleToFit.CENTER);
float[] values = new float[9];
m.getValues(values);
// resize bitmap
Bitmap resizedBitmap = Bitmap.createScaledBitmap(
roughBitmap,
(int) (roughBitmap.getWidth() * values[0]),
(int) (roughBitmap.getHeight() * values[4]),
true);
// save image
try {
FileOutputStream out = new FileOutputStream(
file.getAbsolutePath());
resizedBitmap.compress(Bitmap.CompressFormat.JPEG,
90, out);
fullphoto = resizedBitmap;
setPic(file.getAbsolutePath(), camera);
} catch (Exception e) {
Log.e("Image", e.getMessage(), e);
}
} catch (IOException e) {
Log.e("Image", e.getMessage(), e);
}
}
// fullphoto = BitmapFactory.decodeFile(file.getAbsolutePath());
// photo = decodeSampledBitmapFromFile(file.getAbsolutePath(),
// 100, 100);
// camera.setImageBitmap(imghelper.getRoundedCornerBitmap(
// fullphoto, 10));
iscamera = "Yes";
firsttime = false;
break;
}
}
}
In my application I can open the camera and take a picture. The picture is stored at a full size of 2448x3264 pixels on the SD card. How can I configure my application to save the picture at a size of 90x90 pixels instead of 2448x3264 pixels?
To open the camera and capture an image I use the following methods:
/*
 * Capturing a camera image will launch the camera app and request an image capture
 */
private void captureImage() {
Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
fileUri = getOutputMediaFileUri(MEDIA_TYPE_IMAGE);
intent.putExtra(MediaStore.EXTRA_OUTPUT, fileUri);
// start the image capture Intent
startActivityForResult(intent, CAMERA_CAPTURE_IMAGE_REQUEST_CODE);
}
private Uri getOutputMediaFileUri(int type) {
return Uri.fromFile(getOutputMediaFile(type));
}
private File getOutputMediaFile(int type) {
// External sdcard location
File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory
(Environment.DIRECTORY_PICTURES), IMAGE_DIRECTORY_NAME);
// Create the storage directory if it does not exist
if (!mediaStorageDir.exists()) {
if (!mediaStorageDir.mkdirs()) {
Log.d(IMAGE_DIRECTORY_NAME, "Oops! Failed create " + IMAGE_DIRECTORY_NAME + " directory");
return null;
}
}
// Create a media file name
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss", Locale.getDefault()).format(new Date());
File mediaFile;
if (type == MEDIA_TYPE_IMAGE) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator + "IMG_" + timeStamp + ".jpg");
}
else {
return null;
}
return mediaFile;
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
// if the result is capturing Image
if (requestCode == CAMERA_CAPTURE_IMAGE_REQUEST_CODE) {
if (resultCode == RESULT_OK) {
/*
try {
decodeUri(this, fileUri, 90, 90);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
*/
// successfully captured the image
Toast.makeText(getApplicationContext(),
"Picture successfully captured", Toast.LENGTH_SHORT).show();
} else if (resultCode == RESULT_CANCELED) {
// user cancelled Image capture
Toast.makeText(getApplicationContext(),
"User cancelled image capture", Toast.LENGTH_SHORT).show();
} else {
// failed to capture image
Toast.makeText(getApplicationContext(),
"Sorry! Failed to capture image", Toast.LENGTH_SHORT).show();
}
}
}
public static Bitmap decodeUri(Context c, Uri uri, final int requiredWidth, final int requiredHeight) throws FileNotFoundException {
BitmapFactory.Options o = new BitmapFactory.Options();
o.inJustDecodeBounds = true;
BitmapFactory.decodeStream(c.getContentResolver().openInputStream(uri), null, o);
int width_tmp = o.outWidth, height_tmp = o.outHeight;
int scale = 1;
while(true) {
if(width_tmp / 2 < requiredWidth || height_tmp / 2 < requiredHeight)
break;
width_tmp /= 2;
height_tmp /= 2;
scale *= 2;
}
BitmapFactory.Options o2 = new BitmapFactory.Options();
o2.inSampleSize = scale;
return BitmapFactory.decodeStream(c.getContentResolver().openInputStream(uri), null, o2);
}
#Override
protected void onRestoreInstanceState(Bundle savedInstanceState) {
super.onRestoreInstanceState(savedInstanceState);
// get the file url
fileUri = savedInstanceState.getParcelable("file_uri");
}
I hope someone can help me with this. I am trying to load the captured images into a little ImageView that looks like that. Thanks in advance.
No, you cannot control the picture size when you use the MediaStore.ACTION_IMAGE_CAPTURE intent. You can achieve this if you implement your own "custom camera" (and there are plenty of working samples on the Internet, including mine).
The byte array received in onPictureTaken() is a JPEG buffer. Look at this Java package for image manipulation: http://mediachest.sourceforge.net/mediautil/ (there is an Android port on GitHub). It has very powerful and efficient methods to scale down a JPEG without decoding it into a Bitmap and back.
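If you do go the custom-camera route, a minimal sketch of such a callback, using the same (now deprecated) android.hardware.Camera API this answer refers to; the 90x90 target and the choice to decode to a Bitmap rather than use MediaUtil are assumptions for illustration:
Camera.PictureCallback jpegCallback = new Camera.PictureCallback() {
    @Override
    public void onPictureTaken(byte[] data, Camera camera) {
        // 'data' is a JPEG buffer; read its bounds first to pick a sample size
        BitmapFactory.Options bounds = new BitmapFactory.Options();
        bounds.inJustDecodeBounds = true;
        BitmapFactory.decodeByteArray(data, 0, data.length, bounds);

        BitmapFactory.Options opts = new BitmapFactory.Options();
        opts.inSampleSize = Math.max(1, Math.min(bounds.outWidth / 90, bounds.outHeight / 90));
        Bitmap small = BitmapFactory.decodeByteArray(data, 0, data.length, opts);

        // scale to exactly 90x90, then save it or show it as needed
        Bitmap exact = Bitmap.createScaledBitmap(small, 90, 90, true);

        camera.startPreview(); // the preview stops after a capture; restart it
    }
};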
Here I'm giving a method that takes the path of the taken picture on the SD card and returns an image of the required size as a Bitmap. All you have to do is pass the image path on the SD card and get the resized image back.
private Bitmap processTakenPicture(String fullPath) {
int targetW = 90; //your required width
int targetH = 90; //your required height
BitmapFactory.Options bmOptions = new BitmapFactory.Options();
bmOptions.inJustDecodeBounds = true;
BitmapFactory.decodeFile(fullPath, bmOptions);
int scaleFactor = 1;
scaleFactor = calculateInSampleSize(bmOptions, targetW, targetH);
bmOptions.inJustDecodeBounds = false;
bmOptions.inSampleSize = scaleFactor * 2;
bmOptions.inPurgeable = true;
Bitmap bitmap = BitmapFactory.decodeFile(fullPath, bmOptions);
return bitmap;
}
private int calculateInSampleSize(BitmapFactory.Options options, int reqWidth,
int reqHeight) {
// Raw height and width of image
final int height = options.outHeight;
final int width = options.outWidth;
int inSampleSize = 1;
if (height > reqHeight || width > reqWidth) {
if (width > height) {
inSampleSize = Math.round((float) height / (float) reqHeight);
} else {
inSampleSize = Math.round((float) width / (float) reqWidth);
}
}
return inSampleSize;
}
After you have read the original image, you can use:
Bitmap.createScaledBitmap(photo, width, height, true);
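For example, combined with the decodeUri() helper already in the question (fileUri is the Uri passed as EXTRA_OUTPUT; the hard 90x90 scale ignores aspect ratio, which may or may not be what you want):
// decodeUri throws FileNotFoundException, so wrap this in try/catch
Bitmap rough = decodeUri(this, fileUri, 90, 90);               // downsample while decoding
Bitmap photo = Bitmap.createScaledBitmap(rough, 90, 90, true); // then scale to exactly 90x90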
Here is another question where someone has the same problem. He uses the following:
Bitmap ThumbImage = ThumbnailUtils.extractThumbnail(BitmapFactory.decodeFile(imagePath), THUMBSIZE, THUMBSIZE);
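Here imagePath would point to the saved photo, and THUMBSIZE is the edge length you need (90 for this question). A tiny sketch with those two assumptions spelled out:
private static final int THUMBSIZE = 90; // the 90x90 target from the question

void showThumb(ImageView target, String imagePath) {
    Bitmap thumb = ThumbnailUtils.extractThumbnail(
            BitmapFactory.decodeFile(imagePath), THUMBSIZE, THUMBSIZE);
    target.setImageBitmap(thumb);
}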
I need to detect the user's face and also compare faces to authenticate in my application. For that I used the FaceDetector API to detect the user's face.
When I run my code it works without any errors, but it gives the detected face count as zero.
public class AndroidFaceDetectorActivity extends Activity {
private static final int TAKE_PICTURE_CODE = 100;
private static final int MAX_FACES = 5;
private Bitmap cameraBitmap = null;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
((Button)findViewById(R.id.take_picture)).setOnClickListener(btnClick);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if(TAKE_PICTURE_CODE == requestCode){
processCameraImage(data);
}
}
private void openCamera(){
Intent intent = new Intent(android.provider.MediaStore.ACTION_IMAGE_CAPTURE);
startActivityForResult(intent, TAKE_PICTURE_CODE);
}
private void processCameraImage(Intent intent){
setContentView(R.layout.detectlayout);
((Button)findViewById(R.id.detect_face)).setOnClickListener(btnClick);
ImageView imageView = (ImageView)findViewById(R.id.image_view);
cameraBitmap = (Bitmap)intent.getExtras().get("data");
imageView.setImageBitmap(cameraBitmap);
}
private void detectFaces(){
if(null != cameraBitmap){
Log.d("FACE_RECOGNITION","CHECK");
int width = cameraBitmap.getWidth();
int height = cameraBitmap.getHeight();
FaceDetector detector = new FaceDetector(width, height,AndroidFaceDetectorActivity.MAX_FACES);
Face[] faces = new Face[AndroidFaceDetectorActivity.MAX_FACES];
Bitmap bitmap565 = Bitmap.createBitmap(width, height, Config.RGB_565);
Paint ditherPaint = new Paint();
Paint drawPaint = new Paint();
ditherPaint.setDither(true);
drawPaint.setColor(Color.RED);
drawPaint.setStyle(Paint.Style.STROKE);
drawPaint.setStrokeWidth(2);
Canvas canvas = new Canvas();
canvas.setBitmap(bitmap565);
canvas.drawBitmap(cameraBitmap, 0, 0, ditherPaint);
int facesFound = detector.findFaces(bitmap565, faces);
PointF midPoint = new PointF();
float eyeDistance = 0.0f;
float confidence = 0.0f;
Log.i("FaceDetector", "Number of faces found: " + facesFound);
if(facesFound > 0)
{
for(int index=0; index<facesFound; ++index){
faces[index].getMidPoint(midPoint);
eyeDistance = faces[index].eyesDistance();
confidence = faces[index].confidence();
Log.i("FaceDetector",
"Confidence: " + confidence +
", Eye distance: " + eyeDistance +
", Mid Point: (" + midPoint.x + ", " + midPoint.y + ")");
canvas.drawRect((int)midPoint.x - eyeDistance ,
(int)midPoint.y - eyeDistance ,
(int)midPoint.x + eyeDistance,
(int)midPoint.y + eyeDistance, drawPaint);
}
}
String filepath = Environment.getExternalStorageDirectory() + "/facedetect" + System.currentTimeMillis() + ".jpg";
try {
FileOutputStream fos = new FileOutputStream(filepath);
bitmap565.compress(CompressFormat.JPEG, 90, fos);
fos.flush();
fos.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
ImageView imageView = (ImageView)findViewById(R.id.image_view);
imageView.setImageBitmap(bitmap565);
}
}
private View.OnClickListener btnClick = new View.OnClickListener() {
//@Override
public void onClick(View v) {
switch(v.getId()){
case R.id.take_picture: openCamera(); break;
case R.id.detect_face: detectFaces(); break;
}
}
};
}
What did I do wrong?
Or is there any other way to do this?
Thanks
getExtras().get("data") for MediaStore.ACTION_IMAGE_CAPTURE intent produces very low-resolution bitmap (I believe it's 160x120 px) which could work as a thumbnail, but is not enough for face detection to do its job.
Normally face detection is OK on medium-res images (e.g. 64x480 px) that you can receive form Camera.previewCallback(), but this way you need permissions and code that controls the camera in your app, you cannot use an intent for that.
Here is the official into to face detection on Android: http://developer.android.com/guide/topics/media/camera.html#face-detection.
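If you go the in-app camera route from that guide, the listener-based API looks roughly like this (a sketch only; it assumes you already hold the CAMERA permission, have an opened android.hardware.Camera named camera, and have called startPreview()):
camera.setFaceDetectionListener(new Camera.FaceDetectionListener() {
    @Override
    public void onFaceDetection(Camera.Face[] faces, Camera camera) {
        // face bounds arrive in the (-1000,-1000)..(1000,1000) preview coordinate space
        Log.i("FaceDetector", "Faces in preview: " + faces.length);
    }
});
if (camera.getParameters().getMaxNumDetectedFaces() > 0) {
    camera.startFaceDetection(); // must be called while the preview is running
}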
If you really prefer to go through the intent, you may use getData() to find the captured image at its full resolution and convert it into a bitmap, like:
cameraBitmap = BitmapFactory.decodeFile(data.getData().getPath());
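The old android.media.FaceDetector also needs an RGB_565 bitmap and benefits from downsampling the full-resolution file. A minimal sketch, assuming data.getData() really does return a usable file path on your device (it can be null, in which case pass EXTRA_OUTPUT and remember the Uri yourself):
BitmapFactory.Options opts = new BitmapFactory.Options();
opts.inSampleSize = 4;                           // assumption: tune for your camera resolution
opts.inPreferredConfig = Bitmap.Config.RGB_565;  // findFaces() requires an RGB_565 bitmap
// note: FaceDetector also requires the bitmap width to be even
cameraBitmap = BitmapFactory.decodeFile(data.getData().getPath(), opts);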
In my app I need to upload some images from my mobile's gallery.
I am using a Samsung Galaxy Ace, and I have captured some images in portrait mode using the phone's default camera. After capturing, I opened those images in my app and tried to show them in an ImageView. The images that were captured in portrait mode appear in landscape in the ImageView.
Using exifInterface.getAttribute(ExifInterface.TAG_ORIENTATION) I checked the image's orientation value, which is 6.
Using the following code I am showing the image in portrait mode in the ImageView:
Matrix matrix = new Matrix();
matrix.postRotate(90);
bitmap = BitmapFactory.decodeStream(getContentResolver().openInputStream(HomePage._uri));
bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
i.setImageBitmap(bitmap);
But after uploading the image and retrieving it in another activity of my app, it appears in landscape mode again. How can I upload the image in portrait?
I captured it in portrait and displayed it in portrait myself; while uploading it I need it to stay in portrait, so that when I retrieve it I can view it in portrait mode.
How can I get this done? (For capturing I didn't use a camera inside my app; I captured using the phone's default camera, outside the app.)
I have found the solution for getting the image from the gallery and uploading it. Some images selected from the gallery may look rotated; in that case the following solution works well.
Select an image from the gallery:
Intent intent = new Intent(Intent.ACTION_PICK, android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
intent.setType("image/*");
startActivityForResult(intent, 2);
Next, in onActivityResult:
public void onActivityResult(int requestCode, int resultCode, final Intent data)
{
super.onActivityResult(requestCode, resultCode, data);
if(resultCode == Activity.RESULT_OK )
{
if(requestCode == 2)
{
try
{
String [] proj = { MediaStore.Images.Media.DATA };
Cursor cursor = managedQuery(data.getData(), proj, null, null, null);
int column_index = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
cursor.moveToFirst();
pathInput = cursor.getString(column_index);
Appconstants.f = Environment.getExternalStorageDirectory() + "/tmp_siva.jpg";
ImageUtils.resampleImageAndSaveToNewLocation(pathInput, Appconstants.f);
}
catch (Exception ex)
{
Log.e("Exception ex # try catch",""+ex);
}
}
}
}
Here is the ImageUtils class
public class ImageUtils
{
private ImageUtils()
{
}
public static void resampleImageAndSaveToNewLocation(String pathInput, String pathOutput) throws Exception
{
Bitmap bmp = resampleImage(pathInput, 800);
OutputStream out = new FileOutputStream(pathOutput);
bmp.compress(Bitmap.CompressFormat.JPEG, 100, out);
}
public static Bitmap resampleImage(String path, int maxDim) throws Exception
{
BitmapFactory.Options bfo = new BitmapFactory.Options();
bfo.inJustDecodeBounds = true;
BitmapFactory.decodeFile(path, bfo);
BitmapFactory.Options optsDownSample = new BitmapFactory.Options();
optsDownSample.inSampleSize = getClosestResampleSize(bfo.outWidth, bfo.outHeight, maxDim);
Bitmap bmpt = BitmapFactory.decodeFile(path, optsDownSample);
Matrix m = new Matrix();
if (bmpt.getWidth() > maxDim || bmpt.getHeight() > maxDim)
{
BitmapFactory.Options optsScale = getResampling(bmpt.getWidth(), bmpt.getHeight(), maxDim);
m.postScale((float)optsScale.outWidth / (float)bmpt.getWidth(), (float)optsScale.outHeight / (float)bmpt.getHeight());
}
int sdk = new Integer(Build.VERSION.SDK).intValue();
if (sdk > 4)
{
int rotation = ExifUtils.getExifRotation(path);
if (rotation != 0)
{
m.postRotate(rotation);
}
}
return Bitmap.createBitmap(bmpt, 0, 0, bmpt.getWidth(), bmpt.getHeight(), m, true);
}
private static BitmapFactory.Options getResampling(int cx, int cy, int max)
{
float scaleVal = 1.0f;
BitmapFactory.Options bfo = new BitmapFactory.Options();
if (cx > cy)
{
scaleVal = (float)max / (float)cx;
}
else if (cy > cx)
{
scaleVal = (float)max / (float)cy;
}
else
{
scaleVal = (float)max / (float)cx;
}
bfo.outWidth = (int)(cx * scaleVal + 0.5f);
bfo.outHeight = (int)(cy * scaleVal + 0.5f);
return bfo;
}
private static int getClosestResampleSize(int cx, int cy, int maxDim)
{
/*Log.e("cx",""+cx);
Log.e("cy",""+cy);*/
int max = Math.max(cx, cy);
int resample = 1;
for (resample = 1; resample < Integer.MAX_VALUE; resample++)
{
if (resample * maxDim > max)
{
resample--;
break;
}
}
if (resample > 0)
{
return resample;
}
return 1;
}
public static BitmapFactory.Options getBitmapDims(String path) throws Exception
{
BitmapFactory.Options bfo = new BitmapFactory.Options();
bfo.inJustDecodeBounds = true;
BitmapFactory.decodeFile(path, bfo);
return bfo;
}
}
Here is the Exif class
public class ExifUtils
{
private ExifUtils()
{
}
public static int getExifRotation(String imgPath)
{
try
{
ExifInterface exif = new ExifInterface(imgPath);
String rotationAmount = exif.getAttribute(ExifInterface.TAG_ORIENTATION);
if (!TextUtils.isEmpty(rotationAmount))
{
int rotationParam = Integer.parseInt(rotationAmount);
switch (rotationParam)
{
case ExifInterface.ORIENTATION_NORMAL:
return 0;
case ExifInterface.ORIENTATION_ROTATE_90:
return 90;
case ExifInterface.ORIENTATION_ROTATE_180:
return 180;
case ExifInterface.ORIENTATION_ROTATE_270:
return 270;
default:
return 0;
}
}
else
{
return 0;
}
}
catch (Exception ex)
{
return 0;
}
}
}
The image selected from the gallery is checked to see whether it is portrait or landscape, then rotated as needed and saved to a new path on the SD card. To avoid OOM issues it is also resized.