I am working on an image processing app and I need to divide a taken photo into four regions, but using BitmapFactory takes too many resources and too much time and slows me down. I would like to do this using the raw byte[] data that public void onPictureTaken(byte[] data, Camera camera) gives me.
Right now this is what I do, and I want to improve it to work on the byte[] directly to speed it up:
public void onPictureTaken(byte[] data, Camera camera) {
new AsyncImageAnalyzer(data).execute();
data = null;
}
public AsyncImageAnalyzer(byte[] d) {
mData = d;
surfaceView = null;
}
@Override
protected Void doInBackground(Void... params) {
FileOutputStream fos2 = null;
try {
String fileName = Long.toString(Calendar.getInstance().getTimeInMillis())+".jpg";
String storageState = Environment.getExternalStorageState();
if (storageState.equals(Environment.MEDIA_MOUNTED)) {
File file = new File(/*"/sdcard/XXX"*/Environment.getExternalStorageDirectory().getAbsolutePath()+File.separator+"XXX",
fileName);
/*file.createNewFile();*/
fos2 = new FileOutputStream(file);
fos2.write(mData,0,mData.length);
fos2.flush();
fos2.close();
/*fos2.write(mData);*/
/*fos2.close();*/
if(extrasReceived.equals("1")){
spe.putString("FirstPicPath","/"+fileName).commit();
}else {
spe.putString("SecondPicPath","/"+fileName).commit();
}
}
} catch (Exception e) {
e.printStackTrace();
Log.e("ISA exception",e.getMessage() );
}finally {
try {
if (fos2 != null) {
fos2.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = sp.getInt("compression_ratio",1);
bitmap = BitmapFactory.decodeByteArray(mData, 0, mData.length,options);
mData = null;
t1G = Bitmap.createBitmap(bitmap, 0, (bitmap.getHeight() / 2) - 1, bitmap.getWidth() / 2, bitmap.getHeight() / 2);
t2G = Bitmap.createBitmap(bitmap, (bitmap.getWidth() / 2) - 1, (bitmap.getHeight() / 2) - 1, bitmap.getWidth() / 2, bitmap.getHeight() / 2);
t1R = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth() / 2, bitmap.getHeight() / 2);
t2R = Bitmap.createBitmap(bitmap, (bitmap.getWidth() / 2) - 1, 0, bitmap.getWidth() / 2, bitmap.getHeight() / 2);
try {
t2RtoInt = getDominantColor(t2R);
t1RtoInt = getDominantColor(t1R);
t1GtoInt = getDominantColor(t1G);
t2GtoInt = getDominantColor(t2G);
} catch (Exception e) {
e.printStackTrace();
Log.e("exception", e.getMessage());
}
return null;
}
Any other advice that would speed up these operations would also be appreciated.
Have you ever thought about BitmapRegionDecoder (currently only the JPEG and PNG formats are supported)? If your app targets API level 10 and up, you can use this API to speed up processing of large bitmaps.
boolean isShareable = false;
try {
InputStream is = new FileInputStream("/mnt/sdcard/test.png");
BitmapRegionDecoder decoder = BitmapRegionDecoder.newInstance(is, isShareable);
// Decode only the region you need, e.g. the top-left quarter of the image.
Rect rect = new Rect(0, 0, decoder.getWidth() / 2, decoder.getHeight() / 2);
BitmapFactory.Options options = new BitmapFactory.Options();
Bitmap region = decoder.decodeRegion(rect, options);
is.close();
} catch (IOException e) {
e.printStackTrace();
}
I am using Camera in my app to take pictures of ID cards, and I have a rectangular overlay to which the image is cropped. The issue is that the image quality is reduced once the image is captured.
I am unable to figure out where exactly this happens. In the cutImage method I crop the image, but I don't think I am doing anything to the resolution of the image there.
Can anyone suggest where the quality might be going down?
takePicture is called when the user clicks to take the picture.
Once the picture is taken, there is a 'use picture' button; that is when usePicture is called.
The cutImage method is used to crop the image based on the preview.
Any suggestions on how to stop the resolution from going down would be very helpful.
protected void takePicture() {
Log.e(TAG, "takePicture started");
if(null == cameraDevice) {
Log.e(TAG, "cameraDevice is null");
return;
}
try {
ImageReader reader = ImageReader.newInstance(textureViewWidth, textureViewHeight, ImageFormat.JPEG, 1);
List<Surface> outputSurfaces = new ArrayList<Surface>(2);
outputSurfaces.add(reader.getSurface());
outputSurfaces.add(new Surface(textureView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
// Orientation
int rotation = getActivity().getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = null;
try {
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
takenPictureBytes = bytes;
Log.d(TAG, "takenPictureBytes length - " + takenPictureBytes.length);
} catch (Exception e) {
Log.d(TAG, " onImageAvailable exception ");
e.printStackTrace();
} finally {
if (image != null) {
Log.d(TAG, " image closing");
image.close();
}
}
}
};
reader.setOnImageAvailableListener(readerListener, mBackgroundHandler);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
Log.d(TAG, "takePicture - camera capture session");
switchPanels(true);
progress.setVisibility(View.GONE);
}
};
cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
session.capture(captureBuilder.build(), captureListener, mBackgroundHandler);
} catch (CameraAccessException e) {
Log.d(TAG, "takePicture - onConfigured- camera access exception ");
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
Log.d(TAG, "takePicture - onConfigureFailed");
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
Log.d(TAG, "takePicture - CameraAccessException ");
e.printStackTrace();
}
}
private void usePicture() {
Log.d(TAG, "usePicture - started ");
if(null != takenPictureBytes ){
try{
String imagePath = null;
Bitmap bitmap = BitmapFactory.decodeByteArray(takenPictureBytes, 0, takenPictureBytes.length);
int bitmapByteCountUsePic = byteSizeOf(bitmap);
Matrix matrix = new Matrix();
matrix.postRotate(90);
Bitmap rotatedBitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
if (isFrameMode) {
float withRatio = (float) rotatedBitmap.getWidth() / (float) textureViewWidth;
float heightRatio = (float) rotatedBitmap.getHeight() / (float) textureViewHeight;
Bitmap newImage = cutImage(rotatedBitmap, (int) (photoFrameView.getWidth() * withRatio), (int) (photoFrameView.getHeight() * heightRatio), withRatio);
int bitmapByteCountNewImage = byteSizeOf(newImage);
imagePath = saveBitmap(newImage);
} else {
imagePath = saveBitmap(rotatedBitmap);
}
TakePhotoFragment.TakePhotoFragmentEvent takePhotoFragmentEvent = new TakePhotoFragment.TakePhotoFragmentEvent();
takePhotoFragmentEvent.setImagePath(imagePath);
// send rxjava
//pop backstack
RxBus.getInstance().post(takePhotoFragmentEvent);
getActivity().getSupportFragmentManager().popBackStack();
}catch (Exception e){
Log.d(TAG, "usePicture - exception ");
e.printStackTrace();
}
}else{
Log.d(TAG, "usePicture - takenPictureBytes is null");
DialogUtil.showErrorSnackBar(getView(), R.string.retake_photo );
}
}
public Bitmap cutImage(final Bitmap bitmap, final int pixepWidth, final int pixelsHeight, float widthRatio) {
int bitmapByteCountCutImage = byteSizeOf(bitmap);
Bitmap output = createBitmap(pixepWidth, pixelsHeight, Bitmap.Config.ARGB_8888);
Bitmap original = bitmap;
final Paint paint = new Paint();
Canvas canvas = new Canvas(output);
int padding = (int) ((float) getResources().getDimensionPixelSize(R.dimen.double_padding) * widthRatio);
Rect rect = new Rect(padding, (original.getHeight() - pixelsHeight) / 2, padding + pixepWidth, original.getHeight() - (original.getHeight() - pixelsHeight) / 2);
final RectF cutedRect = new RectF(0, 0, pixepWidth, pixelsHeight);
paint.setAntiAlias(true);
canvas.drawARGB(0, 0, 0, 0);
canvas.drawBitmap(original, rect, cutedRect, paint);
return output;
}
private String saveBitmap(Bitmap bitmap) {
File pictureFileDir = getDir();
if (!pictureFileDir.exists() && !pictureFileDir.mkdirs()) {
Toast.makeText(getActivity(), "Can't create directory to save image.", Toast.LENGTH_LONG).show();
return null;
}
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmssSSS");
String date = dateFormat.format(new Date());
String photoFile = "Picture_" + date + ".jpg";
String filename = pictureFileDir.getPath() + File.separator + photoFile;
File pictureFile = new File(filename);
try {
ByteArrayOutputStream stream = new ByteArrayOutputStream();
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, stream);
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(stream.toByteArray());
fos.close();
return pictureFile.getAbsolutePath();
} catch (Exception error) {
Log.d(TAG, "File" + filename + "not saved: " + error.getMessage());
}
return null;
}
You are changing the bitmap size/resolution in this code:
float withRatio = (float) rotatedBitmap.getWidth() / (float) textureViewWidth;
float heightRatio = (float) rotatedBitmap.getHeight() / (float) textureViewHeight;
Bitmap newImage = cutImage(rotatedBitmap, (int) (photoFrameView.getWidth() * withRatio), (int) (photoFrameView.getHeight() * heightRatio), withRatio);
int bitmapByteCountNewImage = byteSizeOf(newImage);
imagePath = saveBitmap(newImage);
Put in a breakpoint and see what the new heightRatio and withRatio are, and what photoFrameView.getWidth() * withRatio comes out to. I think you will find it is small compared to the original image. I'm also not sure why you are calculating the ratios from textureViewWidth/Height; you shouldn't have to do that. Whatever you are displaying the image in should be able to fill its bounds without changing the size of the underlying bitmap, and thus losing resolution.
You might check out this method:
rawBitmap = ((BitmapDrawable)imageToLoad.getDrawable()).getBitmap();
theBitmap = Bitmap.createScaledBitmap(rawBitmap, 285, 313, false);
I am capturing frames in onPreviewFrame() and then processing them in a thread to check whether they are valid or not.
public void onPreviewFrame(byte[] data, Camera camera) {
if (imageFormat == ImageFormat.NV21) {
//We only accept the NV21(YUV420) format.
frameCount++;
if (frameCount > 19 && frameCount % 2 == 0) {
Camera.Parameters parameters = camera.getParameters();
FrameModel fModel = new FrameModel(data);
fModel.setPreviewWidth(parameters.getPreviewSize().width);
fModel.setPreviewHeight(parameters.getPreviewSize().height);
fModel.setPicFormat(parameters.getPreviewFormat());
fModel.setFrameCount(frameCount);
validateFrame(fModel);
}
}
}
In validateFrame(), I submit a ValidatorThread runnable instance to a ThreadPoolExecutor with 4 core and max threads, to process the frames in parallel.
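The pool setup itself isn't shown; roughly, it looks something like this (a sketch with assumed names, matching the 4 core/max threads described above):
// Assumed setup, not the exact code: 4 worker threads. Note that an unbounded
// queue will hold on to every pending frame's byte[] until a thread is free.
private final ThreadPoolExecutor validatorPool = new ThreadPoolExecutor(
        4, 4, 30L, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>());

private void validateFrame(FrameModel fModel) {
    validatorPool.execute(new ValidatorThread(fModel));
}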
public class ValidatorThread implements Runnable {
private FrameModel frame;
public ValidatorThread(FrameModel fModel) {
frame = fModel;
}
@Override
public void run() {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
processNV21Data();
}
private void processNV21Data() {
YuvImage yuv = new YuvImage(frame.getData(), frame.getPicFormat(),
frame.getPreviewWidth(), frame.getPreviewHeight(), null);
frame.releaseData();
ByteArrayOutputStream out = new ByteArrayOutputStream();
yuv.compressToJpeg(new Rect(0, 0, frame.getPreviewWidth(), frame.getPreviewHeight()), 100, out);
byte[] bytes = out.toByteArray();
yuv = null;
try {
if (out != null)
out.close();
out = null;
} catch (IOException e) {
e.printStackTrace();
}
Bitmap baseBitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
bytes = null;
// rotate bitmap
baseBitmap = rotateImage(baseBitmap, frame.getRotation());
//create copy of original bitmap to use later
Bitmap mCheckedBitmap = baseBitmap.copy(Bitmap.Config.ARGB_8888, true);
// convert base bitmap to greyscale for validation
baseBitmap = toGrayscale(baseBitmap);
boolean isBitmapValid = Util.isBitmapValid(baseBitmap);
if (isBitmapValid) {
baseBitmap.recycle();
mCheckedBitmap.recycle();
frame = null;
} else {
baseBitmap.recycle();
mCheckedBitmap.recycle();
frame = null;
}
}
public Bitmap toGrayscale(Bitmap bmpOriginal) {
int width = bmpOriginal.getWidth();
int height = bmpOriginal.getHeight();
Bitmap bmpGrayscale = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
Canvas c = new Canvas(bmpGrayscale);
Paint paint = new Paint();
// Draw the original with saturation 0 to produce the grayscale copy.
ColorMatrix cm = new ColorMatrix();
cm.setSaturation(0);
paint.setColorFilter(new ColorMatrixColorFilter(cm));
c.drawBitmap(bmpOriginal, 0, 0, paint);
bmpOriginal.recycle();
return bmpGrayscale;
}
private Bitmap rotateImage(final Bitmap source, float angle) {
Matrix matrix = new Matrix();
matrix.postRotate(angle);
Bitmap rotatedBitmap = Bitmap.createBitmap(source, 0, 0, source.getWidth(), source.getHeight(), matrix, true);
source.recycle();
return rotatedBitmap;
}
}
The FrameModel class has such declaration :
public class FrameModel {
private byte[] data;
private int previewWidth;
private int previewHeight;
private int picFormat;
private int frameCount;
public void releaseData() {
data = null;
}
// getters and setters
}
I am getting an OutOfMemory error while processing multiple frames.
Can anyone help with what memory optimisation this code needs?
You can reduce memory usage if you produce the grayscale bitmap from the YUV data without going through JPEG. This will also be significantly faster.
public Bitmap yuv2grayscale(byte[] yuv, int width, int height) {
int[] pixels = new int[width * height];
for (int i = 0; i < height*width; i++) {
int y = yuv[i] & 0xff;
pixels[i] = 0xFF000000 | y << 16 | y << 8 | y;
}
return Bitmap.createBitmap(pixels, width, height, Bitmap.Config.RGB_565);
}
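With that helper, processNV21Data() could skip the YuvImage/JPEG/decodeByteArray round trip entirely for the validation path. A sketch, assuming the FrameModel getters used in the question (and that the ARGB copy is only needed once a frame turns out to be valid):
private void processNV21Data() {
    // NV21 stores the full-resolution Y (luma) plane first, which is all the
    // grayscale validation needs.
    Bitmap grayBitmap = yuv2grayscale(frame.getData(),
            frame.getPreviewWidth(), frame.getPreviewHeight());
    frame.releaseData();
    grayBitmap = rotateImage(grayBitmap, frame.getRotation());
    boolean isBitmapValid = Util.isBitmapValid(grayBitmap);
    grayBitmap.recycle();
    frame = null;
}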
Alternatively, you can create an RGB_565 bitmap without going through int[width*height] pixels array, and manipulate the bitmap pixels in place using NDK.
I want exactly the camera functionality (single shot and batch, i.e. multiple photos at a time) of the following app:
https://play.google.com/store/apps/details?id=com.thegrizzlylabs.geniusscan.free&hl=en
I have implemented this successfully, but I did it with SurfaceView, and when I capture photos from the camera they come out blurred compared to the Genius Scan app.
Could anyone please let me know how I can achieve this functionality without the blur?
NOTE: Capturing Multiple Photos
private void takeImage() {
camera.takePicture(null, null, new PictureCallback() {
private File imageFile;
@Override
public void onPictureTaken(byte[] data, Camera camera) {
try {
// convert byte array into bitmap
Bitmap loadedImage = null;
Bitmap rotatedBitmap = null;
loadedImage = BitmapFactory.decodeByteArray(data, 0,
data.length);
// rotate Image
Matrix rotateMatrix = new Matrix();
rotateMatrix.postRotate(rotation);
rotatedBitmap = Bitmap.createBitmap(loadedImage, 0, 0,
loadedImage.getWidth(), loadedImage.getHeight(),
rotateMatrix, false);
String state = Environment.getExternalStorageState();
File folder = null;
if (state.contains(Environment.MEDIA_MOUNTED)) {
folder = new File(Environment
.getExternalStorageDirectory() + "/Demo");
} else {
folder = new File(Environment
.getExternalStorageDirectory() + "/Demo");
}
boolean success = true;
if (!folder.exists()) {
success = folder.mkdirs();
}
if (success) {
java.util.Date date = new java.util.Date();
imageFile = new File(folder.getAbsolutePath()
+ File.separator
+ new Timestamp(date.getTime()).toString()
+ "Image.jpg");
imageFile.createNewFile();
} else {
Toast.makeText(getBaseContext(), "Image Not saved",
Toast.LENGTH_SHORT).show();
return;
}
ByteArrayOutputStream ostream = new ByteArrayOutputStream();
// save image into gallery
rotatedBitmap.compress(CompressFormat.JPEG, 100, ostream);
FileOutputStream fout = new FileOutputStream(imageFile);
fout.write(ostream.toByteArray());
fout.close();
ContentValues values = new ContentValues();
values.put(Images.Media.DATE_TAKEN,
System.currentTimeMillis());
values.put(Images.Media.MIME_TYPE, "image/jpeg");
values.put(MediaStore.MediaColumns.DATA,
imageFile.getAbsolutePath());
CameraDemoActivity.this.getContentResolver().insert(
Images.Media.EXTERNAL_CONTENT_URI, values);
if (mSingleView.getVisibility() == View.VISIBLE) {
btnDoneClicked();
} else {
}
mArrayUri.add(Uri.fromFile(imageFile));
if (mBatchView.getVisibility() == View.VISIBLE) {
batchClickCount++;
mtxtCapturedClicks.setText(String.valueOf(batchClickCount));
} else {
batchClickCount = 0;
mtxtCapturedClicks.setText("");
}
} catch (Exception e) {
e.printStackTrace();
}
}
});
}
public static Bitmap scaleBitmap(Bitmap bitmap, int newWidth, int newHeight) {
Bitmap scaledBitmap = Bitmap.createBitmap(newWidth, newHeight, Bitmap.Config.ARGB_8888);
float scaleX = newWidth / (float) bitmap.getWidth();
float scaleY = newHeight / (float) bitmap.getHeight();
float pivotX = 0;
float pivotY = 0;
Matrix scaleMatrix = new Matrix();
scaleMatrix.setScale(scaleX, scaleY, pivotX, pivotY);
Canvas canvas = new Canvas(scaledBitmap);
canvas.setMatrix(scaleMatrix);
canvas.drawBitmap(bitmap, 0, 0, new Paint(Paint.FILTER_BITMAP_FLAG));
return scaledBitmap;
}
Try this function to improve the image quality
I have found that when I use createBitmap() only to rotate a bitmap, and after that createScaledBitmap() only to scale it, the bitmap values (JPEG coefficients, in my case) are changed when the image is rotated (lower quality).
So I probably need to do the rotation and the scaling in a single createBitmap() call with a Matrix object. (Can somebody give me more specific information about the math in this case? I cannot find anything.)
In the code below I try this, but rotatedBitmap comes back with different dimensions, not dstWidth x dstHeight.
NOTE: I only need the 1D pixel data vector as OUTPUT.
public static int[] getPixels(String pathOfInputImage, int dstWidth,
int dstHeight) {
Bitmap resizedBitmap = null;
InputStream in = null;
int[] pixelIn;
boolean filter = false;
int inWidth = 0;
int inHeight = 0;
// calculate the scale - in this case = 0.4f
try {
in = new FileInputStream(pathOfInputImage);
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
if (in == null) {
Log.v("--------Image ERROR--------",
"Image ERROR PIXEL FileInputStream( ");
}
// decode image size (decode metadata only, not the whole image)
BitmapFactory.Options options = new BitmapFactory.Options();
options.inScaled = false;
// options.inDither = true;
// options.inPreferQualityOverSpeed = true;
options.inPreferredConfig = Bitmap.Config.ARGB_8888;
options.inJustDecodeBounds = true;
BitmapFactory.decodeStream(in, null, options);
try {
in.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
in = null;
// save width and height
inWidth = options.outWidth;
inHeight = options.outHeight;
FileInputStream in2 = null;
// decode full image pre-resized
try {
in2 = new FileInputStream(pathOfInputImage);
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
Options options2 = new BitmapFactory.Options();
// options.inDither = true;
// options2.inPreferQualityOverSpeed = true;
options2.inPreferredConfig = Bitmap.Config.ARGB_8888;
// calc rough re-size (this is not an exact resize)
options2.inSampleSize = Math.max(inWidth / dstWidth, inHeight
/ dstHeight);
// decode full
System.gc();
Bitmap roughBitmap = BitmapFactory.decodeStream(in2, null, options2);
float scaleWidth = ((float) dstWidth) / inWidth;
float scaleHeight = ((float) dstHeight) / inHeight;
if (roughBitmap == null) {
pixelIn = getPixelsZadniVratka(pathOfInputImage, dstWidth,
dstHeight);
} else {
// calc exact destination size
PhotoRecord p9 = new PhotoRecord();
int rot9 = p9.loadRotation(pathOfInputImage).roatate;
Matrix matrix = new Matrix();
matrix.postRotate(rot9);
matrix.postScale(scaleWidth, scaleHeight);
// Bitmap resizedBitmap2 = Bitmap.createScaledBitmap(roughBitmap,
// dstWidth, dstHeight, filter);
// /------------------------------------
Bitmap rotatedBitmap = Bitmap.createBitmap(roughBitmap, 0, 0,
roughBitmap.getWidth(), roughBitmap.getHeight(), matrix,
filter);
Log.v("--------TAG--------", "JE inWidth PROCESS?????( " + inWidth);
Log.v("--------TAG--------", "JE getHeight PROCESS?????( "
+ rotatedBitmap.getHeight());
Log.v("--------TAG--------", "JE getWidth PROCESS?????( "
+ rotatedBitmap.getWidth());
pixelIn = new int[dstWidth * dstHeight];
if (rotatedBitmap != null) {
rotatedBitmap.getPixels(pixelIn, 0, dstWidth, 0, 0, dstWidth,
dstHeight);
rotatedBitmap.recycle();
} else {
pixelIn = null;
}
}
return pixelIn;
}
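On the dimension mismatch: scaleWidth/scaleHeight are computed against the original inWidth/inHeight, but the matrix is applied to roughBitmap, which inSampleSize has already shrunk (and the decoder rounds inSampleSize down to a power of two). One way to hit dstWidth x dstHeight exactly is to compute the scale against the bitmap that was actually decoded. A sketch, not the original code:
// Scale relative to the bitmap that was actually decoded so the output hits
// the requested size. A 90/270 degree rotation applied afterwards will swap
// the final width and height, so take that into account before getPixels().
float scaleWidth = ((float) dstWidth) / roughBitmap.getWidth();
float scaleHeight = ((float) dstHeight) / roughBitmap.getHeight();
Matrix matrix = new Matrix();
matrix.postScale(scaleWidth, scaleHeight);
matrix.postRotate(rot9);
Bitmap exactBitmap = Bitmap.createBitmap(roughBitmap, 0, 0,
        roughBitmap.getWidth(), roughBitmap.getHeight(), matrix, filter);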
I have read a lot of topics where a bitmap is resized with decodeFile and createScaledBitmap. But I would like to change the file itself, without creating extra bitmaps or files; that is, when I open the file, the bitmap should already be smaller/bigger. Is that possible?
Edit:
Specifically, I have JPEG files that I am going to zip, and before making the zip I want to set the size of the images (in the zip file).
I'm not sure I clearly understand what you want to obtain, but here is some simple code to resize an image file:
String your_file_path = "image.png";
int set_scale_your_need = 2;
//getting your image size
BitmapFactory.Options o = new BitmapFactory.Options();
o.inJustDecodeBounds = true;
BitmapFactory.decodeFile(your_file_path, o);
o.inJustDecodeBounds = false;
o.inSampleSize = set_scale_your_need;
Bitmap bitmap = BitmapFactory.decodeFile(your_file_path, o);
FileOutputStream fos = new FileOutputStream(your_file_path, false);
bitmap.compress(Bitmap.CompressFormat.PNG, 90, fos);
fos.flush();
fos.close();
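Since your edit mentions JPEG files, the same approach should work with a JPEG path and Bitmap.CompressFormat.JPEG (the file name here is just an assumption):
String your_jpeg_path = "image.jpg"; // assumed file name
BitmapFactory.Options o = new BitmapFactory.Options();
o.inSampleSize = 2; // load at half the width and height
Bitmap bitmap = BitmapFactory.decodeFile(your_jpeg_path, o);
try {
    FileOutputStream fos = new FileOutputStream(your_jpeg_path, false);
    // Re-encode the smaller bitmap, overwriting the original file.
    bitmap.compress(Bitmap.CompressFormat.JPEG, 90, fos);
    fos.flush();
    fos.close();
} catch (IOException e) {
    e.printStackTrace();
}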
If you just want to have it open as a smaller version of itself, you can use BitmapFactory.Options.inSampleSize to reduce the load size. If you want to actually resize the image and re-save it to the file system, you will want to do that in an AsyncTask,
something like this:
public class ImageResizerTask extends AsyncTask<Integer, Void, Bitmap> {
Bitmap mBitmap;
String filePath;
Context mContext;
Activity mCallBack;
//ProgressDialog pd;
public ImageResizerTask(Context context, String path,
Activity callBack) {
mContext = context;
filePath = path;
mBitmap = BitmapFactory.decodeFile(filePath);
mCallBack = callBack;
}
@Override
protected void onPreExecute() {
}
@Override
protected Bitmap doInBackground(Integer... params) {
// TODO Auto-generated method stub
resize(mBitmap);
return mBitmap;
}
@Override
protected void onPostExecute(Bitmap bitmap) {
//pd.dismiss();
bitmap.recycle();
mCallBack.onImageResized(filePath);
}
private void resize(Bitmap tmp) {
final Bitmap bitmap = Bitmap.createBitmap(500, 500,
Bitmap.Config.ARGB_8888);
final Canvas canvas = new Canvas(bitmap);
Log.v("TMPBMP",
"temp bmp width:" + tmp.getWidth() + " height:"
+ tmp.getHeight());
if (tmp.getWidth() < tmp.getHeight()) {
final int width = (int) (1f * tmp.getWidth() / tmp.getHeight() * 500);
final int height = 500;
final Bitmap scaled = Bitmap.createScaledBitmap(tmp, width, height,
false);
final int leftOffset = (bitmap.getWidth() - scaled.getWidth()) / 2;
final int topOffset = 0;
canvas.drawBitmap(scaled, leftOffset, topOffset, null);
} else {
final int width = 500;
final int height = (int) (1f * tmp.getHeight() / tmp.getWidth() * 500);
final Bitmap scaled = Bitmap.createScaledBitmap(tmp, width, height,
false);
final int leftOffset = 0;
final int topOffset = (bitmap.getHeight() - scaled.getHeight()) / 2;
canvas.drawBitmap(scaled, leftOffset, topOffset, null);
}
FileOutputStream outStream;
try {
outStream = new FileOutputStream(filePath);
try {
bitmap.compress(Bitmap.CompressFormat.PNG, 100, outStream);
outStream.flush();
outStream.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}