I am having trouble with a TransactionTooLargeException related to CropImageView. Whenever I use the CropImageView to crop an image and then use an intent to change activities, it throws a TransactionTooLargeException.
I read that this may be because I am passing too much data from one activity to another, but even when I removed the part of my code that passes a byte array in the intent, it still crashed with the same TransactionTooLargeException.
Here is my code:
public class TestActivity extends AppCompatActivity {
private CropImageView cropImageView;
private Button cropImage;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_test);
cropImage = (Button) findViewById(R.id.cropImage);
cropImageView = (CropImageView) findViewById(R.id.cropImageView);
Intent intent = getIntent();
byte[] bite = intent.getExtras().getByteArray("imageForCropping");
Bitmap bitmap = getPhoto(bite);
cropImageView.setImageBitmap(bitmap);
cropImage.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Bitmap bit = cropImageView.getCroppedImage();
byte[] byteArray = getBytes(bit);
Intent intent = new Intent(TestActivity.this, HomeScreenActivity.class);
intent.putExtra("croppedImage", byteArray);
TestActivity.this.startActivity(intent);
}
});
}
public static Bitmap getPhoto(byte[] image) {
return BitmapFactory.decodeByteArray(image, 0, image.length );
}
public static byte[] getBytes(Bitmap image) {
ByteArrayOutputStream stream = new ByteArrayOutputStream();
image.compress(Bitmap.CompressFormat.PNG, 100, stream);
return stream.toByteArray();
}
}
I have tried to avoid passing the byte array in the intent by using an SQLite database instead, but it still crashes with a TransactionTooLargeException. This led me to believe that CropImageView is causing the problem.
Please let me know if you know what the problem could be.
Thanks in advance!
This is a bug in CropImageView: it attempts to save the image as part of its own view state when onSaveInstanceState() is called.
The solution is to save the image out to a file and call setImageBitmap(null) on the crop image view before calling super.onSaveInstanceState(), then restore the bitmap from the file when it is needed again.
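Applied to the TestActivity in the question, a minimal sketch of that idea could look like the following (the file name "crop_temp" and the sourceImageBytes field are placeholders I made up, not part of the library):

// Sketch only: persist the source image bytes to a private file and clear the
// CropImageView before the view hierarchy is parceled, so the bitmap never
// ends up in the Binder transaction.
private byte[] sourceImageBytes; // assumed to be set from the "imageForCropping" extra in onCreate()

@Override
protected void onSaveInstanceState(Bundle outState) {
    FileOutputStream fos = null;
    try {
        fos = openFileOutput("crop_temp", Context.MODE_PRIVATE);
        fos.write(sourceImageBytes);
    } catch (IOException e) {
        Log.e("TestActivity", "Could not persist image for later restore", e);
    } finally {
        if (fos != null) {
            try { fos.close(); } catch (IOException ignored) { }
        }
    }
    cropImageView.setImageBitmap(null); // keep the bitmap out of the saved view state
    super.onSaveInstanceState(outState);
}

On restore (for example in onCreate() when savedInstanceState is not null), read the file back, decode it with getPhoto(...) and call cropImageView.setImageBitmap(...) again.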
EDIT: Okay, I'll share my code for this, but it's really, really hacky; I was just happy that it worked. It comes from a camera flow, so I took the photo and got the bitmap as a byte array.
Please use it with caution.
@Override
public void onResume() {
if(cameraPresenter != null) {
cameraPresenter.onResume();
}
if(takenBitmap != null && !takenBitmap.isRecycled()) {
takenPhotoDisplay.setImageBitmap(takenBitmap);
}
}
@Override
public void onViewDestroyed(boolean removedByFlow) {
takenPhotoDisplay.setImageBitmap(null);
}
@Override // called before `Activity super.onSaveInstanceState()`
public void preSaveViewState(Bundle bundle) {
takenPhotoDisplay.setImageBitmap(null);
}
private byte[] takenPhoto;
private Bitmap takenBitmap;
private void restoreBitmap() {
try {
takenPhoto = readByteArrayFromFile("TEMP");
} catch(FileNotFoundException e) {
Log.e(TAG, "No bitmap found to restore");
} catch(IOException e) {
Log.e(TAG, "Could not read bitmap");
}
}
private byte[] readByteArrayFromFile(String fileName)
throws IOException {
Context context = CustomApplication.get();
byte[] result = null;
FileInputStream fileInputStream = null;
try {
fileInputStream = context.openFileInput(fileName);
result = IOUtils.toByteArray(fileInputStream);
} catch(FileNotFoundException e) {
Log.e(TAG, "The file was not found [" + fileName + "]", e);
throw e;
} catch(IOException e) {
Log.e(TAG, "Unable to read data from stream [" + fileName + "]", e);
throw e;
} finally {
if(fileInputStream != null) {
try {
fileInputStream.close();
} catch(IOException e) {
Log.e(TAG, "Could not close.");
}
}
}
return result;
}
private void preserveBitmap() {
if(takenPhoto != null) {
FileOutputStream fileOutputStream = null;
try {
fileOutputStream = CustomApplication.get().openFileOutput("TEMP", Context.MODE_PRIVATE);
fileOutputStream.write(takenPhoto);
} catch(FileNotFoundException e) {
Log.e(TAG, "Could not open temp");
} catch(IOException e) {
Log.e(TAG, "Could not write temp");
} finally {
if(fileOutputStream != null) {
try {
fileOutputStream.close();
} catch(IOException e) {
Log.e(TAG, "Nobody cares!");
}
}
}
}
}
private void destroyCurrentBitmap() {
if(takenBitmap != null) {
takenPhotoDisplay.setImageBitmap(null);
takenBitmap.recycle();
takenBitmap = null;
takenPhoto = null;
Runtime.getRuntime().gc();
}
}
private void handleTakenPhoto(byte[] data) {
CameraView.this.takenPhoto = data;
if(takenPhoto != null) {
BitmapFactory.Options options = new BitmapFactory.Options();
options.inPreferredConfig = Bitmap.Config.RGB_565;
Bitmap bm = BitmapFactory.decodeByteArray(takenPhoto, 0, takenPhoto.length, options);
Log.d(TAG, "The Bitmap has width [" + bm.getWidth() + "] and height [" + bm.getHeight() + "]");
// Setting post rotate to 90
Matrix mtx = new Matrix();
Log.i(TAG, "CURRENT ROTATION ANGLE: [" + getRotationAngle() + "]");
// Rotating Bitmap
mtx.postRotate(getRotationAngle());
// Flipping bitmap
android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
android.hardware.Camera.getCameraInfo(getActiveCameraId(), info);
if(info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
mtx.postScale(-1, 1, bm.getWidth() / 2, bm.getHeight() / 2);
}
Runtime.getRuntime().gc();
Bitmap newBitmap = Bitmap.createBitmap(bm, 0, 0, bm.getWidth(), bm.getHeight(), mtx, true);
if(bm != newBitmap) {
bm.recycle();
bm = newBitmap; //yes, this is on purpose!
Runtime.getRuntime().gc();
} else {
Log.i(TAG, "The created bitmap is the same as the previous one.");
}
bm = newBitmap; //yes, this is on purpose!
takenBitmap = bm;
if(takenBitmap != null && !takenBitmap.isRecycled()) {
takenPhotoDisplay.setImageBitmap(takenBitmap);
} else {
Log.w(TAG, "The bitmap in [takenBitmap] is recycled!");
takenPhotoDisplay.setImageBitmap(null);
}
} else {
Log.w(TAG, "The taken photo is NULL.");
}
}
//IMAGE ROTATION FIX
public int getRotationAngle(int cameraId) {
android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
android.hardware.Camera.getCameraInfo(cameraId, info);
Activity activity = ActivityUtils.getActivity(getContext());
WindowManager windowManager = activity.getWindowManager();
Display defaultDisplay = windowManager.getDefaultDisplay();
int rotation = defaultDisplay.getRotation();
int degrees = 0;
switch(rotation) {
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
}
int result;
//if(info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
// result = (info.orientation + degrees) % 360;
// result = (360 - result) % 360; // compensate the mirror
//} else { // back-facing
// result = (info.orientation - degrees + 360) % 360;
//}
if(info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
if(getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE) {
result = (info.orientation - degrees + 540) % 360;
} else {
result = (info.orientation - degrees + 360) % 360;
}
} else {
result = (info.orientation - degrees + 360) % 360;
}
return result;
}
Do not pass the image bytes in an intent (or anywhere else that goes through a Binder transaction). Save the image to disk, pass only the URI (or file path), and read the image back from disk in the next activity.
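A minimal sketch of that approach for the TestActivity above (the file name "cropped.png" and the extra key "croppedImagePath" are just examples I picked):

// Inside the crop button's onClick(): write the cropped bitmap to a private
// file and pass only its path to the next activity.
File outFile = new File(getFilesDir(), "cropped.png");
FileOutputStream fos = null;
try {
    fos = new FileOutputStream(outFile);
    cropImageView.getCroppedImage().compress(Bitmap.CompressFormat.PNG, 100, fos);
} catch (IOException e) {
    Log.e("TestActivity", "Could not save cropped image", e);
} finally {
    if (fos != null) {
        try { fos.close(); } catch (IOException ignored) { }
    }
}
Intent intent = new Intent(TestActivity.this, HomeScreenActivity.class);
intent.putExtra("croppedImagePath", outFile.getAbsolutePath());
startActivity(intent);

// In HomeScreenActivity:
// String path = getIntent().getStringExtra("croppedImagePath");
// Bitmap cropped = BitmapFactory.decodeFile(path);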
How can I compress the image size before uploading? This is my code. I want to compress images before they are uploaded to a WordPress site; I am using the WordPress REST API in my Android app.
// getting images selected from gallery for post and sending them to server
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
// super.onActivityResult(requestCode, resultCode, data);
switch (requestCode) {
case FilePickerConst.REQUEST_CODE_PHOTO:
if (resultCode == Activity.RESULT_OK && data != null) {
imagePaths = new ArrayList<>();
imageRequestCount = 1;
imagePaths.addAll(data.getStringArrayListExtra(FilePickerConst.KEY_SELECTED_MEDIA));
if (SettingsMain.isConnectingToInternet(context)) {
if (imagePaths.size() > 0) {
btnSelectPix.setEnabled(false);
AsyncImageTask asyncImageTask = new AsyncImageTask();
asyncImageTask.execute(imagePaths);
}
}
} else {
btnSelectPix.setEnabled(true);
Toast.makeText(context, settingsMain.getAlertDialogMessage("internetMessage"), Toast.LENGTH_SHORT).show();
}
break;
}
}
private void adforest_uploadImages(List<MultipartBody.Part> parts) {
Log.d("info image parts", parts.toString());
String ad_id = Integer.toString(addId);
RequestBody adID =
RequestBody.create(
okhttp3.MultipartBody.FORM, ad_id);
Log.d("info SendImage", addId + "");
final Call<ResponseBody> req = restService.postUploadImage(adID, parts, UrlController.UploadImageAddHeaders(context));
req.enqueue(new Callback<ResponseBody>() {
@Override
public void onResponse(Call<ResponseBody> call, Response<ResponseBody> response) {
if (response.isSuccessful()) {
Log.v("info Upload", response.toString());
JSONObject responseobj = null;
try {
responseobj = new JSONObject(response.body().string());
Log.d("info UploadImage object", "" + responseobj.getJSONObject("data").toString());
if (responseobj.getBoolean("success")) {
adforest_updateImages(responseobj.getJSONObject("data"));
int selectedImageSize = imagePaths.size();
int totalSize = currentSize + selectedImageSize;
Log.d("info image2", "muImage" + totalSize + "imagePaths" + totalUploadedImages);
if (totalSize == totalUploadedImages) {
adforest_UploadSuccessImage();
imagePaths.clear();
paths.clear();
if (allFile.size() > 0) {
for (File file : allFile) {
if (file.exists()) {
if (file.delete()) {
Log.d("file Deleted :", file.getPath());
} else {
Log.d("file not Deleted :", file.getPath());
}
}
}
}
}
} else {
adforest_UploadFailedImage();
Toast.makeText(context, responseobj.get("message").toString(), Toast.LENGTH_SHORT).show();
}
} catch (JSONException e) {
adforest_UploadFailedImage();
e.printStackTrace();
} catch (IOException e) {
adforest_UploadFailedImage();
e.printStackTrace();
}
btnSelectPix.setEnabled(true);
} else {
adforest_UploadFailedImage();
}
}
@Override
public void onFailure(Call<ResponseBody> call, Throwable t) {
Log.e("info Upload Image Err:", t.toString());
if (t instanceof TimeoutException) {
adforest_UploadFailedImage();
// adforest_requestForImages();
}
if (t instanceof SocketTimeoutException) {
adforest_UploadFailedImage();
// adforest_requestForImages();
//
} else {
adforest_UploadFailedImage();
// adforest_requestForImages();
}
}
});
}
private void adforest_UploadFailedImage() {
progress_bar.setVisibility(View.GONE);
loadingLayout.setVisibility(View.GONE);
Gallary.setVisibility(View.VISIBLE);
Gallary.setCompoundDrawablesWithIntrinsicBounds(R.drawable.ic_check_circle_black_24dp, 0, 0, 0);
Gallary.setText("" + 0);
Gallary.setTextColor(Color.parseColor("#a0a0a0"));
tv_done.setVisibility(View.VISIBLE);
tv_done.setTextColor(Color.parseColor("#ff0000"));
tv_done.setText(progressModel.getFailMessage());
btnSelectPix.setEnabled(true);
Toast.makeText(context, progressModel.getFailMessage(), Toast.LENGTH_SHORT).show();
}
private void adforest_UploadSuccessImage() {
progress_bar.setVisibility(View.GONE);
Gallary.setVisibility(View.VISIBLE);
loadingLayout.setVisibility(View.GONE);
Gallary.setCompoundDrawablesWithIntrinsicBounds(R.drawable.ic_check_circle_green_24dp, 0, 0, 0);
Gallary.setText(totalFiles + "");
tv_done.setText(progressModel.getSuccessMessage());
Toast.makeText(context, progressModel.getSuccessMessage(), Toast.LENGTH_SHORT).show();
btnSelectPix.setEnabled(true);
tv_done.setTextColor(Color.parseColor("#20a406"));
}
private MultipartBody.Part adforestst_prepareFilePart(String fileName, Uri fileUri) {
File finalFile = new File(SettingsMain.getRealPathFromURI(context, fileUri));
allFile.add(finalFile);
// create RequestBody instance from file
RequestBody requestFile =
RequestBody.create(
MediaType.parse(getContentResolver().getType(fileUri)),
finalFile
);
// MultipartBody.Part is used to send also the actual file name
return MultipartBody.Part.createFormData(fileName, finalFile.getName(), requestFile);
}
private File adforest_rotateImage(String path) {
File file = null;
Bitmap bitmap = BitmapFactory.decodeFile(path);
ExifInterface ei = null;
try {
ei = new ExifInterface(path);
} catch (IOException e) {
e.printStackTrace();
}
int orientation = ei.getAttributeInt(ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_UNDEFINED);
Bitmap rotatedBitmap = null;
switch (orientation) {
case ExifInterface.ORIENTATION_ROTATE_90:
rotatedBitmap = rotateImage(bitmap, 90);
break;
case ExifInterface.ORIENTATION_ROTATE_180:
rotatedBitmap = rotateImage(bitmap, 180);
break;
case ExifInterface.ORIENTATION_ROTATE_270:
rotatedBitmap = rotateImage(bitmap, 270);
break;
case ExifInterface.ORIENTATION_NORMAL:
default:
rotatedBitmap = bitmap;
}
file = new File(getRealPathFromURI(getImageUri(rotatedBitmap)));
allFile.add(file);
return file;
}
public Uri getImageUri(Bitmap inImage) {
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
inImage.compress(Bitmap.CompressFormat.JPEG, 100, bytes);
String path = MediaStore.Images.Media.insertImage(getContentResolver(), inImage, "Title", null);
return Uri.parse(path);
}
public String getRealPathFromURI(Uri uri) {
Cursor cursor = getContentResolver().query(uri, null, null, null, null);
cursor.moveToFirst();
int idx = cursor.getColumnIndex(MediaStore.Images.ImageColumns.DATA);
return cursor.getString(idx);
}
private class AsyncImageTask extends AsyncTask<ArrayList<String>, Void, List<MultipartBody.Part>> {
ArrayList<String> imaagesLis = null;
boolean checkDimensions = true, checkImageSize;
@SafeVarargs
@Override
protected final List<MultipartBody.Part> doInBackground(ArrayList<String>... params) {
List<MultipartBody.Part> parts = null;
imaagesLis = params[0];
totalFiles = imaagesLis.size();
for (int i = 0; i < imaagesLis.size(); i++) {
parts = new ArrayList<>();
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd_HHmmss");
String currentDateandTime = sdf.format(new Date());
Log.d("info image", currentDateandTime);
checkDimensions = true;
checkImageSize = true;
if (adPostImageModel.getDim_is_show()) {
BitmapFactory.Options options = new BitmapFactory.Options();
options.inJustDecodeBounds = true;
BitmapFactory.decodeFile(imaagesLis.get(i), options);
int imageWidth = options.outWidth;
int imageHeight = options.outHeight;
if (imageHeight > Integer.parseInt(adPostImageModel.getDim_height()) &&
imageWidth > Integer.parseInt(adPostImageModel.getDim_width())) {
checkDimensions = true;
} else {
checkDimensions = false;
runOnUiThread(() -> Toast.makeText(context, adPostImageModel.getDim_height_message(), Toast.LENGTH_SHORT).show());
}
}
File file = new File(imaagesLis.get(i));
long fileSizeInBytes = file.length();
Integer fileSizeBytes = Math.round(fileSizeInBytes);
if (fileSizeBytes > Integer.parseInt(adPostImageModel.getImg_size())) {
checkImageSize = false;
runOnUiThread(() -> Toast.makeText(context, adPostImageModel.getImg_message(), Toast.LENGTH_SHORT).show());
} else {
checkImageSize = true;
}
if (checkImageSize && checkDimensions) {
File finalFile1 = adforest_rotateImage(imaagesLis.get(i));
// File finalFile1 =new File(imaagesLis.get(i));
Uri tempUri = SettingsMain.decodeFile(context, finalFile1);
parts.add(adforestst_prepareFilePart("file" + i, tempUri));
adforest_uploadImages(parts);
}
}
return parts;
} // end of doInBackground
} // end of AsyncImageTask
You can use this scale-down function:
public static Bitmap scaleDown(Bitmap your_image, float YOUR_SIZE,boolean filter) { //filter false = Pixelated, true = Smooth
float ratio = Math.min( YOUR_SIZE/ your_image.getWidth(),
YOUR_SIZE/ your_image.getHeight());
int width = Math.round( ratio * your_image.getWidth());
int height = Math.round( ratio * your_image.getHeight());
Bitmap newBitmap = Bitmap.createScaledBitmap(your_image, width, height,
filter);
return newBitmap;
}
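For example, assuming imagePath is the path of the selected file and 1024 px is the target for the longest side (both are just illustrative values):

Bitmap original = BitmapFactory.decodeFile(imagePath);
// scaleDown keeps the aspect ratio; the longest side will be at most 1024 px
Bitmap resized = scaleDown(original, 1024f, true);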
You already have the mechanism for compressing in your code:
inImage.compress(Bitmap.CompressFormat.JPEG, 100, bytes);
See the documentation:
https://developer.android.com/reference/android/graphics/Bitmap.html#public-methods_1
Basically you need to change 100 to a lower number in order to compress the image.
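For instance (70 is just an example value; tune it to your needs):

ByteArrayOutputStream bytes = new ByteArrayOutputStream();
// quality ranges from 0 to 100; lower values produce smaller files
inImage.compress(Bitmap.CompressFormat.JPEG, 70, bytes);

Note that CompressFormat.PNG is lossless, so the quality parameter has no effect there; use JPEG (or WEBP) if you want the quality setting to actually shrink the file.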
I'm creating an Android camera app. It is a custom camera built on the Camera API.
Whenever I take a picture with the camera, it is always saved in landscape mode.
I'm using this method to set the camera orientation:
public static void setCameraDisplayOrientation(Activity activity,
int cameraId, android.hardware.Camera camera) {
android.hardware.Camera.CameraInfo info =
new android.hardware.Camera.CameraInfo();
android.hardware.Camera.getCameraInfo(cameraId, info);
int rotation = activity.getWindowManager().getDefaultDisplay()
.getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0: degrees = 0; break;
case Surface.ROTATION_90: degrees = 90; break;
case Surface.ROTATION_180: degrees = 180; break;
case Surface.ROTATION_270: degrees = 270; break;
}
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (info.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate the mirror
} else { // back-facing
result = (info.orientation - degrees + 360) % 360;
}
camera.setDisplayOrientation(result);
}
Here is the code that is responsible for storing the image:
PictureCallback jpegCallback = new PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
File pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE);
if (pictureFile == null){
Log.d(TAG, "Error creating media file, check storage permissions: ");
return;
}
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(data);
fos.close();
} catch (FileNotFoundException e) {
Log.d(TAG, "File not found: " + e.getMessage());
} catch (IOException e) {
Log.d(TAG, "Error accessing file: " + e.getMessage());
}
}
};
This code saves the image successfully, but always in landscape mode. What am I missing? I'm new to Android. I want to save the image exactly as it is shown in the preview.
Update 2 (according to the answer below)
PictureCallback jpegCallback = new PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
Camera.CameraInfo info = new Camera.CameraInfo();
int rotationAngle = getCorrectCameraOrientation (MainActivity.this, info);
Toast.makeText(MainActivity.this, "Angle "+ rotationAngle, Toast.LENGTH_SHORT).show();
File pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE);
try {
writeFile (data, pictureFile);
} catch (IOException e) {
e.printStackTrace();
}
processImage (pictureFile, rotationAngle, 1);
}
};
I'm using the code this way, but it is not working; the problem is the same and the image is always saved in landscape mode.
Try the following:
1. First of all, add the methods (writeFile, processImage and getCorrectCameraOrientation) defined below (after step 3).
2. Calculate the camera orientation after capturing the photo and update onPictureTaken:
@Override
public void onPictureTaken (final byte[] data, final Camera camera) {
int rotationAngle = getCorrectCameraOrientation (this, info);
3. Create the file and correct the photo's angle using rotationAngle:
File file = new File (folder, fileName);
try {
file.createNewFile ();
}
catch (IOException e) {
e.printStackTrace ();
}
writeFile (data, file);
processImage (file, rotationAngle, compressRatio);
writeFile
public static void writeFile (byte[] data, File file) throws IOException {
BufferedOutputStream bos = null;
try {
FileOutputStream fos = new FileOutputStream (file);
bos = new BufferedOutputStream (fos);
bos.write (data);
}
finally {
if (bos != null) {
try {
bos.flush ();
bos.close ();
}
catch (Exception e) {
}
}
}
}
processImage
public static void processImage (File file, int rotationAngle, int compressionRatio) {
BufferedOutputStream bos = null;
try {
Bitmap bmp = BitmapFactory.decodeFile (file.getPath ());
Matrix matrix = new Matrix ();
matrix.postRotate (rotationAngle);
bmp = Bitmap.createBitmap (bmp, 0, 0, bmp.getWidth (), bmp.getHeight (), matrix, true);
// write through the buffered stream so the finally block can actually flush and close it
bos = new BufferedOutputStream (new FileOutputStream (file));
bmp.compress (Bitmap.CompressFormat.PNG, compressionRatio, bos);
}
catch (IOException e) {
e.printStackTrace ();
}
catch (OutOfMemoryError t) {
t.printStackTrace ();
}
catch (Throwable t) {
t.printStackTrace ();
}
finally {
if (bos != null) {
try {
bos.flush ();
bos.close ();
}
catch (Exception e) {
}
}
}
}
getCorrectCameraOrientation
public static int getCorrectCameraOrientation (Activity activity, Camera.CameraInfo info) {
int rotation = activity.getWindowManager ().getDefaultDisplay ().getRotation ();
int degrees = 0;
if (hasValidRotation (rotation)) {
degrees = rotation * 90;
}
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (info.orientation + degrees) % 360;
result = (360 - result) % 360;
}
else {
result = (info.orientation - degrees + 360) % 360;
}
return result;
}
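The snippet above calls hasValidRotation(), which the answer does not define; a minimal sketch of what it presumably checks (my assumption) is:

// Surface.ROTATION_0..ROTATION_270 are the constants 0..3, so degrees = rotation * 90
public static boolean hasValidRotation (int rotation) {
    return rotation == Surface.ROTATION_0
            || rotation == Surface.ROTATION_90
            || rotation == Surface.ROTATION_180
            || rotation == Surface.ROTATION_270;
}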
This question already has answers here: Why does an image captured using camera intent gets rotated on some devices on Android? (23 answers). It was closed as a duplicate 6 years ago.
In my app I need to load an image from the camera.
This is the code I've used:
private void openCamera()
{
mMediaUri =getOutputMediaFileUri(MEDIA_TYPE_IMAGE);
Intent photoIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
photoIntent.putExtra(MediaStore.EXTRA_OUTPUT, mMediaUri);
startActivityForResult(photoIntent, REQUEST_TAKE_PHOTO);
}
private Uri getOutputMediaFileUri(int mediaTypeImage)
{
//check for external storage
if(isExternalStorageAvaiable())
{
File mediaStorageDir = getActivity().getExternalFilesDir(Environment.DIRECTORY_PICTURES);
String fileName = "";
String fileType = "";
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new java.util.Date());
fileName = "IMG_"+timeStamp;
fileType = ".jpg";
File mediaFile;
try
{
mediaFile = File.createTempFile(fileName,fileType,mediaStorageDir);
Log.i("st","File: "+Uri.fromFile(mediaFile));
}
catch (IOException e)
{
e.printStackTrace();
Log.i("St","Error creating file: " + mediaStorageDir.getAbsolutePath() +fileName +fileType);
return null;
}
return FileProvider.getUriForFile(getActivity(),BuildConfig.APPLICATION_ID + ".provider", mediaFile);
}
//something went wrong
return null;
}
private boolean isExternalStorageAvaiable()
{
String state = Environment.getExternalStorageState();
if(Environment.MEDIA_MOUNTED.equals(state))
{
return true;
}
else
{
return false;
}
}
public void onActivityResult(int requestCode, int resultCode, Intent data)
{
super.onActivityResult(requestCode, resultCode, data);
if(resultCode == DIALOG_CODE)
{
String s = data.getStringExtra("choose");
if(s.equals(getString(R.string.takephoto)))
{
openCamera();
}
else if(s.equals(getString(R.string.library)))
{
openGallery();
}
}
else if(resultCode == RESULT_OK)
{
if (requestCode == REQUEST_TAKE_PHOTO || requestCode == REQUEST_PICK_PHOTO) // from the camera
{
if (data != null) // gallery case
{
mMediaUri = data.getData();
}
Picasso.with(getActivity()).load(mMediaUri).resize(1280, 1280).transform(new RoundedTransformation()).centerCrop().into(photo, new Callback()
{
public void onSuccess()
{
}
@Override
public void onError() {
}
});
}
}
}
It works, but with one problem: when I take a photo with the camera, the picture is loaded into the ImageView rotated (the original post included screenshots of both cases), while a picture loaded from the gallery displays correctly.
What's the error here?
Thanks
You have to rotate the image using ExifInterface.
The image gets rotated according to the angle at which it was captured; by reading the EXIF orientation you can rotate it back so it displays properly, no matter what angle the photo was taken from:
File mediaFile = new File(mediaPath);
Bitmap bitmap;
if (mediaFile.exists()) {
if (isImage(mediaPath)) {
Bitmap myBitmap = BitmapFactory.decodeFile(mediaFile.getAbsolutePath());
int height = (myBitmap.getHeight() * 512 / myBitmap.getWidth());
Bitmap scale = Bitmap.createScaledBitmap(myBitmap, 512, height, true);
int rotate = 0;
ExifInterface exif = null; // declared here so the snippet is self-contained
try {
exif = new ExifInterface(mediaFile.getAbsolutePath());
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
int orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_UNDEFINED);
switch (orientation) {
case ExifInterface.ORIENTATION_NORMAL:
rotate = 0;
break;
case ExifInterface.ORIENTATION_ROTATE_270:
rotate = 270;
break;
case ExifInterface.ORIENTATION_ROTATE_180:
rotate = 180;
break;
case ExifInterface.ORIENTATION_ROTATE_90:
rotate = 90;
break;
}
Matrix matrix = new Matrix();
matrix.postRotate(rotate);
Bitmap rotateBitmap = Bitmap.createBitmap(scale, 0, 0, scale.getWidth(),
scale.getHeight(), matrix, true);
displayImage.setImageBitmap(rotateBitmap);
}
}
public static boolean isImage(String str) {
boolean temp = false;
String[] arr = { ".jpeg", ".jpg", ".png", ".bmp", ".gif" };
for (int i = 0; i < arr.length; i++) {
temp = str.endsWith(arr[i]);
if (temp) {
break;
}
}
return temp;
}
We are developing an Android application and using ZXing to scan QR codes. Is there a way to get the full frame of a decoded QR code using ZXing?
The following method was added to the PlanarYUVLuminanceSource class:
public void saveFrame() {
try {
YuvImage image = new YuvImage(yuvData, ImageFormat.NV21, getWidth(), getHeight(), null);
File file = new File(Environment.getExternalStorageDirectory().getPath() + "/frame.jpeg");
FileOutputStream fos = new FileOutputStream(file);
image.compressToJpeg(new Rect(0, 0, image.getWidth(), image.getHeight()), 100, fos);
fos.close(); // make sure the JPEG data is flushed to disk
} catch (FileNotFoundException e) {
Log.e(TAG, "FileNotFoundException!");
} catch (IOException e) {
Log.e(TAG, "Could not write frame", e);
}
}
It is called within the DecodeHandler class as shown below:
private void decode(byte[] data, int width, int height) {
long start = System.currentTimeMillis();
Result rawResult = null;
PlanarYUVLuminanceSource source = CameraManager.get()
.buildLuminanceSource(data, width, height);
BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
try {
rawResult = multiFormatReader.decodeWithState(bitmap);
} catch (ReaderException re) {
// continue
} finally {
multiFormatReader.reset();
}
if (rawResult != null) {
// Don't log the barcode contents for security.
long end = System.currentTimeMillis();
Log.d(TAG, "Found barcode in " + (end - start) + " ms");
Message message = Message.obtain(activity.getHandler(),
R.id.zxinglib_decode_succeeded, rawResult);
Bundle bundle = new Bundle();
bundle.putParcelable(DecodeThread.BARCODE_BITMAP,
source.renderCroppedGreyscaleBitmap());
source.saveFrame();
message.setData(bundle);
message.sendToTarget();
} else {
Message message = Message.obtain(activity.getHandler(),
R.id.zxinglib_decode_failed);
message.sendToTarget();
}
}
(The resulting image from the original post is not reproduced here.)
Finally I found a solution. It is based on the renderCroppedGreyscaleBitmap() method of the PlanarYUVLuminanceSource class. Here is how I changed the decode() method:
private void decode(byte[] data, int width, int height) {
long start = System.currentTimeMillis();
Result rawResult = null;
PlanarYUVLuminanceSource source = CameraManager.get().buildLuminanceSource(data, width, height);
BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
try {
rawResult = multiFormatReader.decodeWithState(bitmap);
} catch (ReaderException re) {
// continue
} finally {
multiFormatReader.reset();
}
if (rawResult != null) {
// Don't log the barcode contents for security.
long end = System.currentTimeMillis();
Log.d(TAG, "Found barcode in " + (end - start) + " ms");
// Grab & save frame
Bitmap wholeBmp = renderGrayScaleBitmap(data, width, height);
if(wholeBmp != null)
saveBitmap(wholeBmp, "frame.png");
else
Log.e(TAG, "Bitmap of frame is empty!");
Message message = Message.obtain(activity.getHandler(), R.id.zxinglib_decode_succeeded, rawResult);
Bundle bundle = new Bundle();
bundle.putParcelable(DecodeThread.BARCODE_BITMAP, source.renderCroppedGreyscaleBitmap());
message.setData(bundle);
message.sendToTarget();
} else {
Message message = Message.obtain(activity.getHandler(), R.id.zxinglib_decode_failed);
message.sendToTarget();
}
}
Create a bitmap from the raw frame data:
private Bitmap renderGrayScaleBitmap(byte[] data, int width, int height) {
int[] pixels = new int[width * height];
int inputOffset = width;
for (int y = 0; y < height; y++) {
int outputOffset = y * width;
for (int x = 0; x < width; x++) {
int grey = data[inputOffset + x] & 0xff;
pixels[outputOffset + x] = 0xFF000000 | (grey * 0x00010101);
}
inputOffset += width;
}
Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
bitmap.setPixels(pixels, 0, width, 0, 0, width, height);
return bitmap;
}
Save the bitmap to the SD card:
private void saveBitmap(Bitmap bmp, String name) {
FileOutputStream out = null;
try {
String filename = Environment.getExternalStorageDirectory().toString() + "/" + name;
Log.i(TAG, "writtenPath=" + filename);
out = new FileOutputStream(filename);
bmp.compress(Bitmap.CompressFormat.PNG, 100, out);
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
if (out != null) {
out.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
I made a simple camera application for Android, but there is one problem that I cannot solve. My camera app's SurfaceView is in portrait mode; however, when I take a picture and save it to a folder, it is not saved in portrait mode. Since I'm new to Android development, I would appreciate some help. I've put some code below.
PictureCallback jpegCallback = new PictureCallback() {
#SuppressWarnings("deprecation")
public void onPictureTaken(byte[] data, Camera camera) {
FileOutputStream outStream = null;
Calendar c = Calendar.getInstance();
File videoDirectory = new File(path);
if (!videoDirectory.exists()) {
videoDirectory.mkdirs();
}
try {
// Write to SD Card
outStream = new FileOutputStream(path + c.getTime().getSeconds() + ".jpg");
outStream.write(data);
outStream.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
}
Bitmap realImage;
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 5;
options.inPurgeable=true; // allows the bitmap's pixel data to be purged if the system needs to reclaim memory
options.inInputShareable=true; // lets the bitmap keep a shareable reference to the input data so it can be re-decoded after a purge
realImage = BitmapFactory.decodeByteArray(data,0,data.length,options);
ExifInterface exif = null;
try {
exif = new ExifInterface(path + c.getTime().getSeconds()
+ ".jpg");
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
Log.d("EXIF value",
exif.getAttribute(ExifInterface.TAG_ORIENTATION));
if (exif.getAttribute(ExifInterface.TAG_ORIENTATION)
.equalsIgnoreCase("1")) {
realImage = rotate(realImage, 90);
} else if (exif.getAttribute(ExifInterface.TAG_ORIENTATION)
.equalsIgnoreCase("8")) {
realImage = rotate(realImage, 90);
} else if (exif.getAttribute(ExifInterface.TAG_ORIENTATION)
.equalsIgnoreCase("3")) {
realImage = rotate(realImage, 90);
} else if (exif.getAttribute(ExifInterface.TAG_ORIENTATION)
.equalsIgnoreCase("0")) {
realImage = rotate(realImage, 90);
}
} catch (Exception e) {
}
image.setImageBitmap(realImage);
fotoButton.setClickable(true);
camera.startPreview();
progressLayout.setVisibility(View.GONE);
exitButton.setClickable(true);
}
};
public static Bitmap rotate(Bitmap source, float angle) {
Matrix matrix = new Matrix();
matrix.postRotate(angle);
return Bitmap.createBitmap(source, 0, 0, source.getWidth(),
source.getHeight(), matrix, false);
}
Try the methods below. On some devices the saved image ends up rotated by 90, 180 or 270 degrees; these helpers read the EXIF orientation and rotate the image back to normal if it got rotated:
public static Bitmap RotateImage(String imagePath, boolean rotateImage) {
Matrix imageMatrix = new Matrix();
if (rotateImage) {
int rotationNeeded = getImageRotationDegrees(imagePath);
if (rotationNeeded != 0) {
imageMatrix.postRotate(rotationNeeded);
}
}
// decode the file and apply the rotation matrix (the original snippet was truncated here)
Bitmap source = BitmapFactory.decodeFile(imagePath);
return Bitmap.createBitmap(source, 0, 0, source.getWidth(), source.getHeight(), imageMatrix, true);
}
public static int getImageRotationDegrees(String imagePath) {
int currentRotation = getImageOrientation(imagePath);
switch (currentRotation) {
case ExifInterface.ORIENTATION_ROTATE_90:
return 90;
case ExifInterface.ORIENTATION_ROTATE_180:
return 180;
case ExifInterface.ORIENTATION_ROTATE_270:
return 270;
}
return 0;
}
public static int getImageOrientation(String imagePath) {
ExifInterface exifInterface;
try {
exifInterface = new ExifInterface(imagePath);
} catch (IOException e) {
return ExifInterface.ORIENTATION_UNDEFINED;
}
return exifInterface.getAttributeInt(ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_UNDEFINED);
}