Accessing Full Frame Using ZXing on Android

We are developing an Android application and using ZXing for scanning QR codes. Is there a way to get the full frame of a decoded QR code using ZXing?
The following method was added to the PlanarYUVLuminanceSource class:
public void saveFrame() {
    try {
        YuvImage image = new YuvImage(yuvData, ImageFormat.NV21, getWidth(), getHeight(), null);
        File file = new File(Environment.getExternalStorageDirectory().getPath() + "/frame.jpeg");
        FileOutputStream fos = new FileOutputStream(file);
        image.compressToJpeg(new Rect(0, 0, image.getWidth(), image.getHeight()), 100, fos);
        fos.close(); // close the stream so the JPEG is fully flushed to disk
    } catch (FileNotFoundException e) {
        Log.e(TAG, "FileNotFoundException!");
    } catch (IOException e) {
        Log.e(TAG, "IOException while closing the stream!");
    }
}
It is called within the DecodeHandler class as follows:
private void decode(byte[] data, int width, int height) {
long start = System.currentTimeMillis();
Result rawResult = null;
PlanarYUVLuminanceSource source = CameraManager.get()
.buildLuminanceSource(data, width, height);
BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
try {
rawResult = multiFormatReader.decodeWithState(bitmap);
} catch (ReaderException re) {
// continue
} finally {
multiFormatReader.reset();
}
if (rawResult != null) {
// Don't log the barcode contents for security.
long end = System.currentTimeMillis();
Log.d(TAG, "Found barcode in " + (end - start) + " ms");
Message message = Message.obtain(activity.getHandler(),
R.id.zxinglib_decode_succeeded, rawResult);
Bundle bundle = new Bundle();
bundle.putParcelable(DecodeThread.BARCODE_BITMAP,
source.renderCroppedGreyscaleBitmap());
source.saveFrame();
message.setData(bundle);
message.sendToTarget();
} else {
Message message = Message.obtain(activity.getHandler(),
R.id.zxinglib_decode_failed);
message.sendToTarget();
}
}
Here is the result image:
[result frame image]
Finally, I found a solution. It is based on the renderCroppedGreyscaleBitmap() method of the PlanarYUVLuminanceSource class. Here is how I changed the decode() method:
private void decode(byte[] data, int width, int height) {
long start = System.currentTimeMillis();
Result rawResult = null;
PlanarYUVLuminanceSource source = CameraManager.get().buildLuminanceSource(data, width, height);
BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
try {
rawResult = multiFormatReader.decodeWithState(bitmap);
} catch (ReaderException re) {
// continue
} finally {
multiFormatReader.reset();
}
if (rawResult != null) {
// Don't log the barcode contents for security.
long end = System.currentTimeMillis();
Log.d(TAG, "Found barcode in " + (end - start) + " ms");
// Grab & save frame
Bitmap wholeBmp = renderGrayScaleBitmap(data, width, height);
if(wholeBmp != null)
saveBitmap(wholeBmp, "frame.png");
else
Log.e(TAG, "Bitmap of frame is empty!");
Message message = Message.obtain(activity.getHandler(), R.id.zxinglib_decode_succeeded, rawResult);
Bundle bundle = new Bundle();
bundle.putParcelable(DecodeThread.BARCODE_BITMAP, source.renderCroppedGreyscaleBitmap());
message.setData(bundle);
message.sendToTarget();
} else {
Message message = Message.obtain(activity.getHandler(), R.id.zxinglib_decode_failed);
message.sendToTarget();
}
}
Create a bitmap from the decoded data:
private Bitmap renderGrayScaleBitmap(byte[] data, int width, int height) {
    int[] pixels = new int[width * height];
    int inputOffset = 0; // start at row 0 of the Y plane; an offset of `width` would skip the first row
    for (int y = 0; y < height; y++) {
        int outputOffset = y * width;
        for (int x = 0; x < width; x++) {
            int grey = data[inputOffset + x] & 0xff;
            pixels[outputOffset + x] = 0xFF000000 | (grey * 0x00010101);
        }
        inputOffset += width;
    }
    Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    bitmap.setPixels(pixels, 0, width, 0, 0, width, height);
    return bitmap;
}
Save the bitmap to the SD card:
private void saveBitmap(Bitmap bmp, String name) {
FileOutputStream out = null;
try {
String filename = Environment.getExternalStorageDirectory().toString() + "/" + name;
Log.i(TAG, "writtenPath=" + filename);
out = new FileOutputStream(filename);
bmp.compress(Bitmap.CompressFormat.PNG, 100, out);
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
if (out != null) {
out.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
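Note that both save methods above write to shared external storage. On older Android versions this requires the WRITE_EXTERNAL_STORAGE permission in AndroidManifest.xml (and, from Android 6.0, a matching runtime permission request); on Android 10+ scoped storage restricts these paths further. A minimal manifest entry:
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />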

Related

Android: create PDF from ExpandableListView content

I was using RecyclerView to create the PDF previously. I want to add share and print functionality, for which I need to create a PDF with the list content in it. The method I was using was:
public static void generatePDF(RecyclerView view, boolean isShareTrue, Context context, boolean isWeekly) {
RecyclerView.Adapter adapter = view.getAdapter();
Bitmap bigBitmap = null;
if (adapter != null) {
int size = adapter.getItemCount();
int height = 0;
Paint paint = new Paint();
int iHeight = 0;
final int maxMemory = (int) (Runtime.getRuntime().maxMemory() / 1024);
// Use 1/8th of the available memory for this memory cache.
final int cacheSize = maxMemory / 8;
LruCache<String, Bitmap> bitmaCache = new LruCache<>(cacheSize);
for (int i = 0; i < size; i++) {
RecyclerView.ViewHolder holder = adapter.createViewHolder(view, adapter.getItemViewType(i));
adapter.onBindViewHolder(holder, i);
holder.itemView.measure(View.MeasureSpec.makeMeasureSpec(view.getWidth(), View.MeasureSpec.EXACTLY),
View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED));
holder.itemView.layout(0, 0, holder.itemView.getMeasuredWidth(), holder.itemView.getMeasuredHeight());
holder.itemView.setDrawingCacheEnabled(true);
holder.itemView.buildDrawingCache();
Bitmap drawingCache = holder.itemView.getDrawingCache();
if (drawingCache != null) {
bitmaCache.put(String.valueOf(i), drawingCache);
}
height += holder.itemView.getMeasuredHeight();
}
bigBitmap = Bitmap.createBitmap(view.getMeasuredWidth(), height, Bitmap.Config.ARGB_8888);
Canvas bigCanvas = new Canvas(bigBitmap);
bigCanvas.drawColor(Color.WHITE);
Document document = new Document(PageSize.A4);
File file;
if (isWeekly)
file = new File(Environment.getExternalStorageDirectory(), "Weekly_list_" + AppPreferences.INSTANCE.getUserId() + ".pdf");
else
file = new File(Environment.getExternalStorageDirectory(), "Open_Cycle_List_" + AppPreferences.INSTANCE.getUserId() + ".pdf");
try {
PdfWriter.getInstance(document, new FileOutputStream(file));
} catch (DocumentException | FileNotFoundException e) {
e.printStackTrace();
}
for (int i = 0; i < size; i++) {
try {
//Adding the content to the document
Bitmap bmp = bitmaCache.get(String.valueOf(i));
ByteArrayOutputStream stream = new ByteArrayOutputStream();
bmp.compress(Bitmap.CompressFormat.PNG, 100, stream);
Image image = Image.getInstance(stream.toByteArray());
float scaler = ((document.getPageSize().getWidth() - document.leftMargin()
- document.rightMargin() - 0) / image.getWidth()) * 100; // 0 means you have no indentation. If you have any, change it.
image.scalePercent(scaler);
image.setAlignment(com.itextpdf.text.Image.ALIGN_CENTER | com.itextpdf.text.Image.ALIGN_TOP);
if (!document.isOpen()) {
document.open();
}
document.add(image);
} catch (Exception ex) {
Log.e("TAG-ORDER PRINT ERROR", ex.getMessage());
}
}
if (document.isOpen()) {
document.close();
}
if (isShareTrue) {
Intent intentShareFile = new Intent(Intent.ACTION_SEND);
if (file.exists()) {
Uri pdfUri;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
pdfUri = FileProvider.getUriForFile(context, context.getPackageName() + ".provider", file);
} else {
pdfUri = Uri.fromFile(file);
}
intentShareFile.setType("application/pdf");
intentShareFile.putExtra(Intent.EXTRA_STREAM, pdfUri);
intentShareFile.putExtra(Intent.EXTRA_SUBJECT, "Sharing File...");
intentShareFile.putExtra(Intent.EXTRA_TEXT, "Sharing File...");
context.startActivity(Intent.createChooser(intentShareFile, "Share File"));
}
} else {
Toast.makeText(context, "PDF Generated successfully", Toast.LENGTH_LONG).show();
}
}
}
Now I have replaced the RecyclerView with an ExpandableListView, and my print and share functionality no longer works.
I want to create the PDF from the ExpandableListView content.
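One possible direction (an untested sketch, not from the original post): an ExpandableListView exposes its rows through an ExpandableListAdapter, so the same measure/lay-out/draw loop can be applied per group and child view. The framework calls below are real; the helper names are hypothetical.
// Sketch: rasterize each group and child row of an ExpandableListView.
private static List<Bitmap> collectRowBitmaps(ExpandableListView listView) {
    ExpandableListAdapter adapter = listView.getExpandableListAdapter();
    List<Bitmap> rows = new ArrayList<>();
    for (int g = 0; g < adapter.getGroupCount(); g++) {
        rows.add(drawRow(adapter.getGroupView(g, true, null, listView), listView.getWidth()));
        for (int c = 0; c < adapter.getChildrenCount(g); c++) {
            boolean isLast = c == adapter.getChildrenCount(g) - 1;
            rows.add(drawRow(adapter.getChildView(g, c, isLast, null, listView), listView.getWidth()));
        }
    }
    return rows;
}
// Measure, lay out, and draw one row view into a bitmap.
private static Bitmap drawRow(View row, int width) {
    row.measure(View.MeasureSpec.makeMeasureSpec(width, View.MeasureSpec.EXACTLY),
            View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED));
    row.layout(0, 0, row.getMeasuredWidth(), row.getMeasuredHeight());
    Bitmap bmp = Bitmap.createBitmap(row.getMeasuredWidth(), row.getMeasuredHeight(), Bitmap.Config.ARGB_8888);
    row.draw(new Canvas(bmp));
    return bmp;
}
The resulting bitmaps could then feed the same iText page-building loop used in the RecyclerView version of generatePDF().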

How to take picture with camera using ARCore

ARCore camera doesn't seem to support takePicture.
https://developers.google.com/ar/reference/java/com/google/ar/core/Camera
Anyone know how I can take pictures with ARCore?
I am assuming you mean a picture of what the camera is seeing plus the AR objects. At a high level, you need to get permission to write to external storage to save the picture, copy the frame from OpenGL, and then save it as a PNG (for example). Here are the specifics:
Add the WRITE_EXTERNAL_STORAGE permission to the AndroidManifest.xml
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
Then change CameraPermissionHelper to iterate over both the CAMERA and WRITE_EXTERNAL_STORAGE permissions to make sure they are granted
private static final String REQUIRED_PERMISSIONS[] = {
Manifest.permission.WRITE_EXTERNAL_STORAGE,
Manifest.permission.CAMERA
};
/**
* Check to see we have the necessary permissions for this app.
*/
public static boolean hasCameraPermission(Activity activity) {
for (String p : REQUIRED_PERMISSIONS) {
if (ContextCompat.checkSelfPermission(activity, p) !=
PackageManager.PERMISSION_GRANTED) {
return false;
}
}
return true;
}
/**
* Check to see we have the necessary permissions for this app,
* and ask for them if we don't.
*/
public static void requestCameraPermission(Activity activity) {
ActivityCompat.requestPermissions(activity, REQUIRED_PERMISSIONS,
CAMERA_PERMISSION_CODE);
}
/**
* Check to see if we need to show the rationale for this permission.
*/
public static boolean shouldShowRequestPermissionRationale(Activity activity) {
for (String p : REQUIRED_PERMISSIONS) {
if (ActivityCompat.shouldShowRequestPermissionRationale(activity, p)) {
return true;
}
}
return false;
}
Next, add a couple of fields to HelloARActivity to keep track of the dimensions of the frame, and a boolean to indicate when to save the picture.
private int mWidth;
private int mHeight;
private boolean capturePicture = false;
Set the width and height in onSurfaceChanged()
public void onSurfaceChanged(GL10 gl, int width, int height) {
mDisplayRotationHelper.onSurfaceChanged(width, height);
GLES20.glViewport(0, 0, width, height);
mWidth = width;
mHeight = height;
}
At the bottom of onDrawFrame(), add a check for the capture flag. This should be done after all the other drawing happens.
if (capturePicture) {
    capturePicture = false;
    try {
        SavePicture();
    } catch (IOException e) {
        // SavePicture() is declared to throw IOException, so handle it here
        Log.e(TAG, "Failed to save the picture", e);
    }
}
Then add the onClick method for a button to take the picture, and the actual code to save the image:
public void onSavePicture(View view) {
    // Here we just set a flag so we can copy the image from the
    // onDrawFrame() method. This is required because the OpenGL calls
    // must happen on the rendering thread.
    this.capturePicture = true;
}
/**
* Call from the GLThread to save a picture of the current frame.
*/
public void SavePicture() throws IOException {
int pixelData[] = new int[mWidth * mHeight];
// Read the pixels from the current GL frame.
IntBuffer buf = IntBuffer.wrap(pixelData);
buf.position(0);
GLES20.glReadPixels(0, 0, mWidth, mHeight,
GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
// Create a file in the Pictures/HelloAR album.
final File out = new File(Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES) + "/HelloAR", "Img" +
Long.toHexString(System.currentTimeMillis()) + ".png");
// Make sure the directory exists
if (!out.getParentFile().exists()) {
out.getParentFile().mkdirs();
}
// Convert the pixel data from RGBA to what Android wants, ARGB.
int bitmapData[] = new int[pixelData.length];
for (int i = 0; i < mHeight; i++) {
for (int j = 0; j < mWidth; j++) {
int p = pixelData[i * mWidth + j];
int b = (p & 0x00ff0000) >> 16;
int r = (p & 0x000000ff) << 16;
int ga = p & 0xff00ff00;
bitmapData[(mHeight - i - 1) * mWidth + j] = ga | r | b;
}
}
// Create a bitmap.
Bitmap bmp = Bitmap.createBitmap(bitmapData,
mWidth, mHeight, Bitmap.Config.ARGB_8888);
// Write it to disk.
FileOutputStream fos = new FileOutputStream(out);
bmp.compress(Bitmap.CompressFormat.PNG, 100, fos);
fos.flush();
fos.close();
runOnUiThread(new Runnable() {
    @Override
    public void run() {
        showSnackbarMessage("Wrote " + out.getName(), false);
    }
});
}
The last step is to add the button to the end of the activity_main.xml layout:
<Button
    android:id="@+id/fboRecord_button"
    android:layout_width="wrap_content"
    android:layout_height="wrap_content"
    android:layout_alignStart="@+id/surfaceview"
    android:layout_alignTop="@+id/surfaceview"
    android:onClick="onSavePicture"
    android:text="Snap"
    tools:ignore="OnClick"/>
Acquiring the image buffer
In the latest ARCore SDK, we get access to the image buffer via the public Frame class. Below is sample code that gives us access to the image buffer:
private void onSceneUpdate(FrameTime frameTime) {
    try {
        Frame currentFrame = sceneView.getArFrame();
        Image currentImage = currentFrame.acquireCameraImage();
        int imageFormat = currentImage.getFormat();
        if (imageFormat == ImageFormat.YUV_420_888) {
            Log.d("ImageFormat", "Image format is YUV_420_888");
        }
    } catch (NotYetAvailableException e) {
        // acquireCameraImage() throws this until a camera frame is available
        Log.e("onSceneUpdate", "Camera image not yet available", e);
    }
}
onSceneUpdate() will be called for every update if you register it as the scene's update listener. The image will be in YUV_420_888 format, but it will have the full field of view of the native high-resolution camera.
Also, do not forget to release the acquired image by calling currentImage.close(). Otherwise you will receive a ResourceExhaustedException on the next run of onSceneUpdate().
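For reference, with recent Sceneform versions the listener can be registered on the scene once, e.g. in onCreate() (a minimal sketch; sceneView is the same field used above):
// Invoke onSceneUpdate() on every frame.
sceneView.getScene().addOnUpdateListener(this::onSceneUpdate);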
Writing the acquired image buffer to a file
The following implementation converts the YUV buffer to a compressed JPEG byte array:
private static byte[] NV21toJPEG(byte[] nv21, int width, int height) {
ByteArrayOutputStream out = new ByteArrayOutputStream();
YuvImage yuv = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
yuv.compressToJpeg(new Rect(0, 0, width, height), 100, out);
return out.toByteArray();
}
public static void WriteImageInformation(Image image, String path) throws IOException {
    byte[] data = NV21toJPEG(YUV_420_888toNV21(image), image.getWidth(), image.getHeight());
    BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(path));
    bos.write(data);
    bos.flush();
    bos.close();
}
private static byte[] YUV_420_888toNV21(Image image) {
byte[] nv21;
ByteBuffer yBuffer = image.getPlanes()[0].getBuffer();
ByteBuffer uBuffer = image.getPlanes()[1].getBuffer();
ByteBuffer vBuffer = image.getPlanes()[2].getBuffer();
int ySize = yBuffer.remaining();
int uSize = uBuffer.remaining();
int vSize = vBuffer.remaining();
nv21 = new byte[ySize + uSize + vSize];
// U and V are swapped: NV21 stores interleaved VU, which this direct copy relies on
yBuffer.get(nv21, 0, ySize);
vBuffer.get(nv21, ySize, vSize);
uBuffer.get(nv21, ySize + vSize, uSize);
return nv21;
}
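Putting the pieces together, a hypothetical call site inside the try block of onSceneUpdate() could look like this (the path and log tag are illustrative):
Image image = currentFrame.acquireCameraImage();
try {
    WriteImageInformation(image, "/sdcard/Pictures/arcore_frame.jpg"); // example path
} catch (IOException e) {
    Log.e("FrameDump", "Failed to write frame", e);
} finally {
    image.close(); // release the buffer, as noted above
}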
Sorry for answering late. You can use this code to take a picture in ARCore:
private String generateFilename() {
String date =
new SimpleDateFormat("yyyyMMddHHmmss", java.util.Locale.getDefault()).format(new Date());
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES) + File.separator + "Sceneform/" + date + "_screenshot.jpg";
}
private void saveBitmapToDisk(Bitmap bitmap, String filename) throws IOException {
File out = new File(filename);
if (!out.getParentFile().exists()) {
out.getParentFile().mkdirs();
}
try (FileOutputStream outputStream = new FileOutputStream(filename);
ByteArrayOutputStream outputData = new ByteArrayOutputStream()) {
bitmap.compress(Bitmap.CompressFormat.PNG, 100, outputData);
outputData.writeTo(outputStream);
outputStream.flush();
outputStream.close();
} catch (IOException ex) {
throw new IOException("Failed to save bitmap to disk", ex);
}
}
private void takePhoto() {
final String filename = generateFilename();
/*ArSceneView view = fragment.getArSceneView();*/
mSurfaceView = findViewById(R.id.surfaceview);
// Create a bitmap the size of the scene view.
final Bitmap bitmap = Bitmap.createBitmap(mSurfaceView.getWidth(), mSurfaceView.getHeight(),
Bitmap.Config.ARGB_8888);
// Create a handler thread to offload the processing of the image.
final HandlerThread handlerThread = new HandlerThread("PixelCopier");
handlerThread.start();
// Make the request to copy.
PixelCopy.request(mSurfaceView, bitmap, (copyResult) -> {
if (copyResult == PixelCopy.SUCCESS) {
try {
saveBitmapToDisk(bitmap, filename);
} catch (IOException e) {
Toast toast = Toast.makeText(DrawAR.this, e.toString(),
Toast.LENGTH_LONG);
toast.show();
return;
}
Snackbar snackbar = Snackbar.make(findViewById(android.R.id.content),
"Photo saved", Snackbar.LENGTH_LONG);
snackbar.setAction("Open in Photos", v -> {
File photoFile = new File(filename);
Uri photoURI = FileProvider.getUriForFile(DrawAR.this,
DrawAR.this.getPackageName() + ".ar.codelab.name.provider",
photoFile);
Intent intent = new Intent(Intent.ACTION_VIEW, photoURI);
intent.setDataAndType(photoURI, "image/*");
intent.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION);
startActivity(intent);
});
snackbar.show();
} else {
Log.d("DrawAR", "Failed to copyPixels: " + copyResult);
Toast toast = Toast.makeText(DrawAR.this,
"Failed to copyPixels: " + copyResult, Toast.LENGTH_LONG);
toast.show();
}
handlerThread.quitSafely();
}, new Handler(handlerThread.getLooper()));
}
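One caveat (not in the original answer): PixelCopy.request(...) with a SurfaceView source is only available from API 24 (Android N), so guard the call on older devices, e.g.:
// PixelCopy for SurfaceView requires API 24+.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
    takePhoto();
} else {
    Toast.makeText(this, "Taking photos requires Android N or newer", Toast.LENGTH_SHORT).show();
}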

CropImageView Transaction too large Exception

I am having trouble with a TransactionTooLargeException related to the CropImageView. Any time I use the CropImageView to crop an image and then use an intent to change the activity, it throws a TransactionTooLargeException.
I read that it may be because I am passing too much data from one activity to another, but even when I removed the part of my code that passes a byte array in the intent, it still crashed with a TransactionTooLargeException.
Here is my code:
public class TestActivity extends AppCompatActivity {
private CropImageView cropImageView;
private Button cropImage;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_test);
cropImage = (Button) findViewById(R.id.cropImage);
cropImageView = (CropImageView) findViewById(R.id.cropImageView);
Intent intent = getIntent();
byte[] bite = intent.getExtras().getByteArray("imageForCropping");
Bitmap bitmap = getPhoto(bite);
cropImageView.setImageBitmap(bitmap);
cropImage.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Bitmap bit = cropImageView.getCroppedImage();
byte[] byteArray = getBytes(bit);
Intent intent = new Intent(TestActivity.this, HomeScreenActivity.class);
intent.putExtra("croppedImage", byteArray);
TestActivity.this.startActivity(intent);
}
});
}
public static Bitmap getPhoto(byte[] image) {
return BitmapFactory.decodeByteArray(image, 0, image.length );
}
public static byte[] getBytes(Bitmap image) {
ByteArrayOutputStream stream = new ByteArrayOutputStream();
image.compress(Bitmap.CompressFormat.PNG, 100, stream);
return stream.toByteArray();
}
}
I have tried to avoid using this intent to pass the byte array by using a SQLite database instead, but it still crashes with a TransactionTooLargeException. This led me to believe that the CropImageView is causing the problem.
Please let me know if you know what the problem could be.
Thanks in advance!
It's a bug in the CropImageView: it attempts to save the image as part of its own view state when onSaveInstanceState() is called.
The solution is to save the image out to a file and call setImageBitmap(null) on the crop image view before calling super.onSaveInstanceState(), then restore the bitmap from the file when needed.
EDIT: Okay, I'll share my code on this, but it's really, really hacky. I was just happy that it worked. Also, it's from a camera, so I took the photo and got the bitmap as a byte array.
Please proceed with caution.
@Override
public void onResume() {
if(cameraPresenter != null) {
cameraPresenter.onResume();
}
if(takenBitmap != null && !takenBitmap.isRecycled()) {
takenPhotoDisplay.setImageBitmap(takenBitmap);
}
}
@Override
public void onViewDestroyed(boolean removedByFlow) {
takenPhotoDisplay.setImageBitmap(null);
}
@Override // called before `Activity super.onSaveInstanceState()`
public void preSaveViewState(Bundle bundle) {
takenPhotoDisplay.setImageBitmap(null);
}
private byte[] takenPhoto;
private Bitmap takenBitmap;
private void restoreBitmap() {
try {
takenPhoto = readByteArrayFromFile("TEMP");
} catch(FileNotFoundException e) {
Log.e(TAG, "No bitmap found to restore");
} catch(IOException e) {
Log.e(TAG, "Could not read bitmap");
}
}
private byte[] readByteArrayFromFile(String fileName)
throws IOException {
Context context = CustomApplication.get();
byte[] result = null;
FileInputStream fileInputStream = null;
try {
fileInputStream = context.openFileInput(fileName);
result = IOUtils.toByteArray(fileInputStream);
} catch(FileNotFoundException e) {
Log.e(TAG, "The file was not found [" + fileName + "]", e);
throw e;
} catch(IOException e) {
Log.e(TAG, "Unable to read data from stream [" + fileName + "]", e);
throw e;
} finally {
if(fileInputStream != null) {
try {
fileInputStream.close();
} catch(IOException e) {
Log.e(TAG, "Could not close.");
}
}
}
return result;
}
private void preserveBitmap() {
if(takenPhoto != null) {
FileOutputStream fileOutputStream = null;
try {
fileOutputStream = CustomApplication.get().openFileOutput("TEMP", Context.MODE_PRIVATE);
fileOutputStream.write(takenPhoto);
} catch(FileNotFoundException e) {
Log.e(TAG, "Could not open temp");
} catch(IOException e) {
Log.e(TAG, "Could not write temp");
} finally {
if(fileOutputStream != null) {
try {
fileOutputStream.close();
} catch(IOException e) {
Log.e(TAG, "Nobody cares!");
}
}
}
}
}
private void destroyCurrentBitmap() {
if(takenBitmap != null) {
takenPhotoDisplay.setImageBitmap(null);
takenBitmap.recycle();
takenBitmap = null;
takenPhoto = null;
Runtime.getRuntime().gc();
}
}
private void handleTakenPhoto(byte[] data) {
CameraView.this.takenPhoto = data;
if(takenPhoto != null) {
BitmapFactory.Options options = new BitmapFactory.Options();
options.inPreferredConfig = Bitmap.Config.RGB_565;
Bitmap bm = BitmapFactory.decodeByteArray(takenPhoto, 0, takenPhoto.length, options);
Log.d(TAG, "The Bitmap has width [" + bm.getWidth() + "] and height [" + bm.getHeight() + "]");
// Setting post rotate to 90
Matrix mtx = new Matrix();
Log.i(TAG, "CURRENT ROTATION ANGLE: [" + getRotationAngle() + "]");
// Rotating Bitmap
mtx.postRotate(getRotationAngle());
// Flipping bitmap
android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
android.hardware.Camera.getCameraInfo(getActiveCameraId(), info);
if(info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
mtx.postScale(-1, 1, bm.getWidth() / 2, bm.getHeight() / 2);
}
Runtime.getRuntime().gc();
Bitmap newBitmap = Bitmap.createBitmap(bm, 0, 0, bm.getWidth(), bm.getHeight(), mtx, true);
if(bm != newBitmap) {
bm.recycle();
bm = newBitmap; //yes, this is on purpose!
Runtime.getRuntime().gc();
} else {
Log.i(TAG, "The created bitmap is the same as the previous one.");
}
bm = newBitmap; //yes, this is on purpose!
takenBitmap = bm;
if(takenBitmap != null && !takenBitmap.isRecycled()) {
takenPhotoDisplay.setImageBitmap(takenBitmap);
} else {
Log.w(TAG, "The bitmap in [takenBitmap] is recycled!");
takenPhotoDisplay.setImageBitmap(null);
}
} else {
Log.w(TAG, "The taken photo is NULL.");
}
}
//IMAGE ROTATION FIX
public int getRotationAngle(int cameraId) {
android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
android.hardware.Camera.getCameraInfo(cameraId, info);
Activity activity = ActivityUtils.getActivity(getContext());
WindowManager windowManager = activity.getWindowManager();
Display defaultDisplay = windowManager.getDefaultDisplay();
int rotation = defaultDisplay.getRotation();
int degrees = 0;
switch(rotation) {
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
}
int result;
//if(info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
// result = (info.orientation + degrees) % 360;
// result = (360 - result) % 360; // compensate the mirror
//} else { // back-facing
// result = (info.orientation - degrees + 360) % 360;
//}
if(info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
if(getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE) {
result = (info.orientation - degrees + 540) % 360;
} else {
result = (info.orientation - degrees + 360) % 360;
}
} else {
result = (info.orientation - degrees + 360) % 360;
}
return result;
}
Do not pass the bytes of an image in an intent (or anywhere like that). Save the image to disk, pass only the URI or file path, and read the image back from disk.
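A minimal sketch of that approach (the file name and extra key are hypothetical):
// In TestActivity's click listener: write the crop to a file and pass only the path.
File file = new File(getCacheDir(), "cropped.png");
try (FileOutputStream fos = new FileOutputStream(file)) {
    cropImageView.getCroppedImage().compress(Bitmap.CompressFormat.PNG, 100, fos);
} catch (IOException e) {
    e.printStackTrace();
}
Intent intent = new Intent(TestActivity.this, HomeScreenActivity.class);
intent.putExtra("croppedImagePath", file.getAbsolutePath());
startActivity(intent);
// In HomeScreenActivity: read the image back from disk.
String path = getIntent().getStringExtra("croppedImagePath");
Bitmap cropped = BitmapFactory.decodeFile(path);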

Bitmap Image Saved to Internal Storage is Corrupt

I want to design an app that generates a QR code and gives the user the possibility to save the generated image to their internal storage only. I successfully generate the bitmap and save it as a .png image, but when I try to open it from the gallery it appears broken or corrupt.
Below is the code to generate the bitmap and display it on an ImageView (qrCode):
bitmap = encodeAsBitmap(value);
qrCode.setImageBitmap(bitmap);
Bitmap encodeAsBitmap(String str) throws WriterException {
BitMatrix result;
try {
result = new MultiFormatWriter().encode(str,
BarcodeFormat.QR_CODE, WIDTH, WIDTH, null);
} catch (IllegalArgumentException iae) {
// Unsupported format
return null;
}
int w = result.getWidth();
int h = result.getHeight();
int[] pixels = new int[w * h];
for (int y = 0; y < h; y++) {
int offset = y * w;
for (int x = 0; x < w; x++) {
pixels[offset + x] = result.get(x, y) ? getResources().getColor(R.color.colorBlack) :
getResources().getColor(R.color.colorWhite);
}
}
Bitmap bitmap = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888);
bitmap.setPixels(pixels, 0, w, 0, 0, w, h); // the stride must be the bitmap width, not a hard-coded 500
return bitmap;
}
It works perfectly up to this point. The user can then click a button to save this image to the device's internal storage, using the methods below:
public void onClickSaveCode(View view) {
String title = getResources().getString(R.string.saved_image_title_prepend) + stringDate;
String format = getResources().getString(R.string.saved_image_format);
String directory = getResources().getString(R.string.saved_image_directory);
// Method call to save image
saveImageToInternalStorage(bitmap, directory, title, format);
}
public boolean saveImageToInternalStorage(Bitmap bitmap, String directory, String title, String format) {
ContextWrapper contextWrapper = new ContextWrapper(getApplicationContext());
File imageDirectory = contextWrapper.getDir(directory, Context.MODE_WORLD_READABLE);
File path = new File(imageDirectory, title + format);
try {
FileOutputStream fos = new FileOutputStream(path);
// Use the compress method on the Bitmap object to write image to the OutputStream
bitmap.compress(Bitmap.CompressFormat.PNG, QUALITY, fos);
fos.close();
new SingleMediaScanner(this, path);
Toast.makeText(this, getString(R.string.save_success), Toast.LENGTH_LONG).show();
return true;
} catch (Exception e) {
e.printStackTrace();
Toast.makeText(this, getString(R.string.save_failure), Toast.LENGTH_LONG).show();
return false;
}
}
And finally, below is the MediaScannerConnection wrapper that scans the saved image so it can appear in the gallery:
public class SingleMediaScanner implements MediaScannerConnectionClient {
private MediaScannerConnection mSC;
private File file;
public SingleMediaScanner(Context context, File f) {
file = f;
mSC = new MediaScannerConnection(context, this);
mSC.connect();
}
@Override
public void onMediaScannerConnected() {
mSC.scanFile(file.getAbsolutePath(), null);
}
@Override
public void onScanCompleted(String path, Uri uri) {
mSC.disconnect();
}
}
The images are saved, yet they appear in the gallery as broken files.
Any help will be greatly appreciated.
string path = Android.OS.Environment.ExternalStorageDirectory.AbsolutePath;
string filePath = System.IO.Path.Combine(path, "compressed.png");
//Bitmap bmp = ((BitmapDrawable)imgV.Drawable).Bitmap;
Bitmap b = newBitmap;
FileStream ms = new FileStream(filePath, FileMode.Create);
//FileOutputStream fos = new FileOutputStream(filePath,true);
await b.CompressAsync(Bitmap.CompressFormat.Png, 100, ms);
ms.Close();
//ByteArrayOutputStream opstream = new ByteArrayOutputStream();
//b.Compress(Bitmap.CompressFormat.Png, 100, opstream);
//byte[] bytArray = opstream.ToByteArray();
Toast.MakeText(Application.Context, "Compressed : " , ToastLength.Short).Show();
imgCompress.SetImageBitmap(b);
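The snippet above is C# (Xamarin). A rough Java equivalent of the same idea, writing to shared external storage rather than app-private storage so the media scanner can pick the file up, might be:
// Save the QR bitmap to shared external storage (bitmap is the generated QR code).
File file = new File(Environment.getExternalStorageDirectory(), "compressed.png");
try (FileOutputStream fos = new FileOutputStream(file)) {
    bitmap.compress(Bitmap.CompressFormat.PNG, 100, fos);
    new SingleMediaScanner(this, file); // reuse the scanner class from the question
} catch (IOException e) {
    e.printStackTrace();
}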

Loading image from cache using BitmapFactory.decodeStream() in Android

UPDATE: Even when I don't retrieve the images from the cache and instead load them as Drawable resources (I stored all 18 images in the "drawable-mdpi" folder), a blank screen is still displayed.
I was able to retrieve images from the server and save each image (.GIF) into the cache. However, when I load such an image from the cache, it doesn't show up on screen. Here is the code that does the work:
File cacheDir = context.getCacheDir();
File cacheMap = new File(cacheDir, smallMapImageNames.get(i).toString());
if(cacheMap.exists()){
FileInputStream fis = null;
try {
fis = new FileInputStream(cacheMap);
Bitmap local = BitmapFactory.decodeStream(fis);
puzzle.add(local);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
}else{
Drawable smallMap = LoadImageFromWebOperations(mapPiecesURL.get(i).toString());
if(i==0){
height1 = smallMap.getIntrinsicHeight();
width1 = smallMap.getIntrinsicWidth();
}
if (smallMap instanceof BitmapDrawable) {
Bitmap bitmap = ((BitmapDrawable)smallMap).getBitmap();
FileOutputStream fos = null;
try {
cacheMap.createNewFile();
fos = new FileOutputStream(cacheMap);
bitmap.compress(Bitmap.CompressFormat.PNG, 100, fos);
fos.flush();
fos.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
puzzle.add(bitmap);
}
}
An ArrayList stores the image names: smallMapImageNames (the image names can also be found in the URLs).
An ArrayList stores the URLs of the images: mapPiecesURL.
To sum it up, I have two questions:
1) How do I load images from the cache?
2) Regarding bitmap.compress(): the images from the server are in .GIF format but I apply Bitmap.CompressFormat.PNG. Is there going to be any problem with this?
Can anyone please help me with this?
Here are the two functions:
private Bitmap getBitMap(Context context) {
// TODO Auto-generated method stub
WifiPositioningServices wifiPositioningServices = new WifiPositioningServices();
String[] mapURLandCalibratedPoint1 = wifiPositioningServices.GetMapURLandCalibratedPoint("ERLab-1_1.GIF","ERLab"); //list of map pieces url in the first 9 pieces
String[] mapURLandCalibratedPoint2 = wifiPositioningServices.GetMapURLandCalibratedPoint("ERLab-4_1.GIF","ERLab"); //list of map pieces url in the last 9 pieces
ArrayList<String> smallMapImageNames = new ArrayList<String>();
ArrayList<String> mapPiecesURL = new ArrayList<String>();
for(int i=0; i<mapURLandCalibratedPoint1.length; i++){
if(mapURLandCalibratedPoint1[i].length()>40){ //image url
int len = mapURLandCalibratedPoint1[i].length();
int subStrLen = len-13;
smallMapImageNames.add(mapURLandCalibratedPoint1[i].substring(subStrLen, len-3)+"JPEG");
mapPiecesURL.add(mapURLandCalibratedPoint1[i]);
}
else{
//perform other task
}
}
for(int i=0; i<mapURLandCalibratedPoint2.length; i++){
if(mapURLandCalibratedPoint2[i].length()>40){ //image url
int len = mapURLandCalibratedPoint2[i].length();
int subStrLen = len-13;
smallMapImageNames.add(mapURLandCalibratedPoint2[i].substring(subStrLen, len-3)+"JPEG");
mapPiecesURL.add(mapURLandCalibratedPoint2[i]);
}
else{
//perform other task
}
}
Bitmap result = Bitmap.createBitmap(1029, 617, Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(result);
ArrayList<Bitmap> puzzle = new ArrayList<Bitmap>();
int height1 = 0 ;
int width1 = 0;
File cacheDir = context.getCacheDir();
for(int i=0; i<18; i++){
File cacheMap = new File(cacheDir, smallMapImageNames.get(i).toString());
if(cacheMap.exists()){
//retrieved from cached
try {
FileInputStream fis = new FileInputStream(cacheMap);
Bitmap bitmap = BitmapFactory.decodeStream(fis);
puzzle.add(bitmap);
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}else{
//retrieve from server and cached it
Drawable smallMap = LoadImageFromWebOperations(mapPiecesURL.get(i).toString());
if(i==0){
height1 = smallMap.getIntrinsicHeight();
width1 = smallMap.getIntrinsicWidth();
}
if (smallMap instanceof BitmapDrawable) {
Bitmap bitmap = ((BitmapDrawable)smallMap).getBitmap();
FileOutputStream fos = null;
try {
cacheMap.createNewFile();
fos = new FileOutputStream(cacheMap);
bitmap.compress(CompressFormat.JPEG, 100, fos);
fos.flush();
fos.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
puzzle.add(bitmap);
}
}
}
Rect srcRect;
Rect dstRect;
int cnt =0;
for (int j = 0; j < 3; j++) {
int newHeight = height1 * (j % 3);
for (int k = 0; k < 3; k++) {
if (j == 0 && k == 0) {
srcRect = new Rect(0, 0, width1, height1);
dstRect = new Rect(srcRect);
} else {
int newWidth = width1 * k;
srcRect = new Rect(0, 0, width1, height1);
dstRect = new Rect(srcRect);
dstRect.offset(newWidth, newHeight);
}
canvas.drawBitmap(puzzle.get(cnt), srcRect, dstRect,null);
cnt++;
}
}
for(int a=0; a<3; a++){
int newHeight = height1 * (a % 3);
for (int k = 3; k < 6; k++) {
if (a == 0 && k == 0) {
srcRect = new Rect(0, 0, width1*3, height1);
dstRect = new Rect(srcRect);
} else {
int newWidth = width1 * k;
srcRect = new Rect(0, 0, width1, height1);
dstRect = new Rect(srcRect);
dstRect.offset(newWidth, newHeight);
}
canvas.drawBitmap(puzzle.get(cnt), srcRect, dstRect,
null);
cnt++;
}
}
return result;
}
private Drawable LoadImageFromWebOperations(String url) {
// TODO Auto-generated method stub
try
{
InputStream is = (InputStream) new URL(url).getContent();
Drawable d = Drawable.createFromStream(is, "src name");
return d;
}catch (Exception e) {
System.out.println("Exc="+e);
return null;
}
}
I am actually trying to display 18 pieces (3×6) of images to form a floorplan, so I use two for-loops to draw them. The two .GIF images, ERLab-1_1.GIF and ERLab-4_1.GIF, are the center pieces of each group. For example, the first row would be ERLab-0_0.GIF, ERLab-1_0.GIF, ERLab-2_0.GIF, ERLab-3_0.GIF, ERLab-4_0.GIF, ERLab-5_0.GIF; the second row would be XXX-X_1.GIF, and the third row XXX-X_2.GIF.
Lastly,
Bitmap resultMap = getBitMap(this.getContext());
bmLargeImage = Bitmap.createBitmap(1029 , 617, Bitmap.Config.ARGB_8888);
bmLargeImage = resultMap;
Then in the onDraw function would be drawing the image onto the canvas.
I just solved my own question.
In the line canvas.drawBitmap(puzzle.get(cnt), srcRect, dstRect, null); within each of the for-loops that I use to draw the bitmaps onto the canvas, I needed to cast each item retrieved from the ArrayList (puzzle) to Bitmap. Only then does the image get displayed.
I thought that with the ArrayList defined as ArrayList<Bitmap> puzzle = new ArrayList<Bitmap>(), each item in the list would already be of type Bitmap. But isn't that always true?
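Concretely, the fix described above changes the draw call to:
canvas.drawBitmap((Bitmap) puzzle.get(cnt), srcRect, dstRect, null);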
