Saving ZBar barcode as image - Android

I want to use a barcode scanner in my application and I am using the ZBar library.
I can scan barcodes, but I also want to save the scanned image to the SD card.
So far I am able to capture the image and save it to the SD card, but when I try to open it I get a broken-image error and cannot display it.
What I am using is:
private final Camera.PreviewCallback saveImage = new Camera.PreviewCallback()
{
@Override
public void onPreviewFrame(byte[] data, Camera camera)
{
mCamera.setPreviewCallback(null);
String path = Environment.getExternalStorageDirectory() + "/DCIM/mcs_" + timeStamp + ".jpg";
FileOutputStream fos = null;
try
{
fos = new FileOutputStream(path);
fos.write(data);
fos.close();
}
catch(Exception e)
{
}
}
};
PreviewCallback previewCb = new PreviewCallback()
{
public void onPreviewFrame(byte[] data, Camera camera)
{
Camera.Parameters parameters = camera.getParameters();
Size size = parameters.getPreviewSize();
Image barcode = new Image(size.width, size.height);
barcode.setData(data);
barcode = barcode.convert("Y800");
int result = scanner.scanImage(barcode);
if (result != 0)
{
mCamera.setPreviewCallback(saveImage);
mCamera.stopPreview();
SymbolSet syms = scanner.getResults();
for (Symbol sym : syms)
{
Intent intent = new Intent(getApplicationContext(), ScanCodeResult.class);
intent.putExtra("timeStamp", timeStamp);
intent.putExtra("result", sym.getData().toString());
//startActivity(intent);
break;
}
}
}
};
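The data buffer handed to onPreviewFrame is a raw YUV preview frame (typically NV21), not a JPEG, so writing it straight to a .jpg file produces an unreadable image. The frame has to be compressed first; the snippet below does that conversion with YuvImage before handing the bitmap off for saving: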

@Override
public void onPreviewFrame(byte[] data, Camera camera)
{
Size size = camera.getParameters().getPreviewSize(); // get the preview size
final int w = size.width; // width
final int h = size.height;
final YuvImage image = new YuvImage(data, ImageFormat.NV21, w, h, null);
ByteArrayOutputStream os = new ByteArrayOutputStream(data.length);
if(!image.compressToJpeg(new Rect(0, 0, w, h), 100, os)){
return;
}
byte[] tmp = os.toByteArray();
Bitmap bmp = BitmapFactory.decodeByteArray(tmp, 0,tmp.length);
FileHelper fileHelper = new FileHelper();
fileHelper.storeInSD(bmp);
}
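FileHelper is not part of the Android SDK and is not shown here; a hypothetical storeInSD, reusing the DCIM path from the first snippet, might look roughly like this:
// Hypothetical helper (assumption): compresses the bitmap and writes it under DCIM.
public class FileHelper {
    public void storeInSD(Bitmap bmp) {
        String path = Environment.getExternalStorageDirectory() + "/DCIM/mcs_" + System.currentTimeMillis() + ".jpg";
        FileOutputStream fos = null;
        try {
            fos = new FileOutputStream(path);
            bmp.compress(Bitmap.CompressFormat.JPEG, 100, fos);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            if (fos != null) {
                try { fos.close(); } catch (IOException ignored) {}
            }
        }
    }
}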

Related

Camera Preview Android

I'm developing an Android app that has a camera preview activity. It calls takePicture() every 2 seconds using a timer and does some processing on the captured image in PictureCallback. From the Android documentation, I learned that PictureCallback happens in the same thread as Camera.open().
Also, it's recommended to call takePicture() in a separate thread. What's the best way to call startPreview() after an image is captured?
I want the processing for each capture to happen on a separate thread while the camera preview continues in the main UI thread. What's the best way to implement this using AsyncTask?
public class CameraActivity extends AppCompatActivity{
public static final int MEDIA_TYPE_IMAGE = 1;
public static final int MEDIA_TYPE_VIDEO = 2;
public static String TAG = "Exception";
int viewWidth = 0;
int viewHeight = 0;
private Camera mCamera;
private CameraPreview mPreview;
private ImageView iv;
private RelativeLayout rl;
private Camera.PictureCallback mPicture;
private MRZ_OCR mrz = null;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_camera);
rl = (RelativeLayout) findViewById(R.id.rel_camera);
iv = (ImageView) findViewById(R.id.black_above);
viewWidth = iv.getWidth();
viewHeight = rl.getHeight() - 2 * iv.getHeight();
// Create an instance of Camera
mCamera = getCameraInstance();
mPreview = new CameraPreview(this, mCamera);
FrameLayout preview = (FrameLayout) findViewById(R.id.camera_preview);
preview.addView(mPreview);
new Timer().schedule(new TimerTask() {
@Override
public void run() {
mCamera.startPreview();
mrz = new MRZ_OCR();
mrz.execute();
}
}, 4000, 4000);
mPicture = new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
// Crop to get only MRZ
Bitmap bm = BitmapFactory.decodeByteArray(data, 0, data.length);
bm = Bitmap.createBitmap(bm, 0, pxFromDp(CameraActivity.this, 120), viewWidth, viewHeight);
//Verify if it has MRZ
bm = MRZ.getMRZ(bm);
if (bm != null) {
ByteArrayOutputStream stream = new ByteArrayOutputStream();
bm.compress(Bitmap.CompressFormat.PNG, 100, stream);
byte[] byteArray = stream.toByteArray();
createImageFile(byteArray);
Toast.makeText(getApplicationContext(), "Pic Saved", Toast.LENGTH_LONG).show();
}
}
};
}
@Override
protected void onPause() {
super.onPause();
releaseCamera(); // release the camera immediately on pause event
}
private void releaseCamera() {
if (mCamera != null) {
mCamera.release(); // release the camera for other applications
mCamera = null;
}
}
private class MRZ_OCR extends AsyncTask<Void, Void, Void>
{
private byte[] data;
@Override
protected Void doInBackground(Void... params) {
mCamera.takePicture(null, null, mPicture);
// Sleep for however long, you could store this in a variable and
// have it updated by a menu item which the user selects.
try {
Thread.sleep(3000); // 3 second preview
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return null;
}
@Override
protected void onPostExecute(Void result) {
// This returns the preview back to the live camera feed
mCamera.startPreview();
}
}
public static int pxFromDp(final Context context, final float dp) {
return (int) (dp * context.getResources().getDisplayMetrics().density);
}
/**
* A safe way to get an instance of the Camera object.
*/
public static Camera getCameraInstance() {
Camera c = null;
try {
c = Camera.open(); // attempt to get a Camera instance
} catch (Exception e) {
// Camera is not available (in use or does not exist)
}
return c; // returns null if camera is unavailable
}
private static File getOutputMediaFile(int type)
{
File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES), "MyCameraApp");
// Create the storage directory if it does not exist
if (!mediaStorageDir.exists()) {
if (!mediaStorageDir.mkdirs()) {
Log.d("MyCameraApp", "failed to create directory");
return null;
}
}
// Create a media file name
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
File mediaFile;
if (type == MEDIA_TYPE_IMAGE) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"IMG_" + timeStamp + ".jpg");
} else if (type == MEDIA_TYPE_VIDEO) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"VID_" + timeStamp + ".mp4");
} else {
return null;
}
return mediaFile;
}
private static void createImageFile(byte[] byteArray) {
//create empty image type file
File pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE);
if (pictureFile == null) {
Log.d(TAG, "Error creating media file, check storage permissions: ");
return;
}
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(byteArray);
fos.close();
} catch (FileNotFoundException e) {
Log.d(TAG, "File not found: " + e.getMessage());
} catch (IOException e) {
Log.d(TAG, "Error accessing file: " + e.getMessage());
}
}
}
I don't know about the takePicture() API, but I think what you need to do is put this code in a separate thread.
Bitmap bm = BitmapFactory.decodeByteArray(data, 0, data.length);
bm = Bitmap.createBitmap(bm, 0, pxFromDp(CameraActivity.this, 120), viewWidth, viewHeight);
//Verify if it has MRZ
bm = MRZ.getMRZ(bm);
if (bm != null) {
ByteArrayOutputStream stream = new ByteArrayOutputStream();
bm.compress(Bitmap.CompressFormat.PNG, 100, stream);
byte[] byteArray = stream.toByteArray();
createImageFile(byteArray);
}
Decoding the bitmap is a time-consuming operation, especially in your app where it is performed once every 2 seconds; it will block the main thread. As for why it's recommended to call takePicture() in a separate thread, I think it is for the same reason.
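A minimal sketch of that suggestion, written as an inner class of CameraActivity (like MRZ_OCR above) and reusing the question's own fields and helpers (viewWidth, viewHeight, pxFromDp, MRZ.getMRZ, createImageFile), so the decode/crop/OCR/save work runs off the main thread:
private class ProcessPictureTask extends AsyncTask<Void, Void, Void> {
    private final byte[] data;

    ProcessPictureTask(byte[] data) {
        this.data = data;
    }

    @Override
    protected Void doInBackground(Void... params) {
        // Decode, crop, run MRZ detection and save, all in the background
        Bitmap bm = BitmapFactory.decodeByteArray(data, 0, data.length);
        bm = Bitmap.createBitmap(bm, 0, pxFromDp(CameraActivity.this, 120), viewWidth, viewHeight);
        bm = MRZ.getMRZ(bm);
        if (bm != null) {
            ByteArrayOutputStream stream = new ByteArrayOutputStream();
            bm.compress(Bitmap.CompressFormat.PNG, 100, stream);
            createImageFile(stream.toByteArray());
        }
        return null;
    }
}
// In onPictureTaken(byte[] data, Camera camera):
// new ProcessPictureTask(data).execute();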
You already answered your question. Pass byte[] data to an AsyncTask:
private class PictureConverter extends AsyncTask<Void, Void, Void> {
private byte[] data;
private Camera camera;
public PictureConverter(byte[] _data, Camera _camera) {
data = _data;
camera = _camera;
}
@Override
protected Void doInBackground(Void... params) { // renamed from "data" so the byte[] field is not shadowed
Camera.Parameters parameters = camera.getParameters();
ByteArrayOutputStream out = new ByteArrayOutputStream();
YuvImage yuvImage = new YuvImage(data, parameters.getPreviewFormat(), parameters.getPreviewSize().width, parameters.getPreviewSize().height, null);
yuvImage.compressToJpeg(new Rect(0, 0, parameters.getPreviewSize().width, parameters.getPreviewSize().height), 90, out);
byte[] imageBytes = out.toByteArray();
Bitmap bitmap = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
try {
out.flush();
out.close();
} catch (IOException e) {
e.printStackTrace(); // flush/close declare IOException, so it must be handled here
}
//TODO save the image
return null;
}
@Override
protected void onProgressUpdate(Void... values) {
}
@Override
protected void onPostExecute(Void result) { // signatures must match AsyncTask's generic types to actually override
//TODO report that the image got saved
}
}
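Presumably the task is then started from the preview (or picture) callback, handing it the frame bytes:
// e.g. inside onPreviewFrame(byte[] data, Camera camera):
new PictureConverter(data, camera).execute();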

Android: mPrevCallback to JPG, results in black pictures

I am trying to capture the preview of the camera on a SurfaceView, to save it as a JPEG in the internal memory. I found some code here on this site that does mostly what I want, but it saves the image to the SD card. I changed that and came up with the following code.
Camera.PreviewCallback mPrevCallback = new Camera.PreviewCallback()
{
@Override
public void onPreviewFrame( byte[] data, Camera Cam ) {
//Log.d(TAG, "FRAME");
Camera.Parameters parameters = Cam.getParameters();
int format = parameters.getPreviewFormat();
//Log.d(TAG, "FORMAT:" + format);
//YUV formats require more conversion
if (format == ImageFormat.NV21 || format == ImageFormat.YUY2 || format == ImageFormat.NV16) {
int w = parameters.getPreviewSize().width;
int h = parameters.getPreviewSize().height;
// Get the YuV image
YuvImage yuv_image = new YuvImage(data, format, w, h, null);
// Convert YuV to Jpeg
Rect rect = new Rect(0, 0, w, h);
ByteArrayOutputStream output_stream = new ByteArrayOutputStream();
yuv_image.compressToJpeg(rect, 100, output_stream);
byte[] byt = output_stream.toByteArray();
FileOutputStream outStream = null;
try {
outStream = new FileOutputStream("/data/data/com.example.max.camtest/files/test"+System.currentTimeMillis()+".jpg");
outStream.write(byt);
outStream.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
}
}
}
};
The preview is shown on the SurfaceView and mPrevCallback is triggered. It successfully saves pictures of varying sizes (250~500 KB), but they are all black. When I try to capture a picture with the camera.takePicture function it is also black.
What am I doing wrong? How can I debug this?
Thanks!
Use this intent to take the picture:
Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
File f = new File(android.os.Environment.getExternalStorageDirectory(), AppInfo.getInstance().getCurrentLoginUserInfo().getId()+".jpg");
intent.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(f));
intent.putExtra("return-data", true);
startActivityForResult(intent, 1);
and in your onActivityResult. Note: in Bitmap bitmap = getScaledBitmap(uri.getPath(), 200, true); the 200 is your maximum image size.
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == 1)
{
String base = Environment.getExternalStorageDirectory().getAbsolutePath().toString();
final String imgPath = base + "/" + AppInfo.getInstance().getCurrentLoginUserInfo().getId() + ".jpg";
File file = new File(imgPath);
if (file.exists())
{
Uri uri = Uri.fromFile(file);
Log.d(TAG, "Image Uri path: " + uri.getPath());
Bitmap bitmap = getScaledBitmap(uri.getPath(), 200, true);
}
}
}
This method will return the image bitmap after resizing it:
private Bitmap getScaledBitmap(String imagePath, float maxImageSize, boolean filter) {
FileInputStream in;
BufferedInputStream buf;
try {
in = new FileInputStream(imagePath);
buf = new BufferedInputStream(in);
Bitmap realImage = BitmapFactory.decodeStream(buf);
float ratio = Math.min(
(float) maxImageSize / realImage.getWidth(),
(float) maxImageSize / realImage.getHeight());
int width = Math.round((float) ratio * realImage.getWidth());
int height = Math.round((float) ratio * realImage.getHeight());
Bitmap newBitmap = Bitmap.createScaledBitmap(realImage, width, height, filter);
return newBitmap;
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return null;
}
Now you have the scaled bitmap image.
Hope this helps.

Take picture from Android programmatically

I am creating a photo-editing app and trying to capture a photo with the Android camera. I use the following code in onPictureTaken:
@Override
public void onPictureTaken(byte[] data, Camera camera) {
BitmapFactory.Options opt;
opt = new BitmapFactory.Options();
opt.inTempStorage = new byte[16 * 1024];
Parameters parameters = camera.getParameters();
Size size = parameters.getPictureSize();
int height11 = size.height;
int width11 = size.width;
float mb = (width11 * height11) / 1024000;
if (mb > 4f)
opt.inSampleSize = 4;
else if (mb > 3f)
opt.inSampleSize = 2;
Bitmap bitmapPicture = null;
bitmapPicture = BitmapFactory.decodeByteArray(data, 0, data.length,opt);
bitmapPicture=Bitmap.createScaledBitmap(bitmapPicture,(int)CaptureImage.screen_width,(int)CaptureImage.screen_height,false);
if(((CaptureImage)context).cameraType==1){
Matrix matrix = new Matrix();
if(((CaptureImage)context).screenType==0)
matrix.postRotate(180);
bitmapPicture = Bitmap.createBitmap(bitmapPicture, 0, 0,
bitmapPicture.getWidth(), bitmapPicture.getHeight(), matrix,
true);
}
// write captured image in sd card
new File(Constants.IMAGES_PATH).mkdirs();
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyymmddhhmmss");
String date = dateFormat.format(new Date());
photoFile = "Picture_" + date + ".jpg";
File pictureFile = new File(Constants.IMAGES_PATH, photoFile);
imgUri = Uri.fromFile(pictureFile);
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
bitmapPicture.compress(Bitmap.CompressFormat.JPEG, 100, fos);
fos.write(data);
fos.close();
ExifInterface exif = new ExifInterface(pictureFile.getAbsolutePath());
if(((CaptureImage)context).screenType==0){
if(((CaptureImage)context).cameraType==0)
exif.setAttribute(ExifInterface.TAG_ORIENTATION,""+ExifInterface.ORIENTATION_ROTATE_90);
if(((CaptureImage)context).cameraType==1)
exif.setAttribute(ExifInterface.TAG_ORIENTATION,""+ExifInterface.ORIENTATION_ROTATE_90);
}
else{
//if(((CaptureImage)context).cameraType==0)
// exif.setAttribute(ExifInterface.TAG_ORIENTATION,""+ExifInterface.ORIENTATION_ROTATE_270);
// if(((CaptureImage)context).cameraType==1)
//exif.setAttribute(ExifInterface.TAG_ORIENTATION,""+ExifInterface.ORIENTATION_ROTATE_180);
}
exif.saveAttributes();
} catch (IOException exception) {
Log.v("Error", exception.getMessage());
}
Intent intent = new Intent(context, Home.class);
intent.putExtra(Constants.CAPTURED_IMAGE,imgUri);
context.startActivity(intent);
((Activity) context).finish();
}
The above code works on all phones, but on the Nexus 4 when I use this code the image does not get saved and a blank screen is shown.

Picture Callback (doesn't enter its loop)

I am creating a program that calls the takePicture function twice, like this:
mCamera.takePicture(null, null, mPicture);
But it only enters the PictureCallback once:
private PictureCallback mPicture = new PictureCallback(){
// Bitmap readyToGo;
@Override
public void onPictureTaken(byte[] data, Camera camera){
Log.d(TAG, "entered the picture callback");
//-----OUT OF MEMORY ERROR
pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE);
mPreview.setDrawingCacheEnabled(true);
mPreview.setDrawingCacheQuality(View.DRAWING_CACHE_QUALITY_AUTO);
BitmapFactory.Options options = new BitmapFactory.Options();
//options.inPurgeable = true;
//options.inInputShareable = true;
options.inJustDecodeBounds = true;
options.inSampleSize = 5;
options.inJustDecodeBounds = false;
Bitmap bitmap = mPreview.getDrawingCache();
//---------------------------------------------------
bmp = BitmapFactory.decodeByteArray(data, 0, data.length);
//combine the two bitmaps!!!!-------------
Log.d("main", "before overlay");
combination = overlay(bmp, bitmap);
Log.d("main", "after overlay");
//------------------ROTATION---------------------
if(pictureFile == null)
{
Log.d(TAG, "Error creating media file, check storage permissions.");
return;
}
try
{
ExifInterface exif = new ExifInterface(pictureFile.getAbsolutePath()); //Since API Level 5
String exifOrientation = exif.getAttribute(ExifInterface.TAG_ORIENTATION);
Log.d("main", "exif orientation= "+exifOrientation);
FileOutputStream fos = new FileOutputStream(pictureFile);
Log.d(TAG, "ALO!!!");
combination.compress(CompressFormat.JPEG, 100, fos); // replaced bitmap with combination
fos.flush();
fos.close();
//------------------------------
clearBitmap(combination);
clearBitmap(bmp);
//------------------------------
}
catch(FileNotFoundException e)
{
Log.d(TAG, "File not found: "+e.getMessage());
}
catch(IOException e)
{
Log.d(TAG, "Error accessing file: "+e.getMessage());
}
}
};
Does anybody know what happens? Can I call it twice?
Make the thread pause for 2 seconds and then call takePicture() again,
or you can use your code above with this one-shot preview callback:
public void takeSnapPhoto() {
camera.setOneShotPreviewCallback(new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
Camera.Parameters parameters = camera.getParameters();
int format = parameters.getPreviewFormat();
//YUV formats require more conversion
if (format == ImageFormat.NV21 || format == ImageFormat.YUY2 || format == ImageFormat.NV16) {
int w = parameters.getPreviewSize().width;
int h = parameters.getPreviewSize().height;
// Get the YuV image
YuvImage yuv_image = new YuvImage(data, format, w, h, null);
// Convert YuV to Jpeg
Rect rect = new Rect(0, 0, w, h);
ByteArrayOutputStream output_stream = new ByteArrayOutputStream();
yuv_image.compressToJpeg(rect, 100, output_stream);
byte[] byt = output_stream.toByteArray();
FileOutputStream outStream = null;
try {
// Write to SD Card
File file = createFileInSDCard(FOLDER_PATH, "Image_"+System.currentTimeMillis()+".jpg");
//Uri uriSavedImage = Uri.fromFile(file);
outStream = new FileOutputStream(file);
outStream.write(byt);
outStream.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
}
}
}
}); }
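createFileInSDCard and FOLDER_PATH are not Android APIs; they are helpers this snippet assumes. A hypothetical implementation might look like this:
// Hypothetical helper (assumption): ensures the folder exists on external storage
// and returns a File inside it for the FileOutputStream to write to.
private File createFileInSDCard(String folderPath, String fileName) throws IOException {
    File dir = new File(Environment.getExternalStorageDirectory(), folderPath);
    if (!dir.exists() && !dir.mkdirs()) {
        throw new IOException("Could not create directory " + dir.getAbsolutePath());
    }
    return new File(dir, fileName);
}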

How can I take a picture with a .png image on it

I've created a camera application which shows an image on the camera preview. This image is a hat, for example, and I want to take a photo of a person with the hat image over his head. This is what I've done:
mFrameLayout = new FrameLayout(this);
mPreview = new Preview(this);
mFrameLayout.addView(mPreview);
mImage = new ImageView(this);
FrameLayout.LayoutParams layoutParams = new FrameLayout.LayoutParams(100, 100);
layoutParams.leftMargin = 200;
layoutParams.topMargin = 100;
mImage.setLayoutParams(layoutParams);
mImage.setImageResource(R.drawable.gorro);
mFrameLayout.addView(mImage);
mFrameLayout.setOnClickListener(this);
setContentView(mFrameLayout);
Preview Class:
class Preview extends ViewGroup implements SurfaceHolder.Callback {
private final String TAG = "Preview";
SurfaceView mSurfaceView;
SurfaceHolder mHolder;
Size mPreviewSize;
List<Size> mSupportedPreviewSizes;
Camera mCamera;
Preview(Context context) {
super(context);
mSurfaceView = new SurfaceView(context);
addView(mSurfaceView);
}
And this class has all the surface methods implemented.
When I touch anywhere on the camera preview, the photo is taken. Here's the code:
public void onClick(View arg0) {
Toast.makeText(this, "Taken!", Toast.LENGTH_LONG).show();
mCamera.takePicture(null, mPictureCallback, mPictureCallback);
}
Camera.PictureCallback mPictureCallback = new Camera.PictureCallback() {
public void onPictureTaken(byte[] imageData, Camera c) {
if (imageData != null) {
String root = Environment.getExternalStorageDirectory()
.toString();
File myDir = new File(root + "/saved_images2");
myDir.mkdirs();
Random generator = new Random();
int n = 10000;
n = generator.nextInt(n);
String fname = "Image-" + n + ".jpg";
File file = new File(myDir, fname);
if (file.exists())
file.delete();
try {
FileOutputStream out = new FileOutputStream(file);
out.write(imageData);
out.flush();
out.close();
Log.d("teste", "onPictureTaken - wrote bytes: "
+ imageData.length);
} catch (Exception e) {
e.printStackTrace();
}
}
}
};
The problem is that when I take a photo with the .png image, the photo is saved without it.
How can I also save the .png image when the photo is taken?
You have to convert your imageData to a Bitmap in the PictureCallback and then use a Canvas with its drawBitmap method to draw your hat bitmap onto the captured image from the camera. Finally, you can save the resulting bitmap to a file.
So:
Bitmap cameraBitmap = BitmapFactory.decodeByteArray(imageData, 0, imageData.length)
        .copy(Bitmap.Config.ARGB_8888, true); // decodeByteArray returns an immutable bitmap; Canvas needs a mutable one
Canvas canv = new Canvas(cameraBitmap);
canv.drawBitmap(.... your hat image);
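To complete the idea, a minimal sketch of the remaining steps, assuming the hat drawable is R.drawable.gorro from the question, that this runs inside the existing try block of onPictureTaken (so the file variable is available), and that the 200/100 draw offsets are only illustrative:
Bitmap hat = BitmapFactory.decodeResource(getResources(), R.drawable.gorro);
canv.drawBitmap(hat, 200, 100, null); // illustrative x/y offsets; match them to the ImageView position
FileOutputStream out = new FileOutputStream(file);
cameraBitmap.compress(Bitmap.CompressFormat.JPEG, 100, out); // save the combined image instead of the raw imageData
out.flush();
out.close();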
