How to merge multiple images into a single image like a matrix? - android

I am working on an application in which I have multiple images and want to combine them into one image. I use the code below, but the resulting single image only stacks the pictures vertically.
String sdPath = Environment.getExternalStorageDirectory().getPath() + "/MERGE/";
BitmapFactory.Options options;
Bitmap bitmap;
ArrayList<Bitmap> myBitmapList = new ArrayList<Bitmap>();
int vWidth = 0;
int vHeight = 0;
for (int i = 1; i <= 40; i++) {
    try {
        options = new BitmapFactory.Options();
        options.inPreferredConfig = Bitmap.Config.ARGB_8888;
        bitmap = BitmapFactory.decodeFile(sdPath + "img" + i + ".jpg", options);
        vWidth = bitmap.getWidth() > vWidth ? bitmap.getWidth() : vWidth;
        vHeight += bitmap.getHeight();
        myBitmapList.add(bitmap);
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        options = null;
        bitmap = null;
        System.gc();
        Runtime.getRuntime().totalMemory();
        Runtime.getRuntime().freeMemory();
    }
}
Bitmap vTargetBitmap = Bitmap.createBitmap(vWidth, vHeight, Bitmap.Config.ARGB_8888);
Canvas vCanvas = new Canvas(vTargetBitmap);
int vInsertY = 0;
for (int i = 1; i < 40; i++) {
    vCanvas.drawBitmap(myBitmapList.get(i), 0f, (float) vInsertY, null);
    vInsertY += myBitmapList.get(i).getHeight();
}
String tmpImg = String.valueOf(System.currentTimeMillis()) + ".png";
OutputStream os = null;
try {
    os = new FileOutputStream(Environment.getExternalStorageDirectory().getPath() + "/" + tmpImg);
    vTargetBitmap.compress(CompressFormat.PNG, 50, os);
} catch (IOException e) {
    Log.e("combineImages", "problem combining images", e);
}
Toast.makeText(this, "Done", Toast.LENGTH_LONG).show();
The actual output I want is a grid layout like the image below (screenshot of the desired matrix-style output).

You can draw each bitmap into a grid cell on a single canvas, for example:
private Bitmap mergeMultiple(ArrayList<Bitmap> parts) {
    // Assumes all parts have the same size; builds a 2-column grid.
    Bitmap result = Bitmap.createBitmap(parts.get(0).getWidth() * 2, parts.get(0).getHeight() * 2, Bitmap.Config.ARGB_8888);
    Canvas canvas = new Canvas(result);
    Paint paint = new Paint();
    for (int i = 0; i < parts.size(); i++) {
        // Column = i % 2, row = i / 2.
        canvas.drawBitmap(parts.get(i), parts.get(i).getWidth() * (i % 2), parts.get(i).getHeight() * (i / 2), paint);
    }
    return result;
}
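For reference, here is a minimal sketch of how the same idea could be generalized to an arbitrary number of columns. It assumes all bitmaps share the same dimensions, and mergeIntoGrid is a hypothetical helper name, not part of the original answer:
private Bitmap mergeIntoGrid(ArrayList<Bitmap> parts, int columns) {
    // Hypothetical generalization of mergeMultiple(): lay out `parts` in a grid
    // with `columns` columns. Assumes every bitmap has the same width and height.
    int cellWidth = parts.get(0).getWidth();
    int cellHeight = parts.get(0).getHeight();
    int rows = (parts.size() + columns - 1) / columns; // round up to fit all parts
    Bitmap result = Bitmap.createBitmap(cellWidth * columns, cellHeight * rows, Bitmap.Config.ARGB_8888);
    Canvas canvas = new Canvas(result);
    Paint paint = new Paint();
    for (int i = 0; i < parts.size(); i++) {
        float left = cellWidth * (i % columns);
        float top = cellHeight * (i / columns);
        canvas.drawBitmap(parts.get(i), left, top, paint);
    }
    return result;
}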

Related

How to convert string to Barcode

How can I generate a barcode in Android?
I have done it with the library com.google.zxing:core:3.2.1, but it can only create QR codes, not barcodes. I have tried to modify the code like this:
Bitmap TextToImageEncode(String Value) throws WriterException {
    BitMatrix bitMatrix;
    try {
        bitMatrix = new MultiFormatWriter().encode(
                Value,
                BarcodeFormat.DATA_MATRIX.QR_CODE,
                QRcodeWidth, QRcodeWidth, null
        );
    } catch (IllegalArgumentException Illegalargumentexception) {
        return null;
    }
    int bitMatrixWidth = bitMatrix.getWidth();
    int bitMatrixHeight = bitMatrix.getHeight();
    int[] pixels = new int[bitMatrixWidth * bitMatrixHeight];
    for (int y = 0; y < bitMatrixHeight; y++) {
        int offset = y * bitMatrixWidth;
        for (int x = 0; x < bitMatrixWidth; x++) {
            pixels[offset + x] = bitMatrix.get(x, y)
                    ? getResources().getColor(R.color.black)
                    : getResources().getColor(R.color.white);
        }
    }
    Bitmap bitmap = Bitmap.createBitmap(bitMatrixWidth, bitMatrixHeight, Bitmap.Config.ARGB_4444);
    bitmap.setPixels(pixels, 0, 500, 0, 0, bitMatrixWidth, bitMatrixHeight);
    return bitmap;
}
How can I make it generate a barcode? Please let me know.
Thanks
Try this
compile 'com.google.zxing:core:3.2.1'
compile 'com.journeyapps:zxing-android-embedded:3.2.0@aar'
String text = ""; // Whatever you need to encode in the QR code
MultiFormatWriter multiFormatWriter = new MultiFormatWriter();
try {
    BitMatrix bitMatrix = multiFormatWriter.encode(text, BarcodeFormat.QR_CODE, 200, 200);
    BarcodeEncoder barcodeEncoder = new BarcodeEncoder();
    Bitmap bitmap = barcodeEncoder.createBitmap(bitMatrix);
    imageView.setImageBitmap(bitmap);
} catch (WriterException e) {
    e.printStackTrace();
}
UPDATE
If this does not help, try this library:
https://github.com/journeyapps/zxing-android-embedded
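Since the question asks for a barcode rather than a QR code, note that the same calls should also produce a one-dimensional barcode if you swap the format constant. A minimal sketch, assuming the zxing-android-embedded dependency above and an existing imageView:
// Hypothetical variant of the snippet above using a 1-D barcode format
// (CODE_128) instead of QR_CODE.
MultiFormatWriter writer = new MultiFormatWriter();
try {
    BitMatrix matrix = writer.encode("123456789012", BarcodeFormat.CODE_128, 600, 200);
    BarcodeEncoder encoder = new BarcodeEncoder();
    Bitmap barcodeBitmap = encoder.createBitmap(matrix);
    imageView.setImageBitmap(barcodeBitmap); // imageView: an existing ImageView (assumption)
} catch (WriterException e) {
    e.printStackTrace();
}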
Try this one:
In build.gradle:
implementation 'com.google.zxing:core:3.2.1'
implementation 'com.journeyapps:zxing-android-embedded:3.2.0@aar'
And then write your own code like the example below.
public class DemoActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
LinearLayout l = new LinearLayout(this);
l.setLayoutParams(new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.MATCH_PARENT));
l.setOrientation(LinearLayout.VERTICAL);
setContentView(l);
// barcode data
String barcode_data = "123456viralpatel";
// barcode image
Bitmap bitmap = null;
ImageView iv = new ImageView(this);
try {
bitmap = encodeAsBitmap(barcode_data, BarcodeFormat.CODE_128, 600, 300);
iv.setImageBitmap(bitmap);
} catch (WriterException e) {
e.printStackTrace();
}
l.addView(iv);
//barcode text
TextView tv = new TextView(this);
tv.setGravity(Gravity.CENTER_HORIZONTAL);
tv.setText(barcode_data);
l.addView(tv);
}
private static final int WHITE = 0xFFFFFFFF;
private static final int BLACK = 0xFF000000;
Bitmap encodeAsBitmap(String contents, BarcodeFormat format, int img_width, int img_height) throws WriterException {
String contentsToEncode = contents;
if (contentsToEncode == null) {
return null;
}
Map<EncodeHintType, Object> hints = null;
String encoding = guessAppropriateEncoding(contentsToEncode);
if (encoding != null) {
hints = new EnumMap<>(EncodeHintType.class);
hints.put(EncodeHintType.CHARACTER_SET, encoding);
}
MultiFormatWriter writer = new MultiFormatWriter();
BitMatrix result;
try {
result = writer.encode(contentsToEncode, format, img_width, img_height, hints);
} catch (IllegalArgumentException iae) {
// Unsupported format
return null;
}
int width = result.getWidth();
int height = result.getHeight();
int[] pixels = new int[width * height];
for (int y = 0; y < height; y++) {
int offset = y * width;
for (int x = 0; x < width; x++) {
pixels[offset + x] = result.get(x, y) ? BLACK : WHITE;
}
}
Bitmap bitmap = Bitmap.createBitmap(width, height,
Bitmap.Config.ARGB_8888);
bitmap.setPixels(pixels, 0, width, 0, 0, width, height);
return bitmap;
}
private static String guessAppropriateEncoding(CharSequence contents) {
// Very crude at the moment
for (int i = 0; i < contents.length(); i++) {
if (contents.charAt(i) > 0xFF) {
return "UTF-8";
}
}
return null;
}
}

zxing android qrcode generator

I have included an image of my stack trace.
MultiFormatWriter multiFormatWriter = new MultiFormatWriter();
try {
    BitMatrix bitMatrix = multiFormatWriter.encode(uid, BarcodeFormat.QR_CODE, 200, 200); // <-- line 55
    BarcodeEncoder barcodeEncoder = new BarcodeEncoder();
    Bitmap bitmap = barcodeEncoder.createBitmap(bitMatrix);
    imageView.setImageBitmap(Bitmap.createBitmap(bitmap));
} catch (WriterException e) {
    e.printStackTrace();
}
I am getting a NullPointerException at line 55 (marked above). I used the ZXing library, but I am unable to perform the required operation. Please help me.
Try this
public static Bitmap encodeStringToBitmap(String contents) throws WriterException {
    // Null check, just b/c
    if (contents == null) {
        return null;
    }
    Map<EncodeHintType, Object> hints = new EnumMap<>(EncodeHintType.class);
    hints.put(EncodeHintType.CHARACTER_SET, "UTF-8");
    MultiFormatWriter writer = new MultiFormatWriter();
    BitMatrix result = writer.encode(contents, BarcodeFormat.PDF_417, 700, 900, hints);
    int width = result.getWidth();
    int height = result.getHeight();
    int[] pixels = new int[width * height];
    for (int y = 0; y < height; y++) {
        int offset = y * width;
        for (int x = 0; x < width; x++) {
            pixels[offset + x] = result.get(x, y) ? 0xFF000000 : 0xFFFFFFFF;
        }
    }
    Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    bitmap.setPixels(pixels, 0, width, 0, 0, width, height);
    return bitmap;
}
For more examples, see Java Code Examples for com.google.zxing.MultiFormatWriter.
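For context, here is a minimal usage sketch of the method above. The imageView variable is assumed to be an ImageView already bound in your layout; it is not part of the original answer:
// Hypothetical usage of encodeStringToBitmap() from the answer above.
try {
    Bitmap barcode = encodeStringToBitmap("1234567890");
    if (barcode != null) {
        imageView.setImageBitmap(barcode); // imageView: an ImageView from your layout (assumption)
    }
} catch (WriterException e) {
    e.printStackTrace();
}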

Rotate bitmap image

I am taking a screenshot of my app and trying to post it on Facebook using the Facebook SDK. But when the ShareDialog appears with the image, it is upside down, so I need to rotate it back.
This is how I create the image:
private void saveScreenshot() {
    try {
        FileHandle fh;
        do {
            fh = new FileHandle(Gdx.files.getLocalStoragePath() + "stoneIMG" + counter++ + ".png");
        } while (fh.exists());
        Pixmap pixmap = getScreenshot(0, 0, Gdx.graphics.getWidth(), Gdx.graphics.getHeight(), false);
        PixmapIO.writePNG(fh, pixmap);
        pixmap.dispose();
        System.out.println(fh.toString());
    } catch (Exception e) {
    }
}
And here I fetch it:
private Pixmap getScreenshot(int x, int y, int w, int h, boolean yDown) {
    final Pixmap pixmap = ScreenUtils.getFrameBufferPixmap(x, y, w, h);
    if (yDown) {
        ByteBuffer pixels = pixmap.getPixels();
        int numBytes = w * h * 4;
        byte[] lines = new byte[numBytes];
        int numBytesPerLine = w * 4;
        for (int i = 0; i < h; i++) {
            pixels.position((h - i - 1) * numBytesPerLine);
            pixels.get(lines, i * numBytesPerLine, numBytesPerLine);
        }
        pixels.clear();
        pixels.put(lines);
    }
    return pixmap;
}
Then I try to share the photo:
public void sharePhoto() {
    String filePath = Gdx.files.getLocalStoragePath() + "stoneIMG" + counter + ".png";
    BitmapFactory.Options options = new BitmapFactory.Options();
    options.inPreferredConfig = Bitmap.Config.ARGB_8888;
    Bitmap bitmap = BitmapFactory.decodeFile(filePath, options);
If you want to rotate a bitmap by 180 degrees, you can use this code:
Bitmap bitmap = BitmapFactory.decodeFile(filePath, options);
Matrix matrix = new Matrix();
matrix.postRotate(180);
Bitmap rotatedBitmap = Bitmap.createBitmap(bitmap , 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
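As a rough sketch, the rotated bitmap could then replace the original screenshot file before it is handed to the Facebook ShareDialog. This is an assumption about the follow-up, not from the original post; filePath, bitmap, and rotatedBitmap refer to the snippets above:
// Hypothetical follow-up: overwrite the screenshot file with the rotated copy
// so the share dialog picks up the corrected orientation.
FileOutputStream out = null;
try {
    out = new FileOutputStream(filePath); // filePath from sharePhoto() above
    rotatedBitmap.compress(Bitmap.CompressFormat.PNG, 100, out);
} catch (IOException e) {
    e.printStackTrace();
} finally {
    if (out != null) {
        try { out.close(); } catch (IOException ignored) {}
    }
    bitmap.recycle(); // the unrotated original is no longer needed
}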

Android - Get the mask image X,Y coordinates within the overlay after zooming, rotation, moving

I have an overlay with one maskable transparent area defined by x, y, width, and height parameters. I take an image from the gallery and place it at the given x, y coordinates inside a FrameLayout. I can rotate, zoom, and move the image within that mask area. After fitting it into the mask, I want to get the image's current x, y coordinates within the mask, because I combine the images on a canvas based on that x, y position. Presently I take the values from event.getX() and event.getY() in onTouch, but if I draw using those positions, the mask image is not drawn at the correct position after zooming or moving.
This is my code. Please help me if anyone has an idea.
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_crop);
mSelectedVersion = getIntent().getExtras().getInt(
CROP_VERSION_SELECTED_KEY, -1);
Object overLayPath = getIntent().getExtras().containsKey("overLayPath") ? getIntent()
.getExtras().get("overLayPath") : null;
className = getIntent().getExtras().containsKey("class") ? (String) getIntent()
.getExtras().get("class") : null;
pageCount = (int) (getIntent().getExtras().containsKey("pageCount") ? getIntent()
.getExtras().getInt("pageCount") : 0);
File file = new File(overLayPath.toString());
String imgFile = (String) getIntent().getExtras().get("bitmap");
metrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metrics);
mScreenHeight = metrics.heightPixels;
mScreenWidth = metrics.widthPixels;
if (photoImg != null) {
photoImg.recycle();
photoImg = null;
}
if (className.equalsIgnoreCase("Fb_List")) {
if (imgFile != null && !imgFile.equalsIgnoreCase("")) {
File imageFile = new File(imgFile);
photoImg = BitmapFactory
.decodeFile(imageFile.getAbsolutePath());
}
} else {
if (imgFile != null && !imgFile.equalsIgnoreCase("")) {
Uri selectedImageURI = Uri.parse(imgFile);
InputStream is;
try {
is = getContentResolver().openInputStream(selectedImageURI);
if (photoImg != null) {
photoImg.recycle();
photoImg = null;
}
photoImg = decodeSampledBitmapFromResource(is,
(int) (320 * metrics.density),
(int) (416 * metrics.density));
try {
is.close();
} catch (IOException e) {
e.printStackTrace();
}
} catch (FileNotFoundException e) {
e.printStackTrace();
}
}
}
mImg = (ImageView) findViewById(R.id.cp_img);
photoImg = createMaskImage(photoImg, mImg);
if (!file.exists()) {
bitmap = BitmapFactory.decodeResource(getResources(),
R.drawable.transparentoverlay);
} else {
BitmapFactory.Options options = new BitmapFactory.Options();
options.inPreferredConfig = Bitmap.Config.ARGB_8888;
bitmap = BitmapFactory.decodeFile(overLayPath.toString(), options);
}
BitmapDrawable drawable = (BitmapDrawable) mImg.getDrawable();
if (drawable != null && drawable.getBitmap() != null) {
drawable.getBitmap().recycle();
mImg.setImageDrawable(null);
}
mTemplateImg = (ImageView) findViewById(R.id.cp_face_template);
mTemplateImg.setImageBitmap(bitmap);
mImg.setOnTouchListener(this);
if (mScreenWidth > 600 && mScreenHeight > 1024) {
bitmap = Bitmap.createScaledBitmap(bitmap,
(int) (320 * metrics.density),
(int) (416 * metrics.density), true);
mTemplateImg.setImageBitmap(bitmap);
} else if (mScreenWidth > 460 && mScreenHeight > 720) {
bitmap = Bitmap.createScaledBitmap(bitmap,
(int) (320 * metrics.density),
(int) (416 * metrics.density), true);
mTemplateImg.setImageBitmap(bitmap);
}
mImg.setImageBitmap(photoImg);
mMatrix.postScale(mScaleFactor, mScaleFactor);
mImg.setImageMatrix(mMatrix);
// Setup Gesture Detectors
mScaleDetector = new ScaleGestureDetector(getApplicationContext(),
new ScaleListener());
mRotateDetector = new RotateGestureDetector(getApplicationContext(),
new RotateListener());
mMoveDetector = new MoveGestureDetector(MaskCropActivity.this,
new MoveListener());
}
private Bitmap createMaskImage(Bitmap photoImg2, ImageView maskView) {
if (photoImg2 != null) {
String template = Common.getPrefrenceValue(
Constants.TEMPLATE_OBJECT, MaskCropActivity.this);
JSONObject templateObject = null;
if (template != null && !template.equalsIgnoreCase("")) {
try {
templateObject = new JSONObject(template);
JSONArray pageDetails = templateObject
.getJSONArray("pageDetails");
JSONObject pageObject = pageDetails
.getJSONObject(pageCount - 1);
JSONArray maskImages;
maskImages = pageObject.getJSONArray("maskImages");
if (maskImages != null && maskImages.length() > 0) {
JSONObject maskObject = maskImages.getJSONObject(0);
String coOrdinates = maskObject.has("coOrdinates") ? maskObject
.getString("coOrdinates") : "";
if (coOrdinates.contains(",")
&& coOrdinates.split(",").length == 4) {
int x = Integer.parseInt(coOrdinates.split(",")[0]);
int y = Integer.parseInt(coOrdinates.split(",")[1]);
maskX = x;
maskY = y;
int width = Integer
.parseInt(coOrdinates.split(",")[2]);
int height = Integer.parseInt(coOrdinates
.split(",")[3]);
if (width != 0 && height != 0) {
if (((photoImg2.getWidth() - (int) (width * metrics.density)) / 2)
+ (int) (width * metrics.density) > photoImg2
.getWidth()
|| (50 + (int) (height * metrics.density)) > photoImg2
.getHeight()) {
Log.i(TAG,
"image size is less than required");
photoImg2 = Bitmap.createScaledBitmap(
photoImg2,
(int) (width * metrics.density),
(int) (height * metrics.density),
true);
} else {
Log.i(TAG,
"image size is more than required");
photoImg2 = Bitmap
.createBitmap(
photoImg2,
(photoImg2.getWidth() - (int) (width * metrics.density)) / 2,
50,
(int) (width * metrics.density),
(int) (height * metrics.density));
}
}
MarginLayoutParams mlp = (MarginLayoutParams) maskView
.getLayoutParams();
mlp.setMargins(x, y, 0, 0);// all in pixels
maskView.setLayoutParams(mlp);
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
return photoImg2;
}
return photoImg2;
}
public static Bitmap decodeSampledBitmapFromResource(
InputStream inputStream, int reqWidth, int reqHeight) {
byte[] byteArr = new byte[0];
byte[] buffer = new byte[1024];
int len;
int count = 0;
try {
while ((len = inputStream.read(buffer)) > -1) {
if (len != 0) {
if (count + len > byteArr.length) {
byte[] newbuf = new byte[(count + len) * 2];
System.arraycopy(byteArr, 0, newbuf, 0, count);
byteArr = newbuf;
}
System.arraycopy(buffer, 0, byteArr, count, len);
count += len;
}
}
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inJustDecodeBounds = true;
BitmapFactory.decodeByteArray(byteArr, 0, count, options);
options.inSampleSize = calculateInSampleSize(options, reqWidth,
reqHeight);
options.inPurgeable = true;
options.inInputShareable = true;
options.inJustDecodeBounds = false;
options.inPreferredConfig = Bitmap.Config.ARGB_8888;
int[] pids = { android.os.Process.myPid() };
return BitmapFactory.decodeByteArray(byteArr, 0, count, options);
} catch (Exception e) {
e.printStackTrace();
return null;
}
}
public void onCropImageButton(View v) {
// Create progress dialog and display it.
try {
mProgressDialog = new ProgressDialog(v.getContext());
mProgressDialog.setCancelable(false);
mProgressDialog.setProgressStyle(ProgressDialog.STYLE_SPINNER);
mProgressDialog.setMessage("Cropping Image\nPlease Wait.....");
mProgressDialog.show();
} catch (Exception e) {
e.printStackTrace();
}
// Setting values so that we can retrieve the image from
// ImageView multiple times.
mImg.buildDrawingCache(true);
mImg.setDrawingCacheEnabled(true);
mTemplateImg.buildDrawingCache(true);
mTemplateImg.setDrawingCacheEnabled(true);
// Create new thread to crop.
new Thread(new Runnable() {
@Override
public void run() {
if (mSelectedVersion == VERSION_1) {
croppedImg = cropImage(photoImg, bitmap);
Common common = new Common();
if (className.equalsIgnoreCase("Fb_List")) {
storeImage(croppedImg, "overlay" + pageCount + ".png");
} else {
Common.storeCropImage(croppedImg, "overlay1.png",
MaskCropActivity.this);
photoImg.recycle();
}
} /*
* else { croppedImg = ImageProcess.cropImageVer2(
* mImg.getDrawingCache(true),
* mTemplateImg.getDrawingCache(true), 320, 440); }
*/
// }
try {
mProgressDialog.dismiss();
} catch (Exception e) {
// TODO: handle exception
}
mImg.setDrawingCacheEnabled(false);
mTemplateImg.setDrawingCacheEnabled(false);
}
}).start();
}
private boolean storeImage(Bitmap imageData, String filename) {
String iconsStoragePath = Common.SDCARD_PATH
+ Common.UNZIPPED_PATH
+ Common.getPrefrenceValue(Constants.ZIPFILE_NAME,
MaskCropActivity.this) + "/custom";
File sdIconStorageDir = new File(iconsStoragePath);
sdIconStorageDir.mkdirs();
try {
String filePath = sdIconStorageDir.toString() + "/" + filename;
FileOutputStream fileOutputStream = new FileOutputStream(filePath);
BufferedOutputStream bos = new BufferedOutputStream(
fileOutputStream);
System.out.println("imageData" + imageData);
imageData.compress(CompressFormat.PNG, 90, bos);
Intent intent = new Intent(MaskCropActivity.this,
DesignPriviewEditing.class);
intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
startActivity(intent);
finish();
bos.flush();
bos.close();
} catch (FileNotFoundException e) {
return false;
} catch (IOException e) {
return false;
}
return true;
}
public static int calculateInSampleSize(BitmapFactory.Options options,
int reqWidth, int reqHeight) {
// Raw height and width of image
final int height = options.outHeight;
final int width = options.outWidth;
int inSampleSize = 1;
if (height > reqHeight || width > reqWidth) {
final int halfHeight = height / 2;
final int halfWidth = width / 2;
while ((halfHeight / inSampleSize) > reqHeight
&& (halfWidth / inSampleSize) > reqWidth) {
inSampleSize *= 2;
}
}
return inSampleSize;
}
public boolean onTouch(View v, MotionEvent event) {
mScaleDetector.onTouchEvent(event);
mRotateDetector.onTouchEvent(event);
mMoveDetector.onTouchEvent(event);
float scaledImageCenterX = (mImageWidth * mScaleFactor) / 2;
float scaledImageCenterY = (mImageHeight * mScaleFactor) / 2;
mMatrix.reset();
mMatrix.postScale(mScaleFactor, mScaleFactor);
mMatrix.postRotate(mRotationDegrees, scaledImageCenterX,
scaledImageCenterY);
mMatrix.postTranslate(mFocusX - scaledImageCenterX, mFocusY
- scaledImageCenterY);
float[] values = new float[9];
mMatrix.getValues(values);
relativeX = (int) event.getX();
relativeY = (int) event.getY();
ImageView view = (ImageView) v;
view.setImageMatrix(mMatrix);
return true;
}
private class ScaleListener extends
ScaleGestureDetector.SimpleOnScaleGestureListener {
@Override
public boolean onScale(ScaleGestureDetector detector) {
mScaleFactor *= detector.getScaleFactor();
mScaleFactor = Math.max(0.1f, Math.min(mScaleFactor, 10.0f));
return true;
}
}
private class RotateListener extends
RotateGestureDetector.SimpleOnRotateGestureListener {
@Override
public boolean onRotate(RotateGestureDetector detector) {
mRotationDegrees -= detector.getRotationDegreesDelta();
return true;
}
}
private class MoveListener extends
MoveGestureDetector.SimpleOnMoveGestureListener {
@Override
public boolean onMove(MoveGestureDetector detector) {
PointF d = detector.getFocusDelta();
mFocusX += d.x;
mFocusY += d.y;
System.out.println("ON MOVE LISTSNER" + mFocusX + "MFOCUS Y"
+ mFocusY);
return true;
}
}
@Override
public void onBackPressed() {
Intent intent = new Intent(MaskCropActivity.this,
CropImagesActivity.class);
System.out
.println("pageCount in mASK CROP ON BACK PRESSED" + pageCount);
intent.putExtra("pageCount", pageCount);
startActivity(intent);
finish();
}
public Bitmap cropImage(Bitmap img, Bitmap templateImage) {
Bitmap bm = Bitmap.createBitmap((int) (320 * metrics.density),
(int) (416 * metrics.density), Bitmap.Config.ARGB_8888);
Canvas combineImg = new Canvas(bm);
combineImg.drawBitmap(img, relativeX, relativeY, null);
combineImg.drawBitmap(templateImage, 0, 0, null);
return bm;
}

Android face detection only works with drawables, not with images from the SD card

So I have code to detect up to 10 faces in any given image file and return information such as the location of the eyes. When I tell it to use an image file stored in my project's drawable resources, it works great. But when I have it try to find faces in a bitmap loaded from the SD card, it won't find any faces, even though these are the exact same images. Any ideas? My code is below:
Edit:
After further inspection, I found that when I insert the line System.out.println("Row Bytes: " + sourceImage.getRowBytes()); the drawable reports 352 and the SD-card image reports 704. I think this means the drawable is being compressed in the .apk file while the SD-card image obviously is not. I am not sure whether this affects anything.
public class FaceView extends View {
private static final int NUM_FACES = 10; // max is 64
private static final boolean DEBUG = true;
private FaceDetector arrayFaces;
private FaceDetector.Face getAllFaces[] = new FaceDetector.Face[NUM_FACES];
private FaceDetector.Face getFace = null;
private PointF eyesMidPts[] = new PointF[NUM_FACES];
private float eyesDistance[] = new float[NUM_FACES];
private Bitmap sourceImage;
private Paint tmpPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
private Paint pOuterBullsEye = new Paint(Paint.ANTI_ALIAS_FLAG);
private Paint pInnerBullsEye = new Paint(Paint.ANTI_ALIAS_FLAG);
private int picWidth, picHeight;
private float xRatio, yRatio;
public FaceView(Context context) {
super(context);
pInnerBullsEye.setStyle(Paint.Style.FILL);
pInnerBullsEye.setColor(Color.RED);
pOuterBullsEye.setStyle(Paint.Style.STROKE);
pOuterBullsEye.setColor(Color.RED);
tmpPaint.setStyle(Paint.Style.STROKE);
tmpPaint.setTextAlign(Paint.Align.CENTER);
BitmapFactory.Options bfo = new BitmapFactory.Options();
bfo.inPreferredConfig = Bitmap.Config.RGB_565;
//********This code imports the image from the SD card which does not work
String imageInSD = Environment.getExternalStorageDirectory()
.getAbsolutePath() + "/testfolder/" + "face1" + ".png";
Bitmap sourceImage = BitmapFactory.decodeFile(imageInSD,bfo);
//**********This code uses an image in the projects drawable folder, this code works.
sourceImage = BitmapFactory.decodeResource( getResources() ,R.drawable.face1, bfo);
picWidth = sourceImage.getWidth();
picHeight = sourceImage.getHeight();
arrayFaces = new FaceDetector( picWidth, picHeight, NUM_FACES );
arrayFaces.findFaces(sourceImage, getAllFaces);
for (int i = 0; i < getAllFaces.length; i++)
{
getFace = getAllFaces[i];
try {
PointF eyesMP = new PointF();
getFace.getMidPoint(eyesMP);
eyesDistance[i] = getFace.eyesDistance();
eyesMidPts[i] = eyesMP;
if (DEBUG)
{
Log.i("Face",
i + " " + getFace.confidence() + " " + getFace.eyesDistance() + " "
+ "Pose: ("+ getFace.pose(FaceDetector.Face.EULER_X) + ","
+ getFace.pose(FaceDetector.Face.EULER_Y) + ","
+ getFace.pose(FaceDetector.Face.EULER_Z) + ")"
+ "Eyes Midpoint: ("+eyesMidPts[i].x + "," + eyesMidPts[i].y +")"
);
}
}
catch (Exception e)
{
if (DEBUG) Log.e("Face", i + " is null");
}
}
}
@Override
protected void onDraw(Canvas canvas)
{
xRatio = getWidth()*1.0f / picWidth;
yRatio = getHeight()*1.0f / picHeight;
canvas.drawBitmap( sourceImage, null , new Rect(0,0,getWidth(),getHeight()),tmpPaint);
for (int i = 0; i < eyesMidPts.length; i++)
{
if (eyesMidPts[i] != null)
{
pOuterBullsEye.setStrokeWidth(eyesDistance[i] /6);
canvas.drawCircle(eyesMidPts[i].x*xRatio, eyesMidPts[i].y*yRatio, eyesDistance[i] / 2 , pOuterBullsEye);
canvas.drawCircle(eyesMidPts[i].x*xRatio, eyesMidPts[i].y*yRatio, eyesDistance[i] / 6 , pInnerBullsEye);
}
}
}
}
Alright, I believe I know what your issue is. The device cannot render the image to a bitmap at the size it has in external storage. The face recognition is working; the result just never makes it to the canvas. All devices have a rendering limit; on my Xoom it is 2048x2048 (I found that here). The reason it works when you add the image as a resource is that your file is downsized as the .apk is built (to be honest I'm not sure why it does this, but I left a little println for testing; someone else could answer that better). Anyway, I just scaled the bitmap down by dividing by 2 after your code looks for faces and before it tries to render the bitmap to the canvas. Now everything seems to work fine. You may want to adjust your face indicators, but it is functional. I hope this helps.
public class FaceView extends View {
private static final int NUM_FACES = 1; // max is 64
private static final boolean DEBUG = true;
private FaceDetector arrayFaces;
private FaceDetector.Face getAllFaces[] = new FaceDetector.Face[NUM_FACES];
private FaceDetector.Face getFace = null;
private PointF eyesMidPts[] = new PointF[NUM_FACES];
private float eyesDistance[] = new float[NUM_FACES];
private Bitmap sourceImage;
private Paint tmpPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
private Paint pOuterBullsEye = new Paint(Paint.ANTI_ALIAS_FLAG);
private Paint pInnerBullsEye = new Paint(Paint.ANTI_ALIAS_FLAG);
private int picWidth, picHeight;
private float xRatio, yRatio;
public FaceView(Context context) {
super(context);
pInnerBullsEye.setStyle(Paint.Style.FILL);
pInnerBullsEye.setColor(Color.RED);
pOuterBullsEye.setStyle(Paint.Style.STROKE);
pOuterBullsEye.setColor(Color.RED);
tmpPaint.setStyle(Paint.Style.STROKE);
tmpPaint.setTextAlign(Paint.Align.CENTER);
BitmapFactory.Options bfo = new BitmapFactory.Options();
bfo.inPreferredConfig = Bitmap.Config.RGB_565;
//********This code imports the image from the SD card which does not work
String imageInSD = Environment.getExternalStorageDirectory().getAbsolutePath() + "/face1" + ".jpg";
System.out.println(imageInSD);
sourceImage = BitmapFactory.decodeFile(imageInSD, bfo);
//Bitmap sourceImage;// = BitmapFactory.decodeFile(imageInSD,bfo);
//**********This code uses an image in the projects drawable folder, this code works.
//sourceImage = BitmapFactory.decodeResource( getResources() ,R.drawable.face1, bfo);
picWidth = sourceImage.getWidth();
picHeight = sourceImage.getHeight();
System.out.println(picWidth + "x" + picHeight);
arrayFaces = new FaceDetector( picWidth, picHeight, NUM_FACES );
arrayFaces.findFaces(sourceImage, getAllFaces);
sourceImage = Bitmap.createScaledBitmap (sourceImage, picWidth/2, picHeight/2, false);
for (int i = 0; i < getAllFaces.length; i++)
{
getFace = getAllFaces[i];
try {
PointF eyesMP = new PointF();
getFace.getMidPoint(eyesMP);
eyesDistance[i] = getFace.eyesDistance();
eyesMidPts[i] = eyesMP;
if (DEBUG)
{
Log.i("Face",
i + " " + getFace.confidence() + " " + getFace.eyesDistance() + " "
+ "Pose: ("+ getFace.pose(FaceDetector.Face.EULER_X) + ","
+ getFace.pose(FaceDetector.Face.EULER_Y) + ","
+ getFace.pose(FaceDetector.Face.EULER_Z) + ")"
+ "Eyes Midpoint: ("+eyesMidPts[i].x + "," + eyesMidPts[i].y +")"
);
}
}
catch (Exception e)
{
if (DEBUG) Log.e("Face", i + " is null");
}
}
}
@Override
protected void onDraw(Canvas canvas)
{
xRatio = getWidth()*1.0f / picWidth;
yRatio = getHeight()*1.0f / picHeight;
canvas.drawBitmap( sourceImage, null , new Rect(0,0,getWidth(),getHeight()),tmpPaint);
for (int i = 0; i < eyesMidPts.length; i++)
{
if (eyesMidPts[i] != null)
{
pOuterBullsEye.setStrokeWidth(eyesDistance[i] /6);
canvas.drawCircle(eyesMidPts[i].x*xRatio, eyesMidPts[i].y*yRatio, eyesDistance[i] / 2 , pOuterBullsEye);
canvas.drawCircle(eyesMidPts[i].x*xRatio, eyesMidPts[i].y*yRatio, eyesDistance[i] / 6 , pInnerBullsEye);
}
}
}
}
It turns out the issue is that the pictures taken by the camera are saved as PNG files, and the face detection only works successfully from the SD card with JPG files. Simply convert the files to JPG and it works fine.
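For reference, here is a minimal sketch of one way to do that conversion. The helper name and the quality value are placeholders of mine, not from the original answer:
// Hypothetical helper: re-encode a PNG file from the SD card as a JPG
// so the FaceDetector-based code above can work with it.
private static boolean convertPngToJpg(String pngPath, String jpgPath) {
    Bitmap source = BitmapFactory.decodeFile(pngPath);
    if (source == null) {
        return false; // file missing or not decodable
    }
    FileOutputStream out = null;
    try {
        out = new FileOutputStream(jpgPath);
        return source.compress(Bitmap.CompressFormat.JPEG, 90, out);
    } catch (IOException e) {
        e.printStackTrace();
        return false;
    } finally {
        if (out != null) {
            try { out.close(); } catch (IOException ignored) {}
        }
        source.recycle();
    }
}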
