OpenCV4Android: How to find the movements of head - android

I'm using the Viola-Jones method to detect faces. Whenever the head is tilted, there is a chance the algorithm won't work properly.
I want to detect those head movements even when the face itself cannot be detected.
Can I implement this using motion detection, or is there another method to track the movements?
Thanks in advance.

Yes, you can do it by capturing frames and comparing them.
The code below will help you detect the face; you can then compare the x and y positions between frames.
// Swing panel that shows camera frames, detects the face with an OpenCV
// Haar cascade, and overlays a selectable "ornament" image on it.
class DetectNose extends JPanel implements KeyListener, ActionListener {
private static final long serialVersionUID = 1L;
// Top-level window hosting this panel (static so it is shared app-wide).
private static JFrame frame;
// Most recent camera frame, drawn by paintComponent().
private BufferedImage image;
// Haar cascade used by detect() for frontal-face detection.
private CascadeClassifier face_cascade;
// Centre of the most recent face detection (updated in detect()).
private Point center;
private JLabel label;
// Currently selected ornament, scaled down to imgSize x imgSize.
private Image scalledItemImage;
private double customY = 0;
private double customX = 0;
private Iterator<InputStream> iterator;
// Ornament images decoded once up front so switching between them is cheap.
private ArrayList<BufferedImage> listOfCachedImages;
// 1-based index into listOfCachedImages of the selected ornament.
private int imageIndex = 1;
private int customZ = 0;
// Half-size of the ellipse drawn around the detected face.
private Size size;
// 700x700 copy of the current ornament — purpose unclear from this file;
// NOTE(review): confirm where scalledItemImageBackup is consumed.
private Image scalledItemImageBackup;
private Point center1;
// Edge length (px) used when scaling the on-screen ornament.
private int imgSize = 35;
private boolean isLocked;
// Builds the panel: loads the OpenCV native library for the current OS,
// caches the ornament images fetched from the database, and loads the Haar
// cascade used for face detection.
public DetectNose(JFrame frame, List<Long> listOfOrnaments) {
super();
this.frame = frame;
this.frame.setFocusable(true);
this.frame.requestFocusInWindow();
this.frame.addKeyListener(this);
File f = null;
try {
System.out.println(System.getProperty("os.name"));
// Pick the native OpenCV binary matching the host OS.
// NOTE(review): the Windows build is 2.4.9 while the other build is
// 2.4.6 — confirm the version mismatch is intentional.
if (System.getProperty("os.name").contains("Windows")) {
f = new File("res/opencv_lib_win/opencv_java249.dll");
System.load(f.getAbsolutePath());
System.out.println("Loaded :" + f.getAbsolutePath());
} else {
f = new File("res/opencv_lib/libopencv_java246.so");
System.load(f.getAbsolutePath());
System.out.println("Loaded :" + f.getAbsolutePath());
}
} catch (Exception ex) {
ex.printStackTrace();
}
// Decode every ornament image once so later switching is instant.
List<InputStream> ornaments = DatabaseHandler
.getOrnamentsImagesByListOfOrnaments(listOfOrnaments);
iterator = ornaments.iterator();
listOfCachedImages = new ArrayList<BufferedImage>();
try {
while (iterator.hasNext()) {
InputStream inputStream = iterator.next();
listOfCachedImages.add(ImageIO.read(inputStream));
}
setFirstOrnament();
} catch (IOException e) {
e.printStackTrace();
}
label = new JLabel(new ImageIcon(scalledItemImage));
add(label);
// An empty classifier means the cascade XML was missing or unreadable;
// detection would then never match anything, so bail out early.
face_cascade = new CascadeClassifier(
"res/cascades/haarcascade_frontalface_alt_tree.xml");
if (face_cascade.empty()) {
System.out.println("--(!)Error loading A\n");
return;
} else {
System.out.println("Face classifier loaded up");
}
}
// Shows the ornament at the current (1-based) index and pre-scales both the
// display copy and the large backup copy.
private void setFirstOrnament() {
scalledItemImage = listOfCachedImages.get(imageIndex - 1);
// BUG FIX: getScaledInstance() expects an Image.SCALE_* hint as its third
// argument, not a BufferedImage type constant. The old TYPE_INT_RGB only
// worked by accident because it happens to equal SCALE_DEFAULT (both 1).
scalledItemImageBackup = scalledItemImage.getScaledInstance(700, 700,
Image.SCALE_DEFAULT);
scalledItemImage = scalledItemImage.getScaledInstance(imgSize, imgSize,
Image.SCALE_DEFAULT);
repaint();
System.out.println("imageIndex = " + imageIndex);
}
// Steps back to the previous ornament (if any), re-scales it, and updates
// the navigation buttons; at the first image the "prev" button is disabled.
private void setPrevOrnament() {
if (imageIndex > 1) {
imageIndex--;
scalledItemImage = listOfCachedImages.get(imageIndex - 1);
// BUG FIX: getScaledInstance() takes an Image.SCALE_* hint, not a
// BufferedImage type; TYPE_INT_RGB only worked because it equals
// SCALE_DEFAULT (both are 1).
scalledItemImageBackup = scalledItemImage.getScaledInstance(700,
700, Image.SCALE_DEFAULT);
scalledItemImage = scalledItemImage.getScaledInstance(imgSize,
imgSize, Image.SCALE_DEFAULT);
GoLiveIntermediator.nextButton.setEnabled(true);
repaint();
revalidate();
System.out.println("imageIndex = " + imageIndex);
} else {
GoLiveIntermediator.prevButton.setEnabled(false);
}
}
// Steps forward to the next ornament (if any), re-scales it, and updates
// the navigation buttons; at the last image the "next" button is disabled.
private void setNextOrnament() {
if (listOfCachedImages.size() > imageIndex) {
imageIndex++;
scalledItemImage = listOfCachedImages.get(imageIndex - 1);
// BUG FIX: getScaledInstance() takes an Image.SCALE_* hint, not a
// BufferedImage type; TYPE_INT_RGB only worked because it equals
// SCALE_DEFAULT (both are 1).
scalledItemImageBackup = scalledItemImage.getScaledInstance(700,
700, Image.SCALE_DEFAULT);
scalledItemImage = scalledItemImage.getScaledInstance(imgSize,
imgSize, Image.SCALE_DEFAULT);
GoLiveIntermediator.prevButton.setEnabled(true);
repaint();
revalidate();
System.out.println("imageIndex = " + imageIndex);
} else {
GoLiveIntermediator.nextButton.setEnabled(false);
}
}
// Returns the frame most recently stored via setimage(); may be null before
// the first frame arrives.
private BufferedImage getimage() {
return image;
}
// Stores the frame that paintComponent() will draw next.
public void setimage(BufferedImage newimage) {
this.image = newimage;
}
// Converts an OpenCV Mat (1-channel grayscale or 3-channel BGR) into a
// Swing-friendly BufferedImage; returns null for unsupported channel counts.
public BufferedImage matToBufferedImage(Mat matrix) {
final int width = matrix.cols();
final int height = matrix.rows();
final byte[] pixels = new byte[width * height * (int) matrix.elemSize()];
matrix.get(0, 0, pixels);
final int imageType;
if (matrix.channels() == 1) {
imageType = BufferedImage.TYPE_BYTE_GRAY;
} else if (matrix.channels() == 3) {
imageType = BufferedImage.TYPE_3BYTE_BGR;
// Swap the first and third samples of every pixel in place ("bgr to rgb").
// NOTE(review): the buffer is reordered to RGB yet the image type says
// 3BYTE_BGR — colours may come out channel-swapped; preserved as-is.
for (int i = 0; i + 2 < pixels.length; i += 3) {
byte tmp = pixels[i];
pixels[i] = pixels[i + 2];
pixels[i + 2] = tmp;
}
} else {
// Unsupported layout (e.g. 4-channel); callers treat null as "skip frame".
return null;
}
BufferedImage converted = new BufferedImage(width, height, imageType);
converted.getRaster().setDataElements(0, 0, width, height, pixels);
return converted;
}
// Draws the most recent camera frame. Before the first frame arrives the
// stored image is null; the original relied on catching the resulting NPE,
// which also hid any genuine painting error — check explicitly instead.
public void paintComponent(Graphics g) {
BufferedImage temp = getimage();
if (temp == null) {
System.out.print("Trying to load images...");
return;
}
this.frame.requestFocusInWindow();
g.drawImage(temp, 0, 0, temp.getWidth(), temp.getHeight() + 50,
this);
}
// Runs Haar-cascade face detection on one frame and returns a copy of the
// frame with an ellipse drawn around each detected face. Also stores the
// midpoint of the last detection in 'center'; callers can compare that
// value between frames to estimate head movement.
public Mat detect(Mat inputframe) {
Mat mRgba = new Mat();
Mat mGrey = new Mat();
MatOfRect faces = new MatOfRect();
inputframe.copyTo(mRgba);
inputframe.copyTo(mGrey);
// Detection runs on an equalized grayscale copy; mRgba keeps the colours.
Imgproc.cvtColor(mRgba, mGrey, Imgproc.COLOR_BGR2GRAY);
Imgproc.equalizeHist(mGrey, mGrey);
try {
face_cascade.detectMultiScale(mGrey, faces);
} catch (Exception e) {
// Best-effort: a failed detection simply skips this frame.
System.out.print(".");
}
// NOTE(review): re-centering and locking the window on EVERY frame looks
// unintended — these two calls presumably belong in one-time setup; confirm.
frame.setLocationRelativeTo(null);
frame.setResizable(false);
for (Rect rect : faces.toArray()) {
center = new Point(rect.x + rect.width * 0.5, rect.y + rect.height
* 0.5); // You can use this to point out as first detection and last detection
size = new Size(rect.width * 0.5, rect.height * 0.5);
// Magenta ellipse marking the detected face on the colour frame.
Core.ellipse(mRgba, center, size, 0, 0, 360, new Scalar(255, 0,
255), 1, 8, 0);
repaint();
}
return mRgba;
}
Here, center is the first detection point; in the same way you can find the last detection point from the image and compare the two to estimate the movement.

Related

How to download an offline map for a particular country

I am developing an Android app, and I want to bundle the map of a particular area inside the app so it can be used for offline navigation. How can this be done?
I haven't found a solution yet. I'm using the approach from the link below, but it doesn't work.
I display a satellite map and have put the tile files in the assets folder, but it does not work for me.
public class CustomMapTileProvider implements TileProvider {
private static final int TILE_WIDTH = 256;
private static final int TILE_HEIGHT = 256;
private static final int BUFFER_SIZE = 16 * 1024;
private AssetManager mAssets;
public CustomMapTileProvider(AssetManager assets) {
mAssets = assets;
}
private static final SparseArray<Rect> TILE_ZOOMS = new SparseArray<Rect>() {{
put(8, new Rect(135, 180, 135, 181 ));
put(9, new Rect(270, 361, 271, 363 ));
put(10, new Rect(541, 723, 543, 726 ));
put(11, new Rect(1082, 1447, 1086, 1452));
put(12, new Rect(2165, 2894, 2172, 2905));
put(13, new Rect(4330, 5789, 4345, 5810));
put(14, new Rect(8661, 11578, 8691, 11621));
}};
private boolean hasTile(int x, int y, int zoom) {
Rect b = TILE_ZOOMS.get(zoom);
return b == null ? false : (b.left <= x && x <= b.right && b.top <= y && y <= b.bottom);
}
#Override
public Tile getTile(int x, int y, int zoom) {
y = fixYCoordinate(y, zoom);
if (hasTile(x, y, zoom)) {
byte[] image = readTileImage(x, y, zoom);
return image == null ? null : new Tile(TILE_WIDTH, TILE_HEIGHT, image);
}
else {
return NO_TILE;
}
}
private int fixYCoordinate(int y, int zoom) {
int size = 1 << zoom; // size = 2^zoom
return size - 1 - y;
}
private byte[] readTileImage(int x, int y, int zoom) {
InputStream in = null;
ByteArrayOutputStream buffer = null;
try {
in = mAssets.open(getTileFilename(x, y, zoom));
//in = mAssets.open("world_countries.mbtiles");
buffer = new ByteArrayOutputStream();
int nRead;
byte[] data = new byte[BUFFER_SIZE];
while ((nRead = in.read(data, 0, BUFFER_SIZE)) != -1) {
buffer.write(data, 0, nRead);
}
buffer.flush();
return buffer.toByteArray();
}
catch (IOException e) {
e.printStackTrace();
return null;
}
catch (OutOfMemoryError e) {
e.printStackTrace();
return null;
}
finally {
if (in != null)
try {
in.close();
}
catch (Exception ignored) {
}
if (buffer != null)
try {
buffer.close();
}
catch (Exception ignored) {
}
}
}
private String getTileFilename(int x, int y, int zoom) {
return "map/" + zoom + '/' + x + '/' + y + ".png";
}
}

How to convert string to Barcode

How could I generate the Barcode in Android,
I did it with the library com.google.zxing:core:3.2.1, but it can only produce a QR code, not a 1-D barcode, and I have tried to modify the code like this:
// Renders the given text as a QR code bitmap; returns null when ZXing
// cannot encode the value in this format.
Bitmap TextToImageEncode(String Value) throws WriterException {
BitMatrix bitMatrix;
try {
// BUG FIX: the original passed BarcodeFormat.DATA_MATRIX.QR_CODE, which
// is just QR_CODE accessed through another constant (a static member via
// an instance) — misleading, and it still produced a QR code. To get a
// 1-D barcode, pass e.g. BarcodeFormat.CODE_128 here instead.
bitMatrix = new MultiFormatWriter().encode(
Value,
BarcodeFormat.QR_CODE,
QRcodeWidth, QRcodeWidth, null
);
} catch (IllegalArgumentException Illegalargumentexception) {
return null;
}
int bitMatrixWidth = bitMatrix.getWidth();
int bitMatrixHeight = bitMatrix.getHeight();
int[] pixels = new int[bitMatrixWidth * bitMatrixHeight];
for (int y = 0; y < bitMatrixHeight; y++) {
int offset = y * bitMatrixWidth;
for (int x = 0; x < bitMatrixWidth; x++) {
pixels[offset + x] = bitMatrix.get(x, y) ?
getResources().getColor(R.color.black):getResources().getColor(R.color.white);
}
}
Bitmap bitmap = Bitmap.createBitmap(bitMatrixWidth, bitMatrixHeight, Bitmap.Config.ARGB_4444);
// BUG FIX: the stride must equal the row width; the hard-coded 500
// scrambled (or crashed) the output whenever QRcodeWidth != 500.
bitmap.setPixels(pixels, 0, bitMatrixWidth, 0, 0, bitMatrixWidth, bitMatrixHeight);
return bitmap;
}
How can I make it produce a 1-D barcode instead? Please let me know.
Thanks
Try this
compile 'com.google.zxing:core:3.2.1'
compile 'com.journeyapps:zxing-android-embedded:3.2.0@aar'
// Minimal ZXing example: encode 'text' as a 200x200 QR code and show it in
// an ImageView. BUG FIX: the first line was missing its semicolon.
String text = ""; // Whatever you need to encode in the QR code
MultiFormatWriter multiFormatWriter = new MultiFormatWriter();
try {
// For a 1-D barcode, replace BarcodeFormat.QR_CODE with e.g. CODE_128.
BitMatrix bitMatrix = multiFormatWriter.encode(text, BarcodeFormat.QR_CODE,200,200);
BarcodeEncoder barcodeEncoder = new BarcodeEncoder();
Bitmap bitmap = barcodeEncoder.createBitmap(bitMatrix);
imageView.setImageBitmap(bitmap);
} catch (WriterException e) {
e.printStackTrace();
}
UPDATE
If that does not help you, try this:
https://github.com/journeyapps/zxing-android-embedded
Try this one :
In build:gradle:
implementation 'com.google.zxing:core:3.2.1'
implementation 'com.journeyapps:zxing-android-embedded:3.2.0@aar'
And then write your own code like below.
public class DemoActivity extends AppCompatActivity {
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
LinearLayout l = new LinearLayout(this);
l.setLayoutParams(new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.MATCH_PARENT));
l.setOrientation(LinearLayout.VERTICAL);
setContentView(l);
// barcode data
String barcode_data = "123456viralpatel";
// barcode image
Bitmap bitmap = null;
ImageView iv = new ImageView(this);
try {
bitmap = encodeAsBitmap(barcode_data, BarcodeFormat.CODE_128, 600, 300);
iv.setImageBitmap(bitmap);
} catch (WriterException e) {
e.printStackTrace();
}
l.addView(iv);
//barcode text
TextView tv = new TextView(this);
tv.setGravity(Gravity.CENTER_HORIZONTAL);
tv.setText(barcode_data);
l.addView(tv);
}
private static final int WHITE = 0xFFFFFFFF;
private static final int BLACK = 0xFF000000;
Bitmap encodeAsBitmap(String contents, BarcodeFormat format, int img_width, int img_height) throws WriterException {
String contentsToEncode = contents;
if (contentsToEncode == null) {
return null;
}
Map<EncodeHintType, Object> hints = null;
String encoding = guessAppropriateEncoding(contentsToEncode);
if (encoding != null) {
hints = new EnumMap<>(EncodeHintType.class);
hints.put(EncodeHintType.CHARACTER_SET, encoding);
}
MultiFormatWriter writer = new MultiFormatWriter();
BitMatrix result;
try {
result = writer.encode(contentsToEncode, format, img_width, img_height, hints);
} catch (IllegalArgumentException iae) {
// Unsupported format
return null;
}
int width = result.getWidth();
int height = result.getHeight();
int[] pixels = new int[width * height];
for (int y = 0; y < height; y++) {
int offset = y * width;
for (int x = 0; x < width; x++) {
pixels[offset + x] = result.get(x, y) ? BLACK : WHITE;
}
}
Bitmap bitmap = Bitmap.createBitmap(width, height,
Bitmap.Config.ARGB_8888);
bitmap.setPixels(pixels, 0, width, 0, 0, width, height);
return bitmap;
}
private static String guessAppropriateEncoding(CharSequence contents) {
// Very crude at the moment
for (int i = 0; i < contents.length(); i++) {
if (contents.charAt(i) > 0xFF) {
return "UTF-8";
}
}
return null;
}
}

How to generate QR code using Zxing Library?

I am trying to generate a QR code for my app. The user enters some text, and the data is passed to the next activity, which displays the QR code.
Here is my code.
public class QRgenerator extends AppCompatActivity {
ImageView imageView;
String Qrcode;
public static final int WIDTH = 500;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_qrgenerator);
getID();
Intent intent = getIntent();
Qrcode = intent.getStringExtra("Data");
//creating thread to avoid ANR exception
Thread t = new Thread(new Runnable() {
#Override
public void run() {
//the message to be encoded in the qr code.
try {
synchronized (this) {
wait(5000);
//runonUIthread on the main thread
runOnUiThread(new Runnable() {
#Override
public void run() {
try {
Bitmap bitmap = null;
bitmap = encodeasBitmap(Qrcode);
imageView.setImageBitmap(bitmap);
} catch (WriterException e) {
e.printStackTrace();
;
} //end of catch block
} //end of rum method
});
}
} catch (InterruptedException e) {
e.printStackTrace();
}
}
});
t.start();
}
//method that returns the bitmap image of the QRcode.
public void getID() {
imageView = (ImageView) findViewById(R.id.imageView2);
}
public Bitmap encodeasBitmap(String str) throws WriterException {
BitMatrix result;
try {
result = new MultiFormatWriter().encode(str, BarcodeFormat.QR_CODE, WIDTH, WIDTH, null);
} catch (IllegalArgumentException e) {
//unsupported format
return null;
}
int h = result.getHeight();
int w = result.getWidth();
int[] pixels = new int[w * h];
for (int i = 0; i < h; i++) {
int offset = i * w;
for (int j = 0; j < w; j++) {
pixels[offset + j] = result.get(j, i)? R.color.black:R.color.white;
}
}
Bitmap bitmap = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888);
bitmap.setPixels(pixels, 0, 500, 0, 0, w, h);
return bitmap;
}
}
The problem is that when I press the button and go to the next screen, only a white screen appears and there is no QR code in the ImageView. What could be the mistake?
// Encodes 'content' as a 256x256 QR code. Dark modules are black; light
// modules use R.color.guest_pass_background_color. 'flag' is unused but kept
// for caller compatibility. Returns null when encoding fails.
public static Bitmap generateQRBitmap(String content, Context context,int flag){
final int side = 256;
Bitmap result = null;
QRCodeWriter writer = new QRCodeWriter();
try {
BitMatrix matrix = writer.encode(content, BarcodeFormat.QR_CODE, side, side);
result = Bitmap.createBitmap(side, side, Bitmap.Config.RGB_565);
// Resolve the light colour once instead of once per pixel.
final int lightColor = context.getResources().getColor(R.color.guest_pass_background_color);
for (int col = 0; col < side; col++) {
for (int row = 0; row < side; row++) {
result.setPixel(col, row, matrix.get(col, row) ? Color.BLACK : lightColor);
}
}
} catch (WriterException e) {
e.printStackTrace();
}
return result;
}
package com.hellofyc.qrcode;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.support.annotation.IntRange;
import android.support.annotation.NonNull;
import com.google.zxing.BarcodeFormat;
import com.google.zxing.EncodeHintType;
import com.google.zxing.WriterException;
import com.google.zxing.common.BitMatrix;
import com.google.zxing.qrcode.QRCodeWriter;
import com.google.zxing.qrcode.decoder.ErrorCorrectionLevel;
import java.util.HashMap;
import java.util.Map;
// Fluent builder around ZXing's QRCodeWriter: configure content, size, margin,
// error-correction level and an optional centre logo, then call generate().
public class QRCodeHelper {
// Optional logo drawn over the centre of the finished QR code.
private Bitmap mLogo;
private ErrorCorrectionLevel mErrorCorrectionLevel;
// Quiet-zone margin passed to the writer via EncodeHintType.MARGIN.
private int mMargin;
private String mContent;
// Output size in pixels; defaults to 400x400.
private int mWidth = 400, mHeight = 400;
// Static factory; equivalent to calling the implicit no-arg constructor.
public static QRCodeHelper newInstance() {
return new QRCodeHelper();
}
// Sets the optional centre logo; returns this for chaining.
public QRCodeHelper setLogo(Bitmap logo) {
mLogo = logo;
return this;
}
// Sets the QR error-correction level (L/M/Q/H); returns this for chaining.
public QRCodeHelper setErrorCorrectionLevel(ErrorCorrectionLevel level) {
mErrorCorrectionLevel = level;
return this;
}
// Sets the text to encode; returns this for chaining.
public QRCodeHelper setContent(String content) {
mContent = content;
return this;
}
public QRCodeHelper setWidthAndHeight(#IntRange(from = 1) int width, #IntRange(from = 1) int height) {
mWidth = width;
mHeight = height;
return this;
}
public QRCodeHelper setMargin(#IntRange(from = 0) int margin) {
mMargin = margin;
return this;
}
// Encodes mContent as an mWidth x mHeight QR bitmap, overlaying mLogo in the
// centre when one was set. Returns null when ZXing rejects the content.
public Bitmap generate() {
Map<EncodeHintType, Object> hintsMap = new HashMap<>();
hintsMap.put(EncodeHintType.CHARACTER_SET, "utf-8");
hintsMap.put(EncodeHintType.ERROR_CORRECTION, mErrorCorrectionLevel);
hintsMap.put(EncodeHintType.MARGIN, mMargin);
try {
BitMatrix bitMatrix = new QRCodeWriter().encode(mContent, BarcodeFormat.QR_CODE, mWidth, mHeight, hintsMap);
int[] pixels = new int[mWidth * mHeight];
for (int i=0; i<mHeight; i++) {
for (int j=0; j<mWidth; j++) {
// Dark modules are written as 0x00000000 (transparent black);
// NOTE(review): on RGB_565 (no alpha channel) this renders as
// solid black, which is presumably the intent — confirm.
if (bitMatrix.get(j, i)) {
pixels[i * mWidth + j] = 0x00000000;
} else {
pixels[i * mWidth + j] = 0xffffffff;
}
}
}
Bitmap bitmap = Bitmap.createBitmap(pixels, 0, mWidth, mWidth, mHeight, Bitmap.Config.RGB_565);
Bitmap resultBitmap;
if (mLogo != null) {
resultBitmap = addLogo(bitmap, mLogo);
} else {
resultBitmap = bitmap;
}
return resultBitmap;
} catch (WriterException e) {
e.printStackTrace();
}
return null;
}
// True when (x, y) lies strictly inside the QR code's data area, i.e. outside
// the quiet-zone border of mMargin modules (8 px per module).
private boolean isInRect(int x, int y) {
final int border = mMargin * 8;
final boolean insideX = x > border && x < mWidth - border;
final boolean insideY = y > border && y < mHeight - border;
return insideX && insideY;
}
private Bitmap addLogo(#NonNull Bitmap qrCodeBitmap, #NonNull Bitmap logo) {
int qrCodeWidth = qrCodeBitmap.getWidth();
int qrCodeHeight = qrCodeBitmap.getHeight();
int logoWidth = logo.getWidth();
int logoHeight = logo.getHeight();
Bitmap blankBitmap = Bitmap.createBitmap(qrCodeWidth, qrCodeHeight, Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(blankBitmap);
// Paint paint = new Paint();
// paint.setAntiAlias(true);
// paint.setColor(Color.WHITE);
// RectF rect = new RectF(50, 50, 200, 200);
// canvas.drawRoundRect(rect, logoWidth, logoHeight, paint);
canvas.drawBitmap(qrCodeBitmap, 0, 0, null);
canvas.save(Canvas.ALL_SAVE_FLAG);
float scaleSize = 1.0f;
while ((logoWidth / scaleSize) > (qrCodeWidth / 5) || (logoHeight / scaleSize) > (qrCodeHeight / 5)) {
scaleSize *= 2;
}
float sx = 1.0f / scaleSize;
canvas.scale(sx, sx, qrCodeWidth / 2, qrCodeHeight / 2);
canvas.drawBitmap(logo, (qrCodeWidth - logoWidth) / 2, (qrCodeHeight - logoHeight) / 2, null);
canvas.restore();
return blankBitmap;
}
You have to initialize it after onCreate() has run; the views are not initialized immediately — they take some time to set up — so put your initialization code in the onStart() method.
You have to mention in gradle
dependencies {
compile fileTree(dir: 'libs', include: ['*.jar'])
androidTestCompile('com.android.support.test.espresso:espresso-core:2.2.2', {
exclude group: 'com.android.support', module: 'support-annotations'
})
compile 'com.android.support:appcompat-v7:25.0.1'
testCompile 'junit:junit:4.12'
//add this dependency
compile 'com.journeyapps:zxing-android-embedded:3.4.0'
}
Declare the scanner object globally in your Activity:
//qr code scanner object
private IntentIntegrator qrScan;
put this line in onCreate() method.
//intializing scan object
qrScan = new IntentIntegrator(this);
Now call this method where you want on Button
#Override
public void onClick(View view) {
//initiating the qr code scan
qrScan.initiateScan();
}

How to generate an Barcode and convert it to Bitmap using new Google Vision API?

How to generate an Barcode and convert it to Bitmap using new Google Vision API?
// Builds the e-mail payload of a Vision-API Barcode object by hand.
Barcode barcode = new Barcode();
Barcode.Email email = new Barcode.Email();
// BUG FIX: the paste mangled '@' into '#' in the sample address.
email.address = "my_email@gmail.com";
// BUG FIX: the string literal was missing its closing quote.
email.subject = "My Subject";
email.body = "My body content.";
barcode.email = email;
//Implement conversion
Bitmap barcodeImage = barcodeToBitmap(barcode);// I do know this part.
You can detect your barcodes using Google Vision API and then use ZXing to generate the barcodes.
You could try something like this, it uses zxing library:
// Translates the Vision-API numeric format into a ZXing BarcodeFormat and
// renders the barcode; returns null when ZXing cannot encode the contents.
public static Bitmap getBitmap(String barcode, int barcodeType, int width, int height)
{
BarcodeFormat format = convertToZXingFormat(barcodeType);
try {
return encodeAsBitmap(barcode, format, width, height);
} catch (WriterException e) {
e.printStackTrace();
return null;
}
}
// Maps a numeric barcode-format constant onto ZXing's BarcodeFormat enum.
// NOTE(review): the int values look like the bit-flag constants of
// com.google.android.gms.vision.barcode.Barcode.* — confirm against that
// class before adding new cases. Unknown values fall back to CODE_128.
private static BarcodeFormat convertToZXingFormat(int format)
{
switch (format)
{
case 8:
return BarcodeFormat.CODABAR;
case 1:
return BarcodeFormat.CODE_128;
case 2:
return BarcodeFormat.CODE_39;
case 4:
return BarcodeFormat.CODE_93;
case 32:
return BarcodeFormat.EAN_13;
case 64:
return BarcodeFormat.EAN_8;
case 128:
return BarcodeFormat.ITF;
case 512:
return BarcodeFormat.UPC_A;
case 1024:
return BarcodeFormat.UPC_E;
//default 128?
default:
return BarcodeFormat.CODE_128;
}
}
/**************************************************************
* getting from com.google.zxing.client.android.encode.QRCodeEncoder
*
* See the sites below
* http://code.google.com/p/zxing/
* http://code.google.com/p/zxing/source/browse/trunk/android/src/com/google/zxing/client/android/encode/EncodeActivity.java
* http://code.google.com/p/zxing/source/browse/trunk/android/src/com/google/zxing/client/android/encode/QRCodeEncoder.java
*/
private static final int WHITE = 0xFFFFFFFF;
private static final int BLACK = 0xFF000000;
// Renders 'contents' in the requested ZXing format as a black-on-white
// ARGB_8888 bitmap. Returns null when the contents are null or unsupported
// by the chosen format.
private static Bitmap encodeAsBitmap(String contents, BarcodeFormat format, int img_width, int img_height) throws WriterException
{
if (contents == null) {
return null;
}
// Only pass a charset hint when the text needs one (non-Latin-1 chars).
Map<EncodeHintType, Object> hints = null;
String charset = guessAppropriateEncoding(contents);
if (charset != null) {
hints = new EnumMap<>(EncodeHintType.class);
hints.put(EncodeHintType.CHARACTER_SET, charset);
}
BitMatrix matrix;
try {
matrix = new MultiFormatWriter().encode(contents, format, img_width, img_height, hints);
} catch (IllegalArgumentException unsupported) {
// The writer rejected the contents for this format.
return null;
}
final int w = matrix.getWidth();
final int h = matrix.getHeight();
final int[] argb = new int[w * h];
int idx = 0;
for (int row = 0; row < h; row++) {
for (int col = 0; col < w; col++, idx++) {
argb[idx] = matrix.get(col, row) ? BLACK : WHITE;
}
}
Bitmap out = Bitmap.createBitmap(w, h,
Bitmap.Config.ARGB_8888);
out.setPixels(argb, 0, w, 0, 0, w, h);
return out;
}
// Heuristic charset pick: any character outside ISO-8859-1 forces UTF-8;
// otherwise return null so the writer uses its default charset.
private static String guessAppropriateEncoding(CharSequence contents) {
final int n = contents.length();
for (int idx = 0; idx < n; idx++) {
if (contents.charAt(idx) > 0xFF) {
return "UTF-8";
}
}
return null;
}
}
You can use zxing lib to generate bitmap from string barcode
// Encodes the string as a 240x80 CODE_128 barcode; returns null on failure.
public static Bitmap getBitmapFromBarcode(String barcode) {
try {
return new BarcodeEncoder().encodeBitmap(barcode, BarcodeFormat.CODE_128, 240, 80);
} catch (Exception e) {
e.printStackTrace();
return null;
}
}
To add zxing to your project go to https://github.com/journeyapps/zxing-android-embedded

Full bitmap is not being populated with pixel data after parallelization.

I have an app that processes a bitmap with a spherize distortion. You can touch the screen and set the radius of a circle that will contain the distortion. Once the distort button is pressed a subset bitmap is created the same size of the radius and this subset bitmap is sent for processing. Once the subset is distorted it is put back on the original bitmap as an overlay using the x,y cords from the original touch event.
Everything works fine apart from that the last line of pixels (across the bottom) of the subset bitmap is not populated with pixel data. It looks like there is a black line at the bottom of the subset bitmap. The distortion class uses parallel programming. This checks the hardware at runtime to find out how many processor are available and the splits the bitmap up over the processor accordingly. I've had help with the parallelization and not sure how to find out why the black line is present. The looping seems to be in order, any ideas? Thanks in advance Matt.
.
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.FutureTask;
import android.graphics.Bitmap;
import android.os.Debug;
import android.util.Log;
public class MultiRuntimeProcessorFilter {
private static final String TAG = "mrpf";
private int x = 0;
private Bitmap input = null;
private int radius;
public void createBitmapSections(int nOp, int[] sections){
int processors = nOp;
int jMax = input.getHeight();
int aSectionSize = (int) Math.ceil(jMax/processors);
Log.e(TAG, "++++++++++ sections size = "+aSectionSize);
int k = 0;
for(int h=0; h<processors+1; h++){
sections[h] = k;
k+= aSectionSize;
}
}// end of createBitmapSections()
#SuppressWarnings("unchecked")
public Bitmap barrel (Bitmap input, float k, int r){
this.radius = r;
this.input = input;
int []arr = new int[input.getWidth()*input.getHeight()];
int nrOfProcessors = Runtime.getRuntime().availableProcessors();
int[] sections = new int[nrOfProcessors+1];
createBitmapSections(nrOfProcessors,sections);
ExecutorService threadPool = Executors.newFixedThreadPool(nrOfProcessors);
for(int g=0; g<sections.length;g++){
Log.e(TAG, "++++++++++ sections= "+sections[g]);
}
// ExecutorService threadPool = Executors.newFixedThreadPool(nrOfProcessors);
Object[] task = new Object[nrOfProcessors];
for(int z = 0; z < nrOfProcessors; z++){
task[z] = (FutureTask<PartialResult>) threadPool.submit(new PartialProcessing(sections[z], sections[z+1] - 1, input, k));
Log.e(TAG, "++++++++++ task"+z+"= "+task[z].toString());
}
PartialResult[] results = new PartialResult[nrOfProcessors];
try{
for(int t = 0; t < nrOfProcessors; t++){
results[t] = ((FutureTask<PartialResult>) task[t]).get();
results[t].fill(arr);
}
}catch(Exception e){
e.printStackTrace();
}
Bitmap dst2 = Bitmap.createBitmap(arr,input.getWidth(),input.getHeight(),input.getConfig());
return dst2;
}//end of barrel()
public class PartialResult {
int startP;
int endP;
int[] storedValues;
public PartialResult(int startp, int endp, Bitmap input){
this.startP = startp;
this.endP = endp;
this.storedValues = new int[input.getWidth()*input.getHeight()];
}
public void addValue(int p, int result) {
storedValues[p] = result;
}
public void fill(int[] arr) {
for (int p = startP; p < endP; p++){
for(int b=0;b<radius;b++,x++)
arr[x] = storedValues[x];
}
Log.e(TAG, "++++++++++ x ="+x);
}
}//end of partialResult
public class PartialProcessing implements Callable<PartialResult> {
int startJ;
int endJ;
private int[] scalar;
private float xscale;
private float yscale;
private float xshift;
private float yshift;
private float thresh = 1;
private int [] s1;
private int [] s2;
private int [] s3;
private int [] s4;
private int [] s;
private Bitmap input;
private float k;
public PartialProcessing(int startj, int endj, Bitmap input, float k) {
this.startJ = startj;
this.endJ = endj;
this.input = input;
this.k = k;
s = new int[4];
scalar = new int[4];
s1 = new int[4];
s2 = new int[4];
s3 = new int[4];
s4 = new int[4];
}
int [] getARGB(Bitmap buf,int x, int y){
int rgb = buf.getPixel(y, x); // Returns by default ARGB.
// int [] scalar = new int[4];
// scalar[0] = (rgb >>> 24) & 0xFF;
scalar[1] = (rgb >>> 16) & 0xFF;
scalar[2] = (rgb >>> 8) & 0xFF;
scalar[3] = (rgb >>> 0) & 0xFF;
return scalar;
}
float getRadialX(float x,float y,float cx,float cy,float k){
x = (x*xscale+xshift);
y = (y*yscale+yshift);
float res = x+((x-cx)*k*((x-cx)*(x-cx)+(y-cy)*(y-cy)));
return res;
}
float getRadialY(float x,float y,float cx,float cy,float k){
x = (x*xscale+xshift);
y = (y*yscale+yshift);
float res = y+((y-cy)*k*((x-cx)*(x-cx)+(y-cy)*(y-cy)));
return res;
}
float calc_shift(float x1,float x2,float cx,float k){
float x3 = (float)(x1+(x2-x1)*0.5);
float res1 = x1+((x1-cx)*k*((x1-cx)*(x1-cx)));
float res3 = x3+((x3-cx)*k*((x3-cx)*(x3-cx)));
if(res1>-thresh && res1 < thresh)
return x1;
if(res3<0){
return calc_shift(x3,x2,cx,k);
}
else{
return calc_shift(x1,x3,cx,k);
}
}
void sampleImage(Bitmap arr, float idx0, float idx1)
{
// s = new int [4];
if(idx0<0 || idx1<0 || idx0>(arr.getHeight()-1) || idx1>(arr.getWidth()-1)){
s[0]=0;
s[1]=0;
s[2]=0;
s[3]=0;
return;
}
float idx0_fl=(float) Math.floor(idx0);
float idx0_cl=(float) Math.ceil(idx0);
float idx1_fl=(float) Math.floor(idx1);
float idx1_cl=(float) Math.ceil(idx1);
s1 = getARGB(arr,(int)idx0_fl,(int)idx1_fl);
s2 = getARGB(arr,(int)idx0_fl,(int)idx1_cl);
s3 = getARGB(arr,(int)idx0_cl,(int)idx1_cl);
s4 = getARGB(arr,(int)idx0_cl,(int)idx1_fl);
float x = idx0 - idx0_fl;
float y = idx1 - idx1_fl;
// s[0]= (int) (s1[0]*(1-x)*(1-y) + s2[0]*(1-x)*y + s3[0]*x*y + s4[0]*x*(1-y));
s[1]= (int) (s1[1]*(1-x)*(1-y) + s2[1]*(1-x)*y + s3[1]*x*y + s4[1]*x*(1-y));
s[2]= (int) (s1[2]*(1-x)*(1-y) + s2[2]*(1-x)*y + s3[2]*x*y + s4[2]*x*(1-y));
s[3]= (int) (s1[3]*(1-x)*(1-y) + s2[3]*(1-x)*y + s3[3]*x*y + s4[3]*x*(1-y));
}
#Override public PartialResult call() {
PartialResult partialResult = new PartialResult(startJ, endJ,input);
float centerX=input.getWidth()/2; //center of distortion
float centerY=input.getHeight()/2;
int width = input.getWidth(); //image bounds
int height = input.getHeight();
xshift = calc_shift(0,centerX-1,centerX,k);
float newcenterX = width-centerX;
float xshift_2 = calc_shift(0,newcenterX-1,newcenterX,k);
yshift = calc_shift(0,centerY-1,centerY,k);
float newcenterY = height-centerY;
float yshift_2 = calc_shift(0,newcenterY-1,newcenterY,k);
xscale = (width-xshift-xshift_2)/width;
yscale = (height-yshift-yshift_2)/height;
int p = startJ*radius;
int origPixel = 0;
int color = 0;
int i;
for (int j = startJ; j < endJ; j++){
for ( i = 0; i < width; i++, p++){
origPixel = input.getPixel(i,j);
float x = getRadialX((float)j,(float)i,centerX,centerY,k);
float y = getRadialY((float)j,(float)i,centerX,centerY,k);
sampleImage(input,x,y);
color = ((s[1]&0x0ff)<<16)|((s[2]&0x0ff)<<8)|(s[3]&0x0ff);
//Log.e(TAG, "radius = "+radius);
if(((i-centerX)*(i-centerX) + (j-centerY)*(j-centerY)) <= radius*(radius/4)){
partialResult.addValue(p, color);
}else{
partialResult.addValue(p, origPixel);
}
}//end of inner for
}//end of outer for
return partialResult;
}//end of call
}// end of partialprocessing
}//end of MultiProcesorFilter
.
[update] I'll post the view class that calls the barrel method. this class gets the touch events and sets the radius of the distortion prior to processing. You can see more how everything is set up before the distortion is applied.
// Custom view that shows the decoded photo, lets the user place two circles
// by touch, and overlays the spherize-distorted crops produced by
// MultiRuntimeProcessorFilter.
public class TouchView extends View{
// Source image file read from external storage in the constructor.
private File tempFile;
private byte[] imageArray;
// Decoded, mutable copy of the photo that everything is drawn onto.
private Bitmap bgr;
// Square crops around each touch point, sent to the distortion filter.
private Bitmap crop;
private Bitmap crop2;
// Distorted crops posted back from the worker thread in findCirclePixels().
private Bitmap overLay;
private Bitmap overLay2;
private Paint pTouch;
// Centre of the first circle (x, y) and of the second circle (a, b).
private float centreX;
private float centreY;
private float centreA;
private float centreB;
private Boolean xyFound = false;
private Boolean abFound = false;
// Distortion strength; divided down before being passed to barrel().
private int Progress = 1;
private static final String TAG = "*********TouchView";
// Alternative filter implementations kept from earlier experiments.
private Filters f = null;
private Filters f2 = null;
private boolean bothCirclesInPlace = false;
private MultiProcessorFilter mpf;
private MultiProcessorFilter mpf2;
private MultiRuntimeProcessorFilter mrpf;
private MultiRuntimeProcessorFilter mrpf2;
// Radius (px) of each touch circle; the crops are radius*2 on a side.
private int radius = 50;
protected boolean isLocked = false;
protected boolean isSaved = false;
protected byte [] data;
private float distance1;
private float distance2;
// Programmatic-construction variant; the real initialization happens in the
// (Context, AttributeSet) constructor used when inflating from XML.
public TouchView(Context context) {
super(context);
}
// XML-inflation constructor: loads image.jpeg from external storage,
// decodes it into the working bitmap, and prepares the touch paint.
public TouchView(Context context, AttributeSet attr) {
super(context,attr);
Log.e(TAG, "++++++++++ inside touchview constructor");
tempFile = new File(Environment.getExternalStorageDirectory().
getAbsolutePath() + "/"+"image.jpeg");
imageArray = new byte[(int)tempFile.length()];
try{
// BUG FIX / cleanup: read the file in one call instead of one byte at
// a time via readByte(), and drop the available() loop condition —
// available() is only an estimate and is not a reliable EOF test.
DataInputStream dis = new DataInputStream(
new BufferedInputStream(new FileInputStream(tempFile)));
try {
dis.readFully(imageArray);
} finally {
dis.close();
}
} catch (Exception e) {
e.printStackTrace();
}
// Set up the circle paint first so the view is usable even when the
// image fails to decode.
pTouch = new Paint(Paint.ANTI_ALIAS_FLAG);
pTouch.setColor(Color.RED);
pTouch.setStyle(Paint.Style.STROKE);
Bitmap bm = BitmapFactory.decodeByteArray(imageArray, 0, imageArray.length);
if(bm == null){
// BUG FIX: the original logged "bm = null" and then dereferenced bm
// anyway, crashing with an NPE. Bail out; the view just stays empty.
Log.e(TAG, "bm = null");
return;
}
Log.e(TAG, "bm = not null");
bgr = bm.copy(bm.getConfig(), true);
overLay = null;
overLay2 = null;
bm.recycle();
}// end of touchView constructor
public void findCirclePixels(){
//f = new Filters();
// f2 = new Filters();
//mpf = new MultiProcessorFilter();
//mpf2 = new MultiProcessorFilter();
mrpf = new MultiRuntimeProcessorFilter();
mrpf2 = new MultiRuntimeProcessorFilter();
crop = Bitmap.createBitmap(bgr,Math.max((int)centreX-radius,0),Math.max((int)centreY-radius,0),radius*2,radius*2);
crop2 = Bitmap.createBitmap(bgr,Math.max((int)centreA-radius,0),Math.max((int)centreB-radius,0),radius*2,radius*2);
new Thread(new Runnable() {
public void run() {
float prog = (float)Progress/150001;
// final Bitmap bgr3 = f.barrel(crop,prog);
// final Bitmap bgr4 = f2.barrel(crop2,prog);
//final Bitmap bgr3 = mpf.barrel(crop,prog);
// final Bitmap bgr4 = mpf2.barrel(crop2,prog);
final Bitmap bgr3 = mrpf.barrel(crop,prog,radius*2);
final Bitmap bgr4 = mrpf2.barrel(crop2,prog, radius*2);
TouchView.this.post(new Runnable() {
public void run() {
TouchView.this.overLay = bgr3;
TouchView.this.overLay2 = bgr4;
TouchView.this.invalidate();
}
});
}
}).start();
}// end of findCirclePixels()
#Override
public boolean onTouchEvent(MotionEvent ev) {
switch (ev.getAction()) {
case MotionEvent.ACTION_DOWN: {
int w = getResources().getDisplayMetrics().widthPixels;
int h = getResources().getDisplayMetrics().heightPixels;
if(ev.getX() <radius || ev.getX() > w - radius ){
// Log.e(TAG, "touch event is too near width edge!!!!!!!!!!");
showToastMessage("You touched too near the screen edge");
break;
}
if(ev.getY() <radius || ev.getY() > h - radius ){
// Log.e(TAG, "touch event is too near height edge!!!!!!!!!!");
showToastMessage("You touched too near the screen edge");
break;
}
distance1 = (float) Math.sqrt(Math.pow(ev.getX() - centreX, 2.0) + Math.pow(ev.getY() - centreY, 2.0));
distance2 = (float) Math.sqrt(Math.pow(ev.getX() - centreA, 2.0) + Math.pow(ev.getY() - centreB, 2.0));
Log.e(TAG, "dist1 = "+distance1 +" distance2 = " + distance2);
if(isLocked == false){
if(abFound == false){
centreA = (int) ev.getX();
centreB = (int) ev.getY();
abFound = true;
invalidate();
}
if(xyFound == false){
centreX = (int) ev.getX();
centreY = (int) ev.getY();
xyFound = true;
invalidate();
}
if(abFound == true && xyFound == true){
bothCirclesInPlace = true;
}
break;
}
}
case MotionEvent.ACTION_MOVE: {
if(isLocked == false){
/*if(xyFound == false){
centreX = (int) ev.getX()-70;
centreY = (int) ev.getY()-70;
xyFound = true;
}else{
centreA = (int) ev.getX()-70;
centreB = (int) ev.getY()-70;
bothCirclesInPlace = true;
invalidate();
}
*/
if(distance1 < distance2){
centreX = (int) ev.getX();
centreY = (int) ev.getY();
xyFound = true;
invalidate();
}else{
centreA = (int) ev.getX();
centreB = (int) ev.getY();
bothCirclesInPlace = true;
invalidate();
}
break;
}
}
case MotionEvent.ACTION_UP:
break;
}
return true;
}//end of onTouchEvent
public void initSlider(final HorizontalSlider slider)
{
slider.setOnProgressChangeListener(changeListener);
}
private OnProgressChangeListener changeListener = new OnProgressChangeListener() {
#Override
public void onProgressChanged(View v, int progress) {
if(isLocked == true){
setProgress(progress);
}else{
Toast.makeText(TouchView.this.getContext(), "press lock before applying distortion ", Toast.LENGTH_SHORT).show();
}
}
};
#Override
public void onDraw(Canvas canvas){
super.onDraw(canvas);
Log.e(TAG, "******about to draw bgr ");
canvas.drawBitmap(bgr, 0, 0, null);
if(isSaved == false){
if (isLocked == true && bothCirclesInPlace == true){
if(overLay != null)
canvas.drawBitmap(overLay, centreX-radius, centreY-radius, null);
if(overLay2 != null)
canvas.drawBitmap(overLay2, centreA-radius, centreB-radius, null);
}
if(bothCirclesInPlace == true && isLocked == false){
canvas.drawCircle(centreX, centreY, radius,pTouch);
canvas.drawCircle(centreA, centreB, radius,pTouch);
}
}else{
// String mFilePath : Absolute Path of the file to be saved
// Bitmap mBitmap1 : First bitmap. This goes as background.
// Bitmap mCBitmap : Bitmap associated with the Canvas. All draws on the canvas are drawn into this bitmap.
// Bitmap mBitmap2 : Second bitmap. This goes on top of first (in this example serves as foreground.
// Paint mPaint1 : Paint to draw first bitmap
// Paint mPaint2 : Paint to draw second bitmap on top of first bitmap
isSaved = false;
Bitmap mCBitmap = Bitmap.createBitmap(bgr.getWidth(), bgr.getHeight(), bgr.getConfig());
Canvas tCanvas = new Canvas(mCBitmap);
tCanvas.drawBitmap(bgr, 0, 0, null);
if(overLay != null)
tCanvas.drawBitmap(overLay, centreX-radius, centreY-radius, null);
if(overLay2 != null)
tCanvas.drawBitmap(overLay2, centreA-radius, centreB-radius, null);
canvas.drawBitmap(mCBitmap, 0, 0, null);
ByteArrayOutputStream bos = new ByteArrayOutputStream();
mCBitmap.compress(CompressFormat.JPEG, 100 /*ignored for PNG*/, bos);
data = bos.toByteArray();
try {
bos.flush();
bos.close();
} catch (IOException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
try {
bos.flush();
bos.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
if ( data == null){
Log.e(TAG, "data in touchview before save clicked is null");
}else{
Log.e(TAG, "data in touchview before saved clicked is not null");
}
}
}//end of onDraw
protected void setProgress(int progress2) {
Log.e(TAG, "***********in SETPROGRESS");
this.Progress = progress2;
findCirclePixels();
}
public int getRadius() {
return radius;
}
public void setRadius(int r) {
radius = r;
invalidate();
}
public void showToastMessage(String mess){
Toast.makeText(TouchView.this.getContext(), mess.toString(), Toast.LENGTH_SHORT).show();
}
}
My guess would be that when the bottom of the image is processed, the operation operates partially on the input image, and partially outside of the image, due to the radius in your barrel method. Edges can often cause issues when operating outside the bounds of an actual image, giving 0 as a result, which can cause a black line...
I suggest to try to increase the size of your image:
#SuppressWarnings("unchecked")
public Bitmap barrel (Bitmap input, float k, int r){
this.radius = r;
this.input = input;
// Add an offset to the width and height equal to the radius
// To avoid performing processing outside the bounds of the input image
int []arr = new int[(input.getWidth() + this.radius) * (input.getHeight() + this.radius)];
// Continue...
Again, this is my first guess and I haven't had time to verify it right now, but investigating the edge behaviour first would be my recommendation.
Just a guess — what happens if you use this:
BitmapDrawable bmpd = new BitmapDrawable(input);
int []arr = new int[(bmpd.getIntrinsicWidth() + this.radius) * (bmpd. getIntrinsicHeight() + this.radius)];
Your problem most likely has to do with your assumed coordinate system of the image and of the spherize algorithm.
See MathWorks Image Coordinate Systems
I expect that you are treating your input/output images according to the Pixel Indices method, but the spherize algorithm is processing your data using the Spatial Coordinate System. This often causes the outermost border of a processed image to be blank because the algorithm has translated your image up and to the left by 0.5 pixels. Coordinate 3 in the original system is now 3.5 in the new system and has fallen outside the bounds of computation.
This is actually a huge problem in 2D to 3D image processing algorithms as the projection between the two spaces is not exactly trivial and tiny implementation differences cause noticeable problems. Notice how the Pixel Indices coordinate system is 3x3, but the Spatial Coordinate system is essentially 4x4.
Try setting your spherize barrel to be width+1/height+1 instead of width/height and see if that fills out your missing row.

Categories

Resources