Hello everyone, I'm trying to create a PDF programmatically from a set of images. I load the images into a ListView from an ArrayList, and after displaying them, a tap on the floating action button should create a new PDF file. The file is created successfully, but unfortunately only one image out of the five appears in the PDF. For reference, here is the code I'm working with. Please help me create a PDF that shows the full list of images.
import android.Manifest;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.drawable.Drawable;
import android.graphics.pdf.PdfDocument;
import android.os.Build;
import android.os.Environment;
import android.support.annotation.RequiresApi;
import android.support.design.widget.FloatingActionButton;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.Display;
import android.view.View;
import android.view.WindowManager;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import com.itextpdf.text.BadElementException;
import com.itextpdf.text.Document;
import com.itextpdf.text.DocumentException;
import com.itextpdf.text.Image;
import com.itextpdf.text.PageSize;
import com.itextpdf.text.Paragraph;
import com.itextpdf.text.pdf.PdfWriter;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.FileHandler;
public class DisplayPreview extends AppCompatActivity {
String TAG = DisplayPreview.class.getName();
ListView imagesLV;
FloatingActionButton fabPDF;
TextView tv_link;
ImageView iv_image;
LinearLayout ll_pdflayout;
public static int REQUEST_PERMISSIONS = 1;
boolean boolean_permission;
boolean boolean_save;
Bitmap bitmap;
ProgressDialog progressDialog;
ArrayList<String> selectedImagesList;
String targetPdf;
DisplayPreviewAdapter displayPreviewAdapter;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_display_preview);
Intent intent = getIntent();
selectedImagesList = intent.getStringArrayListExtra("selectedArray");
fn_permission();
imagesLV = findViewById(R.id.imagesLV);
displayPreviewAdapter = new DisplayPreviewAdapter(this, selectedImagesList);
imagesLV.setAdapter(displayPreviewAdapter);
fabPDF = findViewById(R.id.fabPDF);
fabPDF.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (boolean_save) {
Intent intent = new Intent(getApplicationContext(), PDFViewActivity.class);
intent.putExtra("pdfFile", targetPdf);
startActivity(intent);
} else {
if (boolean_permission) {
progressDialog = new ProgressDialog(DisplayPreview.this);
progressDialog.setMessage("Please wait");
bitmap = loadBitmapFromView(imagesLV, imagesLV.getWidth(), imagesLV.getHeight());
makePDF();
} else {
}
makePDF();
}
}
});
}
private void makePDF() {
DisplayMetrics displaymetrics = new DisplayMetrics();
this.getWindowManager().getDefaultDisplay().getMetrics(displaymetrics);
float hight = displaymetrics.heightPixels;
float width = displaymetrics.widthPixels;
int convertHighet = (int) hight, convertWidth = (int) width;
// Resources mResources = getResources();
// Bitmap bitmap = BitmapFactory.decodeResource(mResources, R.drawable.img_1);
PdfDocument document = new PdfDocument();
PdfDocument.PageInfo pageInfo = new PdfDocument.PageInfo.Builder(convertWidth, convertHighet, 1).create();
PdfDocument.Page page = document.startPage(pageInfo);
Canvas canvas = page.getCanvas();
Paint paint = new Paint();
canvas.drawPaint(paint);
bitmap = Bitmap.createScaledBitmap(getBitmapFromView(imagesLV), convertWidth, convertHighet, true);
// bitmap = Bitmap.createScaledBitmap(bitmap, convertWidth, convertHighet, true);
paint.setColor(Color.BLUE);
canvas.drawBitmap(bitmap, 0, 0, null);
document.finishPage(page);
// write the document content
// String targetPdf = "/sdcard/test.pdf";
targetPdf = "mnt/sdcard/testing_1.pdf";
File filePath = new File(targetPdf);
try {
document.writeTo(new FileOutputStream(filePath));
boolean_save = true;
} catch (IOException e) {
e.printStackTrace();
Toast.makeText(this, "Something wrong: " + e.toString(), Toast.LENGTH_LONG).show();
}
// close the document
document.close();
}
public static Bitmap loadBitmapFromView(View v, int width, int height) {
Bitmap b = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(b);
v.draw(c);
return b;
}
public static Bitmap getBitmapFromView(View view) {
Bitmap returnedBitmap = Bitmap.createBitmap(view.getWidth(), view.getHeight(), Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(returnedBitmap);
Drawable bgDrawable = view.getBackground();
if (bgDrawable != null)
bgDrawable.draw(canvas);
else
canvas.drawColor(Color.WHITE);
view.draw(canvas);
return returnedBitmap;
}
private void fn_permission() {
if ((ContextCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.READ_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) ||
(ContextCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED)) {
if ((ActivityCompat.shouldShowRequestPermissionRationale(DisplayPreview.this, android.Manifest.permission.READ_EXTERNAL_STORAGE))) {
} else {
ActivityCompat.requestPermissions(DisplayPreview.this, new String[]{android.Manifest.permission.READ_EXTERNAL_STORAGE},
REQUEST_PERMISSIONS);
}
if ((ActivityCompat.shouldShowRequestPermissionRationale(DisplayPreview.this, Manifest.permission.WRITE_EXTERNAL_STORAGE))) {
} else {
ActivityCompat.requestPermissions(DisplayPreview.this, new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE},
REQUEST_PERMISSIONS);
}
} else {
boolean_permission = true;
}
}
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == REQUEST_PERMISSIONS) {
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
boolean_permission = true;
} else {
Toast.makeText(getApplicationContext(), "Please allow the permission", Toast.LENGTH_LONG).show();
}
}
}
}
After hours of struggle I found my own answer for creating multiple pages in a single PDF on Android, along with annotating an image with text. Here is the source code:
import android.annotation.SuppressLint;
import android.app.ProgressDialog;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.Handler;
import android.support.design.widget.FloatingActionButton;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.text.Layout;
import android.text.StaticLayout;
import android.text.TextPaint;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.View;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import com.itextpdf.text.Document;
import com.itextpdf.text.DocumentException;
import com.itextpdf.text.Image;
import com.itextpdf.text.PageSize;
import com.itextpdf.text.pdf.PdfWriter;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.MalformedURLException;
import java.util.ArrayList;
public class DisplayPreview extends AppCompatActivity {
String TAG = DisplayPreview.class.getName();
ListView imagesLV;
FloatingActionButton fabPDF;
Bitmap anImage;
TextView tv_link;
ImageView iv_image;
LinearLayout ll_pdflayout;
public static int REQUEST_PERMISSIONS = 1;
boolean boolean_permission;
boolean boolean_save;
Bitmap bitmap;
ProgressDialog progressDialog;
ArrayList<String> selectedImagesList;
String targetPdf;
DisplayPreviewAdapter displayPreviewAdapter;
String familyStatus, userNameStr;
public static final Integer[] IMAGES = {
R.drawable.img_1,
R.drawable.img_2,
R.drawable.img_3,
R.drawable.img_4,
R.drawable.img_5,
R.drawable.img_6,
R.drawable.img_7
// ,R.drawable.img_8,
// R.drawable.img_9
};
private ProgressDialog pDialog;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_display_preview);
Intent intent = getIntent();
selectedImagesList = intent.getStringArrayListExtra("selectedArray");
familyStatus = intent.getStringExtra("familyStatus");
userNameStr = intent.getStringExtra("userNameStr");
targetPdf = "mnt/sdcard/testing_2.pdf";
imagesLV = findViewById(R.id.imagesLV);
displayPreviewAdapter = new DisplayPreviewAdapter(this, selectedImagesList, IMAGES);
// imagesLV.setAdapter(displayPreviewAdapter);
fabPDF = findViewById(R.id.fabPDF);
Drawable myDrawable = getResources().getDrawable(R.drawable.img_1);
anImage = ((BitmapDrawable) myDrawable).getBitmap();
createMultiPagePDF();
fabPDF.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (boolean_save) {
Intent intent = new Intent(getApplicationContext(), PDFViewActivity.class);
intent.putExtra("pdfFile", targetPdf);
startActivity(intent);
} else {
if (boolean_permission) {
progressDialog = new ProgressDialog(DisplayPreview.this);
progressDialog.setMessage("Please wait");
// bitmap = loadBitmapFromView(imagesLV, imagesLV.getWidth(), imagesLV.getHeight());
bitmap = loadBitmapFromView(imagesLV, imagesLV.getWidth(), imagesLV.getHeight());
// makePDF();
createMultiplePDF();
} else {
}
createMultiplePDF();
// makePDF();
}
}
});
}
private void createMultiPagePDF() {
pDialog = new ProgressDialog(DisplayPreview.this);
pDialog.setMessage("Please wait...Creating Invitation");
pDialog.setCancelable(false);
pDialog.show();
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
// This method will be executed once the timer is over
// Start your app main activity
if (boolean_save) {
Intent intent = new Intent(getApplicationContext(), PDFViewActivity.class);
intent.putExtra("pdfFile", targetPdf);
startActivity(intent);
} else {
if (boolean_permission) {
progressDialog = new ProgressDialog(DisplayPreview.this);
progressDialog.setMessage("Please wait");
// bitmap = loadBitmapFromView(imagesLV, imagesLV.getWidth(), imagesLV.getHeight());
bitmap = loadBitmapFromView(imagesLV, imagesLV.getWidth(), imagesLV.getHeight());
// makePDF();
createMultiplePDF();
} else {
}
createMultiplePDF();
// makePDF();
}
}
}, 4000);
}
public static Bitmap loadBitmapFromView(View v, int width, int height) {
Bitmap b = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(b);
v.draw(c);
return b;
}
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == REQUEST_PERMISSIONS) {
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
boolean_permission = true;
} else {
Toast.makeText(getApplicationContext(), "Please allow the permission", Toast.LENGTH_LONG).show();
}
}
}
public void createMultiplePDF() {
// targetPdf = "mnt/sdcard/testing_1.pdf";
targetPdf = "mnt/sdcard/" + userNameStr + ".pdf";
File filePath = new File(targetPdf);
// Document document = new Document();
Document document = new Document(PageSize.A4, 0, 0, 0, 0);
// step 2
PdfWriter writer = null;
try {
writer = PdfWriter.getInstance(document, new FileOutputStream(filePath));
} catch (DocumentException e) {
e.printStackTrace();
} catch (FileNotFoundException e) {
e.printStackTrace();
}
// step 3
document.open();
// step 4
try {
Drawable myDrawable = null;
for (int i = 0; i < selectedImagesList.size(); i++) {
//
String imagePath = selectedImagesList.get(i).replaceAll("^\"|\"$", "");
Log.i(TAG, "Image Path is :: 271 :: " + imagePath);
// if (imagePath.equals("R.drawable.img_8")) {
if (imagePath.equals(userNameStr)) {
Bitmap processedBitmap = ProcessingBitmap();
ByteArrayOutputStream stream = new ByteArrayOutputStream();
processedBitmap.compress(Bitmap.CompressFormat.JPEG, 40, stream);
Image myImg = Image.getInstance(stream.toByteArray());
DisplayMetrics displaymetrics = new DisplayMetrics();
this.getWindowManager().getDefaultDisplay().getMetrics(displaymetrics);
float hight = (float) ((displaymetrics.heightPixels / 2) * (1.1));
float width = (float) ((displaymetrics.widthPixels / 2) * (1.6));
// myImg.scaleToFit(hight, width);
myImg.scaleToFit(PageSize.ARCH_A);
myImg.setAlignment(Image.ALIGN_CENTER);
document.add(myImg);
writer.setPageEmpty(false);
}
if (imagePath.equals("R.drawable.img_2")) {
myDrawable = getResources().getDrawable(R.drawable.img_2);
Bitmap myLogo = ((BitmapDrawable) myDrawable).getBitmap();
ByteArrayOutputStream stream = new ByteArrayOutputStream();
myLogo.compress(Bitmap.CompressFormat.JPEG, 40, stream);
Image myImg = Image.getInstance(stream.toByteArray());
DisplayMetrics displaymetrics = new DisplayMetrics();
this.getWindowManager().getDefaultDisplay().getMetrics(displaymetrics);
float hight = (float) ((displaymetrics.heightPixels / 2) * (1.1));
float width = (float) ((displaymetrics.widthPixels / 2) * (1.6));
// myImg.scaleToFit(hight, width);
myImg.scaleToFit(PageSize.ARCH_A);
myImg.setAlignment(Image.ALIGN_CENTER);
document.add(myImg);
writer.setPageEmpty(false);
}
if (imagePath.equals("R.drawable.img_3")) {
myDrawable = getResources().getDrawable(R.drawable.img_3);
Bitmap myLogo = ((BitmapDrawable) myDrawable).getBitmap();
ByteArrayOutputStream stream = new ByteArrayOutputStream();
myLogo.compress(Bitmap.CompressFormat.JPEG, 40, stream);
Image myImg = Image.getInstance(stream.toByteArray());
DisplayMetrics displaymetrics = new DisplayMetrics();
this.getWindowManager().getDefaultDisplay().getMetrics(displaymetrics);
float hight = (float) ((displaymetrics.heightPixels / 2) * (1.1));
float width = (float) ((displaymetrics.widthPixels / 2) * (1.6));
// myImg.scaleToFit(hight, width);
myImg.scaleToFit(PageSize.ARCH_A);
myImg.setAlignment(Image.ALIGN_CENTER);
document.add(myImg);
writer.setPageEmpty(false);
}
if (imagePath.equals("R.drawable.img_4")) {
myDrawable = getResources().getDrawable(R.drawable.img_4);
Bitmap myLogo = ((BitmapDrawable) myDrawable).getBitmap();
ByteArrayOutputStream stream = new ByteArrayOutputStream();
myLogo.compress(Bitmap.CompressFormat.JPEG, 40, stream);
Image myImg = Image.getInstance(stream.toByteArray());
DisplayMetrics displaymetrics = new DisplayMetrics();
this.getWindowManager().getDefaultDisplay().getMetrics(displaymetrics);
float hight = (float) ((displaymetrics.heightPixels / 2) * (1.1));
float width = (float) ((displaymetrics.widthPixels / 2) * (1.6));
// myImg.scaleToFit(hight, width);
myImg.scaleToFit(PageSize.ARCH_A);
myImg.setAlignment(Image.ALIGN_CENTER);
document.add(myImg);
writer.setPageEmpty(false);
}
if (imagePath.equals("R.drawable.img_5")) {
myDrawable = getResources().getDrawable(R.drawable.img_5);
Bitmap myLogo = ((BitmapDrawable) myDrawable).getBitmap();
ByteArrayOutputStream stream = new ByteArrayOutputStream();
myLogo.compress(Bitmap.CompressFormat.JPEG, 40, stream);
Image myImg = Image.getInstance(stream.toByteArray());
DisplayMetrics displaymetrics = new DisplayMetrics();
this.getWindowManager().getDefaultDisplay().getMetrics(displaymetrics);
float hight = (float) ((displaymetrics.heightPixels / 2) * (1.1));
float width = (float) ((displaymetrics.widthPixels / 2) * (1.6));
// myImg.scaleToFit(hight, width);
myImg.scaleToFit(PageSize.ARCH_A);
myImg.setAlignment(Image.ALIGN_CENTER);
document.add(myImg);
writer.setPageEmpty(false);
}
if (imagePath.equals("R.drawable.img_6")) {
myDrawable = getResources().getDrawable(R.drawable.img_6);
Bitmap myLogo = ((BitmapDrawable) myDrawable).getBitmap();
ByteArrayOutputStream stream = new ByteArrayOutputStream();
myLogo.compress(Bitmap.CompressFormat.JPEG, 40, stream);
Image myImg = Image.getInstance(stream.toByteArray());
DisplayMetrics displaymetrics = new DisplayMetrics();
this.getWindowManager().getDefaultDisplay().getMetrics(displaymetrics);
float hight = (float) ((displaymetrics.heightPixels / 2) * (1.1));
float width = (float) ((displaymetrics.widthPixels / 2) * (1.6));
// myImg.scaleToFit(hight, width);
myImg.scaleToFit(PageSize.ARCH_A);
myImg.setAlignment(Image.ALIGN_CENTER);
document.add(myImg);
writer.setPageEmpty(false);
}
if (imagePath.equals("R.drawable.img_7")) {
myDrawable = getResources().getDrawable(R.drawable.img_7);
Bitmap myLogo = ((BitmapDrawable) myDrawable).getBitmap();
ByteArrayOutputStream stream = new ByteArrayOutputStream();
myLogo.compress(Bitmap.CompressFormat.JPEG, 40, stream);
Image myImg = Image.getInstance(stream.toByteArray());
DisplayMetrics displaymetrics = new DisplayMetrics();
this.getWindowManager().getDefaultDisplay().getMetrics(displaymetrics);
float hight = (float) ((displaymetrics.heightPixels / 2) * (1.1));
float width = (float) ((displaymetrics.widthPixels / 2) * (1.6));
// myImg.scaleToFit(hight, width);
myImg.scaleToFit(PageSize.ARCH_A);
myImg.setAlignment(Image.ALIGN_CENTER);
document.add(myImg);
writer.setPageEmpty(false);
}
/* if (imagePath.equals("R.drawable.img_8")) {
myDrawable = getResources().getDrawable(R.drawable.img_8);
Bitmap myLogo = ((BitmapDrawable) myDrawable).getBitmap();
ByteArrayOutputStream stream = new ByteArrayOutputStream();
myLogo.compress(Bitmap.CompressFormat.JPEG, 40, stream);
Image myImg = Image.getInstance(stream.toByteArray());
DisplayMetrics displaymetrics = new DisplayMetrics();
this.getWindowManager().getDefaultDisplay().getMetrics(displaymetrics);
float hight = (float) ((displaymetrics.heightPixels / 2) * (1.1));
float width = (float) ((displaymetrics.widthPixels / 2) * (1.6));
// myImg.scaleToFit(hight, width);
myImg.scaleToFit(PageSize.ARCH_A);
myImg.setAlignment(Image.ALIGN_CENTER);
document.add(myImg);
writer.setPageEmpty(false);
} if (imagePath.equals("R.drawable.img_9")) {
myDrawable = getResources().getDrawable(R.drawable.img_9);
Bitmap myLogo = ((BitmapDrawable) myDrawable).getBitmap();
ByteArrayOutputStream stream = new ByteArrayOutputStream();
myLogo.compress(Bitmap.CompressFormat.JPEG, 40, stream);
Image myImg = Image.getInstance(stream.toByteArray());
DisplayMetrics displaymetrics = new DisplayMetrics();
this.getWindowManager().getDefaultDisplay().getMetrics(displaymetrics);
float hight = (float) ((displaymetrics.heightPixels / 2) * (1.1));
float width = (float) ((displaymetrics.widthPixels / 2) * (1.6));
// myImg.scaleToFit(hight, width);
myImg.scaleToFit(PageSize.ARCH_A);
myImg.setAlignment(Image.ALIGN_CENTER);
document.add(myImg);
writer.setPageEmpty(false);
}*/
}
document.close();
pDialog.dismiss();
Intent intent = new Intent(getApplicationContext(), PDFViewActivity.class);
intent.putExtra("pdfFile", targetPdf);
startActivity(intent);
finish();
} catch (DocumentException e) {
e.printStackTrace();
} catch (MalformedURLException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
@SuppressLint("ResourceAsColor")
private Bitmap ProcessingBitmap() {
Resources resources = DisplayPreview.this.getResources();
float scale = resources.getDisplayMetrics().density;
Bitmap bm1 = BitmapFactory.decodeResource(getResources(), R.drawable.img_1);
Bitmap.Config config = bm1.getConfig();
if (config == null) {
config = Bitmap.Config.ARGB_8888;
}
// newBitmap = Bitmap.createBitmap(bm1.getWidth(), bm1.getHeight(), config);
anImage = Bitmap.createBitmap(bm1.getWidth(), bm1.getHeight(), config);
// Canvas newCanvas = new Canvas(newBitmap);
Canvas newCanvas = new Canvas(anImage);
newCanvas.drawBitmap(bm1, 0, 0, null);
String captionString = userNameStr.trim();
if (captionString != null) {
TextPaint paintText = new TextPaint(Paint.ANTI_ALIAS_FLAG);
paintText.setColor(R.color.wedding_guest_name);
paintText.setTextSize(100);
paintText.setStyle(Paint.Style.FILL_AND_STROKE);
paintText.setTextAlign(Paint.Align.CENTER);
int textWidth = newCanvas.getWidth() - (int) (250 * scale);
StaticLayout textLayout = new StaticLayout(
captionString, paintText, textWidth, Layout.Alignment.ALIGN_LEFT, 1.0f, 1.4f, false);
Rect rectText = new Rect();
paintText.getTextBounds(captionString, 0, captionString.length(), rectText);
float xPos = (newCanvas.getWidth() / 2.16f);
float yPos = (int) ((newCanvas.getHeight() / 2.4) - ((paintText.descent() + paintText.ascent()) / 3.0));
float x = newCanvas.getWidth()/2;
// float y = newCanvas.getHeight()/2;
// draw text to the Canvas center
newCanvas.save();
// newCanvas.translate(xPos, yPos);
newCanvas.translate(x, yPos);
textLayout.draw(newCanvas);
newCanvas.restore();
// drawMultiLineText(captionString,xPos, yPos, paintText, newCanvas);
// newCanvas.drawText(captionString,xPos, yPos, paintText);
} else {
Toast.makeText(getApplicationContext(),
"caption empty!",
Toast.LENGTH_LONG).show();
}
// return newBitmap;
return anImage;
}
}
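As a side note, the seven near-identical per-drawable branches above could be collapsed into one. Here is a minimal sketch that could replace them inside the existing try block, assuming the list really holds literal names like "R.drawable.img_2" (as the equals checks suggest):
// Sketch: resolve the drawable by its name instead of one branch per image.
String name = imagePath.replace("R.drawable.", "");   // e.g. "img_2"
int resId = getResources().getIdentifier(name, "drawable", getPackageName());
if (resId != 0) {
    Bitmap bmp = BitmapFactory.decodeResource(getResources(), resId);
    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    bmp.compress(Bitmap.CompressFormat.JPEG, 40, stream);
    Image myImg = Image.getInstance(stream.toByteArray());
    myImg.scaleToFit(PageSize.ARCH_A);
    myImg.setAlignment(Image.ALIGN_CENTER);
    document.add(myImg);
    writer.setPageEmpty(false);
}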
I suggest using iText to create the PDF; it makes this straightforward. Add the iText library to your app module's build.gradle file, for example:
compile 'com.itextpdf:itextg:5.5.10'
or
implementation 'com.itextpdf:itext-pdfa:5.5.5'
Below is code that creates a PDF file and adds an image into each cell of a table:
Document document = new Document(PageSize.A4, 10, 10, 10, 10);
try {
    // create the target folder first, then open the output file inside it
    File folder = new File(Environment.getExternalStorageDirectory() + "/*foldername*");
    if (!folder.exists()) {
        folder.mkdirs();
    }
    File f = new File(folder.getAbsolutePath() + "/" + *filename* + ".pdf");
    PdfWriter pdfWriter = PdfWriter.getInstance(document, new FileOutputStream(f));
    document.open();
    PdfPTable thirdPart = new PdfPTable(col);
    thirdPart.getDefaultCell().setBorder(Rectangle.NO_BORDER);
    thirdPart.setWidthPercentage(100.0f);
    thirdPart.setHorizontalAlignment(Element.ALIGN_LEFT);
    for (int l = 0; l < list.size(); l++) {
        Image str = list.get(l);
        PdfPCell imageCell = new PdfPCell(str);
        thirdPart.addCell(imageCell);
    }
    document.add(thirdPart);
} catch (Exception e) {
    e.printStackTrace();
}
document.close();
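If, as in the original question, each image should land on its own page rather than in a table cell, here is a minimal sketch with the same iText API; imagePaths and outFile are placeholders for your own list of absolute file paths and output File:
Document document = new Document(PageSize.A4, 10, 10, 10, 10);
try {
    PdfWriter.getInstance(document, new FileOutputStream(outFile));
    document.open();
    for (String path : imagePaths) {
        Image img = Image.getInstance(path);      // load the image from its file path
        img.scaleToFit(PageSize.A4.getWidth(), PageSize.A4.getHeight());
        img.setAlignment(Image.ALIGN_CENTER);
        document.add(img);
        document.newPage();                       // one image per page
    }
    document.close();
} catch (Exception e) {
    e.printStackTrace();
}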
I'm writing a small Android program in Android Studio that uses MediaProjection to grab a screenshot of the whole screen, and I want to pass that screenshot as a Bitmap to my system overlay (based on the chat heads example). When the MediaProjection runs and the ImageReader creates the bitmaps, I copy the bitmap into a class-level variable so I can hand it to the system overlay service and display it. The problem is that when the ImageView inside the service tries to read the bitmap, I get the following error:
java.lang.RuntimeException: Canvas: trying to use a recycled bitmap android.graphics.Bitmap@347a4d5
From my understanding, the garbage collector quickly eats up the passed Bitmap and when the System Overlay tries to grab it, the data is gone. Could someone point me in the right direction on how to keep the bitmap?
Main Activity
package com.example.chatheads;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.graphics.PixelFormat;
import android.graphics.Point;
import android.media.ImageReader;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.Bitmap.CompressFormat;
import android.graphics.PixelFormat;
import android.graphics.Point;
import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.media.Image;
import android.media.ImageReader;
import android.media.projection.MediaProjection;
import android.media.projection.MediaProjectionManager;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.Display;
import android.view.OrientationEventListener;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.Toast;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
public class MainActivity extends Activity {
Button startService,stopService;
private static final String TAG = MainActivity.class.getName();
private static final int REQUEST_CODE = 100;
private static String STORE_DIRECTORY;
private static int IMAGES_PRODUCED;
private static final String SCREENCAP_NAME = "screencap";
private static final int VIRTUAL_DISPLAY_FLAGS = DisplayManager.VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY | DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC;
private static MediaProjection sMediaProjection;
private MediaProjectionManager mProjectionManager;
private ImageReader mImageReader;
private Handler mHandler;
private Display mDisplay;
private VirtualDisplay mVirtualDisplay;
private int mDensity;
private int mWidth;
private int mHeight;
private int mRotation;
private OrientationChangeCallback mOrientationChangeCallback;
private Image image = null;
private Bitmap bitmap = null;
Globals sharedData = Globals.getInstance();
public Bitmap getObjectContainer() {
return bitmap;
}
private class ImageAvailableListener implements ImageReader.OnImageAvailableListener {
@Override
public void onImageAvailable(ImageReader reader) {
FileOutputStream fos = null;
try {
image = mImageReader.acquireLatestImage();
if (image != null) {
Image.Plane[] planes = image.getPlanes();
ByteBuffer buffer = planes[0].getBuffer();
int pixelStride = planes[0].getPixelStride();
int rowStride = planes[0].getRowStride();
int rowPadding = rowStride - pixelStride * mWidth;
// create bitmap
bitmap = Bitmap.createBitmap(mWidth + rowPadding / pixelStride, mHeight, Bitmap.Config.ARGB_8888);
bitmap.copyPixelsFromBuffer(buffer);
sharedData.setScreenshot(bitmap);
// write bitmap to a file
// fos = new FileOutputStream(STORE_DIRECTORY + "/myscreen_" + IMAGES_PRODUCED + ".png");
//bitmap.compress(CompressFormat.JPEG, 100, fos);
IMAGES_PRODUCED++;
//Log.e(TAG, "captured image: " + IMAGES_PRODUCED);
String s = ("captured image: " + String.valueOf(IMAGES_PRODUCED));
Toast toast1 = Toast.makeText(getBaseContext(),s, Toast.LENGTH_SHORT);
toast1.show();
}
} catch (Exception e) {
e.printStackTrace();
} finally {
if (fos!=null) {
try {
fos.close();
} catch (IOException ioe) {
ioe.printStackTrace();
}
}
if (bitmap!=null) {
bitmap.recycle();
}
if (image!=null) {
image.close();
}
}
}
}
private class OrientationChangeCallback extends OrientationEventListener {
public OrientationChangeCallback(Context context) {
super(context);
}
@Override
public void onOrientationChanged(int orientation) {
synchronized (this) {
final int rotation = mDisplay.getRotation();
if (rotation != mRotation) {
mRotation = rotation;
try {
// clean up
if(mVirtualDisplay != null) mVirtualDisplay.release();
if(mImageReader != null) mImageReader.setOnImageAvailableListener(null, null);
// re-create virtual display depending on device width / height
createVirtualDisplay();
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
}
private class MediaProjectionStopCallback extends MediaProjection.Callback {
@Override
public void onStop() {
Log.e("ScreenCapture", "stopping projection.");
mHandler.post(new Runnable() {
@Override
public void run() {
if(mVirtualDisplay != null) mVirtualDisplay.release();
if(mImageReader != null) mImageReader.setOnImageAvailableListener(null, null);
if(mOrientationChangeCallback != null) mOrientationChangeCallback.disable();
sMediaProjection.unregisterCallback(MediaProjectionStopCallback.this);
}
});
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
sharedData.setValue(1);
// call for the projection manager
mProjectionManager = (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
// start projection
Button startButton = (Button)findViewById(R.id.startButton);
startButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
startProjection();
}
});
// stop projection
Button stopButton = (Button)findViewById(R.id.stopButton);
stopButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
stopProjection();
}
});
// start capture handling thread
new Thread() {
@Override
public void run() {
Looper.prepare();
mHandler = new Handler();
Looper.loop();
}
}.start();
startService=(Button)findViewById(R.id.startService);
stopService=(Button)findViewById(R.id.stopService);
startService.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
startService(new Intent(getApplication(), ChatHeadService.class));
}
});
stopService.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
stopService(new Intent(getApplication(), ChatHeadService.class));
}
});
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == REQUEST_CODE) {
sMediaProjection = mProjectionManager.getMediaProjection(resultCode, data);
if (sMediaProjection != null) {
File externalFilesDir = getExternalFilesDir(null);
if (externalFilesDir != null) {
STORE_DIRECTORY = externalFilesDir.getAbsolutePath() + "/screenshots/";
File storeDirectory = new File(STORE_DIRECTORY);
if (!storeDirectory.exists()) {
boolean success = storeDirectory.mkdirs();
if (!success) {
Log.e(TAG, "failed to create file storage directory.");
return;
}
}
} else {
Log.e(TAG, "failed to create file storage directory, getExternalFilesDir is null.");
return;
}
// display metrics
DisplayMetrics metrics = getResources().getDisplayMetrics();
mDensity = metrics.densityDpi;
mDisplay = getWindowManager().getDefaultDisplay();
// create virtual display depending on device width / height
createVirtualDisplay();
// register orientation change callback
mOrientationChangeCallback = new OrientationChangeCallback(this);
if (mOrientationChangeCallback.canDetectOrientation()) {
mOrientationChangeCallback.enable();
}
// register media projection stop callback
sMediaProjection.registerCallback(new MediaProjectionStopCallback(), mHandler);
}
}
}
/****************************************** UI Widget Callbacks *******************************/
private void startProjection() {
startActivityForResult(mProjectionManager.createScreenCaptureIntent(), REQUEST_CODE);
}
private void stopProjection() {
mHandler.post(new Runnable() {
@Override
public void run() {
if (sMediaProjection != null) {
sMediaProjection.stop();
}
}
});
}
/****************************************** Factoring Virtual Display creation ****************/
private void createVirtualDisplay() {
// get width and height
Point size = new Point();
mDisplay.getSize(size);
mWidth = size.x;
mHeight = size.y;
// start capture reader
mImageReader = ImageReader.newInstance(mWidth, mHeight, PixelFormat.RGBA_8888, 2);
mVirtualDisplay = sMediaProjection.createVirtualDisplay(SCREENCAP_NAME, mWidth, mHeight, mDensity, VIRTUAL_DISPLAY_FLAGS, mImageReader.getSurface(), null, mHandler);
mImageReader.setOnImageAvailableListener(new ImageAvailableListener(), mHandler);
}
}
Service
package com.example.chatheads;
import android.app.Service;
import android.content.Intent;
import android.graphics.PixelFormat;
import android.graphics.Rect;
import android.os.IBinder;
import android.util.DisplayMetrics;
import android.view.Display;
import android.view.Gravity;
import android.view.MotionEvent;
import android.view.View;
import android.view.WindowManager;
import android.widget.ImageView;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.view.View;
import android.widget.Toast;
public class ChatHeadService extends Service {
private WindowManager windowManager;
private ImageView chatHead;
WindowManager.LayoutParams params;
Globals sharedData = Globals.getInstance();
@Override
public void onCreate() {
super.onCreate();
windowManager = (WindowManager) getSystemService(WINDOW_SERVICE);
DisplayMetrics displayMetrics = new DisplayMetrics();
windowManager.getDefaultDisplay().getMetrics(displayMetrics);
int h = displayMetrics.heightPixels, w = displayMetrics.widthPixels;
Bitmap.Config conf = Bitmap.Config.ARGB_8888; // see other conf types
final Bitmap bmp = Bitmap.createBitmap(w, h, conf); // this creates a MUTABLE bitmap
Canvas canvas = new Canvas(bmp);
Paint paint = new Paint();
paint.setColor(Color.BLACK);
paint.setStrokeWidth(5);
paint.setAlpha(60);
Rect rect=new Rect(0, 0, bmp.getWidth(), bmp.getHeight());
canvas.drawRect(rect,paint);
paint.setColor(Color.GREEN);
canvas.drawLine(0, 0, 0, h, paint);
canvas.drawLine(0, 0, w, 0, paint);
canvas.drawLine(w, 0, h, w, paint);
canvas.drawLine(0, 0, 0, w, paint);
chatHead = new ImageView(this);
chatHead.setImageResource(R.drawable.face1);
params= new WindowManager.LayoutParams(
WindowManager.LayoutParams.WRAP_CONTENT,
WindowManager.LayoutParams.WRAP_CONTENT,
WindowManager.LayoutParams.TYPE_PHONE,
WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE,
PixelFormat.TRANSLUCENT);
params.gravity = Gravity.TOP | Gravity.LEFT;
params.x = 0;
params.y = 1000;
//this code is for dragging the chat head
chatHead.setOnTouchListener(new View.OnTouchListener() {
private int initialX;
private int initialY;
private float initialTouchX;
private float initialTouchY;
boolean isExpanded = false;
@Override
public boolean onTouch(View v, MotionEvent event) {
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
initialX = params.x;
initialY = params.y;
initialTouchX = event.getRawX();
initialTouchY = event.getRawY();
int n = sharedData.getValue();
//Toast toast1 = Toast.makeText(getBaseContext(),String.valueOf(n), Toast.LENGTH_SHORT);
//toast1.show();
if (isExpanded == true){
chatHead.setImageResource(R.drawable.face1);
isExpanded=false;
params.x = 0;
params.y = 1000;
windowManager.updateViewLayout(chatHead, params);
}
else{
//chatHead.setImageResource(R.drawable.clear);
Bitmap bmp2 = sharedData.getScreenshot();
chatHead.setImageBitmap(bmp2);
isExpanded = true;
params.x = 0;
params.y = 0;
windowManager.updateViewLayout(chatHead, params);
}
return true;
case MotionEvent.ACTION_UP:
//chatHead.setImageResource(R.drawable.test);
return true;
/*case MotionEvent.ACTION_MOVE:
params.x = initialX
+ (int) (event.getRawX() - initialTouchX);
params.y = initialY
+ (int) (event.getRawY() - initialTouchY);
windowManager.updateViewLayout(chatHead, params);
return true;*/
}
return false;
}
});
windowManager.addView(chatHead, params);
}
@Override
public void onDestroy() {
super.onDestroy();
if (chatHead != null)
windowManager.removeView(chatHead);
}
@Override
public IBinder onBind(Intent intent) {
// TODO Auto-generated method stub
return null;
}
}
Globals
package com.example.chatheads;
import android.graphics.Bitmap;
public class Globals {
private static Globals instance = new Globals();
// Getter-Setters
public static Globals getInstance() {
return instance;
}
public static void setInstance(Globals instance) {
Globals.instance = instance;
}
private Globals() {
}
private int testi;
private Bitmap bmpscreenshot;
public Bitmap getScreenshot(){
return bmpscreenshot;
}
public void setScreenshot(Bitmap bmp){
this.bmpscreenshot = bmp;
}
public int getValue() {
return testi;
}
public void setValue(int testi) {
this.testi = testi;
}
}
You seem to be calling bitmap.recycle() too soon. Call it after the service has actually processed the data.
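For example, a minimal sketch of the onImageAvailable body (reusing sharedData from the question): hand the overlay its own copy of the pixels, so recycling the capture-side bitmap no longer breaks the ImageView in the service.
// Sketch only: copy the pixels out before recycling the capture bitmap.
bitmap = Bitmap.createBitmap(mWidth + rowPadding / pixelStride, mHeight, Bitmap.Config.ARGB_8888);
bitmap.copyPixelsFromBuffer(buffer);
sharedData.setScreenshot(bitmap.copy(Bitmap.Config.ARGB_8888, false));
bitmap.recycle();   // safe now: the service holds an independent copy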
If you want to pass a Bitmap from an Activity to a Service, convert the bitmap into a byte array:
Intent i = new Intent(this, YourService.class);
Bitmap b; // your bitmap
ByteArrayOutputStream bs = new ByteArrayOutputStream();
b.compress(Bitmap.CompressFormat.PNG, 50, bs);
i.putExtra("byteArray", bs.toByteArray());
startService(i);
Override onStartCommand in your service
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
    byte[] byteArray = intent.getByteArrayExtra("byteArray");
    Bitmap b = BitmapFactory.decodeByteArray(byteArray, 0, byteArray.length);
    // ... use the bitmap ...
    return START_NOT_STICKY;
}
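One caveat with this approach: Intent extras go through the Binder transaction buffer (roughly 1 MB), so a full-screen screenshot can be too large. A hedged alternative, sketched against the same b and i from the snippet above, is to write the bitmap to a file and pass only its path (the extra name screenshotPath is illustrative):
// Sketch: persist the bitmap and send its path instead of the raw bytes.
try {
    File out = new File(getCacheDir(), "screenshot.png");
    FileOutputStream fos = new FileOutputStream(out);
    b.compress(Bitmap.CompressFormat.PNG, 100, fos);
    fos.close();
    i.putExtra("screenshotPath", out.getAbsolutePath());
} catch (IOException e) {
    e.printStackTrace();
}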
I'm using the Color Blob Detection sample code from OpenCV (Sample Code), but the app crashes at Core.multiply(contour, new Scalar(4,4), contour); with a fatal error:
libc: Fatal signal 11 (SIGSEGV), code 1, fault addr 0x0 in tid 11016 (Thread-4857)
and a detailed log is available to get an idea of the exact reason. In another part of the code I was using Core.add(fg, bg, markers); and the app gave the same fatal error there as well. What is the issue? Please guide me towards a solution. I'm using OpenCV 3.1.
The code is as follows:
package com.iu.kamraapp.utils;
import android.util.Log;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;
public class ColorBlobDetector {
// Lower and Upper bounds for range checking in HSV color space
private Scalar mLowerBound = new Scalar(0);
private Scalar mUpperBound = new Scalar(0);
// Minimum contour area in percent for contours filtering
private static double mMinContourArea = 0.1;
// Color radius for range checking in HSV color space
private Scalar mColorRadius = new Scalar(25,50,50,0);
private Mat mSpectrum = new Mat();
private List<MatOfPoint> mContours = new ArrayList<MatOfPoint>();
// Cache
Mat mPyrDownMat = new Mat();
Mat mHsvMat = new Mat();
Mat mMask = new Mat();
Mat mDilatedMask = new Mat();
Mat mHierarchy = new Mat();
public void setColorRadius(Scalar radius) {
mColorRadius = radius;
}
public void setHsvColor(Scalar hsvColor) {
double minH = (hsvColor.val[0] >= mColorRadius.val[0]) ? hsvColor.val[0]-mColorRadius.val[0] : 0;
double maxH = (hsvColor.val[0]+mColorRadius.val[0] <= 255) ? hsvColor.val[0]+mColorRadius.val[0] : 255;
mLowerBound.val[0] = minH;
mUpperBound.val[0] = maxH;
mLowerBound.val[1] = hsvColor.val[1] - mColorRadius.val[1];
mUpperBound.val[1] = hsvColor.val[1] + mColorRadius.val[1];
mLowerBound.val[2] = hsvColor.val[2] - mColorRadius.val[2];
mUpperBound.val[2] = hsvColor.val[2] + mColorRadius.val[2];
mLowerBound.val[3] = 0;
mUpperBound.val[3] = 255;
Mat spectrumHsv = new Mat(1, (int)(maxH-minH), CvType.CV_8UC3);
for (int j = 0; j < maxH-minH; j++) {
byte[] tmp = {(byte)(minH+j), (byte)255, (byte)255};
spectrumHsv.put(0, j, tmp);
}
Imgproc.cvtColor(spectrumHsv, mSpectrum, Imgproc.COLOR_HSV2RGB_FULL, 4);
}
public Mat getSpectrum() {
return mSpectrum;
}
public void setMinContourArea(double area) {
mMinContourArea = area;
}
public void process(Mat rgbaImage) {
try {
Imgproc.pyrDown(rgbaImage, mPyrDownMat);
Imgproc.pyrDown(mPyrDownMat, mPyrDownMat);
Imgproc.cvtColor(mPyrDownMat, mHsvMat, Imgproc.COLOR_RGB2HSV_FULL);
Core.inRange(mHsvMat, mLowerBound, mUpperBound, mMask);
Imgproc.dilate(mMask, mDilatedMask, new Mat());
List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
Imgproc.findContours(mDilatedMask, contours, mHierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
// Find max contour area
double maxArea = 0;
Iterator<MatOfPoint> each = contours.iterator();
while (each.hasNext()) {
MatOfPoint wrapper = each.next();
double area = Imgproc.contourArea(wrapper);
if (area > maxArea)
maxArea = area;
}
// Filter contours by area and resize to fit the original image size
mContours.clear();
each = contours.iterator();
while (each.hasNext()) {
MatOfPoint contour = each.next();
if (Imgproc.contourArea(contour) > mMinContourArea * maxArea) {
Core.multiply(contour, new Scalar(4, 4), contour); // issue is here
mContours.add(contour);
}
}
}
catch (Exception e) { e.printStackTrace(); }
}
public List<MatOfPoint> getContours() {
return mContours;
}
}
package com.iu.kamraapp;
import android.content.Context;
import android.content.res.Configuration;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.MotionEvent;
import android.view.SurfaceView;
import android.view.View;
import com.iu.kamraapp.utils.AppGlobal;
import com.iu.kamraapp.utils.ColorBlobDetector;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.JavaCameraView;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import java.util.ArrayList;
import java.util.List;
public class ColorBlobActivity extends AppCompatActivity implements View.OnTouchListener,CameraBridgeViewBase.CvCameraViewListener2 {
private static final String TAG = "MainActivity";
Context context;
int screenWidth, screenHeight;
private CameraBridgeViewBase mOpenCvCameraView;
private boolean mIsColorSelected = false;
private Mat mRgba;
private Scalar mBlobColorRgba;
private Scalar mBlobColorHsv;
private ColorBlobDetector mDetector;
private Mat mSpectrum;
private Size SPECTRUM_SIZE;
private Scalar CONTOUR_COLOR;
BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS: {
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
mOpenCvCameraView.setOnTouchListener(ColorBlobActivity.this);
}
break;
default: {
super.onManagerConnected(status);
}
break;
}
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
context = this;
setContentView(R.layout.activity_main);
screenWidth = AppGlobal.getScreenResolution(context, true);
screenHeight = AppGlobal.getScreenResolution(context, false);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_java_surface_view);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
if (newConfig.orientation == Configuration.ORIENTATION_LANDSCAPE) {
screenWidth = AppGlobal.getScreenResolution(context, false);
screenHeight = AppGlobal.getScreenResolution(context, true);
} else if (newConfig.orientation == Configuration.ORIENTATION_PORTRAIT) {
screenWidth = AppGlobal.getScreenResolution(context, true);
screenHeight = AppGlobal.getScreenResolution(context, false);
}
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_1_0, this, mLoaderCallback);
OpenCVLoader.initDebug(true);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onCameraViewStarted(int width, int height) {
mRgba = new Mat(height, width, CvType.CV_8UC4);
mDetector = new ColorBlobDetector();
mSpectrum = new Mat();
mBlobColorRgba = new Scalar(255);
mBlobColorHsv = new Scalar(255);
SPECTRUM_SIZE = new Size(200, 64);
CONTOUR_COLOR = new Scalar(255,0,0,255);
}
public void onCameraViewStopped() {
mRgba.release();
}
public boolean onTouch(View v, MotionEvent event) {
try {
int cols = mRgba.cols();
int rows = mRgba.rows();
int xOffset = (mOpenCvCameraView.getWidth() - cols) / 2;
int yOffset = (mOpenCvCameraView.getHeight() - rows) / 2;
int x = (int) event.getX() - xOffset;
int y = (int) event.getY() - yOffset;
Log.i(TAG, "Touch image coordinates: (" + x + ", " + y + ")");
if ((x < 0) || (y < 0) || (x > cols) || (y > rows)) return false;
Rect touchedRect = new Rect();
touchedRect.x = (x > 4) ? x - 4 : 0;
touchedRect.y = (y > 4) ? y - 4 : 0;
touchedRect.width = (x + 4 < cols) ? x + 4 - touchedRect.x : cols - touchedRect.x;
touchedRect.height = (y + 4 < rows) ? y + 4 - touchedRect.y : rows - touchedRect.y;
Mat touchedRegionRgba = mRgba.submat(touchedRect);
Mat touchedRegionHsv = new Mat();
Imgproc.cvtColor(touchedRegionRgba, touchedRegionHsv, Imgproc.COLOR_RGB2HSV_FULL);
// Calculate average color of touched region
mBlobColorHsv = Core.sumElems(touchedRegionHsv);
int pointCount = touchedRect.width * touchedRect.height;
for (int i = 0; i < mBlobColorHsv.val.length; i++)
mBlobColorHsv.val[i] /= pointCount;
mBlobColorRgba = converScalarHsv2Rgba(mBlobColorHsv);
Log.i(TAG, "Touched rgba color: (" + mBlobColorRgba.val[0] + ", " + mBlobColorRgba.val[1] +
", " + mBlobColorRgba.val[2] + ", " + mBlobColorRgba.val[3] + ")");
mDetector.setHsvColor(mBlobColorHsv);
Imgproc.resize(mDetector.getSpectrum(), mSpectrum, SPECTRUM_SIZE);
mIsColorSelected = true;
touchedRegionRgba.release();
touchedRegionHsv.release();
} catch (Exception e) { e.printStackTrace(); }
return false; // don't need subsequent touch events
}
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
if (mIsColorSelected) {
mDetector.process(mRgba);
List<MatOfPoint> contours = mDetector.getContours();
Log.d(TAG, "Contours count: " + contours.size());
Imgproc.drawContours(mRgba, contours, -1, CONTOUR_COLOR);
Mat colorLabel = mRgba.submat(4, 68, 4, 68);
colorLabel.setTo(mBlobColorRgba);
Mat spectrumLabel = mRgba.submat(4, 4 + mSpectrum.rows(), 70, 70 + mSpectrum.cols());
mSpectrum.copyTo(spectrumLabel);
}
return mRgba;
}
private Scalar converScalarHsv2Rgba(Scalar hsvColor) {
Mat pointMatRgba = new Mat();
Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);
return new Scalar(pointMatRgba.get(0, 0));
}
}
I use the back camera to capture a frame; by default the Android application is landscape, so to get the input frame I use
Core.flip(currentFrame, currentFrame, 1); // flip around the Y-axis
After some image enhancement and findContours with OpenCV, I have the following problems:
a. Object moves to the left, the drawn circle moves downward.
b. Object moves to the right, the drawn circle moves upward.
c. Object moves upward, the drawn circle moves to the left.
d. Object moves downward, the drawn circle moves to the right.
In other words, the drawn circle (output) needs to be rotated 90° clockwise to match the source image.
The code is as follows:
package com.mtyiuaa.writingintheair;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.SurfaceView;
import android.view.View;
import android.content.Intent;
import android.view.ViewDebug;
import android.widget.Button;
import java.util.ArrayList;
import java.util.List;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.JavaCameraView;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.Point;
import org.opencv.core.MatOfPoint;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.core.Rect;
import org.opencv.imgproc.Imgproc;
import org.opencv.imgproc.Moments;
import org.opencv.highgui.VideoCapture;
public class MainActivity extends AppCompatActivity implements CameraBridgeViewBase.CvCameraViewListener2{
private static final int THRESH_BINARY = 1;
private static final int THRESH_TOZERO = 4;
private static String TAG = "MainActivity";
JavaCameraView javaCameraView;
JavaCameraView javaCameraView2;
VideoCapture videoCapture;
Mat mRgba;
Mat temp;
Mat previousFrame;
Mat GpreviousFrame; // gray-level frame of previous Frame
Mat currentFrame;
Mat GcurrentFrame; // gray-level frame of current Frame
Mat diffFrame;
Mat imgGray;
Mat imgHSV;
Mat imgCanny;
Mat inputFrame;
Mat FlipFrame;
Mat outputFrame;
Mat imgthresholding;
Mat imgNormalization;
Mat imgGaussianSmothing;
int max_Binary_value = 255;
int thresh = 20;
Boolean CameraActive;
Boolean firstIteration= true;
int[] theObject = {0,0};
int x=0, y=0;
int FRAME_WIDTH = 1280;
int FRAME_HEIGHT = 720;
//max number of objects to be detected in frame
int MAX_NUM_OBJECTS=50;
//Minimum and Maximum object area
int MIN_OBJECT_AREA = 20*20;
int MAX_OBJECT_AREA = (int) ((FRAME_HEIGHT*FRAME_WIDTH)/1.5);
//MatOfPoint allcontours = new MatOfPoint();
//bounding rectangle of the object, we will use the center of this as its position.
BaseLoaderCallback mLoaderCallBack = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch(status){
case BaseLoaderCallback.SUCCESS:{
javaCameraView.enableView();
//javaCameraView2.enableView();
break;
}
default:{
super.onManagerConnected(status);
break;
}
}
}
};
static{
}
//JavaCameraView javaCameraView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
javaCameraView = (JavaCameraView)findViewById(R.id.java_camera_view);
javaCameraView.setVisibility(SurfaceView.VISIBLE);
javaCameraView.setCvCameraViewListener(this);
}
@Override
protected void onPause(){
super.onPause();
if(javaCameraView!=null) {
CameraActive = false;
javaCameraView.disableView();
}
}
@Override
protected void onDestroy(){
super.onDestroy(); // call the basic function
if(javaCameraView!=null){
javaCameraView.disableView();
}
}
@Override
protected void onResume(){
super.onResume(); //call based class
if(OpenCVLoader.initDebug()){
Log.i(TAG, "OpenCV loaded successfully");
mLoaderCallBack.onManagerConnected(LoaderCallbackInterface.SUCCESS);
//grab a new instance by using Basecallbackloader
}
else {
Log.i(TAG, "OpenCV not loaded");
//recall opencvLoader if not loaded
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_10, this, mLoaderCallBack);
}
}
@Override
public void onCameraViewStarted(int width, int height) {
//Mat::Mat(int rows, int cols, int type)
// initialize all Mat object when onCamera starts
CameraActive = true;
// 4 channels are used
mRgba = new Mat(height, width, CvType.CV_8SC4);
FlipFrame = new Mat(height, width, CvType.CV_8SC4);
previousFrame =new Mat(height, width, CvType.CV_8SC4);
currentFrame = new Mat(height, width, CvType.CV_8SC4);
diffFrame =new Mat(height, width, CvType.CV_8SC4);
// 1 channel is used.
GcurrentFrame = new Mat(height, width, CvType.CV_8SC1);
GpreviousFrame = new Mat(height, width, CvType.CV_8SC1);
imgGray= new Mat(height, width, CvType.CV_8SC1);
imgHSV = new Mat (height, width, CvType.CV_8SC1);
imgCanny = new Mat(height, width, CvType.CV_8SC1);
imgGaussianSmothing = new Mat(height, width, CvType.CV_8SC1);
imgthresholding = new Mat(height, width, CvType.CV_8SC1);
imgNormalization = new Mat(height,width, CvType.CV_8SC1);
inputFrame = new Mat(height, width, CvType.CV_8SC1);
outputFrame = new Mat(height, width, CvType.CV_8SC1);
temp = new Mat(height, width, CvType.CV_8SC1);
}
@Override
public void onCameraViewStopped() {
mRgba.release();
FlipFrame.release();
previousFrame.release();
currentFrame.release();
diffFrame.release();
GcurrentFrame.release();
GpreviousFrame.release();
imgGray.release();
imgHSV.release();
imgCanny.release();
imgGaussianSmothing.release();
imgthresholding.release();
imgNormalization.release();
inputFrame.release();
outputFrame.release();
temp.release();
CameraActive = false;
}
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
while(CameraActive) {
Mat temp2 = new Mat();
Mat temp3 = new Mat();
currentFrame = inputFrame.rgba();
Core.flip(currentFrame, currentFrame, 1); // flip around the Y-axis
RGB2HSV(currentFrame).copyTo(temp2);
FilterHSVImage(temp2).copyTo(temp2);
//CannyDetector(temp2).copyTo(temp4);
MorphOperation(temp2).copyTo(temp2);
List<MatOfPoint> contours = new ArrayList<>();
Mat hierarchy = new Mat();
Imgproc.findContours(temp2,contours,hierarchy,Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
temp2.copyTo(temp3);
FindLargestContours(temp3, contours);
//return outputFrame;
}
return null;
}
// Edge Detector using Canny
// Goal: Edge image is less sensitive to lighting conditon
public Mat CannyDetector(Mat inputFrame) {
Imgproc.Canny(inputFrame, imgCanny, 50, 150);
return imgCanny;
}
private Mat RGB2Gray (Mat inputFrame){
Imgproc.cvtColor(inputFrame, imgGray, Imgproc.COLOR_RGB2GRAY);
return imgGray;
}
private Mat RGB2HSV (Mat inputFrame){
Imgproc.cvtColor(inputFrame, imgHSV, Imgproc.COLOR_RGB2HSV);
return imgHSV;
}
private Mat FilterHSVImage(Mat inputFrame){
Core.inRange(inputFrame, new Scalar(0, 100, 100), new Scalar(10, 255, 255), imgthresholding);
//Core.inRange(temp2, new Scalar(160, 100, 100), new Scalar(179, 255, 255), temp2);
return imgthresholding;
}
private Mat MorphOperation (Mat inputFrame){
//Mat element1 = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(2*dilation_size + 1, 2*dilation_size+1));
//Imgproc.dilate(source, destination, element1);
//Highgui.imwrite("dilation.jpg", destination);
Mat erodeElement =Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3,3));
Mat dilateElement = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size (8,8));
Imgproc.dilate(inputFrame, inputFrame, erodeElement);
Imgproc.dilate(inputFrame, inputFrame, erodeElement);
Imgproc.erode(inputFrame, inputFrame, dilateElement);
Imgproc.erode(inputFrame, inputFrame, dilateElement);
return inputFrame;
}
private Mat Threshold(Mat inputFrame){
Imgproc.threshold(inputFrame, imgthresholding, thresh, max_Binary_value, Imgproc.THRESH_TOZERO);
return imgthresholding;
}
private Mat ThresholdToBinary(Mat inputFrame){
Imgproc.threshold(inputFrame, imgthresholding, thresh, max_Binary_value, Imgproc.THRESH_BINARY);
//Imgproc.threshold(inputFrame, imgthresholding, thresh, max_Binary_value, THRESH_BINARY);
return imgthresholding;
}
private Mat Normalization(Mat inputFrame, double min, double max){
//double E_Max =
Core.normalize(inputFrame, imgNormalization, min, max, Core.NORM_MINMAX);
return imgNormalization;
}
private Mat drawObject(int x, int y, Mat inputFrame) {
Point point = new Point(x, y);
Point pointA = new Point(x, y - 25);
Point pointB = new Point(x, y + 25);
Point pointC = new Point(x - 25, y);
Point pointD = new Point(x + 25, y);
Scalar scalar = new Scalar(255, 0, 0);
Core.circle(inputFrame,point,20,scalar,2);
if(y-25>0) Core.line(inputFrame,point,pointA,scalar,2);
else Core.line(inputFrame,point,new Point(x,0),scalar,2);
if(y+25<FRAME_HEIGHT) Core.line(inputFrame,point,pointB,scalar,2);
else Core.line(inputFrame,point,new Point(x,FRAME_HEIGHT),scalar,2);
if(x-25>0)Core.line(inputFrame,point,pointC,scalar,2);
else Core.line(inputFrame,point,new Point(0,y),scalar,2);
if(x+25<FRAME_WIDTH) Core.line(inputFrame,point,pointD,scalar,2);
else Core.line(inputFrame,point,new Point(FRAME_WIDTH,y),scalar,2);
Core.putText(inputFrame, "Tracking object at (" + Integer.toString(x)+" , "+ Integer.toString(y)+ ")",point, 1, 1,scalar, 2);
// putText(inputFrame,intToString(x)+","+intToString(y),Point(x,y+30),1,1,Scalar(0,255,0),2);
Log.i(TAG, "Draw x at "+Integer.toString(x)+ " Draw y at "+ Integer.toString(y));
inputFrame.copyTo(outputFrame);
return outputFrame;
}
private void TrackFilteredObject (int x, int y, Mat filteredImage, Mat sourceImage){
boolean objectFound = false;
Mat temp3 = new Mat();
filteredImage.copyTo(temp3);
List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
Mat hierarchy = new Mat();
Imgproc.findContours(temp3,contours,hierarchy,Imgproc.RETR_CCOMP, Imgproc.CHAIN_APPROX_SIMPLE);
//Point[] contourPoints = (Point[]) contours.toArray();
double refArea = 0;
if (hierarchy.size().height>0 && hierarchy.size().width>0){
// int numObjects = hierarchy.size();
//if number of objects greater than MAX_NUM_OBJECTS we have a noisy filter
//if(numObjects<MAX_NUM_OBJECTS) {
for (int index = 0; index >= 0; index =(int)hierarchy.get(index,0)[0]){
//hierarchy[index][0]) {
Moments moment = Imgproc.moments(contours.get(index), true);
double area = moment.get_m00();
//if the area is less than 20 px by 20px then it is probably just noise
//if the area is the same as the 3/2 of the image size, probably just a bad filter
//we only want the object with the largest area so we safe a reference area each
//iteration and compare it to the area in the next iteration.
if (area > MIN_OBJECT_AREA && area < MAX_OBJECT_AREA && area > refArea) {
// x = moment.m10 / area;
x= (int) (moment.get_m10()/area);
y = (int) (moment.get_m01()/area);
objectFound = true;
refArea = area;
} else objectFound = false;
}
//}
}
}
}
Swap x and y when you draw: the camera frame is rotated 90° relative to the screen, so the coordinate you compute as x belongs on the display's y axis and vice versa. It's pretty simple.
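As a rough sketch only (the exact signs depend on which way the frame is rotated, so treat the mapping as an assumption to verify on the device), swap the moment-derived coordinates before handing them to the drawObject helper from the question:
// Sketch: map contour coordinates from the rotated camera frame onto screen coordinates.
// One of these mappings should match, depending on the rotation direction:
int drawX = y;                      // frame y becomes screen x
int drawY = FRAME_WIDTH - x;        // frame x becomes the mirrored screen y
// alternatively: drawX = FRAME_HEIGHT - y;  drawY = x;
drawObject(drawX, drawY, sourceImage);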
I am trying to run this code that I found on the net:
package com.opencv.grabcut.android;
import java.io.IOException;
import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.highgui.Highgui;
import org.opencv.imgproc.Imgproc;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.view.Gravity;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnTouchListener;
import android.widget.ImageView;
import android.widget.Toast;
import android.support.v7.app.ActionBarActivity;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
public class Grabcut extends Activity implements OnTouchListener {
ImageView imageView;
Bitmap bitmap;
Canvas canvas;
Scalar color = new Scalar(255, 0, 0, 255);
Point tl, br;
int counter;
Bitmap bitmapResult, bitmapBackground;
Mat dst = new Mat();
final String pathToImage = "/mnt/sdcard/gcut.png";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.grabcut_main);
imageView = (ImageView) this.findViewById(R.id.imageView);
bitmap = BitmapFactory.decodeFile(pathToImage);
Toast msg = Toast.makeText(Grabcut.this, "Press top left and bottom right of the foreground image", Toast.LENGTH_LONG);
msg.setGravity(Gravity.TOP|Gravity.LEFT, 0, 0);
msg.show();
bitmapResult = bitmap.copy(bitmap.getConfig(), true);
canvas = new Canvas(bitmapResult);
imageView.setImageBitmap(bitmapResult);
imageView.setOnTouchListener(this);
tl = new Point();
br = new Point();
counter = 0;
}
//@Override
public boolean onTouch(View v, MotionEvent event) {
if (event.getAction() == MotionEvent.ACTION_DOWN) {
if (counter == 0) {
tl.x = event.getX();
tl.y = event.getY();
counter++;
} else if (counter == 1) {
br.x = event.getX();
br.y = event.getY();
counter++;
Mat img = new Mat();
img = Highgui.imread(pathToImage);
Mat background = new Mat();
try {
background = Utils.loadResource(getApplicationContext(),
R.drawable.wall );
} catch (IOException e) {
e.printStackTrace();
}
backgroundSubtracting(img, background);
Highgui.imwrite("/mnt/sdcard/GRABCUT/rect.png", dst);
Bitmap jpg = BitmapFactory
.decodeFile("/mnt/sdcard/GRABCUT/rect.png");
imageView.setScaleType(ImageView.ScaleType.CENTER_INSIDE);
imageView.setAdjustViewBounds(true);
imageView.setPadding(2, 2, 2, 2);
imageView.setImageBitmap(jpg);
imageView.invalidate();
}
}
return true;
}
private void backgroundSubtracting(Mat img, Mat background) {
Mat firstMask = new Mat();
Mat bgModel = new Mat();
Mat fgModel = new Mat();
Mat mask;
Mat source = new Mat(1, 1, CvType.CV_8U, new Scalar(3.0));
dst = new Mat();
Rect rect = new Rect(tl, br);
Imgproc.grabCut(img, firstMask, rect, bgModel, fgModel, 1, 0 /* GC_INIT_WITH_RECT */);
Core.compare(firstMask, source/* GC_PR_FGD */, firstMask, Core.CMP_EQ);
Mat foreground = new Mat(img.size(), CvType.CV_8UC3, new Scalar(255,
255, 255));
img.copyTo(foreground, firstMask);
Core.rectangle(img, tl, br, color);
Mat tmp = new Mat();
Imgproc.resize(background, tmp, img.size());
background = tmp;
mask = new Mat(foreground.size(), CvType.CV_8UC1, new Scalar(255, 255, 255));
Imgproc.cvtColor(foreground, mask, 6/* COLOR_BGR2GRAY */);
Imgproc.threshold(mask, mask, 254, 255, 1 /* THRESH_BINARY_INV */);
Mat vals = new Mat(1, 1, CvType.CV_8UC3, new Scalar(0.0));
background.copyTo(dst);
background.setTo(vals, mask);
Core.add(background, foreground, dst, mask);
firstMask.release();
source.release();
bgModel.release();
fgModel.release();
vals.release();
}
}
There is one error in there that I don't understand.
The error goes like this: "wall cannot be resolved or is not a field"
Pardon me if I did not explain it clearly, I'm quite new to Android development...
Please help. Thanks :)
Copy an image file named "wall.png" into /res/drawable/ (resource names must be lowercase, with no spaces), then rebuild the project so the R class is regenerated. That should fix the error, because R.drawable.wall only exists once that resource file is present.