Can someone explain what exactly is going on and how I can rectify it? I understand Tesseract has to have an image to target, and presumably this would be the bitmap I've captured and tried to save; that saved location would be my _path variable. Now, what is the DATA_PATH for Tesseract? Does the image need to be stored in a folder called 'tesseract'? Do I create that folder and store some kind of training data in it? I'm looking for an explanation rather than a code example.
I am trying to follow this tutorial (http://gaut.am/making-an-ocr-android-app-using-tesseract/) and checking others to try to understand the paths that all of them use.
public class MainActivity extends Activity {
private static ImageView imageView;
protected String _path;
// protected static Bitmap bit;
static File myDir;
protected static Bitmap mImageBitmap;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
this.imageView = (ImageView) this.findViewById(R.id.imageView1);
Button photoButton = (Button) this.findViewById(R.id.button1);
_path = Environment.getExternalStorageDirectory() + "/images/test.bmp";
Toast t = Toast.makeText(getApplicationContext(), "HEELLLLLLOO", 100000);
t.show();
photoButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// CALL THE PICTURE
dispatchTakePictureIntent(0);
}
});
}
private void handleSmallCameraPhoto(Intent intent) {
Bundle extras = intent.getExtras();
mImageBitmap = (Bitmap) extras.get("data");
imageView.setImageBitmap(mImageBitmap);
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 4;
Bitmap bitmap = BitmapFactory.decodeFile( _path, options );
imageView.setImageBitmap(bitmap);
//_path = path to the image to be OCRed
ExifInterface exif;
try {
exif = new ExifInterface(_path);
int exifOrientation = exif.getAttributeInt(
ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_NORMAL);
int rotate = 0;
switch (exifOrientation) {
case ExifInterface.ORIENTATION_ROTATE_90:
rotate = 90;
break;
case ExifInterface.ORIENTATION_ROTATE_180:
rotate = 180;
break;
case ExifInterface.ORIENTATION_ROTATE_270:
rotate = 270;
break;
}
if (rotate != 0) {
int w = bitmap.getWidth();
int h = bitmap.getHeight();
// Setting pre rotate
Matrix mtx = new Matrix();
mtx.preRotate(rotate);
// Rotating Bitmap & convert to ARGB_8888, required by tess
bitmap = Bitmap.createBitmap(bitmap, 0, 0, w, h, mtx, false);
bitmap = bitmap.copy(Bitmap.Config.ARGB_8888, true);
}
TessBaseAPI baseApi = new TessBaseAPI();
// DATA_PATH = Path to the storage
// lang for which the language data exists, usually "eng"
baseApi.init(_path, "eng"); //THIS SHOULD BE DATA_PATH ?
baseApi.setImage(bitmap);
String recognizedText = baseApi.getUTF8Text();
System.out.println(recognizedText);
baseApi.end();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
protected void onActivityResult(int requestCode, int resultCode, Intent data){
handleSmallCameraPhoto(data);
}
private void dispatchTakePictureIntent(int actionCode) {
Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
startActivityForResult(takePictureIntent, actionCode);
}
protected static void identifyunicode() {
// DATA_PATH = Path to the storage
// lang for which the language data exists, usually "eng"
/*
* TessBaseAPI baseApi = new TessBaseAPI();
* baseApi.init(myDir.toString(), "eng"); // myDir + //
* "/tessdata/eng.traineddata" // must be present baseApi.setImage(bit);
* String recognizedText = baseApi.getUTF8Text(); // Log or otherwise //
* display this // string... baseApi.end();
*/
}
}
DATA_PATH is the path to which you copied your tessdata files from the assets folder.
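Before the helper classes below, here is a minimal sketch of how the two paths relate; the directory name and the wrapper method are only assumptions for illustration, not part of any API:
// Sketch only; needs the usual imports. Tesseract wants the PARENT directory of
// "tessdata", not the path of the image.
// Assumed layout: <external storage>/tesseract/tessdata/eng.traineddata
private String runTessSketch(Bitmap bitmap) {
    String dataPath = Environment.getExternalStorageDirectory()
            + File.separator + "tesseract" + File.separator;
    TessBaseAPI baseApi = new TessBaseAPI();
    baseApi.init(dataPath, "eng");   // data path + language, NOT the image path
    baseApi.setImage(bitmap);        // the captured bitmap goes here instead
    String text = baseApi.getUTF8Text();
    baseApi.end();
    return text;
}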
import java.io.File;
import java.util.ArrayList;
import android.os.Environment;
public class Utils {
public static boolean isSDCardMounted() {
boolean isMounted = false;
String state = Environment.getExternalStorageState();
if (Environment.MEDIA_MOUNTED.equals(state)) {
isMounted = true;
} else if (Environment.MEDIA_BAD_REMOVAL.equals(state)) {
isMounted = false;
} else if (Environment.MEDIA_CHECKING.equals(state)) {
isMounted = false;
} else if (Environment.MEDIA_MOUNTED_READ_ONLY.equals(state)) {
isMounted = false;
} else if (Environment.MEDIA_NOFS.equals(state)) {
isMounted = false;
} else if (Environment.MEDIA_REMOVED.equals(state)) {
isMounted = false;
} else if (Environment.MEDIA_UNMOUNTABLE.equals(state)) {
isMounted = false;
} else if (Environment.MEDIA_UNMOUNTED.equals(state)) {
isMounted = false;
}
return isMounted;
}
public static boolean isDirectoryExists(final String filePath) {
boolean isDirectoryExists = false;
File mFilePath = new File(filePath);
if(mFilePath.exists()) {
isDirectoryExists = true;
} else {
isDirectoryExists = mFilePath.mkdirs();
}
return isDirectoryExists;
}
public static boolean deleteFile(final String filePath) {
boolean isFileExists = false;
File mFilePath = new File(filePath);
if(mFilePath.exists()) {
mFilePath.delete();
isFileExists = true;
}
return isFileExists;
}
public static String getDataPath() {
String returnedPath = "";
final String mDirName = "tesseract";
final String mDataDirName = "tessdata";
if(isSDCardMounted()) {
final String mSDCardPath = Environment.getExternalStorageDirectory() + File.separator + mDirName;
if(isDirectoryExists(mSDCardPath)) {
final String mSDCardDataPath = Environment.getExternalStorageDirectory() + File.separator + mDirName +
File.separator + mDataDirName;
isDirectoryExists(mSDCardDataPath);
return mSDCardPath;
}
}
return returnedPath;
}
}
Activity Class
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import android.app.Activity;
import android.content.Intent;
import android.content.res.AssetManager;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.ColorMatrix;
import android.graphics.ColorMatrixColorFilter;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.media.ExifInterface;
import android.net.Uri;
import android.os.Bundle;
import android.provider.MediaStore;
import android.view.View;
import android.widget.TextView;
import com.googlecode.tesseract.android.TessBaseAPI;
public class AndroidCommonTest extends Activity {
private static final String TAG = AndroidCommonTest.class.getSimpleName();
private TextView txtGotTime = null;
private final int START_CODE = 101;
private String mDirPath = null;
private Uri mOutPutUri = null;
private static final String lang = "eng";
private String mPath = null;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
txtGotTime = (TextView) findViewById(R.id.txtGotTime);
mDirPath = Utils.getDataPath();
mPath = mDirPath + File.separator + "test.jpg";
android.util.Log.i(TAG, "mDirPath: " + mDirPath + " mPath: " + mPath);
if (!(new File(mDirPath + File.separator + "tessdata" + File.separator + lang + ".traineddata")).exists()) {
try {
AssetManager assetManager = getAssets();
InputStream in = assetManager.open("tessdata" + File.separator + lang + ".traineddata");
OutputStream out = new FileOutputStream(mDirPath + File.separator
+ "tessdata" + File.separator + lang + ".traineddata");
byte[] buf = new byte[8024];
int len;
while ((len = in.read(buf)) > 0) {
out.write(buf, 0, len);
}
in.close();
out.close();
} catch (IOException e) {
android.util.Log.e(TAG, "Was unable to copy " + lang + " traineddata " + e.toString());
}
} else {
processImage(mDirPath + File.separator + "six.jpg", 0);
}
}
public void getTime(View view) {
android.util.Log.i(TAG, "mDirPath: " + mDirPath + " mPath: " + mPath);
if(mDirPath != null && mDirPath.length() > 0) {
mOutPutUri = Uri.fromFile(new File(mPath));
Intent intent = new Intent(android.provider.MediaStore.ACTION_IMAGE_CAPTURE);
intent.putExtra(android.provider.MediaStore.EXTRA_OUTPUT, mOutPutUri);
startActivityForResult(intent, START_CODE);
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if(requestCode == START_CODE) {
if(resultCode == Activity.RESULT_OK) {
int rotation = -1;
long fileSize = new File(mPath).length();
android.util.Log.i(TAG, "fileSize " + fileSize);
//Suppose Device Supports ExifInterface
ExifInterface exif;
try {
exif = new ExifInterface(mPath);
int exifOrientation = exif.getAttributeInt(
ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_NORMAL);
switch (exifOrientation) {
case ExifInterface.ORIENTATION_ROTATE_90 :
rotation = 90;
break;
case ExifInterface.ORIENTATION_ROTATE_180 :
rotation = 180;
break;
case ExifInterface.ORIENTATION_ROTATE_270 :
rotation = 270;
break;
case ExifInterface.ORIENTATION_NORMAL:
case ExifInterface.ORIENTATION_UNDEFINED:
rotation = 0;
break;
}
android.util.Log.i(TAG, "Exif:rotation " + rotation);
if (rotation != -1) {
processImage(mPath, rotation);
} else {
//Device Does Not Support ExifInterface
Cursor mediaCursor = getContentResolver().query(mOutPutUri,
new String[] { MediaStore.Images.ImageColumns.ORIENTATION,
MediaStore.MediaColumns.SIZE },
null, null, null);
if (mediaCursor != null && mediaCursor.getCount() != 0 ) {
while(mediaCursor.moveToNext()){
long size = mediaCursor.getLong(1);
android.util.Log.i(TAG, "Media:size " + size);
if(size == fileSize){
rotation = mediaCursor.getInt(0);
break;
}
}
android.util.Log.i(TAG, "Media:rotation " + rotation);
processImage(mPath, rotation);
} else {
android.util.Log.i(TAG, "Android Problem");
txtGotTime.setText("Android Problem");
}
}
}
catch (IOException exception) {
exception.printStackTrace();
}
} else if(resultCode == Activity.RESULT_CANCELED) {
android.util.Log.i(TAG, "RESULT_CANCELED");
txtGotTime.setText("RESULT_CANCELED");
}
}
}
private void processImage(final String filePath, final int rotation) {
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 2;
options.inPurgeable = true;
Bitmap bitmap = BitmapFactory.decodeFile(filePath, options);
if (bitmap != null) {
int width = bitmap.getWidth();
int height = bitmap.getHeight();
Matrix matrix = new Matrix();
matrix.postRotate(rotation);
bitmap = Bitmap.createBitmap(bitmap, 0, 0, width, height, matrix, false);
bitmap = bitmap.copy(Bitmap.Config.ARGB_8888, true);
TessBaseAPI baseApi = new TessBaseAPI();
baseApi.setDebug(true);
baseApi.init(mDirPath, lang);
baseApi.setPageSegMode(7); // PSM 7 = treat the image as a single text line
baseApi.setImage(bitmap);
String recognizedText = baseApi.getUTF8Text();
android.util.Log.i(TAG, "recognizedText: 1 " + recognizedText);
baseApi.end();
if(lang.equalsIgnoreCase("eng")) {
recognizedText = recognizedText.replaceAll("[^a-zA-Z0-9]+", " ");
}
android.util.Log.i(TAG, "recognizedText: 2 " + recognizedText.trim());
txtGotTime.setText(recognizedText.trim());
}
}
private void saveImageAndroid(final Bitmap passedBitmap) {
try {
FileOutputStream mFileOutStream = new FileOutputStream(mDirPath + File.separator + "savedAndroid.jpg");
passedBitmap.compress(Bitmap.CompressFormat.JPEG, 100, mFileOutStream);
mFileOutStream.flush();
mFileOutStream.close();
} catch (Exception e) {
e.printStackTrace();
}
}
}
Put the tessdata folder in your assets folder.
Do not forget to give the reference path of the Tess library.
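For reference, a quick sanity-check sketch of the layout the copy code in onCreate() above assumes (names are taken from the classes above; the log tag is made up):
// assets/tessdata/eng.traineddata                          <- bundled with the APK
// <external storage>/tesseract/tessdata/eng.traineddata    <- copied here at runtime
File trained = new File(Utils.getDataPath(), "tessdata" + File.separator + lang + ".traineddata");
android.util.Log.i("TessCheck", "traineddata present: " + trained.exists());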
Thanks.
Related
I am very new to Android, and I have to implement code that opens the front camera by default instead of the rear camera. My minimum SDK is API 9 and my target SDK is API 21. I have a PicturePlugin.java file, and the code is:
public class PicturePlugin extends CordovaPlugin
{
private String callback="data";
private int IMAGE_TAKEN=1;
private CallbackContext callbackContext;
private String TAG="FilePlugin";
private int imageWidth,imageHeight;
private String imagePath;
private File destImageFile;
private String mode = "BACK";
private static String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss",
Locale.getDefault()).format(new Date());
CameraManager manager;
private static final String TAG1 = null ;
public boolean execute(String action, JSONArray args, CallbackContext callbackContext)
{
this.callbackContext = callbackContext;
this.cordova.getActivity().getApplicationContext().getPackageName();
try
{
JSONObject object=(JSONObject) args.get(0);
imageWidth=object.getInt("targetWidth");
imageHeight=object.getInt("targetHeight");
if(object.has("mode")){
if(object.getString("mode").equals("FRONT")){
mode = "FRONT";
openFrontFacingCamera();
Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
intent.putExtra("android.intent.extras.CAMERA_FACING", android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT);
}
}else{
mode = "BACK";
}
Intent camera=new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
imagePath = getCapturedImageExternal();
destImageFile = new File(imagePath);
camera.putExtra(MediaStore.EXTRA_OUTPUT,
Uri.fromFile(destImageFile));
this.cordova.setActivityResultCallback(PicturePlugin.this);
cordova.getActivity().startActivityForResult(camera,IMAGE_TAKEN);
}
catch (Exception e)
{
Log.i(TAG, "Exception "+e.getMessage());
callbackContext.error("failed");
}
return true;
}
private Camera openFrontFacingCamera()
{
int cameraCount = 0;
Camera cam = null;
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
cameraCount = Camera.getNumberOfCameras();
for ( int camId = 0; camId < cameraCount; camId++ ) {
Camera.getCameraInfo( camId, cameraInfo );
if ( cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT ) {
try {
cam = Camera.open( camId );
} catch (RuntimeException e) {
Log.e(TAG1, "Camera failed to open: " + e.getLocalizedMessage());
}
}
}
return cam;
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent intent)
{
if (requestCode == IMAGE_TAKEN && resultCode == Activity.RESULT_OK) {
File finalFile = null;
File fileTobeDeleted = null;
Bitmap photo = null;
File sd = new File(Environment.getExternalStorageDirectory(),
Constants.GOBIZMO_IMAGE_DIR);
String destinationImagePath = File.separator
+ Constants.TEMP_CAMERA_IMAGE + ".JPEG";
File destination = new File(sd, destinationImagePath);
sd.setWritable(true);
try {
String encoded;
imagePath = destImageFile.getAbsolutePath();
finalFile = new File(imagePath);
fileTobeDeleted = new File(imagePath);
int angle = getAngle(finalFile.getAbsolutePath());
if (finalFile.exists()) {
photo = BitmapFactory.decodeFile(finalFile
.getAbsolutePath());
Matrix matrix = new Matrix();
matrix.postRotate(angle);
Bitmap scaledBitmap = Bitmap.createScaledBitmap(photo, imageWidth, imageHeight, true);
photo = Bitmap.createBitmap(scaledBitmap, 0, 0, imageWidth, imageHeight, matrix, true);
// //////New orientation fix for all
// devices///////////////////////
try {
ByteArrayOutputStream stream = new ByteArrayOutputStream();
photo.compress(Bitmap.CompressFormat.PNG, 100, stream);
byte[] byteArray = stream.toByteArray();
encoded = Base64.encodeToString(byteArray, Base64.DEFAULT);
Log.e("base 64 image", encoded);
JSONObject object = new JSONObject();
object.put(callback, encoded);
finalFile.delete();
fileTobeDeleted.delete();
//imagePath = destination.getPath();
if(new File(imagePath).exists()){
new File(imagePath).delete();
}
callbackContext.success(encoded);
} catch (Exception e) {
e.printStackTrace();
Log.i(TAG, "onActivityResult " + e.getMessage());
finalFile.delete();
fileTobeDeleted.delete();
if(new File(imagePath).exists()){
new File(imagePath).delete();
}
callbackContext.error("failed");
}
}
} catch (Exception exp) {
exp.printStackTrace();
Log.i(TAG, "onActivityResult " + exp.getMessage());
try {
finalFile.delete();
fileTobeDeleted.delete();
imagePath = destination.getPath();
if(new File(imagePath).exists()){
new File(imagePath).delete();
}
callbackContext.error("failed");
}catch(Exception innerexception){
innerexception.printStackTrace();
}
}
}
}
public Uri getImageUri(Context inContext, Bitmap inImage)
{
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
inImage.compress(Bitmap.CompressFormat.JPEG, 100, bytes);
String path = Images.Media.insertImage(inContext.getContentResolver(), inImage, "Title", null);
return Uri.parse(path);
}
public String getRealPathFromURI(Uri uri)
{
Cursor cursor = cordova.getActivity().getContentResolver().query(uri, null, null, null, null);
cursor.moveToFirst();
int idx = cursor.getColumnIndex(Images.ImageColumns.DATA);
return cursor.getString(idx);
}
public static String getCapturedImageExternal() {
// External sdcard location
File mediaStorageDir = new File(
android.os.Environment.getExternalStorageDirectory(),
"Gobizmo image");
// Create the storage directory if it does not exist
if (!mediaStorageDir.exists()){
mediaStorageDir.mkdirs();
}
if (!mediaStorageDir.exists() && !mediaStorageDir.mkdirs()) {
Log.d(Constants.GOBIZMO_IMAGE_DIR, "Oops! Failed create "
+ Constants.GOBIZMO_IMAGE_DIR + " directory");
return null;
}
// Create a timestamp
return mediaStorageDir.getPath() + File.separator + Constants.TEMP_CAMERA_IMAGE
+ ".JPEG";
}
public static String getRealPathFromURI(Context context, Uri contentUri) {
String filepath = "";
String uriPath = contentUri.toString();
// Handle local file and remove url encoding
if (uriPath.startsWith("file://")) {
filepath = uriPath.replace("file://", "");
try {
return URLDecoder.decode(filepath, "UTF-8");
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
}
try {
String[] projection = { MediaStore.Images.Media.DATA };
Cursor cursor = context.getContentResolver().query(contentUri,
projection, null, null, null);
if (cursor != null && cursor.getCount() != 0) {
int column_index = cursor
.getColumnIndex(MediaStore.Images.Media.DATA);
cursor.moveToFirst();
filepath = cursor.getString(column_index);
}
} catch (Exception e) {
Log.e("Path Error", e.toString());
}
return filepath;
}
private int getAngle(String path)
{
int angle=0;
try
{
ExifInterface ei = new ExifInterface(path);
int orientation = ei.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
Log.i(TAG, "getAngle "+orientation);
switch(orientation)
{
case ExifInterface.ORIENTATION_ROTATE_90:
angle=90;
break;
case ExifInterface.ORIENTATION_ROTATE_180:
angle=180;
break;
case ExifInterface.ORIENTATION_ROTATE_270 :
angle=270;
break;
default:
angle=0;
}
}
catch(Exception ex)
{
Log.i("getAngle", "getAngle :: "+ex.getMessage());
}
return angle;
}
}
Please help me; this is the first plugin I have handled.
This code will work whenever the JSON key 'mode' is found:
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.UUID;
import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import org.json.JSONArray;
import org.json.JSONObject;
import org.maxmobility.util.Constants;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.hardware.Camera;
import android.hardware.camera2.CameraManager;
import android.media.ExifInterface;
import android.net.Uri;
import android.os.Build;
import android.os.Environment;
import android.provider.MediaStore;
import android.provider.MediaStore.Images;
import android.util.Base64;
import android.util.Log;
import android.view.SurfaceHolder;
public class PicturePlugin extends CordovaPlugin
{
private String callback="data";
private int IMAGE_TAKEN=1;
private CallbackContext callbackContext;
private String TAG="FilePlugin";
private int imageWidth,imageHeight;
private String imagePath;
private File destImageFile;
private String mode = "BACK";
private static String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss",
Locale.getDefault()).format(new Date());
static Camera camera = null;
private static final String TAG1 = null ;
public boolean execute(String action, JSONArray args, CallbackContext callbackContext)
{
this.callbackContext = callbackContext;
this.cordova.getActivity().getApplicationContext().getPackageName();
try
{
JSONObject object=(JSONObject) args.get(0);
imageWidth=object.getInt("targetWidth");
imageHeight=object.getInt("targetHeight");
//checking whether json object has "mode" or not
if(object.has("mode"))
{
//if json object has "mode" and that is "FRONT"
if(object.getString("mode").equals("FRONT"))
{
mode = "FRONT";
Intent camera=new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
camera.putExtra("android.intent.extras.CAMERA_FACING", 1); // used this line of code to start the intent of camera
//The ID is 1 that opens FRONT CAMERA of a device
imagePath = getCapturedImageExternal();
destImageFile = new File(imagePath);
camera.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(destImageFile));
this.cordova.setActivityResultCallback(PicturePlugin.this);
cordova.getActivity().startActivityForResult(camera,IMAGE_TAKEN);
}
else
{
// if json object has "mode" and that is "BACK"
mode = "BACK";
Intent camera=new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
camera.putExtra("android.intent.extras.CAMERA_FACING", 0); //ID is 0 that opens REAR CAMERA of a device
imagePath = getCapturedImageExternal();
destImageFile = new File(imagePath);
camera.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(destImageFile));
this.cordova.setActivityResultCallback(PicturePlugin.this);
cordova.getActivity().startActivityForResult(camera,IMAGE_TAKEN);
}
}
else
{
//if no "mode" is found in json object then by default rear camera is opening
Intent camera=new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
camera.putExtra("android.intent.extras.CAMERA_FACING", 0);
imagePath = getCapturedImageExternal();
destImageFile = new File(imagePath);
camera.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(destImageFile));
this.cordova.setActivityResultCallback(PicturePlugin.this);
cordova.getActivity().startActivityForResult(camera,IMAGE_TAKEN);
}
}
catch (Exception e)
{
Log.i(TAG, "Exception "+e.getMessage());
callbackContext.error("failed");
}
return true;
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent intent)
{
if (requestCode == IMAGE_TAKEN && resultCode == Activity.RESULT_OK) {
File finalFile = null;
File fileTobeDeleted = null;
Bitmap photo = null;
File sd = new File(Environment.getExternalStorageDirectory(),
Constants.GOBIZMO_IMAGE_DIR);
String destinationImagePath = File.separator
+ Constants.TEMP_CAMERA_IMAGE + ".JPEG";
File destination = new File(sd, destinationImagePath);
sd.setWritable(true);
try {
String encoded;
imagePath = destImageFile.getAbsolutePath();
finalFile = new File(imagePath);
fileTobeDeleted = new File(imagePath);
int angle = getAngle(finalFile.getAbsolutePath());
if (finalFile.exists()) {
photo = BitmapFactory.decodeFile(finalFile
.getAbsolutePath());
Matrix matrix = new Matrix();
matrix.postRotate(angle);
Bitmap scaledBitmap = Bitmap.createScaledBitmap(photo, imageWidth, imageHeight, true);
photo = Bitmap.createBitmap(scaledBitmap, 0, 0, imageWidth, imageHeight, matrix, true);
// //////New orientation fix for all
// devices///////////////////////
try {
ByteArrayOutputStream stream = new ByteArrayOutputStream();
photo.compress(Bitmap.CompressFormat.PNG, 100, stream);
byte[] byteArray = stream.toByteArray();
encoded = Base64.encodeToString(byteArray, Base64.DEFAULT);
Log.e("base 64 image", encoded);
JSONObject object = new JSONObject();
object.put(callback, encoded);
finalFile.delete();
fileTobeDeleted.delete();
//imagePath = destination.getPath();
if(new File(imagePath).exists()){
new File(imagePath).delete();
}
callbackContext.success(encoded);
} catch (Exception e) {
e.printStackTrace();
Log.i(TAG, "onActivityResult " + e.getMessage());
finalFile.delete();
fileTobeDeleted.delete();
if(new File(imagePath).exists()){
new File(imagePath).delete();
}
callbackContext.error("failed");
}
}
} catch (Exception exp) {
exp.printStackTrace();
Log.i(TAG, "onActivityResult " + exp.getMessage());
try {
finalFile.delete();
fileTobeDeleted.delete();
imagePath = destination.getPath();
if(new File(imagePath).exists()){
new File(imagePath).delete();
}
callbackContext.error("failed");
}catch(Exception innerexception){
innerexception.printStackTrace();
}
}
}
}
public Uri getImageUri(Context inContext, Bitmap inImage)
{
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
inImage.compress(Bitmap.CompressFormat.JPEG, 100, bytes);
String path = Images.Media.insertImage(inContext.getContentResolver(), inImage, "Title", null);
return Uri.parse(path);
}
public String getRealPathFromURI(Uri uri)
{
Cursor cursor = cordova.getActivity().getContentResolver().query(uri, null, null, null, null);
cursor.moveToFirst();
int idx = cursor.getColumnIndex(Images.ImageColumns.DATA);
return cursor.getString(idx);
}
public static String getCapturedImageExternal() {
// External sdcard location
File mediaStorageDir = new File(
android.os.Environment.getExternalStorageDirectory(),
"Gobizmo image");
// Create the storage directory if it does not exist
if (!mediaStorageDir.exists()){
mediaStorageDir.mkdirs();
}
if (!mediaStorageDir.exists() && !mediaStorageDir.mkdirs()) {
Log.d(Constants.GOBIZMO_IMAGE_DIR, "Oops! Failed create "
+ Constants.GOBIZMO_IMAGE_DIR + " directory");
return null;
}
// Create a timestamp
return mediaStorageDir.getPath() + File.separator + Constants.TEMP_CAMERA_IMAGE
+ ".JPEG";
}
public static String getRealPathFromURI(Context context, Uri contentUri) {
String filepath = "";
String uriPath = contentUri.toString();
// Handle local file and remove url encoding
if (uriPath.startsWith("file://")) {
filepath = uriPath.replace("file://", "");
try {
return URLDecoder.decode(filepath, "UTF-8");
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
}
try {
String[] projection = { MediaStore.Images.Media.DATA };
Cursor cursor = context.getContentResolver().query(contentUri,
projection, null, null, null);
if (cursor != null && cursor.getCount() != 0) {
int column_index = cursor
.getColumnIndex(MediaStore.Images.Media.DATA);
cursor.moveToFirst();
filepath = cursor.getString(column_index);
}
} catch (Exception e) {
Log.e("Path Error", e.toString());
}
return filepath;
}
private int getAngle(String path)
{
int angle=0;
try
{
ExifInterface ei = new ExifInterface(path);
int orientation = ei.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
Log.i(TAG, "getAngle "+orientation);
switch(orientation)
{
case ExifInterface.ORIENTATION_ROTATE_90:
angle=90;
break;
case ExifInterface.ORIENTATION_ROTATE_180:
angle=180;
break;
case ExifInterface.ORIENTATION_ROTATE_270 :
angle=270;
break;
default:
angle=0;
}
}
catch(Exception ex)
{
Log.i("getAngle", "getAngle :: "+ex.getMessage());
}
return angle;
}
}
Do not forget to add the permission to use the front camera in your AndroidManifest.xml.
It will be:
<uses-feature android:name="android.hardware.camera.front" /> <!-- used this feature to open the front camera -->
This code may help you:
private Camera openFrontFacingCameraGingerbread() {
int cameraCount = 0;
Camera cam = null;
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
cameraCount = Camera.getNumberOfCameras();
for (int camIdx = 0; camIdx<cameraCount; camIdx++) {
Camera.getCameraInfo(camIdx, cameraInfo);
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
try {
cam = Camera.open(camIdx);
} catch (RuntimeException e) {
Log.e("Your_TAG", "Camera failed to open: " + e.getLocalizedMessage());
}
}
}
return cam;
}
Also add these permissions and features in your app:
<uses-permission android:name="android.permission.CAMERA" />
<uses-feature android:name="android.hardware.camera" android:required="false" />
<uses-feature android:name="android.hardware.camera.front" android:required="false" />
This might help.
For the full code, see https://github.com/googlesamples/android-Camera2Basic
Get the front or back lens by:
Activity activity = getActivity();
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
for (String cameraId : manager.getCameraIdList()) {
CameraCharacteristics characteristics
= manager.getCameraCharacteristics(cameraId);
// Choose front or back lens
Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
if (facing != null && facing == CameraCharacteristics.LENS_FACING_BACK) {
continue;
}
// ... use this cameraId (a front-facing lens) to open the camera ...
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
We cannot see the JSON object you are parsing, but if this condition is met,
object.getString("mode").equals("FRONT")
then your code is looking for the cameras and opening the front one...
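For clarity, here is a hypothetical example of the first element of args that would take that branch; the real object is built on the Cordova JavaScript side, which we cannot see, and the sizes are made up:
// Uses org.json.JSONObject / JSONException, as in the plugin above.
JSONObject buildExampleArgs() throws JSONException {
    JSONObject object = new JSONObject();
    object.put("targetWidth", 640);   // made-up value
    object.put("targetHeight", 480);  // made-up value
    object.put("mode", "FRONT");      // only this value sends execute() down the front-camera branch
    return object;
}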
I am trying to save a video from a VideoView to the SD card by touching the VideoView. There are no errors in LogCat, but the Toast I set up says "Error during video saving", which tells me the boolean I set up never reaches the point where it is set to true; the problem must be in the file I/O part. One part that does work: the directory gets created, but the file itself is not found in the directory; it's empty.
Does anyone see any mistakes?
I will post my two main methods that are important, then below I'll post the whole class.
UPDATE: Here is my working code (I changed a couple of lines in the saveVideo method).
// save your video to SD card
protected void saveVideo(final Uri uriVideo){
// click the video to save it
mVideoView.setOnTouchListener(new View.OnTouchListener() {
public boolean onTouch(View v, MotionEvent event) {
boolean success = false;
// make the directory
File vidDir = new File(android.os.Environment.getExternalStoragePublicDirectory
(Environment.DIRECTORY_MOVIES) + File.separator + "Saved iCute Videos");
vidDir.mkdirs();
// create unique identifier
Random generator = new Random();
int n = 100;
n = generator.nextInt(n);
// create file name
String videoName = "Video_" + n + ".mp4";
File fileVideo = new File(vidDir.getAbsolutePath(), videoName);
try {
fileVideo.createNewFile();
success = true;
} catch (IOException e) {
e.printStackTrace();
}
if (success) {
Toast.makeText(getApplicationContext(), "Video saved!",
Toast.LENGTH_LONG).show();
} else {
Toast.makeText(getApplicationContext(),
"Error during video saving", Toast.LENGTH_LONG).show();
}
return true;
}
});
}
My old, non-working saveVideo() method:
// save your video to SD card
protected void saveVideo(final Uri uriVideo){
// click the video to save it
mVideoView.setOnTouchListener(new View.OnTouchListener() {
public boolean onTouch(View v, MotionEvent event) {
String sourceVideoName = uriVideo.getPath();
boolean success = false;
BufferedInputStream bis = null;
BufferedOutputStream bos = null;
// make the directory
File vidDir = new File(android.os.Environment.getExternalStoragePublicDirectory
(Environment.DIRECTORY_MOVIES) + File.separator + "Saved iCute Videos");
vidDir.mkdirs();
// create unique identifier
Random generator = new Random();
int n = 100;
n = generator.nextInt(n);
// create file name
String videoName = "Video_" + n + ".mp4";
fileVideo = new File(vidDir.getPath(), videoName);
try {
bis = new BufferedInputStream(new FileInputStream(sourceVideoName));
bos = new BufferedOutputStream(new FileOutputStream(fileVideo, false));
byte[] buf = new byte[8192];
bis.read(buf);
do {
bos.write(buf);
} while (bis.read(buf) != 0);
success = true;
} catch (IOException e) {
}finally {
try {
if (bis != null) bis.close();
if (bos != null) bos.close();
} catch (IOException e) {
}
}
if (success) {
Toast.makeText(getApplicationContext(), "Video saved!",
Toast.LENGTH_LONG).show();
} else {
Toast.makeText(getApplicationContext(),
"Error during video saving", Toast.LENGTH_LONG).show();
}
return true;
}
});
}
My dispatchTakeVideoIntent() method. I commented out some code that might be needed... I kept getting a NullPointerException on the fileUri line (was it from fileUri or fileVideo?), and I'm not sure why.
// Captures video from Android camera component
protected void dispatchTakeVideoIntent() {
Intent takeVideoIntent = new Intent(MediaStore.ACTION_VIDEO_CAPTURE);
if (takeVideoIntent.resolveActivity(getPackageManager()) != null) {
// // set name of video
// Uri fileUri = Uri.fromFile(fileVideo);
// takeVideoIntent.putExtra(MediaStore.EXTRA_OUTPUT, fileUri);
// set the video image quality to high
takeVideoIntent.putExtra(MediaStore.EXTRA_VIDEO_QUALITY, 1);
startActivityForResult(takeVideoIntent, ACTION_TAKE_VIDEO);
}
}
MakePhotoVideo.java
package org.azurespot.makecute;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.support.v7.app.ActionBarActivity;
import android.util.Log;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.Toast;
import android.widget.VideoView;
import org.azurespot.R;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Random;
public class MakePhotoVideo extends ActionBarActivity {
private static final int ACTION_TAKE_PHOTO = 1;
private static final int ACTION_TAKE_VIDEO = 2;
private static final String BITMAP_STORAGE_KEY = "viewbitmap";
private static final String IMAGEVIEW_VISIBILITY_STORAGE_KEY = "imageviewvisibility";
private ImageView mImageView;
private Bitmap mImageBitmap;
private static final String VIDEO_STORAGE_KEY = "viewvideo";
private static final String VIDEOVIEW_VISIBILITY_STORAGE_KEY = "videoviewvisibility";
private VideoView mVideoView;
private Uri mVideoUri;
File fileVideo;
private String mCurrentPhotoPath;
private static final String JPEG_FILE_PREFIX = "IMG_";
private static final String JPEG_FILE_SUFFIX = ".jpg";
private PhotoStorageDirFactory mPhotoStorageDirFactory = null;
/* Photo album for this application */
private String getAlbumName() {
return getString(R.string.album_name);
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_make_photo_video);
mImageView = (ImageView) findViewById(R.id.taken_photo);
mVideoView = (VideoView) findViewById(R.id.video_view);
mVideoView.setVisibility(View.INVISIBLE);
mImageBitmap = null;
mVideoUri = null;
Button photoBtn = (Button) findViewById(R.id.click);
setBtnListenerOrDisable(
photoBtn,
mTakePicOnClickListener,
MediaStore.ACTION_IMAGE_CAPTURE
);
Button videoBtn = (Button) findViewById(R.id.record_video);
setBtnListenerOrDisable(
videoBtn,
mTakeVidOnClickListener,
MediaStore.ACTION_VIDEO_CAPTURE
);
mPhotoStorageDirFactory = new BasePhotoDirFactory();
// Shows the up carat near app icon in ActionBar
getSupportActionBar().setDisplayUseLogoEnabled(false);
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
}
private File getAlbumDir() {
File storageDir = null;
if (Environment.MEDIA_MOUNTED.equals(Environment.getExternalStorageState())) {
storageDir = mPhotoStorageDirFactory.getAlbumStorageDir(getAlbumName());
if (storageDir != null) {
if (! storageDir.mkdirs()) {
if (! storageDir.exists()){
Log.d("Camera", "failed to create directory");
return null;
}
}
}
} else {
Log.v(getString(R.string.app_name), "External storage is not mounted READ/WRITE.");
}
return storageDir;
}
private File createImageFile() throws IOException {
// Create an image file name
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
String imageFileName = JPEG_FILE_PREFIX + timeStamp + "_";
File albumF = getAlbumDir();
File imageF = File.createTempFile(imageFileName, JPEG_FILE_SUFFIX, albumF);
return imageF;
}
private File setUpPhotoFile() throws IOException {
File f = createImageFile();
mCurrentPhotoPath = f.getAbsolutePath();
return f;
}
private void setPic() {
/* There isn't enough memory to open up more than a couple camera photos */
/* So pre-scale the target bitmap into which the file is decoded */
/* Get the size of the ImageView */
int targetW = mImageView.getWidth();
int targetH = mImageView.getHeight();
/* Get the size of the image */
BitmapFactory.Options bmOptions = new BitmapFactory.Options();
bmOptions.inJustDecodeBounds = true;
BitmapFactory.decodeFile(mCurrentPhotoPath, bmOptions);
int photoW = bmOptions.outWidth;
int photoH = bmOptions.outHeight;
/* Figure out which way needs to be reduced less */
int scaleFactor = 1;
if ((targetW > 0) || (targetH > 0)) {
scaleFactor = Math.min(photoW/targetW, photoH/targetH);
}
/* Set bitmap options to scale the image decode target */
bmOptions.inJustDecodeBounds = false;
bmOptions.inSampleSize = scaleFactor;
bmOptions.inPurgeable = true;
/* Decode the JPEG file into a Bitmap */
Bitmap bitmap = BitmapFactory.decodeFile(mCurrentPhotoPath, bmOptions);
bitmap = rotateBitmap(bitmap, 90);
savePhoto(bitmap);
/* Associate the Bitmap to the ImageView, make sure the VideoView
* is cleared to replace with ImageView */
mImageView.setImageBitmap(bitmap);
mVideoUri = null;
mImageView.setVisibility(View.VISIBLE);
mVideoView.setVisibility(View.INVISIBLE);
}
// save your photo to SD card
private void savePhoto(final Bitmap bitmapPhoto){
// set OnClickListener to save the photo
mImageView.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
boolean success = false;
File photoDir = new File(Environment.getExternalStoragePublicDirectory
(Environment.DIRECTORY_PICTURES) + "/Saved iCute Photos");
photoDir.mkdirs();
Random generator = new Random();
int n = 10000;
n = generator.nextInt(n);
String photoName = "Image_"+ n +".jpg";
File filePhoto = new File (photoDir, photoName);
// if (filePhoto.exists ()) filePhoto.delete ();
try {
FileOutputStream out = new FileOutputStream(filePhoto);
bitmapPhoto.compress(Bitmap.CompressFormat.JPEG, 100, out);
out.flush();
out.close();
success = true;
} catch (Exception e) {
e.printStackTrace();
}
if (success) {
Toast.makeText(getApplicationContext(), "Image saved!",
Toast.LENGTH_LONG).show();
} else {
Toast.makeText(getApplicationContext(),
"Error during image saving", Toast.LENGTH_LONG).show();
}
}
});
}
// save your video to SD card
protected void saveVideo(final Uri uriVideo){
// click the video to save it
mVideoView.setOnTouchListener(new View.OnTouchListener() {
public boolean onTouch(View v, MotionEvent event) {
String sourceVideoName = uriVideo.getPath();
boolean success = false;
BufferedInputStream bis = null;
BufferedOutputStream bos = null;
// make the directory
File vidDir = new File(android.os.Environment.getExternalStoragePublicDirectory
(Environment.DIRECTORY_MOVIES) + File.separator + "Saved iCute Videos");
vidDir.mkdirs();
// create unique identifier
Random generator = new Random();
int n = 100;
n = generator.nextInt(n);
// create file name
String videoName = "Video_" + n + ".mp4";
fileVideo = new File(vidDir.getPath(), videoName);
try {
bis = new BufferedInputStream(new FileInputStream(sourceVideoName));
bos = new BufferedOutputStream(new FileOutputStream(fileVideo, false));
byte[] buf = new byte[8192];
bis.read(buf);
do {
bos.write(buf);
} while (bis.read(buf) != 0);
success = true;
} catch (IOException e) {
}finally {
try {
if (bis != null) bis.close();
if (bos != null) bos.close();
} catch (IOException e) {
}
}
if (success) {
Toast.makeText(getApplicationContext(), "Video saved!",
Toast.LENGTH_LONG).show();
} else {
Toast.makeText(getApplicationContext(),
"Error during video saving", Toast.LENGTH_LONG).show();
}
return true;
}
});
}
public Bitmap rotateBitmap(Bitmap source, int angle)
{
Matrix matrix = new Matrix();
matrix.set(matrix);
matrix.setRotate(angle);
return Bitmap.createBitmap(source, 0, 0, source.getWidth(),
source.getHeight(), matrix, false);
}
private void galleryAddPic() {
Intent mediaScanIntent = new Intent("android.intent.action.MEDIA_SCANNER_SCAN_FILE");
File f = new File(mCurrentPhotoPath);
Uri contentUri = Uri.fromFile(f);
mediaScanIntent.setData(contentUri);
this.sendBroadcast(mediaScanIntent);
}
private void dispatchTakePictureIntent(int actionCode) {
Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
switch(actionCode) {
case ACTION_TAKE_PHOTO:
File f;
try {
f = setUpPhotoFile();
mCurrentPhotoPath = f.getAbsolutePath();
takePictureIntent.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(f));
} catch (IOException e) {
e.printStackTrace();
f = null;
mCurrentPhotoPath = null;
}
break;
default:
break;
} // switch
startActivityForResult(takePictureIntent, actionCode);
}
// Captures video from Android camera component
protected void dispatchTakeVideoIntent() {
Intent takeVideoIntent = new Intent(MediaStore.ACTION_VIDEO_CAPTURE);
if (takeVideoIntent.resolveActivity(getPackageManager()) != null) {
// // set name of video
// Uri fileUri = Uri.fromFile(fileVideo);
// takeVideoIntent.putExtra(MediaStore.EXTRA_OUTPUT, fileUri);
// set the video image quality to high
takeVideoIntent.putExtra(MediaStore.EXTRA_VIDEO_QUALITY, 1);
startActivityForResult(takeVideoIntent, ACTION_TAKE_VIDEO);
}
}
private void handleCameraPhoto() {
if (mCurrentPhotoPath != null) {
setPic();
galleryAddPic();
mCurrentPhotoPath = null;
}
}
// Post recorded video into VideoView
private void handleCameraVideo(Intent intent) {
mVideoUri = intent.getData();
mVideoView.setVideoURI(mVideoUri);
mImageBitmap = null;
mVideoView.setVisibility(View.VISIBLE);
mImageView.setVisibility(View.INVISIBLE);
mVideoView.start();
saveVideo(mVideoUri);
Log.d("VIDEO INTENT: ", "END OF METHOD");
}
Button.OnClickListener mTakePicOnClickListener =
new Button.OnClickListener() {
@Override
public void onClick(View v) {
dispatchTakePictureIntent(ACTION_TAKE_PHOTO);
}
};
Button.OnClickListener mTakeVidOnClickListener =
new Button.OnClickListener() {
@Override
public void onClick(View v) {
dispatchTakeVideoIntent();
}
};
// Intent data is how the photo and video transfer into their views
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
switch (requestCode) {
case ACTION_TAKE_PHOTO: {
if (resultCode == RESULT_OK) {
handleCameraPhoto();
}
break;
} // ACTION_TAKE_PHOTO
case ACTION_TAKE_VIDEO: {
if (resultCode == RESULT_OK) {
handleCameraVideo(data);
}
break;
} // ACTION_TAKE_VIDEO
} // switch
}
// Some lifecycle callbacks so that the image can survive orientation change
@Override
protected void onSaveInstanceState(Bundle outState) {
outState.putParcelable(BITMAP_STORAGE_KEY, mImageBitmap);
outState.putParcelable(VIDEO_STORAGE_KEY, mVideoUri);
outState.putBoolean(IMAGEVIEW_VISIBILITY_STORAGE_KEY, (mImageBitmap != null) );
outState.putBoolean(VIDEOVIEW_VISIBILITY_STORAGE_KEY, (mVideoUri != null) );
super.onSaveInstanceState(outState);
}
@Override
protected void onRestoreInstanceState(Bundle savedInstanceState) {
super.onRestoreInstanceState(savedInstanceState);
mImageBitmap = savedInstanceState.getParcelable(BITMAP_STORAGE_KEY);
mVideoUri = savedInstanceState.getParcelable(VIDEO_STORAGE_KEY);
mImageView.setImageBitmap(mImageBitmap);
mImageView.setVisibility(
savedInstanceState.getBoolean(IMAGEVIEW_VISIBILITY_STORAGE_KEY) ?
ImageView.VISIBLE : ImageView.INVISIBLE
);
mVideoView.setVideoURI(mVideoUri);
mVideoView.setVisibility(
savedInstanceState.getBoolean(VIDEOVIEW_VISIBILITY_STORAGE_KEY) ?
ImageView.VISIBLE : ImageView.INVISIBLE
);
}
/**
* Indicates whether the specified action can be used as an intent. This
* method queries the package manager for installed packages that can
* respond to an intent with the specified action. If no suitable package is
* found, this method returns false.
* http://android-developers.blogspot.com/2009/01/can-i-use-this-intent.html
*
* @param context The application's environment.
* @param action The Intent action to check for availability.
*
* @return True if an Intent with the specified action can be sent and
* responded to, false otherwise.
*/
public static boolean isIntentAvailable(Context context, String action) {
final PackageManager packageManager = context.getPackageManager();
final Intent intent = new Intent(action);
List<ResolveInfo> list =
packageManager.queryIntentActivities(intent,
PackageManager.MATCH_DEFAULT_ONLY);
return list.size() > 0;
}
private void setBtnListenerOrDisable(
Button btn,
Button.OnClickListener onClickListener,
String intentName
) {
if (isIntentAvailable(this, intentName)) {
btn.setOnClickListener(onClickListener);
} else {
btn.setText(
getText(R.string.cannot).toString() + " " + btn.getText());
btn.setClickable(false);
}
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Makes the UP caret go back to the previous fragment MakeCuteFragment
switch (item.getItemId()) {
case android.R.id.home:
android.app.FragmentManager fm= getFragmentManager();
fm.popBackStack();
finish();
return true;
default:
return super.onOptionsItemSelected(item);
}
}
}
Just update the lines in dispatchTakeVideoIntent(): provide a proper path to create fileVideo first, and make sure to call createNewFile() before putting it in the intent.
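A minimal sketch of what that change could look like, reusing the question's fileVideo field and ACTION_TAKE_VIDEO constant; the directory and file names are assumptions borrowed from the saveVideo() code above:
protected void dispatchTakeVideoIntent() {
    Intent takeVideoIntent = new Intent(MediaStore.ACTION_VIDEO_CAPTURE);
    if (takeVideoIntent.resolveActivity(getPackageManager()) != null) {
        try {
            // Build a real output file first...
            File vidDir = new File(Environment.getExternalStoragePublicDirectory(
                    Environment.DIRECTORY_MOVIES), "Saved iCute Videos");
            vidDir.mkdirs();
            fileVideo = new File(vidDir, "Video_" + System.currentTimeMillis() + ".mp4");
            fileVideo.createNewFile();   // ...and create it before handing it to the intent
            takeVideoIntent.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(fileVideo));
        } catch (IOException e) {
            e.printStackTrace();
        }
        takeVideoIntent.putExtra(MediaStore.EXTRA_VIDEO_QUALITY, 1);
        startActivityForResult(takeVideoIntent, ACTION_TAKE_VIDEO);
    }
}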
I have a camera app that uses only portrait mode (restricted via the Android manifest file). The following code is my SurfaceView used for the camera:
public class CameraPreview extends SurfaceView implements SensorEventListener, SurfaceHolder.Callback {
private SurfaceHolder mSurfaceHolder;
private Camera mCamera;
private Activity mActivity;
private static boolean DEBUGGING = true;
private static final String LOG_TAG = "CameraPreviewSample";
private static final String CAMERA_PARAM_ORIENTATION = "orientation";
private static final String CAMERA_PARAM_LANDSCAPE = "landscape";
private static final String CAMERA_PARAM_PORTRAIT = "portrait";
protected List<Camera.Size> mPreviewSizeList;
protected List<Camera.Size> mPictureSizeList;
protected Camera.Size mPreviewSize;
protected Camera.Size mPictureSize;
// Constructor that obtains context and camera
@SuppressWarnings("deprecation")
public CameraPreview(Context context, Camera camera) {
super(context);
mActivity=(Activity)context;
mCamera = camera;
this.mCamera = camera;
this.mSurfaceHolder = this.getHolder();
this.mSurfaceHolder.addCallback(this);
this.mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
try {
mCamera.setPreviewDisplay(surfaceHolder);
mCamera.startPreview();
} catch (IOException e) {
// left blank for now
}
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
mCamera.stopPreview();
mCamera.release();
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int format, int width, int height) {
if (mSurfaceHolder.getSurface() == null) {
// preview surface does not exist
return;
}
try {
mCamera.setPreviewDisplay(surfaceHolder);
mCamera.startPreview();
} catch (Exception e) {
// intentionally left blank for a test
}
try {
Camera.Parameters cameraParams = mCamera.getParameters();
boolean portrait = isPortrait();
configureCameraParameters(cameraParams, portrait);
mCamera.setPreviewDisplay(mSurfaceHolder);
mCamera.startPreview();
} catch (Exception e){
Log.d("CameraView", "Error starting camera preview: " + e.getMessage());
}
}
protected void configureCameraParameters(Camera.Parameters cameraParams, boolean portrait) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.FROYO) { // for 2.1 and before
if (portrait) {
cameraParams.set(CAMERA_PARAM_ORIENTATION, CAMERA_PARAM_PORTRAIT);
} else {
cameraParams.set(CAMERA_PARAM_ORIENTATION, CAMERA_PARAM_LANDSCAPE);
}
} else { // for 2.2 and later
int angle;
Display display = mActivity.getWindowManager().getDefaultDisplay();
switch (display.getRotation()) {
case Surface.ROTATION_0: // This is display orientation
angle = 90; // This is camera orientation
break;
case Surface.ROTATION_90:
angle = 0;
break;
case Surface.ROTATION_180:
angle = 270;
break;
case Surface.ROTATION_270:
angle = 180;
break;
default:
angle = 90;
break;
}
Log.v(LOG_TAG, "angle: " + angle);
mCamera.setDisplayOrientation(angle);
}
cameraParams.setPreviewSize(mPreviewSize.width, mPreviewSize.height);
cameraParams.setPictureSize(mPictureSize.width, mPictureSize.height);
if (DEBUGGING) {
Log.v(LOG_TAG, "Preview Actual Size - w: " + mPreviewSize.width + ", h: " + mPreviewSize.height);
Log.v(LOG_TAG, "Picture Actual Size - w: " + mPictureSize.width + ", h: " + mPictureSize.height);
}
mCamera.setParameters(cameraParams);
}
public boolean isPortrait() {
return (mActivity.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT);
}
@Override
public void onAccuracyChanged(Sensor arg0, int arg1) {
// TODO Auto-generated method stub
}
@Override
public void onSensorChanged(SensorEvent arg0) {
// TODO Auto-generated method stub
}
}
When the application saves an image (taken in portrait mode), it is saved upside down, but in landscape mode it saves the image correctly. And as I have mentioned, the app has restricted the orientation to portrait mode only.
I also tried to change the EXIF data of the image file within the Activity (actually I am using a Fragment) once it has been saved, and then recreate the bitmap with the new EXIF data using the following code, but still no success:
private Bitmap changeExifData(String imagePath){
Bitmap correctBmp = null;
try {
File f = new File(imagePath);
ExifInterface exif = new ExifInterface(f.getPath());
int orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
int angle = 0;
if (orientation == ExifInterface.ORIENTATION_ROTATE_90) {
angle = 90;
}
else if (orientation == ExifInterface.ORIENTATION_ROTATE_180) {
angle = 180;
}
else if (orientation == ExifInterface.ORIENTATION_ROTATE_270) {
angle = 270;
}
Matrix mat = new Matrix();
mat.postRotate(angle);
Bitmap bmp = BitmapFactory.decodeStream(new FileInputStream(f), null, null);
correctBmp = Bitmap.createBitmap(bmp, 0, 0, bmp.getWidth(), bmp.getHeight(), mat, true);
}
catch (IOException e) {
Log.w("TAG", "-- Error in setting image");
}
catch(OutOfMemoryError oom) {
Log.w("TAG", "-- OOM Error in setting image");
}
return correctBmp;
}
Your help is greatly appreciated.
Here is how I solved it (a full implementation that includes saving the image). An image taken in portrait mode will be rotated 90 degrees.
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.provider.MediaStore;
import android.telephony.TelephonyManager;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.Toast;
public class ImageTakeAndShow extends Activity {
private static final int ACTION_TAKE_PHOTO = 1;
private static final String BITMAP_STORAGE_KEY = "viewbitmap";
private static final String IMAGEVIEW_VISIBILITY_STORAGE_KEY = "imageviewvisibility";
//private ImageView mImageView;
private Bitmap mImageBitmap;
private String mCurrentPhotoPath;
private static final String JPEG_FILE_PREFIX = "IMG_";
private static final String JPEG_FILE_SUFFIX = ".jpg";
TelephonyManager myPhonenumber;
static String device_id;
double latitude ,longitude;
Button Btn;
File f;
//save picture
private File getAlbumDir() {
File file = null;
if (Environment.MEDIA_MOUNTED.equals(Environment.getExternalStorageState())) {
file = Environment.getExternalStorageDirectory().getAbsoluteFile();
file = new File(Environment.getExternalStorageDirectory()
+ File.separator + "Snap/Capture_Image");
Log.e("file", file.toString());
if (file != null) {
if (! file.mkdirs()) {
if (! file.exists()){
Log.d("Camera", "failed to create directory");
return null;
}
}
}
} else {
Log.v(getString(R.string.app_name), "External storage is not mounted READ/WRITE.");
}
return file;
}
//create file name
private File createImageFile() throws IOException {
// Create an image file name
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
String imageFileName = JPEG_FILE_PREFIX + timeStamp + "_";
File albumF = getAlbumDir();
File imageF = File.createTempFile(imageFileName, JPEG_FILE_SUFFIX, albumF);
return imageF;
}
private File setUpPhotoFile() throws IOException {
File f = createImageFile();
mCurrentPhotoPath = f.getAbsolutePath();
return f;
}
//set picture width and height and rotate 90 degrees
private void setPic() {
/* Get the size of the image */
BitmapFactory.Options bmOptions = new BitmapFactory.Options();
bmOptions.inJustDecodeBounds = true;
BitmapFactory.decodeFile(mCurrentPhotoPath, bmOptions);
int photoW = bmOptions.outWidth/2;
int photoH = bmOptions.outHeight/2;
Log.e("photoW", Integer.valueOf(photoW).toString());
Log.e("photoH", Integer.valueOf(photoH).toString());
int scaleFactor = 1;
bmOptions.inJustDecodeBounds = false;
bmOptions.inSampleSize = scaleFactor;
bmOptions.inPurgeable = true;
try{
Bitmap bitmap = BitmapFactory.decodeFile(mCurrentPhotoPath);
bitmap = Bitmap.createScaledBitmap(bitmap, 800, 600, true);
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, bytes);
File f = new File(mCurrentPhotoPath.toString());
FileOutputStream fo = new FileOutputStream(f);
fo.write(bytes.toByteArray());
fo.close();
Matrix matrix = new Matrix();
matrix.postRotate(90);
Bitmap rotatedBitmap = Bitmap.createBitmap(bitmap, 0, 0, 800, 600, matrix, true); // height must not exceed the 600px the bitmap was scaled to
FileOutputStream fos2 = new FileOutputStream(mCurrentPhotoPath.toString());
rotatedBitmap.compress(Bitmap.CompressFormat.JPEG, 90, fos2);
fos2.close();
}catch (Exception e) {
e.printStackTrace();
}catch (OutOfMemoryError o) {
o.printStackTrace();
}
}
private void galleryAddPic() {
Intent mediaScanIntent = new Intent("android.intent.action.MEDIA_SCANNER_SCAN_FILE");
f = new File(mCurrentPhotoPath);
Uri contentUri = Uri.fromFile(f);
mediaScanIntent.setData(contentUri);
this.sendBroadcast(mediaScanIntent);
Log.e("path f", f.toString());
}
//Camera activity
private void dispatchTakePictureIntent(int actionCode) {
Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
switch(actionCode) {
case ACTION_TAKE_PHOTO:
File f = null;
try {
f = setUpPhotoFile();
mCurrentPhotoPath = f.getAbsolutePath();
takePictureIntent.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(f));
} catch (IOException e) {
e.printStackTrace();
f = null;
mCurrentPhotoPath = null;
}
break;
default:
break;
}
startActivityForResult(takePictureIntent, actionCode);
}
private void handleBigCameraPhoto() {
if (mCurrentPhotoPath != null) {
setPic();
galleryAddPic();
mCurrentPhotoPath = null;
}
}
Button.OnClickListener mTakePicOnClickListener =
new Button.OnClickListener() {
@Override
public void onClick(View v) {
dispatchTakePictureIntent(ACTION_TAKE_PHOTO);
}
};
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
myPhonenumber = (TelephonyManager)getSystemService(Context.TELEPHONY_SERVICE);
device_id = myPhonenumber.getDeviceId();
mImageBitmap = null;
dispatchTakePictureIntent(ACTION_TAKE_PHOTO);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
switch (requestCode) {
case ACTION_TAKE_PHOTO: {
if (resultCode == RESULT_OK) {
handleBigCameraPhoto();
final Intent intent = new Intent(getApplicationContext(), Image_Priview.class);
intent.putExtra("image parth", f.toString());
intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
startActivity(intent);
finish();
new Handler().postDelayed(new Runnable() {
public void run() {
}
}, 1000);
}else if (resultCode == RESULT_CANCELED){
File file = new File(mCurrentPhotoPath);
file.delete();
finish();
}
break;
}
}
}
// Some lifecycle callbacks so that the image can survive orientation change
@Override
protected void onSaveInstanceState(Bundle outState) {
outState.putParcelable(BITMAP_STORAGE_KEY, mImageBitmap);
outState.putBoolean(IMAGEVIEW_VISIBILITY_STORAGE_KEY, (mImageBitmap != null) );
super.onSaveInstanceState(outState);
}
@Override
protected void onRestoreInstanceState(Bundle savedInstanceState) {
super.onRestoreInstanceState(savedInstanceState);
mImageBitmap = savedInstanceState.getParcelable(BITMAP_STORAGE_KEY);
}
public static boolean isIntentAvailable(Context context, String action) {
final PackageManager packageManager = context.getPackageManager();
final Intent intent = new Intent(action);
List<ResolveInfo> list =
packageManager.queryIntentActivities(intent,
PackageManager.MATCH_DEFAULT_ONLY);
return list.size() > 0;
}
}
I have developed an OCR application which works perfectly on Android 4.0 but not on Android 2.2 and 2.3. I don't know what the problem is with these versions; I searched a lot on Google but found no reason.
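One thing worth checking on 2.2/2.3 devices, as a sketch of my own rather than part of the original code, is whether external storage is actually mounted before the app writes its tessdata directory; older devices unmount the SD card while connected to a PC in USB mass-storage mode, and the mkdirs() calls then fail with nothing but a log line. A minimal check that could go at the top of onCreate() in the listing below (it reuses the DATA_PATH and TAG fields defined there):
// Sketch only: bail out early if the SD card is not available,
// since DATA_PATH lives on external storage.
String storageState = Environment.getExternalStorageState();
if (!Environment.MEDIA_MOUNTED.equals(storageState)) {
Log.e(TAG, "External storage not mounted (state: " + storageState + "), cannot copy tessdata");
return;
}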
package com.datumdroid.android.ocr.simple;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.zip.GZIPInputStream;
import android.app.Activity;
import android.content.Intent;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.media.ExifInterface;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import com.googlecode.tesseract.android.TessBaseAPI;
public class SimpleAndroidOCRActivity extends Activity {
public static final String PACKAGE_NAME = "com.datumdroid.android.ocr.simple";
public static final String DATA_PATH = Environment
.getExternalStorageDirectory().toString() + "/SimpleAndroidOCR/";
// You should have the trained data file in assets folder
// You can get them at:
// http://code.google.com/p/tesseract-ocr/downloads/list
public static final String lang = "eng";
private static final String TAG = "SimpleAndroidOCR.java";
protected Button _button;
// protected ImageView _image;
protected EditText _field;
protected String _path;
protected boolean _taken;
protected static final String PHOTO_TAKEN = "photo_taken";
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
String[] paths = new String[] { DATA_PATH, DATA_PATH + "tessdata/" };
for (String path : paths) {
File dir = new File(path);
if (!dir.exists()) {
if (!dir.mkdirs()) {
Log.v(TAG, "ERROR: Creation of directory " + path + " on
sdcard failed");
return;
} else {
Log.v(TAG, "Created directory " + path + " on sdcard");
}
}
}
// lang.traineddata file with the app (in assets folder)
// You can get them at:
// http://code.google.com/p/tesseract-ocr/downloads/list
// This area needs work and optimization
if (!(new File(DATA_PATH + "tessdata/" + lang + ".traineddata")).exists()) {
try {
AssetManager assetManager = getAssets();
InputStream in = assetManager.open("tessdata/eng.traineddata");
//GZIPInputStream gin = new GZIPInputStream(in);
OutputStream out = new FileOutputStream(DATA_PATH
+ "tessdata/eng.traineddata");
// Transfer bytes from in to out
byte[] buf = new byte[1024];
int len;
//while ((lenf = gin.read(buff)) > 0) {
while ((len = in.read(buf)) > 0) {
out.write(buf, 0, len);
}
in.close();
//gin.close();
out.close();
Log.v(TAG, "Copied " + lang + " traineddata");
} catch (IOException e) {
Log.e(TAG, "Was unable to copy " + lang + " traineddata " +
e.toString());
}
}
// _image = (ImageView) findViewById(R.id.image);
_field = (EditText) findViewById(R.id.field);
_button = (Button) findViewById(R.id.button);
_button.setOnClickListener(new ButtonClickHandler());
_path = DATA_PATH + "/ocr.jpg";
}
public class ButtonClickHandler implements View.OnClickListener {
public void onClick(View view) {
Log.v(TAG, "Starting Camera app");
startCameraActivity();
}
}
// Simple android photo capture:
// http://labs.makemachine.net/2010/03/simple-android-photo-capture/
protected void startCameraActivity() {
File file = new File(_path);
Uri outputFileUri = Uri.fromFile(file);
final Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
intent.putExtra(MediaStore.EXTRA_OUTPUT, outputFileUri);
startActivityForResult(intent, 0);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
Log.i(TAG, "resultCode: " + resultCode);
if (resultCode == -1) { // -1 is Activity.RESULT_OK
onPhotoTaken();
} else {
Log.v(TAG, "User cancelled");
}
}
@Override
protected void onSaveInstanceState(Bundle outState) {
outState.putBoolean(SimpleAndroidOCRActivity.PHOTO_TAKEN, _taken);
}
@Override
protected void onRestoreInstanceState(Bundle savedInstanceState) {
Log.i(TAG, "onRestoreInstanceState()");
if (savedInstanceState.getBoolean(SimpleAndroidOCRActivity.PHOTO_TAKEN)) {
onPhotoTaken();
}
}
protected void onPhotoTaken() {
_taken = true;
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 4;
Bitmap bitmap = BitmapFactory.decodeFile(_path, options);
try {
ExifInterface exif = new ExifInterface(_path);
int exifOrientation = exif.getAttributeInt(
ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_NORMAL);
Log.v(TAG, "Orient: " + exifOrientation);
int rotate = 0;
switch (exifOrientation) {
case ExifInterface.ORIENTATION_ROTATE_90:
rotate = 90;
break;
case ExifInterface.ORIENTATION_ROTATE_180:
rotate = 180;
break;
case ExifInterface.ORIENTATION_ROTATE_270:
rotate = 270;
break;
}
Log.v(TAG, "Rotation: " + rotate);
if (rotate != 0) {
// Getting width & height of the given image.
int w = bitmap.getWidth();
int h = bitmap.getHeight();
// Setting pre rotate
Matrix mtx = new Matrix();
mtx.preRotate(rotate);
// Rotating Bitmap
bitmap = Bitmap.createBitmap(bitmap, 0, 0, w, h, mtx, false);
}
// Convert to ARGB_8888, required by tess
bitmap = bitmap.copy(Bitmap.Config.ARGB_8888, true);
} catch (IOException e) {
Log.e(TAG, "Couldn't correct orientation: " + e.toString());
}
// _image.setImageBitmap( bitmap );
Log.v(TAG, "Before baseApi");
TessBaseAPI baseApi = new TessBaseAPI();
baseApi.setDebug(true);
baseApi.init(DATA_PATH, lang);
baseApi.setImage(bitmap);
String recognizedText = baseApi.getUTF8Text();
baseApi.end();
// You now have the text in recognizedText var, you can do anything with it.
// We will display a stripped-out, trimmed alphanumeric version of it (if lang is eng)
// so that garbage doesn't make it to the display.
Log.v(TAG, "OCRED TEXT: " + recognizedText);
if ( lang.equalsIgnoreCase("eng") ) {
recognizedText = recognizedText.replaceAll("[^a-zA-Z0-9]+", " ");
}
recognizedText = recognizedText.trim();
if ( recognizedText.length() != 0 ) {
_field.setText(_field.getText().toString().length() == 0 ? recognizedText :
_field.getText() + " " + recognizedText);
_field.setSelection(_field.getText().toString().length());
}
// Cycle done.
}
// www.Gaut.am was here
// Thanks for reading!
}
I've followed this tutorial to learn about OCR in Android.
http://gaut.am/making-an-ocr-android-app-using-tesseract/
After successfully running the program on my device and taking a photo, it force closes, as if it failed to analyze the image.
I have already put the trained data in my /asset/testdata/ folder.
The trained data was downloaded from:
http://code.google.com/p/tesseract-ocr/downloads/list
My logcat shows that I get an error:
java.lang.ExceptionInInitializerError
Can you teach me how to fix it?
This is the code:
package com.datumdroid.android.ocr.simple;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.zip.GZIPInputStream;
import android.app.Activity;
import android.content.Intent;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.media.ExifInterface;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import com.googlecode.tesseract.android.TessBaseAPI;
public class SimpleAndroidOCRActivity extends Activity {
public static final String PACKAGE_NAME = "com.datumdroid.android.ocr.simple";
public static final String DATA_PATH = Environment
.getExternalStorageDirectory().toString() + "/SimpleAndroidOCR/";
// You should have the trained data file in assets folder
// You can get them at:
// http://code.google.com/p/tesseract-ocr/downloads/list
public static final String lang = "eng";
private static final String TAG = "SimpleAndroidOCR.java";
protected Button _button;
// protected ImageView _image;
protected EditText _field;
protected String _path;
protected boolean _taken;
protected static final String PHOTO_TAKEN = "photo_taken";
@Override
public void onCreate(Bundle savedInstanceState) {
String[] paths = new String[] { DATA_PATH, DATA_PATH + "tessdata/" };
for (String path : paths) {
File dir = new File(path);
if (!dir.exists()) {
if (!dir.mkdirs()) {
Log.v(TAG, "ERROR: Creation of directory " + path
+ " on sdcard failed");
return;
} else {
Log.v(TAG, "Created directory " + path + " on sdcard");
}
}
}
// lang.traineddata file with the app (in assets folder)
// You can get them at:
// http://code.google.com/p/tesseract-ocr/downloads/list
// This area needs work and optimization
if (!(new File(DATA_PATH + "tessdata/" + lang + ".traineddata"))
.exists()) {
try {
AssetManager assetManager = getAssets();
InputStream in = assetManager.open("tessdata/eng.traineddata");
// GZIPInputStream gin = new GZIPInputStream(in);
OutputStream out = new FileOutputStream(DATA_PATH
+ "tessdata/eng.traineddata");
// Transfer bytes from in to out
byte[] buf = new byte[1024];
int len;
// while ((lenf = gin.read(buff)) > 0) {
while ((len = in.read(buf)) > 0) {
out.write(buf, 0, len);
}
in.close();
// gin.close();
out.close();
Log.v(TAG, "Copied " + lang + " traineddata");
} catch (IOException e) {
Log.e(TAG,
"Was unable to copy " + lang + " traineddata "
+ e.toString());
}
}
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
// _image = (ImageView) findViewById(R.id.image);
_field = (EditText) findViewById(R.id.field);
_button = (Button) findViewById(R.id.button);
_button.setOnClickListener(new ButtonClickHandler());
_path = DATA_PATH + "/ocr.jpg";
}
public class ButtonClickHandler implements View.OnClickListener {
public void onClick(View view) {
Log.v(TAG, "Starting Camera app");
startCameraActivity();
}
}
// Simple android photo capture:
// http://labs.makemachine.net/2010/03/simple-android-photo-capture/
protected void startCameraActivity() {
File file = new File(_path);
Uri outputFileUri = Uri.fromFile(file);
final Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
intent.putExtra(MediaStore.EXTRA_OUTPUT, outputFileUri);
startActivityForResult(intent, 0);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
Log.i(TAG, "resultCode: " + resultCode);
if (resultCode == -1) { // -1 is Activity.RESULT_OK
onPhotoTaken();
} else {
Log.v(TAG, "User cancelled");
}
}
@Override
protected void onSaveInstanceState(Bundle outState) {
outState.putBoolean(SimpleAndroidOCRActivity.PHOTO_TAKEN, _taken);
}
@Override
protected void onRestoreInstanceState(Bundle savedInstanceState) {
Log.i(TAG, "onRestoreInstanceState()");
if (savedInstanceState.getBoolean(SimpleAndroidOCRActivity.PHOTO_TAKEN)) {
onPhotoTaken();
}
}
protected void onPhotoTaken() {
_taken = true;
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 4;
Bitmap bitmap = BitmapFactory.decodeFile(_path, options);
try {
ExifInterface exif = new ExifInterface(_path);
int exifOrientation = exif.getAttributeInt(
ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_NORMAL);
Log.v(TAG, "Orient: " + exifOrientation);
int rotate = 0;
switch (exifOrientation) {
case ExifInterface.ORIENTATION_ROTATE_90:
rotate = 90;
break;
case ExifInterface.ORIENTATION_ROTATE_180:
rotate = 180;
break;
case ExifInterface.ORIENTATION_ROTATE_270:
rotate = 270;
break;
}
Log.v(TAG, "Rotation: " + rotate);
if (rotate != 0) {
// Getting width & height of the given image.
int w = bitmap.getWidth();
int h = bitmap.getHeight();
// Setting pre rotate
Matrix mtx = new Matrix();
mtx.preRotate(rotate);
// Rotating Bitmap
bitmap = Bitmap.createBitmap(bitmap, 0, 0, w, h, mtx, false);
}
// Convert to ARGB_8888, required by tess
bitmap = bitmap.copy(Bitmap.Config.ARGB_8888, true);
System.out.println("Bitmap Selesai");
} catch (IOException e) {
Log.e(TAG, "Couldn't correct orientation: " + e.toString());
}
// _image.setImageBitmap( bitmap );
//System.loadLibrary("tess");
System.out.println("Masuk");
Log.v(TAG, "Before baseApi");
TessBaseAPI baseApi = new TessBaseAPI();
baseApi.setDebug(true);
baseApi.init(DATA_PATH, lang);
baseApi.setImage(bitmap);
String recognizedText = baseApi.getUTF8Text();
baseApi.end();
System.out.println("Keluar");
// You now have the text in recognizedText var, you can do anything with
// it.
// We will display a stripped out trimmed alpha-numeric version of it
// (if lang is eng)
// so that garbage doesn't make it to the display.
Log.v(TAG, "OCRED TEXT: " + recognizedText);
if (lang.equalsIgnoreCase("eng")) {
recognizedText = recognizedText.replaceAll("[^a-zA-Z0-9]+", " ");
}
recognizedText = recognizedText.trim();
if (recognizedText.length() != 0) {
_field.setText(_field.getText().toString().length() == 0 ? recognizedText
: _field.getText() + " " + recognizedText);
_field.setSelection(_field.getText().toString().length());
}
// Cycle done.
}
// www.Gaut.am was here
// Thanks for reading!
}
Do you have this in your AndroidManifest.xml?:
<uses-permission android:name="WRITE_EXTERNAL_STORAGE" />
I notice that you're trying to create files/directories during initialization, so adding that permission may help.
Can you try to add System.loadLibrary("tess"); before initializing tesseract-ocr?
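For reference, here is a minimal sketch of that suggestion, assuming the native libraries are named "lept" and "tess" as in the tess-two / tesseract-android-tools builds (the ExceptionInInitializerError typically comes from TessBaseAPI's static initializer failing to load them). The explicit calls would go in onPhotoTaken(), just before the TessBaseAPI is created, reusing the TAG, DATA_PATH and lang fields already in your class:
// Sketch only: load the native libraries explicitly before the first use of
// TessBaseAPI, so a packaging problem for this device's ABI shows up as a
// clear UnsatisfiedLinkError instead of an ExceptionInInitializerError.
try {
System.loadLibrary("lept"); // Leptonica image library
System.loadLibrary("tess"); // Tesseract JNI wrapper
} catch (UnsatisfiedLinkError e) {
Log.e(TAG, "Could not load native OCR libraries: " + e);
}
TessBaseAPI baseApi = new TessBaseAPI();
baseApi.setDebug(true);
baseApi.init(DATA_PATH, lang);
If this logs an UnsatisfiedLinkError, the liblept.so/libtess.so files for the device's CPU architecture are probably missing from the APK's libs folder.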