OpenCV Android - Process image that has been captured

I have a project that captures an image using OpenCV's CameraBridgeViewBase, and another project that detects a ping-pong ball. The problem is that I can't combine the two projects. What I want is to capture a live feed and then return a single image that already has the detection drawn in it.
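For illustration, the kind of combination I have in mind is roughly the following: run the detection inside onCameraFrame() so that the frame handed to takePhoto() already has the ball drawn into it. This is only a sketch, not working code from either project: it assumes the ball can be found with OpenCV's Java Imgproc.HoughCircles instead of the native CircleObjectTrack from the second project, the Hough parameters are placeholders, and mGray is a new field that would be initialized next to mBgr in the loader callback.
// New field; initialize it in the loader callback alongside mBgr: mGray = new Mat();
private Mat mGray;
@Override
public Mat onCameraFrame(final CvCameraViewFrame inputFrame) {
    final Mat rgba = inputFrame.rgba();
    // Detect circles on a blurred grayscale copy of the frame.
    Imgproc.cvtColor(rgba, mGray, Imgproc.COLOR_RGBA2GRAY);
    Imgproc.GaussianBlur(mGray, mGray, new org.opencv.core.Size(9, 9), 2, 2);
    final Mat circles = new Mat();
    Imgproc.HoughCircles(mGray, circles, Imgproc.HOUGH_GRADIENT, 1,
            mGray.rows() / 8, 100, 30, 10, 100);
    // Draw every detected circle into the RGBA frame.
    for (int i = 0; i < circles.cols(); i++) {
        final double[] c = circles.get(0, i);
        Imgproc.circle(rgba, new org.opencv.core.Point(c[0], c[1]),
                (int) Math.round(c[2]), new org.opencv.core.Scalar(0, 255, 0, 255), 3);
    }
    circles.release();
    if (mIsPhotoPending) {
        mIsPhotoPending = false;
        takePhoto(rgba); // the saved photo now already contains the drawn detection
    }
    if (mIsCameraFrontFacing) {
        Core.flip(rgba, rgba, 1);
    }
    return rgba;
}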
Here are the classes of the project that can capture an image and store it in the gallery:
public final class MainActivity extends ActionBarActivity
implements CvCameraViewListener2 {
// A tag for log output.
private static final String TAG =
MainActivity.class.getSimpleName();
// A key for storing the index of the active camera.
private static final String STATE_CAMERA_INDEX = "cameraIndex";
// A key for storing the index of the active image size.
private static final String STATE_IMAGE_SIZE_INDEX =
"imageSizeIndex";
// An ID for items in the image size submenu.
private static final int MENU_GROUP_ID_SIZE = 2;
// The index of the active camera.
private int mCameraIndex;
// The index of the active image size.
private int mImageSizeIndex;
// Whether the active camera is front-facing.
// If so, the camera view should be mirrored.
private boolean mIsCameraFrontFacing;
// The number of cameras on the device.
private int mNumCameras;
// The image sizes supported by the active camera.
private List<Size> mSupportedImageSizes;
// The camera view.
private CameraBridgeViewBase mCameraView;
// Whether the next camera frame should be saved as a photo.
private boolean mIsPhotoPending;
// A matrix that is used when saving photos.
private Mat mBgr;
// Whether an asynchronous menu action is in progress.
// If so, menu interaction should be disabled.
private boolean mIsMenuLocked;
// The OpenCV loader callback.
private BaseLoaderCallback mLoaderCallback =
new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(final int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
Log.d(TAG, "OpenCV loaded successfully");
mCameraView.enableView();
//mCameraView.enableFpsMeter();
mBgr = new Mat();
break;
default:
super.onManagerConnected(status);
break;
}
}
};
// Suppress backward incompatibility errors because we provide
// backward-compatible fallbacks.
@SuppressLint("NewApi")
@Override
protected void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
final Window window = getWindow();
window.addFlags(
WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
if (savedInstanceState != null) {
mCameraIndex = savedInstanceState.getInt(
STATE_CAMERA_INDEX, 0);
mImageSizeIndex = savedInstanceState.getInt(
STATE_IMAGE_SIZE_INDEX, 0);
} else {
mCameraIndex = 0;
mImageSizeIndex = 0;
}
final Camera camera;
if (Build.VERSION.SDK_INT >=
Build.VERSION_CODES.GINGERBREAD) {
CameraInfo cameraInfo = new CameraInfo();
Camera.getCameraInfo(mCameraIndex, cameraInfo);
mIsCameraFrontFacing =
(cameraInfo.facing ==
CameraInfo.CAMERA_FACING_FRONT);
mNumCameras = Camera.getNumberOfCameras();
camera = Camera.open(mCameraIndex);
} else { // pre-Gingerbread
// Assume there is only 1 camera and it is rear-facing.
mIsCameraFrontFacing = false;
mNumCameras = 1;
camera = Camera.open();
}
final Parameters parameters = camera.getParameters();
camera.release();
mSupportedImageSizes =
parameters.getSupportedPreviewSizes();
final Size size = mSupportedImageSizes.get(mImageSizeIndex);
mCameraView = new JavaCameraView(this, mCameraIndex);
mCameraView.setMaxFrameSize(size.width, size.height);
mCameraView.setCvCameraViewListener(this);
setContentView(mCameraView);
}
public void onSaveInstanceState(Bundle savedInstanceState) {
// Save the current camera index.
savedInstanceState.putInt(STATE_CAMERA_INDEX, mCameraIndex);
// Save the current image size index.
savedInstanceState.putInt(STATE_IMAGE_SIZE_INDEX,
mImageSizeIndex);
super.onSaveInstanceState(savedInstanceState);
}
// Suppress backward incompatibility errors because we provide
// backward-compatible fallbacks.
@SuppressLint("NewApi")
@Override
public void recreate() {
if (Build.VERSION.SDK_INT >=
Build.VERSION_CODES.HONEYCOMB) {
super.recreate();
} else {
finish();
startActivity(getIntent());
}
}
@Override
public void onPause() {
if (mCameraView != null) {
mCameraView.disableView();
}
super.onPause();
}
@Override
public void onResume() {
super.onResume();
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0,
this, mLoaderCallback);
mIsMenuLocked = false;
}
@Override
public void onDestroy() {
if (mCameraView != null) {
mCameraView.disableView();
}
super.onDestroy();
}
@Override
public boolean onCreateOptionsMenu(final Menu menu) {
getMenuInflater().inflate(R.menu.menu_main, menu);
if (mNumCameras < 2) {
// Remove the option to switch cameras, since there is
// only 1.
menu.removeItem(R.id.menu_next_camera);
}
int numSupportedImageSizes = mSupportedImageSizes.size();
if (numSupportedImageSizes > 1) {
final SubMenu sizeSubMenu = menu.addSubMenu(
R.string.menu_image_size);
for (int i = 0; i < numSupportedImageSizes; i++) {
final Size size = mSupportedImageSizes.get(i);
sizeSubMenu.add(MENU_GROUP_ID_SIZE, i, Menu.NONE,
String.format("%dx%d", size.width,
size.height));
}
}
return true;
}
// Suppress backward incompatibility errors because we provide
// backward-compatible fallbacks (for recreate).
@SuppressLint("NewApi")
@Override
public boolean onOptionsItemSelected(final MenuItem item) {
if (mIsMenuLocked) {
return true;
}
if (item.getGroupId() == MENU_GROUP_ID_SIZE) {
mImageSizeIndex = item.getItemId();
recreate();
return true;
}
switch (item.getItemId()) {
case R.id.menu_next_camera:
mIsMenuLocked = true;
// With another camera index, recreate the activity.
mCameraIndex++;
if (mCameraIndex == mNumCameras) {
mCameraIndex = 0;
}
mImageSizeIndex = 0;
recreate();
return true;
case R.id.menu_take_photo:
mIsMenuLocked = true;
// Next frame, take the photo.
mIsPhotoPending = true;
return true;
default:
return super.onOptionsItemSelected(item);
}
}
@Override
public void onCameraViewStarted(final int width,
final int height) {
}
@Override
public void onCameraViewStopped() {
}
@Override
public Mat onCameraFrame(final CvCameraViewFrame inputFrame) {
final Mat rgba = inputFrame.rgba();
if (mIsPhotoPending) {
mIsPhotoPending = false;
takePhoto(rgba);
}
if (mIsCameraFrontFacing) {
// Mirror (horizontally flip) the preview.
Core.flip(rgba, rgba, 1);
}
return rgba;
}
private void takePhoto(final Mat rgba) {
// Determine the path and metadata for the photo.
final long currentTimeMillis = System.currentTimeMillis();
final String appName = getString(R.string.app_name);
final String galleryPath =
Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES).toString();
final String albumPath = galleryPath + File.separator +
appName;
final String photoPath = albumPath + File.separator +
currentTimeMillis + LabActivity.PHOTO_FILE_EXTENSION;
final ContentValues values = new ContentValues();
values.put(MediaStore.MediaColumns.DATA, photoPath);
values.put(Images.Media.MIME_TYPE,
LabActivity.PHOTO_MIME_TYPE);
values.put(Images.Media.TITLE, appName);
values.put(Images.Media.DESCRIPTION, appName);
values.put(Images.Media.DATE_TAKEN, currentTimeMillis);
// Ensure that the album directory exists.
File album = new File(albumPath);
if (!album.isDirectory() && !album.mkdirs()) {
Log.e(TAG, "Failed to create album directory at " +
albumPath);
onTakePhotoFailed();
return;
}
// Try to create the photo.
Imgproc.cvtColor(rgba, mBgr, Imgproc.COLOR_RGBA2BGR, 3);
if (!Imgcodecs.imwrite(photoPath, mBgr)) {
Log.e(TAG, "Failed to save photo to " + photoPath);
onTakePhotoFailed();
return;
}
Log.d(TAG, "Photo saved successfully to " + photoPath);
// Try to insert the photo into the MediaStore.
Uri uri;
try {
uri = getContentResolver().insert(
Images.Media.EXTERNAL_CONTENT_URI, values);
} catch (final Exception e) {
Log.e(TAG, "Failed to insert photo into MediaStore");
e.printStackTrace();
// Since the insertion failed, delete the photo.
File photo = new File(photoPath);
if (!photo.delete()) {
Log.e(TAG, "Failed to delete non-inserted photo");
}
onTakePhotoFailed();
return;
}
// Open the photo in LabActivity.
final Intent intent = new Intent(this, LabActivity.class);
intent.putExtra(LabActivity.EXTRA_PHOTO_URI, uri);
intent.putExtra(LabActivity.EXTRA_PHOTO_DATA_PATH,
photoPath);
runOnUiThread(new Runnable() {
@Override
public void run() {
startActivity(intent);
}
});
}
private void onTakePhotoFailed() {
mIsMenuLocked = false;
// Show an error message.
final String errorMessage =
getString(R.string.photo_error_message);
runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(MainActivity.this, errorMessage,
Toast.LENGTH_SHORT).show();
}
});
}
}
--
public final class LabActivity extends ActionBarActivity {
public static final String PHOTO_FILE_EXTENSION = ".png";
public static final String PHOTO_MIME_TYPE = "image/png";
public static final String EXTRA_PHOTO_URI =
"com.nummist.secondsight.LabActivity.extra.PHOTO_URI";
public static final String EXTRA_PHOTO_DATA_PATH =
"com.nummist.secondsight.LabActivity.extra.PHOTO_DATA_PATH";
private Uri mUri;
private String mDataPath;
@Override
protected void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
final Intent intent = getIntent();
mUri = intent.getParcelableExtra(EXTRA_PHOTO_URI);
mDataPath = intent.getStringExtra(EXTRA_PHOTO_DATA_PATH);
final ImageView imageView = new ImageView(this);
imageView.setImageURI(mUri);
setContentView(imageView);
}
@Override
public boolean onCreateOptionsMenu(final Menu menu) {
getMenuInflater().inflate(R.menu.activity_lab, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(final MenuItem item) {
switch (item.getItemId()) {
case R.id.menu_delete:
deletePhoto();
return true;
case R.id.menu_edit:
editPhoto();
return true;
case R.id.menu_share:
sharePhoto();
return true;
default:
return super.onOptionsItemSelected(item);
}
}
/*
* Show a confirmation dialog. On confirmation ("Delete"), the
* photo is deleted and the activity finishes.
*/
private void deletePhoto() {
final AlertDialog.Builder alert = new AlertDialog.Builder(
LabActivity.this);
alert.setTitle(R.string.photo_delete_prompt_title);
alert.setMessage(R.string.photo_delete_prompt_message);
alert.setCancelable(false);
alert.setPositiveButton(R.string.delete,
new DialogInterface.OnClickListener() {
@Override
public void onClick(final DialogInterface dialog,
final int which) {
getContentResolver().delete(
Images.Media.EXTERNAL_CONTENT_URI,
MediaStore.MediaColumns.DATA + "=?",
new String[] { mDataPath });
finish();
}
});
alert.setNegativeButton(android.R.string.cancel, null);
alert.show();
}
/*
* Show a chooser so that the user may pick an app for editing
* the photo.
*/
private void editPhoto() {
final Intent intent = new Intent(Intent.ACTION_EDIT);
intent.setDataAndType(mUri, PHOTO_MIME_TYPE);
startActivity(Intent.createChooser(intent,
getString(R.string.photo_edit_chooser_title)));
}
/*
* Show a chooser so that the user may pick an app for sending
* the photo.
*/
private void sharePhoto() {
final Intent intent = new Intent(Intent.ACTION_SEND);
intent.setType(PHOTO_MIME_TYPE);
intent.putExtra(Intent.EXTRA_STREAM, mUri);
intent.putExtra(Intent.EXTRA_SUBJECT,
getString(R.string.photo_send_extra_subject));
intent.putExtra(Intent.EXTRA_TEXT,
getString(R.string.photo_send_extra_text));
startActivity(Intent.createChooser(intent,
getString(R.string.photo_send_chooser_title)));
}
}
And here are the classes of the pingpong ball detection project:
public class MainActivity extends AppCompatActivity {
private static final String TAG = "ObjTrackActivity";
private MenuItem mItemPreviewRGBA;
private MenuItem mItemPreviewTresholded;
public static boolean bShowTresholded = false;
public MainActivity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "onCreate");
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
requestWindowFeature(Window.FEATURE_NO_TITLE);
super.onCreate(savedInstanceState);
setContentView(new ObjTrackViewer(this));
}
public boolean onCreateOptionsMenu(Menu menu){
Log.i(TAG, "onCreateOptionsMenu");
mItemPreviewRGBA = menu.add("Preview RGBA");
mItemPreviewTresholded = menu.add("Preview Thresholded");
return true;
}
public boolean onOptionsItemSelected(MenuItem item){
Log.i(TAG, "Menu Item selected " + item);
if (item == mItemPreviewRGBA)
bShowTresholded = false;
else if (item == mItemPreviewTresholded)
bShowTresholded = true;
return true;
}
}
--
public class ObjTrackViewer extends SampleViewBase {
private int mFrameSize;
private Bitmap mBitmap;
private int[] mRGBA;
public ObjTrackViewer(Context context) {
super(context);
}
@Override
protected void onPreviewStared(int previewWidtd, int previewHeight) {
mFrameSize = previewWidtd * previewHeight;
mRGBA = new int[mFrameSize];
mBitmap = Bitmap.createBitmap(previewWidtd, previewHeight, Bitmap.Config.ARGB_8888);
}
@Override
protected void onPreviewStopped() {
if(mBitmap != null) {
mBitmap.recycle();
mBitmap = null;
}
mRGBA = null;
}
@Override
protected Bitmap processFrame(byte[] data) {
int[] rgba = mRGBA;
CircleObjectTrack(getFrameWidth(), getFrameHeight(), data, rgba, MainActivity.bShowTresholded);
Bitmap bmp = mBitmap;
bmp.setPixels(rgba, 0/* offset */, getFrameWidth() /* stride */, 0, 0, getFrameWidth(), getFrameHeight());
return bmp;
}
public native void CircleObjectTrack(int width, int height, byte yuv[], int[] rgba, boolean debug);
static {
System.loadLibrary("objtrack_opencv_jni");
}
}
--
public abstract class SampleViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "Sample::SurfaceView";
private Camera mCamera;
private SurfaceHolder mHolder;
private int mFrameWidth;
private int mFrameHeight;
private byte[] mFrame;
private boolean mThreadRun;
private byte[] mBuffer;
public SampleViewBase(Context context) {
super(context);
mHolder = getHolder();
mHolder.addCallback(this);
Log.i(TAG, "Instantiated new " + this.getClass());
}
public int getFrameWidth() {
return mFrameWidth;
}
public int getFrameHeight() {
return mFrameHeight;
}
public void setPreview() throws IOException {
//if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB)
// mCamera.setPreviewTexture( new SurfaceTexture(10) );
//else
mCamera.setPreviewDisplay(null);
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceCreated");
if (mCamera != null) {
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
mFrameWidth = width;
mFrameHeight = height;
// selecting optimal camera preview size
{
int minDiff = Integer.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
mFrameHeight = size.height;
minDiff = Math.abs(size.height - height);
}
}
}
params.setPreviewSize(getFrameWidth(), getFrameHeight());
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
/* Now allocate the buffer */
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
/* The buffer where the current frame will be copied */
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Notify that the preview is about to be started and deliver preview size */
onPreviewStared(params.getPreviewSize().width, params.getPreviewSize().height);
/* Now we can start a preview */
mCamera.startPreview();
}
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
mCamera = Camera.open();
mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera camera) {
synchronized (SampleViewBase.this) {
System.arraycopy(data, 0, mFrame, 0, data.length);
SampleViewBase.this.notify();
}
camera.addCallbackBuffer(mBuffer);
}
});
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
mThreadRun = false;
if (mCamera != null) {
synchronized (this) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
mCamera = null;
}
}
onPreviewStopped();
}
/* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */
protected abstract Bitmap processFrame(byte[] data);
/**
* This method is called when the preview process is being started, before the first frame is delivered to processFrame.
* It receives the width and height of the preview and can be used to prepare any data needed during frame processing.
* @param previewWidth - the width of the preview frames that will be delivered via processFrame
* @param previewHeight - the height of the preview frames that will be delivered via processFrame
*/
protected abstract void onPreviewStared(int previewWidtd, int previewHeight);
/**
* This method is called when the preview is stopped and all frame processing has completed.
* If the Bitmap object returned via processFrame is cached, this is a good time to recycle it.
* Any other resources used during the preview can be released here.
*/
protected abstract void onPreviewStopped();
public void run() {
mThreadRun = true;
Log.i(TAG, "Starting processing thread");
while (mThreadRun) {
Bitmap bmp = null;
synchronized (this) {
try {
this.wait();
bmp = processFrame(mFrame);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
if (bmp != null) {
Canvas canvas = mHolder.lockCanvas();
if (canvas != null) {
canvas.drawBitmap(bmp, (canvas.getWidth() - getFrameWidth()) / 2, (canvas.getHeight() - getFrameHeight()) / 2, null);
mHolder.unlockCanvasAndPost(canvas);
}
}
}
}
}

Related

TransactionTooLargeException occurs when switching to a new activity

I'm developing an image-capture app in which, after capturing an image, if the user does not want to save it and presses the back button (in onBackPressed I launch a new activity, BottomNavigationActivity), it should switch to the home activity (like a normal camera app does).
@Override
public void onBackPressed()
{
startActivity(new Intent(getApplicationContext(), BottomNavigationActivity.class));
}
But when I press the back button a TransactionTooLargeException occurs and the app crashes. I don't know why this happens; I am not transferring any huge amount of data such as an ArrayList. Please help me solve this problem.
Thanks in advance.
CameraFragment.java (this is where I capture the image):
public class CameraFragment extends Fragment implements SurfaceHolder.Callback, Camera.PictureCallback
{
public static final String TAG = CameraFragment.class.getSimpleName();
public static final String CAMERA_ID_KEY = "camera_id";
public static final String CAMERA_FLASH_KEY = "flash_mode";
public static final String IMAGE_INFO = "image_info";
private static final int PICTURE_SIZE_MAX_WIDTH = 1280;
private static final int PREVIEW_SIZE_MAX_WIDTH = 640;
private int mCameraID;
private String mFlashMode;
private Camera mCamera;
private SquareCameraPreview mPreviewView;
private SurfaceHolder mSurfaceHolder;
private boolean mIsSafeToTakePhoto = false;
private ImageParameters mImageParameters;
private CameraOrientationListener mOrientationListener;
ImageView iv_camera_close;
public static Fragment newInstance() {
return new CameraFragment();
}
public CameraFragment() {}
@Override
public void onAttach(Context context) {
super.onAttach(context);
mOrientationListener = new CameraOrientationListener(context);
}
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Restore your state here because a double rotation with this fragment
// in the backstack will cause improper state restoration
// onCreate() -> onSavedInstanceState() instead of going through onCreateView()
if (savedInstanceState == null) {
mCameraID = getBackCameraID();
mFlashMode = CameraSettingPreferences.getCameraFlashMode(getActivity());
mImageParameters = new ImageParameters();
}
else
{
mCameraID = savedInstanceState.getInt(CAMERA_ID_KEY);
mFlashMode = savedInstanceState.getString(CAMERA_FLASH_KEY);
mImageParameters = savedInstanceState.getParcelable(IMAGE_INFO);
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState)
{
return inflater.inflate(R.layout.squarecamera__fragment_camera, container, false);
}
@Override
public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
mOrientationListener.enable();
mPreviewView = (SquareCameraPreview) view.findViewById(R.id.camera_preview_view);
mPreviewView.getHolder().addCallback(CameraFragment.this);
final View topCoverView = view.findViewById(R.id.cover_top_view);
final View btnCoverView = view.findViewById(R.id.cover_bottom_view);
iv_camera_close = view.findViewById(R.id.iv_camera_close);
iv_camera_close.setOnClickListener(new View.OnClickListener()
{
@Override
public void onClick(View v)
{
startActivity(new Intent(getContext(), BottomNavigationActivity.class));
}
});
mImageParameters.mIsPortrait =
getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT;
if (savedInstanceState == null) {
ViewTreeObserver observer = mPreviewView.getViewTreeObserver();
observer.addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
@Override
public void onGlobalLayout() {
mImageParameters.mPreviewWidth = mPreviewView.getWidth();
mImageParameters.mPreviewHeight = mPreviewView.getHeight();
mImageParameters.mCoverWidth = mImageParameters.mCoverHeight
= mImageParameters.calculateCoverWidthHeight();
// Log.d(TAG, "parameters: " + mImageParameters.getStringValues());
// Log.d(TAG, "cover height " + topCoverView.getHeight());
resizeTopAndBtmCover(topCoverView, btnCoverView);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
mPreviewView.getViewTreeObserver().removeOnGlobalLayoutListener(this);
} else {
mPreviewView.getViewTreeObserver().removeGlobalOnLayoutListener(this);
}
}
});
} else {
if (mImageParameters.isPortrait()) {
topCoverView.getLayoutParams().height = mImageParameters.mCoverHeight;
btnCoverView.getLayoutParams().height = mImageParameters.mCoverHeight;
} else {
topCoverView.getLayoutParams().width = mImageParameters.mCoverWidth;
btnCoverView.getLayoutParams().width = mImageParameters.mCoverWidth;
}
}
final ImageView swapCameraBtn = (ImageView) view.findViewById(R.id.change_camera);
swapCameraBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (mCameraID == CameraInfo.CAMERA_FACING_FRONT) {
mCameraID = getBackCameraID();
} else {
mCameraID = getFrontCameraID();
}
restartPreview();
}
});
final View changeCameraFlashModeBtn = view.findViewById(R.id.flash);
changeCameraFlashModeBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (mFlashMode.equalsIgnoreCase(Camera.Parameters.FLASH_MODE_AUTO)) {
mFlashMode = Camera.Parameters.FLASH_MODE_ON;
} else if (mFlashMode.equalsIgnoreCase(Camera.Parameters.FLASH_MODE_ON)) {
mFlashMode = Camera.Parameters.FLASH_MODE_OFF;
} else if (mFlashMode.equalsIgnoreCase(Camera.Parameters.FLASH_MODE_OFF)) {
mFlashMode = Camera.Parameters.FLASH_MODE_AUTO;
}
setupFlashMode();
setupCamera();
}
});
setupFlashMode();
final ImageView takePhotoBtn = (ImageView) view.findViewById(R.id.capture_image_button);
takePhotoBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
takePicture();
}
});
}
private void setupFlashMode() {
View view = getView();
if (view == null) return;
final TextView autoFlashIcon = (TextView) view.findViewById(R.id.auto_flash_icon);
if (Camera.Parameters.FLASH_MODE_AUTO.equalsIgnoreCase(mFlashMode)) {
autoFlashIcon.setText("Auto");
} else if (Camera.Parameters.FLASH_MODE_ON.equalsIgnoreCase(mFlashMode)) {
autoFlashIcon.setText("On");
} else if (Camera.Parameters.FLASH_MODE_OFF.equalsIgnoreCase(mFlashMode)) {
autoFlashIcon.setText("Off");
}
}
@Override
public void onSaveInstanceState(Bundle outState) {
// Log.d(TAG, "onSaveInstanceState");
outState.putInt(CAMERA_ID_KEY, mCameraID);
outState.putString(CAMERA_FLASH_KEY, mFlashMode);
outState.putParcelable(IMAGE_INFO, mImageParameters);
super.onSaveInstanceState(outState);
}
private void resizeTopAndBtmCover( final View topCover, final View bottomCover) {
ResizeAnimation resizeTopAnimation
= new ResizeAnimation(topCover, mImageParameters);
resizeTopAnimation.setDuration(800);
resizeTopAnimation.setInterpolator(new AccelerateDecelerateInterpolator());
topCover.startAnimation(resizeTopAnimation);
ResizeAnimation resizeBtmAnimation
= new ResizeAnimation(bottomCover, mImageParameters);
resizeBtmAnimation.setDuration(800);
resizeBtmAnimation.setInterpolator(new AccelerateDecelerateInterpolator());
bottomCover.startAnimation(resizeBtmAnimation);
}
private void getCamera(int cameraID) {
try {
mCamera = Camera.open(cameraID);
mPreviewView.setCamera(mCamera);
} catch (Exception e) {
Log.d(TAG, "Can't open camera with id " + cameraID);
e.printStackTrace();
}
}
/**
* Restart the camera preview
*/
private void restartPreview() {
if (mCamera != null) {
stopCameraPreview();
mCamera.release();
mCamera = null;
}
getCamera(mCameraID);
startCameraPreview();
}
/**
* Start the camera preview
*/
private void startCameraPreview() {
determineDisplayOrientation();
setupCamera();
try {
mCamera.setPreviewDisplay(mSurfaceHolder);
mCamera.startPreview();
setSafeToTakePhoto(true);
setCameraFocusReady(true);
} catch (IOException e) {
Log.d(TAG, "Can't start camera preview due to IOException " + e);
e.printStackTrace();
}
}
/**
* Stop the camera preview
*/
private void stopCameraPreview() {
setSafeToTakePhoto(false);
setCameraFocusReady(false);
// Nulls out callbacks, stops face detection
mCamera.stopPreview();
mPreviewView.setCamera(null);
}
private void setSafeToTakePhoto(final boolean isSafeToTakePhoto) {
mIsSafeToTakePhoto = isSafeToTakePhoto;
}
private void setCameraFocusReady(final boolean isFocusReady) {
if (this.mPreviewView != null) {
mPreviewView.setIsFocusReady(isFocusReady);
}
}
private void determineDisplayOrientation() {
CameraInfo cameraInfo = new CameraInfo();
Camera.getCameraInfo(mCameraID, cameraInfo);
// Clockwise rotation needed to align the window display to the natural position
int rotation = getActivity().getWindowManager().getDefaultDisplay().getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0: {
degrees = 0;
break;
}
case Surface.ROTATION_90: {
degrees = 90;
break;
}
case Surface.ROTATION_180: {
degrees = 180;
break;
}
case Surface.ROTATION_270: {
degrees = 270;
break;
}
}
int displayOrientation;
if (cameraInfo.facing == CameraInfo.CAMERA_FACING_FRONT) {
// Orientation is angle of rotation when facing the camera for
// the camera image to match the natural orientation of the device
displayOrientation = (cameraInfo.orientation + degrees) % 360;
displayOrientation = (360 - displayOrientation) % 360;
} else {
displayOrientation = (cameraInfo.orientation - degrees + 360) % 360;
}
mImageParameters.mDisplayOrientation = displayOrientation;
mImageParameters.mLayoutOrientation = degrees;
mCamera.setDisplayOrientation(mImageParameters.mDisplayOrientation);
}
private void setupCamera() {
// Never keep a global parameters
Camera.Parameters parameters = mCamera.getParameters();
Size bestPreviewSize = determineBestPreviewSize(parameters);
Size bestPictureSize = determineBestPictureSize(parameters);
parameters.setPreviewSize(bestPreviewSize.width, bestPreviewSize.height);
parameters.setPictureSize(bestPictureSize.width, bestPictureSize.height);
// Set continuous picture focus, if it's supported
if (parameters.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
}
final View changeCameraFlashModeBtn = getView().findViewById(R.id.flash);
List<String> flashModes = parameters.getSupportedFlashModes();
if (flashModes != null && flashModes.contains(mFlashMode)) {
parameters.setFlashMode(mFlashMode);
changeCameraFlashModeBtn.setVisibility(View.VISIBLE);
} else {
changeCameraFlashModeBtn.setVisibility(View.INVISIBLE);
}
mCamera.setParameters(parameters);
}
private Size determineBestPreviewSize(Camera.Parameters parameters) {
return determineBestSize(parameters.getSupportedPreviewSizes(), PREVIEW_SIZE_MAX_WIDTH);
}
private Size determineBestPictureSize(Camera.Parameters parameters) {
return determineBestSize(parameters.getSupportedPictureSizes(), PICTURE_SIZE_MAX_WIDTH);
}
private Size determineBestSize(List<Size> sizes, int widthThreshold) {
Size bestSize = null;
Size size;
int numOfSizes = sizes.size();
for (int i = 0; i < numOfSizes; i++) {
size = sizes.get(i);
boolean isDesireRatio = (size.width / 4) == (size.height / 3);
boolean isBetterSize = (bestSize == null) || size.width > bestSize.width;
if (isDesireRatio && isBetterSize) {
bestSize = size;
}
}
if (bestSize == null) {
Log.d(TAG, "cannot find the best camera size");
return sizes.get(sizes.size() - 1);
}
return bestSize;
}
private int getFrontCameraID() {
PackageManager pm = getActivity().getPackageManager();
if (pm.hasSystemFeature(PackageManager.FEATURE_CAMERA_FRONT)) {
return CameraInfo.CAMERA_FACING_FRONT;
}
return getBackCameraID();
}
private int getBackCameraID() {
return CameraInfo.CAMERA_FACING_BACK;
}
private void takePicture() {
if (mIsSafeToTakePhoto) {
setSafeToTakePhoto(false);
mOrientationListener.rememberOrientation();
Camera.ShutterCallback shutterCallback = null;
Camera.PictureCallback raw = null;
// postView callback occurs when a scaled, fully processed
// postView image is available.
Camera.PictureCallback postView = null;
mCamera.takePicture(shutterCallback, raw, postView, this);
}
}
@Override
public void onResume() {
super.onResume();
if (mCamera == null) {
restartPreview();
}
}
@Override
public void onStop() {
mOrientationListener.disable();
// stop the preview
if (mCamera != null) {
stopCameraPreview();
mCamera.release();
mCamera = null;
}
CameraSettingPreferences.saveCameraFlashMode(getActivity(), mFlashMode);
super.onStop();
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
mSurfaceHolder = holder;
getCamera(mCameraID);
startCameraPreview();
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
// The surface is destroyed with the visibility of the SurfaceView is set to View.Invisible
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
if (resultCode != Activity.RESULT_OK) return;
switch (requestCode) {
case 1:
Uri imageUri = data.getData();
break;
default:
super.onActivityResult(requestCode, resultCode, data);
}
}
/**
* A picture has been taken
* @param data
* @param camera
*/
@Override
public void onPictureTaken(byte[] data, Camera camera) {
int rotation = getPhotoRotation();
getFragmentManager()
.beginTransaction()
.replace(
R.id.fragment_container,
EditSavePhotoFragment.newInstance(data, rotation, mImageParameters.createCopy()),
EditSavePhotoFragment.TAG)
.addToBackStack(null)
.commit();
setSafeToTakePhoto(true);
}
private int getPhotoRotation() {
int rotation;
int orientation = mOrientationListener.getRememberedNormalOrientation();
CameraInfo info = new CameraInfo();
Camera.getCameraInfo(mCameraID, info);
if (info.facing == CameraInfo.CAMERA_FACING_FRONT) {
rotation = (info.orientation - orientation + 360) % 360;
} else {
rotation = (info.orientation + orientation) % 360;
}
return rotation;
}
/**
* When orientation changes, onOrientationChanged(int) of the listener will be called
*/
private static class CameraOrientationListener extends OrientationEventListener {
private int mCurrentNormalizedOrientation;
private int mRememberedNormalOrientation;
public CameraOrientationListener(Context context) {
super(context, SensorManager.SENSOR_DELAY_NORMAL);
}
@Override
public void onOrientationChanged(int orientation) {
if (orientation != ORIENTATION_UNKNOWN) {
mCurrentNormalizedOrientation = normalize(orientation);
}
}
/**
* @param degrees Amount of clockwise rotation from the device's natural position
* @return Normalized degrees to just 0, 90, 180, 270
*/
private int normalize(int degrees) {
if (degrees > 315 || degrees <= 45) {
return 0;
}
if (degrees > 45 && degrees <= 135) {
return 90;
}
if (degrees > 135 && degrees <= 225) {
return 180;
}
if (degrees > 225 && degrees <= 315) {
return 270;
}
throw new RuntimeException("The physics as we know them are no more. Watch out for anomalies.");
}
public void rememberOrientation() {
mRememberedNormalOrientation = mCurrentNormalizedOrientation;
}
public int getRememberedNormalOrientation() {
rememberOrientation();
return mRememberedNormalOrientation;
}
}
}
EditSavePhotoFragment.java (this is where I display the preview of the image).
This is the code that switches the activity; when I call it, my app crashes:
view.findViewById(R.id.cancel).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
//getActivity().onBackPressed();
getActivity().startActivity(new Intent(getContext(), BottomNavigationActivity.class));
}
});
Below is the full source code:
public class EditSavePhotoFragment extends Fragment {
public static final String TAG = EditSavePhotoFragment.class.getSimpleName();
public static String BITMAP_KEY = "bitmap_byte_array";
public static String ROTATION_KEY = "rotation";
public static String IMAGE_INFO = "image_info";
private static final int REQUEST_STORAGE = 1;
SharedPreferences pref;
SharedPreferences.Editor editor;
private static final String PHOTO_PREF = "PHOTO_PREF";
private static final String CAPTURE_IMAGE_PATH = "CAPTURE_IMAGE_PATH";
public static Fragment newInstance(byte[] bitmapByteArray, int rotation,
@NonNull ImageParameters parameters) {
Fragment fragment = new EditSavePhotoFragment();
Bundle args = new Bundle();
args.putByteArray(BITMAP_KEY, bitmapByteArray);
args.putInt(ROTATION_KEY, rotation);
args.putParcelable(IMAGE_INFO, parameters);
fragment.setArguments(args);
return fragment;
}
public EditSavePhotoFragment() {}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,Bundle savedInstanceState)
{
View view = inflater.inflate(R.layout.squarecamera__fragment_edit_save_photo, container, false);
pref = getContext().getSharedPreferences(PHOTO_PREF, 0);
editor = pref.edit();
return view;
}
@Override
public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
int rotation = getArguments().getInt(ROTATION_KEY);
byte[] data = getArguments().getByteArray(BITMAP_KEY);
ImageParameters imageParameters = getArguments().getParcelable(IMAGE_INFO);
if (imageParameters == null) {
return;
}
final ImageView photoImageView = (ImageView) view.findViewById(R.id.photo);
imageParameters.mIsPortrait =
getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT;
final View topView = view.findViewById(R.id.topView);
if (imageParameters.mIsPortrait) {
topView.getLayoutParams().height = imageParameters.mCoverHeight;
} else {
topView.getLayoutParams().width = imageParameters.mCoverWidth;
}
rotatePicture(rotation, data, photoImageView);
view.findViewById(R.id.save_photo).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
savePicture(photoImageView);
}
});
view.findViewById(R.id.cancel).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
//getActivity().onBackPressed();
getActivity().startActivity(new Intent(getContext(), BottomNavigationActivity.class));
}
});
}
private void rotatePicture(int rotation, byte[] data, ImageView photoImageView) {
Bitmap bitmap = ImageUtility.decodeSampledBitmapFromByte(getActivity(), data);
// Log.d(TAG, "original bitmap width " + bitmap.getWidth() + " height " + bitmap.getHeight());
if (rotation != 0) {
Bitmap oldBitmap = bitmap;
Matrix matrix = new Matrix();
matrix.postRotate(rotation);
bitmap = Bitmap.createBitmap(
oldBitmap, 0, 0, oldBitmap.getWidth(), oldBitmap.getHeight(), matrix, false
);
oldBitmap.recycle();
}
photoImageView.setImageBitmap(bitmap);
}
private void savePicture(ImageView imageView)
{
/*
try
{
SaveImageMethod(imageView);
}
catch (IOException e)
{
e.printStackTrace();
}
*/
requestForPermission();
}
private void SaveImageMethod(ImageView iv) throws IOException
{
BitmapDrawable draw = (BitmapDrawable) iv.getDrawable();
Bitmap bitmap = draw.getBitmap();
FileOutputStream outStream = null;
File sdCard = Environment.getExternalStorageDirectory();
File dir = new File(sdCard.getAbsolutePath() + "/SelfiLife");
dir.mkdirs();
String fileName = String.format("%d.jpg", System.currentTimeMillis());
File outFile = new File(dir, fileName);
Log.d("MainActivity","new IMAGE PATH = "+outFile);
editor.putString(CAPTURE_IMAGE_PATH, String.valueOf(outFile));
editor.commit();
outStream = new FileOutputStream(outFile);
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, outStream);
outStream.flush();
outStream.close();
}
private void requestForPermission() {
RuntimePermissionActivity.startActivity(EditSavePhotoFragment.this,
REQUEST_STORAGE,
Manifest.permission.WRITE_EXTERNAL_STORAGE);
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
if (Activity.RESULT_OK != resultCode) return;
if (REQUEST_STORAGE == requestCode && data != null) {
final boolean isGranted = data.getBooleanExtra(RuntimePermissionActivity.REQUESTED_PERMISSION, false);
final View view = getView();
if (isGranted && view != null) {
ImageView photoImageView = (ImageView) view.findViewById(R.id.photo);
Bitmap bitmap = ((BitmapDrawable) photoImageView.getDrawable()).getBitmap();
Uri photoUri = ImageUtility.savePicture(getActivity(), bitmap);
((CameraActivity) getActivity()).returnPhotoUri(photoUri);
}
} else {
super.onActivityResult(requestCode, resultCode, data);
}
}
}

Android camera photo preview is coming out mirrored

I am working on a camera-related app. I have successfully implemented photo capture and the photo preview, but one issue has arisen: when I take a photo, the capture succeeds but the preview comes out mirrored. I have tried many solutions from Stack Overflow, but so far none have worked for me.
My Camera Preview class is:
public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback {
private SurfaceHolder mHolder;
private Camera mCamera;
private Context mContext;
public CameraPreview(Context context, Camera camera) {
super(context);
mContext = context;
mCamera = camera;
mHolder = getHolder();
mHolder.addCallback(this);
// deprecated setting, but required on Android versions prior to 3.0
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder holder) {
try {
// create the surface and start camera preview
if (mCamera != null) {
mCamera.setPreviewDisplay(holder);
mCamera.startPreview();
}
} catch (IOException e) {
Log.d(VIEW_LOG_TAG, "Error setting camera preview: " + e.getMessage());
}
}
public void refreshCamera(Camera camera) {
if (mHolder.getSurface() == null) {
// preview surface does not exist
return;
}
// stop preview before making changes
try {
mCamera.stopPreview();
} catch (Exception e) {
// ignore: tried to stop a non-existent preview
}
// set preview size and make any resize, rotate or
// reformatting changes here
// start preview with new settings
setCamera(camera);
// TODO: don't hardcode cameraId '0' here... figure this out later.
setCameraDisplayOrientation(mContext, Camera.CameraInfo.CAMERA_FACING_BACK, mCamera);
try {
mCamera.setPreviewDisplay(mHolder);
mCamera.startPreview();
} catch (Exception e) {
Log.d(VIEW_LOG_TAG, "Error starting camera preview: " + e.getMessage());
}
}
public static void setCameraDisplayOrientation(Context context, int cameraId, Camera camera) {
WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
int rotation = wm.getDefaultDisplay().getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
}
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(cameraId, info);
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (info.orientation + degrees) % 360;
// Compensate for the mirror image.
result = (360 - result) % 360;
} else {
// Back-facing camera.
result = (info.orientation - degrees + 360) % 360;
}
camera.setDisplayOrientation(result);
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
// If your preview can change or rotate, take care of those events here.
// Make sure to stop the preview before resizing or reformatting it.
refreshCamera(mCamera);
}
public void setCamera(Camera camera) {
//method to set a camera instance
mCamera = camera;
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
// TODO Auto-generated method stub
// mCamera.release();
}
}
and my PhotoCaptureActivity class is below:
public class PhotoCaptureActivity extends AppCompatActivity {
private static final String TAG = "PhotoCaptureActivity";
//Arraylist for image timer animation
int[] imageArray = {R.drawable.ic_five_128, R.drawable.ic_four_128,
R.drawable.ic_three_128, R.drawable.ic_two_128, R.drawable.ic_one_128,
R.drawable.ic_smiley_128
};
int i = 0;
private final static int DELAY = 1000;
private final Handler handler = new Handler();
Timer timer = new Timer();
Thread timerThread;
// Create variable to handle progress and set it to 0.
private int progress = 0;
Bitmap bitmap,photoCaptureBitmap;
private Camera mCamera;
private CameraPreview mPreview;
private PictureCallback mPicture;
private ImageButton capture, switchCamera;
private Context myContext;
private LinearLayout cameraPreview;
private boolean cameraFront = false;
private ImageView capturedImageHolder;
private ProgressBar progressBar_take_photo;
ImageButton next_button_take_photo;
ImageButton back_button_take_photo;
TextView photo_title_text;
ImageView image_animation;
private MyPreferences myPreferences;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getSupportActionBar().setDisplayShowTitleEnabled(false);
setContentView(R.layout.photo_capture_activity);
//setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
Logger.addLogAdapter(new AndroidLogAdapter());
myContext = this;
initialize();
myPreferences = MyPreferences.getPreferences(this);
nextButton();
backButton();
progressBar();
nameTextShow();
}
private int findFrontFacingCamera() {
int cameraId = -1;
// Search for the front facing camera
int numberOfCameras = Camera.getNumberOfCameras();
for (int i = 0; i < numberOfCameras; i++) {
CameraInfo info = new CameraInfo();
Camera.getCameraInfo(i, info);
if (info.facing == CameraInfo.CAMERA_FACING_FRONT) {
cameraId = i;
cameraFront = true;
break;
}
}
return cameraId;
}
private int findBackFacingCamera() {
int cameraId = -1;
//Search for the back facing camera
//get the number of cameras
int numberOfCameras = Camera.getNumberOfCameras();
//for every camera check
for (int i = 0; i < numberOfCameras; i++) {
CameraInfo info = new CameraInfo();
Camera.getCameraInfo(i, info);
if (info.facing == CameraInfo.CAMERA_FACING_BACK) {
cameraId = i;
cameraFront = false;
break;
}
}
return cameraId;
}
public void onResume() {
super.onResume();
if (!hasCamera(myContext)) {
Toast toast = Toast.makeText(myContext, "Sorry, your phone does not have a camera!", Toast.LENGTH_LONG);
toast.show();
finish();
}
if (mCamera == null) {
//if the front facing camera does not exist
if (findFrontFacingCamera() < 0) {
//Toast.makeText(this, "", Toast.LENGTH_LONG).show();
//switchCamera.setVisibility(View.GONE);
}
mCamera = Camera.open(findFrontFacingCamera());
mPicture = getPictureCallback();
mPreview.refreshCamera(mCamera);
}else{
//Visibility
cameraPreview.setVisibility(View.GONE);
capturedImageHolder.setVisibility(View.VISIBLE);
String photoCapturePreview = myPreferences.getVisitorPhoto();
if(!photoCapturePreview.equals("")) {
photoCaptureBitmap = decodeToBase64(photoCapturePreview);
}
capturedImageHolder.setImageBitmap(photoCaptureBitmap);
}
}
public static Bitmap decodeToBase64(String input) {
byte[] decodedByte = Base64.decode(input, 0);
return BitmapFactory.decodeByteArray(decodedByte, 0, decodedByte.length);
}
// Code for initialization
public void initialize() {
cameraPreview = (LinearLayout) findViewById(R.id.cameraFrame);
mPreview = new CameraPreview(myContext, mCamera);
cameraPreview.addView(mPreview);
capture = (ImageButton) findViewById(R.id.button_capture);
capture.setOnClickListener(captrureListener);
//switchCamera = (Button) findViewById(R.id.button_ChangeCamera);
//switchCamera.setOnClickListener(switchCameraListener);
image_animation = (ImageView) findViewById(R.id.timer_text);
capturedImageHolder = (ImageView) findViewById(R.id.captured_image);
next_button_take_photo = (ImageButton) findViewById(R.id.next_button_take_photo);
back_button_take_photo = (ImageButton) findViewById(R.id.back_button_take_photo);
progressBar_take_photo = (ProgressBar) findViewById(R.id.progressBar_take_photo);
photo_title_text = (TextView) findViewById(R.id.photo_title_text);
}
//Next Button operation based on mobile number input
private void nextButton() {
next_button_take_photo.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
hideKeyboard();
if (bitmap != null) {
Intent newIntent = new Intent(getApplicationContext(), ConfirmationActivity.class);
startActivity(newIntent);
}
}
});
}
//Back button operation
private void backButton() {
back_button_take_photo.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent newIntent = new Intent(getApplicationContext(), AddressInputActivity.class);
startActivity(newIntent);
}
});
}
//Progress bar operation
private void progressBar() {
//simpleProgressBar.setMax(100); // 100 maximum value for the progress value
//simpleProgressBar.setProgress(50); // 50 default progress value for the progress bar
// Get the Drawable custom_progressbar
//Drawable draw=res.getDrawable(R.drawable.custom_progressbar);
// set the drawable as progress drawable
//progressBar.setProgressDrawable(draw);
}
//Normal text show
private void nameTextShow() {
String visitorName = myPreferences.getVisitorName();
photo_title_text.setText(visitorName + ", please smile for the Camera");
}
OnClickListener switchCameraListener = new OnClickListener() {
@Override
public void onClick(View v) {
//get the number of cameras
int camerasNumber = Camera.getNumberOfCameras();
if (camerasNumber > 1) {
//release the old camera instance
//switch camera, from the front and the back and vice versa
releaseCamera();
chooseCamera();
} else {
Toast toast = Toast.makeText(myContext, "Sorry, your phone has only one camera!", Toast.LENGTH_LONG);
toast.show();
}
}
};
public void chooseCamera() {
//if the camera preview is the front
if (cameraFront) {
int cameraId = findBackFacingCamera();
if (cameraId >= 0) {
//open the backFacingCamera
//set a picture callback
//refresh the preview
mCamera = Camera.open(cameraId);
mPicture = getPictureCallback();
mPreview.refreshCamera(mCamera);
}
} else {
int cameraId = findFrontFacingCamera();
if (cameraId >= 0) {
//open the backFacingCamera
//set a picture callback
//refresh the preview
mCamera = Camera.open(cameraId);
mPicture = getPictureCallback();
mPreview.refreshCamera(mCamera);
}
}
}
@Override
protected void onPause() {
super.onPause();
//when on Pause, release camera in order to be used from other applications
releaseCamera();
}
private boolean hasCamera(Context context) {
//check if the device has camera
if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_ANY)) {
return true;
} else {
return false;
}
}
private PictureCallback getPictureCallback() {
PictureCallback picture = new PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
if (data != null) {
//make a new picture file
File pictureFile = getOutputMediaFile();
bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
String imageEncoded = Base64.encodeToString(data, Base64.DEFAULT);
myPreferences.setVisitorPhoto(imageEncoded);
if (bitmap == null) {
return;
}
try {
//write the file
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(data);
fos.close();
//Toast toast = Toast.makeText(myContext, "Picture saved: " + pictureFile.getName(), Toast.LENGTH_LONG);
//toast.show();
//Method for image view
captureImageView();
} catch (FileNotFoundException e) {
} catch (IOException e) {
}
//refresh camera to continue preview
mPreview.refreshCamera(mCamera);
}
}
};
return picture;
}
private void captureImageView(){
//Visibility of camera photo
cameraPreview.setVisibility(View.GONE);
capturedImageHolder.setVisibility(View.VISIBLE);
int screenWidth = getResources().getDisplayMetrics().widthPixels;
int screenHeight = getResources().getDisplayMetrics().heightPixels;
Camera.CameraInfo info = new Camera.CameraInfo();
Matrix mtx = new Matrix();
if (getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
// Notice that width and height are reversed
Bitmap scaled = Bitmap.createScaledBitmap(bitmap, screenHeight, screenWidth, true);
int w = scaled.getWidth();
int h = scaled.getHeight();
// Perform matrix rotations/mirrors depending on camera that took the photo
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT)
{
float[] mirrorY = { -1, 0, 0, 0, 1, 0, 0, 0, 1};
Matrix matrixMirrorY = new Matrix();
matrixMirrorY.setValues(mirrorY);
mtx.postConcat(matrixMirrorY);
}
// Setting post rotate to 90
mtx.postRotate(270);
// Rotating Bitmap
bitmap = Bitmap.createBitmap(scaled, 0, 0, w, h, mtx, true);
//capturedImageHolder.setImageBitmap(bitmap);
capturedImageHolder.setImageBitmap(scaleDownBitmapImage(bitmap, 350, 450));
}else{// LANDSCAPE MODE
//No need to reverse width and height
Bitmap scaled = Bitmap.createScaledBitmap(bitmap, screenWidth,screenHeight , true);
bitmap=scaled;
//capturedImageHolder.setImageBitmap(bitmap);
capturedImageHolder.setImageBitmap(scaleDownBitmapImage(bitmap, 650, 300));
}
}
private Bitmap scaleDownBitmapImage(Bitmap bitmap, int newWidth, int newHeight) {
Bitmap resizedBitmap = Bitmap.createScaledBitmap(bitmap, newWidth, newHeight, true);
return resizedBitmap;
}
OnClickListener captrureListener = new OnClickListener() {
@Override
public void onClick(View v) {
//Visibility of camera photo
cameraPreview.setVisibility(View.VISIBLE);
capturedImageHolder.setVisibility(View.GONE);
new Loading().execute();
//timer.schedule(task, DELAY, DELAY);
timerThread = new Thread()
{
public void run() {
for (i = 0; i < 6; i++) {
runOnUiThread(new Runnable() {
public void run() {
image_animation.setImageResource(imageArray[i]);
}
});
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
if(i==6){
timerThread.interrupt();
}
}
}
};
timerThread.start();
}
};
private final TimerTask task = new TimerTask() {
private int counter = 0;
public void run() {
handler.post(new Runnable() {
public void run() {
image_animation.setImageResource(imageArray[i]);
i++;
if (i > imageArray.length - 1) {
i = 0;
}
}
});
if (++counter == 6) {
timer.cancel();
timer.purge();
}
}
};
public class Loading extends AsyncTask<Void, Void, Void> {
@Override
protected Void doInBackground(Void... voids) {
return null;
}
@Override
protected void onPostExecute(Void aVoid) {
super.onPostExecute(aVoid);
// 6000ms=6s at intervals of 1000ms=1s so that means it lasts 5 seconds
new CountDownTimer(5000, 1000) {
@Override
public void onTick(long millisUntilFinished) {
// every time 1 second passes
//tv.setText("" + millisUntilFinished/1000);
/* image_animation.setImageResource(imageArray[i]);
i++;
if (i > imageArray.length - 1) {
i = 0;
}*/
}
@Override
public void onFinish() {
// count finished
//tv.setText("Picture Taken");
mCamera.takePicture(null, null, null, mPicture);
}
}.start();
}
}
//make picture and save to a folder
private static File getOutputMediaFile() {
//make a new file directory inside the "sdcard" folder
File mediaStorageDir = new File("/sdcard/", "JCG Camera");
//if this "JCGCamera folder does not exist
if (!mediaStorageDir.exists()) {
//if you cannot make this folder return
if (!mediaStorageDir.mkdirs()) {
return null;
}
}
//take the current timeStamp
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
File mediaFile;
//and make a media file:
mediaFile = new File(mediaStorageDir.getPath() + File.separator + "IMG_" + timeStamp + ".jpg");
return mediaFile;
}
private void releaseCamera() {
// stop and release camera
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
private void hideKeyboard() {
View view = getCurrentFocus();
if (view != null) {
((InputMethodManager) getSystemService(Context.INPUT_METHOD_SERVICE)).
hideSoftInputFromWindow(view.getWindowToken(), InputMethodManager.HIDE_NOT_ALWAYS);
}
}
@Override
public void onBackPressed()
{
// code here to show dialog
super.onBackPressed(); // optional depending on your needs
}
@Override
protected void onDestroy() {
if (mCamera != null) {
mCamera.release();
}
super.onDestroy();
}
@Override
public void onStop() {
if (mCamera != null) {
mCamera.release();
}
super.onStop();
}
}
The result I am getting right now versus what I want: during photo capture the hand was on the right side, but when the photo preview is displayed it appears on the left side.
So, what can be done to resolve the issue?
Unfortunately we cannot disable the mirrored preview, but we can use a TextureView and apply setTransform() to reverse the camera preview. This works only with API >= 14, so try the code below in your PhotoCaptureActivity:
mCamera.setDisplayOrientation(90);
Matrix matrix = new Matrix();
matrix.setScale(-1, 1);
matrix.postTranslate(width, 0);
mTextureView.setTransform(matrix);
For more info check this link.
Also keep in mind that this is used with the front camera; when trying it with the back camera, use mTextureView.setTransform(null);
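Putting those two notes together, here is a minimal sketch of how the transform might be applied when switching cameras. The method name, mTextureView, and the previewWidth value are assumptions for illustration, not part of the code above:
// Mirror the preview horizontally for the front camera, reset it for the back camera.
// mTextureView is assumed to be the TextureView that shows the preview.
private void applyPreviewMirror(boolean isFrontCamera, int previewWidth) {
    if (isFrontCamera) {
        Matrix matrix = new Matrix();
        matrix.setScale(-1, 1);                 // flip around the vertical axis
        matrix.postTranslate(previewWidth, 0);  // shift the flipped image back into view
        mTextureView.setTransform(matrix);
    } else {
        mTextureView.setTransform(null);        // no mirroring for the back camera
    }
}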
In the end I got a solution by adding the following attribute to the ImageView, and it worked like a charm :)
android:scaleX="-1"
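If editing the layout is not convenient, the same horizontal flip can be applied from code; View.setScaleX() is the programmatic equivalent of the attribute above (capturedImageHolder being the ImageView that shows the captured photo):
// Programmatic equivalent of android:scaleX="-1": mirror the ImageView horizontally.
capturedImageHolder.setScaleX(-1f);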
In OpenCV's JavaCameraView it is very easy:
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
imageMat = inputFrame.rgba().t();
// horizontal reverse:
Core.flip(imageMat, imageMat, 1);
// or, for a vertical reverse:
// Core.flip(imageMat, imageMat, -1);
return imageMat;
}

How to solve "'android.view.SurfaceView.getHolder()' on a null object reference"?

public class CameraFragment extends Fragment implements SurfaceHolder.Callback,View.OnClickListener {
// Defined All Field
private SurfaceView surfaceView;
private SurfaceHolder surfaceHolder;
private Camera camera;
private ImageView ImgThumbnail;
private Button btnFlash, btnFrontCamera, btnCapture;
private int cameraId;
private boolean flashmode = false;
private int rotation;
private String SavedImagePath;
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
// Inflate the layout for this fragment
View view = inflater.inflate(R.layout.fragment_camera, container,false);
// Camera surface view created
cameraId = Camera.CameraInfo.CAMERA_FACING_BACK;
btnFlash = (Button) container.findViewById(R.id.btnFlash);
btnFrontCamera = (Button) container.findViewById(R.id.btnFrontCamera);
btnCapture = (Button) container.findViewById(R.id.btnCapture);
surfaceView = (SurfaceView) container.findViewById(R.id.surfaceView);
surfaceHolder = surfaceView.getHolder();
surfaceHolder.addCallback(this);
ImgThumbnail = (ImageView) container.findViewById(R.id.ImgThumbnail);
btnFrontCamera.setOnClickListener(this);
btnCapture.setOnClickListener(this);
btnFlash.setOnClickListener(this);
getActivity().getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
if (Camera.getNumberOfCameras() > 1) {
btnFrontCamera.setVisibility(View.VISIBLE);
}
if (!getActivity().getBaseContext().getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FLASH)) {
btnFlash.setVisibility(View.GONE);
}
return view;
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
if (!openCamera(Camera.CameraInfo.CAMERA_FACING_BACK)) {
alertCameraDialog();
}
}
private boolean openCamera(int id) {
boolean result = false;
cameraId = id;
releaseCamera();
try {
camera = Camera.open(cameraId);
} catch (Exception e) {
e.printStackTrace();
}
if (camera != null) {
try {
setUpCamera(camera);
camera.setErrorCallback(new Camera.ErrorCallback() {
#Override
public void onError(int error, Camera camera) {
}
});
camera.setPreviewDisplay(surfaceHolder);
camera.startPreview();
result = true;
} catch (IOException e) {
e.printStackTrace();
result = false;
releaseCamera();
}
}
return result;
}
private void setUpCamera(Camera c) {
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
Camera.getCameraInfo(cameraId, cameraInfo);
rotation = getActivity().getWindowManager().getDefaultDisplay().getRotation();
int degree = 0;
switch (rotation) {
case Surface.ROTATION_0:
degree = 0;
break;
case Surface.ROTATION_90:
degree = 90;
break;
case Surface.ROTATION_180:
degree = 180;
break;
case Surface.ROTATION_270:
degree = 270;
break;
default:
break;
}
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
// Front Facing
rotation = (cameraInfo.orientation + degree) % 360;
rotation = (360 - rotation) % 360;
} else {
// Back Facing
rotation = (cameraInfo.orientation - degree + 360) % 360;
}
c.setDisplayOrientation(rotation);
Camera.Parameters params = c.getParameters();
showFlashButton(params);
List<String> focusModes = params.getSupportedFocusModes();
if (focusModes != null) {
if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
}
}
params.setRotation(rotation);
c.setParameters(params); // apply the parameters, otherwise the rotation/focus settings are never used
}
private void showFlashButton(Camera.Parameters params) {
boolean showFlash = (getActivity().getPackageManager().hasSystemFeature(
PackageManager.FEATURE_CAMERA_FLASH) && params.getFlashMode() != null)
&& params.getSupportedFlashModes() != null
&& params.getSupportedFocusModes().size() > 1;
btnFlash.setVisibility(showFlash ? View.VISIBLE : View.INVISIBLE);
}
private void releaseCamera() {
try {
if (camera != null) {
camera.setPreviewCallback(null);
camera.setErrorCallback(null);
camera.stopPreview();
camera.release();
camera = null;
}
} catch (Exception e) {
e.printStackTrace();
Log.e("error", e.toString());
camera = null;
}
}
#Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
#Override
public void surfaceDestroyed(SurfaceHolder holder) {
}
#Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.btnFlash:
flashOnButton();
break;
case R.id.btnFrontCamera:
flipCamera();
break;
case R.id.btnCapture:
takeImage();
break;
default:
break;
}
}
private void flashOnButton() {
if (camera != null) {
try {
Camera.Parameters param = camera.getParameters();
param.setFlashMode(!flashmode ? Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF);
camera.setParameters(param);
flashmode = !flashmode;
Toast.makeText(getActivity(), "Flash Touch", Toast.LENGTH_SHORT).show();
} catch (Exception e) {
// TODO: handle exception
}
}
}
private void takeImage() {
camera.takePicture(null, null, new Camera.PictureCallback() {
private File imageFile;
#Override
public void onPictureTaken(byte[] data, Camera camera) {
try {
// Convert byte array into bitmap
Bitmap loadedImage = BitmapFactory.decodeByteArray(data, 0, data.length);
// rotate Image
Matrix rotateMatrix = new Matrix();
rotateMatrix.postRotate(rotation);
Bitmap rotatedBitmap = Bitmap.createBitmap(loadedImage, 0, 0, loadedImage.getWidth(), loadedImage.getHeight(), rotateMatrix, false);
final File folder;
if (Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
folder = new File(Environment.getExternalStorageDirectory() + "/CameraApp");
} else {
folder = new File(Environment.getExternalStorageDirectory() + "/CameraApp");
}
boolean success = true;
if (!folder.exists()) {
success = folder.mkdirs();
}
if (success) {
java.util.Date date = new java.util.Date();
imageFile = new File(folder.getAbsolutePath() + File.separator + new Timestamp(date.getTime()).toString() + " " + "Image.jpg");
SavedImagePath = new Timestamp(date.getTime()).toString() + " " + "Image.jpg";
imageFile.createNewFile();
} else {
Toast.makeText(getActivity().getBaseContext(), "Image Not saved", Toast.LENGTH_SHORT).show();
return;
}
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
// Save image into gallery
rotatedBitmap.compress(Bitmap.CompressFormat.JPEG, 100, outputStream);
FileOutputStream file_out = new FileOutputStream(imageFile);
file_out.write(outputStream.toByteArray());
file_out.close();
ContentValues values = new ContentValues();
values.put(MediaStore.Images.Media.DATE_TAKEN, System.currentTimeMillis());
values.put(MediaStore.Images.Media.MIME_TYPE, "image/jpeg");
values.put(MediaStore.MediaColumns.DATA, imageFile.getAbsolutePath());
getActivity().getContentResolver().insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values);
Toast.makeText(getActivity(), "Photo Captured", Toast.LENGTH_SHORT).show();
// Code For Captured Image Save in a ImageView.
getActivity().runOnUiThread(new Runnable() {
#Override
public void run() {
String imagePath = folder.getAbsolutePath() + File.separator + SavedImagePath;
Uri myURI = Uri.parse(imagePath);
ImgThumbnail.setImageURI(myURI);
Toast.makeText(getActivity(), "Photo Saved on ImageView", Toast.LENGTH_SHORT).show();
}
});
} catch (Exception e) {
e.printStackTrace();
}
}
});
}
private void flipCamera() {
int id = (cameraId == Camera.CameraInfo.CAMERA_FACING_BACK ? Camera.CameraInfo.CAMERA_FACING_FRONT : Camera.CameraInfo.CAMERA_FACING_BACK);
Toast.makeText(getActivity(), "Camera Mode Changing", Toast.LENGTH_SHORT).show();
if (!openCamera(id)) {
alertCameraDialog();
}
}
private void alertCameraDialog() {
AlertDialog.Builder dialog = createAlert(getActivity(), "Camera info", "Error to open camera");
dialog.setNegativeButton("OK", new DialogInterface.OnClickListener() {
#Override
public void onClick(DialogInterface dialog, int which) {
dialog.cancel();
}
});
dialog.show();
}
private AlertDialog.Builder createAlert(Context context, String Title, String Message) {
AlertDialog.Builder dialog = new AlertDialog.Builder(new ContextThemeWrapper(context, android.R.style.Theme_Holo_Light_Dialog));
dialog.setIcon(R.drawable.info_for_camera);
if (Title != null)
dialog.setTitle(Title);
else
dialog.setTitle("Information");
dialog.setMessage(Message);
dialog.setCancelable(false);
return dialog;
}
If you run into the same problem as mine, do the following: move your onCreateView() code into onViewCreated(), because onCreateView() should only be used to inflate the view.
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState){
// Inflate the layout for this fragment
return inflater.inflate(R.layout.fragment_camera, container, false);
}
#Override
public void onViewCreated(View view, #Nullable Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
// Camera surface view created
mCameraId = Camera.CameraInfo.CAMERA_FACING_BACK;
btnFlash = (Button) view.findViewById(R.id.btnFlash);
btnFrontCamera = (Button) view.findViewById(R.id.btnFrontCamera);
btnCapture = (Button) view.findViewById(R.id.btnCapture);
surfaceView = (SurfaceView) view.findViewById(R.id.surfaceView);
surfaceHolder = surfaceView.getHolder();
surfaceHolder.addCallback(this);
ImgThumbnail = (ImageView) view.findViewById(R.id.ImgThumbnail);
btnFrontCamera.setOnClickListener(this);
btnCapture.setOnClickListener(this);
btnFlash.setOnClickListener(this);
getActivity().getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
if (Camera.getNumberOfCameras() > 1) {
btnFrontCamera.setVisibility(View.VISIBLE);
}
if (!getActivity().getBaseContext().getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FLASH)) {
btnFlash.setVisibility(View.GONE);
}
}
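For what it's worth, my reading of the original onCreateView() is that the views were looked up on the parent container instead of the freshly inflated view, which is why getHolder() was called on a null reference:
// Original: container does not hold the inflated layout yet, so this returns null -> NPE on getHolder()
surfaceView = (SurfaceView) container.findViewById(R.id.surfaceView);
// Fixed: look it up on the inflated view (or in onViewCreated(), as shown above)
surfaceView = (SurfaceView) view.findViewById(R.id.surfaceView);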

Opencv4android Mediarecorder.getSurface() no View display

I want to use the OpenCV4Android sample Tutorial1 to do video recording.
I used this solution, but my smartphone does not display anything on screen.
Just a black view. Can anyone help me?
Here is my code:
Tutorial1Activity
public class Tutorial1Activity extends Activity implements CvCameraViewListener2 {
private static final String TAG = "OCVSample::Activity";
private CameraBridgeViewBase mOpenCvCameraView;
private boolean mIsJavaCamera = true;
private MenuItem mItemSwitchCamera = null;
//*****************writetoSD***********************//
public FileWriter fw; // = new FileWriter(folder_path, false);
public BufferedWriter bw;// = new BufferedWriter(fw);
boolean first_in = true;
String showTimefile = null;
String showTime = null;
String folder_path = Environment.getExternalStorageDirectory().getAbsolutePath();
String folder_name = "Face Detection Signal";
String folder_pathforfile = null;
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd HH_mm_ss");
SimpleDateFormat sdf_fileintxt = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
//*****************writetoSD***********************//
//---------------MediaRecorder-------------------//
public MediaRecorder mediaRecorder;
Button bt_Record;
boolean isRecord = false;
Handler mThreadHandler;
HandlerThread mHandlerThread;
//---------------MediaRecorder-------------------//
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
#Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public Tutorial1Activity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
#Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.tutorial1_surface_view);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_java_surface_view);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
//---------------------------------------------------------//
bt_Record = (Button)findViewById(R.id.bt_recorder);
folder_pathforfile = folder_path + File.separator + folder_name
+ File.separator + "opencv" + "_";
CreateSDfolder();
ongetTime();
//---------------------------------------------------------//
}
#Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
#Override
public void onResume()
{
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
}
public void onCameraViewStopped() {
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
return inputFrame.rgba();
}
private void CreateSDfolder() {
String filefolderpath = folder_path + File.separator + folder_name;
File dir = new File(filefolderpath);
if (!dir.exists()){
Log.e("folder", "not exist");
try{
//dir.createNewFile(true);
dir.mkdir();
Log.e("folder", "folder created");
}catch(Exception e){
Log.e("folder", "folder creation failed");
e.printStackTrace();
}
}
else{
Log.e("folder", "exist");
}
}
private void ongetTime() {
Date dt=new Date();
showTime=sdf_fileintxt.format(dt);
showTimefile =sdf.format(dt);
}
private void WritetoSD(String data) {
try {
fw = new FileWriter(folder_pathforfile + showTimefile+".txt", true);
bw = new BufferedWriter(fw);
if (first_in == true) {
first_in = false;
bw.append(showTime);
bw.newLine();
}
bw.append(data);
bw.newLine();
bw.flush();
bw.close();
} catch (IOException e) {
Log.e("WriteToSD", "Write To SD ERROR");
e.printStackTrace();
}
}
public void onRecordSignal (View v){
if(!isRecord){
isRecord = true;
Log.e(TAG, "button click " + isRecord);
bt_Record.setText("Stop");
//new MediaPrepareTask().execute(null, null, null);
if (prepareMediaRecorder()) {
// Camera is available and unlocked, MediaRecorder is prepared,
// now you can start recording
Log.e("debug_mediarecorder", "prepareMediaRecorder in if");
mOpenCvCameraView.setRecorder(mediaRecorder);
mediaRecorder.start();
} else {
// prepare didn't work, release the camera
Log.e("debug_mediarecorder", "prepareMediaRecorder in else");
// mediaRecorder.stop();
releaseMediaRecorder();
}
} else{
isRecord = false;
Log.e(TAG, "button click " + isRecord);
bt_Record.setText("Record");
try {
if(mediaRecorder != null)
mediaRecorder.stop(); // stop the recording
else
Log.e(TAG,"onRecordSignal mediaRecorder is null");
} catch (RuntimeException e) {
// RuntimeException is thrown when stop() is called immediately after start().
// In this case the output file is not properly constructed and should be deleted.
Log.d(TAG, "RuntimeException: stop() is called immediately after start()");
//noinspection ResultOfMethodCallIgnored
}
releaseMediaRecorder(); // release the MediaRecorder object
}
}
public void releaseMediaRecorder() {
Log.e("debug","releaseMediaRecorder");
if (mediaRecorder != null) {
mediaRecorder.reset(); // clear recorder configuration
mediaRecorder.release(); // release the recorder object
mediaRecorder = null;
JavaCameraView.mCamera.lock();
mOpenCvCameraView.releaseRecord();
}
}
private String recordfilepath() {
// TODO Auto-generated method stub
ongetTime();
File sddir = Environment.getExternalStorageDirectory();
File vrdir = new File(sddir, folder_name);
File file = new File(vrdir, showTimefile+"_.mp4");
String filepath = file.getAbsolutePath();
Log.e("debug mediarecorder", filepath);
return filepath;
}
public boolean prepareMediaRecorder() {
// TODO Auto-generated method stub
Log.e("debug mediarecorder", "in prepareMediaRecorder");
mediaRecorder = new MediaRecorder();
try {
JavaCameraView.mCamera.lock();
JavaCameraView.mCamera.unlock();
}catch (RuntimeException e){
Log.e("debug mediarecorder","JavaCameraView.mCamera.unlock() fail");
}
/*mediaRecorder.reset();
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mediaRecorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH));
//mediaRecorder.setPreviewDisplay(CameraBridgeViewBase.mSurfaceHolder.getSurface());
mediaRecorder.setOutputFile(recordfilepath());
//mediaRecorder.setOnInfoListener((MediaRecorder.OnInfoListener) this);
//mediaRecorder.setOnErrorListener((MediaRecorder.OnErrorListener) this);*/
mediaRecorder.reset();
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
CamcorderProfile cpHigh = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
mediaRecorder.setProfile(cpHigh);
//mediaRecorder.setOutputFile("out.mp4");
mediaRecorder.setOutputFile(recordfilepath());
mediaRecorder.setVideoSize(mOpenCvCameraView.mFrameWidth, mOpenCvCameraView.mFrameHeight);
//mediaRecorder.setOnInfoListener(this);
//mediaRecorder.setOnErrorListener(this);
try {
mediaRecorder.prepare();
} catch (IllegalStateException e) {
Log.e("debug mediarecorder", "not prepare");
releaseMediaRecorder();
return false;
} catch (IOException e) {
Log.e("debug mediarecorder", "not prepare IOException");
//releaseMediaRecorder();
}
return true;
}
}
CameraBridgeViewBase
public abstract class CameraBridgeViewBase extends SurfaceView implements SurfaceHolder.Callback {
private static final String TAG = "CameraBridge";
private static final int MAX_UNSPECIFIED = -1;
private static final int STOPPED = 0;
private static final int STARTED = 1;
private int mState = STOPPED;
private Bitmap mCacheBitmap;
private CvCameraViewListener2 mListener;
private boolean mSurfaceExist;
private Object mSyncObject = new Object();
public int mFrameWidth;
public int mFrameHeight;
protected int mMaxHeight;
protected int mMaxWidth;
protected float mScale = 0;
protected int mPreviewFormat = RGBA;
protected int mCameraIndex = CAMERA_ID_ANY;
protected boolean mEnabled;
protected FpsMeter mFpsMeter = null;
public static final int CAMERA_ID_ANY = -1;
public static final int CAMERA_ID_BACK = 99;
public static final int CAMERA_ID_FRONT = 98;
public static final int RGBA = 1;
public static final int GRAY = 2;
//-------------------------------------//
protected MediaRecorder mRecorder;
protected Surface mSurface = null;
public void setRecorder(MediaRecorder rec) {
mRecorder = rec;
//Log.e(TAG,mRecorder.toString());
if (mRecorder != null) {
mSurface = mRecorder.getSurface();
Log.e(TAG,"mRecorder is not null");
Log.e(TAG,"mSurface = "+mSurface.toString());
}
else{
Log.e(TAG,"mRecorder is null");
}
}
public void releaseRecord(){
mSurface.release();
}
//-------------------------------------//
public CameraBridgeViewBase(Context context, int cameraId) {
super(context);
mCameraIndex = cameraId;
getHolder().addCallback(this);
mMaxWidth = MAX_UNSPECIFIED;
mMaxHeight = MAX_UNSPECIFIED;
}
public CameraBridgeViewBase(Context context, AttributeSet attrs) {
super(context, attrs);
int count = attrs.getAttributeCount();
Log.d(TAG, "Attr count: " + Integer.valueOf(count));
TypedArray styledAttrs = getContext().obtainStyledAttributes(attrs, R.styleable.CameraBridgeViewBase);
if (styledAttrs.getBoolean(R.styleable.CameraBridgeViewBase_show_fps, false))
enableFpsMeter();
mCameraIndex = styledAttrs.getInt(R.styleable.CameraBridgeViewBase_camera_id, -1);
getHolder().addCallback(this);
mMaxWidth = MAX_UNSPECIFIED;
mMaxHeight = MAX_UNSPECIFIED;
styledAttrs.recycle();
}
/**
* Sets the camera index
* #param cameraIndex new camera index
*/
public void setCameraIndex(int cameraIndex) {
this.mCameraIndex = cameraIndex;
}
public interface CvCameraViewListener {
/**
* This method is invoked when camera preview has started. After this method is invoked
* the frames will start to be delivered to client via the onCameraFrame() callback.
* #param width - the width of the frames that will be delivered
* #param height - the height of the frames that will be delivered
*/
public void onCameraViewStarted(int width, int height);
/**
* This method is invoked when camera preview has been stopped for some reason.
* No frames will be delivered via onCameraFrame() callback after this method is called.
*/
public void onCameraViewStopped();
/**
* This method is invoked when delivery of the frame needs to be done.
* The returned values - is a modified frame which needs to be displayed on the screen.
* TODO: pass the parameters specifying the format of the frame (BPP, YUV or RGB and etc)
*/
public Mat onCameraFrame(Mat inputFrame);
}
public interface CvCameraViewListener2 {
/**
* This method is invoked when camera preview has started. After this method is invoked
* the frames will start to be delivered to client via the onCameraFrame() callback.
* #param width - the width of the frames that will be delivered
* #param height - the height of the frames that will be delivered
*/
public void onCameraViewStarted(int width, int height);
/**
* This method is invoked when camera preview has been stopped for some reason.
* No frames will be delivered via onCameraFrame() callback after this method is called.
*/
public void onCameraViewStopped();
/**
* This method is invoked when delivery of the frame needs to be done.
* The returned values - is a modified frame which needs to be displayed on the screen.
* TODO: pass the parameters specifying the format of the frame (BPP, YUV or RGB and etc)
*/
public Mat onCameraFrame(CvCameraViewFrame inputFrame);
};
protected class CvCameraViewListenerAdapter implements CvCameraViewListener2 {
public CvCameraViewListenerAdapter(CvCameraViewListener oldStypeListener) {
mOldStyleListener = oldStypeListener;
}
public void onCameraViewStarted(int width, int height) {
mOldStyleListener.onCameraViewStarted(width, height);
}
public void onCameraViewStopped() {
mOldStyleListener.onCameraViewStopped();
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
Mat result = null;
switch (mPreviewFormat) {
case RGBA:
result = mOldStyleListener.onCameraFrame(inputFrame.rgba());
break;
case GRAY:
result = mOldStyleListener.onCameraFrame(inputFrame.gray());
break;
default:
Log.e(TAG, "Invalid frame format! Only RGBA and Gray Scale are supported!");
};
return result;
}
public void setFrameFormat(int format) {
mPreviewFormat = format;
}
private int mPreviewFormat = RGBA;
private CvCameraViewListener mOldStyleListener;
};
/**
* This class interface is abstract representation of single frame from camera for onCameraFrame callback
* Attention: Do not use objects, that represents this interface out of onCameraFrame callback!
*/
public interface CvCameraViewFrame {
/**
* This method returns RGBA Mat with frame
*/
public Mat rgba();
/**
* This method returns single channel gray scale Mat with frame
*/
public Mat gray();
};
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) {
Log.d(TAG, "call surfaceChanged event");
synchronized(mSyncObject) {
if (!mSurfaceExist) {
mSurfaceExist = true;
checkCurrentState();
} else {
/** Surface changed. We need to stop camera and restart with new parameters */
/* Pretend that old surface has been destroyed */
mSurfaceExist = false;
checkCurrentState();
/* Now use new surface. Say we have it now */
mSurfaceExist = true;
checkCurrentState();
}
}
}
public void surfaceCreated(SurfaceHolder holder) {
/* Do nothing. Wait until surfaceChanged delivered */
}
public void surfaceDestroyed(SurfaceHolder holder) {
synchronized(mSyncObject) {
mSurfaceExist = false;
checkCurrentState();
}
}
/**
* This method is provided for clients, so they can enable the camera connection.
* The actual onCameraViewStarted callback will be delivered only after both this method is called and surface is available
*/
public void enableView() {
synchronized(mSyncObject) {
mEnabled = true;
checkCurrentState();
}
}
/**
* This method is provided for clients, so they can disable camera connection and stop
* the delivery of frames even though the surface view itself is not destroyed and still stays on the scren
*/
public void disableView() {
synchronized(mSyncObject) {
mEnabled = false;
checkCurrentState();
}
}
/**
* This method enables label with fps value on the screen
*/
public void enableFpsMeter() {
if (mFpsMeter == null) {
mFpsMeter = new FpsMeter();
mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
}
}
public void disableFpsMeter() {
mFpsMeter = null;
}
/**
*
* #param listener
*/
public void setCvCameraViewListener(CvCameraViewListener2 listener) {
mListener = listener;
}
public void setCvCameraViewListener(CvCameraViewListener listener) {
CvCameraViewListenerAdapter adapter = new CvCameraViewListenerAdapter(listener);
adapter.setFrameFormat(mPreviewFormat);
mListener = adapter;
}
/**
* This method sets the maximum size that camera frame is allowed to be. When selecting
* size - the biggest size which less or equal the size set will be selected.
* As an example - we set setMaxFrameSize(200,200) and we have 176x152 and 320x240 sizes. The
* preview frame will be selected with 176x152 size.
* This method is useful when need to restrict the size of preview frame for some reason (for example for video recording)
* #param maxWidth - the maximum width allowed for camera frame.
* #param maxHeight - the maximum height allowed for camera frame
*/
public void setMaxFrameSize(int maxWidth, int maxHeight) {
mMaxWidth = maxWidth;
mMaxHeight = maxHeight;
}
public void SetCaptureFormat(int format)
{
mPreviewFormat = format;
if (mListener instanceof CvCameraViewListenerAdapter) {
CvCameraViewListenerAdapter adapter = (CvCameraViewListenerAdapter) mListener;
adapter.setFrameFormat(mPreviewFormat);
}
}
/**
* Called when mSyncObject lock is held
*/
private void checkCurrentState() {
Log.d(TAG, "call checkCurrentState");
int targetState;
if (mEnabled && mSurfaceExist && getVisibility() == VISIBLE) {
targetState = STARTED;
} else {
targetState = STOPPED;
}
if (targetState != mState) {
/* The state change detected. Need to exit the current state and enter target state */
processExitState(mState);
mState = targetState;
processEnterState(mState);
}
}
private void processEnterState(int state) {
Log.d(TAG, "call processEnterState: " + state);
switch(state) {
case STARTED:
onEnterStartedState();
if (mListener != null) {
mListener.onCameraViewStarted(mFrameWidth, mFrameHeight);
}
break;
case STOPPED:
onEnterStoppedState();
if (mListener != null) {
mListener.onCameraViewStopped();
}
break;
};
}
private void processExitState(int state) {
Log.d(TAG, "call processExitState: " + state);
switch(state) {
case STARTED:
onExitStartedState();
break;
case STOPPED:
onExitStoppedState();
break;
};
}
private void onEnterStoppedState() {
/* nothing to do */
}
private void onExitStoppedState() {
/* nothing to do */
}
// NOTE: The order of bitmap constructor and camera connection is important for android 4.1.x
// Bitmap must be constructed before surface
private void onEnterStartedState() {
Log.d(TAG, "call onEnterStartedState");
/* Connect camera */
if (!connectCamera(getWidth(), getHeight())) {
AlertDialog ad = new AlertDialog.Builder(getContext()).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("It seems that you device does not support camera (or it is locked). Application will be closed.");
ad.setButton(DialogInterface.BUTTON_NEUTRAL, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
((Activity) getContext()).finish();
}
});
ad.show();
}
}
private void onExitStartedState() {
disconnectCamera();
if (mCacheBitmap != null) {
mCacheBitmap.recycle();
}
}
/**
* This method shall be called by the subclasses when they have valid
* object and want it to be delivered to external client (via callback) and
* then displayed on the screen.
* #param frame - the current frame to be delivered
*/
protected void deliverAndDrawFrame(CvCameraViewFrame frame) {
Mat modified;
if (mListener != null) {
modified = mListener.onCameraFrame(frame);
} else {
modified = frame.rgba();
}
boolean bmpValid = true;
if (modified != null) {
try {
Utils.matToBitmap(modified, mCacheBitmap);
} catch(Exception e) {
Log.e(TAG, "Mat type: " + modified);
Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmpValid = false;
}
}
if (bmpValid && mCacheBitmap != null) {
Canvas canvas;
if (mRecorder != null) {
canvas = mSurface.lockCanvas(null);
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
Log.d(TAG, "mStretch value: " + mScale);
if (mScale != 0) {
canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
new Rect((int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2),
(int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2),
(int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2 + mScale*mCacheBitmap.getWidth()),
(int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2 + mScale*mCacheBitmap.getHeight())), null);
} else {
canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
new Rect((canvas.getWidth() - mCacheBitmap.getWidth()) / 2,
(canvas.getHeight() - mCacheBitmap.getHeight()) / 2,
(canvas.getWidth() - mCacheBitmap.getWidth()) / 2 + mCacheBitmap.getWidth(),
(canvas.getHeight() - mCacheBitmap.getHeight()) / 2 + mCacheBitmap.getHeight()), null);
}
if (mFpsMeter != null) {
mFpsMeter.measure();
mFpsMeter.draw(canvas, 20, 30);
}
mSurface.unlockCanvasAndPost(canvas);
}
}
}
/**
* This method is invoked shall perform concrete operation to initialize the camera.
* CONTRACT: as a result of this method variables mFrameWidth and mFrameHeight MUST be
* initialized with the size of the Camera frames that will be delivered to external processor.
* #param width - the width of this SurfaceView
* #param height - the height of this SurfaceView
*/
protected abstract boolean connectCamera(int width, int height);
/**
* Disconnects and release the particular camera object being connected to this surface view.
* Called when syncObject lock is held
*/
protected abstract void disconnectCamera();
// NOTE: On Android 4.1.x the function must be called before SurfaceTextre constructor!
protected void AllocateCache()
{
mCacheBitmap = Bitmap.createBitmap(mFrameWidth, mFrameHeight, Bitmap.Config.ARGB_8888);
}
public interface ListItemAccessor {
public int getWidth(Object obj);
public int getHeight(Object obj);
};
/**
* This helper method can be called by subclasses to select camera preview size.
* It goes over the list of the supported preview sizes and selects the maximum one which
* fits both values set via setMaxFrameSize() and surface frame allocated for this view
* #param supportedSizes
* #param surfaceWidth
* #param surfaceHeight
* #return optimal frame size
*/
protected Size calculateCameraFrameSize(List<?> supportedSizes, ListItemAccessor accessor, int surfaceWidth, int surfaceHeight) {
int calcWidth = 0;
int calcHeight = 0;
int maxAllowedWidth = (mMaxWidth != MAX_UNSPECIFIED && mMaxWidth < surfaceWidth)? mMaxWidth : surfaceWidth;
int maxAllowedHeight = (mMaxHeight != MAX_UNSPECIFIED && mMaxHeight < surfaceHeight)? mMaxHeight : surfaceHeight;
for (Object size : supportedSizes) {
int width = accessor.getWidth(size);
int height = accessor.getHeight(size);
if (width <= maxAllowedWidth && height <= maxAllowedHeight) {
if (width >= calcWidth && height >= calcHeight) {
calcWidth = (int) width;
calcHeight = (int) height;
}
}
}
return new Size(calcWidth, calcHeight);
}
}
And I added these features and permissions in the Manifest:
<uses-feature android:name="android.hardware.camera" android:required="false"/>
<uses-feature android:name="android.hardware.camera.autofocus" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front.autofocus" android:required="false"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.WAKE_LOCK" />
<uses-permission android:name="android.permission.RECORD_VIDEO" />
<uses-permission android:name="android.permission.RECORD_AUDIO"/>

CaptureActivity from Zxing library didn't return when barcode is scanned

I have integrated the ZXing library in my app. I used to call ZXing via intent:
Intent intent = new Intent("com.google.zxing.client.android.SCAN");
intent.putExtra("SCAN_MODE", "ONE_D_MODE");
intent.putExtra("SCAN_FORMATS", "CODE_39,CODE_93,CODE_128,DATA_MATRIX,ITF,CODABAR,EAN_13,EAN_8,UPC_A,QR_CODE");
startActivityForResult(intent, 1);
But com.google.zxing.client.android.SCAN lets the user choose a scanning app if another barcode scanner is installed, and I want a scan started from my app to launch my own ZXing CaptureActivity.class, so I call it via intent:
Intent intent = new Intent(this, CaptureActivity.class);
intent.putExtra("SCAN_MODE", "ONE_D_MODE");
intent.putExtra("SCAN_FORMATS", "CODE_39,CODE_93,CODE_128,DATA_MATRIX,ITF,CODABAR,EAN_13,EAN_8,UPC_A,QR_CODE");
startActivityForResult(intent, 1);
But when CaptureActivity detects a barcode nothing happens; it does not come back to the previous activity.
Here is CaptureActivity:
public final class CaptureActivity extends Activity implements SurfaceHolder.Callback {
private static final String TAG = CaptureActivity.class.getSimpleName();
private static final long DEFAULT_INTENT_RESULT_DURATION_MS = 1500L;
private static final long BULK_MODE_SCAN_DELAY_MS = 1000L;
private static final String[] ZXING_URLS = { "http://zxing.appspot.com/scan", "zxing://scan/" };
public static final int HISTORY_REQUEST_CODE = 0x0000bacc;
private static final Collection<ResultMetadataType> DISPLAYABLE_METADATA_TYPES =
EnumSet.of(ResultMetadataType.ISSUE_NUMBER,
ResultMetadataType.SUGGESTED_PRICE,
ResultMetadataType.ERROR_CORRECTION_LEVEL,
ResultMetadataType.POSSIBLE_COUNTRY);
private CameraManager cameraManager;
private CaptureActivityHandler handler;
private Result savedResultToShow;
private ViewfinderView viewfinderView;
private TextView statusView;
private View resultView;
private Result lastResult;
private boolean hasSurface;
private boolean copyToClipboard;
private IntentSource source;
private String sourceUrl;
private ScanFromWebPageManager scanFromWebPageManager;
private Collection<BarcodeFormat> decodeFormats;
private Map<DecodeHintType,?> decodeHints;
private String characterSet;
private HistoryManager historyManager;
private InactivityTimer inactivityTimer;
private BeepManager beepManager;
private AmbientLightManager ambientLightManager;
ViewfinderView getViewfinderView() {
return viewfinderView;
}
public Handler getHandler() {
return handler;
}
CameraManager getCameraManager() {
return cameraManager;
}
#Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
Window window = getWindow();
window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.capture);
hasSurface = false;
historyManager = new HistoryManager(this);
historyManager.trimHistory();
inactivityTimer = new InactivityTimer(this);
beepManager = new BeepManager(this);
ambientLightManager = new AmbientLightManager(this);
PreferenceManager.setDefaultValues(this, R.xml.preferences, false);
}
#Override
protected void onResume() {
super.onResume();
// CameraManager must be initialized here, not in onCreate(). This is necessary because we don't
// want to open the camera driver and measure the screen size if we're going to show the help on
// first launch. That led to bugs where the scanning rectangle was the wrong size and partially
// off screen.
cameraManager = new CameraManager(getApplication());
viewfinderView = (ViewfinderView) findViewById(R.id.viewfinder_view);
viewfinderView.setCameraManager(cameraManager);
resultView = findViewById(R.id.result_view);
statusView = (TextView) findViewById(R.id.status_view);
handler = null;
lastResult = null;
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
// if (prefs.getBoolean(PreferencesActivity.KEY_DISABLE_AUTO_ORIENTATION, true)) {
// setRequestedOrientation(getCurrentOrientation());
// } else {
// setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_SENSOR_LANDSCAPE);
// }
resetStatusView();
SurfaceView surfaceView = (SurfaceView) findViewById(R.id.preview_view);
SurfaceHolder surfaceHolder = surfaceView.getHolder();
if (hasSurface) {
// The activity was paused but not stopped, so the surface still exists. Therefore
// surfaceCreated() won't be called, so init the camera here.
initCamera(surfaceHolder);
} else {
// Install the callback and wait for surfaceCreated() to init the camera.
surfaceHolder.addCallback(this);
}
beepManager.updatePrefs();
ambientLightManager.start(cameraManager);
inactivityTimer.onResume();
Intent intent = getIntent();
copyToClipboard = prefs.getBoolean(PreferencesActivity.KEY_COPY_TO_CLIPBOARD, true)
&& (intent == null || intent.getBooleanExtra(Intents.Scan.SAVE_HISTORY, true));
source = IntentSource.NONE;
decodeFormats = null;
characterSet = null;
if (intent != null) {
String action = intent.getAction();
String dataString = intent.getDataString();
if (Intents.Scan.ACTION.equals(action)) {
// Scan the formats the intent requested, and return the result to the calling activity.
source = IntentSource.NATIVE_APP_INTENT;
decodeFormats = DecodeFormatManager.parseDecodeFormats(intent);
decodeHints = DecodeHintManager.parseDecodeHints(intent);
if (intent.hasExtra(Intents.Scan.WIDTH) && intent.hasExtra(Intents.Scan.HEIGHT)) {
int width = intent.getIntExtra(Intents.Scan.WIDTH, 0);
int height = intent.getIntExtra(Intents.Scan.HEIGHT, 0);
if (width > 0 && height > 0) {
cameraManager.setManualFramingRect(width, height);
}
}
String customPromptMessage = intent.getStringExtra(Intents.Scan.PROMPT_MESSAGE);
if (customPromptMessage != null) {
statusView.setText(customPromptMessage);
}
} else if (dataString != null &&
dataString.contains("http://www.google") &&
dataString.contains("/m/products/scan")) {
// Scan only products and send the result to mobile Product Search.
source = IntentSource.PRODUCT_SEARCH_LINK;
sourceUrl = dataString;
decodeFormats = DecodeFormatManager.PRODUCT_FORMATS;
} else if (isZXingURL(dataString)) {
// Scan formats requested in query string (all formats if none specified).
// If a return URL is specified, send the results there. Otherwise, handle it ourselves.
source = IntentSource.ZXING_LINK;
sourceUrl = dataString;
Uri inputUri = Uri.parse(dataString);
scanFromWebPageManager = new ScanFromWebPageManager(inputUri);
decodeFormats = DecodeFormatManager.parseDecodeFormats(inputUri);
// Allow a sub-set of the hints to be specified by the caller.
decodeHints = DecodeHintManager.parseDecodeHints(inputUri);
}
characterSet = intent.getStringExtra(Intents.Scan.CHARACTER_SET);
}
}
private int getCurrentOrientation() {
int rotation = getWindowManager().getDefaultDisplay().getRotation();
switch (rotation) {
case Surface.ROTATION_0:
case Surface.ROTATION_90:
return ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE;
default:
return ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE;
}
}
private static boolean isZXingURL(String dataString) {
if (dataString == null) {
return false;
}
for (String url : ZXING_URLS) {
if (dataString.startsWith(url)) {
return true;
}
}
return false;
}
#Override
protected void onPause() {
if (handler != null) {
handler.quitSynchronously();
handler = null;
}
inactivityTimer.onPause();
ambientLightManager.stop();
cameraManager.closeDriver();
if (!hasSurface) {
/* SurfaceView surfaceView = (SurfaceView) findViewById(R.id.preview_view);
SurfaceHolder surfaceHolder = surfaceView.getHolder();
surfaceHolder.removeCallback(this);*/
}
super.onPause();
}
#Override
protected void onDestroy() {
inactivityTimer.shutdown();
super.onDestroy();
}
#Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
switch (keyCode) {
case KeyEvent.KEYCODE_BACK:
if (source == IntentSource.NATIVE_APP_INTENT) {
setResult(RESULT_CANCELED);
finish();
return true;
}
if ((source == IntentSource.NONE || source == IntentSource.ZXING_LINK) && lastResult != null) {
restartPreviewAfterDelay(0L);
return true;
}
break;
case KeyEvent.KEYCODE_FOCUS:
case KeyEvent.KEYCODE_CAMERA:
// Handle these events so they don't launch the Camera app
return true;
// Use volume up/down to turn on light
case KeyEvent.KEYCODE_VOLUME_DOWN:
cameraManager.setTorch(false);
return true;
case KeyEvent.KEYCODE_VOLUME_UP:
cameraManager.setTorch(true);
return true;
}
return super.onKeyDown(keyCode, event);
}
#Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater menuInflater = getMenuInflater();
menuInflater.inflate(R.menu.capture, menu);
return super.onCreateOptionsMenu(menu);
}
#Override
public boolean onOptionsItemSelected(MenuItem item) {
Intent intent = new Intent(Intent.ACTION_VIEW);
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
switch (item.getItemId()) {
case R.id.menu_share:
intent.setClassName(this, ShareActivity.class.getName());
startActivity(intent);
break;
case R.id.menu_history:
intent.setClassName(this, HistoryActivity.class.getName());
startActivityForResult(intent, HISTORY_REQUEST_CODE);
break;
case R.id.menu_settings:
intent.setClassName(this, PreferencesActivity.class.getName());
startActivity(intent);
break;
case R.id.menu_help:
intent.setClassName(this, HelpActivity.class.getName());
startActivity(intent);
break;
default:
return super.onOptionsItemSelected(item);
}
return true;
}
#Override
public void onActivityResult(int requestCode, int resultCode, Intent intent) {
if (resultCode == RESULT_OK) {
if (requestCode == HISTORY_REQUEST_CODE) {
int itemNumber = intent.getIntExtra(Intents.History.ITEM_NUMBER, -1);
if (itemNumber >= 0) {
HistoryItem historyItem = historyManager.buildHistoryItem(itemNumber);
decodeOrStoreSavedBitmap(null, historyItem.getResult());
}
}
}
}
private void decodeOrStoreSavedBitmap(Bitmap bitmap, Result result) {
// Bitmap isn't used yet -- will be used soon
if (handler == null) {
savedResultToShow = result;
} else {
if (result != null) {
savedResultToShow = result;
}
if (savedResultToShow != null) {
Message message = Message.obtain(handler, R.id.decode_succeeded, savedResultToShow);
handler.sendMessage(message);
}
savedResultToShow = null;
}
}
#Override
public void surfaceCreated(SurfaceHolder holder) {
if (holder == null) {
Log.e(TAG, "*** WARNING *** surfaceCreated() gave us a null surface!");
}
if (!hasSurface) {
hasSurface = true;
initCamera(holder);
}
}
#Override
public void surfaceDestroyed(SurfaceHolder holder) {
hasSurface = false;
}
#Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
/**
* A valid barcode has been found, so give an indication of success and show the results.
*
* #param rawResult The contents of the barcode.
* #param scaleFactor amount by which thumbnail was scaled
* #param barcode A greyscale bitmap of the camera data which was decoded.
*/
public void handleDecode(Result rawResult, Bitmap barcode, float scaleFactor) {
inactivityTimer.onActivity();
lastResult = rawResult;
ResultHandler resultHandler = ResultHandlerFactory.makeResultHandler(this, rawResult);
boolean fromLiveScan = barcode != null;
if (fromLiveScan) {
historyManager.addHistoryItem(rawResult, resultHandler);
// Then not from history, so beep/vibrate and we have an image to draw on
beepManager.playBeepSoundAndVibrate();
//drawResultPoints(barcode, scaleFactor, rawResult);
}
switch (source) {
case NATIVE_APP_INTENT:
handleDecodeExternally(rawResult, resultHandler, barcode);
break;
case PRODUCT_SEARCH_LINK:
handleDecodeExternally(rawResult, resultHandler, barcode);
break;
case ZXING_LINK:
if (scanFromWebPageManager == null || !scanFromWebPageManager.isScanFromWebPage()) {
handleDecodeInternally(rawResult, resultHandler, barcode);
} else {
handleDecodeExternally(rawResult, resultHandler, barcode);
}
break;
case NONE:
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
if (fromLiveScan && prefs.getBoolean(PreferencesActivity.KEY_BULK_MODE, false)) {
Toast.makeText(getApplicationContext(),
getResources().getString(R.string.msg_bulk_mode_scanned) + " (" + rawResult.getText() + ')',
Toast.LENGTH_SHORT).show();
// Wait a moment or else it will scan the same barcode continuously about 3 times
// restartPreviewAfterDelay(BULK_MODE_SCAN_DELAY_MS);
} else {
handleDecodeInternally(rawResult, resultHandler, barcode);
}
break;
}
}
/**
* Superimpose a line for 1D or dots for 2D to highlight the key features of the barcode.
*
* #param barcode A bitmap of the captured image.
* #param scaleFactor amount by which thumbnail was scaled
* #param rawResult The decoded results which contains the points to draw.
*/
private void drawResultPoints(Bitmap barcode, float scaleFactor, Result rawResult) {
ResultPoint[] points = rawResult.getResultPoints();
if (points != null && points.length > 0) {
Canvas canvas = new Canvas(barcode);
Paint paint = new Paint();
paint.setColor(getResources().getColor(R.color.result_points));
if (points.length == 2) {
paint.setStrokeWidth(4.0f);
drawLine(canvas, paint, points[0], points[1], scaleFactor);
} else if (points.length == 4 &&
(rawResult.getBarcodeFormat() == BarcodeFormat.UPC_A ||
rawResult.getBarcodeFormat() == BarcodeFormat.EAN_13)) {
// Hacky special case -- draw two lines, for the barcode and metadata
drawLine(canvas, paint, points[0], points[1], scaleFactor);
drawLine(canvas, paint, points[2], points[3], scaleFactor);
} else {
paint.setStrokeWidth(10.0f);
for (ResultPoint point : points) {
if (point != null) {
canvas.drawPoint(scaleFactor * point.getX(), scaleFactor * point.getY(), paint);
}
}
}
}
}
private static void drawLine(Canvas canvas, Paint paint, ResultPoint a, ResultPoint b, float scaleFactor) {
if (a != null && b != null) {
canvas.drawLine(scaleFactor * a.getX(),
scaleFactor * a.getY(),
scaleFactor * b.getX(),
scaleFactor * b.getY(),
paint);
}
}
// Put up our own UI for how to handle the decoded contents.
private void handleDecodeInternally(Result rawResult, ResultHandler resultHandler, Bitmap barcode) {
CharSequence displayContents = resultHandler.getDisplayContents();
if (copyToClipboard && !resultHandler.areContentsSecure()) {
ClipboardInterface.setText(displayContents, this);
}
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
if (resultHandler.getDefaultButtonID() != null && prefs.getBoolean(PreferencesActivity.KEY_AUTO_OPEN_WEB, false)) {
resultHandler.handleButtonPress(resultHandler.getDefaultButtonID());
return;
}
statusView.setVisibility(View.VISIBLE);
viewfinderView.setVisibility(View.VISIBLE);
resultView.setVisibility(View.VISIBLE);
/*ImageView barcodeImageView = (ImageView) findViewById(R.id.barcode_image_view);
if (barcode == null) {
barcodeImageView.setImageBitmap(BitmapFactory.decodeResource(getResources(),
R.drawable.launcher_icon));
} else {
barcodeImageView.setImageBitmap(barcode);
})*/
TextView formatTextView = (TextView) findViewById(R.id.format_text_view);
formatTextView.setText(rawResult.getBarcodeFormat().toString());
TextView typeTextView = (TextView) findViewById(R.id.type_text_view);
typeTextView.setText(resultHandler.getType().toString());
DateFormat formatter = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT);
TextView timeTextView = (TextView) findViewById(R.id.time_text_view);
timeTextView.setText(formatter.format(new Date(rawResult.getTimestamp())));
TextView metaTextView = (TextView) findViewById(R.id.meta_text_view);
View metaTextViewLabel = findViewById(R.id.meta_text_view_label);
metaTextView.setVisibility(View.GONE);
metaTextViewLabel.setVisibility(View.GONE);
Map<ResultMetadataType,Object> metadata = rawResult.getResultMetadata();
if (metadata != null) {
StringBuilder metadataText = new StringBuilder(20);
for (Map.Entry<ResultMetadataType,Object> entry : metadata.entrySet()) {
if (DISPLAYABLE_METADATA_TYPES.contains(entry.getKey())) {
metadataText.append(entry.getValue()).append('\n');
}
}
if (metadataText.length() > 0) {
metadataText.setLength(metadataText.length() - 1);
metaTextView.setText(metadataText);
metaTextView.setVisibility(View.VISIBLE);
metaTextViewLabel.setVisibility(View.VISIBLE);
}
}
TextView contentsTextView = (TextView) findViewById(R.id.contents_text_view);
contentsTextView.setText(displayContents);
int scaledSize = Math.max(22, 32 - displayContents.length() / 4);
contentsTextView.setTextSize(TypedValue.COMPLEX_UNIT_SP, scaledSize);
TextView supplementTextView = (TextView) findViewById(R.id.contents_supplement_text_view);
supplementTextView.setText("");
supplementTextView.setOnClickListener(null);
if (PreferenceManager.getDefaultSharedPreferences(this).getBoolean(
PreferencesActivity.KEY_SUPPLEMENTAL, true)) {
SupplementalInfoRetriever.maybeInvokeRetrieval(supplementTextView,
resultHandler.getResult(),
historyManager,
this);
}
int buttonCount = resultHandler.getButtonCount();
ViewGroup buttonView = (ViewGroup) findViewById(R.id.result_button_view);
buttonView.requestFocus();
for (int x = 0; x < ResultHandler.MAX_BUTTON_COUNT; x++) {
TextView button = (TextView) buttonView.getChildAt(x);
if (x < buttonCount) {
button.setVisibility(View.VISIBLE);
button.setText(resultHandler.getButtonText(x));
button.setOnClickListener(new ResultButtonListener(resultHandler, x));
} else {
button.setVisibility(View.GONE);
}
}
}
// Briefly show the contents of the barcode, then handle the result outside Barcode Scanner.
private void handleDecodeExternally(Result rawResult, ResultHandler resultHandler, Bitmap barcode) {
if (barcode != null) {
viewfinderView.drawResultBitmap(barcode);
}
long resultDurationMS;
if (getIntent() == null) {
resultDurationMS = 0;
} else {
resultDurationMS = getIntent().getLongExtra(Intents.Scan.RESULT_DISPLAY_DURATION_MS,
0);
}
if (resultDurationMS > 0) {
String rawResultString = String.valueOf(rawResult);
if (rawResultString.length() > 32) {
rawResultString = rawResultString.substring(0, 32) + " ...";
}
statusView.setText(getString(resultHandler.getDisplayTitle()) + " : " + rawResultString);
}
if (copyToClipboard && !resultHandler.areContentsSecure()) {
CharSequence text = resultHandler.getDisplayContents();
ClipboardInterface.setText(text, this);
}
if (source == IntentSource.NATIVE_APP_INTENT) {
// Hand back whatever action they requested - this can be changed to Intents.Scan.ACTION when
// the deprecated intent is retired.
Intent intent = new Intent(getIntent().getAction());
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
intent.putExtra(Intents.Scan.RESULT, rawResult.toString());
intent.putExtra(Intents.Scan.RESULT_FORMAT, rawResult.getBarcodeFormat().toString());
setResult(Activity.RESULT_OK,intent);
byte[] rawBytes = rawResult.getRawBytes();
if (rawBytes != null && rawBytes.length > 0) {
intent.putExtra(Intents.Scan.RESULT_BYTES, rawBytes);
}
Map<ResultMetadataType,?> metadata = rawResult.getResultMetadata();
if (metadata != null) {
if (metadata.containsKey(ResultMetadataType.UPC_EAN_EXTENSION)) {
intent.putExtra(Intents.Scan.RESULT_UPC_EAN_EXTENSION,
metadata.get(ResultMetadataType.UPC_EAN_EXTENSION).toString());
}
Number orientation = (Number) metadata.get(ResultMetadataType.ORIENTATION);
if (orientation != null) {
intent.putExtra(Intents.Scan.RESULT_ORIENTATION, orientation.intValue());
}
String ecLevel = (String) metadata.get(ResultMetadataType.ERROR_CORRECTION_LEVEL);
if (ecLevel != null) {
intent.putExtra(Intents.Scan.RESULT_ERROR_CORRECTION_LEVEL, ecLevel);
}
#SuppressWarnings("unchecked")
Iterable<byte[]> byteSegments = (Iterable<byte[]>) metadata.get(ResultMetadataType.BYTE_SEGMENTS);
if (byteSegments != null) {
int i = 0;
for (byte[] byteSegment : byteSegments) {
intent.putExtra(Intents.Scan.RESULT_BYTE_SEGMENTS_PREFIX + i, byteSegment);
i++;
}
}
}
sendReplyMessage(R.id.return_scan_result, intent, 0);
} else if (source == IntentSource.PRODUCT_SEARCH_LINK) {
// Reformulate the URL which triggered us into a query, so that the request goes to the same
// TLD as the scan URL.
int end = sourceUrl.lastIndexOf("/scan");
String replyURL = sourceUrl.substring(0, end) + "?q=" + resultHandler.getDisplayContents() + "&source=zxing";
sendReplyMessage(R.id.launch_product_query, replyURL, 0);
} else if (source == IntentSource.ZXING_LINK) {
if (scanFromWebPageManager != null && scanFromWebPageManager.isScanFromWebPage()) {
String replyURL = scanFromWebPageManager.buildReplyURL(rawResult, resultHandler);
sendReplyMessage(R.id.launch_product_query, replyURL, 0);
}
}
}
private void sendReplyMessage(int id, Object arg, long delayMS) {
if (handler != null) {
Message message = Message.obtain(handler, id, arg);
if (delayMS > 0L) {
handler.sendMessageDelayed(message, delayMS);
} else {
handler.sendMessage(message);
}
}
}
private void initCamera(SurfaceHolder surfaceHolder) {
if (surfaceHolder == null) {
throw new IllegalStateException("No SurfaceHolder provided");
}
if (cameraManager.isOpen()) {
Log.w(TAG, "initCamera() while already open -- late SurfaceView callback?");
return;
}
try {
cameraManager.openDriver(surfaceHolder);
// Creating the handler starts the preview, which can also throw a RuntimeException.
if (handler == null) {
handler = new CaptureActivityHandler(this, decodeFormats, decodeHints, characterSet, cameraManager);
}
decodeOrStoreSavedBitmap(null, null);
} catch (IOException ioe) {
Log.w(TAG, ioe);
displayFrameworkBugMessageAndExit();
} catch (RuntimeException e) {
// Barcode Scanner has seen crashes in the wild of this variety:
// java.?lang.?RuntimeException: Fail to connect to camera service
Log.w(TAG, "Unexpected error initializing camera", e);
displayFrameworkBugMessageAndExit();
}
}
private void displayFrameworkBugMessageAndExit() {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle(getString(R.string.app_name));
builder.setMessage(getString(R.string.msg_camera_framework_bug));
builder.setPositiveButton(R.string.button_ok, new FinishListener(this));
builder.setOnCancelListener(new FinishListener(this));
builder.show();
}
public void restartPreviewAfterDelay(long delayMS) {
if (handler != null) {
handler.sendEmptyMessageDelayed(R.id.restart_preview, delayMS);
}
resetStatusView();
}
private void resetStatusView() {
resultView.setVisibility(View.GONE);
statusView.setText(R.string.msg_default_status);
statusView.setVisibility(View.VISIBLE);
viewfinderView.setVisibility(View.VISIBLE);
lastResult = null;
}
public void drawViewfinder() {
viewfinderView.drawViewfinder();
}
}
I set RESULT_OK in
if (source == IntentSource.NATIVE_APP_INTENT) {
Intent intent = new Intent(getIntent().getAction());
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
intent.putExtra(Intents.Scan.RESULT, rawResult.toString());
intent.putExtra(Intents.Scan.RESULT_FORMAT, rawResult.getBarcodeFormat().toString());
setResult(Activity.RESULT_OK,intent)
......
What am I doing wrong?
And one more question: when I call the scan with com.google.zxing.client.android.SCAN, the activity does not close immediately after the barcode is captured; it waits about 500-1000 milliseconds. How can I disable this?
Thanks for answers.
Just add the third line here while starting the intent; that's all, no need for any change in the ZXing code, these guys rock:
Intent intent = new Intent(getApplicationContext(), CaptureActivity.class);
intent.putExtra("SCAN_FORMATS", "QR_CODE,EAN_13,EAN_8,RSS_14,UPC_A,UPC_E,CODE_39,CODE_93,CODE_128,ITF,CODABAR,DATA_MATRIX");
intent.setAction(Intents.Scan.ACTION);
startActivityForResult(intent, 0);
By the way I'm using Zxing version 3.2.0
I'd just like to add the code that I used to receive the output from ZXing:
public void onActivityResult(int requestCode, int resultCode, Intent intent) {
if (resultCode == Activity.RESULT_OK) {
String contents = intent.getStringExtra(Intents.Scan.RESULT);
String formatName = intent.getStringExtra(Intents.Scan.RESULT_FORMAT);
tvStatus.setText(formatName);
tvScanResults.setText(contents + "\n\n" + formatName);
} else if (resultCode == Activity.RESULT_CANCELED) {
}
}
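Regarding the second question about the 500-1000 ms delay: as far as I can tell that pause is the "result display duration" the scanner waits before returning. The CaptureActivity posted above reads it from the intent (see the Intents.Scan.RESULT_DISPLAY_DURATION_MS lookup in handleDecodeExternally()), so passing 0 should make it return immediately. A small, untested sketch:
// Ask CaptureActivity not to linger on the result screen before finishing.
Intent intent = new Intent(Intents.Scan.ACTION);
intent.putExtra("SCAN_FORMATS", "QR_CODE,EAN_13,EAN_8,UPC_A,CODE_39,CODE_128");
intent.putExtra(Intents.Scan.RESULT_DISPLAY_DURATION_MS, 0L);
startActivityForResult(intent, 0);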
