My app does image capture with face recognition. What I need to implement is converting the byte-array image into a Bitmap and passing that Bitmap to the next Activity. The next Activity should receive the Bitmap, show it in an ImageView, and then convert the Bitmap to Base64.
I don't know how to implement the above in my existing code.
So, when an image is captured it is shown; once the save button is clicked, the image should be passed to the next Activity and converted from Bitmap to Base64 there.
FaceTrackerActivity.java
public final class FaceTrackerActivity extends AppCompatActivity implements View.OnClickListener {
private static final String TAG = "FaceTracker";
private CameraSource mCameraSource = null;
private CameraSourcePreview mPreview;
private GraphicOverlay mGraphicOverlay;
private ImageView btnCapture;
private ImageView btnChangeCamera;
private ImageView btnCancel;
private ImageView btnSave;
private FrameLayout frmResult;
private ImageView imgPicture;
Bitmap bmp;
private static final int RC_HANDLE_GMS = 9001;
// permission request codes need to be < 256
private static final int RC_HANDLE_CAMERA_PERM = 2;
private static final int RC_HANDLE_WRITE_EXTERNAL_STORAGE_PERM = 3;
private int cameraId = CameraSource.CAMERA_FACING_BACK;
Intent x;
//==============================================================================================
// Activity Methods
//==============================================================================================
/**
* Initializes the UI and initiates the creation of a face detector.
*/
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
setContentView(R.layout.main);
mPreview = (CameraSourcePreview) findViewById(R.id.preview);
mGraphicOverlay = (GraphicOverlay) findViewById(R.id.faceOverlay);
btnCapture = (ImageView) findViewById(R.id.btn_capture);
btnChangeCamera = (ImageView) findViewById(R.id.btn_change_camera);
btnCancel = (ImageView) findViewById(R.id.btn_cancel);
btnSave = (ImageView) findViewById(R.id.btn_save);
frmResult = (FrameLayout) findViewById(R.id.frm_capture_result);
imgPicture = (ImageView) findViewById(R.id.img_capture_result);
boolean hasPermissionCamera = (ContextCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED);
if (!hasPermissionCamera) {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, RC_HANDLE_CAMERA_PERM);
} else {
boolean hasPermissionWriteStorage = (ContextCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.WRITE_EXTERNAL_STORAGE) == PackageManager.PERMISSION_GRANTED);
if (!hasPermissionWriteStorage) {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, RC_HANDLE_WRITE_EXTERNAL_STORAGE_PERM);
} else {
createCameraSource(cameraId);
}
}
btnCapture.setOnClickListener(this);
btnChangeCamera.setOnClickListener(this);
}
private void createCameraSource(int cameraId) {
Context context = getApplicationContext();
FaceDetector detector = new FaceDetector.Builder(context)
.setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
.build();
detector.setProcessor(
new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory())
.build());
if (!detector.isOperational()) {
Log.w(TAG, "Face detector dependencies are not yet available.");
}
DisplayMetrics metrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metrics);
mCameraSource = new CameraSource.Builder(context, detector)
.setRequestedPreviewSize(metrics.heightPixels, metrics.widthPixels)
.setFacing(cameraId)
.setAutoFocusEnabled(true)
.setRequestedFps(30.0f)
.build();
}
@Override
protected void onResume() {
super.onResume();
startCameraSource();
}
@Override
protected void onPause() {
super.onPause();
mPreview.stop();
}
@Override
protected void onDestroy() {
super.onDestroy();
if (mCameraSource != null) {
mCameraSource.release();
}
}
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
switch (requestCode) {
case RC_HANDLE_CAMERA_PERM: {
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
finish();
startActivity(getIntent());
}
break;
}
case RC_HANDLE_WRITE_EXTERNAL_STORAGE_PERM: {
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
finish();
startActivity(getIntent());
}
break;
}
}
}
private void startCameraSource() {
// check that the device has play services available.
int code = GoogleApiAvailability.getInstance().isGooglePlayServicesAvailable(
getApplicationContext());
if (code != ConnectionResult.SUCCESS) {
Dialog dlg = GoogleApiAvailability.getInstance().getErrorDialog(this, code, RC_HANDLE_GMS);
dlg.show();
}
if (mCameraSource != null) {
try {
mPreview.start(mCameraSource, mGraphicOverlay);
} catch (IOException e) {
Log.e(TAG, "Unable to start camera source.", e);
mCameraSource.release();
mCameraSource = null;
}
}
}
@Override
public void onClick(View v) {
int id = v.getId();
if (id == R.id.btn_capture) {
Toast.makeText(this, "capture", Toast.LENGTH_SHORT).show();
mCameraSource.takePicture(null, new CameraSource.PictureCallback() {
@Override
public void onPictureTaken(final byte[] bytes) {
int orientation = Exif.getOrientation(bytes);
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 2;
options.inPreferredConfig = Bitmap.Config.ARGB_8888;
bmp = BitmapFactory.decodeByteArray(bytes, 0, bytes.length, options);
switch (orientation) {
case 90:
bmp = Utils.rotateImage(bmp, 90);
break;
case 180:
bmp = Utils.rotateImage(bmp, 180);
break;
case 270:
bmp = Utils.rotateImage(bmp, 270);
break;
}
if (cameraId == CameraSource.CAMERA_FACING_FRONT) {
bmp = Utils.flip(bmp, Constants.FLIP_HORIZONTAL);
}
if (bmp != null) {
frmResult.setVisibility(View.VISIBLE);
imgPicture.setImageBitmap(bmp);
btnCancel.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
frmResult.setVisibility(View.GONE);
}
});
btnSave.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
SavePhotoTask savePhotoTask = new SavePhotoTask();
savePhotoTask.execute(bytes);
}
});
}
}
});
} else if (id == R.id.btn_change_camera) {
mCameraSource.release();
mCameraSource = null;
cameraId = cameraId == CameraSource.CAMERA_FACING_BACK ? CameraSource.CAMERA_FACING_FRONT : CameraSource.CAMERA_FACING_BACK;
createCameraSource(cameraId);
startCameraSource();
}
}
private class GraphicFaceTrackerFactory implements MultiProcessor.Factory<Face> {
@Override
public Tracker<Face> create(Face face) {
return new GraphicFaceTracker(mGraphicOverlay);
}
}
private class GraphicFaceTracker extends Tracker<Face> {
private GraphicOverlay mOverlay;
private FaceGraphic mFaceGraphic;
GraphicFaceTracker(GraphicOverlay overlay) {
mOverlay = overlay;
mFaceGraphic = new FaceGraphic(overlay);
}
@Override
public void onNewItem(int faceId, Face item) {
mFaceGraphic.setId(faceId);
}
@Override
public void onUpdate(FaceDetector.Detections<Face> detectionResults, Face face) {
mOverlay.add(mFaceGraphic);
mFaceGraphic.updateFace(face);
}
@Override
public void onMissing(FaceDetector.Detections<Face> detectionResults) {
mOverlay.remove(mFaceGraphic);
}
@Override
public void onDone() {
mOverlay.remove(mFaceGraphic);
}
}
class SavePhotoTask extends AsyncTask<byte[], String, String> {
@Override
protected String doInBackground(byte[]... data) {
// Create an image file name
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
String imageFileName = "TRACKER_" + timeStamp + "_";
File storageDir = getExternalFilesDir(Environment.DIRECTORY_PICTURES);
try {
File photo = File.createTempFile(imageFileName, ".jpg", storageDir);
FileOutputStream fos = new FileOutputStream(photo.getPath());
fos.write(data[0]);
fos.close();
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
}}
NEXT.java
public class NEXT extends AppCompatActivity {
ImageView y;
Intent intent;
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.next);
y = (ImageView) findViewById(R.id.myimage);
Bitmap bitmap = (Bitmap) getIntent().getParcelableExtra("key"); // use getIntent(); the 'intent' field above is never initialized
y.setImageBitmap(bitmap);
}}
Putting large data (> 1 MB) into an Intent can produce android.os.TransactionTooLargeException, ref:
The Binder transaction buffer has a limited fixed size, currently
1Mb, which is shared by all transactions in progress for the
process. Consequently this exception can be thrown when there are many
transactions in progress even when most of the individual transactions
are of moderate size.
The best way is to create a class and put your image into it; you can then access it from any activity.
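For example, a minimal sketch of that idea (the BitmapHolder name and the Base64 step are assumptions based on your description, not part of your existing code): keep the captured Bitmap in a simple static holder, start the next Activity, and do the Base64 conversion there.
// Hypothetical holder class; any class both activities can see will do.
public final class BitmapHolder {
    private static Bitmap bitmap;
    public static void set(Bitmap b) { bitmap = b; }
    public static Bitmap get() { return bitmap; }
}

// In FaceTrackerActivity, inside btnSave's onClick (after the SavePhotoTask if you still need it):
BitmapHolder.set(bmp);
startActivity(new Intent(FaceTrackerActivity.this, NEXT.class));

// In NEXT.onCreate(), after setContentView() (needs android.util.Base64 and java.io.ByteArrayOutputStream):
Bitmap bitmap = BitmapHolder.get();
if (bitmap != null) {
    y.setImageBitmap(bitmap);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    bitmap.compress(Bitmap.CompressFormat.JPEG, 90, baos);
    String base64 = Base64.encodeToString(baos.toByteArray(), Base64.DEFAULT);
}
Remember to clear the holder (BitmapHolder.set(null)) once you are done with the image so the Bitmap is not kept in memory longer than necessary.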
I have a background service which is an accessibility service (don't think it makes a difference). I want to launch a new activity from this service. I have tried using this suggested solution:
Intent dialogIntent = new Intent(this, ScreenshotActivity.class);
dialogIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
getApplicationContext().startActivity(dialogIntent);
However, this does not seem to work. My logcat gives this output but the activity does not launch:
020-03-20 20:54:02.824 22977-22977/com.xx.xx I/Timeline: Timeline: Activity_launch_request time:19482980
My service and activity work properly independently. But not in this situation. The point from where I am making this call is reached as well. The device I am using for testing is running Android 9.
Edit: Main service code:
public class MyAccessibility extends AccessibilityService {
public static MyAccessibility instance;
@Override
public void onCreate() {
super.onCreate();
Log.i("myLog", "create accessibility");
}
@Override
public void onAccessibilityEvent(AccessibilityEvent accessibilityEvent) {
Log.i("myLog", "Event");
if (instance == null) {
Log.i("myLog", "Instance set to not null");
instance = this;
}
}
@Override
public void onInterrupt() {
}
@Override
protected void onServiceConnected() {
super.onServiceConnected();
AccessibilityServiceInfo info = new AccessibilityServiceInfo();
info.eventTypes = AccessibilityEvent.TYPES_ALL_MASK;
info.notificationTimeout = 100;
info.feedbackType = AccessibilityServiceInfo.FEEDBACK_ALL_MASK;
this.setServiceInfo(info);
Toast t = Toast.makeText(getApplicationContext(), "Accessibility Service is connected now", Toast.LENGTH_SHORT);
t.show();
System.out.println("Accessibility was connected!");
instance = this;
}
public void takeSS(){
Intent dialogIntent = new Intent(this, ScreenshotActivity.class);
dialogIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
this.startActivity(dialogIntent);
}
There are some other methods which use dispatchGesture. That is why I keep a static reference to the service instance, so I can access these methods from elsewhere. The screenshot activity uses MediaProjection to take the screenshot and use the image:
public class ScreenshotActivity extends Activity {
private static final int REQUEST_MEDIA_PROJECTION = 1;
private static final String TAG = "ScreenshotActivity";
private MediaProjectionManager mProjectionManager;
private MediaProjection mMediaProjection = null;
private VirtualDisplay mVirtualDisplay;
private ImageReader mImageReader;
private static final int MAX_IMAGE_BUFFER = 10;
private int counter;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_screenshot_dummy);
counter = 0;
OrientationChangedListener mOrientationChangedListener = new OrientationChangedListener(this);
mOrientationChangedListener.enable();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
mProjectionManager = (MediaProjectionManager)getSystemService(MEDIA_PROJECTION_SERVICE);
startActivityForResult(mProjectionManager.createScreenCaptureIntent(), REQUEST_MEDIA_PROJECTION);
}
}
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public void onActivityResult(int requestCode, int resultCode, Intent resultData) {
super.onActivityResult(requestCode, resultCode, resultData);
if (requestCode == REQUEST_MEDIA_PROJECTION) {
String message;
if (resultCode != Activity.RESULT_OK) {
message = "Media Projection Declined";
mMediaProjection = null;
} else {
message = "Media Projection Accepted";
mMediaProjection = mProjectionManager.getMediaProjection(resultCode, resultData);
attachImageCaptureOverlay();
}
Toast toast = Toast.makeText(this, message, Toast.LENGTH_SHORT);
toast.show();
}
}
private class OrientationChangedListener extends OrientationEventListener {
int mLastOrientation = -1;
OrientationChangedListener(Context context) {
super(context);
}
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
@Override
public void onOrientationChanged(int orientation) {
final int screenOrientation = getWindowManager().getDefaultDisplay().getRotation();
if (mVirtualDisplay == null) return;
if (mLastOrientation == screenOrientation) return;
mLastOrientation = screenOrientation;
detachImageCaptureOverlay();
attachImageCaptureOverlay();
}
}
private final ImageReader.OnImageAvailableListener mOnImageAvailableListener = new ImageReader.OnImageAvailableListener() {
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireLatestImage();
if (image == null || image.getPlanes().length <= 0) return;
final Image.Plane plane = image.getPlanes()[0];
final int rowPadding = plane.getRowStride() - plane.getPixelStride() * image.getWidth();
final int bitmapWidth = image.getWidth() + rowPadding / plane.getPixelStride();
final Bitmap tempBitmap = Bitmap.createBitmap(bitmapWidth, image.getHeight(), Bitmap.Config.ARGB_8888);
tempBitmap.copyPixelsFromBuffer(plane.getBuffer());
Rect cropRect = image.getCropRect();
final Bitmap bitmap = Bitmap.createBitmap(tempBitmap, cropRect.left, cropRect.top, cropRect.width(), cropRect.height());
//Do something with the bitmap
image.close();
}
};
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
private void attachImageCaptureOverlay() {
if (mMediaProjection == null) return;
final DisplayMetrics metrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getRealMetrics(metrics);
mImageReader = ImageReader.newInstance(metrics.widthPixels, metrics.heightPixels, PixelFormat.RGBA_8888, MAX_IMAGE_BUFFER);
mVirtualDisplay = mMediaProjection.createVirtualDisplay("ScreenCaptureTest",
metrics.widthPixels, metrics.heightPixels, metrics.densityDpi,
DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
mImageReader.getSurface(), null, null);
mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, null);
}
@RequiresApi(api = Build.VERSION_CODES.KITKAT)
private void detachImageCaptureOverlay() {
mVirtualDisplay.release();
mImageReader.close();
}
}
Starting an activity from the background is restricted on recent Android versions (fully blocked since Android 10, with background limits already in place since Oreo).
One workaround is to make your service a foreground service (call startForeground() with a notification when the service starts) and give that sticky notification an action that starts your desired activity via a PendingIntent.
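A rough sketch of that workaround, assuming androidx NotificationCompat; the channel id, notification id and icon are placeholders, not values from your project:
// Inside the accessibility service, e.g. in onServiceConnected():
NotificationManager nm = (NotificationManager) getSystemService(NOTIFICATION_SERVICE);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
    nm.createNotificationChannel(
            new NotificationChannel("screenshot", "Screenshot", NotificationManager.IMPORTANCE_DEFAULT));
}
Intent intent = new Intent(this, ScreenshotActivity.class);
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
// On API 31+ you also need PendingIntent.FLAG_IMMUTABLE or FLAG_MUTABLE.
PendingIntent pi = PendingIntent.getActivity(this, 0, intent, PendingIntent.FLAG_UPDATE_CURRENT);
Notification notification = new NotificationCompat.Builder(this, "screenshot")
        .setSmallIcon(android.R.drawable.ic_menu_camera)
        .setContentTitle("Take screenshot")
        .setContentIntent(pi)        // tapping the notification launches ScreenshotActivity
        .setOngoing(true)
        .build();
startForeground(1, notification);    // keeps the service in the foreground
With the service in the foreground, the user taps the notification to launch ScreenshotActivity instead of the service trying to launch it silently from the background.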
I am working on a filter activity in which there are 3 images at the bottom. By default, image 3 is selected, as it is the last one that was clicked.
This same activity holds latitude and longitude values, which are sent to the next activity to fetch nearby locations.
The problem I am facing: among those 3 images, when I apply a filter to image 1 and then select image 2, the activity gets restarted, and because of this the lat and long values are reset to 0.0 and 0.0 respectively.
Code -
public class MainActivity extends AppCompatActivity implements FiltersListFragment.FiltersListFragmentListener, EditImageFragment.EditImageFragmentListener {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.camera_activity_main_filter);
instagramFilterIntent = new Intent(this, MainActivity.class);
ButterKnife.bind(this);
getBundleValues();
loadImage();
setupViewPager(viewPager);
setBitmap(originalImage);
ivSave.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
saveImageToGallery();
startingActivtity();
}
});
ivClose.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
saveImageToGallery();
}
});
}
double latitude,longitude;
private void getBundleValues() {
Bundle extras = getIntent().getExtras();
if (extras != null) {
if (extras.containsKey("image")) {
Log.e("image", extras.getString("image"));
editingImage = extras.getString("image");
imgFile = new File(extras.getString("image"));
originalFile = imgFile;
if (imgFile.exists()) {
Bitmap myBitmap = BitmapFactory.decodeFile(imgFile.getAbsolutePath());
//Drawable d = new BitmapDrawable(getResources(), myBitmap);
originalImage = myBitmap;
}
}
if(extras.containsKey("PESDK")){
initiated=extras.getBoolean("PESDK");
}
if(extras.containsKey("Latitude")){
latitude=extras.getDouble("Latitude");
Toast.makeText(this, "lat"+latitude, Toast.LENGTH_SHORT).show();
}
if(extras.containsKey("Longitude")){
longitude=extras.getDouble("Longitude");
}
if (extras.containsKey("Filters")) {
filters = extras.getBoolean("Filters");
instagramFilterIntent.putExtra("Filters", filters);
}
if (extras.containsKey("Image1")) {
files[0] = extras.getString("Image1");
instagramFilterIntent.putExtra("Image1", files[0]);
}
if (extras.containsKey("Image2")) {
files[1] = extras.getString("Image2");
instagramFilterIntent.putExtra("Image2", files[1]);
}
if (extras.containsKey("Image3")) {
files[2] = extras.getString("Image3");
instagramFilterIntent.putExtra("Image3", files[2]);
}
initializeImages();
}
}
private void initializeImages() {
if (files[0] != null) {
//Glide.with(getApplicationContext()).load(files[0]).into(preview1Img);
Bitmap bitmap = BitmapFactory.decodeFile(files[0]);
preview1Img.setImageBitmap(bitmap);
if(editingImage.equalsIgnoreCase(files[0])) {
preview1B.setVisibility(View.VISIBLE);
}
}
else {
preview1Layout.setVisibility(View.GONE);
preview1B.setVisibility(View.INVISIBLE);
}
if (files[1] != null) {
//Glide.with(getApplicationContext()).load(files[1]).into(preview2Img);
Bitmap bitmap = BitmapFactory.decodeFile(files[1]);
preview2Img.setImageBitmap(bitmap);
if(editingImage.equalsIgnoreCase(files[1])) {
preview2B.setVisibility(View.VISIBLE);
}
}
else {
preview2Layout.setVisibility(View.GONE);
preview2B.setVisibility(View.INVISIBLE);
}
if (files[2] != null) {
//Glide.with(getApplicationContext()).load(files[2]).into(preview3Img);
Bitmap bitmap = BitmapFactory.decodeFile(files[2]);
preview3Img.setImageBitmap(bitmap);
if(editingImage.equalsIgnoreCase(files[2])){
preview3B.setVisibility(View.VISIBLE);
}
}
else {
preview3Layout.setVisibility(View.GONE);
preview3B.setVisibility(View.INVISIBLE);
}
}
public static void setBitmap(Bitmap originalImage) {
ORIGINAL = originalImage;
}
private void setupViewPager(ViewPager viewPager) {
ViewPagerAdapter adapter = new ViewPagerAdapter(getSupportFragmentManager());
// adding filter list fragment
filtersListFragment = new FiltersListFragment();
filtersListFragment.setInitiatedPESDK(initiated);
filtersListFragment.setListener(this);
// adding edit image fragment
editImageFragment = new EditImageFragment();
editImageFragment.setListener(this);
adapter.addFragment(filtersListFragment, getString(R.string.tab_filters));
adapter.addFragment(editImageFragment, getString(R.string.tab_edit));
//filtersListFragment.prepareThumbnail(originalImage);
viewPager.setAdapter(adapter);
}
@Override
public void onFilterSelected(Filter filter, ImageFilter imageFilter) {
// reset image controls
resetControls();
// applying the selected filter
filteredImage = originalImage.copy(Bitmap.Config.ARGB_8888, true);
// preview filtered image
if(filter!=null) {
imagePreview.setImageBitmap(filter.processFilter(filteredImage));
}else{
imagePreview.setImageBitmap(imageFilter.renderImage(filteredImage,false));
}
finalImage = filteredImage.copy(Bitmap.Config.ARGB_8888, true);
}
/**
* Resets image edit controls to normal when new filter
* is selected
*/
private void resetControls() {
if (editImageFragment != null) {
editImageFragment.resetControls();
}
brightnessFinal = 10;
saturationFinal = 1.0f;
contrastFinal = 1.0f;
}
/**
* Image View click listners
* */
public void image1Click(View view) {
if (!imgFile.toString().equalsIgnoreCase(files[0])) {
saveImageToGallery();
instagramFilterIntent.putExtra("image", files[0]);
instagramFilterIntent.putExtra("PESDK",initiated);
this.finish();
startActivity(instagramFilterIntent);
Toast.makeText(this, "lat"+latitude, Toast.LENGTH_SHORT).show();
}
}
public void image2Click(View view) {
if (!imgFile.toString().equalsIgnoreCase(files[1])) {
saveImageToGallery();
instagramFilterIntent.putExtra("image", files[1]);
instagramFilterIntent.putExtra("PESDK",initiated);
this.finish();
startActivity(instagramFilterIntent);
Toast.makeText(this, "lat"+latitude, Toast.LENGTH_SHORT).show();
}
}
public void image3Click(View view) {
if (!imgFile.toString().equalsIgnoreCase(files[2])) {
saveImageToGallery();
instagramFilterIntent.putExtra("image", files[2]);
instagramFilterIntent.putExtra("PESDK",initiated);
this.finish();
startActivity(instagramFilterIntent);
Toast.makeText(this, "lat"+latitude, Toast.LENGTH_SHORT).show();
}
}
class ViewPagerAdapter extends FragmentPagerAdapter {
private final List<Fragment> mFragmentList = new ArrayList<>();
private final List<String> mFragmentTitleList = new ArrayList<>();
public ViewPagerAdapter(FragmentManager manager) {
super(manager);
}
@Override
public Fragment getItem(int position) {
return mFragmentList.get(position);
}
@Override
public int getCount() {
return mFragmentList.size();
}
public void addFragment(Fragment fragment, String title) {
mFragmentList.add(fragment);
mFragmentTitleList.add(title);
}
@Override
public CharSequence getPageTitle(int position) {
return mFragmentTitleList.get(position);
}
}
// load the default image from assets on app launch
private void loadImage() {
// TODO
// originalImage = BitmapUtils.getBitmapFromGallery(this, Uri.parse(IMAGE_NAME), 300, 300);
filteredImage = originalImage.copy(Bitmap.Config.ARGB_8888, true);
finalImage = originalImage.copy(Bitmap.Config.ARGB_8888, true);
imagePreview.setImageBitmap(originalImage);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (resultCode == RESULT_OK && requestCode == SELECT_GALLERY_IMAGE) {
Log.e("data.getData()", data.getData() + "");
Bitmap bitmap = BitmapUtils.getBitmapFromGallery(this, data.getData(), 800, 800);
// clear bitmap memory
originalImage.recycle();
finalImage.recycle();
finalImage.recycle();
originalImage = bitmap.copy(Bitmap.Config.ARGB_8888, true);
filteredImage = originalImage.copy(Bitmap.Config.ARGB_8888, true);
finalImage = originalImage.copy(Bitmap.Config.ARGB_8888, true);
imagePreview.setImageBitmap(originalImage);
bitmap.recycle();
// render selected image thumbnails
filtersListFragment.prepareThumbnail(originalImage);
}
}
/*
* saves image to camera gallery
* */
private void saveImageToGallery() {
Dexter.withActivity(this).withPermissions(Manifest.permission.READ_EXTERNAL_STORAGE, Manifest.permission.WRITE_EXTERNAL_STORAGE)
.withListener(new MultiplePermissionsListener() {
@Override
public void onPermissionsChecked(MultiplePermissionsReport report) {
if (report.areAllPermissionsGranted()) {
if (originalFile.exists()) {
originalFile.delete();
}
// final String path = BitmapUtils.insertImage(getContentResolver(), finalImage, System.currentTimeMillis() + "_profile.jpg", null);
new MainActivity.SaveImageTask(finalImage, originalFile).execute();
//finish();
}
else {
Toast.makeText(getApplicationContext(), "Permissions are not granted!", Toast.LENGTH_SHORT).show();
}
}
@Override
public void onPermissionRationaleShouldBeShown(List<PermissionRequest> permissions, PermissionToken token) {
token.continuePermissionRequest();
}
}).check();
}
private class SaveImageTask extends AsyncTask<byte[], Void, Void> {
Bitmap finalBitmap;
File name;
public SaveImageTask(Bitmap finalBitmap, File name) {
this.finalBitmap = finalBitmap;
this.name = name;
}
@Override
protected Void doInBackground(byte[]... data) {
FileOutputStream outStream = null;
// Write to SD Card
try {
File file = name;
if (file.exists()) {
} else {
try {
FileOutputStream out = new FileOutputStream(file);
finalBitmap.compress(Bitmap.CompressFormat.JPEG, 90, out);
out.flush();
out.close();
} catch (Exception e) {
e.printStackTrace();
}
}
} catch (Exception e) {
e.printStackTrace();
} finally {
}
return null;
}
@Override
protected void onPostExecute(Void aVoid) {
super.onPostExecute(aVoid);
Log.e("FIlter MainACTIVITY", "SAVED");
Toast.makeText(getApplicationContext(), "Image Saved", Toast.LENGTH_SHORT).show();
}
}
public void startingActivtity(){
Intent chekInActivity = new Intent(MainActivity.this, CurrentLocationPlaces.class);
if(files[0] != null) {
chekInActivity.putExtra("Image1",files[0].toString());
}
if(files[1] != null) {
chekInActivity.putExtra("Image2",files[1].toString());
}
if(files[2] != null) {
chekInActivity.putExtra("Image3",files[2].toString());
}
chekInActivity.putExtra("source","gallery");
chekInActivity.putExtra("Latitude", latitude);
chekInActivity.putExtra("Longitude", longitude);
startActivity(chekInActivity);
}
}
May I know how to either stop the activity from restarting when an image is selected for preview/filtering, or how to save the lat and long values so that they don't get reset when the activity restarts?
If the value should be held across activity restarts, you should persist it, for example in SharedPreferences.
Basically, in your onCreate load the values and assign them to your TextView (or other component). When they change, just update the preference value.
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
// Load the lat/lng (SharedPreferences has no getDouble(), so the values are stored as floats here)
SharedPreferences preferences = getPreferences(MODE_PRIVATE);
double lat = preferences.getFloat("latitude", 0f);
double lon = preferences.getFloat("longitude", 0f);
}
Then, wherever your application updates that value, simply update the preference value as well.
private void updatePreference(double lat, double lng) {
SharedPreferences preferences = getPreferences(MODE_PRIVATE);
SharedPreferences.Editor editor = preferences.edit();
// SharedPreferences has no putDouble(), so store the values as floats
editor.putFloat("latitude", (float) lat);
editor.putFloat("longitude", (float) lng);
editor.commit();
}
That will hold your values and allow you to update/interface with them across activity restarts.
SEE: SharedPreferences
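If the float precision loss matters for your coordinates, a small pair of helpers (the names putDouble/getDouble are mine, not part of the SharedPreferences API) can store the full double as raw long bits:
private static void putDouble(SharedPreferences.Editor editor, String key, double value) {
    editor.putLong(key, Double.doubleToRawLongBits(value));
}

private static double getDouble(SharedPreferences prefs, String key, double defaultValue) {
    long bits = prefs.getLong(key, Double.doubleToRawLongBits(defaultValue));
    return Double.longBitsToDouble(bits);
}
Usage: putDouble(editor, "latitude", lat); editor.apply(); and later double lat = getDouble(preferences, "latitude", 0);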
I have a problem printing an image to an AGPtEK 58mm Mini Bluetooth Pocket POS Thermal Receipt Printer. I am converting WebView content into an image (this works fine), and after that I want to print it with this printer, but it prints only a solid black background. Here is my code:
public class PrintDemo extends Activity {
private static final int REQUEST_ENABLE_BT = 2;
private static final int REQUEST_CONNECT_DEVICE = 1;
private static final int PERMISSIONS_REQUEST_BLUETOOTH = 1;
private static final String TAG_REQUEST_PERMISSION = "Request permission";
private static final int PERMISSIONS_REQUEST_INTERNET = 0;
private static final int PERMISSIONS_REQUEST_BT_ADMIN = 2;
private static final int PERMISSIONS_REQUEST_LOCATION = 3;
private static final String WEB_SITE = "Remembered Web Site";
private static final String IS_CHECKED = "Check box";
@Bind(R.id.btn_search)
Button btnSearch;
@Bind(R.id.btn_print)
Button btnSendDraw;
@Bind(R.id.btn_open)
Button btnSend;
@Bind(R.id.btn_close)
Button btnClose;
private final Handler mHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case BluetoothService.MESSAGE_STATE_CHANGE:
switch (msg.arg1) {
case BluetoothService.STATE_CONNECTED:
Toast.makeText(getApplicationContext(), "Connect successful",
Toast.LENGTH_SHORT).show();
btnClose.setEnabled(true);
btnSend.setEnabled(true);
btnSendDraw.setEnabled(true);
break;
case BluetoothService.STATE_CONNECTING:
Log.d("State", "Connecting...");
break;
case BluetoothService.STATE_LISTEN:
case BluetoothService.STATE_NONE:
Log.d("State", "Not found");
break;
}
break;
case BluetoothService.MESSAGE_CONNECTION_LOST:
Toast.makeText(getApplicationContext(), "Device connection was lost",
Toast.LENGTH_SHORT).show();
btnClose.setEnabled(false);
btnSend.setEnabled(true);
btnSendDraw.setEnabled(false);
break;
case BluetoothService.MESSAGE_UNABLE_CONNECT:
Toast.makeText(getApplicationContext(), "Unable to connect device",
Toast.LENGTH_SHORT).show();
break;
}
}
};
String path;
File dir;
File file;
@Bind(R.id.check_box)
CheckBox checkBox;
@Bind(R.id.txt_content)
EditText edtContext;
@Bind(R.id.web_view)
WebView webView;
BluetoothService mService;
BluetoothDevice con_dev;
private SharedPreferences sharedPref;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Fabric.with(this, new Crashlytics());
setContentView(R.layout.main);
ButterKnife.bind(this);
mService = new BluetoothService(this, mHandler);
if (!mService.isAvailable()) {
Toast.makeText(this, "Bluetooth is not available", Toast.LENGTH_LONG).show();
finish();
}
webView.getSettings().setJavaScriptEnabled(true);
webView.getSettings().setDefaultTextEncodingName("utf-8");
webView.setWebViewClient(new WebViewClient() {
@SuppressLint("SdCardPath")
@Override
public void onPageFinished(final WebView view, String url) {
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
siteToImage(view);
}
}, 5000);
}
});
sharedPref = this.getPreferences(Context.MODE_PRIVATE);
checkPermissions();
}
private void siteToImage(WebView view) {
Picture picture = view.capturePicture();
Bitmap b = Bitmap.createBitmap(
picture.getWidth(), picture.getHeight(), Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(b);
picture.draw(c);
FileOutputStream fos;
try {
path = Environment.getExternalStorageDirectory().toString();
dir = new File(path, "/PrintDemo/media/img/");
if (!dir.isDirectory()) {
dir.mkdirs();
}
String arquivo = "darf_" + System.currentTimeMillis() + ".jpg";
file = new File(dir, arquivo);
fos = new FileOutputStream(file);
String imagePath = file.getAbsolutePath();
//scan the image so show up in album
MediaScannerConnection.scanFile(PrintDemo.this, new String[]{imagePath},
null, new MediaScannerConnection.OnScanCompletedListener() {
public void onScanCompleted(String path, Uri uri) {
}
});
if (fos != null) {
b.compress(Bitmap.CompressFormat.JPEG, 90, fos);
fos.close();
}
} catch (Exception e) {
e.printStackTrace();
}
}
private void setRememberedWeb() {
if (checkBox.isChecked()) {
String rememberedWeb = sharedPref.getString(WEB_SITE, "");
if (!rememberedWeb.equals("")) {
edtContext.setText(rememberedWeb);
}
}
}
@Override
protected void onPause() {
super.onPause();
saveState(checkBox.isChecked());
}
@Override
protected void onResume() {
super.onResume();
checkBox.setChecked(load());
setRememberedWeb();
}
private void saveState(boolean isChecked) {
SharedPreferences.Editor editor = sharedPref.edit();
editor.putBoolean(IS_CHECKED, isChecked);
if (isChecked) {
editor.putString(WEB_SITE, edtContext.getText().toString());
} else {
editor.putString(WEB_SITE, getString(R.string.txt_content));
}
editor.apply();
}
private boolean load() {
return sharedPref.getBoolean(IS_CHECKED, false);
}
private boolean checkPermissions() {
int permissionCheck =
ContextCompat.checkSelfPermission(this, Manifest.permission.BLUETOOTH);
int permissionInternet =
ContextCompat.checkSelfPermission(this, Manifest.permission.INTERNET);
int permissionBTAdmin =
ContextCompat.checkSelfPermission(this, Manifest.permission.BLUETOOTH_ADMIN);
int permissionLocation =
ContextCompat.checkSelfPermission(this, Manifest.permission.ACCESS_COARSE_LOCATION);
if (permissionCheck == PackageManager.PERMISSION_DENIED) {
edtContext.setText(R.string.no_bluetooth_permissions);
if (ActivityCompat.shouldShowRequestPermissionRationale(this,
Manifest.permission.BLUETOOTH)) {
Toast.makeText(PrintDemo.this, TAG_REQUEST_PERMISSION, Toast.LENGTH_SHORT).show();
} else {
requestBTPermission();
}
return false;
} else if (permissionInternet == PackageManager.PERMISSION_DENIED) {
edtContext.setText(R.string.no_internet_permissions);
if (ActivityCompat.shouldShowRequestPermissionRationale(this,
Manifest.permission.INTERNET)) {
Toast.makeText(PrintDemo.this, TAG_REQUEST_PERMISSION, Toast.LENGTH_SHORT).show();
} else {
requestInternetPermission();
}
return false;
} else if (permissionBTAdmin == PackageManager.PERMISSION_DENIED) {
edtContext.setText(R.string.no_bt_admin_permissions);
if (ActivityCompat.shouldShowRequestPermissionRationale(this,
Manifest.permission.INTERNET)) {
Toast.makeText(PrintDemo.this, TAG_REQUEST_PERMISSION, Toast.LENGTH_SHORT).show();
} else {
requestBTAdminPermission();
}
return false;
} else if (permissionLocation == PackageManager.PERMISSION_DENIED) {
edtContext.setText(R.string.no_location_permissions);
if (ActivityCompat.shouldShowRequestPermissionRationale(this,
Manifest.permission.ACCESS_COARSE_LOCATION)) {
Toast.makeText(PrintDemo.this, TAG_REQUEST_PERMISSION, Toast.LENGTH_SHORT).show();
} else {
requestLocationPermission();
}
return false;
} else {
return true;
}
}
private void requestLocationPermission() {
ActivityCompat
.requestPermissions(this, new String[]{Manifest.permission.ACCESS_COARSE_LOCATION},
PERMISSIONS_REQUEST_LOCATION);
}
private void requestBTAdminPermission() {
ActivityCompat
.requestPermissions(this, new String[]{Manifest.permission.BLUETOOTH_ADMIN},
PERMISSIONS_REQUEST_BT_ADMIN);
}
private void requestInternetPermission() {
ActivityCompat
.requestPermissions(this, new String[]{Manifest.permission.INTERNET},
PERMISSIONS_REQUEST_INTERNET);
}
private void requestBTPermission() {
ActivityCompat
.requestPermissions(this, new String[]{Manifest.permission.BLUETOOTH},
PERMISSIONS_REQUEST_BLUETOOTH);
}
@Override
public void onStart() {
super.onStart();
if (!mService.isBTopen()) {
Intent enableIntent = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE);
startActivityForResult(enableIntent, REQUEST_ENABLE_BT);
}
try {
btnSendDraw = (Button) this.findViewById(R.id.btn_print);
btnSendDraw.setOnClickListener(new ClickEvent());
btnSearch = (Button) this.findViewById(R.id.btn_search);
btnSearch.setOnClickListener(new ClickEvent());
btnSend = (Button) this.findViewById(R.id.btn_open);
btnSend.setOnClickListener(new ClickEvent());
btnClose = (Button) this.findViewById(R.id.btn_close);
btnClose.setOnClickListener(new ClickEvent());
edtContext = (EditText) findViewById(R.id.txt_content);
btnClose.setEnabled(false);
btnSend.setEnabled(true);
btnSendDraw.setEnabled(false);
} catch (Exception ex) {
Log.e("TAG", ex.getMessage());
}
}
@Override
protected void onDestroy() {
super.onDestroy();
if (mService != null)
mService.stop();
mService = null;
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
switch (requestCode) {
case REQUEST_ENABLE_BT:
if (resultCode == Activity.RESULT_OK) {
Toast.makeText(this, "Bluetooth open successful", Toast.LENGTH_LONG).show();
} else {
finish();
}
break;
case REQUEST_CONNECT_DEVICE:
if (resultCode == Activity.RESULT_OK) {
String address = data.getExtras()
.getString(DeviceListActivity.EXTRA_DEVICE_ADDRESS);
con_dev = mService.getDevByMac(address);
mService.connect(con_dev);
}
break;
}
}
@SuppressLint("SdCardPath")
private void printImage() {
byte[] sendData;
PrintPic pg = new PrintPic();
pg.initCanvas(384);
pg.initPaint();
pg.drawImage(0, 0, file.getPath());
sendData = pg.printDraw();
mService.write(sendData);
}
public void downloadContent() {
if (!edtContext.getText().toString().equals("") && !edtContext.getText().toString().equals("https://")) {
Retrofit retrofit = new Retrofit.Builder()
.baseUrl(edtContext.getText().toString())
.build();
HttpService service = retrofit.create(HttpService.class);
Call<ResponseBody> result = service.getContent();
result.enqueue(new Callback<ResponseBody>() {
@Override
public void onResponse(Response<ResponseBody> response) {
try {
if (response.body() != null) {
String summary = response.body().string();
webView.loadData(summary, "text/html; charset=utf-8", null);
}
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
public void onFailure(Throwable t) {
}
});
}
}
public interface HttpService {
@GET("/")
Call<ResponseBody> getContent();
}
class ClickEvent implements View.OnClickListener {
public void onClick(View v) {
if (v == btnSearch) {
Intent serverIntent = new Intent(PrintDemo.this, DeviceListActivity.class);
startActivityForResult(serverIntent, REQUEST_CONNECT_DEVICE);
} else if (v == btnSend) {
downloadContent();
} else if (v == btnClose) {
mService.stop();
} else if (v == btnSendDraw) {
printImage();
}
}
}
}
The result is almost what I want, as you can see, but the printed image is not clear.
I fixed it. The problem was the siteToImage() method. Here are the changes; I hope it helps someone:
private void siteToImage() {
webView.measure(View.MeasureSpec.makeMeasureSpec(
View.MeasureSpec.UNSPECIFIED, View.MeasureSpec.UNSPECIFIED),
View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED));
webView.setDrawingCacheEnabled(true);
webView.buildDrawingCache();
Bitmap b = Bitmap.createBitmap(webView.getMeasuredWidth(),
webView.getMeasuredHeight(), Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(b);
Paint paint = new Paint();
int iHeight = b.getHeight();
c.drawBitmap(b, 0, iHeight, paint);
webView.draw(c);
FileOutputStream fos;
try {
path = Environment.getExternalStorageDirectory().toString();
dir = new File(path, "/PrintDemo/media/img/");
if (!dir.isDirectory()) {
dir.mkdirs();
}
String arquivo = "darf_" + System.currentTimeMillis() + ".jpg";
file = new File(dir, arquivo);
fos = new FileOutputStream(file);
String imagePath = file.getAbsolutePath();
//scan the image so show up in album
MediaScannerConnection.scanFile(PrintDemo.this, new String[]{imagePath},
null, new MediaScannerConnection.OnScanCompletedListener() {
public void onScanCompleted(String path, Uri uri) {
}
});
b.compress(Bitmap.CompressFormat.PNG, 50, fos);
fos.flush();
fos.close();
b.recycle();
} catch (Exception e) {
e.printStackTrace();
}
}
I am new to OpenCV and am trying to create a simple application that opens the camera and captures a photo. I have implemented the CvCameraViewListener2 interface for this purpose. My code looks as follows:
MainActivity.java
public class MainActivity extends Activity implements CvCameraViewListener2{
public String TAG = "MainActivity";
private int mCameraIndex;
private Mat mBgr;
private Boolean mIsPhotoPending;
private CameraBridgeViewBase mCameraView;
private static final String STATE_CAMERA_INDEX = "cameraIndex";
private Boolean mIsMenuLocked;
private CameraBridgeViewBase.CvCameraViewFrame inputFrame;
int screen_w, screen_h;
private Mat gray, frame, lowRes;
static {
if (!OpenCVLoader.initDebug()) {
Log.v("MainActivity","Loading of OpenCv Failed");
}
}
private BaseLoaderCallback mLoaderCallBack = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch(status) {
case LoaderCallbackInterface.SUCCESS:
{
String TAG = "";
Log.i(TAG, "Open CV successfully loaded");
mCameraView.enableView();
mBgr = new Mat();
}break;
default:
{
super.onManagerConnected(status);
}break;
}
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//setContentView(R.layout.activity_main);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
if(savedInstanceState != null)
{
mCameraIndex = savedInstanceState.getInt(STATE_CAMERA_INDEX, 0);
}
else
{
mCameraIndex = 0;
}
if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD)
{
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
Camera.getCameraInfo(mCameraIndex, cameraInfo);
}
mCameraView = new NativeCameraView(this, mCameraIndex);
//mCameraView.setCvCameraViewListener(this);
findViewById(R.id.HelloOpenCvView);
//mOpenCvCameraView = new JavaCameraView(this,-1);
setContentView(mCameraView);
}
@Override
public void onResume() {
super.onResume();
mLoaderCallBack.onManagerConnected(LoaderCallbackInterface.SUCCESS);
// OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_11, this, mLoaderCallBack);
mIsMenuLocked = false;
}
@Override
public void onPause() {
super.onPause();
if(mCameraView != null)
mCameraView.disableView();
}
public void onDestroy() {
super.onDestroy();
if(mCameraView != null)
mCameraView.disableView();
}
@Override
public void onCameraViewStarted(int width, int height) {
}
@Override
public void onCameraViewStopped() {
}
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
Mat rgba = inputFrame.rgba();
if(mIsPhotoPending)
{
takePhoto(rgba);
}
return rgba;
}
private void takePhoto(Mat rgba)
{
//get the path of the photo
final long currentTimeMillis = System.currentTimeMillis();
final String appName = getString(R.string.app_name);
final String galleryPath = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES).toString();
final String albumPath = galleryPath + "/" + appName;
final String photoPath = albumPath + "/" + currentTimeMillis + ".png";
final ContentValues values = new ContentValues();
values.put(MediaStore.MediaColumns.DATA, photoPath);
values.put(MediaStore.Images.Media.MIME_TYPE, showActivity.PHOTO_MIME_TYPE);
values.put(MediaStore.Images.Media.TITLE, appName);
values.put(MediaStore.Images.Media.DESCRIPTION, appName);
values.put(MediaStore.Images.Media.DATE_TAKEN, currentTimeMillis);
//check if the album directory exists
File album = new File(albumPath);
if(!album.isDirectory() && !album.mkdirs())
{
Log.e(TAG,"Failed to create album directory at" + albumPath);
return;
}
//try to create the photo
Imgproc.cvtColor(rgba, mBgr, Imgproc.COLOR_RGBA2BGR, 3);
if(!Highgui.imwrite(photoPath, mBgr))
{
Log.d(TAG,"Photo saved successfully");
onTakePhotoFailed();
}
Log.d(TAG, "Photo saved successfully");
//insert photo in mediastore
Uri uri;
final Intent intent = new Intent();
try
{
uri = getContentResolver().insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values);
intent.putExtra(showActivity.EXTRA_PHOTO_URI, uri);
}catch(final Exception e)
{
Log.e(TAG, "Failed to insert photo into media store");
e.printStackTrace();
}
//delete the photo because insertion failed
File photo = new File(photoPath);
if(!photo.delete())
{
Log.e(TAG, "Failed to delete non-inserted photo");
}
onTakePhotoFailed();
intent.putExtra(showActivity.EXTRA_PHOTO_DATA_PATH, photoPath);
startActivity(intent);
return;
}
private void onTakePhotoFailed()
{
mIsMenuLocked = false;
//display error message
final String errorMessage = getString(R.string.photo_error_message);
runOnUiThread(new Runnable()
{
@Override
public void run()
{
Toast.makeText(MainActivity.this, errorMessage, Toast.LENGTH_SHORT).show();
}
});
}
My problem is that the onCameraFrame() method is never called, which in turn means takePhoto() is never called, and I am not able to capture the photo. I call takePhoto() from within onCameraFrame() because that method receives the Mat frame. Kindly let me know where I went wrong.
Any help would be highly appreciated.
You've commented out the camera listener. That's why onCameraFrame() is never called. Uncomment this in onCreate():
mCameraView.setCvCameraViewListener(this);
You may also need to implement the PictureCallback interface in your activity. Refer to the OpenCV "Tutorial 3 - Camera Control" sample app.
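As a sketch (using the fields already declared in your activity; not a drop-in file), the wiring and a way to trigger a capture could look like this:
// In onCreate():
mCameraView = new NativeCameraView(this, mCameraIndex);
mCameraView.setCvCameraViewListener(this); // required, otherwise onCameraFrame() never fires
setContentView(mCameraView);
mIsPhotoPending = false;                   // initialize; the Boolean field is otherwise null

// Wherever you want to capture (button, menu item, gesture, ...):
mIsPhotoPending = true;                    // the next onCameraFrame() call will invoke takePhoto(rgba)
Inside onCameraFrame(), set mIsPhotoPending back to false after calling takePhoto(rgba), otherwise a photo will be saved on every frame.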
I need to get the orientation of the device. The screen orientation is fixed to portrait. I have used the following code, but it doesn't seem to work. Because of the changes I made in the manifest file,
getResources().getConfiguration().orientation
always gives the same value.
public final class CaptureActivity extends Activity implements SurfaceHolder.Callback,SensorEventListener{
private static final String TAG = CaptureActivity.class.getSimpleName();
private static final long DEFAULT_INTENT_RESULT_DURATION_MS = 1500L;
private static final long BULK_MODE_SCAN_DELAY_MS = 1000L;
private static final String PACKAGE_NAME = "com.google.zxing.client.android";
private static final String PRODUCT_SEARCH_URL_PREFIX = "http://www.google";
private static final String PRODUCT_SEARCH_URL_SUFFIX = "/m/products/scan";
private static final String[] ZXING_URLS = { "http://zxing.appspot.com/scan", "zxing://scan/" };
public static final int HISTORY_REQUEST_CODE = 0x0000bacc;
private static final Set<ResultMetadataType> DISPLAYABLE_METADATA_TYPES =
EnumSet.of(ResultMetadataType.ISSUE_NUMBER,
ResultMetadataType.SUGGESTED_PRICE,
ResultMetadataType.ERROR_CORRECTION_LEVEL,
ResultMetadataType.POSSIBLE_COUNTRY);
private CameraManager cameraManager;
private CaptureActivityHandler handler;
private Result savedResultToShow;
private ViewfinderView viewfinderView;
//private TextView statusView;
//private View resultView;
private Result lastResult;
private boolean hasSurface;
private boolean copyToClipboard;
private IntentSource source;
private String sourceUrl;
private ScanFromWebPageManager scanFromWebPageManager;
private Collection<BarcodeFormat> decodeFormats;
private Map<DecodeHintType,?> decodeHints;
private String characterSet;
private HistoryManager historyManager;
private InactivityTimer inactivityTimer;
private BeepManager beepManager;
private AmbientLightManager ambientLightManager;
private int orientation;
private SensorManager mSensorManager;
private Sensor mAccelerometer;
ViewfinderView getViewfinderView() {
return viewfinderView;
}
public Handler getHandler() {
return handler;
}
CameraManager getCameraManager() {
return cameraManager;
}
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
Window window = getWindow();
window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.capture);
hasSurface = false;
historyManager = new HistoryManager(this);
historyManager.trimHistory();
inactivityTimer = new InactivityTimer(this);
beepManager = new BeepManager(this);
ambientLightManager = new AmbientLightManager(this);
PreferenceManager.setDefaultValues(this, R.xml.preferences, false);
//showHelpOnFirstLaunch();
}
@Override
protected void onResume() {
super.onResume();
// CameraManager must be initialized here, not in onCreate(). This is necessary because we don't
// want to open the camera driver and measure the screen size if we're going to show the help on
// first launch. That led to bugs where the scanning rectangle was the wrong size and partially
// off screen.
cameraManager = new CameraManager(getApplication());
viewfinderView = (ViewfinderView) findViewById(R.id.viewfinder_view);
viewfinderView.setCameraManager(cameraManager);
WindowManager manager = (WindowManager) this.getSystemService(Context.WINDOW_SERVICE);
Display display = manager.getDefaultDisplay();
int orientaionWidth = display.getWidth();
int orientaionHeight = display.getHeight();
int rotation=display.getRotation();
int orien=getResources().getConfiguration().orientation;
boolean orientation = false;
if(orientaionWidth>orientaionHeight){
orientation=true;
}
if(orien==1){
setLandscape(true);
}else{
setLandscape(false);
}
handler = null;
lastResult = null;
resetStatusView();
SurfaceView surfaceView = (SurfaceView) findViewById(R.id.preview_view);
SurfaceHolder surfaceHolder = surfaceView.getHolder();
if (hasSurface) {
// The activity was paused but not stopped, so the surface still exists. Therefore
// surfaceCreated() won't be called, so init the camera here.
initCamera(surfaceHolder);
} else {
// Install the callback and wait for surfaceCreated() to init the camera.
surfaceHolder.addCallback(this);
surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
beepManager.updatePrefs();
ambientLightManager.start(cameraManager);
inactivityTimer.onResume();
Intent intent = getIntent();
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
copyToClipboard = prefs.getBoolean(PreferencesActivity.KEY_COPY_TO_CLIPBOARD, true)
&& (intent == null || intent.getBooleanExtra(Intents.Scan.SAVE_HISTORY, true));
source = IntentSource.NONE;
decodeFormats = null;
characterSet = null;
if (intent != null) {
String action = intent.getAction();
String dataString = intent.getDataString();
if (Intents.Scan.ACTION.equals(action)) {
// Scan the formats the intent requested, and return the result to the calling activity.
source = IntentSource.NATIVE_APP_INTENT;
decodeFormats = DecodeFormatManager.parseDecodeFormats(intent);
decodeHints = DecodeHintManager.parseDecodeHints(intent);
if (intent.hasExtra(Intents.Scan.WIDTH) && intent.hasExtra(Intents.Scan.HEIGHT)) {
int width = intent.getIntExtra(Intents.Scan.WIDTH, 0);
int height = intent.getIntExtra(Intents.Scan.HEIGHT, 0);
if (width > 0 && height > 0) {
cameraManager.setManualFramingRect(width, height);
}
}
String customPromptMessage = intent.getStringExtra(Intents.Scan.PROMPT_MESSAGE);
} else if (dataString != null &&
dataString.contains(PRODUCT_SEARCH_URL_PREFIX) &&
dataString.contains(PRODUCT_SEARCH_URL_SUFFIX)) {
// Scan only products and send the result to mobile Product Search.
source = IntentSource.PRODUCT_SEARCH_LINK;
sourceUrl = dataString;
decodeFormats = DecodeFormatManager.PRODUCT_FORMATS;
} else if (isZXingURL(dataString)) {
// Scan formats requested in query string (all formats if none specified).
// If a return URL is specified, send the results there. Otherwise, handle it ourselves.
source = IntentSource.ZXING_LINK;
sourceUrl = dataString;
Uri inputUri = Uri.parse(dataString);
scanFromWebPageManager = new ScanFromWebPageManager(inputUri);
decodeFormats = DecodeFormatManager.parseDecodeFormats(inputUri);
// Allow a sub-set of the hints to be specified by the caller.
decodeHints = DecodeHintManager.parseDecodeHints(inputUri);
}
characterSet = intent.getStringExtra(Intents.Scan.CHARACTER_SET);
}
}
@Override
protected void onPause() {
if (handler != null) {
handler.quitSynchronously();
handler = null;
}
inactivityTimer.onPause();
ambientLightManager.stop();
cameraManager.closeDriver();
if (!hasSurface) {
SurfaceView surfaceView = (SurfaceView) findViewById(R.id.preview_view);
SurfaceHolder surfaceHolder = surfaceView.getHolder();
surfaceHolder.removeCallback(this);
}
super.onPause();
}
@Override
protected void onDestroy() {
inactivityTimer.shutdown();
super.onDestroy();
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent intent) {
if (resultCode == RESULT_OK) {
if (requestCode == HISTORY_REQUEST_CODE) {
int itemNumber = intent.getIntExtra(Intents.History.ITEM_NUMBER, -1);
if (itemNumber >= 0) {
HistoryItem historyItem = historyManager.buildHistoryItem(itemNumber);
decodeOrStoreSavedBitmap(null, historyItem.getResult());
}
}
}
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
if (holder == null) {
Log.e(TAG, "*** WARNING *** surfaceCreated() gave us a null surface!");
}
if (!hasSurface) {
hasSurface = true;
initCamera(holder);
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
hasSurface = false;
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
private static void drawLine(Canvas canvas, Paint paint, ResultPoint a, ResultPoint b, float scaleFactor) {
if (a != null && b != null) {
canvas.drawLine(scaleFactor * a.getX(),
scaleFactor * a.getY(),
scaleFactor * b.getX(),
scaleFactor * b.getY(),
paint);
}
}
private void sendReplyMessage(int id, Object arg, long delayMS) {
Message message = Message.obtain(handler, id, arg);
if (delayMS > 0L) {
handler.sendMessageDelayed(message, delayMS);
} else {
handler.sendMessage(message);
}
}
/**
* We want the help screen to be shown automatically the first time a new version of the app is
* run. The easiest way to do this is to check android:versionCode from the manifest, and compare
* it to a value stored as a preference.
*/
private boolean showHelpOnFirstLaunch() {
/* try {
PackageInfo info = getPackageManager().getPackageInfo(PACKAGE_NAME, 0);
int currentVersion = info.versionCode;
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
int lastVersion = prefs.getInt(PreferencesActivity.KEY_HELP_VERSION_SHOWN, 0);
if (currentVersion > lastVersion) {
prefs.edit().putInt(PreferencesActivity.KEY_HELP_VERSION_SHOWN, currentVersion).commit();
Intent intent = new Intent(this, HelpActivity.class);
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
// Show the default page on a clean install, and the what's new page on an upgrade.
String page = lastVersion == 0 ? HelpActivity.DEFAULT_PAGE : HelpActivity.WHATS_NEW_PAGE;
intent.putExtra(HelpActivity.REQUESTED_PAGE_KEY, page);
startActivity(intent);
return true;
}
} catch (PackageManager.NameNotFoundException e) {
Log.w(TAG, e);
}*/
return false;
}
private void initCamera(SurfaceHolder surfaceHolder) {
if (surfaceHolder == null) {
throw new IllegalStateException("No SurfaceHolder provided");
}
if (cameraManager.isOpen()) {
Log.w(TAG, "initCamera() while already open -- late SurfaceView callback?");
return;
}
try {
cameraManager.openDriver(surfaceHolder);
// Creating the handler starts the preview, which can also throw a RuntimeException.
if (handler == null) {
handler = new CaptureActivityHandler(this, decodeFormats, decodeHints, characterSet, cameraManager);
}
decodeOrStoreSavedBitmap(null, null);
} catch (IOException ioe) {
Log.w(TAG, ioe);
displayFrameworkBugMessageAndExit();
} catch (RuntimeException e) {
// Barcode Scanner has seen crashes in the wild of this variety:
// java.?lang.?RuntimeException: Fail to connect to camera service
Log.w(TAG, "Unexpected error initializing camera", e);
displayFrameworkBugMessageAndExit();
}
}
private void displayFrameworkBugMessageAndExit() {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle(getString(R.string.app_name));
builder.setMessage(getString(R.string.msg_camera_framework_bug));
builder.setPositiveButton(R.string.button_ok, new FinishListener(this));
builder.setOnCancelListener(new FinishListener(this));
builder.show();
}
public void restartPreviewAfterDelay(long delayMS) {
if (handler != null) {
handler.sendEmptyMessageDelayed(R.id.restart_preview, delayMS);
}
resetStatusView();
}
private void resetStatusView() {
viewfinderView.setVisibility(View.VISIBLE);
lastResult = null;
}
public void drawViewfinder() {
viewfinderView.drawViewfinder();
}
@Override
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
}
public void setLandscape(boolean orientation) {
viewfinderView.setLandscape(orientation);
}
@Override
public void onAccuracyChanged(Sensor arg0, int arg1) {
// TODO Auto-generated method stub
}
@Override
public void onSensorChanged(SensorEvent arg0) {
if (arg0.values[1]<6.5 && arg0.values[1]>-6.5) {
if (orientation!=1) {
Log.d("Sensor", "Landscape");
}
orientation=1;
} else {
if (orientation!=0) {
Log.d("Sensor", "Portrait");
}
orientation=0;
}
}
}
You can get the current orientation through
getResources().getConfiguration().orientation
(in an Activity), or
getActivity().getResources().getConfiguration().orientation
(in a Fragment). This should work:
int orientation = getResources().getConfiguration().orientation;
if(orientation == Configuration.ORIENTATION_PORTRAIT) {
Log.i(TAG, "Portrait");
} else if(orientation == Configuration.ORIENTATION_LANDSCAPE) {
Log.i(TAG, "LandScape");
}
It should be
Display display = ((WindowManager) getSystemService(WINDOW_SERVICE)).getDefaultDisplay();
/* Now we can retrieve all display-related infos */
int width = display.getWidth();
int height = display.getHeight();
int rotation = display.getRotation();
Also, try getResources().getConfiguration().orientation from your Activity.
By setting the orientation to be fixed to portrait in the manifest, you can't use getResources().getConfiguration().orientation, as you already know.
Try using the accelerometer to determine the current rotation/tilt of the device, as described in "How do I use the Android Accelerometer?".
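For example, a minimal sketch along those lines (the thresholds and the orientation codes are illustrative). Note that your CaptureActivity declares mSensorManager and mAccelerometer but never registers the listener, so onSensorChanged() will not fire until you do:
// Register in onResume(), unregister in onPause(); the activity already implements SensorEventListener.
mSensorManager = (SensorManager) getSystemService(Context.SENSOR_SERVICE);
mAccelerometer = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
mSensorManager.registerListener(this, mAccelerometer, SensorManager.SENSOR_DELAY_NORMAL);
// ... and in onPause():
// mSensorManager.unregisterListener(this);

@Override
public void onSensorChanged(SensorEvent event) {
    float x = event.values[0];
    float y = event.values[1];
    // Gravity mostly along the y axis: device held upright (portrait);
    // mostly along the x axis: device on its side (landscape).
    orientation = (Math.abs(y) > Math.abs(x)) ? 0 : 1;
}
This keeps your existing int orientation field updated even though the Activity itself stays locked to portrait.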