I need to get the physical orientation of a device whose screen orientation is fixed to portrait. I have used the following code, but it doesn't seem to work. Because of the changes I made in the manifest file,
getResources().getConfiguration().orientation
always gives the same value.
public final class CaptureActivity extends Activity implements SurfaceHolder.Callback,SensorEventListener{
private static final String TAG = CaptureActivity.class.getSimpleName();
private static final long DEFAULT_INTENT_RESULT_DURATION_MS = 1500L;
private static final long BULK_MODE_SCAN_DELAY_MS = 1000L;
private static final String PACKAGE_NAME = "com.google.zxing.client.android";
private static final String PRODUCT_SEARCH_URL_PREFIX = "http://www.google";
private static final String PRODUCT_SEARCH_URL_SUFFIX = "/m/products/scan";
private static final String[] ZXING_URLS = { "http://zxing.appspot.com/scan", "zxing://scan/" };
public static final int HISTORY_REQUEST_CODE = 0x0000bacc;
private static final Set<ResultMetadataType> DISPLAYABLE_METADATA_TYPES =
EnumSet.of(ResultMetadataType.ISSUE_NUMBER,
ResultMetadataType.SUGGESTED_PRICE,
ResultMetadataType.ERROR_CORRECTION_LEVEL,
ResultMetadataType.POSSIBLE_COUNTRY);
private CameraManager cameraManager;
private CaptureActivityHandler handler;
private Result savedResultToShow;
private ViewfinderView viewfinderView;
//private TextView statusView;
//private View resultView;
private Result lastResult;
private boolean hasSurface;
private boolean copyToClipboard;
private IntentSource source;
private String sourceUrl;
private ScanFromWebPageManager scanFromWebPageManager;
private Collection<BarcodeFormat> decodeFormats;
private Map<DecodeHintType,?> decodeHints;
private String characterSet;
private HistoryManager historyManager;
private InactivityTimer inactivityTimer;
private BeepManager beepManager;
private AmbientLightManager ambientLightManager;
private int orientation;
private SensorManager mSensorManager;
private Sensor mAccelerometer;
ViewfinderView getViewfinderView() {
return viewfinderView;
}
public Handler getHandler() {
return handler;
}
CameraManager getCameraManager() {
return cameraManager;
}
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
Window window = getWindow();
window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.capture);
hasSurface = false;
historyManager = new HistoryManager(this);
historyManager.trimHistory();
inactivityTimer = new InactivityTimer(this);
beepManager = new BeepManager(this);
ambientLightManager = new AmbientLightManager(this);
PreferenceManager.setDefaultValues(this, R.xml.preferences, false);
//showHelpOnFirstLaunch();
}
@Override
protected void onResume() {
super.onResume();
// CameraManager must be initialized here, not in onCreate(). This is necessary because we don't
// want to open the camera driver and measure the screen size if we're going to show the help on
// first launch. That led to bugs where the scanning rectangle was the wrong size and partially
// off screen.
cameraManager = new CameraManager(getApplication());
viewfinderView = (ViewfinderView) findViewById(R.id.viewfinder_view);
viewfinderView.setCameraManager(cameraManager);
WindowManager manager = (WindowManager) this.getSystemService(Context.WINDOW_SERVICE);
Display display = manager.getDefaultDisplay();
int orientaionWidth = display.getWidth();
int orientaionHeight = display.getHeight();
int rotation=display.getRotation();
int orien=getResources().getConfiguration().orientation;
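// NOTE: with the orientation locked to portrait in the manifest, this always
// returns Configuration.ORIENTATION_PORTRAIT (1), which is the problem described above.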
boolean orientation = false;
if(orientaionWidth>orientaionHeight){
orientation=true;
}
if(orien==1){
setLandscape(true);
}else{
setLandscape(false);
}
handler = null;
lastResult = null;
resetStatusView();
SurfaceView surfaceView = (SurfaceView) findViewById(R.id.preview_view);
SurfaceHolder surfaceHolder = surfaceView.getHolder();
if (hasSurface) {
// The activity was paused but not stopped, so the surface still exists. Therefore
// surfaceCreated() won't be called, so init the camera here.
initCamera(surfaceHolder);
} else {
// Install the callback and wait for surfaceCreated() to init the camera.
surfaceHolder.addCallback(this);
surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
beepManager.updatePrefs();
ambientLightManager.start(cameraManager);
inactivityTimer.onResume();
Intent intent = getIntent();
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
copyToClipboard = prefs.getBoolean(PreferencesActivity.KEY_COPY_TO_CLIPBOARD, true)
&& (intent == null || intent.getBooleanExtra(Intents.Scan.SAVE_HISTORY, true));
source = IntentSource.NONE;
decodeFormats = null;
characterSet = null;
if (intent != null) {
String action = intent.getAction();
String dataString = intent.getDataString();
if (Intents.Scan.ACTION.equals(action)) {
// Scan the formats the intent requested, and return the result to the calling activity.
source = IntentSource.NATIVE_APP_INTENT;
decodeFormats = DecodeFormatManager.parseDecodeFormats(intent);
decodeHints = DecodeHintManager.parseDecodeHints(intent);
if (intent.hasExtra(Intents.Scan.WIDTH) && intent.hasExtra(Intents.Scan.HEIGHT)) {
int width = intent.getIntExtra(Intents.Scan.WIDTH, 0);
int height = intent.getIntExtra(Intents.Scan.HEIGHT, 0);
if (width > 0 && height > 0) {
cameraManager.setManualFramingRect(width, height);
}
}
String customPromptMessage = intent.getStringExtra(Intents.Scan.PROMPT_MESSAGE);
} else if (dataString != null &&
dataString.contains(PRODUCT_SEARCH_URL_PREFIX) &&
dataString.contains(PRODUCT_SEARCH_URL_SUFFIX)) {
// Scan only products and send the result to mobile Product Search.
source = IntentSource.PRODUCT_SEARCH_LINK;
sourceUrl = dataString;
decodeFormats = DecodeFormatManager.PRODUCT_FORMATS;
} else if (isZXingURL(dataString)) {
// Scan formats requested in query string (all formats if none specified).
// If a return URL is specified, send the results there. Otherwise, handle it ourselves.
source = IntentSource.ZXING_LINK;
sourceUrl = dataString;
Uri inputUri = Uri.parse(dataString);
scanFromWebPageManager = new ScanFromWebPageManager(inputUri);
decodeFormats = DecodeFormatManager.parseDecodeFormats(inputUri);
// Allow a sub-set of the hints to be specified by the caller.
decodeHints = DecodeHintManager.parseDecodeHints(inputUri);
}
characterSet = intent.getStringExtra(Intents.Scan.CHARACTER_SET);
}
}
@Override
protected void onPause() {
if (handler != null) {
handler.quitSynchronously();
handler = null;
}
inactivityTimer.onPause();
ambientLightManager.stop();
cameraManager.closeDriver();
if (!hasSurface) {
SurfaceView surfaceView = (SurfaceView) findViewById(R.id.preview_view);
SurfaceHolder surfaceHolder = surfaceView.getHolder();
surfaceHolder.removeCallback(this);
}
super.onPause();
}
@Override
protected void onDestroy() {
inactivityTimer.shutdown();
super.onDestroy();
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent intent) {
if (resultCode == RESULT_OK) {
if (requestCode == HISTORY_REQUEST_CODE) {
int itemNumber = intent.getIntExtra(Intents.History.ITEM_NUMBER, -1);
if (itemNumber >= 0) {
HistoryItem historyItem = historyManager.buildHistoryItem(itemNumber);
decodeOrStoreSavedBitmap(null, historyItem.getResult());
}
}
}
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
if (holder == null) {
Log.e(TAG, "*** WARNING *** surfaceCreated() gave us a null surface!");
}
if (!hasSurface) {
hasSurface = true;
initCamera(holder);
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
hasSurface = false;
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
private static void drawLine(Canvas canvas, Paint paint, ResultPoint a, ResultPoint b, float scaleFactor) {
if (a != null && b != null) {
canvas.drawLine(scaleFactor * a.getX(),
scaleFactor * a.getY(),
scaleFactor * b.getX(),
scaleFactor * b.getY(),
paint);
}
}
private void sendReplyMessage(int id, Object arg, long delayMS) {
Message message = Message.obtain(handler, id, arg);
if (delayMS > 0L) {
handler.sendMessageDelayed(message, delayMS);
} else {
handler.sendMessage(message);
}
}
/**
* We want the help screen to be shown automatically the first time a new version of the app is
* run. The easiest way to do this is to check android:versionCode from the manifest, and compare
* it to a value stored as a preference.
*/
private boolean showHelpOnFirstLaunch() {
/* try {
PackageInfo info = getPackageManager().getPackageInfo(PACKAGE_NAME, 0);
int currentVersion = info.versionCode;
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
int lastVersion = prefs.getInt(PreferencesActivity.KEY_HELP_VERSION_SHOWN, 0);
if (currentVersion > lastVersion) {
prefs.edit().putInt(PreferencesActivity.KEY_HELP_VERSION_SHOWN, currentVersion).commit();
Intent intent = new Intent(this, HelpActivity.class);
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
// Show the default page on a clean install, and the what's new page on an upgrade.
String page = lastVersion == 0 ? HelpActivity.DEFAULT_PAGE : HelpActivity.WHATS_NEW_PAGE;
intent.putExtra(HelpActivity.REQUESTED_PAGE_KEY, page);
startActivity(intent);
return true;
}
} catch (PackageManager.NameNotFoundException e) {
Log.w(TAG, e);
}*/
return false;
}
private void initCamera(SurfaceHolder surfaceHolder) {
if (surfaceHolder == null) {
throw new IllegalStateException("No SurfaceHolder provided");
}
if (cameraManager.isOpen()) {
Log.w(TAG, "initCamera() while already open -- late SurfaceView callback?");
return;
}
try {
cameraManager.openDriver(surfaceHolder);
// Creating the handler starts the preview, which can also throw a RuntimeException.
if (handler == null) {
handler = new CaptureActivityHandler(this, decodeFormats, decodeHints, characterSet, cameraManager);
}
decodeOrStoreSavedBitmap(null, null);
} catch (IOException ioe) {
Log.w(TAG, ioe);
displayFrameworkBugMessageAndExit();
} catch (RuntimeException e) {
// Barcode Scanner has seen crashes in the wild of this variety:
// java.?lang.?RuntimeException: Fail to connect to camera service
Log.w(TAG, "Unexpected error initializing camera", e);
displayFrameworkBugMessageAndExit();
}
}
private void displayFrameworkBugMessageAndExit() {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle(getString(R.string.app_name));
builder.setMessage(getString(R.string.msg_camera_framework_bug));
builder.setPositiveButton(R.string.button_ok, new FinishListener(this));
builder.setOnCancelListener(new FinishListener(this));
builder.show();
}
public void restartPreviewAfterDelay(long delayMS) {
if (handler != null) {
handler.sendEmptyMessageDelayed(R.id.restart_preview, delayMS);
}
resetStatusView();
}
private void resetStatusView() {
viewfinderView.setVisibility(View.VISIBLE);
lastResult = null;
}
public void drawViewfinder() {
viewfinderView.drawViewfinder();
}
@Override
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
}
public void setLandscape(boolean orientation) {
viewfinderView.setLandscape(orientation);
}
@Override
public void onAccuracyChanged(Sensor arg0, int arg1) {
// TODO Auto-generated method stub
}
@Override
public void onSensorChanged(SensorEvent arg0) {
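// values[1] is the acceleration along the Y axis; when gravity is no longer
// mostly along Y (|y| < ~6.5 m/s^2), the device is being held sideways.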
if (arg0.values[1]<6.5 && arg0.values[1]>-6.5) {
if (orientation!=1) {
Log.d("Sensor", "Landscape");
}
orientation=1;
} else {
if (orientation!=0) {
Log.d("Sensor", "Portrait");
}
orientation=0;
}
}
}
You can get the current orientation through
getResources().getConfiguration().orientation
in an Activity, or
getActivity().getResources().getConfiguration().orientation
in a Fragment.
This should work:
int orientation = getResources().getConfiguration().orientation;
if(orientation == Configuration.ORIENTATION_PORTRAIT) {
Log.i(TAG, "Portrait");
} else if(orientation == Configuration.ORIENTATION_LANDSCAPE) {
Log.i(TAG, "Landscape");
}
It should be
Display display = ((WindowManager) getSystemService(WINDOW_SERVICE)).getDefaultDisplay();
/* Now we can retrieve all display-related infos */
int width = display.getWidth();
int height = display.getHeight();
int rotation = display.getRotation();
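The rotation value is one of the Surface.ROTATION_* constants, measured from the device's natural orientation; a short sketch of how you might interpret it (note that with the orientation locked in the manifest, the reported rotation will generally be fixed as well):
switch (rotation) {
case Surface.ROTATION_0:
case Surface.ROTATION_180:
// in (or upside down from) the device's natural orientation
break;
case Surface.ROTATION_90:
case Surface.ROTATION_270:
// turned sideways relative to the natural orientation
break;
}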
Also, try with getActivity().getResources().getConfiguration().orientation.
By fixing the orientation to portrait in the manifest, you can't use getResources().getConfiguration().orientation, as you already know.
Try using the accelerometer to determine the current rotation/tilt of the device as described here, How do I use the Android Accelerometer?
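A minimal sketch of that approach, along the lines of the SensorEventListener your activity already implements (comparing the X and Y gravity components is illustrative; tune the thresholds to taste):
SensorManager sm = (SensorManager) getSystemService(Context.SENSOR_SERVICE);
Sensor accel = sm.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
sm.registerListener(new SensorEventListener() {
@Override
public void onSensorChanged(SensorEvent event) {
float x = event.values[0];
float y = event.values[1];
// Gravity mostly along Y means the device is upright (portrait);
// gravity mostly along X means it is held sideways (landscape).
boolean landscape = Math.abs(x) > Math.abs(y);
Log.d("Sensor", landscape ? "Landscape" : "Portrait");
}
@Override
public void onAccuracyChanged(Sensor sensor, int accuracy) {
}
}, accel, SensorManager.SENSOR_DELAY_NORMAL);
Remember to unregister the listener in onPause().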
Related
I have a background service which is an accessibility service (don't think it makes a difference). I want to launch a new activity from this service. I have tried using this suggested solution:
Intent dialogIntent = new Intent(this, ScreenshotActivity.class);
dialogIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
getApplicationContext().startActivity(dialogIntent);
However, this does not seem to work. My logcat gives this output but the activity does not launch:
2020-03-20 20:54:02.824 22977-22977/com.xx.xx I/Timeline: Timeline: Activity_launch_request time:19482980
My service and activity work properly independently, but not in this situation. The code point from which I make this call is definitely reached. The device I am using for testing runs Android 9.
Edit: Main service code:
public class MyAccessibility extends AccessibilityService {
public static MyAccessibility instance;
@Override
public void onCreate() {
super.onCreate();
Log.i("myLog", "create accessibility");
}
@Override
public void onAccessibilityEvent(AccessibilityEvent accessibilityEvent) {
Log.i("myLog", "Event");
if (instance == null) {
Log.i("myLog", "Instance set to not null");
instance = this;
}
}
@Override
public void onInterrupt() {
}
@Override
protected void onServiceConnected() {
super.onServiceConnected();
AccessibilityServiceInfo info = new AccessibilityServiceInfo();
info.eventTypes = AccessibilityEvent.TYPES_ALL_MASK;
info.notificationTimeout = 100;
info.feedbackType = AccessibilityServiceInfo.FEEDBACK_ALL_MASK;
this.setServiceInfo(info);
Toast t = Toast.makeText(getApplicationContext(), "Accessibility Service is connected now", Toast.LENGTH_SHORT);
t.show();
System.out.println("Accessibility was connected!");
instance = this;
}
public void takeSS(){
Intent dialogIntent = new Intent(this, ScreenshotActivity.class);
dialogIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
this.startActivity(dialogIntent);
}
}
There are some other methods which use dispatchGesture; that is why I keep a static reference to the service instance, so I can access these methods from elsewhere. The screenshot activity uses media projection to take the screenshot and use the image:
public class ScreenshotActivity extends Activity {
private static final int REQUEST_MEDIA_PROJECTION = 1;
private static final String TAG = "ScreenshotActivity";
private MediaProjectionManager mProjectionManager;
private MediaProjection mMediaProjection = null;
private VirtualDisplay mVirtualDisplay;
private ImageReader mImageReader;
private static final int MAX_IMAGE_BUFFER = 10;
private int counter;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_screenshot_dummy);
counter = 0;
OrientationChangedListener mOrientationChangedListener = new OrientationChangedListener(this);
mOrientationChangedListener.enable();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
mProjectionManager = (MediaProjectionManager)getSystemService(MEDIA_PROJECTION_SERVICE);
startActivityForResult(mProjectionManager.createScreenCaptureIntent(), REQUEST_MEDIA_PROJECTION);
}
}
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public void onActivityResult(int requestCode, int resultCode, Intent resultData) {
super.onActivityResult(requestCode, resultCode, resultData);
if (requestCode == REQUEST_MEDIA_PROJECTION) {
String message;
if (resultCode != Activity.RESULT_OK) {
message = "Media Projection Declined";
mMediaProjection = null;
} else {
message = "Media Projection Accepted";
mMediaProjection = mProjectionManager.getMediaProjection(resultCode, resultData);
attachImageCaptureOverlay();
}
Toast toast = Toast.makeText(this, message, Toast.LENGTH_SHORT);
toast.show();
}
}
private class OrientationChangedListener extends OrientationEventListener {
int mLastOrientation = -1;
OrientationChangedListener(Context context) {
super(context);
}
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
@Override
public void onOrientationChanged(int orientation) {
final int screenOrientation = getWindowManager().getDefaultDisplay().getRotation();
if (mVirtualDisplay == null) return;
if (mLastOrientation == screenOrientation) return;
mLastOrientation = screenOrientation;
detachImageCaptureOverlay();
attachImageCaptureOverlay();
}
}
private final ImageReader.OnImageAvailableListener mOnImageAvailableListener = new ImageReader.OnImageAvailableListener() {
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireLatestImage();
if (image == null || image.getPlanes().length <= 0) return;
final Image.Plane plane = image.getPlanes()[0];
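// Rows in the buffer may be padded past the visible width; account for the
// padding so copyPixelsFromBuffer() lines up with the bitmap's row stride.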
final int rowPadding = plane.getRowStride() - plane.getPixelStride() * image.getWidth();
final int bitmapWidth = image.getWidth() + rowPadding / plane.getPixelStride();
final Bitmap tempBitmap = Bitmap.createBitmap(bitmapWidth, image.getHeight(), Bitmap.Config.ARGB_8888);
tempBitmap.copyPixelsFromBuffer(plane.getBuffer());
Rect cropRect = image.getCropRect();
final Bitmap bitmap = Bitmap.createBitmap(tempBitmap, cropRect.left, cropRect.top, cropRect.width(), cropRect.height());
//Do something with the bitmap
image.close();
}
};
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
private void attachImageCaptureOverlay() {
if (mMediaProjection == null) return;
final DisplayMetrics metrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getRealMetrics(metrics);
mImageReader = ImageReader.newInstance(metrics.widthPixels, metrics.heightPixels, PixelFormat.RGBA_8888, MAX_IMAGE_BUFFER);
mVirtualDisplay = mMediaProjection.createVirtualDisplay("ScreenCaptureTest",
metrics.widthPixels, metrics.heightPixels, metrics.densityDpi,
DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
mImageReader.getSurface(), null, null);
mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, null);
}
@RequiresApi(api = Build.VERSION_CODES.KITKAT)
private void detachImageCaptureOverlay() {
mVirtualDisplay.release();
mImageReader.close();
}
}
You can't start an activity from the background since Android Oreo.
One workaround is to run your service in the foreground: call startForeground() with a sticky notification when your service starts, and give the notification an appropriate action that starts your desired activity via a PendingIntent.
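A minimal sketch of that workaround (the channel id, notification id, and icon resource are illustrative; on API 26+ the notification channel must be created first):
// In the service, once it has started:
Intent dialogIntent = new Intent(this, ScreenshotActivity.class);
dialogIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
PendingIntent pi = PendingIntent.getActivity(this, 0, dialogIntent,
PendingIntent.FLAG_UPDATE_CURRENT); // add FLAG_IMMUTABLE on API 31+
Notification notification = new NotificationCompat.Builder(this, "screenshot_channel")
.setContentTitle("Screenshot service running")
.setSmallIcon(R.drawable.ic_notification) // illustrative resource
.setContentIntent(pi) // tapping the notification launches the activity
.setOngoing(true)
.build();
startForeground(1, notification);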
My app does image capture with face recognition. What I need to implement is converting the byte-array image into a Bitmap and passing that Bitmap to the next Activity; the next Activity will receive the Bitmap, show it in an ImageView, and then convert it to Base64.
I don't know how to implement this in my existing code.
So: when the image is captured it is shown; once the Save button is clicked, the image should be passed to the next activity and there converted to Base64.
FaceTrackerActivity.java
public final class FaceTrackerActivity extends AppCompatActivity implements View.OnClickListener {
private static final String TAG = "FaceTracker";
private CameraSource mCameraSource = null;
private CameraSourcePreview mPreview;
private GraphicOverlay mGraphicOverlay;
private ImageView btnCapture;
private ImageView btnChangeCamera;
private ImageView btnCancel;
private ImageView btnSave;
private FrameLayout frmResult;
private ImageView imgPicture;
Bitmap bmp;
private static final int RC_HANDLE_GMS = 9001;
// permission request codes need to be < 256
private static final int RC_HANDLE_CAMERA_PERM = 2;
private static final int RC_HANDLE_WRITE_EXTERNAL_STORAGE_PERM = 3;
private int cameraId = CameraSource.CAMERA_FACING_BACK;
Intent x;
//==============================================================================================
// Activity Methods
//==============================================================================================
/**
* Initializes the UI and initiates the creation of a face detector.
*/
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
setContentView(R.layout.main);
mPreview = (CameraSourcePreview) findViewById(R.id.preview);
mGraphicOverlay = (GraphicOverlay) findViewById(R.id.faceOverlay);
btnCapture = (ImageView) findViewById(R.id.btn_capture);
btnChangeCamera = (ImageView) findViewById(R.id.btn_change_camera);
btnCancel = (ImageView) findViewById(R.id.btn_cancel);
btnSave = (ImageView) findViewById(R.id.btn_save);
frmResult = (FrameLayout) findViewById(R.id.frm_capture_result);
imgPicture = (ImageView) findViewById(R.id.img_capture_result);
boolean hasPermissionCamera = (ContextCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED);
if (!hasPermissionCamera) {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, RC_HANDLE_CAMERA_PERM);
} else {
boolean hasPermissionWriteStorage = (ContextCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.WRITE_EXTERNAL_STORAGE) == PackageManager.PERMISSION_GRANTED);
if (!hasPermissionWriteStorage) {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, RC_HANDLE_WRITE_EXTERNAL_STORAGE_PERM);
} else {
createCameraSource(cameraId);
}
}
btnCapture.setOnClickListener(this);
btnChangeCamera.setOnClickListener(this);
}
private void createCameraSource(int cameraId) {
Context context = getApplicationContext();
FaceDetector detector = new FaceDetector.Builder(context)
.setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
.build();
detector.setProcessor(
new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory())
.build());
if (!detector.isOperational()) {
Log.w(TAG, "Face detector dependencies are not yet available.");
}
DisplayMetrics metrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metrics);
mCameraSource = new CameraSource.Builder(context, detector)
.setRequestedPreviewSize(metrics.heightPixels, metrics.widthPixels)
.setFacing(cameraId)
.setAutoFocusEnabled(true)
.setRequestedFps(30.0f)
.build();
}
@Override
protected void onResume() {
super.onResume();
startCameraSource();
}
@Override
protected void onPause() {
super.onPause();
mPreview.stop();
}
@Override
protected void onDestroy() {
super.onDestroy();
if (mCameraSource != null) {
mCameraSource.release();
}
}
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
switch (requestCode) {
case RC_HANDLE_CAMERA_PERM: {
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
finish();
startActivity(getIntent());
}
break;
}
case RC_HANDLE_WRITE_EXTERNAL_STORAGE_PERM: {
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
finish();
startActivity(getIntent());
}
break;
}
}
}
private void startCameraSource() {
// check that the device has play services available.
int code = GoogleApiAvailability.getInstance().isGooglePlayServicesAvailable(
getApplicationContext());
if (code != ConnectionResult.SUCCESS) {
Dialog dlg = GoogleApiAvailability.getInstance().getErrorDialog(this, code, RC_HANDLE_GMS);
dlg.show();
}
if (mCameraSource != null) {
try {
mPreview.start(mCameraSource, mGraphicOverlay);
} catch (IOException e) {
Log.e(TAG, "Unable to start camera source.", e);
mCameraSource.release();
mCameraSource = null;
}
}
}
@Override
public void onClick(View v) {
int id = v.getId();
if (id == R.id.btn_capture) {
Toast.makeText(this, "capture", Toast.LENGTH_SHORT).show();
mCameraSource.takePicture(null, new CameraSource.PictureCallback() {
@Override
public void onPictureTaken(final byte[] bytes) {
int orientation = Exif.getOrientation(bytes);
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 2;
options.inPreferredConfig = Bitmap.Config.ARGB_8888;
bmp = BitmapFactory.decodeByteArray(bytes, 0, bytes.length, options);
switch (orientation) {
case 90:
bmp = Utils.rotateImage(bmp, 90);
break;
case 180:
bmp = Utils.rotateImage(bmp, 180);
break;
case 270:
bmp = Utils.rotateImage(bmp, 270);
break;
}
if (cameraId == CameraSource.CAMERA_FACING_FRONT) {
bmp = Utils.flip(bmp, Constants.FLIP_HORIZONTAL);
}
if (bmp != null) {
frmResult.setVisibility(View.VISIBLE);
imgPicture.setImageBitmap(bmp);
btnCancel.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
frmResult.setVisibility(View.GONE);
}
});
btnSave.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
SavePhotoTask savePhotoTask = new SavePhotoTask();
savePhotoTask.execute(bytes);
}
});
}
}
});
} else if (id == R.id.btn_change_camera) {
mCameraSource.release();
mCameraSource = null;
cameraId = cameraId == CameraSource.CAMERA_FACING_BACK ? CameraSource.CAMERA_FACING_FRONT : CameraSource.CAMERA_FACING_BACK;
createCameraSource(cameraId);
startCameraSource();
}
}
private class GraphicFaceTrackerFactory implements MultiProcessor.Factory<Face> {
@Override
public Tracker<Face> create(Face face) {
return new GraphicFaceTracker(mGraphicOverlay);
}
}
private class GraphicFaceTracker extends Tracker<Face> {
private GraphicOverlay mOverlay;
private FaceGraphic mFaceGraphic;
GraphicFaceTracker(GraphicOverlay overlay) {
mOverlay = overlay;
mFaceGraphic = new FaceGraphic(overlay);
}
@Override
public void onNewItem(int faceId, Face item) {
mFaceGraphic.setId(faceId);
}
@Override
public void onUpdate(FaceDetector.Detections<Face> detectionResults, Face face) {
mOverlay.add(mFaceGraphic);
mFaceGraphic.updateFace(face);
}
@Override
public void onMissing(FaceDetector.Detections<Face> detectionResults) {
mOverlay.remove(mFaceGraphic);
}
@Override
public void onDone() {
mOverlay.remove(mFaceGraphic);
}
}
class SavePhotoTask extends AsyncTask<byte[], String, String> {
@Override
protected String doInBackground(byte[]... data) {
// Create an image file name
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
String imageFileName = "TRACKER_" + timeStamp + "_";
File storageDir = getExternalFilesDir(Environment.DIRECTORY_PICTURES);
try {
File photo = File.createTempFile(imageFileName, ".jpg", storageDir);
FileOutputStream fos = new FileOutputStream(photo.getPath());
fos.write(data[0]);
fos.close();
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
}}
NEXT.java
public class NEXT extends AppCompatActivity {
ImageView y;
Intent intent;
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.next);
y = (ImageView) findViewById(R.id.myimage);
Bitmap bitmap = (Bitmap) getIntent().getParcelableExtra("key"); // read the bitmap from the launching intent
y.setImageBitmap(bitmap);
}}
Putting large data (>1 MB) into an Intent can produce android.os.TransactionTooLargeException; from the reference docs:
The Binder transaction buffer has a limited fixed size, currently
1Mb, which is shared by all transactions in progress for the
process. Consequently this exception can be thrown when there are many
transactions in progress even when most of the individual transactions
are of moderate size.
The best way is to create a holder class and put your image into it; you can then access it from any activity.
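A minimal sketch of such a holder (class and method names are illustrative), including the Base64 step from your question:
public final class BitmapHolder {
private static Bitmap bitmap;
public static void set(Bitmap b) { bitmap = b; }
public static Bitmap get() { return bitmap; }
}
// In FaceTrackerActivity, when Save is clicked:
BitmapHolder.set(bmp);
startActivity(new Intent(FaceTrackerActivity.this, NEXT.class));
// In NEXT.onCreate(), after findViewById():
Bitmap bitmap = BitmapHolder.get();
y.setImageBitmap(bitmap);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
bitmap.compress(Bitmap.CompressFormat.JPEG, 90, baos);
String base64 = Base64.encodeToString(baos.toByteArray(), Base64.DEFAULT);
// Clear the holder (BitmapHolder.set(null)) when done to avoid leaking the bitmap.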
Hi everyone. I have a sample screenshot project from @commonsware that uses media projection to take a screenshot of any screen (running as a foreground service with a notification).
However, it's not taking any picture; it just beeps on button click.
Also, I want to change the output directory, but I don't know how.
I need to change it because I want to load all the images into a RecyclerView inside the app.
Any help will be appreciated.
Here's the whole service code:
public class ScreenShotService extends Service {
private static final int NOTIFY_ID = 9906;
static final String EXTRA_RESULT_CODE = "resultCode";
static final String EXTRA_RESULT_INTENT = "resultIntent";
static final String ACTION_RECORD = BuildConfig.APPLICATION_ID + ".RECORD";
static final String ACTION_SHUTDOWN = BuildConfig.APPLICATION_ID + ".SHUTDOWN";
static final int VIRT_DISPLAY_FLAGS = DisplayManager.VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY | DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC;
private MediaProjection projection;
private VirtualDisplay vdisplay;
final private HandlerThread handlerThread = new HandlerThread(getClass().getSimpleName(), android.os.Process.THREAD_PRIORITY_BACKGROUND);
private Handler handler;
private WindowManager windowManager;
private MediaProjectionManager mediaProjectionManager;
private int resultCode;
private Intent resultData;
final private ToneGenerator beeper = new ToneGenerator(AudioManager.STREAM_NOTIFICATION, 100);
@Override
public void onCreate() {
super.onCreate();
mediaProjectionManager = (MediaProjectionManager) getSystemService(MEDIA_PROJECTION_SERVICE);
windowManager = (WindowManager)getSystemService(WINDOW_SERVICE);
handlerThread.start();
handler=new Handler(handlerThread.getLooper());
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
if (intent.getAction() == null) {
resultCode = intent.getIntExtra(EXTRA_RESULT_CODE, 1337);
resultData = intent.getParcelableExtra(EXTRA_RESULT_INTENT);
foregroundify();
}
else if (intent.getAction().equals(ACTION_RECORD)) {
if (resultData!=null) {
startCapture();
}
else {
Intent ui=
new Intent(this, Main.class)
.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
startActivity(ui);
}
}
else if (intent.getAction().equals(ACTION_SHUTDOWN)) {
beeper.startTone(ToneGenerator.TONE_PROP_NACK);
stopForeground(true);
stopSelf();
}
return(START_NOT_STICKY);
}
@Override
public void onDestroy() {
stopCapture();
super.onDestroy();
}
@Nullable
@Override
public IBinder onBind(Intent intent) {
throw new IllegalStateException("Binding not supported. Go away.");
}
WindowManager getWindowManager() {
return(windowManager);
}
Handler getHandler() {
return(handler);
}
void processImage(final byte[] png) {
new Thread() {
@Override
public void run() {
File output = new File(getExternalFilesDir(null), "screenshot.png");
try {
FileOutputStream fos=new FileOutputStream(output);
fos.write(png);
fos.flush();
fos.getFD().sync();
fos.close();
MediaScannerConnection.scanFile(ScreenShotService.this,
new String[] {output.getAbsolutePath()},
new String[] {"image/png"},
null);
}
catch (Exception e) {
Log.e(getClass().getSimpleName(), "Exception writing out screenshot", e);
}
}
}.start();
beeper.startTone(ToneGenerator.TONE_PROP_ACK);
stopCapture();
}
private void stopCapture() {
if (projection!=null) {
projection.stop();
vdisplay.release();
projection=null;
}
}
private void startCapture() {
projection = mediaProjectionManager.getMediaProjection(resultCode, resultData);
ImageTransmogrifier it = new ImageTransmogrifier(this);
MediaProjection.Callback cb = new MediaProjection.Callback() {
@Override
public void onStop() {
vdisplay.release();
}
};
vdisplay=projection.createVirtualDisplay("shooter",
it.getWidth(), it.getHeight(),
getResources().getDisplayMetrics().densityDpi,
VIRT_DISPLAY_FLAGS, it.getSurface(), null, handler);
projection.registerCallback(cb, handler);
}
private void foregroundify() {
NotificationCompat.Builder builder = new NotificationCompat.Builder(this);
builder.setAutoCancel(true)
.setDefaults(Notification.DEFAULT_ALL);
builder.setContentTitle(getString(R.string.app_name))
.setSmallIcon(R.drawable.rec_icon)
.setTicker(getString(R.string.app_name));
builder.addAction(R.drawable.ic_record_white_24dp,
getString(R.string.notify_record),
buildPendingIntent(ACTION_RECORD));
builder.addAction(R.drawable.ic_eject_white_24dp,
getString(R.string.notify_shutdown),
buildPendingIntent(ACTION_SHUTDOWN));
startForeground(NOTIFY_ID, builder.build());
}
private PendingIntent buildPendingIntent(String action) {
Intent i=new Intent(this, getClass());
i.setAction(action);
return(PendingIntent.getService(this, 0, i, 0));
}
}
and also imagetransmofrifier class:
public class ImageTransmogrifier implements ImageReader.OnImageAvailableListener {
private final int width;
private final int height;
private final ImageReader imageReader;
private final ScreenShotService svc;
private Bitmap latestBitmap=null;
ImageTransmogrifier(ScreenShotService svc) {
this.svc=svc;
Display display=svc.getWindowManager().getDefaultDisplay();
Point size=new Point();
display.getSize(size);
int width=size.x;
int height=size.y;
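// Halve the capture dimensions until the image is at most 2^20 pixels
// (about one megapixel), keeping the ImageReader buffers small.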
while (width*height > (2<<19)) {
width=width>>1;
height=height>>1;
}
this.width=width;
this.height=height;
imageReader=ImageReader.newInstance(width, height,
PixelFormat.RGBA_8888, 2);
imageReader.setOnImageAvailableListener(this, svc.getHandler());
}
#Override
public void onImageAvailable(ImageReader reader) {
final Image image=imageReader.acquireLatestImage();
if (image!=null) {
Image.Plane[] planes=image.getPlanes();
ByteBuffer buffer=planes[0].getBuffer();
int pixelStride=planes[0].getPixelStride();
int rowStride=planes[0].getRowStride();
int rowPadding=rowStride - pixelStride * width;
int bitmapWidth=width + rowPadding / pixelStride;
if (latestBitmap == null ||
latestBitmap.getWidth() != bitmapWidth ||
latestBitmap.getHeight() != height) {
if (latestBitmap != null) {
latestBitmap.recycle();
}
latestBitmap=Bitmap.createBitmap(bitmapWidth,
height, Bitmap.Config.ARGB_8888);
}
latestBitmap.copyPixelsFromBuffer(buffer);
if (image != null) {
image.close();
}
ByteArrayOutputStream baos=new ByteArrayOutputStream();
Bitmap cropped=Bitmap.createBitmap(latestBitmap, 0, 0,
width, height);
cropped.compress(Bitmap.CompressFormat.PNG, 100, baos);
byte[] newPng=baos.toByteArray();
svc.processImage(newPng);
}
}
Surface getSurface() {
return(imageReader.getSurface());
}
int getWidth() {
return(width);
}
int getHeight() {
return(height);
}
void close() {
imageReader.close();
}
}
It only works when you hit the Record button in the service's notification. Then you'll be able to find the screenshot in the directory the author suggested.
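As for changing the directory: the output location is the File built in processImage(). A minimal sketch that writes each shot into a subfolder instead (folder and file names are illustrative):
File dir = new File(getExternalFilesDir(null), "screenshots");
dir.mkdirs(); // make sure the folder exists
File output = new File(dir, "screenshot_" + System.currentTimeMillis() + ".png");
// ...write the PNG bytes to 'output' exactly as processImage() already does,
// then point your RecyclerView adapter at dir.listFiles() to load every saved image.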
public class Demo extends LayoutGameActivity implements SampleApplicationControl,SampleAppMenuInterface {
private static final int CAMERA_WIDTH = 720;
private static final int CAMERA_HEIGHT = 480;
private static final float AUTOWRAP_WIDTH = 720 - 50 - 50;
private EditText mEditText;
private Font mFont;
private Text mText;
private Line mRight;
private Line mLeft;
private static final String LOGTAG = "ImageTargets";
SampleApplicationSession vuforiaAppSession;
private DataSet mCurrentDataset;
private int mCurrentDatasetSelectionIndex = 0;
private int mStartDatasetsIndex = 0;
private int mDatasetsNumber = 0;
private ArrayList<String> mDatasetStrings = new ArrayList<String>();
// Our OpenGL view:
private SampleApplicationGLView mGlView;
// Our renderer:
private ImageTargetRenderer mRenderer;
private GestureDetector mGestureDetector;
// The textures we will use for rendering:
private Vector<Texture> mTextures;
private boolean mSwitchDatasetAsap = false;
private boolean mFlash = false;
private boolean mContAutofocus = false;
private boolean mExtendedTracking = false;
private View mFlashOptionView;
private RelativeLayout mUILayout;
private SampleAppMenu mSampleAppMenu;
LoadingDialogHandler loadingDialogHandler = new LoadingDialogHandler(this);
// Alert Dialog used to display SDK errors
private AlertDialog mErrorDialog;
boolean mIsDroidDevice = false;
// Called when the activity first starts or the user navigates back to an
// activity.
// Process Single Tap event to trigger autofocus
private class GestureListener extends
GestureDetector.SimpleOnGestureListener
{
// Used to set autofocus one second after a manual focus is triggered
private final Handler autofocusHandler = new Handler();
@Override
public boolean onDown(MotionEvent e)
{
return true;
}
@Override
public boolean onSingleTapUp(MotionEvent e)
{
// Generates a Handler to trigger autofocus
// after 1 second
autofocusHandler.postDelayed(new Runnable()
{
public void run()
{
boolean result = CameraDevice.getInstance().setFocusMode(
CameraDevice.FOCUS_MODE.FOCUS_MODE_TRIGGERAUTO);
if (!result)
Log.e("SingleTapUp", "Unable to trigger focus");
}
}, 1000L);
return true;
}
}
// We want to load specific textures from the APK, which we will later use
// for rendering.
private void loadTextures()
{
mTextures.add(Texture.loadTextureFromApk("TextureTeapotBrass.png",
getAssets()));
mTextures.add(Texture.loadTextureFromApk("TextureTeapotBlue.png",
getAssets()));
mTextures.add(Texture.loadTextureFromApk("TextureTeapotRed.png",
getAssets()));
mTextures.add(Texture.loadTextureFromApk("ImageTargets/Buildings.jpeg",
getAssets()));
}
// Called when the activity will start interacting with the user.
@Override
protected void onResume()
{
Log.d(LOGTAG, "onResume");
super.onResume();
vuforiaAppSession = new SampleApplicationSession(this);
startLoadingAnimation();
mDatasetStrings.add("StonesAndChips.xml");
mDatasetStrings.add("Tarmac.xml");
// mDatasetStrings.add("my_first.xml");
vuforiaAppSession
.initAR(this, ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
mGestureDetector = new GestureDetector(this, new GestureListener());
// Load any sample specific textures:
mTextures = new Vector<Texture>();
loadTextures();
mIsDroidDevice = android.os.Build.MODEL.toLowerCase().startsWith(
"droid");
// This is needed for some Droid devices to force portrait
if (mIsDroidDevice)
{
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
}
try
{
vuforiaAppSession.resumeAR();
} catch (SampleApplicationException e)
{
Log.e(LOGTAG, e.getString());
}
// Resume the GL view:
if (mGlView != null)
{
mGlView.setVisibility(View.VISIBLE);
mGlView.onResume();
}
}
// Callback for configuration changes the activity handles itself
@Override
public void onConfigurationChanged(Configuration config)
{
Log.d(LOGTAG, "onConfigurationChanged");
super.onConfigurationChanged(config);
vuforiaAppSession.onConfigurationChanged();
}
// Called when the system is about to start resuming a previous activity.
@Override
protected void onPause()
{
Log.d(LOGTAG, "onPause");
super.onPause();
if (mGlView != null)
{
mGlView.setVisibility(View.INVISIBLE);
mGlView.onPause();
}
// Turn off the flash
if (mFlashOptionView != null && mFlash)
{
// OnCheckedChangeListener is called upon changing the checked state
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1)
{
((Switch) mFlashOptionView).setChecked(false);
} else
{
((CheckBox) mFlashOptionView).setChecked(false);
}
}
try
{
vuforiaAppSession.pauseAR();
} catch (SampleApplicationException e)
{
Log.e(LOGTAG, e.getString());
}
}
// The final call you receive before your activity is destroyed.
@Override
protected void onDestroy()
{
Log.d(LOGTAG, "onDestroy");
super.onDestroy();
try
{
vuforiaAppSession.stopAR();
} catch (SampleApplicationException e)
{
Log.e(LOGTAG, e.getString());
}
// Unload texture:
mTextures.clear();
mTextures = null;
System.gc();
}
// Initializes AR application components.
private void initApplicationAR()
{
// Create OpenGL ES view:
int depthSize = 16;
int stencilSize = 0;
boolean translucent = Vuforia.requiresAlpha();
mGlView = new SampleApplicationGLView(this);
mGlView.init(translucent, depthSize, stencilSize);
mRenderer = new ImageTargetRenderer(this, vuforiaAppSession);
mRenderer.setTextures(mTextures);
mGlView.setRenderer(mRenderer);
}
private void startLoadingAnimation()
{
LayoutInflater inflater = LayoutInflater.from(this);
mUILayout = (RelativeLayout) inflater.inflate(R.layout.camera_overlay,
null, false);
mUILayout.setVisibility(View.VISIBLE);
mUILayout.setBackgroundColor(Color.BLACK);
// Gets a reference to the loading dialog
loadingDialogHandler.mLoadingDialogContainer = mUILayout
.findViewById(R.id.loading_indicator);
// Shows the loading indicator at start
loadingDialogHandler
.sendEmptyMessage(LoadingDialogHandler.SHOW_LOADING_DIALOG);
// Adds the inflated layout to the view
addContentView(mUILayout, new LayoutParams(LayoutParams.MATCH_PARENT,
LayoutParams.MATCH_PARENT));
}
// Methods to load and destroy tracking data.
@Override
public boolean doLoadTrackersData()
{
TrackerManager tManager = TrackerManager.getInstance();
ObjectTracker objectTracker = (ObjectTracker) tManager
.getTracker(ObjectTracker.getClassType());
if (objectTracker == null)
return false;
if (mCurrentDataset == null)
mCurrentDataset = objectTracker.createDataSet();
if (mCurrentDataset == null)
return false;
if (!mCurrentDataset.load(
mDatasetStrings.get(mCurrentDatasetSelectionIndex),
STORAGE_TYPE.STORAGE_APPRESOURCE))
return false;
if (!objectTracker.activateDataSet(mCurrentDataset))
return false;
int numTrackables = mCurrentDataset.getNumTrackables();
for (int count = 0; count < numTrackables; count++)
{
Trackable trackable = mCurrentDataset.getTrackable(count);
if(isExtendedTrackingActive())
{
trackable.startExtendedTracking();
}
String name = "Current Dataset : " + trackable.getName();
trackable.setUserData(name);
Log.d(LOGTAG, "UserData:Set the following user data "
+ (String) trackable.getUserData());
}
return true;
}
@Override
public boolean doUnloadTrackersData()
{
// Indicate if the trackers were unloaded correctly
boolean result = true;
TrackerManager tManager = TrackerManager.getInstance();
ObjectTracker objectTracker = (ObjectTracker) tManager
.getTracker(ObjectTracker.getClassType());
if (objectTracker == null)
return false;
if (mCurrentDataset != null && mCurrentDataset.isActive())
{
if (objectTracker.getActiveDataSet().equals(mCurrentDataset)
&& !objectTracker.deactivateDataSet(mCurrentDataset))
{
result = false;
} else if (!objectTracker.destroyDataSet(mCurrentDataset))
{
result = false;
}
mCurrentDataset = null;
}
return result;
}
@Override
public void onInitARDone(SampleApplicationException exception)
{
if (exception == null)
{
initApplicationAR();
mRenderer.mIsActive = true;
// Now add the GL surface view. It is important
// that the OpenGL ES surface view gets added
// BEFORE the camera is started and video
// background is configured.
addContentView(mGlView, new LayoutParams(LayoutParams.MATCH_PARENT,
LayoutParams.MATCH_PARENT));
// Sets the UILayout to be drawn in front of the camera
mUILayout.bringToFront();
// Sets the layout background to transparent
mUILayout.setBackgroundColor(Color.TRANSPARENT);
try
{
vuforiaAppSession.startAR(CameraDevice.CAMERA.CAMERA_DEFAULT);
} catch (SampleApplicationException e)
{
Log.e(LOGTAG, e.getString());
}
boolean result = CameraDevice.getInstance().setFocusMode(
CameraDevice.FOCUS_MODE.FOCUS_MODE_CONTINUOUSAUTO);
if (result)
mContAutofocus = true;
else
Log.e(LOGTAG, "Unable to enable continuous autofocus");
mSampleAppMenu = new SampleAppMenu(this, this, "Image Targets",
mGlView, mUILayout, null);
setSampleAppMenuSettings();
} else
{
Log.e(LOGTAG, exception.getString());
showInitializationErrorMessage(exception.getString());
}
}
// Shows initialization error messages as System dialogs
public void showInitializationErrorMessage(String message)
{
final String errorMessage = message;
runOnUiThread(new Runnable()
{
public void run()
{
if (mErrorDialog != null)
{
mErrorDialog.dismiss();
}
// Generates an Alert Dialog to show the error message
AlertDialog.Builder builder = new AlertDialog.Builder(
Demo.this);
builder
.setMessage(errorMessage)
.setTitle(getString(R.string.INIT_ERROR))
.setCancelable(false)
.setIcon(0)
.setPositiveButton(getString(R.string.button_OK),
new DialogInterface.OnClickListener()
{
public void onClick(DialogInterface dialog, int id)
{
finish();
}
});
mErrorDialog = builder.create();
mErrorDialog.show();
}
});
}
@Override
public void onQCARUpdate(State state)
{
if (mSwitchDatasetAsap)
{
mSwitchDatasetAsap = false;
TrackerManager tm = TrackerManager.getInstance();
ObjectTracker ot = (ObjectTracker) tm.getTracker(ObjectTracker
.getClassType());
if (ot == null || mCurrentDataset == null
|| ot.getActiveDataSet() == null)
{
Log.d(LOGTAG, "Failed to swap datasets");
return;
}
doUnloadTrackersData();
doLoadTrackersData();
}
}
@Override
public boolean doInitTrackers()
{
// Indicate if the trackers were initialized correctly
boolean result = true;
TrackerManager tManager = TrackerManager.getInstance();
Tracker tracker;
// Trying to initialize the image tracker
tracker = tManager.initTracker(ObjectTracker.getClassType());
if (tracker == null)
{
Log.e(
LOGTAG,
"Tracker not initialized. Tracker already initialized or the camera is already started");
result = false;
} else
{
Log.i(LOGTAG, "Tracker successfully initialized");
}
return result;
}
@Override
public boolean doStartTrackers()
{
// Indicate if the trackers were started correctly
boolean result = true;
Tracker objectTracker = TrackerManager.getInstance().getTracker(
ObjectTracker.getClassType());
if (objectTracker != null)
objectTracker.start();
return result;
}
@Override
public boolean doStopTrackers()
{
// Indicate if the trackers were stopped correctly
boolean result = true;
Tracker objectTracker = TrackerManager.getInstance().getTracker(
ObjectTracker.getClassType());
if (objectTracker != null)
objectTracker.stop();
return result;
}
@Override
public boolean doDeinitTrackers()
{
// Indicate if the trackers were deinitialized correctly
boolean result = true;
TrackerManager tManager = TrackerManager.getInstance();
tManager.deinitTracker(ObjectTracker.getClassType());
return result;
}
@Override
public boolean onTouchEvent(MotionEvent event)
{
// Process the Gestures
if (mSampleAppMenu != null && mSampleAppMenu.processEvent(event))
return true;
return mGestureDetector.onTouchEvent(event);
}
boolean isExtendedTrackingActive()
{
return mExtendedTracking;
}
final public static int CMD_BACK = -1;
final public static int CMD_EXTENDED_TRACKING = 1;
final public static int CMD_AUTOFOCUS = 2;
final public static int CMD_FLASH = 3;
final public static int CMD_CAMERA_FRONT = 4;
final public static int CMD_CAMERA_REAR = 5;
final public static int CMD_DATASET_START_INDEX = 6;
// This method sets the menu's settings
private void setSampleAppMenuSettings()
{
SampleAppMenuGroup group;
group = mSampleAppMenu.addGroup("", false);
group.addTextItem(getString(R.string.menu_back), -1);
group = mSampleAppMenu.addGroup("", true);
group.addSelectionItem(getString(R.string.menu_extended_tracking),
CMD_EXTENDED_TRACKING, false);
group.addSelectionItem(getString(R.string.menu_contAutofocus),
CMD_AUTOFOCUS, mContAutofocus);
mFlashOptionView = group.addSelectionItem(
getString(R.string.menu_flash), CMD_FLASH, false);
CameraInfo ci = new CameraInfo();
boolean deviceHasFrontCamera = false;
boolean deviceHasBackCamera = false;
for (int i = 0; i < Camera.getNumberOfCameras(); i++)
{
Camera.getCameraInfo(i, ci);
if (ci.facing == CameraInfo.CAMERA_FACING_FRONT)
deviceHasFrontCamera = true;
else if (ci.facing == CameraInfo.CAMERA_FACING_BACK)
deviceHasBackCamera = true;
}
if (deviceHasBackCamera && deviceHasFrontCamera)
{
group = mSampleAppMenu.addGroup(getString(R.string.menu_camera),
true);
group.addRadioItem(getString(R.string.menu_camera_front),
CMD_CAMERA_FRONT, false);
group.addRadioItem(getString(R.string.menu_camera_back),
CMD_CAMERA_REAR, true);
}
group = mSampleAppMenu
.addGroup(getString(R.string.menu_datasets), true);
mStartDatasetsIndex = CMD_DATASET_START_INDEX;
mDatasetsNumber = mDatasetStrings.size();
group.addRadioItem("Stones & Chips", mStartDatasetsIndex, true);
group.addRadioItem("Tarmac", mStartDatasetsIndex + 1, false);
// group.addRadioItem("my_first", mStartDatasetsIndex + 2, false);
mSampleAppMenu.attachMenu();
}
@SuppressLint("NewApi") @Override
public boolean menuProcess(int command)
{
boolean result = true;
switch (command)
{
case CMD_BACK:
finish();
break;
case CMD_FLASH:
result = CameraDevice.getInstance().setFlashTorchMode(!mFlash);
if (result)
{
mFlash = !mFlash;
} else
{
showToast(getString(mFlash ? R.string.menu_flash_error_off
: R.string.menu_flash_error_on));
Log.e(LOGTAG,
getString(mFlash ? R.string.menu_flash_error_off
: R.string.menu_flash_error_on));
}
break;
case CMD_AUTOFOCUS:
if (mContAutofocus)
{
result = CameraDevice.getInstance().setFocusMode(
CameraDevice.FOCUS_MODE.FOCUS_MODE_NORMAL);
if (result)
{
mContAutofocus = false;
} else
{
showToast(getString(R.string.menu_contAutofocus_error_off));
Log.e(LOGTAG,
getString(R.string.menu_contAutofocus_error_off));
}
} else
{
result = CameraDevice.getInstance().setFocusMode(
CameraDevice.FOCUS_MODE.FOCUS_MODE_CONTINUOUSAUTO);
if (result)
{
mContAutofocus = true;
} else
{
showToast(getString(R.string.menu_contAutofocus_error_on));
Log.e(LOGTAG,
getString(R.string.menu_contAutofocus_error_on));
}
}
break;
case CMD_CAMERA_FRONT:
case CMD_CAMERA_REAR:
// Turn off the flash
if (mFlashOptionView != null && mFlash)
{
// OnCheckedChangeListener is called upon changing the checked state
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1)
{
((Switch) mFlashOptionView).setChecked(false);
} else
{
((CheckBox) mFlashOptionView).setChecked(false);
}
}
vuforiaAppSession.stopCamera();
try
{
vuforiaAppSession
.startAR(command == CMD_CAMERA_FRONT ? CameraDevice.CAMERA.CAMERA_FRONT
: CameraDevice.CAMERA.CAMERA_BACK);
} catch (SampleApplicationException e)
{
showToast(e.getString());
Log.e(LOGTAG, e.getString());
result = false;
}
doStartTrackers();
break;
case CMD_EXTENDED_TRACKING:
for (int tIdx = 0; tIdx < mCurrentDataset.getNumTrackables(); tIdx++)
{
Trackable trackable = mCurrentDataset.getTrackable(tIdx);
if (!mExtendedTracking)
{
if (!trackable.startExtendedTracking())
{
Log.e(LOGTAG,
"Failed to start extended tracking target");
result = false;
} else
{
Log.d(LOGTAG,
"Successfully started extended tracking target");
}
} else
{
if (!trackable.stopExtendedTracking())
{
Log.e(LOGTAG,
"Failed to stop extended tracking target");
result = false;
} else
{
Log.d(LOGTAG,
"Successfully started extended tracking target");
}
}
}
if (result)
mExtendedTracking = !mExtendedTracking;
break;
default:
if (command >= mStartDatasetsIndex
&& command < mStartDatasetsIndex + mDatasetsNumber)
{
mSwitchDatasetAsap = true;
mCurrentDatasetSelectionIndex = command
- mStartDatasetsIndex;
}
break;
}
return result;
}
private void showToast(String text)
{
Toast.makeText(this, text, Toast.LENGTH_SHORT).show();
}
@Override
protected void onCreate(Bundle pSavedInstanceState) {
// TODO Auto-generated method stub
super.onCreate(pSavedInstanceState);
}
@Override
public EngineOptions onCreateEngineOptions() {
final org.andengine.engine.camera.Camera camera = new org.andengine.engine.camera.Camera(0, 0, Demo.CAMERA_WIDTH, Demo.CAMERA_HEIGHT);
return new EngineOptions(true, ScreenOrientation.PORTRAIT_FIXED, new RatioResolutionPolicy(Demo.CAMERA_WIDTH, Demo.CAMERA_HEIGHT), camera);
}
private void updateText() {
final String string = this.mEditText.getText().toString();
this.mText.setText(string);
final float left = (this.mText.getWidth() * 0.5f) - (this.mText.getLineWidthMaximum() * 0.5f);
this.mLeft.setPosition(left, 0, left, Demo.CAMERA_HEIGHT);
final float right = (this.mText.getWidth() * 0.5f) + (this.mText.getLineWidthMaximum() * 0.5f);
this.mRight.setPosition(right, 0, right, Demo.CAMERA_HEIGHT);
}
@Override
protected int getLayoutID() {
// TODO Auto-generated method stub
return R.layout.camera_overlay;
}
@Override
protected int getRenderSurfaceViewID() {
// TODO Auto-generated method stub
return R.id.textbreakexample_rendersurfaceview;
}
@Override
protected void onSetContentView() {
// TODO Auto-generated method stub
super.onSetContentView();
}
@Override
public void onCreateResources(
OnCreateResourcesCallback pOnCreateResourcesCallback)
throws IOException {
// TODO Auto-generated method stub
}
@Override
public void onCreateScene(OnCreateSceneCallback pOnCreateSceneCallback)
throws IOException {
// TODO Auto-generated method stub
}
@Override
public void onPopulateScene(Scene pScene,
OnPopulateSceneCallback pOnPopulateSceneCallback)
throws IOException {
// TODO Auto-generated method stub
}
}
I am using AndEngine for developing a game, and I also use Vuforia for AR (Augmented Reality). But if I combine both code bases, I get an error: the SurfaceView is null.
And if this combination is not supported, then what is the use of augmented-reality object tracking in AndEngine?
I am making an app in which I have to stream a YouTube video and show it in my app, but I am getting the error message "An error occurred during the retrieval of the video." My code snippet is as follows:
public class Rads extends Activity {
public static final String SCHEME_YOUTUBE_VIDEO = "ytv";
public static final String SCHEME_YOUTUBE_PLAYLIST = "ytpl";
static final String YOUTUBE_VIDEO_INFORMATION_URL = "http://www.youtube.com/get_video_info?&video_id=";
static final String YOUTUBE_PLAYLIST_ATOM_FEED_URL = "http://gdata.youtube.com/feeds/api/playlists/";
protected ProgressBar mProgressBar;
protected TextView mProgressMessage;
protected VideoView mVideoView;
public final static String MSG_INIT = "com.keyes.video.msg.init";
protected String mMsgInit = "Initializing";
public final static String MSG_DETECT = "com.keyes.video.msg.detect";
protected String mMsgDetect = "Detecting Bandwidth";
public final static String MSG_PLAYLIST = "com.keyes.video.msg.playlist";
protected String mMsgPlaylist = "Determining Latest Video in YouTube Playlist";
public final static String MSG_TOKEN = "com.keyes.video.msg.token";
protected String mMsgToken = "Retrieving YouTube Video Token";
public final static String MSG_LO_BAND = "com.keyes.video.msg.loband";
protected String mMsgLowBand = "Buffering Low-bandwidth Video";
public final static String MSG_HI_BAND = "com.keyes.video.msg.hiband";
protected String mMsgHiBand = "Buffering High-bandwidth Video";
public final static String MSG_ERROR_TITLE = "com.keyes.video.msg.error.title";
protected String mMsgErrorTitle = "Communications Error";
public final static String MSG_ERROR_MSG = "com.keyes.video.msg.error.msg";
protected String mMsgError = "An error occurred during the retrieval of the video. This could be due to network issues or YouTube protocols. Please try again later.";
/** Background task on which all of the interaction with YouTube is done */
protected QueryYouTubeTask mQueryYouTubeTask;
protected String mVideoId = null;
@Override
protected void onCreate(Bundle pSavedInstanceState) {
super.onCreate(pSavedInstanceState);
this.requestWindowFeature(Window.FEATURE_NO_TITLE);
this.getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
// create the layout of the view
setupView();
// determine the messages to be displayed as the view loads the video
extractMessages();
// set the flag to keep the screen ON so that the video can play without the screen being turned off
getWindow().setFlags(android.view.WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON, android.view.WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
mProgressBar.bringToFront();
mProgressBar.setVisibility(View.VISIBLE);
mProgressMessage.setText(mMsgInit);
// extract the playlist or video id from the intent that started this video
Uri lVideoIdUri = this.getIntent().getData();
if(lVideoIdUri == null){
Log.i(this.getClass().getSimpleName(), "No video ID was specified in the intent. Closing video activity.");
finish();
}
String lVideoSchemeStr = lVideoIdUri.getScheme();
String lVideoIdStr = lVideoIdUri.getEncodedSchemeSpecificPart();
if(lVideoIdStr == null){
Log.i(this.getClass().getSimpleName(), "No video ID was specified in the intent. Closing video activity.");
finish();
}
if(lVideoIdStr.startsWith("//")){
if(lVideoIdStr.length() > 2){
lVideoIdStr = lVideoIdStr.substring(2);
} else {
Log.i(this.getClass().getSimpleName(), "No video ID was specified in the intent. Closing video activity.");
finish();
}
}
///////////////////
// extract either a video id or a playlist id, depending on the uri scheme
YouTubeId lYouTubeId = null;
if(lVideoSchemeStr != null && lVideoSchemeStr.equalsIgnoreCase(SCHEME_YOUTUBE_PLAYLIST)){
lYouTubeId = new PlaylistId(lVideoIdStr);
}
else if(lVideoSchemeStr != null && lVideoSchemeStr.equalsIgnoreCase(SCHEME_YOUTUBE_VIDEO)){
lYouTubeId = new VideoId(lVideoIdStr);
}
if(lYouTubeId == null){
Log.i(this.getClass().getSimpleName(), "Unable to extract video ID from the intent. Closing video activity.");
finish();
}
mQueryYouTubeTask = (QueryYouTubeTask) new QueryYouTubeTask().execute(lYouTubeId);
}
/**
* Determine the messages to display during video load and initialization.
*/
private void extractMessages() {
Intent lInvokingIntent = getIntent();
String lMsgInit = lInvokingIntent.getStringExtra(MSG_INIT);
if(lMsgInit != null){
mMsgInit = lMsgInit;
}
String lMsgDetect = lInvokingIntent.getStringExtra(MSG_DETECT);
if(lMsgDetect != null){
mMsgDetect = lMsgDetect;
}
String lMsgPlaylist = lInvokingIntent.getStringExtra(MSG_PLAYLIST);
if(lMsgPlaylist != null){
mMsgPlaylist = lMsgPlaylist;
}
String lMsgToken = lInvokingIntent.getStringExtra(MSG_TOKEN);
if(lMsgToken != null){
mMsgToken = lMsgToken;
}
String lMsgLoBand = lInvokingIntent.getStringExtra(MSG_LO_BAND);
if(lMsgLoBand != null){
mMsgLowBand = lMsgLoBand;
}
String lMsgHiBand = lInvokingIntent.getStringExtra(MSG_HI_BAND);
if(lMsgHiBand != null){
mMsgHiBand = lMsgHiBand;
}
String lMsgErrTitle = lInvokingIntent.getStringExtra(MSG_ERROR_TITLE);
if(lMsgErrTitle != null){
mMsgErrorTitle = lMsgErrTitle;
}
String lMsgErrMsg = lInvokingIntent.getStringExtra(MSG_ERROR_MSG);
if(lMsgErrMsg != null){
mMsgError = lMsgErrMsg;
}
}
/**
* Create the view in which the video will be rendered.
*/
private void setupView() {
LinearLayout lLinLayout = new LinearLayout(this);
lLinLayout.setId(1);
lLinLayout.setOrientation(LinearLayout.VERTICAL);
lLinLayout.setGravity(Gravity.CENTER);
lLinLayout.setBackgroundColor(Color.BLACK);
LayoutParams lLinLayoutParms = new LayoutParams(ViewGroup.LayoutParams.FILL_PARENT, ViewGroup.LayoutParams.FILL_PARENT);
lLinLayout.setLayoutParams(lLinLayoutParms);
this.setContentView(lLinLayout);
RelativeLayout lRelLayout = new RelativeLayout(this);
lRelLayout.setId(2);
lRelLayout.setGravity(Gravity.CENTER);
lRelLayout.setBackgroundColor(Color.BLACK);
android.widget.RelativeLayout.LayoutParams lRelLayoutParms = new android.widget.RelativeLayout.LayoutParams(ViewGroup.LayoutParams.FILL_PARENT, ViewGroup.LayoutParams.FILL_PARENT);
lRelLayout.setLayoutParams(lRelLayoutParms);
lLinLayout.addView(lRelLayout);
mVideoView = new VideoView(this);
mVideoView.setId(3);
android.widget.RelativeLayout.LayoutParams lVidViewLayoutParams = new android.widget.RelativeLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT);
lVidViewLayoutParams.addRule(RelativeLayout.CENTER_IN_PARENT);
mVideoView.setLayoutParams(lVidViewLayoutParams);
lRelLayout.addView(mVideoView);
mProgressBar = new ProgressBar(this);
mProgressBar.setIndeterminate(true);
mProgressBar.setVisibility(View.VISIBLE);
mProgressBar.setEnabled(true);
mProgressBar.setId(4);
android.widget.RelativeLayout.LayoutParams lProgressBarLayoutParms = new android.widget.RelativeLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT);
lProgressBarLayoutParms.addRule(RelativeLayout.CENTER_IN_PARENT);
mProgressBar.setLayoutParams(lProgressBarLayoutParms);
lRelLayout.addView(mProgressBar);
mProgressMessage = new TextView(this);
mProgressMessage.setId(5);
android.widget.RelativeLayout.LayoutParams lProgressMsgLayoutParms = new android.widget.RelativeLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT);
lProgressMsgLayoutParms.addRule(RelativeLayout.CENTER_HORIZONTAL);
lProgressMsgLayoutParms.addRule(RelativeLayout.BELOW, 4);
mProgressMessage.setLayoutParams(lProgressMsgLayoutParms);
mProgressMessage.setTextColor(Color.LTGRAY);
mProgressMessage.setTextSize(TypedValue.COMPLEX_UNIT_SP, 12);
mProgressMessage.setText("...");
lRelLayout.addView(mProgressMessage);
}
@Override
protected void onDestroy() {
super.onDestroy();
YouTubeUtility.markVideoAsViewed(this, mVideoId);
if(mQueryYouTubeTask != null){
mQueryYouTubeTask.cancel(true);
}
if(mVideoView != null){
mVideoView.stopPlayback();
}
// clear the flag that keeps the screen ON
getWindow().clearFlags(android.view.WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
this.mQueryYouTubeTask = null;
this.mVideoView = null;
}
public void updateProgress(String pProgressMsg){
try {
mProgressMessage.setText(pProgressMsg);
} catch(Exception e) {
Log.e(this.getClass().getSimpleName(), "Error updating video status!", e);
}
}
private class ProgressUpdateInfo {
public String mMsg;
public ProgressUpdateInfo(String pMsg){
mMsg = pMsg;
}
}
/**
* Task that retrieves the video details by calling out to the YouTube GData API. Only public
* methods that don't require authentication are used.
*/
private class QueryYouTubeTask extends AsyncTask<YouTubeId, ProgressUpdateInfo, Uri> {
private boolean mShowedError = false;
@Override
protected Uri doInBackground(YouTubeId... pParams) {
String lUriStr = null;
String lYouTubeFmtQuality = "17"; // 3gpp medium quality, which should be fast enough to view over EDGE connection
String lYouTubeVideoId = null;
if(isCancelled())
return null;
try {
publishProgress(new ProgressUpdateInfo(mMsgDetect));
WifiManager lWifiManager = (WifiManager) Rads.this.getSystemService(Context.WIFI_SERVICE);
TelephonyManager lTelephonyManager = (TelephonyManager) Rads.this.getSystemService(Context.TELEPHONY_SERVICE);
////////////////////////////
// if we have a fast connection (wifi or 3g), then we'll get a high quality YouTube video
if( (lWifiManager.isWifiEnabled() && lWifiManager.getConnectionInfo() != null && lWifiManager.getConnectionInfo().getIpAddress() != 0) ||
( (lTelephonyManager.getNetworkType() == TelephonyManager.NETWORK_TYPE_UMTS ||
/* icky... using literals to make backwards compatible with 1.5 and 1.6 */
lTelephonyManager.getNetworkType() == 9 /*HSUPA*/ ||
lTelephonyManager.getNetworkType() == 10 /*HSPA*/ ||
lTelephonyManager.getNetworkType() == 8 /*HSDPA*/ ||
lTelephonyManager.getNetworkType() == 5 /*EVDO_0*/ ||
lTelephonyManager.getNetworkType() == 6 /*EVDO A*/)
&& lTelephonyManager.getDataState() == TelephonyManager.DATA_CONNECTED)
){
lYouTubeFmtQuality = "18";
}
///////////////////////////////////
// if the intent is to show a playlist, get the latest video id from the playlist, otherwise the video
// id was explicitly declared.
if(pParams[0] instanceof PlaylistId){
publishProgress(new ProgressUpdateInfo(mMsgPlaylist));
lYouTubeVideoId = YouTubeUtility.queryLatestPlaylistVideo((PlaylistId) pParams[0]);
}
else if(pParams[0] instanceof VideoId){
lYouTubeVideoId = pParams[0].getId();
}
mVideoId = lYouTubeVideoId;
publishProgress(new ProgressUpdateInfo(mMsgToken));
if(isCancelled())
return null;
////////////////////////////////////
// calculate the actual URL of the video, encoded with proper YouTube token
lUriStr = YouTubeUtility.calculateYouTubeUrl(lYouTubeFmtQuality, true, lYouTubeVideoId);
if(isCancelled())
return null;
if(lYouTubeFmtQuality.equals("17")){
publishProgress(new ProgressUpdateInfo(mMsgLowBand));
} else {
publishProgress(new ProgressUpdateInfo(mMsgHiBand));
}
} catch(Exception e) {
Log.e(this.getClass().getSimpleName(), "Error occurred while retrieving information from YouTube.", e);
}
if(lUriStr != null){
return Uri.parse(lUriStr);
} else {
return null;
}
}
@Override
protected void onPostExecute(Uri pResult) {
super.onPostExecute(pResult);
try {
if(isCancelled())
return;
if(pResult == null){
throw new RuntimeException("Invalid NULL Url.");
}
mVideoView.setVideoURI(pResult);
if(isCancelled())
return;
// finish this activity once the video completes playing
mVideoView.setOnCompletionListener(new OnCompletionListener(){
public void onCompletion(MediaPlayer pMp) {
if(isCancelled())
return;
Rads.this.finish();
}
});
if(isCancelled())
return;
final MediaController lMediaController = new MediaController(Rads.this);
mVideoView.setMediaController(lMediaController);
lMediaController.show(0);
//mVideoView.setKeepScreenOn(true);
mVideoView.setOnPreparedListener( new MediaPlayer.OnPreparedListener() {
public void onPrepared(MediaPlayer pMp) {
if(isCancelled())
return;
Rads.this.mProgressBar.setVisibility(View.GONE);
Rads.this.mProgressMessage.setVisibility(View.GONE);
}
});
if(isCancelled())
return;
mVideoView.requestFocus();
mVideoView.start();
} catch(Exception e){
Log.e(this.getClass().getSimpleName(), "Error playing video!", e);
if(!mShowedError){
showErrorAlert();
}
}
}
private void showErrorAlert() {
try {
Builder lBuilder = new AlertDialog.Builder(Rads.this);
lBuilder.setTitle(mMsgErrorTitle);
lBuilder.setCancelable(false);
lBuilder.setMessage(mMsgError);
lBuilder.setPositiveButton("OK", new DialogInterface.OnClickListener(){
public void onClick(DialogInterface pDialog, int pWhich) {
Rads.this.finish();
}
});
AlertDialog lDialog = lBuilder.create();
lDialog.show();
} catch(Exception e){
Log.e(this.getClass().getSimpleName(), "Problem showing error dialog.", e);
}
}
@Override
protected void onProgressUpdate(ProgressUpdateInfo... pValues) {
super.onProgressUpdate(pValues);
Rads.this.updateProgress(pValues[0].mMsg);
}
}
@Override
protected void onStart() {
super.onStart();
}
@Override
protected void onStop() {
super.onStop();
}
}
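For completeness, here is a minimal sketch of how an activity like this is typically launched. This is an assumption-laden example, not part of the class above: it assumes the activity class is named Rads as in the code, that it is declared in AndroidManifest.xml, and that SCHEME_YOUTUBE_VIDEO resolves to the "ytv" scheme (the value used by the OpenYouTubeActivity library this code appears to be based on).
import android.content.Context;
import android.content.Intent;
import android.net.Uri;

public final class VideoLauncher {
    /** Hypothetical helper: starts the player activity above for a single video ID. */
    public static void playVideo(Context pContext, String pVideoId) {
        // onCreate() strips a leading "//" from the scheme-specific part,
        // so "ytv://VIDEO_ID" yields the bare video ID
        Intent lIntent = new Intent(null, Uri.parse("ytv://" + pVideoId), pContext, Rads.class);
        // optional: override the status/error strings read by extractMessages()
        lIntent.putExtra(Rads.MSG_ERROR_TITLE, "Playback Error");
        lIntent.putExtra(Rads.MSG_ERROR_MSG, "Could not load the video. Please try again.");
        pContext.startActivity(lIntent);
    }
}
Because onCreate() kicks off a QueryYouTubeTask, the caller does not need to do anything else: the activity resolves the stream URL and starts playback on its own.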