Libgdx - scrolling shop - android

Making a shop (about 50 objects in it that can be bought), so I need a scrollable area, as not everything can fit on the screen at once.
This is the part when you are holding and sliding your finger.
/**
 * Drag-to-scroll handler: moves the camera vertically while the finger is down.
 *
 * @param x, y          current touch position (screen pixels)
 * @param deltaX, deltaY distance travelled since the last pan event (screen pixels)
 * @return true to indicate the event was consumed
 */
public boolean pan(float x, float y, float deltaX, float deltaY) {
    // Convert screen-pixel deltas to world units so the content tracks the
    // finger 1:1 regardless of screen resolution.
    float scaleY = viewport.getWorldHeight() / (float) viewport.getScreenHeight();
    // Bug fix: the original cast the translation to (int), which truncated
    // sub-pixel movement and made slow drags stutter; Camera.translate takes floats.
    camera.translate(0f, deltaY * scaleY);
    return true;
}
The moment you release the finger this fires up.
/**
 * Fling handler: captures the release velocity so the render loop can keep
 * scrolling the camera and decelerate it over time.
 *
 * @return false so other listeners may also observe the fling
 */
public boolean fling(float velocityX, float velocityY, int button) {
    // Convert the vertical fling velocity from screen pixels/sec to world units/sec.
    final float pixelsToWorld = viewport.getWorldHeight() / (float) viewport.getScreenHeight();
    translation = velocityY * pixelsToWorld;
    return false;
}
The logic to move the camera from fling is this:
// Fling deceleration, run once per frame: advances the camera by the current
// fling velocity ('translation') and decays that velocity toward zero.
if (translation != 0) {
// Move by this frame's share of the remaining velocity.
camera.translate(0, translation * delta);
if (translation >= 0) {
up = true;
translation -= increasing * delta;
} else {
up = false;
translation += increasing * delta;
}
// Stop once the velocity crosses zero so the decay cannot overshoot and
// begin scrolling back in the opposite direction.
if (translation <= 0 && up) {
translation = 0;
} else if (translation >= 0 && !up) {
translation = 0;
}
// The deceleration itself grows over time, so the fling is braked
// progressively harder; 10000 is a hand-tuned constant.
increasing += 10000*delta;
}
So I have been playing around with these a little but cannot get a perfect scrolling speed. Has anybody done this before and knows the perfect starting speed ('translation') and stopping speed ('increasing')?

Related

ARCore OpenGL ES Moving Object using onScroll

I'm attempting to add in the moving of objects around with single finger scrolling. (See Google AR Stickers for example) I'm using native ARCore/OpenGL based originally off the ARCore examples. When you slide 1 finger, I want to move the object in 3D space along the X and Z axes.
I can easily create the movement using Translation, however, it performs the movement based on the original camera orientation. If you physically move the phone/camera a few lateral steps, the finger movements no longer match what a user would expect.
So I changed up my code and mapped the finger distanceX and distanceY that is swiped to affect X and Z coordinates depending on the angle of change from the original camera starting point to the current camera point.
The issue that I'm running into is determining the angle at which the camera has been moved. I've been looking at the value from the Camera View matrix:
camera.getViewMatrix(viewmtx, 0);
But the X, Y, and Z coordinates always say 0. I'm assuming this is because it's always making the camera the origin? Does anyone know of a way to calculate the angle of rotation of a camera from a 3D object using the ARCore/OpenGL ES libraries? The red angle in the illustration below (from top-down perspective) is what I'm trying to get. Sorry for the crude drawing:
Here's my code for your reference:
// Handle Gestures - Single touch for Translation and Placement
// Single-finger gesture handling: taps place objects; scrolls translate the
// most recently placed object along the world X/Z plane, remapping the finger
// deltas by how far the camera has swung around since the object was anchored.
// NOTE(review): '#Override' is a formatting artifact and should read
// '@Override'; the '...' in the pointer-count check is elided code.
mGestureDetector = new GestureDetector(this, new GestureDetector.SimpleOnGestureListener()
{
#Override
public boolean onSingleTapUp(MotionEvent e)
{
onSingleTap(e);
return true;
}
#Override
public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY)
{
if (e2.getPointerCount() == 1 && ...)
{
// Heading change (degrees, 0-360) of the camera relative to where the
// object was placed; selects which quadrant blend to apply below.
double angle = findCameraAngleFromOrigin();
double speed = 0.005d;
if (angle / 90d < 1) //Quadrant 1
{
// Blend finger X/Y into world X/Z proportionally to how far into the
// quadrant the camera has rotated (0 deg = pure X, 90 deg = pure Y).
double transX = -(distanceY * (angle / 90d)) + (distanceX * ((90d - angle) / 90d));
double transY = (distanceY * ((90d - angle) / 90d)) + (distanceX * (angle / 90d));
mTranslationX.set(mTranslationX.size() - 1, (float) (mTranslationX.get(mTranslationX.size() - 1) + (transX * -speed)));
mTranslationZ.set(mTranslationZ.size() - 1, (float) (mTranslationZ.get(mTranslationZ.size() - 1) + (transY * -speed)));
}
else if (angle / 90d < 2) //Quadrant 2
{
angle -= 90d;
double transX = (distanceX * (angle / 90d)) + (distanceY * ((90d - angle) / 90d));
double transY = (-distanceX * ((90d - angle) / 90d)) + (distanceY * (angle / 90d));
mTranslationX.set(mTranslationX.size() - 1, (float) (mTranslationX.get(mTranslationX.size() - 1) + (transX * speed)));
mTranslationZ.set(mTranslationZ.size() - 1, (float) (mTranslationZ.get(mTranslationZ.size() - 1) + (transY * speed)));
}
else if (angle / 90d < 3) //Quadrant 3
{
angle -= 180d;
double transX = (distanceY * (angle / 90d)) + (-distanceX * ((90d - angle) / 90d));
double transY = (-distanceY * ((90d - angle) / 90d)) + (-distanceX * (angle / 90d));
mTranslationX.set(mTranslationX.size() - 1, (float) (mTranslationX.get(mTranslationX.size() - 1) + (transX * -speed)));
mTranslationZ.set(mTranslationZ.size() - 1, (float) (mTranslationZ.get(mTranslationZ.size() - 1) + (transY * -speed)));
}
else //Quadrant 4
{
angle -= 270d;
double transX = (-distanceX * (angle / 90d)) + (-distanceY * ((90d - angle) / 90d));
double transY = (distanceX * ((90d - angle) / 90d)) + (-distanceY * (angle / 90d));
mTranslationX.set(mTranslationX.size() - 1, (float) (mTranslationX.get(mTranslationX.size() - 1) + (transX * speed)));
mTranslationZ.set(mTranslationZ.size() - 1, (float) (mTranslationZ.get(mTranslationZ.size() - 1) + (transY * speed)));
}
return true;
}
return false;
}
}
EDIT: Update to code
I wasn't able to figure out how to find the distance from the camera points, but I was able to at least find the difference in the phone's rotation, which is closer. I accomplished it by the code below. I'm still not happy with the results, so I'll post updates later on when I find a more effective solution.
/**
 * Maps two direction components of the camera pose onto a heading in
 * [0, 360) degrees, logging which quadrant was taken.
 *
 * @param value1 component that determines the in-quadrant angle
 * @param value2 component whose sign selects the half-plane
 * @return heading in degrees, 0 (inclusive) to 360 (exclusive)
 */
private double getDegree(double value1, double value2)
{
    double primary = value1 * 90;
    double reference = value2 * 90;
    if (reference >= 0)
    {
        if (primary >= 0)
        {
            Log.d(TAG, "FIRST QUADRANT");
            return primary; // first quadrant
        }
        Log.d(TAG, "FOURTH QUADRANT");
        return 270 + (90 + primary); //fourth quadrant
    }
    if (primary >= 0)
    {
        Log.d(TAG, "SECOND QUADRANT");
        return 90 + (90 - primary); //second quadrant
    }
    Log.d(TAG, "THIRD QUADRANT");
    return 180 - primary; //third quadrant
}
/**
 * Heading delta (degrees) between the camera pose captured when the object
 * was placed and the current camera pose, normalized into [0, 360).
 */
private double findCameraAngleFromOrigin()
{
    double delta = getDegree(mCurrentCameraMatrix[2], mCurrentCameraMatrix[0])
            - getDegree(mOriginCameraMatrix[2], mOriginCameraMatrix[0]);
    return delta < 0 ? delta + 360 : delta;
}
// Per-frame GL callback (abridged excerpt; '...' marks elided code).
// Captures the camera pose at object-placement time and again every frame,
// then re-applies each anchor's accumulated translation/rotation before drawing.
// NOTE(review): '#Override' is a formatting artifact for '@Override'.
#Override
public void onDrawFrame(GL10 gl)
{
...
//When creating a new object
Anchor anchor = hit.createAnchor();
mAnchors.add(anchor);
// Remember the camera pose at placement time; scroll gestures are
// interpreted relative to this origin.
camera.getDisplayOrientedPose().toMatrix( mOriginCameraMatrix, 0);
//During each draw frame
camera.getDisplayOrientedPose().toMatrix( mCurrentCameraMatrix, 0);
int ac = 0;
for (Anchor anchor : mAnchors)
{
if (anchor.getTrackingState() != TrackingState.TRACKING)
{
// NOTE(review): 'continue' also skips the ac++ below, so a temporarily
// untracked anchor shifts the index mapping for all later anchors - confirm.
continue;
}
// Get the current pose of an Anchor in world space. The Anchor pose is updated
// during calls to session.update() as ARCore refines its estimate of the world.
anchor.getPose().toMatrix(mAnchorMatrix, 0);
// Update and draw the model
if (mModelSet)
{
// Lazily create per-object transform state for anchors added this frame.
if (mScaleFactors.size() <= ac)
{
mScaleFactors.add(1.0f);
}
if (mRotationThetas.size() <= ac)
{
mRotationThetas.add(0.0f);
}
if (mTranslationX.size() <= ac)
{
mTranslationX.add(viewmtx[3]);
}
if (mTranslationZ.size() <= ac)
{
mTranslationZ.add(viewmtx[11]);
}
translateMatrix(mTranslationX.get(ac), 0, mTranslationZ.get(ac));
rotateYAxisMatrix(mRotationThetas.get(ac));
ObjectRenderer virtualObject = mVirtualObjects.get(mAnchorReferences.get(ac));
virtualObject.updateModelMatrix(mAnchorMatrix, mScaleFactors.get(ac));
virtualObject.draw(viewmtx, projmtx, lightIntensity);
}
ac++;
}
}
This is kind of hard to track, so I'll probably just post the entire class once I feel more comfortable with the implementation and code cleanup.
public class HelloArActivity extends AppCompatActivity implements GLSurfaceView.Renderer {
private static final String TAG = HelloArActivity.class.getSimpleName();
// Rendering. The Renderers are created here, and initialized when the GL surface is created.
private GLSurfaceView surfaceView;
private boolean installRequested;
private Session session;
private GestureDetector gestureDetector;
private Snackbar messageSnackbar;
private DisplayRotationHelper displayRotationHelper;
private final BackgroundRenderer backgroundRenderer = new BackgroundRenderer();
private final ObjectRenderer virtualObject = new ObjectRenderer();
private final ObjectRenderer virtualObjectShadow = new ObjectRenderer();
private final PlaneRenderer planeRenderer = new PlaneRenderer();
private final PointCloudRenderer pointCloud = new PointCloudRenderer();
// Index of the renderer assigned to newly placed anchors (see mAnchorReferences).
private int mCurrent = -1;
private final List<ObjectRenderer> mVirtualObjects = new ArrayList<ObjectRenderer>();
// Temporary matrix allocated here to reduce number of allocations for each frame.
private final float[] anchorMatrix = new float[16];
//Rotation, Moving, & Scaling
// Per-placed-object transform state, indexed in placement order.
private final List<Float> mRotationThetas = new ArrayList<Float>();
private GestureDetector mGestureDetector;
private ScaleGestureDetector mScaleDetector;
private RotationGestureDetector mRotationDetector;
private final List<Float> mScaleFactors = new ArrayList<Float>();
private final List<Float> mTranslationX = new ArrayList<Float>();
private final List<Float> mTranslationZ = new ArrayList<Float>();
// Camera pose snapshots: at object placement time and at the current frame.
private final float[] mOriginCameraMatrix = new float[16];
private final float[] mCurrentCameraMatrix = new float[16];
private boolean mModelSet = false;
// Tap handling and UI.
// NOTE(review): two tap queues are declared; only 'queuedSingleTaps' is used
// in the visible code, so 'mQueuedSingleTaps' appears to be a leftover duplicate.
private final ArrayBlockingQueue<MotionEvent> mQueuedSingleTaps = new ArrayBlockingQueue<>(16);
private final ArrayList<Anchor> mAnchors = new ArrayList<>();
// Tap handling and UI.
private final ArrayBlockingQueue<MotionEvent> queuedSingleTaps = new ArrayBlockingQueue<>(16);
private final List<Integer> mAnchorReferences = new ArrayList<Integer>();
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
surfaceView = findViewById(R.id.surfaceview);
displayRotationHelper = new DisplayRotationHelper(/*context=*/ this);
//Handle Gestures - Multitouch for Scaling and Rotation
mScaleDetector = new ScaleGestureDetector(this, new ScaleGestureDetector.SimpleOnScaleGestureListener() {
#Override
public boolean onScale(ScaleGestureDetector detector) {
if (mScaleFactors.size() > 0) {
mScaleFactors.set(mScaleFactors.size() - 1, Math.max(0.1f, Math.min(detector.getScaleFactor() * mScaleFactors.get(mScaleFactors.size() - 1), 5.0f)));
return true;
}
return false;
}
});
mRotationDetector = new RotationGestureDetector(this, new RotationGestureDetector.OnRotationGestureListener() {
#Override
public void OnRotation(RotationGestureDetector rotationDetector) {
if (mRotationThetas.size() > 0) {
mRotationThetas.set(mRotationThetas.size() - 1, (mRotationThetas.get(mRotationThetas.size() - 1) + (rotationDetector.getAngle() * -0.001f)));
}
}
});
// Set up tap listener.
gestureDetector =
new GestureDetector(
this,
new GestureDetector.SimpleOnGestureListener() {
#Override
public boolean onSingleTapUp(MotionEvent e) {
onSingleTap(e);
return true;
}
#Override
public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
if (e2.getPointerCount() == 1 && mTranslationX.size() > 0 && mTranslationZ.size() > 0) {
double angle = findCameraAngleFromOrigin();
double speed = 0.001d;
if (angle / 90d < 1) //Quadrant 1
{
double transX = -(distanceY * (angle / 90d)) + (distanceX * ((90d - angle) / 90d));
double transY = (distanceY * ((90d - angle) / 90d)) + (distanceX * (angle / 90d));
// showSnackbarMessage("ANGLE: " + angle + ", distanceX: " + distanceX + ", distanceY: " + distanceY, false);
mTranslationX.set(mTranslationX.size() - 1, (float) (mTranslationX.get(mTranslationX.size() - 1) + (transX * -speed)));
mTranslationZ.set(mTranslationZ.size() - 1, (float) (mTranslationZ.get(mTranslationZ.size() - 1) + (transY * -speed)));
} else if (angle / 90d < 2) //Quadrant 2
{
angle -= 90d;
double transX = (distanceX * (angle / 90d)) + (distanceY * ((90d - angle) / 90d));
double transY = (-distanceX * ((90d - angle) / 90d)) + (distanceY * (angle / 90d));
// showSnackbarMessage("ANGLE: " + angle + ", distanceX: " + distanceX + ", distanceY: " + distanceY, false);
mTranslationX.set(mTranslationX.size() - 1, (float) (mTranslationX.get(mTranslationX.size() - 1) + (transX * speed)));
mTranslationZ.set(mTranslationZ.size() - 1, (float) (mTranslationZ.get(mTranslationZ.size() - 1) + (transY * speed)));
} else if (angle / 90d < 3) //Quadrant 3
{
angle -= 180d;
double transX = (distanceY * (angle / 90d)) + (-distanceX * ((90d - angle) / 90d));
double transY = (-distanceY * ((90d - angle) / 90d)) + (-distanceX * (angle / 90d));
// showSnackbarMessage("ANGLE: " + angle + ", distanceX: " + distanceX + ", distanceY: " + distanceY, false);
mTranslationX.set(mTranslationX.size() - 1, (float) (mTranslationX.get(mTranslationX.size() - 1) + (transX * -speed)));
mTranslationZ.set(mTranslationZ.size() - 1, (float) (mTranslationZ.get(mTranslationZ.size() - 1) + (transY * -speed)));
} else //Quadrant 4
{
angle -= 270d;
double transX = (-distanceX * (angle / 90d)) + (-distanceY * ((90d - angle) / 90d));
double transY = (distanceX * ((90d - angle) / 90d)) + (-distanceY * (angle / 90d));
// showSnackbarMessage("ANGLE: " + angle + ", distanceX: " + distanceX + ", distanceY: " + distanceY, false);
mTranslationX.set(mTranslationX.size() - 1, (float) (mTranslationX.get(mTranslationX.size() - 1) + (transX * speed)));
mTranslationZ.set(mTranslationZ.size() - 1, (float) (mTranslationZ.get(mTranslationZ.size() - 1) + (transY * speed)));
}
return true;
}
return false;
}
#Override
public boolean onDown(MotionEvent e) {
return true;
}
});
surfaceView.setOnTouchListener(
new View.OnTouchListener() {
#Override
public boolean onTouch(View v, MotionEvent event) {
boolean retVal = mScaleDetector.onTouchEvent(event);
if (retVal)
mRotationDetector.onTouchEvent(event);
retVal = gestureDetector.onTouchEvent(event) || retVal;
return retVal || gestureDetector.onTouchEvent(event);
//return gestureDetector.onTouchEvent(event);
}
});
// Set up renderer.
surfaceView.setPreserveEGLContextOnPause(true);
surfaceView.setEGLContextClientVersion(2);
surfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0); // Alpha used for plane blending.
surfaceView.setRenderer(this);
surfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
installRequested = false;
}
/**
 * Heading delta (degrees) between the camera pose stored at placement time
 * and the current camera pose, normalized into [0, 360).
 */
private double findCameraAngleFromOrigin() {
    double currentHeading = getDegree(mCurrentCameraMatrix[2], mCurrentCameraMatrix[0]);
    double originHeading = getDegree(mOriginCameraMatrix[2], mOriginCameraMatrix[0]);
    double delta = currentHeading - originHeading;
    return delta < 0 ? delta + 360 : delta;
}
/**
 * Converts two camera-pose components into a heading in [0, 360) degrees.
 *
 * @param value1 component that determines the in-quadrant angle
 * @param value2 component whose sign selects the half-plane
 * @return heading in degrees, 0 (inclusive) to 360 (exclusive)
 */
private double getDegree(double value1, double value2) {
    double primary = value1 * 90;
    double reference = value2 * 90;
    if (reference >= 0) {
        return primary >= 0
                ? primary                 // first quadrant
                : 270 + (90 + primary);   // fourth quadrant
    }
    return primary >= 0
            ? 90 + (90 - primary)         // second quadrant
            : 180 - primary;              // third quadrant
}
#Override
protected void onResume() {
super.onResume();
if (session == null) {
Exception exception = null;
String message = null;
try {
switch (ArCoreApk.getInstance().requestInstall(this, !installRequested)) {
case INSTALL_REQUESTED:
installRequested = true;
return;
case INSTALLED:
break;
}
// ARCore requires camera permissions to operate. If we did not yet obtain runtime
// permission on Android M and above, now is a good time to ask the user for it.
if (!CameraPermissionHelper.hasCameraPermission(this)) {
CameraPermissionHelper.requestCameraPermission(this);
return;
}
session = new Session(/* context= */ this);
} catch (UnavailableArcoreNotInstalledException
| UnavailableUserDeclinedInstallationException e) {
message = "Please install ARCore";
exception = e;
} catch (UnavailableApkTooOldException e) {
message = "Please update ARCore";
exception = e;
} catch (UnavailableSdkTooOldException e) {
message = "Please update this app";
exception = e;
} catch (Exception e) {
message = "This device does not support AR";
exception = e;
}
if (message != null) {
showSnackbarMessage(message, true);
Log.e(TAG, "Exception creating session", exception);
return;
}
// Create default config and check if supported.
Config config = new Config(session);
if (!session.isSupported(config)) {
showSnackbarMessage("This device does not support AR", true);
}
session.configure(config);
}
showLoadingMessage();
// Note that order matters - see the note in onPause(), the reverse applies here.
session.resume();
surfaceView.onResume();
displayRotationHelper.onResume();
}
#Override
public void onPause() {
super.onPause();
if (session != null) {
// Note that the order matters - GLSurfaceView is paused first so that it does not try
// to query the session. If Session is paused before GLSurfaceView, GLSurfaceView may
// still call session.update() and get a SessionPausedException.
displayRotationHelper.onPause();
surfaceView.onPause();
session.pause();
}
}
#Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] results) {
if (!CameraPermissionHelper.hasCameraPermission(this)) {
Toast.makeText(this, "Camera permission is needed to run this application", Toast.LENGTH_LONG)
.show();
if (!CameraPermissionHelper.shouldShowRequestPermissionRationale(this)) {
// Permission denied with checking "Do not ask again".
CameraPermissionHelper.launchPermissionSettings(this);
}
finish();
}
}
#Override
public void onWindowFocusChanged(boolean hasFocus) {
super.onWindowFocusChanged(hasFocus);
if (hasFocus) {
// Standard Android full-screen functionality.
getWindow()
.getDecorView()
.setSystemUiVisibility(
View.SYSTEM_UI_FLAG_LAYOUT_STABLE
| View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
| View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_FULLSCREEN
| View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
}
// Hands a tap from the UI thread to the GL thread, which drains the queue in
// onDrawFrame (one tap per frame).
private void onSingleTap(MotionEvent e) {
// Queue tap if there is space. Tap is lost if queue is full.
queuedSingleTaps.offer(e);
}
#Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
GLES20.glClearColor(0.1f, 0.1f, 0.1f, 1.0f);
// Create the texture and pass it to ARCore session to be filled during update().
backgroundRenderer.createOnGlThread(/*context=*/ this);
// Prepare the other rendering objects.
try {
virtualObject.createOnGlThread(/*context=*/ this, "andy.obj", "andy.png");
virtualObject.setMaterialProperties(0.0f, 3.5f, 1.0f, 6.0f);
virtualObjectShadow.createOnGlThread(/*context=*/ this, "andy_shadow.obj", "andy_shadow.png");
virtualObjectShadow.setBlendMode(BlendMode.Shadow);
virtualObjectShadow.setMaterialProperties(1.0f, 0.0f, 0.0f, 1.0f);
} catch (IOException e) {
Log.e(TAG, "Failed to read obj file");
}
try {
planeRenderer.createOnGlThread(/*context=*/ this, "trigrid.png");
} catch (IOException e) {
Log.e(TAG, "Failed to read plane texture");
}
pointCloud.createOnGlThread(/*context=*/ this);
}
#Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
displayRotationHelper.onSurfaceChanged(width, height);
GLES20.glViewport(0, 0, width, height);
mVirtualObjects.add(virtualObject);
}
#Override
public void onDrawFrame(GL10 gl) {
// Clear screen to notify driver it should not load any pixels from previous frame.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
if (session == null) {
return;
}
// Notify ARCore session that the view size changed so that the perspective matrix and
// the video background can be properly adjusted.
displayRotationHelper.updateSessionIfNeeded(session);
try {
session.setCameraTextureName(backgroundRenderer.getTextureId());
// Obtain the current frame from ARSession. When the configuration is set to
// UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
// camera framerate.
Frame frame = session.update();
Camera camera = frame.getCamera();
// Handle taps. Handling only one tap per frame, as taps are usually low frequency
// compared to frame rate.
MotionEvent tap = queuedSingleTaps.poll();
if (tap != null && camera.getTrackingState() == TrackingState.TRACKING) {
for (HitResult hit : frame.hitTest(tap)) {
// Check if any plane was hit, and if it was hit inside the plane polygon
Trackable trackable = hit.getTrackable();
// Creates an anchor if a plane or an oriented point was hit.
if ((trackable instanceof Plane && ((Plane) trackable).isPoseInPolygon(hit.getHitPose()))
|| (trackable instanceof Point
&& ((Point) trackable).getOrientationMode()
== OrientationMode.ESTIMATED_SURFACE_NORMAL)) {
// Hits are sorted by depth. Consider only closest hit on a plane or oriented point.
// Cap the number of objects created. This avoids overloading both the
// rendering system and ARCore.
if (mAnchors.size() >= 20)
{
//Alert the user that the maximum has been reached, must call from
//Handler as this is a UI action being done on a worker thread.
new Handler(Looper.getMainLooper())
{
#Override
public void handleMessage(Message message)
{
Toast.makeText(HelloArActivity.this,
"You've reached the maximum!", Toast.LENGTH_LONG).show();
}
};
// Alternatively, you can start detaching, however, a revision to the
// mAnchorReferences and mScalingFactors should be made!
// mAnchors.get(0).detach();
// mAnchors.remove(0);
}
else
{
// Adding an Anchor tells ARCore that it should track this position in
// space. This anchor is created on the Plane to place the 3d model
// in the correct position relative both to the world and to the plane.
mScaleFactors.add(1.0f);
Anchor anchor = hit.createAnchor();
mAnchors.add(anchor);
mAnchorReferences.add(mCurrent);
camera.getDisplayOrientedPose().toMatrix(mOriginCameraMatrix, 0);
}
break;
}
}
}
// Draw background.
backgroundRenderer.draw(frame);
// If not tracking, don't draw 3d objects.
if (camera.getTrackingState() == TrackingState.PAUSED) {
return;
}
// Get projection matrix.
float[] projmtx = new float[16];
camera.getProjectionMatrix(projmtx, 0, 0.1f, 100.0f);
// Get camera matrix and draw.
float[] viewmtx = new float[16];
camera.getViewMatrix(viewmtx, 0);
// Compute lighting from average intensity of the image.
final float lightIntensity = frame.getLightEstimate().getPixelIntensity();
// Visualize tracked points.
PointCloud pointCloud = frame.acquirePointCloud();
this.pointCloud.update(pointCloud);
this.pointCloud.draw(viewmtx, projmtx);
// Application is responsible for releasing the point cloud resources after
// using it.
pointCloud.release();
// Check if we detected at least one plane. If so, hide the loading message.
if (messageSnackbar != null) {
for (Plane plane : session.getAllTrackables(Plane.class)) {
if (plane.getType() == com.google.ar.core.Plane.Type.HORIZONTAL_UPWARD_FACING
&& plane.getTrackingState() == TrackingState.TRACKING) {
hideLoadingMessage();
break;
}
}
}
camera.getDisplayOrientedPose().toMatrix(mCurrentCameraMatrix, 0);
// Visualize planes.
planeRenderer.drawPlanes(
session.getAllTrackables(Plane.class), camera.getDisplayOrientedPose(), projmtx);
// Visualize anchors created by touch.
int ac = 0;
for (Anchor anchor : mAnchors) {
if (anchor.getTrackingState() != TrackingState.TRACKING) {
continue;
}
// Get the current pose of an Anchor in world space. The Anchor pose is updated
// during calls to session.update() as ARCore refines its estimate of the world.
anchor.getPose().toMatrix(anchorMatrix, 0);
if (mScaleFactors.size() <= ac) {
mScaleFactors.add(1.0f);
}
if (mRotationThetas.size() <= ac) {
mRotationThetas.add(0.0f);
}
if (mTranslationX.size() <= ac) {
mTranslationX.add(viewmtx[3]);
}
if (mTranslationZ.size() <= ac) {
mTranslationZ.add(viewmtx[11]);
}
translateMatrix(mTranslationX.get(ac), 0, mTranslationZ.get(ac));
rotateYAxisMatrix(mRotationThetas.get(ac));
// Update and draw the model and its shadow.
ObjectRenderer vitualObject = mVirtualObjects.get(mAnchorReferences.get(ac));
vitualObject.updateModelMatrix(anchorMatrix, mScaleFactors.get(ac));
vitualObject.draw(viewmtx, projmtx, lightIntensity);
}
} catch (Throwable t) {
// Avoid crashing the application due to unhandled exceptions.
Log.e(TAG, "Exception on the OpenGL thread", t);
}
}
// Writes a rotation about the Y axis into the shared anchor matrix.
// NOTE(review): this overwrites individual cells of 'anchorMatrix' (already
// populated from the anchor pose and translation) instead of multiplying a
// rotation matrix in, so it clobbers any scale/rotation those cells held -
// android.opengl.Matrix.rotateM would compose instead; confirm intent. Also
// nothing is written when rotationTheta == 0, leaving stale values in place.
private void rotateYAxisMatrix(float rotationTheta) {
if (rotationTheta != 0.0f) {
anchorMatrix[0] = (float) Math.cos(rotationTheta);
anchorMatrix[2] = (float) Math.sin(rotationTheta);
anchorMatrix[5] = 1;
anchorMatrix[8] = -(float) Math.sin(rotationTheta);
anchorMatrix[10] = (float) Math.cos(rotationTheta);
anchorMatrix[15] = 1;
}
}
// Applies the per-object translation (accumulated from one-finger scroll
// gestures) to the shared anchor matrix in place.
private void translateMatrix(float xDistance, float yDistance, float zDistance) {
Matrix.translateM(anchorMatrix, 0, xDistance, yDistance, zDistance);
}

Correcting coordinates of touch events after zooming and paning canvas

When using scaling, MotionEvent coordinates can be corrected by dividing by the ScaleFactor.
Further, when scaling and paning, divide by scalefactor and subtract offset.
When dealing with zoom, however, it isn't as easy. Dividing does get the correct relative coordinates, but because pan is involved, 0 isn't 0. 0 can be -2000 in offset.
So how can I correct the TouchEvents to give the correct coordinates after zoom and pan?
Code:
Zoom:
class Scaler extends ScaleGestureDetector {
public Scaler(Context context, OnScaleGestureListener listener) {
super(context, listener);
}
#Override
public float getScaleFactor() {
return super.getScaleFactor();
}
}
class ScaleListener implements ScaleGestureDetector.OnScaleGestureListener{
#Override
public boolean onScale(ScaleGestureDetector detector) {
scaleFactor *= detector.getScaleFactor();
if(scaleFactor > 2) scaleFactor = 2;
else if(scaleFactor < 0.3f) scaleFactor = 0.3f;
scaleFactor = ((float)((int)(scaleFactor * 100))) / 100;//jitter-protection
scaleMatrix.setScale(scaleFactor, scaleFactor, detector.getFocusX(), detector.getFocusY());
return true;
}
#Override
public boolean onScaleBegin(ScaleGestureDetector detector) {return true;}
#Override
public void onScaleEnd(ScaleGestureDetector detector) {
System.out.println("ScaleFactor: " + scaleFactor);
}
}
TouchEvent:
// Routes touch input: two fingers feed the pinch detector; one finger pans by
// accumulating offsets from scaled touch coordinates. '...' marks elided code.
// NOTE(review): '#Override' is a formatting artifact for '@Override'.
#Override
public boolean onTouchEvent(MotionEvent ev) {
int pointers = ev.getPointerCount();
if(pointers == 2 ) {
zoom = true;
s.onTouchEvent(ev);
}else if(pointers == 1 && zoom){
// Swallow the trailing single-finger events of a pinch so they don't pan.
if(ev.getAction() == MotionEvent.ACTION_UP)
zoom = false;
return true;
}else {
if (ev.getAction() == MotionEvent.ACTION_DOWN) {
//scaled physical coordinates
x = ev.getX() /*/ mScaleFactorX*/;//unscaled
y = ev.getY() /*/ mScaleFactorY*/;
sx = ev.getX() / scaleFactor;//scaled
sy = ev.getY() / scaleFactor;
//////////////////////////////////////////
// tox/toy flag "this gesture is still a tap" per axis until the finger
// moves more than ~1dp.
tox = toy = true;
} else if (ev.getAction() == MotionEvent.ACTION_UP) {
if (tox && toy) {
x = ev.getX() /*/ mScaleFactorX*/;
y = ev.getY() /*/ mScaleFactorY*/;
sx = ev.getX() / scaleFactor;
sy = ev.getY() / scaleFactor;
System.out.println("XY: " + sx + "/" + sy);
Rect cursor = new Rect((int) x, (int) y, (int) x + 1, (int) y + 1);
Rect scaledCursor = new Rect((int)sx, (int)sy, (int)sx+1, (int)sy+1);
...
}
} else if (ev.getAction() == MotionEvent.ACTION_MOVE) {
//This is where the pan happens.
float currX = ev.getX() / scaleFactor;
float currY = ev.getY() / scaleFactor;
float newOffsetX = (sx - currX),
newOffsetY = (sy - currY);
// Dead-zone of 1dp per axis to filter touch jitter.
// NOTE(review): the Y check clears 'tox' and the X check clears 'toy' -
// the flags look swapped; confirm against the tap handling above.
if (newOffsetY < Maths.convertDpToPixel(1, c) && newOffsetY > -Maths.convertDpToPixel(1, c))
newOffsetY = 0;
else tox = false;
if (newOffsetX < Maths.convertDpToPixel(1, c) && newOffsetX > -Maths.convertDpToPixel(1, c))
newOffsetX = 0;
else toy = false;
this.newOffsetX = newOffsetX;
this.newOffsetY = newOffsetY;
offsetX += newOffsetX;
offsetY += newOffsetY;
sx = ev.getX() / scaleFactor;
sy = ev.getY() / scaleFactor;
}
}
return true;
}
Implementation of the zooming matrix:
// Canvas transform rebuilt by ScaleListener on every pinch event.
Matrix scaleMatrix = new Matrix();
// Draws the world under the zoom matrix, then clears the matrix so UI can be
// drawn unscaled afterwards. World objects are shifted by the pan offsets.
public void render(Canvas c) {
super.draw(c);
if (c != null) {
backgroundRender(c);
c.setMatrix(scaleMatrix);
//Example rendering:
c.drawRect(0 - offsetX,0 - offsetY,10 - offsetX,10 - offsetY,paint);
c.setMatrix(null);//null the matrix to allow for unscaled rendering after this line. For UI objects.
}
}
What the issue is, is that when zooming 0 shifts but the coordinates of the objects does not. Meaning objects rendered at e.g. -2500, -2500 will appear to be rendered at over 0,0. Their coordinates are different from the TouchEvent. So how can I correct the touch events?
What I have tried:
This causes laggy zoom and the objects flying away (`ev` is the MotionEvent in onTouchEvent), and it doesn't correct the coordinates:
// Attempted fix: invert the current scale matrix and apply it to the event.
Matrix invert = new Matrix(scaleMatrix);
invert.invert(invert);
// NOTE(review): MotionEvent.transform(Matrix) requires the matrix argument -
// as written this call would not compile; presumably ev.transform(invert).
ev.transform();
This doesn't work because the coordinates are wrong compared to objects. Objects with coordinates < 0 show over 0 meaning MotionEvents are wrong no matter what.
int sx = ev.getX() / scaleFactor;//same with y, but ev.getY()
Found a solution after doing a ton more research
Whenever getting the scaled coordinates, get the clipBounds of the canvas and add the top and left coordinates to X/Y coordinates:
// Map the screen-space touch into canvas space: undo the zoom, then add the
// canvas clip origin so the pan offset is accounted for.
sx = ev.getX() / scaleFactor + clip.left;
sy = ev.getY() / scaleFactor + clip.top ;
clip is a Rect defined as the clipBounds of the Canvas.
// Same render pass as before, but captures the canvas clip bounds after the
// scale matrix is applied; '(...)' marks elided drawing code.
public void render(Canvas c) {
super.draw(c);
if (c != null) {
c.setMatrix(scaleMatrix);
// The clip bounds under the scale matrix give the visible world-space
// rectangle; its top/left corner is the offset added to touch coordinates.
clip = c.getClipBounds();
(...)
}
}

LibGDX panning is jumping sometimes

I am working on a tile based game which should be scrollable but only inside the boundaries of the world. So I set up the pan() method as several examples/tutorials suggest but it is not always working properly. Sometimes it is jumping back to the starting point of the last gesture or is only moving very slow. Additionally the borders are not working either. Maybe somebody can point out the mistakes I made.
// Pan callback: (x, y) is the current touch position, (deltaX, deltaY) the
// distance travelled since the last call.
// NOTE(review): the deltas are ignored; movement is derived from the absolute
// position against 'lastTouchDown', which is the likely source of the jumps
// described in the question when a new gesture starts far from the last one.
public void pan(float x, float y, float deltaX, float deltaY) {
moveCamera(x, y);
}
// Moves the camera toward the touched point unless the resulting position
// would leave the world bounds, then records this touch for the next delta.
// NOTE(review): getNewCameraPosition aliases and mutates 'lastTouchDown'
// (no copy is made), and lastTouchDown is overwritten even when the move is
// rejected - both contribute to the erratic behaviour being asked about.
private void moveCamera(float x, float y) {
Vector3 new_position = getNewCameraPosition((int) x, (int)y);
if(!cameraOutOfLimit(new_position))
this.getViewport().getCamera().translate(new_position.sub(this.getViewport().getCamera().position));
lastTouchDown.set(x, y, 0);
}
// Computes a target camera position for the touch point (x, y).
private Vector3 getNewCameraPosition(int x, int y) {
// NOTE(review): no copy is made here - 'newPosition' aliases 'lastTouchDown',
// so every operation below mutates lastTouchDown in place.
Vector3 newPosition = lastTouchDown;
newPosition.sub(x, y, 0);
// Screen Y grows downward while world Y grows upward, hence the flip.
newPosition.y = -newPosition.y;
newPosition.add(this.getViewport().getCamera().position);
return newPosition;
}
/**
 * Reports whether the given camera position would show anything outside the
 * tile map (i.e. the viewport's half-extent would cross a world edge).
 */
private boolean cameraOutOfLimit( Vector3 position ) {
    final int minX = (int) (Global.SCREEN_WIDTH / 2);
    final int maxX = (int) (Global.COLS_OF_TILES * Global.TILE_WIDTH - (Global.SCREEN_WIDTH / 2));
    final int minY = (int) (Global.SCREEN_HEIGHT / 2);
    final int maxY = (int) (Global.ROWS_OF_TILES * Global.TILE_HEIGHT - Global.SCREEN_HEIGHT / 2);
    return position.x < minX || position.x > maxX
            || position.y < minY || position.y > maxY;
}
The code above seems convoluted to me, and I don't really understand why you modify a vector called lastTouchPosition in ways that have nothing to do with touch position.
I would do something like this. These methods clamp your target X and Y positions whenever you want to move your camera.
/**
 * Clamps a camera X target into [half screen width, map width - half screen
 * width] so the viewport never shows past the world's horizontal edges.
 */
float clampCamTargetX(float x) {
    final int lo = (int) (Global.SCREEN_WIDTH / 2);
    final int hi = (int) (Global.COLS_OF_TILES * Global.TILE_WIDTH - (Global.SCREEN_WIDTH / 2));
    return Math.min(Math.max(x, lo), hi);
}
/**
 * Clamps a camera Y target into [half screen height, map height - half screen
 * height] so the viewport never shows past the world's vertical edges.
 */
float clampCamTargetY (float y){
    final int lo = (int) (Global.SCREEN_HEIGHT / 2);
    final int hi = (int) (Global.ROWS_OF_TILES * Global.TILE_HEIGHT - Global.SCREEN_HEIGHT / 2);
    return Math.min(Math.max(y, lo), hi);
}
Then if you want to pan it, you would do something like this:
/**
 * Shifts the camera by a drag delta, clamping the result to the world bounds,
 * and pushes the new position to the camera's matrices.
 */
void panCamera(float deltaX, float deltaY) {
    Vector3 pos = this.getViewport().getCamera().position;
    pos.x = clampCamTargetX(pos.x + deltaX);
    pos.y = clampCamTargetY(pos.y + deltaY);
    this.getViewport().getCamera().update();
}
Or if you want a complete solution for smoothly moving the camera to the last position touched, try this:
// Tween state for the smooth camera pan: start and target positions, elapsed
// time, and whether a pan is currently running.
float startXCam, startYCam, targetXCam, targetYCam;
float elapsedTimeCam;
boolean panningCam = false;
// Total duration (seconds) of one camera pan.
static final float CAM_PAN_DURATION = 0.4f;
// Advance the camera tween once per frame with the frame's delta time
// ('//...' marks elided rendering code).
public void render (){
//...
panCameraToTouchPoint(Gdx.graphics.getDeltaTime());
//...
}
/**
 * Smoothly pans the camera toward the last touched point over
 * CAM_PAN_DURATION seconds with a pow2Out ease-out.
 *
 * Fix: Gdx.input.getX()/getY() return raw screen coordinates (origin at
 * the top-left, y pointing down), not world coordinates. They must be
 * unprojected through the camera before being used as a camera target,
 * otherwise the pan target is wrong whenever screen and world spaces
 * differ (which, with a y-up world, is always).
 *
 * @param deltaTime frame time in seconds
 */
void panCameraToTouchPoint (float deltaTime){
    Camera camera = getViewport().getCamera();
    Vector3 camPosition = camera.position;
    if (Gdx.input.justTouched()) {
        startXCam = camPosition.x;
        startYCam = camPosition.y;
        // Convert the touch point from screen space to world space first.
        Vector3 touchWorld = camera.unproject(new Vector3(Gdx.input.getX(), Gdx.input.getY(), 0));
        targetXCam = clampCamTargetX(touchWorld.x);
        targetYCam = clampCamTargetY(touchWorld.y);
        elapsedTimeCam = 0;
        panningCam = true;
    }
    if (panningCam){
        elapsedTimeCam += deltaTime;
        float alpha = elapsedTimeCam / CAM_PAN_DURATION;
        if (alpha >= 1){
            // Clamp to the target and stop animating.
            alpha = 1;
            panningCam = false;
        }
        camPosition.x = Interpolation.pow2Out.apply(startXCam, targetXCam, alpha);
        camPosition.y = Interpolation.pow2Out.apply(startYCam, targetYCam, alpha);
        camera.update();
    }
}

Sprite movement based on rotation

I have a sprite in Android OpenGL. This sprite (a small beetlebug) is always moving in a forward direction and I use:
sprite.setPosition(posX, posY);
Now I have a rotation method, when the user gestures left or right the bug rotates:
private void applyRotation() {
for(int i=0;i<beetleBug.size;i++) {
Sprite s = beetleBug.get(i);
s.setOrigin(s.getWidth() / 2, s.getHeight() / 2);
s.setRotation(angle);
}
}
Now when the bug is moving forward which he always does the new x and y coordinates have to be calculated which depend on the rotation-angle, so that the bug is always moving forward. Does anybody have an algorithm to calculate the direction by the rotation-angle?
Here is the whole Bug-class:
/**
 * An animated beetle sprite that constantly moves forward in the direction
 * it is facing. The LEFT/RIGHT keys rotate the heading; the sprite frames
 * are cycled to produce a walking animation.
 */
public class Bug {
    private SpriteBatch spriteBatch = null;
    private TextureAtlas spriteSheet;
    private Array<Sprite> beetleBug;            // animation frames
    private int currentFrame = 0;
    private final float frameLength = 0.10f;    //in seconds, how long a frame last
    private float animationElapsed = 0.0f;
    private float angle = 0.0f;                 // heading in degrees; 0 points up, grows counterclockwise
    private float posX = 0.0f;
    private float posY = 0.0f;
    private float sizeX = 100.0f;
    private float sizeY = 100.0f;
    private float offSet = 50.0f;               // half the sprite size, used to center the sprite on (posX, posY)

    public Bug() {
        spriteBatch = new SpriteBatch();
        spriteSheet = new TextureAtlas("assets/data/bug.txt");
        beetleBug = spriteSheet.createSprites("bug");
        // dont forget to set the size of your sprites!
        for (int i = 0; i < beetleBug.size; i++) {
            beetleBug.get(i).setSize(sizeX, sizeY);
        }
        applyPosition();
    }

    /** Polls the arrow keys, updates the heading, then advances the bug. */
    public void handleInput() {
        boolean leftKey = Gdx.input.isKeyPressed(Input.Keys.LEFT);
        boolean rightKey = Gdx.input.isKeyPressed(Input.Keys.RIGHT);
        if (rightKey) {
            if (angle <= 0) {
                angle = 360;
            }
            angle -= 2f;
            applyRotation();
        }
        if (leftKey) {
            if (angle >= 360) {
                angle = 0;
            }
            angle += 2f;
            applyRotation();
        }
        applyPosition();
    }

    /** Advances the bug one unit step along its current heading. */
    private void applyPosition() {
        // FIX: Math.sin/Math.cos take radians, so the heading must be
        // converted from degrees first. With sprite rotation 0 pointing up
        // and increasing counterclockwise, the forward unit vector is
        // (-sin, cos) rather than (cos, sin).
        float radians = (float) Math.toRadians(angle);
        float x = -(float) Math.sin(radians);
        float y = (float) Math.cos(radians);
        posX = posX + x;
        posY = posY + y;
        for (int i = 0; i < beetleBug.size; i++) {
            beetleBug.get(i).setPosition(posX - offSet, posY - offSet); // optional: center the sprite to screen
        }
    }

    /** Applies the current heading angle to every animation frame. */
    private void applyRotation() {
        for (int i = 0; i < beetleBug.size; i++) {
            Sprite s = beetleBug.get(i);
            s.setOrigin(s.getWidth() / 2, s.getHeight() / 2);
            s.setRotation(angle);
        }
    }

    /** Steps the walking animation and draws the current frame. */
    public void render(OrthographicCamera cam) {
        float dt = Gdx.graphics.getDeltaTime();
        animationElapsed += dt;
        while (animationElapsed > frameLength) {
            animationElapsed -= frameLength;
            // Wrap back to frame 0 after the last frame.
            currentFrame = (currentFrame == beetleBug.size - 1) ? 0 : ++currentFrame;
        }
        spriteBatch.setProjectionMatrix(cam.combined);
        spriteBatch.begin();
        beetleBug.get(currentFrame).draw(spriteBatch);
        spriteBatch.end();
    }
}
Works perfectly now:
Converted degrees to radians
Negated the x-coordinate (used -sin instead of sin)
/** Moves the bug one unit step forward along its current heading. */
private void applyPosition() {
    // Forward unit vector for a heading where 0 degrees points up and
    // angles grow counterclockwise: (-sin, cos), with the angle in radians.
    float radians = (float) Math.toRadians(angle);
    float stepX = -(float) Math.sin(radians);
    float stepY = (float) Math.cos(radians);
    posX += stepX;
    posY += stepY;
    // Position every animation frame, offset so the sprite is centered.
    for (Sprite frame : beetleBug) {
        frame.setPosition(posX - offSet, posY -offSet);
    }
}
Create a normalized vector to represent the beetle's direction, then multiply by the speed. Add that vector to the beetle's current position and you've got his new position.
Create the normalized vector (i.e. has a length of 1) using your angle. vx = cos(angle), vy = sin(angle)
Multiply by your beetle's speed. vx = vx*speed, vy = vy*speed
Add it to the current position. x = x + vx, y = y + vy
Repeat
Some gotchas: Watch out that your sprite's graphical rotation and your own internal representation of rotation go the same way. Some frameworks flip which way they rotate graphics. The above [cos(angle), sin(angle)] is for an angle of zero pointing towards the positive x axis. Many implementations of cos/sin/tan use radians instead of degrees for their calculations, so convert as appropriate.
[cos angle, sin angle] is for zero to the right (positive x), counterclockwise. [-sin angle, cos angle] is for zero pointing up (positive y), counterclockwise.
This might work:
int currentX = 100; //beetleCurrentX
int currentY = 100; //beetleCurrentY
int angle = 200;    //beetleAngle, in degrees
int len = 2;        //Step that the beetle makes (jumps 2 in this case)
// FIX: Math.sin/Math.cos must be fully qualified and take radians, not
// degrees, so convert the angle first. Round instead of truncating so the
// step is not silently lost for small len values.
double radians = Math.toRadians(angle);
int x2Pos = (int) Math.round(Math.sin(radians) * len) + currentX;
int y2Pos = (int) Math.round(Math.cos(radians) * len) + currentY;
sprite.setPosition(x2Pos, y2Pos);
If you execute this each frame you will have your beetle moving in the angles direction.

Touch Listener on a Bitmap Android (Overlay to make parts of bitmap clickable)

I am using a bitmap as a rotating dial that consists of sections, and the user can rotate it. Now I want to define a single tap on the ImageView that navigates to the tapped section.
We could use the MotionEvent x and y, but the positions of the sections change dynamically as the dial rotates, so that alone is not enough.
Is there any other way to do this ?
public class TutorialActivity extends Activity {
private static Bitmap imageOriginal, imageScaled;
private static Matrix matrix;
private ImageView dialer;
private int dialerHeight, dialerWidth;
private GestureDetector detector;
// needed for detecting the inversed rotations
private boolean[] quadrantTouched;
private boolean allowRotating;
View viewNew;
Button aboutus,profile,colleagues,picasa,facebook,twitter,youtube,referral;
int one_piecewidth,one_pieceheight;
#Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
this.requestWindowFeature(Window.FEATURE_NO_TITLE);
this.getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.main);
// initialize the matrix only once
if (matrix == null) {
matrix = new Matrix();
} else {
// not needed, you can also post the matrix immediately to restore the old state
matrix.reset();
}
detector = new GestureDetector(this, new MyGestureDetector());
// there is no 0th quadrant, to keep it simple the first value gets ignored
quadrantTouched = new boolean[] { false, false, false, false, false };
allowRotating = true;
//viewNew=LayoutInflater.from(TutorialActivity.this).inflate(R.layout.new_view,null,false);
// load the image only once
if(imageOriginal==null){
//imageOriginal=loadBitmapFromView(viewNew);
imageOriginal=BitmapFactory.decodeResource(getResources(), R.drawable.circular_menu);
}
imageScaled=imageOriginal;
one_piecewidth=((int) imageOriginal.getWidth()/4);
one_pieceheight=((int) imageOriginal.getHeight()/4);
dialer = (ImageView) findViewById(R.id.imageView_ring);
dialer.setImageBitmap(imageOriginal);
dialer.setOnTouchListener(new MyOnTouchListener());
dialer.getViewTreeObserver().addOnGlobalLayoutListener(new OnGlobalLayoutListener() {
#Override
public void onGlobalLayout() {
// method called more than once, but the values only need to be initialized one time
if (dialerHeight == 0 || dialerWidth == 0) {
dialerHeight = dialer.getHeight();
dialerWidth = dialer.getWidth();
// resize
Matrix resize = new Matrix();
resize.postScale((float)Math.min(dialerWidth, dialerHeight) / (float)imageOriginal.getWidth(), (float)Math.min(dialerWidth, dialerHeight) / (float)imageOriginal.getHeight());
imageScaled = Bitmap.createBitmap(imageOriginal, 0, 0, imageOriginal.getWidth(), imageOriginal.getHeight(), resize, false);
// translate to the image view's center
float translateX = dialerWidth / 2 - imageScaled.getWidth() / 2;
float translateY = dialerHeight / 2 - imageScaled.getHeight() / 2;
matrix.postTranslate(translateX, translateY);
dialer.setImageBitmap(imageScaled);
dialer.setImageMatrix(matrix);
}
}
});
}
/**
* Rotate the dialer.
*
* #param degrees The degrees, the dialer should get rotated.
*/
private void rotateDialer(float degrees) {
matrix.postRotate(degrees, dialerWidth / 2, dialerHeight / 2);
dialer.setImageMatrix(matrix);
//viewNew.po
}
/**
* #return The angle of the unit circle with the image view's center
*/
private double getAngle(double xTouch, double yTouch) {
double x = xTouch - (dialerWidth / 2d);
double y = dialerHeight - yTouch - (dialerHeight / 2d);
switch (getQuadrant(x, y)) {
case 1:
return Math.asin(y / Math.hypot(x, y)) * 180 / Math.PI;
case 2:
case 3:
return 180 - (Math.asin(y / Math.hypot(x, y)) * 180 / Math.PI);
case 4:
return 360 + Math.asin(y / Math.hypot(x, y)) * 180 / Math.PI;
default:
// ignore, does not happen
return 0;
}
}
/**
* #return The selected quadrant.
*/
private static int getQuadrant(double x, double y) {
if (x >= 0) {
return y >= 0 ? 1 : 4;
} else {
return y >= 0 ? 2 : 3;
}
}
/**
* Simple implementation of an {#link OnTouchListener} for registering the dialer's touch events.
*/
private class MyOnTouchListener implements OnTouchListener {
private double startAngle;
#Override
public boolean onTouch(View v, MotionEvent event) {
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
// reset the touched quadrants
for (int i = 0; i < quadrantTouched.length; i++) {
quadrantTouched[i] = false;
}
allowRotating = false;
startAngle = getAngle(event.getX(), event.getY());
Drawable drawable = dialer.getDrawable();
Rect imageBounds = drawable.getBounds();
//original height and width of the bitmap
int intrinsicHeight = drawable.getIntrinsicHeight();
int intrinsicWidth = drawable.getIntrinsicWidth();
//height and width of the visible (scaled) image
int scaledHeight = imageBounds.height();
int scaledWidth = imageBounds.width();
//Find the ratio of the original image to the scaled image
//Should normally be equal unless a disproportionate scaling
//(e.g. fitXY) is used.
float heightRatio = intrinsicHeight / scaledHeight;
float widthRatio = intrinsicWidth / scaledWidth;
//do whatever magic to get your touch point
//MotionEvent event;
//get the distance from the left and top of the image bounds
int scaledImageOffsetX = (int) (event.getX() - imageBounds.left);
int scaledImageOffsetY = (int) (event.getY() - imageBounds.top);
//scale these distances according to the ratio of your scaling
//For example, if the original image is 1.5x the size of the scaled
//image, and your offset is (10, 20), your original image offset
//values should be (15, 30).
int originalImageOffsetX = (int) (scaledImageOffsetX * widthRatio);
int originalImageOffsetY = (int) (scaledImageOffsetY * heightRatio);
break;
case MotionEvent.ACTION_MOVE:
double currentAngle = getAngle(event.getX(), event.getY());
if((float) (startAngle - currentAngle)>1.0)
rotateDialer((float) (startAngle - currentAngle));
startAngle = currentAngle;
break;
case MotionEvent.ACTION_UP:
allowRotating = true;
break;
}
// set the touched quadrant to true
quadrantTouched[getQuadrant(event.getX() - (dialerWidth / 2), dialerHeight - event.getY() - (dialerHeight / 2))] = true;
detector.onTouchEvent(event);
return true;
}
}
/**
* Simple implementation of a {#link SimpleOnGestureListener} for detecting a fling event.
*/
private class MyGestureDetector extends SimpleOnGestureListener {
#Override
public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
// get the quadrant of the start and the end of the fling
int q1 = getQuadrant(e1.getX() - (dialerWidth / 2), dialerHeight - e1.getY() - (dialerHeight / 2));
int q2 = getQuadrant(e2.getX() - (dialerWidth / 2), dialerHeight - e2.getY() - (dialerHeight / 2));
System.out.println("q1 and q2"+q1+".."+q2);
// the inversed rotations
if ((q1 == 2 && q2 == 2 && Math.abs(velocityX) < Math.abs(velocityY))
|| (q1 == 3 && q2 == 3)
|| (q1 == 1 && q2 == 3)
|| (q1 == 4 && q2 == 4 && Math.abs(velocityX) > Math.abs(velocityY))
|| ((q1 == 2 && q2 == 3) || (q1 == 3 && q2 == 2))
|| ((q1 == 3 && q2 == 4) || (q1 == 4 && q2 == 3))
|| (q1 == 2 && q2 == 4 && quadrantTouched[3])
|| (q1 == 4 && q2 == 2 && quadrantTouched[3])) {
dialer.post(new FlingRunnable(-1 * (velocityX + velocityY)));
} else {
// the normal rotation
dialer.post(new FlingRunnable(velocityX + velocityY));
}
return true;
}
#Override
public boolean onSingleTapUp(MotionEvent e){
int pixel = imageScaled.getPixel((int)(e.getX()),(int)(e.getY()));
System.out.println("pixel"+pixel+"...");
return true;
}
}
/**
* A {#link Runnable} for animating the the dialer's fling.
*/
private class FlingRunnable implements Runnable {
private float velocity;
public FlingRunnable(float velocity) {
this.velocity = velocity;
}
#Override
public void run() {
if (Math.abs(velocity) > 5 && allowRotating) {
rotateDialer(velocity / 75);
velocity /= 1.0666F;
// post this instance again
dialer.post(this);
}
}
}
public static Bitmap loadBitmapFromView(View v) {
System.out.println("vvv"+v+"....");
System.out.println("vvvparams"+v.getMeasuredHeight()+"....");
// System.out.println("vvvparamswidth"+v.getLayoutParams().height+"....");
Bitmap b = Bitmap.createBitmap( 300, 300, Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(b);
v.layout(0, 0, 300, 300);
v.draw(c);
return b;
}
public Bitmap addView(View v) {
v.setDrawingCacheEnabled(true);
// this is the important code :)
// Without it the view will have a dimension of 0,0 and the bitmap will be null
System.out.println("mesaure spec"+MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED));
v.measure(MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED),
MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED));
v.layout(0, 0, v.getMeasuredWidth(), v.getMeasuredHeight());
v.buildDrawingCache(true);
Bitmap b = Bitmap.createBitmap(v.getDrawingCache());
v.setDrawingCacheEnabled(false);
return b;// clear drawing cache
}
public void onResume(){
super.onResume();
}
}
Add this code on touch event:
// Inside onTouch(View v, MotionEvent e): record when the finger went down
// and when it came up, then treat a hold longer than 1.5 s as a long press.
// FIX: the case labels must live inside a switch statement — a bare block
// containing `case` labels is not valid Java.
switch (e.getAction()) {
    case MotionEvent.ACTION_DOWN:
        start = e.getEventTime();
        break;
    case MotionEvent.ACTION_MOVE:
    case MotionEvent.ACTION_UP:
        stop = e.getEventTime();
        break;
}
//delay or use thread
if (stop - start > 1500) {
    //your code
}
return true;

Categories

Resources