Using Google's MapView v2 in Live Wallpaper - android

I have been trying to resolve this for a while now. I would like to use a MapView v2 with my Live wallpaper, so far my results are a black screen with a google logo on the bottom.
My manifest has required permissions:
<uses-permission android:name="android.permission.ACCESS_FINE_LOCATION" />
<uses-permission android:name="android.permission.INTERNET" />
and meta-data:
<meta-data
android:name="com.google.android.maps.v2.API_KEY"
android:value="@string/google_maps_key" />
<meta-data
android:name="com.google.android.gms.version"
android:value="@integer/google_play_services_version" />
And I also tried changing hardware acceleration as suggested in some other MapView related questions:
android:hardwareAccelerated="false"
Also, I can confirm that the Maps API key is indeed correct, since I have a MapFragment that renders properly (not in the wallpaper service but in a test activity):
WallpaperService.Engine code:
private class ENGINE extends Engine implements OnMapReadyCallback,
GoogleMap.SnapshotReadyCallback {
private final String LOG_TAG = ENGINE.class.getSimpleName();
private int width = 0;
private int height = 0;
private SurfaceHolder holder;
private boolean visible;
private Bitmap bitmap;
private final Handler handler = new Handler();
private final Runnable drawRunner = new Runnable() {
#Override
public void run() {
draw();
}
};
MapView mapView;
GoogleMap map;
private static final int FPS = 5000;
#Override
public void onCreate(SurfaceHolder surfaceHolder) {
super.onCreate(surfaceHolder);
this.holder = surfaceHolder;
this.width = getDesiredMinimumWidth();
this.height = getDesiredMinimumHeight();
int margin = 100;
Rect rect = new Rect();
rect.set(margin, margin, width, height);
mapView = new MapView(getApplicationContext());
mapView.onCreate(new Bundle()); // could this be the problem?
// since onCreate(null) has exact same result
int widthSpec = View.MeasureSpec.makeMeasureSpec(rect.width(), View.MeasureSpec.EXACTLY);
int heightSpec = View.MeasureSpec.makeMeasureSpec(rect.height(), View.MeasureSpec.EXACTLY);
mapView.measure(widthSpec, heightSpec);
mapView.layout(0, 0, rect.width(), rect.height());
Log.d(LOG_TAG, "Width: " + mapView.getMeasuredWidth());
Log.d(LOG_TAG, "Height: " + mapView.getMeasuredHeight());
mapView.onResume();
try {
MapsInitializer.initialize(getApplicationContext());
} catch (Exception e) {
e.printStackTrace();
}
mapView.getMapAsync(this);
}
public ENGINE() {
}
#Override
public void onVisibilityChanged(boolean visible) {
this.visible = visible;
if (visible) {
handler.post(drawRunner);
mapView.onResume();
} else {
handler.removeCallbacks(drawRunner);
mapView.onPause();
}
}
#Override
public void onDestroy() {
super.onDestroy();
mapView.onDestroy();
handler.removeCallbacks(drawRunner);
}
private void draw() {
final Canvas canvas = holder.lockCanvas();
try {
if (canvas != null) {
if (mapView != null) {
mapView.draw(canvas);
Log.d(LOG_TAG, "Drawing Map view on the canvas");
} else {
Paint paint = new Paint();
paint.setColor(getColor(R.color.colorAccent));
paint.setStyle(Paint.Style.FILL);
canvas.drawPaint(paint);
}
}
} finally {
if (canvas != null)
holder.unlockCanvasAndPost(canvas);
}
handler.removeCallbacks(drawRunner);
if (visible) {
handler.postDelayed(drawRunner, FPS);
}
}
#Override
public void onSnapshotReady(Bitmap bitmap) {
// never reaches here
this.bitmap = bitmap;
Log.d(LOG_TAG, "Bitmap ready");
}
#Override
public void onMapReady(GoogleMap googleMap) {
this.map = googleMap;
//this.map.setMyLocationEnabled(true);
this.map.getUiSettings().setZoomControlsEnabled(true);
this.map.getUiSettings().setMyLocationButtonEnabled(true);
this.map.getUiSettings().setCompassEnabled(true);
this.map.getUiSettings().setRotateGesturesEnabled(false);
this.map.getUiSettings().setZoomGesturesEnabled(false);
Log.d(LOG_TAG, "Map is ready");
double latitude = 56.875300;
double longitude = -76.783658;
LatLng marker = new LatLng(latitude, longitude);
this.map.addMarker(new MarkerOptions().position(marker).title("Marker Title"));
this.map.snapshot(this);
// I tried using map's snapshot to render it, however this callback never returns a value
}
}
Logcat does not show any errors nor any hints on what might be the issue
07-14 10:26:39.213 3167-3167/removed.for.privacy I/Google Maps
Android API: Google Play services package version: 11055440
07-14 10:26:39.398 3167-3195/removed.for.privacy D/OpenGLRenderer:
endAllActiveAnimators on 0x75f689e800 (RippleDrawable) with handle
0x75f65bede0
07-14 10:27:02.772 3167-3167/removed.for.privacy I/Google Maps
Android API: Google Play services package version: 11055440
07-14 10:27:02.838 3167-3167/removed.for.privacy
D/ENGINE : Width: 980
07-14 10:27:02.838 3167-3167/removed.for.privacy
D/ENGINE : Height: 1820
07-14 10:27:02.867 3167-3167/removed.for.privacy
D/ENGINE : Map is ready
07-14 10:27:02.885 3167-3195/removed.for.privacy D/OpenGLRenderer:
endAllActiveAnimators on 0x75f689ec00 (RippleDrawable) with handle
0x75e6820020
07-14 10:27:02.900 3167-3167/removed.for.privacy
D/ENGINE : Drawing Map view on the canvas
07-14 10:27:07.955 3167-3167/removed.for.privacy
D/ENGINE : Drawing Map view on the canvas
I understand that WallpaperService is not designed to be used with regular UI elements, however it is possible to use them and I successfully did it with TextViews and such. Is it a limitation placed on the MapView purposefully and I am beating a dead horse? Thank you

The lite mode was the only way I could make the map become visible.
mapView = new MapView(context, new GoogleMapOptions().liteMode(true));
However tilt, zoom and rotate are not possible.

Anyway, You can use Google Static Maps API to get the wallpaper picture (e.g. like in this request: https://maps.googleapis.com/maps/api/staticmap?center=0,0&zoom=1&scale=1&size=300x500&maptype=roadmap&format=png&visual_refresh=true), then, just show it on wallpaper (and reload and update screen by timer or action, if You need).

If you are trying to take a picture of your map you can use the following method. By defining the variable "path" you can save the picture and reuse it elsewhere.
Alternative: you can save the Bitmap internally and reuse it also.
public void savePhoto(){
mMap.snapshot(new GoogleMap.SnapshotReadyCallback() {
#Override
public void onSnapshotReady(Bitmap bitmap) {
FileOutputStream out = null;
try {
out = new FileOutputStream(new File(path), true);
bitmap.compress(Bitmap.CompressFormat.PNG, 100, out);
}
catch (Exception e) {e.printStackTrace();}
finally {
try {if (out != null) {out.close();}}
catch (IOException e) {e.printStackTrace();}
saveForm(isArea, datum, path);
}
}
});
}
Its very important to call this method from the method OnMapLoadedCallback like:
// Snapshot only once the map has finished rendering its tiles.
mMap.setOnMapLoadedCallback(new GoogleMap.OnMapLoadedCallback() {
    @Override
    public void onMapLoaded() {savePhoto();}
});

Related

OpenGL ES: Rotate the complete scene to match portrait mode

Note: I'm new to Android and OpenGL.
Im building an Augmented Reality App based on ARToolKitX (Github: https://github.com/artoolkitx/artoolkitx/tree/8c6bd4e7be5e80c8439066b23473506aebbb496c/Source/ARXJ/ARXJProj/arxj/src/main/java/org/artoolkitx/arx/arxj).
The application shows the camera frame and displays objects with opengl on top.
My Problem:
ARToolKitX forces the app to be in landscape mode:
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
but when I change the screen orientation to SCREEN_ORIENTATION_PORTRAIT, the camera image and the opengl objects dont rotate to the correct orientation and stay in landscape mode.
Inside the ARRenderer I can use the drawVideoSettings method to rotate the camera image by itself, but that doesnt apply to the opengl objects.
ARToolKitX also provides a SurfaceChanged method inside the CameraSurface class, with the comment: "This is where [...] to create transformation matrix to scale and then rotate surface view, if the app is going to handle orientation changes."
But I don't have any idea what the transformation matrix has to look like or how to apply it.
Any help is appreciated.
ARRenderer:
public abstract class ARRenderer implements GLSurfaceView.Renderer {
private MyShaderProgram shaderProgram;
private int width, height, cameraIndex;
private int[] viewport = new int[4];
private boolean firstRun = true;
private final static String TAG = ARRenderer.class.getName();
/**
* Allows subclasses to load markers and prepare the scene. This is called after
* initialisation is complete.
*/
public boolean configureARScene() {
return true;
}
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
// Transparent background
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.f);
this.shaderProgram = new MyShaderProgram(new MyVertexShader(), new MyFragmentShader());
GLES20.glUseProgram(shaderProgram.getShaderProgramHandle());
}
public void onSurfaceChanged(GL10 unused, int w, int h) {
this.width = w;
this.height = h;
if(ARController.getInstance().isRunning()) {
//Update the frame settings for native rendering
ARController.getInstance().drawVideoSettings(cameraIndex, w, h, false, false, false, ARX_jni.ARW_H_ALIGN_CENTRE, ARX_jni.ARW_V_ALIGN_CENTRE, ARX_jni.ARW_SCALE_MODE_FILL, viewport);
}
}
public void onDrawFrame(GL10 unused) {
if (ARController.getInstance().isRunning()) {
// Initialize artoolkitX video background rendering.
if (firstRun) {
boolean isDisplayFrameInited = ARController.getInstance().drawVideoInit(cameraIndex);
if (!isDisplayFrameInited) {
Log.e(TAG, "Display Frame not inited");
}
if (!ARController.getInstance().drawVideoSettings(cameraIndex, this.width, this.height, false, false,
false, ARX_jni.ARW_H_ALIGN_CENTRE, ARX_jni.ARW_V_ALIGN_CENTRE,
ARX_jni.ARW_SCALE_MODE_FILL, viewport)) {
Log.e(TAG, "Error during call of displayFrameSettings.");
} else {
Log.i(TAG, "Viewport {" + viewport[0] + ", " + viewport[1] + ", " + viewport[2] + ", " + viewport[3] + "}.");
}
firstRun = false;
}
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
if (!ARController.getInstance().drawVideoSettings(cameraIndex)) {
Log.e(TAG, "Error during call of displayFrame.");
}
draw();
}
}
/**
* Should be overridden in subclasses and used to perform rendering.
*/
public void draw() {
GLES20.glViewport(viewport[0], viewport[1], viewport[2], viewport[3]);
//TODO: Check how to refactor near and far plane
shaderProgram.setProjectionMatrix(ARController.getInstance().getProjectionMatrix(10.0f, 10000.0f));
float[] camPosition = {1f, 1f, 1f};
shaderProgram.render(camPosition);
}
#SuppressWarnings("unused")
public ShaderProgram getShaderProgram() {
return shaderProgram;
}
public void setCameraIndex(int cameraIndex) {
this.cameraIndex = cameraIndex;
}
}
CameraSurface
class CameraSurfaceImpl implements CameraSurface {
/**
* Android logging tag for this class.
*/
private static final String TAG = CameraSurfaceImpl.class.getSimpleName();
private CameraDevice mCameraDevice;
private ImageReader mImageReader;
private Size mImageReaderVideoSize;
private final Context mAppContext;
private final CameraDevice.StateCallback mCamera2DeviceStateCallback = new CameraDevice.StateCallback() {
#Override
public void onOpened(#NonNull CameraDevice camera2DeviceInstance) {
mCameraDevice = camera2DeviceInstance;
startCaptureAndForwardFramesSession();
}
#Override
public void onDisconnected(#NonNull CameraDevice camera2DeviceInstance) {
camera2DeviceInstance.close();
mCameraDevice = null;
}
#Override
public void onError(#NonNull CameraDevice camera2DeviceInstance, int error) {
camera2DeviceInstance.close();
mCameraDevice = null;
}
};
/**
* Listener to inform of camera related events: start, frame, and stop.
*/
private final CameraEventListener mCameraEventListener;
/**
* Tracks if SurfaceView instance was created.
*/
private boolean mImageReaderCreated;
public CameraSurfaceImpl(CameraEventListener cameraEventListener, Context appContext){
this.mCameraEventListener = cameraEventListener;
this.mAppContext = appContext;
}
private final ImageReader.OnImageAvailableListener mImageAvailableAndProcessHandler = new ImageReader.OnImageAvailableListener() {
#Override
public void onImageAvailable(ImageReader reader)
{
Image imageInstance = reader.acquireLatestImage();
if (imageInstance == null) {
//Note: This seems to happen quite often.
Log.v(TAG, "onImageAvailable(): unable to acquire new image");
return;
}
// Get a ByteBuffer for each plane.
final Image.Plane[] imagePlanes = imageInstance.getPlanes();
final int imagePlaneCount = Math.min(4, imagePlanes.length); // We can handle up to 4 planes max.
final ByteBuffer[] imageBuffers = new ByteBuffer[imagePlaneCount];
final int[] imageBufferPixelStrides = new int[imagePlaneCount];
final int[] imageBufferRowStrides = new int[imagePlaneCount];
for (int i = 0; i < imagePlaneCount; i++) {
imageBuffers[i] = imagePlanes[i].getBuffer();
// For ImageFormat.YUV_420_888 the order of planes in the array returned by Image.getPlanes()
// is guaranteed such that plane #0 is always Y, plane #1 is always U (Cb), and plane #2 is always V (Cr).
// The Y-plane is guaranteed not to be interleaved with the U/V planes (in particular, pixel stride is
// always 1 in yPlane.getPixelStride()). The U/V planes are guaranteed to have the same row stride and
// pixel stride (in particular, uPlane.getRowStride() == vPlane.getRowStride() and uPlane.getPixelStride() == vPlane.getPixelStride(); ).
imageBufferPixelStrides[i] = imagePlanes[i].getPixelStride();
imageBufferRowStrides[i] = imagePlanes[i].getRowStride();
}
if (mCameraEventListener != null) {
mCameraEventListener.cameraStreamFrame(imageBuffers, imageBufferPixelStrides, imageBufferRowStrides);
}
imageInstance.close();
}
};
#Override
public void surfaceCreated() {
Log.i(TAG, "surfaceCreated(): called");
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(mAppContext);
int defaultCameraIndexId = mAppContext.getResources().getIdentifier("pref_defaultValue_cameraIndex","string", mAppContext.getPackageName());
mCamera2DeviceID = Integer.parseInt(prefs.getString("pref_cameraIndex", mAppContext.getResources().getString(defaultCameraIndexId)));
Log.i(TAG, "surfaceCreated(): will attempt to open camera \"" + mCamera2DeviceID +
"\", set orientation, set preview surface");
/*
Set the resolution from the settings as size for the glView. Because the video stream capture
is requested based on this size.
WARNING: While coding the preferences are taken from the res/xml/preferences.xml!!!
When building for Unity the actual used preferences are taken from the UnityARPlayer project!!!
*/
int defaultCameraValueId = mAppContext.getResources().getIdentifier("pref_defaultValue_cameraResolution","string",mAppContext.getPackageName());
String camResolution = prefs.getString("pref_cameraResolution", mAppContext.getResources().getString(defaultCameraValueId));
String[] dims = camResolution.split("x", 2);
mImageReaderVideoSize = new Size(Integer.parseInt(dims[0]),Integer.parseInt(dims[1]));
// Note that maxImages should be at least 2 for acquireLatestImage() to be any different than acquireNextImage() -
// discarding all-but-the-newest Image requires temporarily acquiring two Images at once. Or more generally,
// calling acquireLatestImage() with less than two images of margin, that is (maxImages - currentAcquiredImages < 2)
// will not discard as expected.
mImageReader = ImageReader.newInstance(mImageReaderVideoSize.getWidth(),mImageReaderVideoSize.getHeight(), ImageFormat.YUV_420_888, /* The maximum number of images the user will want to access simultaneously:*/ 2 );
mImageReader.setOnImageAvailableListener(mImageAvailableAndProcessHandler, null);
mImageReaderCreated = true;
} // end: public void surfaceCreated(SurfaceHolder holder)
/* Interface implemented by this SurfaceView subclass
holder: SurfaceHolder instance associated with SurfaceView instance that changed
format: pixel format of the surface
width: of the SurfaceView instance
height: of the SurfaceView instance
*/
#Override
public void surfaceChanged() {
Log.i(TAG, "surfaceChanged(): called");
// This is where to calculate the optimal size of the display and set the aspect ratio
// of the surface view (probably the service holder). Also where to Create transformation
// matrix to scale and then rotate surface view, if the app is going to handle orientation
// changes.
if (!mImageReaderCreated) {
surfaceCreated();
}
if (!isCamera2DeviceOpen()) {
openCamera2(mCamera2DeviceID);
}
if (isCamera2DeviceOpen() && (null == mYUV_CaptureAndSendSession)) {
startCaptureAndForwardFramesSession();
}
}
private void openCamera2(int camera2DeviceID) {
Log.i(TAG, "openCamera2(): called");
CameraManager camera2DeviceMgr = (CameraManager)mAppContext.getSystemService(Context.CAMERA_SERVICE);
try {
if (PackageManager.PERMISSION_GRANTED == ContextCompat.checkSelfPermission(mAppContext, Manifest.permission.CAMERA)) {
camera2DeviceMgr.openCamera(Integer.toString(camera2DeviceID), mCamera2DeviceStateCallback, null);
return;
}
} catch (CameraAccessException ex) {
Log.e(TAG, "openCamera2(): CameraAccessException caught, " + ex.getMessage());
} catch (Exception ex) {
Log.e(TAG, "openCamera2(): exception caught, " + ex.getMessage());
}
if (null == camera2DeviceMgr) {
Log.e(TAG, "openCamera2(): Camera2 DeviceMgr not set");
}
Log.e(TAG, "openCamera2(): abnormal exit");
}
private int mCamera2DeviceID = -1;
private CaptureRequest.Builder mCaptureRequestBuilder;
private CameraCaptureSession mYUV_CaptureAndSendSession;
private void startCaptureAndForwardFramesSession() {
if ((null == mCameraDevice) || (!mImageReaderCreated) /*|| (null == mPreviewSize)*/) {
return;
}
closeYUV_CaptureAndForwardSession();
try {
mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
List<Surface> surfaces = new ArrayList<>();
Surface surfaceInstance;
surfaceInstance = mImageReader.getSurface();
surfaces.add(surfaceInstance);
mCaptureRequestBuilder.addTarget(surfaceInstance);
mCameraDevice.createCaptureSession(
surfaces, // Output surfaces
new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(#NonNull CameraCaptureSession session) {
try {
if (mCameraEventListener != null) {
mCameraEventListener.cameraStreamStarted(mImageReaderVideoSize.getWidth(), mImageReaderVideoSize.getHeight(), "YUV_420_888", mCamera2DeviceID, false);
}
mYUV_CaptureAndSendSession = session;
// Session to repeat request to update passed in camSensorSurface
mYUV_CaptureAndSendSession.setRepeatingRequest(mCaptureRequestBuilder.build(), /* CameraCaptureSession.CaptureCallback cameraEventListener: */null, /* Background thread: */ null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
#Override
public void onConfigureFailed(#NonNull CameraCaptureSession session) {
Toast.makeText(mAppContext, "Unable to setup camera sensor capture session", Toast.LENGTH_SHORT).show();
}
}, // Callback for capture session state updates
null); // Secondary thread message queue
} catch (CameraAccessException ex) {
ex.printStackTrace();
}
}
#Override
public void closeCameraDevice() {
closeYUV_CaptureAndForwardSession();
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
if (null != mImageReader) {
mImageReader.close();
mImageReader = null;
}
if (mCameraEventListener != null) {
mCameraEventListener.cameraStreamStopped();
}
mImageReaderCreated = false;
}
private void closeYUV_CaptureAndForwardSession() {
if (mYUV_CaptureAndSendSession != null) {
mYUV_CaptureAndSendSession.close();
mYUV_CaptureAndSendSession = null;
}
}
/**
* Indicates whether or not camera2 device instance is available, opened, enabled.
*/
#Override
public boolean isCamera2DeviceOpen() {
return (null != mCameraDevice);
}
#Override
public boolean isImageReaderCreated() {
return mImageReaderCreated;
}
}
Edit:
/**
* Override the draw function from ARRenderer.
*/
#Override
public void draw() {
super.draw();
fpsCounter.frame();
if(maxfps<fpsCounter.getFPS()){
maxfps= fpsCounter.getFPS();
}
logger.log(Level.INFO, "FPS: " + maxfps);
// Initialize GL
GLES20.glEnable(GLES20.GL_CULL_FACE);
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
GLES20.glFrontFace(GLES20.GL_CCW);
// Look for trackables, and draw on each found one.
for (int trackableUID : trackables.keySet()) {
// If the trackable is visible, apply its transformation, and render the object
float[] modelViewMatrix = new float[16];
if (ARController.getInstance().queryTrackableVisibilityAndTransformation(trackableUID, modelViewMatrix)) {
float[] projectionMatrix = ARController.getInstance().getProjectionMatrix(10.0f, 10000.0f);
trackables.get(trackableUID).draw(projectionMatrix, modelViewMatrix);
}
}
}

Using WallpaperService to set wallpaper based on selected frequency by user

I am using WallpaperService to set the live wallpaper on homescreen. I am able to set 10 pics from resources and rotate them,
based on static frequency using thread. I want user to give the option of selecting multiple frequency. I tried to pass the thread sleep duration with user selected frequency. But the thread which is running is neither stopping or updating the values.
It keeps updating the wallpaper with the previously selected frequency. I've been stuck on this app for 3 weeks at various levels. I have tried WallpaperManager and setting the bitmap with services and AlarmManager, but all of them stop setting the wallpaper after a certain period of time. So I'm finally using WallpaperService. Please help me overcome this issue.
class WallpaperEngine extends Engine {
//frequency selected by user to update the wallpaper
private final int Wallpaper_DURATION = utils.getFrequency;
private int[] mImagesArray;
private int mImagesArrayIndex = 0;
private Thread mDrawWallpaper;
/**
 * Starts a background thread that repeatedly draws the current image, advances
 * to the next one, and sleeps for the user-selected frequency.
 *
 * NOTE(review): Wallpaper_DURATION is captured once at engine creation, so a
 * later frequency change by the user cannot affect this already-running loop —
 * that matches the "frequency never updates" symptom described above. Re-read
 * the preference inside the loop (or restart the engine) to pick up changes.
 */
public WallpaperEngine() {
    customWallpaperHelper = new CustomWallpaperHelper(getApplicationContext(), getResources());
    mImagesArray = new int[] {R.drawable.one,R.drawable.two,R.drawable.three,R.drawable.four,R.drawable.five};
    mDrawWallpaper = new Thread(new Runnable() {
        @Override
        public void run() {
            try {
                while (true) {
                    drawFrame();
                    incrementCounter();
                    Thread.sleep(Wallpaper_DURATION);
                }
            } catch (InterruptedException e) {
                // Restore the interrupt flag so callers can observe the stop
                // request; the loop ends either way.
                Thread.currentThread().interrupt();
            } catch (Exception e) {
                // Any other failure silently ends the slideshow, as before.
            }
        }
    });
    mDrawWallpaper.start();
}
// Advance to the next image index, wrapping back to 0 after the last one.
private void incrementCounter() {
    mImagesArrayIndex = (mImagesArrayIndex + 1) % mImagesArray.length;
}
// Lock the wallpaper surface, paint the current image onto it, and always
// release the canvas — even if drawing throws.
private void drawFrame() {
    final SurfaceHolder surfaceHolder = getSurfaceHolder();
    Canvas lockedCanvas = null;
    try {
        lockedCanvas = surfaceHolder.lockCanvas();
        if (lockedCanvas != null) {
            drawImage(lockedCanvas);
        }
    } finally {
        if (lockedCanvas != null) {
            surfaceHolder.unlockCanvasAndPost(lockedCanvas);
        }
    }
}
/**
 * Decodes the current drawable, scales it to fill the canvas, and draws it.
 */
private void drawImage(Canvas canvas)
{
    Bitmap image = BitmapFactory.decodeResource(getResources(),
            mImagesArray[mImagesArrayIndex]);
    Bitmap b = Bitmap.createScaledBitmap(image, canvas.getWidth(), canvas.getHeight(), true);
    // createScaledBitmap may return the same instance when no scaling is
    // needed; only recycle the intermediate when a new bitmap was produced.
    // This avoids accumulating a full-size decode per frame.
    if (b != image) {
        image.recycle();
    }
    canvas.drawBitmap(b, 0, 0, null);
}

Android Google Maps Update Camera Before Snapshot Out of Order

I am trying to snapshot the Google Map image AFTER I set the map to the bounds of the Polyline. I have used separate code samples found on StackExchange to do both the moveToBounds() and Snapshot which work fine individually, but when run in sequence the Snapshot is of the map image BEFORE the map was updated. I assume I need to insert a OnCameraChangeListener but I cannot make it work. Do I need to somehow nest Callbacks? Please advise.
/**
 * Snapshots the map and writes it as a PNG to DEFAULT_BMP_FILENAME in external
 * app storage.
 *
 * NOTE(review): moveToBounds() animates the camera asynchronously, so this
 * snapshot is taken BEFORE the camera move completes — trigger the snapshot
 * from animateCamera's CancelableCallback.onFinish() instead (see the answer
 * below).
 */
public void mapCapture() {
    moveToBounds(gpsTrackingPolyline);
    mMap.snapshot(new GoogleMap.SnapshotReadyCallback() {
        public void onSnapshotReady(Bitmap bitmap) {
            // Write image to disk; try-with-resources closes the stream even
            // on failure (the original leaked the FileOutputStream).
            File bmpFile = new File(getApplicationContext().getExternalFilesDir(null), DEFAULT_BMP_FILENAME);
            try (FileOutputStream out = new FileOutputStream(bmpFile)) {
                bitmap.compress(Bitmap.CompressFormat.PNG, 90, out);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    });
}
/**
 * Builds a bounding box around every point of the polyline and animates the
 * camera so the whole track is visible with a small pixel margin.
 */
private void moveToBounds(Polyline p)
{
    final LatLngBounds.Builder boundsBuilder = new LatLngBounds.Builder();
    for (LatLng point : p.getPoints()) {
        boundsBuilder.include(point);
    }
    final int padding = 40; // offset from edges of the map in pixels
    final CameraUpdate cameraUpdate =
            CameraUpdateFactory.newLatLngBounds(boundsBuilder.build(), padding);
    mMap.animateCamera(cameraUpdate);
}
Don't know if you have found solution or if you still need it. But still here it goes. Yes you need a callback which will let you know when map has finished animation. Do something like this:
// Wait for the camera animation to finish before snapshotting; otherwise the
// snapshot captures the pre-animation viewport. (The original snippet was also
// missing the closing brace of onSnapshotReady and would not compile.)
mMap.animateCamera(cu, new GoogleMap.CancelableCallback() {
    @Override
    public void onFinish() {
        //your code related to snapshot
        mMap.snapshot(new GoogleMap.SnapshotReadyCallback() {
            public void onSnapshotReady(Bitmap bitmap) {
                // Write image to disk
                //rest of your code
            }
        });
    }

    @Override
    public void onCancel() {
        // Animation was interrupted; nothing to do.
    }
});

How to take google Maps v2 snapshot?

I have to solve this with the new "snapshot maker" which was implemented in the August Google Maps release, but I don't know how to do it.
Can someone give me a simple example?
here is my code:
public class MainActivity extends Activity {
static LatLng HAMBURG = new LatLng(47.524749, 21.632745);
GoogleMap map;
File dbFile;
private File imageFile;
#SuppressLint("NewApi")
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
PolylineOptions line = new PolylineOptions();
map = ((MapFragment) getFragmentManager().findFragmentById(R.id.map))
.getMap();
/*
* Adatbázis
*/
try {
dbFile = getDatabasePath("/mnt/sdcard/Download/TeleSensors.db");
} catch (Exception e) {
}
SQLiteDatabase myDataBase = SQLiteDatabase.openDatabase(
dbFile.getAbsolutePath(), null, SQLiteDatabase.OPEN_READONLY);
Cursor curTAB = myDataBase.rawQuery("SELECT * FROM GPS_Values;", null);
Integer count = 0;
while (curTAB.moveToNext()) {
String s_latitude = curTAB.getString(1);
String s_longitude = curTAB.getString(2);
count++;
double latitude = Double.parseDouble(s_latitude);
double longitude = Double.parseDouble(s_longitude);
line.add(new LatLng(latitude, longitude));
Log.i("Coordinates",
s_latitude.toString() + " --- " + s_longitude.toString());
}
curTAB.close();
myDataBase.close();
// adatbázis vége
map.addPolyline(line);
// map.setMapType(GoogleMap.MAP_TYPE_HYBRID);
// map.setMapType(GoogleMap.MAP_TYPE_NONE);
map.setMapType(GoogleMap.MAP_TYPE_NORMAL);
// map.setMapType(GoogleMap.MAP_TYPE_SATELLITE);
// map.setMapType(GoogleMap.MAP_TYPE_TERRAIN);
// Move the camera instantly to hamburg with a zoom of 15.
map.moveCamera(CameraUpdateFactory.newLatLngZoom(HAMBURG, 15));
// Zoom in, animating the camera.
map.animateCamera(CameraUpdateFactory.zoomTo(15), 2000, null);
}
}
Thank you very much!
You have to call the Google Maps snapshot method in a button listener, because if you take it too early it will give you an error like "bitmap width has to be larger than 0".
Here is the code
private void button_listener() {
Button button = (Button) findViewById(R.id.button1);
button.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View v) {
SnapshotReadyCallback callback = new SnapshotReadyCallback() {
Bitmap bitmap;
#Override
public void onSnapshotReady(Bitmap snapshot) {
bitmap = snapshot;
try {
FileOutputStream out = new FileOutputStream("/mnt/sdcard/Download/TeleSensors.png");
bitmap.compress(Bitmap.CompressFormat.PNG, 90, out);
} catch (Exception e) {
e.printStackTrace();
}
}
};
map.snapshot(callback);
}
});
}
This one is better, it waits for your Map to be fully rendered, before taking the snapshot.
It was updated on 31-Oct-2013.
// Wait until the map is fully rendered before snapshotting. (The original
// snippet neither handled the checked FileNotFoundException from the
// FileOutputStream constructor — so it did not compile — nor closed the stream.)
mMap.setOnMapLoadedCallback(new GoogleMap.OnMapLoadedCallback() {
    public void onMapLoaded() {
        mMap.snapshot(new GoogleMap.SnapshotReadyCallback() {
            public void onSnapshotReady(Bitmap bitmap) {
                // Write image to disk
                try (FileOutputStream out = new FileOutputStream("/mnt/sdcard/map.png")) {
                    bitmap.compress(Bitmap.CompressFormat.PNG, 90, out);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });
    }
});
Extracted from http://googlegeodevelopers.blogspot.sg/2013/10/ghost-markers-in-your-neighborhood-new.html
Try for Kotlin Android like this when click on button to take google map snapshot:
val snapshotReadyCallback : GoogleMap.SnapshotReadyCallback = GoogleMap.SnapshotReadyCallback { selectedScreenShot ->
ivMapPreview.setImageBitmap(selectedScreenShot);
}
val onMapLoadedCallback : GoogleMap.OnMapLoadedCallback = GoogleMap.OnMapLoadedCallback {
mMap!!.snapshot(snapshotReadyCallback)
}
mMap!!.setOnMapLoadedCallback(onMapLoadedCallback)
Be successful.
I' ve tried the accepted answer but it didn't work then tried another approach that worked for me.
private void CaptureScreen() {
if(initMap()){
SnapshotReadyCallback callback = new SnapshotReadyCallback() {
Bitmap bitmap=null;
#Override
public void onSnapshotReady(Bitmap snapshot) {
// TODO Auto-generated method stub
bitmap = snapshot;
try {
saveImage(bitmap);
Toast.makeText(getApplicationContext(), "Image Saved", Toast.LENGTH_LONG).show();
} catch (Exception e) {
e.printStackTrace();
}
}
private void saveImage(Bitmap bitmap) throws IOException{
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
bitmap.compress(Bitmap.CompressFormat.PNG, 40, bytes);
File f = new File(Environment.getExternalStorageDirectory() + File.separator + "test.png");
f.createNewFile();
FileOutputStream fo = new FileOutputStream(f);
fo.write(bytes.toByteArray());
fo.close();
}
};
mMap.snapshot(callback);
}
else{
Toast.makeText(this, "Map is not Initialized yet", Toast.LENGTH_LONG).show();
return ;
}
}
https://developers.google.com/maps/documentation/android/reference/com/google/android/gms/maps/GoogleMap#snapshot(com.google.android.gms.maps.GoogleMap.SnapshotReadyCallback, android.graphics.Bitmap)
public final void snapshot (GoogleMap.SnapshotReadyCallback callback)
Takes a snapshot of the map. You can use snapshots within your application when an interactive map would be difficult, or impossible,
to use. For example, images produced with the snapshot() method can be
used to display a thumbnail of the map in your app, or a snapshot in
the notification center.

Capture screen shot of GoogleMap Android API V2

Final Update
The feature request has been fulfilled by Google. Please see this answer below.
Original Question
Using the old version of the Google Maps Android API, I was able to capture a screenshot of the google map to share via social media. I used the following code to capture the screenshot and save the image to a file and it worked great:
/**
 * Captures the map via the View drawing cache and writes it to app-private
 * storage as a JPEG; returns the file name, or "" on failure.
 *
 * NOTE(review): drawing-cache capture does not work for the Maps API v2 OpenGL
 * layer — use GoogleMap.snapshot() instead (see the update below).
 */
public String captureScreen()
{
    String storageState = Environment.getExternalStorageState();
    Log.d("StorageState", "Storage state is: " + storageState);

    // image naming and path to include sd card appending name you choose for file
    String mPath = this.getFilesDir().getAbsolutePath();

    // create bitmap screen capture
    Bitmap bitmap;
    View v1 = this.mapView.getRootView();
    v1.setDrawingCacheEnabled(true);
    bitmap = Bitmap.createBitmap(v1.getDrawingCache());
    v1.setDrawingCacheEnabled(false);

    OutputStream fout = null;
    String filePath = System.currentTimeMillis() + ".jpeg";
    try
    {
        fout = openFileOutput(filePath,
                MODE_WORLD_READABLE);
        // Write the bitmap to the file
        bitmap.compress(Bitmap.CompressFormat.JPEG, 90, fout);
        fout.flush();
    }
    catch (FileNotFoundException e)
    {
        Log.d("ImageCapture", "FileNotFoundException");
        Log.d("ImageCapture", e.getMessage());
        filePath = "";
    }
    catch (IOException e)
    {
        Log.d("ImageCapture", "IOException");
        Log.d("ImageCapture", e.getMessage());
        filePath = "";
    }
    finally
    {
        // Close in finally so the stream is released even when compress/flush
        // throws (the original leaked it on the exception paths).
        if (fout != null)
        {
            try { fout.close(); } catch (IOException ignored) { /* best effort */ }
        }
    }
    return filePath;
}
However, the new GoogleMap object used by V2 of the api does not have a "getRootView()" method like MapView does.
I tried to do this:
SupportMapFragment mapFragment = (SupportMapFragment) getSupportFragmentManager()
.findFragmentById(R.id.basicMap);
View v1 = mapFragment.getView();
But the screenshot that I get does not have any map content and looks like this:
Has anyone figured out how to take a screenshot of the new Google Maps Android API V2?
Update
I also tried to get the rootView this way:
View v1 = getWindow().getDecorView().getRootView();
This results in a screenshot that includes the action bar at the top of the screen, but the map is still blank like the screenshot I attached.
Update
A feature request has been submitted to Google. Please go star the feature request if this is something you want google to add in the future: Add screenshot ability to Google Maps API V2
Update - Google has added a snapshot method**!:
The feature request for a method to take a screen shot of the Android Google Map API V2 OpenGL layer has been fulfilled.
To take a screenshot, simply implement the following interface:
public abstract void onSnapshotReady (Bitmap snapshot)
and call:
public final void snapshot (GoogleMap.SnapshotReadyCallback callback)
Example that takes a screenshot, then presents the standard "Image Sharing" options:
public void captureScreen()
{
SnapshotReadyCallback callback = new SnapshotReadyCallback()
{
#Override
public void onSnapshotReady(Bitmap snapshot)
{
// TODO Auto-generated method stub
bitmap = snapshot;
OutputStream fout = null;
String filePath = System.currentTimeMillis() + ".jpeg";
try
{
fout = openFileOutput(filePath,
MODE_WORLD_READABLE);
// Write the string to the file
bitmap.compress(Bitmap.CompressFormat.JPEG, 90, fout);
fout.flush();
fout.close();
}
catch (FileNotFoundException e)
{
// TODO Auto-generated catch block
Log.d("ImageCapture", "FileNotFoundException");
Log.d("ImageCapture", e.getMessage());
filePath = "";
}
catch (IOException e)
{
// TODO Auto-generated catch block
Log.d("ImageCapture", "IOException");
Log.d("ImageCapture", e.getMessage());
filePath = "";
}
openShareImageDialog(filePath);
}
};
mMap.snapshot(callback);
}
Once the image is finished being captured, it will trigger the standard "Share Image" dialog so the user can pick how they'd like to share it:
/**
 * Presents the standard "Share Image" chooser for a previously captured image.
 *
 * @param filePath name of the image file in internal storage; an empty string
 *                 signals that the capture failed and an error dialog is shown instead.
 */
public void openShareImageDialog(String filePath)
{
    // Check for the failure sentinel BEFORE touching the file system — the
    // original resolved getFileStreamPath(filePath) even when filePath was "".
    if(!filePath.equals(""))
    {
        File file = this.getFileStreamPath(filePath);
        final ContentValues values = new ContentValues(2);
        values.put(MediaStore.Images.Media.MIME_TYPE, "image/jpeg");
        values.put(MediaStore.Images.Media.DATA, file.getAbsolutePath());
        // Register the file with the MediaStore so the receiving app can read it.
        final Uri contentUriFile = getContentResolver().insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values);
        final Intent intent = new Intent(android.content.Intent.ACTION_SEND);
        intent.setType("image/jpeg");
        intent.putExtra(android.content.Intent.EXTRA_STREAM, contentUriFile);
        startActivity(Intent.createChooser(intent, "Share Image"));
    }
    else
    {
        //This is a custom class I use to show dialogs...simply replace this with whatever you want to show an error message, Toast, etc.
        DialogUtilities.showOkDialogWithText(this, R.string.shareImageFailed);
    }
}
Documentation is here
Below are the steps to capture screen shot of Google Map V2 with example
Step 1. Open the Android SDK Manager (Window > Android SDK Manager), expand Extras, then update/install Google Play Services to Revision 10. Ignore this step if it is already installed.
Read Notes here https://developers.google.com/maps/documentation/android/releases#august_2013
Step 2. Restart Eclipse
Step 3. import com.google.android.gms.maps.GoogleMap.SnapshotReadyCallback;
Step 4. Make Method to Capture/Store Screen/image of Map like below
public void CaptureMapScreen()
{
SnapshotReadyCallback callback = new SnapshotReadyCallback() {
Bitmap bitmap;
#Override
public void onSnapshotReady(Bitmap snapshot) {
// TODO Auto-generated method stub
bitmap = snapshot;
try {
FileOutputStream out = new FileOutputStream("/mnt/sdcard/"
+ "MyMapScreen" + System.currentTimeMillis()
+ ".png");
// above "/mnt ..... png" => is a storage path (where image will be stored) + name of image you can customize as per your Requirement
bitmap.compress(Bitmap.CompressFormat.PNG, 90, out);
} catch (Exception e) {
e.printStackTrace();
}
}
};
myMap.snapshot(callback);
// myMap is object of GoogleMap +> GoogleMap myMap;
// which is initialized in onCreate() =>
// myMap = ((SupportMapFragment) getSupportFragmentManager().findFragmentById(R.id.map_pass_home_call)).getMap();
}
Step 5. Now call this CaptureMapScreen() method where you want to capture the image
in my case i am calling this method on Button click in my onCreate() which is working fine
like:
// Step 5 example: wire CaptureMapScreen() to a button click (e.g. in onCreate()).
Button btnCap = (Button) findViewById(R.id.btnTakeScreenshot);
btnCap.setOnClickListener(new OnClickListener() {
    @Override
    public void onClick(View v) {
        try {
            CaptureMapScreen();
        } catch (Exception e) {
            // Log instead of crashing if the map isn't ready yet.
            e.printStackTrace();
        }
    }
});
Check Doc here and here
I captured a map screenshot; I hope this will be helpful.
private GoogleMap map;
private static LatLng latLong;
`
public void onMapReady(GoogleMap googleMap) {
map = googleMap;
setMap(this.map);
animateCamera();
map.moveCamera (CameraUpdateFactory.newLatLng (latLong));
map.setOnMapLoadedCallback (new GoogleMap.OnMapLoadedCallback () {
#Override
public void onMapLoaded() {
snapShot();
}
});
}
`
snapShot() method for taking screenshot of map
public void snapShot(){
GoogleMap.SnapshotReadyCallback callback=new GoogleMap.SnapshotReadyCallback () {
Bitmap bitmap;
#Override
public void onSnapshotReady(Bitmap snapshot) {
bitmap=snapshot;
try{
file=new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES),"map.png");
FileOutputStream fout=new FileOutputStream (file);
bitmap.compress (Bitmap.CompressFormat.PNG,90,fout);
Toast.makeText (PastValuations.this, "Capture", Toast.LENGTH_SHORT).show ();
}catch (Exception e){
e.printStackTrace ();
Toast.makeText (PastValuations.this, "Not Capture", Toast.LENGTH_SHORT).show ();
}
}
};map.snapshot (callback);
}
My output is below
Edit: this answer is no longer valid - the feature request for screenshots on Google Maps Android API V2 has been fulfilled. See this answer for an example.
Original Accepted Answer
Since the new Android API v2 Maps are displayed using OpenGL, there are no possibilities to create a screenshot.
Since the top-voted answer doesn't work with polylines and other overlays on top of the map fragment (what I was looking for), I want to share this solution.
public void captureScreen()
{
GoogleMap.SnapshotReadyCallback callback = new GoogleMap.SnapshotReadyCallback()
{
#Override
public void onSnapshotReady(Bitmap snapshot) {
try {
getWindow().getDecorView().findViewById(android.R.id.content).setDrawingCacheEnabled(true);
Bitmap backBitmap = getWindow().getDecorView().findViewById(android.R.id.content).getDrawingCache();
Bitmap bmOverlay = Bitmap.createBitmap(
backBitmap.getWidth(), backBitmap.getHeight(),
backBitmap.getConfig());
Canvas canvas = new Canvas(bmOverlay);
canvas.drawBitmap(snapshot, new Matrix(), null);
canvas.drawBitmap(backBitmap, 0, 0, null);
OutputStream fout = null;
String filePath = System.currentTimeMillis() + ".jpeg";
try
{
fout = openFileOutput(filePath,
MODE_WORLD_READABLE);
// Write the string to the file
bmOverlay.compress(Bitmap.CompressFormat.JPEG, 90, fout);
fout.flush();
fout.close();
}
catch (FileNotFoundException e)
{
// TODO Auto-generated catch block
Log.d("ImageCapture", "FileNotFoundException");
Log.d("ImageCapture", e.getMessage());
filePath = "";
}
catch (IOException e)
{
// TODO Auto-generated catch block
Log.d("ImageCapture", "IOException");
Log.d("ImageCapture", e.getMessage());
filePath = "";
}
openShareImageDialog(filePath);
} catch (Exception e) {
e.printStackTrace();
}
}
};
;
map.snapshot(callback);
}
private GoogleMap mMap;
SupportMapFragment mapFragment;
LinearLayout linearLayout;
String jobId="1";
File file;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate (savedInstanceState);
setContentView (R.layout.activity_maps);
linearLayout=(LinearLayout)findViewById (R.id.linearlayout);
// Obtain the SupportMapFragment and get notified when the map is ready to be used.
mapFragment = (SupportMapFragment)getSupportFragmentManager ()
.findFragmentById (R.id.map);
mapFragment.getMapAsync (this);
//Taking Snapshot of Google Map
}
/**
* Manipulates the map once available.
* This callback is triggered when the map is ready to be used.
* This is where we can add markers or lines, add listeners or move the camera. In this case,
* we just add a marker near Sydney, Australia.
* If Google Play services is not installed on the device, the user will be prompted to install
* it inside the SupportMapFragment. This method will only be triggered once the user has
* installed Google Play services and returned to the app.
*/
#Override
public void onMapReady(GoogleMap googleMap) {
mMap = googleMap;
// Add a marker in Sydney and move the camera
LatLng sydney = new LatLng (-26.888033, 75.802754);
mMap.addMarker (new MarkerOptions ().position (sydney).title ("Kailash Tower"));
mMap.moveCamera (CameraUpdateFactory.newLatLng (sydney));
mMap.setOnMapLoadedCallback (new GoogleMap.OnMapLoadedCallback () {
#Override
public void onMapLoaded() {
snapShot();
}
});
}
// Initializing Snapshot Method
public void snapShot(){
GoogleMap.SnapshotReadyCallback callback=new GoogleMap.SnapshotReadyCallback () {
Bitmap bitmap;
#Override
public void onSnapshotReady(Bitmap snapshot) {
bitmap=snapshot;
bitmap=getBitmapFromView(linearLayout);
try{
file=new File (getExternalCacheDir (),"map.png");
FileOutputStream fout=new FileOutputStream (file);
bitmap.compress (Bitmap.CompressFormat.PNG,90,fout);
Toast.makeText (MapsActivity.this, "Capture", Toast.LENGTH_SHORT).show ();
sendSceenShot (file);
}catch (Exception e){
e.printStackTrace ();
Toast.makeText (MapsActivity.this, "Not Capture", Toast.LENGTH_SHORT).show ();
}
}
};mMap.snapshot (callback);
}
/**
 * Renders an arbitrary View into a new ARGB_8888 bitmap of the view's current size.
 * If the view has no background drawable, the bitmap is filled white first so the
 * result is not transparent.
 */
private Bitmap getBitmapFromView(View view) {
    final Bitmap target = Bitmap.createBitmap(view.getWidth(), view.getHeight(), Bitmap.Config.ARGB_8888);
    final Canvas sink = new Canvas(target);
    final Drawable background = view.getBackground();
    if (background == null) {
        // No background drawable: paint a white base layer.
        sink.drawColor(Color.WHITE);
    } else {
        // Has a background drawable: draw it onto the canvas first.
        background.draw(sink);
    }
    view.draw(sink);
    return target;
}
//Implementing Api using Retrofit
private void sendSceenShot(File file) {
RequestBody job=null;
Gson gson = new GsonBuilder ()
.setLenient ()
.create ();
Retrofit retrofit = new Retrofit.Builder ()
.baseUrl (BaseUrl.url)
.addConverterFactory (GsonConverterFactory.create (gson))
.build ();
final RequestBody requestBody = RequestBody.create (MediaType.parse ("image/*"),file);
job=RequestBody.create (MediaType.parse ("text"),jobId);
MultipartBody.Part fileToUpload = MultipartBody.Part.createFormData ("name",file.getName (), requestBody);
API service = retrofit.create (API.class);
Call<ScreenCapture_Pojo> call=service.sendScreen (job,fileToUpload);
call.enqueue (new Callback<ScreenCapture_Pojo> () {
#Override
public void onResponse(Call <ScreenCapture_Pojo> call, Response<ScreenCapture_Pojo> response) {
if (response.body ().getMessage ().equalsIgnoreCase ("Success")){
Toast.makeText (MapsActivity.this, "success", Toast.LENGTH_SHORT).show ();
}
}
#Override
public void onFailure(Call <ScreenCapture_Pojo> call, Throwable t) {
}
});
}
}
I hope this would help to capture the screenshot of your map
Method call:
gmap.setOnMapLoadedCallback(mapLoadedCallback);
Method declaration:
final SnapshotReadyCallback snapReadyCallback = new SnapshotReadyCallback() {
Bitmap bitmap;
#Override
public void onSnapshotReady(Bitmap snapshot) {
bitmap = snapshot;
try {
//do something with your snapshot
imageview.setImageBitmap(bitmap);
} catch (Exception e) {
e.printStackTrace();
}
}
};
GoogleMap.OnMapLoadedCallback mapLoadedCallback = new GoogleMap.OnMapLoadedCallback() {
#Override
public void onMapLoaded() {
gmap.snapshot(snapReadyCallback);
}
};
Eclipse DDMS can capture the screen even it's google map V2.
Try to call /system/bin/screencap or /system/bin/screenshot if you have the "root". I learned that from How Eclipse android DDMS implement "screen capture"

Categories

Resources