I am trying to build a settings panel inside the info window so that a slider in the window can be used to adjust something. During implementation I found that the info window always takes the click focus: there is no response at all when I click the sliders inside the info window. Could you please tell me how to make the slider controls receive the touch when the user clicks and drags inside the info window?
Below is my code:
package com.larry.proto.maptest;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.BitmapFactory.Options;
import android.graphics.BitmapShader;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ColorMatrix;
import android.graphics.ColorMatrixColorFilter;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.Shader.TileMode;
import android.location.Criteria;
import android.location.Location;
import android.location.LocationManager;
import android.os.Bundle;
import android.os.Environment;
import android.os.StrictMode;
import android.os.Vibrator;
import android.support.v4.app.FragmentActivity;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.Window;
import android.widget.Button;
import android.widget.SeekBar;
import android.widget.TextView;
import android.widget.Toast;
import android.widget.SeekBar.OnSeekBarChangeListener;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GooglePlayServicesUtil;
import com.google.android.gms.location.LocationListener;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.GoogleMap.InfoWindowAdapter;
import com.google.android.gms.maps.GoogleMap.OnInfoWindowClickListener;
import com.google.android.gms.maps.GoogleMap.OnMapLongClickListener;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.BitmapDescriptorFactory;
import com.google.android.gms.maps.model.CameraPosition;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.Marker;
import com.google.android.gms.maps.model.MarkerOptions;
import com.google.android.gms.maps.model.Polyline;
import com.google.android.gms.maps.model.PolylineOptions;
public class MainActivity extends FragmentActivity implements LocationListener {
final int RQS_GooglePlayServices = 1;
private GoogleMap myMap;
Location myLocation;
LocationManager lm;
private ArrayList<Checkpoint> cpList = new ArrayList<Checkpoint> ();
private ArrayList<Marker> thisMarkerList = new ArrayList<Marker> ();
private List<LatLng> ppoints = new ArrayList<LatLng> ();
private int requestCode;
private LatLng globalPoint;
private String infoDesp;
private String infoLat;
private String infoLong;
private Marker mSelectedMarker;
private boolean mRefreshingInfoWindow;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
this.requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(R.layout.activity_main);
SupportMapFragment sFragment = (SupportMapFragment) getSupportFragmentManager().findFragmentById(R.id.map);
myMap = sFragment.getMap();
myMap.setMyLocationEnabled(true);
myMap.setMapType(GoogleMap.MAP_TYPE_SATELLITE);
lm = (LocationManager) getSystemService (LOCATION_SERVICE);
String provider = lm.getBestProvider(new Criteria(), true);
Location loc = lm.getLastKnownLocation(provider);
if(loc!=null)
{
onLocationChanged(loc);
}
SharedPreferences prefs = getApplicationContext().getSharedPreferences("MyPref", 0);
String restoredLat = prefs.getString("lat", null);
String restoredLng = prefs.getString("lng", null);
String restoredText = prefs.getString("title", null);
myMap.setOnMapLongClickListener(setting());
myMap.getUiSettings().setRotateGesturesEnabled(false);
if(null!=restoredText){
double lat = null!=restoredLat ? Double.parseDouble(restoredLat) : null;
double lng = null!= restoredLng ? Double.parseDouble(restoredLng) : null;
LatLng posi = new LatLng(lat, lng);
myMap.addMarker(new MarkerOptions().position(posi).title(restoredText)
.snippet(String.valueOf(restoredLat) + "," +
String.valueOf(restoredLng))
.icon(BitmapDescriptorFactory.defaultMarker(BitmapDescriptorFactory.HUE_ORANGE)));
CameraPosition cameraPosition = new CameraPosition.Builder().target(posi).zoom(14).build();
myMap.animateCamera(CameraUpdateFactory.newCameraPosition(cameraPosition));
}
}
private OnMapLongClickListener setting() {
// TODO Auto-generated method stub
return new OnMapLongClickListener(){
@Override
public void onMapLongClick(LatLng point) {
// TODO Auto-generated method stub
globalPoint = point;
requestCode = 0;
Intent mIntent = new Intent();
mIntent.setClass(MainActivity.this, SliderActivity.class);
startActivityForResult(mIntent, requestCode);
}};
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int id = item.getItemId();
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
@Override
protected void onResume() {
super.onResume();
int resultCode = GooglePlayServicesUtil.isGooglePlayServicesAvailable(getApplicationContext());
if (resultCode == ConnectionResult.SUCCESS) {
Toast.makeText(getApplicationContext(),
"isGooglePlayServicesAvailable SUCCESS", Toast.LENGTH_LONG)
.show();
} else {
GooglePlayServicesUtil.getErrorDialog(resultCode, this,
RQS_GooglePlayServices);
}
}
@Override
public void onLocationChanged(Location location) {
// TODO Auto-generated method stub
LatLng latlng = new LatLng(location.getLatitude() , location.getLongitude());
myMap.moveCamera(CameraUpdateFactory.newLatLng(latlng));
myMap.animateCamera(CameraUpdateFactory.zoomTo(11));
}
private void addMarkersToMap() {
myMap.clear();
ppoints.clear();
LatLng ll;
for (int i = 0; i < cpList.size(); i++) {
ll = new LatLng(cpList.get(i).getPoint().latitude, cpList.get(i).getPoint().longitude);
ppoints.add(ll);
}
int length = ppoints.size();
LatLng first = null ;
if(length >2){
for( int i = 0 ; i < ppoints.size() -1 ; i++){
first = ppoints.get(0);
LatLng pt = ppoints.get(i);
LatLng ptNext = ppoints.get(i+1);
DrawArrowHead(myMap, pt, ptNext , Color.RED , cpList.get(i).getDesp() , i+1 , true);
createDashedLine(myMap, pt, ptNext , Color.RED);
}
List<LatLng> current = ppoints.subList(ppoints.size()-2, ppoints.size());
for( int i = 0 ; i < current.size() -1 ; i++){
first = ppoints.get(0);
LatLng pt = current.get(i);
LatLng ptNext = current.get(i+1);
DrawArrowHead(myMap, pt, ptNext , Color.BLUE , cpList.get(i).getDesp() ,i+1 , false);
createDashedLine(myMap, pt, ptNext , Color.BLUE);
}
myMap.addMarker(new MarkerOptions().position(new LatLng(first.latitude ,
first.longitude )).title("Starting Point")
.snippet(String.valueOf(first.latitude) + "," +
String.valueOf(first.longitude))
.icon(BitmapDescriptorFactory.defaultMarker(BitmapDescriptorFactory.HUE_RED)));
}else if( length ==1){
for( int i = 0 ; i < ppoints.size() ; i++){
myMap.addMarker(new MarkerOptions().position(new LatLng(ppoints.get(i).latitude ,
ppoints.get(i).longitude )).title("Starting Point")
.snippet(String.valueOf(ppoints.get(i).latitude) + "," +
String.valueOf(ppoints.get(i).longitude))
.icon(BitmapDescriptorFactory.defaultMarker(BitmapDescriptorFactory.HUE_RED)));
}
}
else{
myMap.addPolyline(new PolylineOptions().width(5).color(Color.BLUE).addAll(ppoints));
for( int i = 0 ; i < ppoints.size() -1 ; i++){
first = ppoints.get(0);
LatLng pt = ppoints.get(i);
LatLng ptNext = ppoints.get(i+1);
DrawArrowHead(myMap, pt, ptNext , Color.BLUE , cpList.get(i).getDesp() , i+1 , true);
createDashedLine(myMap, pt, ptNext , Color.BLUE);
}
myMap.addMarker(new MarkerOptions().position(new LatLng(first.latitude ,
first.longitude )).title("Starting Point")
.snippet(String.valueOf(first.latitude) + "," +
String.valueOf(first.longitude))
.icon(BitmapDescriptorFactory.defaultMarker(BitmapDescriptorFactory.HUE_RED)));
}
}
private final double degreesPerRadian = 180.0 / Math.PI;
private void DrawArrowHead(GoogleMap mMap, LatLng from, LatLng to , int color, String desp , int number , boolean boolR){
double bearing = GetBearing(from, to);
double adjBearing = Math.round(bearing / 3) * 3;
while (adjBearing >= 120) {
adjBearing -= 120;
}
StrictMode.ThreadPolicy policy = new StrictMode.ThreadPolicy.Builder().permitAll().build();
StrictMode.setThreadPolicy(policy);
URL url;
Bitmap image = null;
try {
String name = "dir_" + String.valueOf((int)adjBearing) + ".png";
String link = "http://www.google.com/intl/en_ALL/mapfiles/" + name;
Log.d("ling k" , link);
url = new URL(link);
try {
String imageName = link.substring(link.lastIndexOf("/"), link.length());
File file = new File(Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + imageName );
if(!file.exists()){
startService(new Intent(MainActivity.this , ImageIntentService.class).putExtra("urlpath", link));
Options mBitmapOptions = new BitmapFactory.Options();
mBitmapOptions.inPreferredConfig = Bitmap.Config.ARGB_4444;
mBitmapOptions.inDither = true;
mBitmapOptions.inPurgeable = true;
mBitmapOptions.inInputShareable = true;
Rect rec = new Rect();
rec.set(-1, -1, -1, -1);
image = BitmapFactory.decodeStream(url.openConnection().getInputStream() , rec , mBitmapOptions);
image = getResizedBitmap(image , image.getHeight()*2 , image.getWidth()*2);
}else{
Options mBitmapOptions = new BitmapFactory.Options();
mBitmapOptions.inPreferredConfig = Bitmap.Config.ARGB_4444;
mBitmapOptions.inDither = true;
mBitmapOptions.inPurgeable = true;
mBitmapOptions.inInputShareable = true;
Rect rec = new Rect();
rec.set(-1, -1, -1, -1);
try {
image = BitmapFactory.decodeStream(new FileInputStream(file), null, mBitmapOptions);
image = getResizedBitmap(image , image.getHeight()*2 , image.getWidth()*2);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
}
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} catch (MalformedURLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
if (image != null){
float anchorX = 0.5f;
float anchorY = 0.5f;
int offsetX = 0;
int offsetY = 0;
int width = image.getWidth();
int height = image.getHeight();
if (bearing >= 292.5 && bearing < 335.5){
offsetX = 24;
offsetY = 24;
offsetX = width;
offsetY = height;
}
else if (bearing >= 247.5 && bearing < 292.5){
offsetX = 24;
offsetY = 12;
offsetX = width;
offsetY = height/2;
}
else if (bearing >= 202.5 && bearing < 247.5){
offsetX = 24;
offsetY = 0;
offsetX = width;
offsetY = 0;
}
else if (bearing >= 157.5 && bearing < 202.5){
offsetX = 12;
offsetY = 0;
offsetX = width/2;
offsetY = 0;
}
else if (bearing >= 112.5 && bearing < 157.5){
offsetX = 0;
offsetY = 0;
}
else if (bearing >= 67.5 && bearing < 112.5){
offsetX = 0;
offsetY = 12;
offsetX = 0;
offsetY = height/2;
}
else if (bearing >= 22.5 && bearing < 67.5){
offsetX = 0;
offsetY = 24;
offsetX = 0;
offsetY = height;
}
else {
offsetX = 12;
offsetY = 24;
offsetX = width/2;
offsetY = height;
}
Bitmap wideBmp;
Canvas wideBmpCanvas;
Rect src, dest;
wideBmp = Bitmap.createBitmap(image.getWidth() *2, image.getHeight() * 2, image.getConfig());
wideBmpCanvas = new Canvas(wideBmp);
src = new Rect(0, 0, image.getWidth(), image.getHeight());
dest = new Rect(src);
dest.offset(offsetX, offsetY);
Paint maskedPaint = new Paint();
int r = Color.red(color);
int g = Color.green(color);
int b = Color.blue(color);
ColorMatrix cm = new ColorMatrix();
cm.set(new float[] {
1, 0, 0, 0, r,
0, 1, 0, 0, g,
0, 0, 1, 0, b,
0, 0, 0, 1, 0 }); // last row leaves alpha unchanged
maskedPaint.setColorFilter(new ColorMatrixColorFilter(cm));
maskedPaint.setShader(new BitmapShader(image, TileMode.REPEAT, TileMode.REPEAT));
// color
wideBmpCanvas.drawBitmap(image, src, dest, maskedPaint);
if(boolR == true){
Paint stroke = new Paint();
stroke.setColor(Color.YELLOW);
stroke.setAntiAlias(false);
stroke.setStrokeWidth(8);
stroke.setTextSize(60);
wideBmpCanvas.drawText(String.valueOf(number),
(float)wideBmpCanvas.getWidth()/2, (float)wideBmpCanvas.getHeight()/2, stroke);
}
infoDesp = desp;
infoLat =String.valueOf(to.latitude);
infoLong = String.valueOf(to.longitude);
mMap.setInfoWindowAdapter(new CustomInfoAdapter());
mMap.addMarker(new MarkerOptions()
.position(to).title(desp)
.snippet(String.valueOf(to.latitude) + "," + String.valueOf(to.longitude))
.icon(BitmapDescriptorFactory.fromBitmap(wideBmp))
.anchor(anchorX, anchorY));
}
}
class CustomInfoAdapter implements InfoWindowAdapter{
@Override
public View getInfoContents(Marker arg0) {
View marker = ((LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE)).
inflate(R.layout.custom_marker_layout, null);
if (!mRefreshingInfoWindow) {
mSelectedMarker = arg0;
TextView numTxt = (TextView) marker.findViewById(R.id.num_txt);
TextView lat1 = (TextView) marker.findViewById(R.id.textView1);
TextView long1 = (TextView) marker.findViewById(R.id.textView2);
SeekBar sk = (SeekBar) marker.findViewById(R.id.seekBar1);
numTxt.setText(arg0.getTitle());
lat1.setText(String.valueOf(arg0.getPosition().latitude));
long1.setText(String.valueOf(arg0.getPosition().longitude));
sk.setOnSeekBarChangeListener(new OnSeekBarChangeListener() {
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
}
@Override
public void onProgressChanged(SeekBar seekBar, int progress,boolean fromUser) {
if(progress==25){
Vibrator v = (Vibrator) MainActivity.this.getSystemService(Context.VIBRATOR_SERVICE);
v.vibrate(500);
}
}
});
sk.requestFocus();
}else{
refreshInfoWindow();
}
return marker;
}
@Override
public View getInfoWindow(Marker arg0) {
return null;
}
private void refreshInfoWindow() {
if (mSelectedMarker == null) {
return;
}
mRefreshingInfoWindow = true;
mSelectedMarker.showInfoWindow();
mRefreshingInfoWindow = false;
}
}
private double GetBearing(LatLng from, LatLng to){
double lat1 = from.latitude * Math.PI / 180.0;
double lon1 = from.longitude * Math.PI / 180.0;
double lat2 = to.latitude * Math.PI / 180.0;
double lon2 = to.longitude * Math.PI / 180.0;
double angle = - Math.atan2( Math.sin( lon1 - lon2 ) * Math.cos( lat2 ),
Math.cos( lat1 ) * Math.sin( lat2 ) - Math.sin( lat1 ) * Math.cos( lat2 ) * Math.cos( lon1 - lon2 ) );
if (angle < 0.0)
angle += Math.PI * 2.0;
angle = angle * degreesPerRadian;
return angle;
}
public Bitmap getResizedBitmap(Bitmap bm, int newHeight, int newWidth) {
int width = bm.getWidth();
int height = bm.getHeight();
float scaleWidth = ((float) newWidth) / width;
float scaleHeight = ((float) newHeight) / height;
Matrix matrix = new Matrix();
matrix.postScale(scaleWidth, scaleHeight);
Bitmap resizedBitmap = Bitmap.createBitmap(bm, 0, 0, width, height, matrix, false);
return resizedBitmap;
}
public void writeToFile(Checkpoint cp , String vertical , String circle , File f){
OutputStreamWriter outStreamWriter = null;
FileOutputStream outStream = null;
try {
outStream = new FileOutputStream(f, true);
outStreamWriter = new OutputStreamWriter(outStream);
outStreamWriter.write(new char[]{});
outStreamWriter.append(cp.getDesp());
outStreamWriter.append('\n');
outStreamWriter.append(String.valueOf(cp.getPoint().latitude));
outStreamWriter.append('\n');
outStreamWriter.append(String.valueOf(cp.getPoint().longitude));
outStreamWriter.append('\n');
outStreamWriter.append("the vertical slider value is :" + vertical);
outStreamWriter.append('\n');
outStreamWriter.append("the circle slider value is :" + circle);
outStreamWriter.append('\n');
outStreamWriter.flush();
outStreamWriter.close();
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
public static void createDashedLine(GoogleMap map, LatLng latLngOrig, LatLng latLngDest, int color){
double difLat = latLngDest.latitude - latLngOrig.latitude;
double difLng = latLngDest.longitude - latLngOrig.longitude;
double zoom = map.getCameraPosition().zoom;
double divLat = difLat / (zoom * 2);
double divLng = difLng / (zoom * 2);
LatLng tmpLatOri = latLngOrig;
for(int i = 0; i < (zoom * 2); i++){
LatLng loopLatLng = tmpLatOri;
if(i > 0){
loopLatLng = new LatLng(tmpLatOri.latitude + (divLat * 0.25f), tmpLatOri.longitude + (divLng * 0.25f));
}
Polyline polyline = map.addPolyline(new PolylineOptions()
.add(loopLatLng).add(new LatLng(tmpLatOri.latitude + divLat, tmpLatOri.longitude + divLng))
.color(color).width(5f));
tmpLatOri = new LatLng(tmpLatOri.latitude + divLat, tmpLatOri.longitude + divLng);
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if(requestCode ==0){
if(resultCode ==RESULT_OK){
String change01 = data.getStringExtra("change01");
String change02 = data.getStringExtra("change02");
Toast.makeText(MainActivity.this , change01 + " " + change02 , Toast.LENGTH_SHORT).show();
Checkpoint cp = new Checkpoint("Destination", globalPoint, 0);
Log.d("lati long" , String.valueOf(globalPoint.latitude) + ":" + String.valueOf(globalPoint.longitude));
cpList.add(cp);
File dir = new File( Environment.getExternalStorageDirectory().getAbsolutePath() +
File.separator + "NOVAX" );
if(!dir.exists()){
dir.mkdirs();
}
Checkpoint lastlyAdded = cpList.get(cpList.size() -1 );
writeToFile(lastlyAdded , change01 , change02 , new File(dir.getAbsolutePath()+ File.separator + "text.txt" ));
SharedPreferences pref = getApplicationContext().getSharedPreferences("MyPref", 0); // 0 - for private mode
SharedPreferences.Editor editor = pref.edit();
editor.clear();
editor.putString("title", lastlyAdded.getDesp());
editor.putString("lat", String.valueOf(lastlyAdded.getPoint().latitude));
editor.putString("lng", String.valueOf(lastlyAdded.getPoint().longitude));
editor.commit();
for(int i = 0 ; i < cpList.size() ; i ++){
Checkpoint cPoint = cpList.get(i);
if(!cPoint.getPoint().equals(lastlyAdded.getPoint())){
cPoint.setState(1);
cPoint.setDesp("Checkpoint "+String.valueOf(i+1) );
}
}
addMarkersToMap();
}
}
}
}
According to Google's documentation, custom info windows are not real windows; the window is rendered as an image and inserted into the map view. The only items on a Google map that seem able to gain focus are the markers themselves. The info window can take an OnClickListener, though. Here's the relevant info; look at the note in the Custom Info Windows section of their docs:
Note: The info window that is drawn is not a live view. The view is rendered as an image (using View.draw(Canvas)) at the time it is returned. This means that any subsequent changes to the view will not be reflected by the info window on the map. To update the info window later (for example, after an image has loaded), call showInfoWindow(). Furthermore, the info window will not respect any of the interactivity typical for a normal view such as touch or gesture events. However you can listen to a generic click event on the whole info window as described in the section below.
There may be a way to use a FrameLayout with a view drawn on top of the map, acting like a custom info window, but that's more of a hack than a supported solution.
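If a single tap on the whole window is enough for your use case, you can at least register the generic click listener that the note mentions (the OnInfoWindowClickListener import is already in your code). A minimal sketch, assuming myMap is the GoogleMap from the question; the tap could then open a normal Dialog or Activity that hosts a live SeekBar:
myMap.setOnInfoWindowClickListener(new GoogleMap.OnInfoWindowClickListener() {
    @Override
    public void onInfoWindowClick(Marker marker) {
        // The rendered info window fires a single click event; individual views
        // (such as the SeekBar) inside it never receive touch or drag events.
        Toast.makeText(MainActivity.this,
                "Info window tapped for " + marker.getTitle(),
                Toast.LENGTH_SHORT).show();
    }
});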
Related
I am trying to follow the google-vision face-tracker sample listed here. I would like to know how to get the number of faces detected by the application, and how to save a frame to the phone's storage. Is this possible from the application?
Yes, you can use the following code:
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.PointF;
import android.media.FaceDetector;
import android.util.SparseArray;
import com.google.android.gms.vision.Frame;
import com.piisoft.upecfacerecognition.utility.Image;
import java.io.File;
import java.util.List;
public class extractFacesFromImage {
public extractFacesFromImage(String imagePath , String OutPutPath , Context context){
File folder = new File(OutPutPath);
if(!folder.exists()){
folder.mkdirs();
}
detectFacesInImage(imagePath , OutPutPath, context);
}
private void detectFacesInImage(String imagePath , String OutPutPath){
//ImageWindow[] imageWindow = null;
Bitmap image = Image.bitmapFromJpg(imagePath);
FaceDetector.Face[] faces = detectFaces(image);
for(FaceDetector.Face fs:faces)
{
if(fs == null){continue;}
PointF midPoint=new PointF();
fs.getMidPoint(midPoint);
float eyeDistance=fs.eyesDistance();
int left = (int)(midPoint.x - (float)(1.4 * eyeDistance));
int top = (int)(midPoint.y - (float)(1.8 * eyeDistance));
Bitmap bmFace = Bitmap.createBitmap(image, (int) left, (int) top, (int) (2.8 * eyeDistance), (int) (3.6 * eyeDistance));
Bitmap bmp= bmFace.createBitmap(bmFace.getWidth(), bmFace.getHeight(), Bitmap.Config.ARGB_8888);
Image.saveBitmapToJpg(bmp,OutPutPath, "face_" + System.currentTimeMillis() +".jpg" ,256,256);
//ImageWindow Iw = new ImageWindow(fs.)
}
//return imageWindow;
}
private void detectFacesInImage(String imagePath , String OutPutPath, Context context){
//ImageWindow[] imageWindow = null;
Bitmap image = Image.bitmapFromJpg(imagePath);
if(image == null){
return;
}
SparseArray<com.google.android.gms.vision.face.Face> faces = detectFaces(image,context);
for (int i = 0; i < faces.size(); ++i) {
com.google.android.gms.vision.face.Face face = faces.valueAt(i);
if(face == null){continue;}
try {
Bitmap bmFace = Bitmap.createBitmap(image, (int) face.getPosition().x, (int) face.getPosition().y, (int) face.getWidth(), (int) face.getHeight());
Image.saveBitmapToJpg(bmFace, OutPutPath, "face_" + System.currentTimeMillis() + ".jpg",256);
}
catch (Exception e){
e.printStackTrace();
}
}
new File(imagePath).delete();
}
private SparseArray<com.google.android.gms.vision.face.Face> detectFaces(Bitmap image , Context context) {
int h = image.getHeight();
int w = image.getWidth();
int max = 10;
Frame frame = new Frame.Builder().setBitmap(image).build();
com.google.android.gms.vision.face.FaceDetector detector = new com.google.android.gms.vision.face.FaceDetector.Builder(context)
.setTrackingEnabled(false)
.setLandmarkType(com.google.android.gms.vision.face.FaceDetector.ALL_LANDMARKS)
.build();
SparseArray<com.google.android.gms.vision.face.Face> faces = detector.detect(frame);
detector.release();
return faces;
}
private FaceDetector.Face[] detectFaces(Bitmap image ) {
int h = image.getHeight();
int w = image.getWidth();
int max = 10;
FaceDetector detector = new FaceDetector(w, h, max);
FaceDetector.Face[] faces = new FaceDetector.Face[max];
int facesFound = detector.findFaces(image, faces);
return faces;
}
}
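To answer the face-count part directly: the SparseArray returned by the mobile-vision detector already carries the count via size(), and a frame held as a Bitmap can be written to the phone's storage with Bitmap.compress(). Below is a minimal, hedged sketch; the helper class and file names are placeholders and not part of the official sample:
import android.content.Context;
import android.graphics.Bitmap;
import android.util.SparseArray;
import com.google.android.gms.vision.Frame;
import com.google.android.gms.vision.face.Face;
import com.google.android.gms.vision.face.FaceDetector;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
public class FaceFrameHelper {
    // Returns how many faces the mobile-vision detector finds in the bitmap.
    public static int countFaces(Context context, Bitmap bitmap) {
        FaceDetector detector = new FaceDetector.Builder(context)
                .setTrackingEnabled(false)
                .build();
        SparseArray<Face> faces = detector.detect(new Frame.Builder().setBitmap(bitmap).build());
        detector.release();
        return faces.size();
    }
    // Saves a frame (already converted to a Bitmap) to app-private external storage as a JPEG.
    public static File saveFrame(Context context, Bitmap bitmap) throws IOException {
        File out = new File(context.getExternalFilesDir(null),
                "frame_" + System.currentTimeMillis() + ".jpg");
        FileOutputStream fos = new FileOutputStream(out);
        bitmap.compress(Bitmap.CompressFormat.JPEG, 90, fos);
        fos.close();
        return out;
    }
}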
I'm using the Color Blob Detector sample code from OpenCV (Sample Code), but the app crashes at Core.multiply(contour, new Scalar(4,4), contour); with a fatal error:
libc: Fatal signal 11 (SIGSEGV), code 1, fault addr 0x0 in tid 11016 (Thread-4857)
A more detailed log is available if it helps pinpoint the exact reason. In another part of the code I was using Core.add(fg, bg, markers); and the app gave the same fatal error there as well. What is the issue? Please guide me towards a solution. I'm using OpenCV 3.1.
The code is as follows:
package com.iu.kamraapp.utils;
import android.util.Log;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;
public class ColorBlobDetector {
// Lower and Upper bounds for range checking in HSV color space
private Scalar mLowerBound = new Scalar(0);
private Scalar mUpperBound = new Scalar(0);
// Minimum contour area in percent for contours filtering
private static double mMinContourArea = 0.1;
// Color radius for range checking in HSV color space
private Scalar mColorRadius = new Scalar(25,50,50,0);
private Mat mSpectrum = new Mat();
private List<MatOfPoint> mContours = new ArrayList<MatOfPoint>();
// Cache
Mat mPyrDownMat = new Mat();
Mat mHsvMat = new Mat();
Mat mMask = new Mat();
Mat mDilatedMask = new Mat();
Mat mHierarchy = new Mat();
public void setColorRadius(Scalar radius) {
mColorRadius = radius;
}
public void setHsvColor(Scalar hsvColor) {
double minH = (hsvColor.val[0] >= mColorRadius.val[0]) ? hsvColor.val[0]-mColorRadius.val[0] : 0;
double maxH = (hsvColor.val[0]+mColorRadius.val[0] <= 255) ? hsvColor.val[0]+mColorRadius.val[0] : 255;
mLowerBound.val[0] = minH;
mUpperBound.val[0] = maxH;
mLowerBound.val[1] = hsvColor.val[1] - mColorRadius.val[1];
mUpperBound.val[1] = hsvColor.val[1] + mColorRadius.val[1];
mLowerBound.val[2] = hsvColor.val[2] - mColorRadius.val[2];
mUpperBound.val[2] = hsvColor.val[2] + mColorRadius.val[2];
mLowerBound.val[3] = 0;
mUpperBound.val[3] = 255;
Mat spectrumHsv = new Mat(1, (int)(maxH-minH), CvType.CV_8UC3);
for (int j = 0; j < maxH-minH; j++) {
byte[] tmp = {(byte)(minH+j), (byte)255, (byte)255};
spectrumHsv.put(0, j, tmp);
}
Imgproc.cvtColor(spectrumHsv, mSpectrum, Imgproc.COLOR_HSV2RGB_FULL, 4);
}
public Mat getSpectrum() {
return mSpectrum;
}
public void setMinContourArea(double area) {
mMinContourArea = area;
}
public void process(Mat rgbaImage) {
try {
Imgproc.pyrDown(rgbaImage, mPyrDownMat);
Imgproc.pyrDown(mPyrDownMat, mPyrDownMat);
Imgproc.cvtColor(mPyrDownMat, mHsvMat, Imgproc.COLOR_RGB2HSV_FULL);
Core.inRange(mHsvMat, mLowerBound, mUpperBound, mMask);
Imgproc.dilate(mMask, mDilatedMask, new Mat());
List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
Imgproc.findContours(mDilatedMask, contours, mHierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
// Find max contour area
double maxArea = 0;
Iterator<MatOfPoint> each = contours.iterator();
while (each.hasNext()) {
MatOfPoint wrapper = each.next();
double area = Imgproc.contourArea(wrapper);
if (area > maxArea)
maxArea = area;
}
// Filter contours by area and resize to fit the original image size
mContours.clear();
each = contours.iterator();
while (each.hasNext()) {
MatOfPoint contour = each.next();
if (Imgproc.contourArea(contour) > mMinContourArea * maxArea) {
Core.multiply(contour, new Scalar(4, 4), contour); // issue is here
mContours.add(contour);
}
}
}
catch (Exception e) { e.printStackTrace(); }
}
public List<MatOfPoint> getContours() {
return mContours;
}
}
package com.iu.kamraapp;
import android.content.Context;
import android.content.res.Configuration;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.MotionEvent;
import android.view.SurfaceView;
import android.view.View;
import com.iu.kamraapp.utils.AppGlobal;
import com.iu.kamraapp.utils.ColorBlobDetector;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.JavaCameraView;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import java.util.ArrayList;
import java.util.List;
public class ColorBlobActivity extends AppCompatActivity implements View.OnTouchListener,CameraBridgeViewBase.CvCameraViewListener2 {
private static final String TAG = "MainActivity";
Context context;
int screenWidth, screenHeight;
private CameraBridgeViewBase mOpenCvCameraView;
private boolean mIsColorSelected = false;
private Mat mRgba;
private Scalar mBlobColorRgba;
private Scalar mBlobColorHsv;
private ColorBlobDetector mDetector;
private Mat mSpectrum;
private Size SPECTRUM_SIZE;
private Scalar CONTOUR_COLOR;
BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS: {
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
mOpenCvCameraView.setOnTouchListener(ColorBlobActivity.this);
}
break;
default: {
super.onManagerConnected(status);
}
break;
}
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
context = this;
setContentView(R.layout.activity_main);
screenWidth = AppGlobal.getScreenResolution(context, true);
screenHeight = AppGlobal.getScreenResolution(context, false);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_java_surface_view);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
if (newConfig.orientation == Configuration.ORIENTATION_LANDSCAPE) {
screenWidth = AppGlobal.getScreenResolution(context, false);
screenHeight = AppGlobal.getScreenResolution(context, true);
} else if (newConfig.orientation == Configuration.ORIENTATION_PORTRAIT) {
screenWidth = AppGlobal.getScreenResolution(context, true);
screenHeight = AppGlobal.getScreenResolution(context, false);
}
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_1_0, this, mLoaderCallback);
OpenCVLoader.initDebug(true);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onCameraViewStarted(int width, int height) {
mRgba = new Mat(height, width, CvType.CV_8UC4);
mDetector = new ColorBlobDetector();
mSpectrum = new Mat();
mBlobColorRgba = new Scalar(255);
mBlobColorHsv = new Scalar(255);
SPECTRUM_SIZE = new Size(200, 64);
CONTOUR_COLOR = new Scalar(255,0,0,255);
}
public void onCameraViewStopped() {
mRgba.release();
}
public boolean onTouch(View v, MotionEvent event) {
try {
int cols = mRgba.cols();
int rows = mRgba.rows();
int xOffset = (mOpenCvCameraView.getWidth() - cols) / 2;
int yOffset = (mOpenCvCameraView.getHeight() - rows) / 2;
int x = (int) event.getX() - xOffset;
int y = (int) event.getY() - yOffset;
Log.i(TAG, "Touch image coordinates: (" + x + ", " + y + ")");
if ((x < 0) || (y < 0) || (x > cols) || (y > rows)) return false;
Rect touchedRect = new Rect();
touchedRect.x = (x > 4) ? x - 4 : 0;
touchedRect.y = (y > 4) ? y - 4 : 0;
touchedRect.width = (x + 4 < cols) ? x + 4 - touchedRect.x : cols - touchedRect.x;
touchedRect.height = (y + 4 < rows) ? y + 4 - touchedRect.y : rows - touchedRect.y;
Mat touchedRegionRgba = mRgba.submat(touchedRect);
Mat touchedRegionHsv = new Mat();
Imgproc.cvtColor(touchedRegionRgba, touchedRegionHsv, Imgproc.COLOR_RGB2HSV_FULL);
// Calculate average color of touched region
mBlobColorHsv = Core.sumElems(touchedRegionHsv);
int pointCount = touchedRect.width * touchedRect.height;
for (int i = 0; i < mBlobColorHsv.val.length; i++)
mBlobColorHsv.val[i] /= pointCount;
mBlobColorRgba = converScalarHsv2Rgba(mBlobColorHsv);
Log.i(TAG, "Touched rgba color: (" + mBlobColorRgba.val[0] + ", " + mBlobColorRgba.val[1] +
", " + mBlobColorRgba.val[2] + ", " + mBlobColorRgba.val[3] + ")");
mDetector.setHsvColor(mBlobColorHsv);
Imgproc.resize(mDetector.getSpectrum(), mSpectrum, SPECTRUM_SIZE);
mIsColorSelected = true;
touchedRegionRgba.release();
touchedRegionHsv.release();
} catch (Exception e) { e.printStackTrace(); }
return false; // don't need subsequent touch events
}
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
if (mIsColorSelected) {
mDetector.process(mRgba);
List<MatOfPoint> contours = mDetector.getContours();
Log.d(TAG, "Contours count: " + contours.size());
Imgproc.drawContours(mRgba, contours, -1, CONTOUR_COLOR);
Mat colorLabel = mRgba.submat(4, 68, 4, 68);
colorLabel.setTo(mBlobColorRgba);
Mat spectrumLabel = mRgba.submat(4, 4 + mSpectrum.rows(), 70, 70 + mSpectrum.cols());
mSpectrum.copyTo(spectrumLabel);
}
return mRgba;
}
private Scalar converScalarHsv2Rgba(Scalar hsvColor) {
Mat pointMatRgba = new Mat();
Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);
return new Scalar(pointMatRgba.get(0, 0));
}
}
I want to add a cropping feature to my Android app. I know there is a default cropping feature available in the gallery, but it only allows rectangular or circular selections. I want to select any part of an image freehand and then crop the selected part out of the original image, for example selecting the head in a full-body picture and cropping it out. See below what I want.
BEFORE
AFTER
Please help me, and also suggest a free library if one exists. Thanks.
Here is the library I used once:
Android widget for cropping and rotating an image.
To add the Cropper to your application, specify com.edmodo.cropper.CropImageView in your layout XML:
<com.edmodo.cropper.CropImageView
xmlns:custom="http://schemas.android.com/apk/res-auto"
android:id="#+id/CropImageView"
android:layout_width="wrap_content"
android:layout_height="wrap_content" />
Or you can modify attributes programmatically.
See the WIKI here.
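A rough sketch of the programmatic side, assuming the widget's basic accessors (setImageBitmap, setFixedAspectRatio, getCroppedImage) match the release you pull in; check the library's README for your exact version:
CropImageView cropImageView = (CropImageView) findViewById(R.id.CropImageView);
cropImageView.setImageBitmap(sourceBitmap);   // the bitmap to crop (assumed to exist already)
cropImageView.setFixedAspectRatio(false);     // free-form rectangular selection
// Typically called from a button's OnClickListener:
Bitmap cropped = cropImageView.getCroppedImage();
Note that this widget only supports rectangular selections; for the free-hand selection shown in your screenshots you still need a custom view such as the HandsCropView below, which collects the finger's points into a Path drawn over the image.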
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.DashPathEffect;
import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.graphics.Path;
import android.graphics.Point;
import android.os.Build.VERSION;
import android.util.AttributeSet;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;
import android.view.ScaleGestureDetector.SimpleOnScaleGestureListener;
import android.view.View;
import android.view.View.OnTouchListener;
import android.view.ViewGroup.LayoutParams;
import java.util.ArrayList;
import java.util.List;
public class HandsCropView extends View implements OnTouchListener {
static Bitmap bitmap;
public static List<Point> points;
int C_H_Point;
int C_W_Point;
int DIST = 2;
int D_height;
int D_width;
boolean NutralButton = false;
boolean bfirstpoint = false;
int canvasHeight;
int canvasWidth;
boolean flgPathDraw = true;
Bitmap img;
int img_height;
int img_width;
LayoutParams layoutParams;
Context mContext;
private ScaleGestureDetector mScaleDetector;
private float mScaleFactor = 1.0f;
Point mfirstpoint = null;
Point mlastpoint = null;
private Paint paint;
Paint tectcolor = new Paint();
private class ScaleListener extends SimpleOnScaleGestureListener {
private ScaleListener() {
}
public boolean onScale(ScaleGestureDetector detector) {
HandsCropView.this.mScaleFactor = HandsCropView.this.mScaleFactor * detector.getScaleFactor();
HandsCropView.this.mScaleFactor = Math.max(0.1f, Math.min(HandsCropView.this.mScaleFactor, 5.0f));
HandsCropView.this.invalidate();
return true;
}
}
public HandsCropView(Context c, Bitmap image) {
super(c);
bitmap = image;
this.img_width = bitmap.getWidth();
this.img_height = bitmap.getHeight();
System.out.println("img_width" + this.img_width + "img_height" + this.img_height);
DisplayMetrics metrics1 = getResources().getDisplayMetrics();
this.D_width = metrics1.widthPixels;
this.D_height = metrics1.heightPixels;
if (this.img_width <= this.D_width) {
this.C_W_Point = this.D_width - this.img_width;
}
if (this.img_height <= this.D_height) {
this.C_H_Point = this.D_height - this.img_height;
}
this.mContext = c;
setFocusable(true);
setFocusableInTouchMode(true);
this.paint = new Paint(1);
this.paint.setStyle(Style.STROKE);
this.paint.setPathEffect(new DashPathEffect(new float[]{10.0f, 20.0f}, 5.0f));
this.paint.setStrokeWidth(5.0f);
this.paint.setColor(-1);
if (VERSION.SDK_INT >= 15) {
setLayerType(1, this.paint);
}
this.paint.setShadowLayer(5.5f, 6.0f, 6.0f, Integer.MIN_VALUE);
this.layoutParams = new LayoutParams(bitmap.getWidth(), bitmap.getHeight());
setOnTouchListener(this);
points = new ArrayList<>();
this.bfirstpoint = false;
this.mScaleDetector = new ScaleGestureDetector(c, new ScaleListener());
}
public HandsCropView(Context context, AttributeSet attrs) {
super(context, attrs);
this.mContext = context;
setFocusable(true);
setFocusableInTouchMode(true);
this.paint = new Paint(1);
this.paint.setStyle(Style.STROKE);
this.paint.setStrokeWidth(2.0f);
setOnTouchListener(this);
points = new ArrayList<>();
this.bfirstpoint = false;
}
public void onDraw(Canvas canvas) {
canvas.scale(this.mScaleFactor, this.mScaleFactor);
canvas.drawBitmap(bitmap, 0.0f, 0.0f, null);
Path path = new Path();
boolean first = true;
for (int i = 0; i < points.size(); i += 2) {
Point point = (Point) points.get(i);
if (first) {
first = false;
path.moveTo((float) point.x, (float) point.y);
} else if (i < points.size() - 1) {
Point next = (Point) points.get(i + 1);
path.quadTo((float) point.x, (float) point.y, (float) next.x, (float) next.y);
} else {
this.mlastpoint = (Point) points.get(i);
path.lineTo((float) point.x, (float) point.y);
}
}
canvas.drawPath(path, this.paint);
}
public boolean onTouch(View view, MotionEvent event) {
Point point = new Point();
point.x = (int) event.getX();
point.y = (int) event.getY();
if (this.flgPathDraw) {
if (this.bfirstpoint) {
if (comparepoint(this.mfirstpoint, point)) {
points.add(this.mfirstpoint);
this.flgPathDraw = false;
GetValue();
} else if (point.x <= this.img_width && point.y <= this.img_height) {
points.add(point);
}
} else if (point.x <= this.img_width && point.y <= this.img_height) {
points.add(point);
}
if (!this.bfirstpoint) {
this.mfirstpoint = point;
this.bfirstpoint = true;
}
} else {
this.mScaleDetector.onTouchEvent(event);
}
invalidate();
Log.e("Hi ==>", "Size: " + point.x + " " + point.y);
if (event.getAction() == 1) {
this.mlastpoint = point;
if (this.flgPathDraw && points.size() > 12 && !comparepoint(this.mfirstpoint, this.mlastpoint)) {
this.flgPathDraw = false;
points.add(this.mfirstpoint);
GetValue();
}
}
return true;
}
private boolean comparepoint(Point first, Point current) {
int left_range_y = current.y - 3;
int right_range_x = current.x + 3;
int right_range_y = current.y + 3;
if (current.x - 3 >= first.x || first.x >= right_range_x || left_range_y >= first.y || first.y >= right_range_y || points.size() < 10) {
return false;
}
return true;
}
public void fillinPartofPath() {
Point point = new Point();
point.x = ((Point) points.get(0)).x;
point.y = ((Point) points.get(0)).y;
points.add(point);
invalidate();
}
public void resetView() {
points.clear();
this.paint.setColor(-1);
this.paint.setStyle(Style.STROKE);
this.flgPathDraw = true;
invalidate();
}
public static boolean GetValue() {
return true;
}
public boolean getBooleanValue() {
return this.NutralButton;
}
}
I'm using Google Maps v2 to display tiles from a WMS. I referred to this site. The problem is that the tiles are being loaded multiple times, and I don't know why. Can anyone help me?
Here is my code
package com.example.testgooglemaps;
import android.app.Activity;
import android.os.Bundle;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.MapFragment;
import com.google.android.gms.maps.model.TileOverlayOptions;
import com.google.android.gms.maps.model.TileProvider;
public class Lanch extends Activity {
// Google Map
private GoogleMap googleMap;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_lanch);
try {
// Loading map
initilizeMap();
} catch (Exception e) {
e.printStackTrace();
}
}
/**
* function to load map. If map is not created it will create it for you
* */
private void initilizeMap() {
if (googleMap == null) {
googleMap = ((MapFragment) getFragmentManager().findFragmentById(R.id.map)).getMap();
// check if map is created successfully or not
if (googleMap != null) {
setUpMap();
}
}
}
@Override
protected void onResume() {
super.onResume();
initilizeMap();
}
private void setUpMap() {
TileProvider wmsTileProvider = TileProviderFactory.getOsgeoWmsTileProvider();
googleMap.addTileOverlay(new TileOverlayOptions().tileProvider(wmsTileProvider));
// to satellite so we can see the WMS overlay.
googleMap.setMapType(GoogleMap.MAP_TYPE_NORMAL);
}
}
TileProvider class...
package com.example.testgooglemaps;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Locale;
import android.util.Log;
public class TileProviderFactory {
public static WMSTileProvider getOsgeoWmsTileProvider() {
final String OSGEO_WMS = "http://localhost/geoserver/magnamaps/wms?service=WMS&version=1.1.0&request=GetMap&layers=magnamaps:bang_apartments&styles=&bbox=%f,%f,%f,%f&width=256&height=256&crs=EPSG:4326&format=image/png&transparent=true";
WMSTileProvider tileProvider = new WMSTileProvider(256, 256) {
@Override
public synchronized URL getTileUrl(int x, int y, int zoom) {
double[] bbox = getBoundingBox(x, y, zoom);
String s = String.format(Locale.US, OSGEO_WMS, bbox[MINX], bbox[MINY], bbox[MAXX], bbox[MAXY]);
Log.d("WMSDEMO", s);
URL url = null;
try {
url = new URL(s);
} catch (MalformedURLException e) {
throw new AssertionError(e);
}
return url;
}
};
return tileProvider;
}
}
WMSTileProvider class...
package com.example.testgooglemaps;
import java.net.URLEncoder;
import com.google.android.gms.maps.model.UrlTileProvider;
public abstract class WMSTileProvider extends UrlTileProvider {
// Web Mercator n/w corner of the map.
private static final double[] TILE_ORIGIN = { -20037508.34789244, 20037508.34789244 };
// array indexes for that data
private static final int ORIG_X = 0;
private static final int ORIG_Y = 1; // "
// Size of square world map in meters, using WebMerc projection.
private static final double MAP_SIZE = 20037508.34789244 * 2;
// array indexes for array to hold bounding boxes.
protected static final int MINX = 0;
protected static final int MAXX = 1;
protected static final int MINY = 2;
protected static final int MAXY = 3;
// cql filters
private String cqlString = "";
// Construct with tile size in pixels, normally 256, see parent class.
public WMSTileProvider(int x, int y) {
super(x, y);
}
protected String getCql() {
return URLEncoder.encode(cqlString);
}
public void setCql(String c) {
cqlString = c;
}
// Return a web Mercator bounding box given tile x/y indexes and a zoom
// level.
protected double[] getBoundingBox(int x, int y, int zoom) {
double tileSize = MAP_SIZE / Math.pow(2, zoom);
double minx = TILE_ORIGIN[ORIG_X] + x * tileSize;
double maxx = TILE_ORIGIN[ORIG_X] + (x + 1) * tileSize;
double miny = TILE_ORIGIN[ORIG_Y] - (y + 1) * tileSize;
double maxy = TILE_ORIGIN[ORIG_Y] - y * tileSize;
double[] bbox = new double[4];
bbox[MINX] = minx;
bbox[MINY] = miny;
bbox[MAXX] = maxx;
bbox[MAXY] = maxy;
return bbox;
}
}
EDIT: While the map is still initializing, the zoom level passed into getTileUrl(int x, int y, int zoom) is already 3.
In WMSTileProvider.getBoundingBox you are computing the bounding box in units of the Web Mercator projection, which are meters. In your OSGEO_WMS URL string, you are specifying that the bbox units are in EPSG:4326 (degrees). It's likely that the query for each tile is incorrect as a result.
See the WMS reference for bbox and srs:
bbox: Bounding box for map extent. Value is minx,miny,maxx,maxy in
units of the SRS.
Try replacing your srs value with EPSG:3857 (Web Mercator).
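Concretely, the values coming out of getBoundingBox() are already Web Mercator meters, so the URL template only needs to declare that SRS. A hedged sketch of the adjusted template, keeping the rest of the provider as it is (note that WMS 1.1.0 expects the parameter name srs, while 1.3.0 uses crs):
final String OSGEO_WMS = "http://localhost/geoserver/magnamaps/wms"
        + "?service=WMS&version=1.1.0&request=GetMap"
        + "&layers=magnamaps:bang_apartments&styles="
        + "&bbox=%f,%f,%f,%f&width=256&height=256"
        + "&srs=EPSG:3857&format=image/png&transparent=true";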
I'm working on an image processing application that analyzes ECG graph. To do so, I need to detect certain peaks of the graph.
How can I display the coordinates, xcoor and ycoor, in the user interface? I tried toasting them, but it doesn't work. I tried a TextView, but the application force closes.
package com.thesis.results;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.os.Bundle;
import android.os.Handler;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.TextView;
import java.util.ArrayList;
import android.widget.Toast;
import com.example.edgedetection.R;
public class MainActivity extends Activity {
// initialize variables
static int i = 0;
static int bl_ = 0; // number of black; pixels in the image
static int op_ = 0;
static int Al = 0;
static int Re = 0;
static int Gr = 0;
static int Bl = 0;
static int Alp = 0;
static int Red = 0;
static int Gre = 0;
static int Blu = 0;
static int stop = 0;
static int stopx = 0;
static int stopy = 1000;
static int xcoor[];
static int ycoor[];
static int width;
static int height;
static int RRdistance;
static double voltage;
static int peakcoordinates;
ImageView imageSource, imageAfter;
Bitmap bitmap_Source;
ProgressBar progressBar;
Button process;
TextView counter;
TextView coordinates;
private Handler handler;
Bitmap afterProcess;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
process = (Button) findViewById(R.id.btn_process);
imageSource = (ImageView) findViewById(R.id.imageSource);
imageAfter = (ImageView) findViewById(R.id.imageAfter);
progressBar = (ProgressBar) findViewById(R.id.progressBar);
counter = (TextView) findViewById(R.id.counter);
coordinates = (TextView) findViewById(R.id.coordinates);
process.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
// TODO Auto-generated method stub
bitmap_Source = BitmapFactory.decodeResource(getResources(),
R.drawable.test_ideal_graph);
handler = new Handler();
StratBackgroundProcess();
}
});
}
private void StratBackgroundProcess() {
Runnable runnable = new Runnable() {
public void run() {
afterProcess = processingBitmap(bitmap_Source, 0, 0, 0);
handler.post(new Runnable() {
public void run() {
progressBar.setVisibility(View.GONE);
imageAfter.setImageBitmap(afterProcess);
calculatevoltage();
// calculateRRdistance();
counter.setText("" + bl_ + "# (" + stopx + "," + stopy
+ " " + "and" +
")" + " {" + width + "," + height + " } = "
+ voltage + "mV" + " " + "R-R distance:" + " "
+ RRdistance);
coordinates.setText(" " + xcoor + "," + ycoor + " " );
}
private void calculatevoltage() {
// TODO Auto-generated method stub
voltage = ((0.1 * height) * (height - stopy)) / height;
// 1.5 mV is the total voltage of the graph, 1 box =
// 0.1mV
}
//private void calculateRRdistance() {
// TODO Auto-generated method stub
// RRdistance = stopx1 - stopx;
// 1.5 mV is the total voltage of the graph, 1 box =
// 0.1mV
// }
});
}
};
new Thread(runnable).start();
}
public static Bitmap processingBitmap(Bitmap src, double red, double green,
double blue) {
// image size
width = src.getWidth();
height = src.getHeight();
// create output bitmap
Bitmap bmOut = Bitmap.createBitmap(width, height, src.getConfig());
// color information
int A, R, G, B;
int pixel;
int flag = 0;
//array
int[] trial = new int[width];
// scan through all pixels
for (int x = 0; x < width; ++x) {
flag = 0;
for (int y = 0; y < height; ++y) {
// get pixel color
pixel = src.getPixel(x, y);
// apply filtering on each channel R, G, B
Al = Color.alpha(pixel);
Re = (int) (Color.red(pixel));
Gr = (int) (Color.green(pixel));
Bl = (int) (Color.blue(pixel));
// set new color pixel to output bitmap
if ((Re == 0) && (Gr == 0) && (Bl == 0) && (flag == 0)) {
bmOut.setPixel(x, y, Color.argb(255, 0, 0, 0));
flag = 1;
trial[x] = y;
} else
bmOut.setPixel(x, y, Color.argb(255, 255, 255, 255));
}
}
//detect all possible peaks
for (int x = 1; x < width; x++) {
if (trial[x] < trial[x - 1] && trial[x] < trial[x + 1]) {
peakcoordinates = src.getPixel(x, trial[x]); //get pixels, how to display? (textview, toast?)
//Toast.makeText(getApplicationContext(), "hi", Toast.LENGTH_LONG).show();
}
//detect all R peaks
for (int y = 1; y > (trial[1]-50); y++ ){
xcoor[i] = x;
ycoor[i] = y;
}
return bmOut;
}
return bmOut;
}
}
Your application crashes because you're trying to access your Views from a worker thread, which is prohibited in Android. I'd recommend using the AsyncTask class, which runs a task on a worker thread and provides callbacks that run on the UI thread, where you can update your Views. Hope this helps.
You can modify your views (TextView, ImageView, and so on) only on the UI thread. Try an AsyncTask:
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
process = (Button) findViewById(R.id.btn_process);
imageSource = (ImageView) findViewById(R.id.imageSource);
imageAfter = (ImageView) findViewById(R.id.imageAfter);
progressBar = (ProgressBar) findViewById(R.id.progressBar);
counter = (TextView) findViewById(R.id.counter);
coordinates = (TextView) findViewById(R.id.coordinates);
process.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
// TODO Auto-generated method stub
bitmap_Source = BitmapFactory.decodeResource(getResources(),
R.drawable.test_ideal_graph);
handler = new Handler();
new BackgroundTask().execute();
}
});
}
private class BackgroundTask extends AsyncTask<String, Long, String> {
@Override
protected String doInBackground(String... s) {
afterProcess = processingBitmap(bitmap_Source, 0, 0, 0);
return null;
}
@Override
protected void onPostExecute(String result) {
progressBar.setVisibility(View.GONE);
imageAfter.setImageBitmap(afterProcess);
calculatevoltage();
// calculateRRdistance();
counter.setText("...");
coordinates.setText(Arrays.toString(xcoor) + ", " + Arrays.toString(ycoor)); // print array contents rather than object references
}
}