Xamarin Android Location updates not being called - android

I am trying to log location during an activity, but I am not getting any location updates. I have played around with the minimum refresh interval as well as the distance interval, but it still doesn't work. I followed Xamarin's example for location updates, and I've been banging my head against the wall trying to work out why location updates aren't being delivered!
public class ActivityManager : Java.Lang.Object, ILocationListener, ISensorEventListener
{
private readonly LocationManager _locationManager;
private readonly SensorManager _sensorManager;
private readonly List<Location> _locationCache;
private readonly List<SensorEvent> _sensorCache;
private bool Continous { get; set; }
public ActivityManager(LocationManager locationManager, SensorManager sensorManager)
{
_locationManager = locationManager;
_sensorManager = sensorManager;
_locationCache = new List<Location>();
_sensorCache = new List<SensorEvent>();
Continous = false;
}
public void StartTrackingLocation()
{
const string provider = LocationManager.GpsProvider;
if (_locationManager.IsProviderEnabled(provider))
{
_locationManager.RequestLocationUpdates(provider, 0, 0, this);
}
}
public void StartTrackingAccelerometer()
{
var mHandlerThread = new HandlerThread("sensorThread");
mHandlerThread.Start();
var handler = new Handler(mHandlerThread.Looper);
_sensorManager.RegisterListener(this, _sensorManager.GetDefaultSensor(SensorType.Accelerometer),
SensorDelay.Normal, handler);
}
public void StopTrackingLocation()
{
_locationManager.RemoveUpdates(this);
}
public void StopTrackingAccelerometer()
{
_sensorManager.UnregisterListener(this);
}
public void StartContinousTracking()
{
_locationCache.Clear();
_sensorCache.Clear();
Continous = true;
}
public void StopContinousTracking()
{
_locationCache.Clear();
_sensorCache.Clear();
Continous = false;
}
public void ExportLocationData(string path)
{
var kml = new Kml
{
Feature = new Placemark
{
Geometry = new LineString
{
Coordinates = new CoordinateCollection(_locationCache.Select(l => new Vector {Latitude = l.Latitude, Longitude = l.Longitude}))
}
}
};
var kmlFile = KmlFile.Create(kml, true);
using (var stream = File.OpenWrite(path))
{
kmlFile.Save(stream);
}
}
public void ExportSensorData(string path)
{
var csv = new CsvWriter(new StreamWriter(path));
csv.WriteField("AccX");
csv.WriteField("AccY");
csv.WriteField("AccZ");
csv.NextRecord();
foreach (var s in _sensorCache.ToList())
{
csv.WriteField(s.Values[0]);
csv.WriteField(s.Values[1]);
csv.WriteField(s.Values[2]);
csv.NextRecord();
}
csv.Dispose();
}
public void OnLocationChanged(Location location)
{
_locationCache.Add(location);
if (!Continous) _locationCache.RemoveAll(l => location.Time - l.Time > 120000);
}
public void OnSensorChanged(SensorEvent e)
{
_sensorCache.Add(e);
if (!Continous) _sensorCache.RemoveAll(s => e.Timestamp - s.Timestamp > 120000000000);
}
public void OnProviderDisabled(string provider) { }
public void OnProviderEnabled(string provider) { }
public void OnStatusChanged(string provider, Availability status, Bundle extras) { }
public void OnAccuracyChanged(Sensor sensor, SensorStatus accuracy) { }
}
Thanks in advance.

This is the implementation I have used for the Android element of a Xamarin.Forms app:
The communicating class
public class PlatformLocation : ILocation
{
private readonly global::Android.Locations.LocationManager _locationManager;
private readonly string[] _providers;
private readonly TaskCompletionSource<PfgLocationInfo> _tcs;
public PlatformLocation()
{
_locationManager = (global::Android.Locations.LocationManager)Application.Context.GetSystemService(Context.LocationService);
_providers = _locationManager.GetProviders(false).Where(s => s != global::Android.Locations.LocationManager.PassiveProvider).ToArray();
_tcs = new TaskCompletionSource<PfgLocationInfo>();
}
#region Private Methods
Task<PfgLocationInfo> StartUpdatingLocation(int timeout = 0)
{
var lastKnownGpsLocation = _locationManager.GetLastKnownLocation("gps");
if (lastKnownGpsLocation != null)
{
var pfgLocation = new PfgLocationInfo()
{
Longitude = lastKnownGpsLocation.Longitude,
Latitude = lastKnownGpsLocation.Latitude,
Timestamp = DateTime.Now,
Success = true,
Status = PfgLocationStatus.Valid
};
_tcs.TrySetResult(pfgLocation);
return _tcs.Task;
}
var lastKnownNetworkLocation = _locationManager.GetLastKnownLocation("network");
if (lastKnownNetworkLocation != null)
{
var pfgLocation = new PfgLocationInfo()
{
Longitude = lastKnownNetworkLocation.Longitude,
Latitude = lastKnownNetworkLocation.Latitude,
Timestamp = DateTime.Now,
Success = true,
Status = PfgLocationStatus.Valid
};
_tcs.TrySetResult(pfgLocation);
return _tcs.Task;
}
LocationListener listener = null;
listener = new LocationListener(
_providers.Where(_locationManager.IsProviderEnabled),
() =>
{
if (listener.Task.IsCanceled)
{
_locationManager.RemoveUpdates(listener);
}
},
timeout
);
try
{
var looper = Looper.MyLooper() ?? Looper.MainLooper;
var enabled = 0;
for (var i = 0; i < _providers.Length; ++i)
{
if (_locationManager.IsProviderEnabled(_providers[i]))
{
enabled++;
}
_locationManager.RequestLocationUpdates(_providers[i], 0, 0, listener, looper);
}
if (enabled == 0)
{
for (var i = 0; i < _providers.Length; ++i)
{
_locationManager.RemoveUpdates(listener);
}
_tcs.TrySetResult(new PfgLocationInfo{ Timestamp = DateTime.Now, Status = PfgLocationStatus.Restricted, Success = false });
return _tcs.Task;
}
}
catch (TaskCanceledException tcex)
{
_tcs.TrySetResult(new PfgLocationInfo{ Timestamp = DateTime.Now, Status = PfgLocationStatus.Restricted, Success = false });
return _tcs.Task;
}
catch (SecurityException)
{
_tcs.TrySetResult(new PfgLocationInfo{ Timestamp = DateTime.Now, Status = PfgLocationStatus.Restricted, Success = false });
return _tcs.Task;
}
return listener.Task;
}
#endregion
#region ILocation implementation
public Task<PfgLocationInfo> GetLocationAsync()
{
return StartUpdatingLocation();
}
public Task<PfgLocationInfo> GetLocationAsync(int timeout)
{
return StartUpdatingLocation(timeout);
}
#endregion
}
The ILocationListener implementation:
public class LocationListener : Java.Lang.Object, ILocationListener
{
private readonly HashSet<string> _activeProviders;
private readonly TaskCompletionSource<PfgLocationInfo> _completionSource = new TaskCompletionSource<PfgLocationInfo>();
private readonly Action _finishedCallback;
private readonly Timer _timer;
private readonly int _timeout;
private readonly object _locationLock = new object();
public Task<PfgLocationInfo> Task
{
get
{
return _completionSource.Task;
}
}
public LocationListener(IEnumerable<string> providers, Action finishedCallback, int timeout = -1)
{
_activeProviders = new HashSet<string>(providers);
_finishedCallback = finishedCallback;
if (timeout > 0)
{
_timeout = timeout;
_timer = new Timer(TimesUp, null, _timeout * 1000, 0);
}
}
#region Timeout Methods
void TimesUp(object state)
{
lock (_locationLock)
{
if (_completionSource.TrySetCanceled() && _finishedCallback != null)
{
_finishedCallback();
}
}
_timer.Dispose();
}
#endregion
#region ILocationListener implementation
public void OnLocationChanged(global::Android.Locations.Location location)
{
if (location != null)
{
Finish(location);
return;
}
}
public void OnProviderDisabled(string provider)
{
lock (_activeProviders)
{
if (_activeProviders.Remove(provider) && _activeProviders.Count == 0)
{
_completionSource.TrySetResult(new PfgLocationInfo{ Timestamp = DateTime.Now, Status = PfgLocationStatus.Restricted, Success = false });
}
}
}
public void OnProviderEnabled(string provider)
{
lock (_activeProviders)
_activeProviders.Add(provider);
}
public void OnStatusChanged(string provider, Availability status, global::Android.OS.Bundle extras)
{
switch (status)
{
case Availability.Available:
OnProviderEnabled(provider);
break;
case Availability.OutOfService:
OnProviderDisabled(provider);
break;
}
}
#endregion
#region Private Methods
void Finish(global::Android.Locations.Location location)
{
var pfgLocationInfo = new PfgLocationInfo()
{
Longitude = location.Longitude,
Latitude = location.Latitude,
Timestamp = DateTime.Now,
Success = true,
Status = PfgLocationStatus.Valid
};
_completionSource.TrySetResult(pfgLocationInfo);
}
#endregion
}
The PfgLocationInfo is just a simple class for holding the results.
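It isn't shown in the answer, so purely as an illustration, here is a minimal sketch of the shape implied by the properties used above (only the Valid and Restricted states appear in the code; anything beyond that would be an assumption):

public enum PfgLocationStatus
{
    Valid,
    Restricted
}

public class PfgLocationInfo
{
    public double Latitude { get; set; }
    public double Longitude { get; set; }
    public DateTime Timestamp { get; set; }
    public bool Success { get; set; }
    public PfgLocationStatus Status { get; set; }
}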
This works exactly as required and should be relatively easy to adapt to your requirements.
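If it helps, the shared Xamarin.Forms project would typically resolve this through DependencyService. A sketch, assuming the standard DependencyService registration (ShowCurrentLocationAsync is just a hypothetical caller, and you may resolve ILocation differently, e.g. via an IoC container):

// In the Android project, register the platform implementation.
[assembly: Xamarin.Forms.Dependency(typeof(PlatformLocation))]

// In the shared project, resolve and use it.
public async Task ShowCurrentLocationAsync()
{
    var locationService = Xamarin.Forms.DependencyService.Get<ILocation>();
    var info = await locationService.GetLocationAsync(30); // 30 second timeout
    if (info.Success)
    {
        System.Diagnostics.Debug.WriteLine($"Lat {info.Latitude}, Lon {info.Longitude} at {info.Timestamp}");
    }
}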

It turns out this was working, but only very intermittently. The issue appeared to be that another thread, which was running asynchronously to log the sensor data, was queueing up operations that effectively drowned out the location updates.
Removing the sensor data logging allowed location updates to come through. This seemed strange to me, as it occurred despite the sensor data logging being contained in its own thread.
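For anyone hitting the same symptom, one mitigation worth trying (a sketch only, under the assumption that the starvation is on the thread servicing the location callbacks) is to deliver the location updates on their own HandlerThread looper, mirroring what StartTrackingAccelerometer already does for the sensor listener:

public void StartTrackingLocation()
{
    const string provider = LocationManager.GpsProvider;
    if (!_locationManager.IsProviderEnabled(provider))
        return;

    // Dedicated looper so OnLocationChanged is not queued behind other work.
    var locationThread = new HandlerThread("locationThread");
    locationThread.Start();

    // Overload that takes a Looper; callbacks arrive on locationThread.
    _locationManager.RequestLocationUpdates(provider, 0, 0, this, locationThread.Looper);
}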

Related

Android main thread UI not responding while implementing Google Speech-to-text. How to solve?

Currently, I am implementing Google Speech-to-Text in my project. The sample code I referred to is this: Click Here.
I have used the SpeechService and VoiceRecorder classes from this project.
public class SpeechService extends Service {
public static final List<String> SCOPE =
Collections.singletonList("https://www.googleapis.com/auth/cloud-platform");
private static final String TAG = "SpeechService";
private static final String PREFS = "SpeechService";
private static final String PREF_ACCESS_TOKEN_VALUE = "access_token_value";
private static final String PREF_ACCESS_TOKEN_EXPIRATION_TIME = "access_token_expiration_time";
/**
* We reuse an access token if its expiration time is longer than this.
*/
private static final int ACCESS_TOKEN_EXPIRATION_TOLERANCE = 30 * 60 * 1000; // thirty minutes
/**
* We refresh the current access token before it expires.
*/
private static final int ACCESS_TOKEN_FETCH_MARGIN = 60 * 1000; // one minute
private static final String HOSTNAME = "speech.googleapis.com";
private static final int PORT = 443;
private static Handler mHandler;
private final SpeechBinder mBinder = new SpeechBinder();
private final ArrayList<Listener> mListeners = new ArrayList<>();
private final StreamObserver<StreamingRecognizeResponse> mResponseObserver
= new StreamObserver<StreamingRecognizeResponse>() {
#Override
public void onNext(StreamingRecognizeResponse response) {
Log.e("Speech", "Recognized");
String text = null;
boolean isFinal = false;
if (response.getResultsCount() > 0) {
System.out.println("result count....."+String.valueOf(response.getResultsCount()));
final StreamingRecognitionResult result = response.getResults(0);
isFinal = result.getIsFinal();
if (result.getAlternativesCount() > 0) {
final SpeechRecognitionAlternative alternative = result.getAlternatives(0);
text = alternative.getTranscript();
}
}
if (text != null && isFinal) {
for (Listener listener : mListeners) {
listener.onSpeechRecognized(text, isFinal);
}
} else {
for (Listener listener : mListeners) {
listener.onRandomStupidity();
}
}
}
#Override
public void onError(Throwable t) {
Log.e(TAG, "Error calling the API.", t);
for(Listener listener : mListeners){
listener.onErrorRecognizing();
}
}
#Override
public void onCompleted() {
Log.i(TAG, "API completed.");
}
};
private volatile AccessTokenTask mAccessTokenTask;
private final Runnable mFetchAccessTokenRunnable = new Runnable() {
#Override
public void run() {
fetchAccessToken();
}
};
private SpeechGrpc.SpeechStub mApi;
private StreamObserver<StreamingRecognizeRequest> mRequestObserver;
public static SpeechService from(IBinder binder) {
return ((SpeechBinder) binder).getService();
}
#Override
public void onCreate() {
super.onCreate();
mHandler = new Handler();
fetchAccessToken();
}
#Override
public void onDestroy() {
super.onDestroy();
mHandler.removeCallbacks(mFetchAccessTokenRunnable);
mHandler = null;
// Release the gRPC channel.
if (mApi != null) {
final ManagedChannel channel = (ManagedChannel) mApi.getChannel();
if (channel != null && !channel.isShutdown()) {
try {
channel.shutdown().awaitTermination(5, TimeUnit.SECONDS);
} catch (InterruptedException e) {
Log.e(TAG, "Error shutting down the gRPC channel.", e);
}
}
mApi = null;
}
}
private void fetchAccessToken() {
if (mAccessTokenTask != null) {
return;
}
mAccessTokenTask = new AccessTokenTask();
mAccessTokenTask.execute();
}
private String getDefaultLanguageCode() {
final LangInnerResponse languageToLearn = MemoryCache.getLanguageToLearn();
if(languageToLearn != null) {
Log.e("Test Lang", languageToLearn.getCode());
return languageToLearn.getCode();
} else {
final Locale locale = Locale.getDefault();
final StringBuilder language = new StringBuilder(locale.getLanguage());
final String country = locale.getCountry();
if (!TextUtils.isEmpty(country)) {
language.append("-");
language.append(country);
}
return language.toString();
}
}
#Nullable
#Override
public IBinder onBind(Intent intent) {
return mBinder;
}
public void addListener(#NonNull Listener listener) {
mListeners.add(listener);
}
public void removeListener(#NonNull Listener listener) {
mListeners.remove(listener);
}
/**
** Starts recognizing speech audio.
*
* #param sampleRate The sample rate of the audio.
*/
public void startRecognizing(int sampleRate) {
if (mApi == null) {
Log.w(TAG, "API not ready. Ignoring the request.");
return;
}
System.out.println("calling api....");
// Configure the API
mRequestObserver = mApi.streamingRecognize(mResponseObserver);
mRequestObserver.onNext(StreamingRecognizeRequest.newBuilder()
.setStreamingConfig(StreamingRecognitionConfig.newBuilder()
.setConfig(RecognitionConfig.newBuilder()
.setLanguageCode(getDefaultLanguageCode())
.setEncoding(RecognitionConfig.AudioEncoding.LINEAR16)
.setSampleRateHertz(sampleRate)
.build())
.setInterimResults(true)
.setSingleUtterance(true)
.build())
.build());
}
/**
* Recognizes the speech audio. This method should be called every time a chunk of byte buffer
* is ready.
*
* #param data The audio data.
* #param size The number of elements that are actually relevant in the {#code data}.
*/
public void recognize(byte[] data, int size) {
if (mRequestObserver == null) {
return;
}
// Call the streaming recognition API
mRequestObserver.onNext(StreamingRecognizeRequest.newBuilder()
.setAudioContent(ByteString.copyFrom(data, 0, size))
.build());
}
/**
* Finishes recognizing speech audio.
*/
public void finishRecognizing() {
if (mRequestObserver == null) {
return;
}
mRequestObserver.onCompleted();
mRequestObserver = null;
}
public interface Listener {
/**
* Called when a new piece of text was recognized by the Speech API.
*
* #param text The text.
* #param isFinal {#code true} when the API finished processing audio.
*/
void onSpeechRecognized(String text, boolean isFinal);
void onErrorRecognizing();
void onRandomStupidity();
}
/**
* Authenticates the gRPC channel using the specified {#link GoogleCredentials}.
*/
private static class GoogleCredentialsInterceptor implements ClientInterceptor {
private final Credentials mCredentials;
private Metadata mCached;
private Map<String, List<String>> mLastMetadata;
GoogleCredentialsInterceptor(Credentials credentials) {
mCredentials = credentials;
}
private static Metadata toHeaders(Map<String, List<String>> metadata) {
Metadata headers = new Metadata();
if (metadata != null) {
for (String key : metadata.keySet()) {
Metadata.Key<String> headerKey = Metadata.Key.of(
key, Metadata.ASCII_STRING_MARSHALLER);
for (String value : metadata.get(key)) {
headers.put(headerKey, value);
}
}
}
return headers;
}
#Override
public <ReqT, RespT> ClientCall<ReqT, RespT> interceptCall(
final MethodDescriptor<ReqT, RespT> method, CallOptions callOptions,
final Channel next) {
return new ClientInterceptors.CheckedForwardingClientCall<ReqT, RespT>(
next.newCall(method, callOptions)) {
#Override
protected void checkedStart(Listener<RespT> responseListener, Metadata headers)
throws StatusException {
Metadata cachedSaved;
URI uri = serviceUri(next, method);
synchronized (this) {
Map<String, List<String>> latestMetadata = getRequestMetadata(uri);
if (mLastMetadata == null || mLastMetadata != latestMetadata) {
mLastMetadata = latestMetadata;
mCached = toHeaders(mLastMetadata);
}
cachedSaved = mCached;
}
headers.merge(cachedSaved);
delegate().start(responseListener, headers);
}
};
}
/**
* Generate a JWT-specific service URI. The URI is simply an identifier with enough
* information for a service to know that the JWT was intended for it. The URI will
* commonly be verified with a simple string equality check.
*/
private URI serviceUri(Channel channel, MethodDescriptor<?, ?> method)
throws StatusException {
String authority = channel.authority();
if (authority == null) {
throw Status.UNAUTHENTICATED
.withDescription("Channel has no authority")
.asException();
}
// Always use HTTPS, by definition.
final String scheme = "https";
final int defaultPort = 443;
String path = "/" + MethodDescriptor.extractFullServiceName(method.getFullMethodName());
URI uri;
try {
uri = new URI(scheme, authority, path, null, null);
} catch (URISyntaxException e) {
throw Status.UNAUTHENTICATED
.withDescription("Unable to construct service URI for auth")
.withCause(e).asException();
}
// The default port must not be present. Alternative ports should be present.
if (uri.getPort() == defaultPort) {
uri = removePort(uri);
}
return uri;
}
private URI removePort(URI uri) throws StatusException {
try {
return new URI(uri.getScheme(), uri.getUserInfo(), uri.getHost(), -1 /* port */,
uri.getPath(), uri.getQuery(), uri.getFragment());
} catch (URISyntaxException e) {
throw Status.UNAUTHENTICATED
.withDescription("Unable to construct service URI after removing port")
.withCause(e).asException();
}
}
private Map<String, List<String>> getRequestMetadata(URI uri) throws StatusException {
try {
return mCredentials.getRequestMetadata(uri);
} catch (IOException e) {
throw Status.UNAUTHENTICATED.withCause(e).asException();
}
}
}
private class SpeechBinder extends Binder {
SpeechService getService() {
return SpeechService.this;
}
}
private class CreateApiSingle implements SingleOnSubscribe<SpeechGrpc.SpeechStub> {
#Override
public void subscribe(SingleEmitter<SpeechGrpc.SpeechStub> emitter) throws Exception {
final AccessToken accessToken = generateCredentials();
final SpeechGrpc.SpeechStub api = generateApi(accessToken);
emitter.onSuccess(api);
}
private AccessToken generateCredentials() throws IOException {
final SharedPreferences prefs =
getSharedPreferences(PREFS, Context.MODE_PRIVATE);
String tokenValue = prefs.getString(PREF_ACCESS_TOKEN_VALUE, null);
long expirationTime = prefs.getLong(PREF_ACCESS_TOKEN_EXPIRATION_TIME, -1);
// Check if the current token is still valid for a while
if (tokenValue != null && expirationTime > 0) {
if (expirationTime
> System.currentTimeMillis() + ACCESS_TOKEN_EXPIRATION_TOLERANCE) {
return new AccessToken(tokenValue, new Date(expirationTime));
}
}
// ***** WARNING *****
// In this sample, we load the credential from a JSON file stored in a raw resource
// folder of this client app. You should never do this in your app. Instead, store
// the file in your server and obtain an access token from there.
// *******************
final InputStream stream = getResources().openRawResource(R.raw.credential);
final GoogleCredentials credentials = GoogleCredentials.fromStream(stream)
.createScoped(SCOPE);
final AccessToken token = credentials.refreshAccessToken();
prefs.edit()
.putString(PREF_ACCESS_TOKEN_VALUE, token.getTokenValue())
.putLong(PREF_ACCESS_TOKEN_EXPIRATION_TIME,
token.getExpirationTime().getTime())
.apply();
stream.close();
return token;
}
private SpeechGrpc.SpeechStub generateApi(AccessToken accessToken) {
final ManagedChannel channel = new OkHttpChannelProvider()
.builderForAddress(HOSTNAME, PORT)
.nameResolverFactory(new DnsNameResolverProvider())
.intercept(new GoogleCredentialsInterceptor(new GoogleCredentials(accessToken)
.createScoped(SCOPE)))
.build();
return SpeechGrpc.newStub(channel);
}
}
private class AccessTokenTask extends AsyncTask<Void, Void, AccessToken> {
#Override
protected AccessToken doInBackground(Void... voids) {
final SharedPreferences prefs =
getSharedPreferences(PREFS, Context.MODE_PRIVATE);
String tokenValue = prefs.getString(PREF_ACCESS_TOKEN_VALUE, null);
long expirationTime = prefs.getLong(PREF_ACCESS_TOKEN_EXPIRATION_TIME, -1);
// Check if the current token is still valid for a while
if (tokenValue != null && expirationTime > 0) {
if (expirationTime
> System.currentTimeMillis() + ACCESS_TOKEN_EXPIRATION_TOLERANCE) {
return new AccessToken(tokenValue, new Date(expirationTime));
}
}
// ***** WARNING *****
// In this sample, we load the credential from a JSON file stored in a raw resource
// folder of this client app. You should never do this in your app. Instead, store
// the file in your server and obtain an access token from there.
// *******************
final InputStream stream = getResources().openRawResource(R.raw.credential);
try {
final GoogleCredentials credentials = GoogleCredentials.fromStream(stream)
.createScoped(SCOPE);
final AccessToken token = credentials.refreshAccessToken();
prefs.edit()
.putString(PREF_ACCESS_TOKEN_VALUE, token.getTokenValue())
.putLong(PREF_ACCESS_TOKEN_EXPIRATION_TIME,
token.getExpirationTime().getTime())
.apply();
return token;
} catch (IOException e) {
Log.e(TAG, "Failed to obtain access token.", e);
}
return null;
}
#Override
protected void onPostExecute(AccessToken accessToken) {
mAccessTokenTask = null;
final ManagedChannel channel = new OkHttpChannelProvider()
.builderForAddress(HOSTNAME, PORT)
.nameResolverFactory(new DnsNameResolverProvider())
.intercept(new GoogleCredentialsInterceptor(new GoogleCredentials(accessToken)
.createScoped(SCOPE)))
.build();
mApi = SpeechGrpc.newStub(channel);
// Schedule access token refresh before it expires
if (mHandler != null) {
mHandler.postDelayed(mFetchAccessTokenRunnable,
Math.max(accessToken.getExpirationTime().getTime()
- System.currentTimeMillis()
- ACCESS_TOKEN_FETCH_MARGIN, ACCESS_TOKEN_EXPIRATION_TOLERANCE));
}
}
}}
public class VoiceRecorder {
private static final int[] SAMPLE_RATE_CANDIDATES = new int[]{48000, 44100};
private static final int CHANNEL = AudioFormat.CHANNEL_IN_MONO;
private static final int ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private static final int AMPLITUDE_THRESHOLD = 1500;
private static final int SPEECH_TIMEOUT_MILLIS = 2000;
private static final int MAX_SPEECH_LENGTH_MILLIS = 30 * 1000;
public static abstract class Callback {
/**
* Called when the recorder starts hearing voice.
*/
public void onVoiceStart() {
}
/**
* Called when the recorder is hearing voice.
*
* #param data The audio data in {#link AudioFormat#ENCODING_PCM_16BIT}.
* #param size The size of the actual data in {#code data}.
*/
public void onVoice(byte[] data, int size) {
}
/**
* Called when the recorder stops hearing voice.
*/
public void onVoiceEnd() {
}
}
private final Callback mCallback;
private AudioRecord mAudioRecord;
private Thread mThread;
private byte[] mBuffer;
private final Object mLock = new Object();
/** The timestamp of the last time that voice is heard. */
private long mLastVoiceHeardMillis = Long.MAX_VALUE;
/** The timestamp when the current voice is started. */
private long mVoiceStartedMillis;
public VoiceRecorder(#NonNull Callback callback) {
mCallback = callback;
}
/**
* Starts recording audio.
*
* <p>The caller is responsible for calling {#link #stop()} later.</p>
*/
public void start() {
// Stop recording if it is currently ongoing.
stop();
// Try to create a new recording session.
mAudioRecord = createAudioRecord();
if (mAudioRecord == null) {
throw new RuntimeException("Cannot instantiate VoiceRecorder");
}
// Start recording.
mAudioRecord.startRecording();
// Start processing the captured audio.
mThread = new Thread(new ProcessVoice());
mThread.start();
}
/**
* Stops recording audio.
*/
public void stop() {
synchronized (mLock) {
System.out.println("stop audio record....");
dismiss();
if (mThread != null) {
mThread.interrupt();
mThread = null;
}
if (mAudioRecord != null) {
mAudioRecord.stop();
mAudioRecord.release();
mAudioRecord = null;
}
mBuffer = null;
System.out.println("stop audio record....2");
}
}
/**
* Dismisses the currently ongoing utterance.
*/
public void dismiss() {
if (mLastVoiceHeardMillis != Long.MAX_VALUE) {
mLastVoiceHeardMillis = Long.MAX_VALUE;
mCallback.onVoiceEnd();
}
}
/**
* Retrieves the sample rate currently used to record audio.
*
* #return The sample rate of recorded audio.
*/
public int getSampleRate() {
if (mAudioRecord != null) {
return mAudioRecord.getSampleRate();
}
return 0;
}
/**
* Creates a new {#link AudioRecord}.
*
* #return A newly created {#link AudioRecord}, or null if it cannot be created (missing
* permissions?).
*/
private AudioRecord createAudioRecord() {
for (int sampleRate : SAMPLE_RATE_CANDIDATES) {
final int sizeInBytes = AudioRecord.getMinBufferSize(sampleRate, CHANNEL, ENCODING);
if (sizeInBytes == AudioRecord.ERROR_BAD_VALUE) {
continue;
}
final AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
sampleRate, CHANNEL, ENCODING, sizeInBytes);
if (audioRecord.getState() == AudioRecord.STATE_INITIALIZED) {
mBuffer = new byte[sizeInBytes];
return audioRecord;
} else {
audioRecord.release();
}
}
return null;
}
/**
* Continuously processes the captured audio and notifies {#link #mCallback} of corresponding
* events.
*/
private class ProcessVoice implements Runnable {
#Override
public void run() {
while (true) {
synchronized (mLock) {
if (Thread.currentThread().isInterrupted()) {
break;
}
final int size = mAudioRecord.read(mBuffer, 0, mBuffer.length);
final long now = System.currentTimeMillis();
if (isHearingVoice(mBuffer, size)) {
if (mLastVoiceHeardMillis == Long.MAX_VALUE) {
mVoiceStartedMillis = now;
mCallback.onVoiceStart();
}
mCallback.onVoice(mBuffer, size);
mLastVoiceHeardMillis = now;
if (now - mVoiceStartedMillis > MAX_SPEECH_LENGTH_MILLIS) {
end();
}
} else if (mLastVoiceHeardMillis != Long.MAX_VALUE) {
mCallback.onVoice(mBuffer, size);
if (now - mLastVoiceHeardMillis > SPEECH_TIMEOUT_MILLIS) {
end();
}
}
}
}
}
private void end() {
mLastVoiceHeardMillis = Long.MAX_VALUE;
mCallback.onVoiceEnd();
System.out.println("end...");
}
private boolean isHearingVoice(byte[] buffer, int size) {
for (int i = 0; i < size - 1; i += 2) {
// The buffer has LINEAR16 in little endian.
int s = buffer[i + 1];
if (s < 0) s *= -1;
s <<= 8;
s += Math.abs(buffer[i]);
if (s > AMPLITUDE_THRESHOLD) {
return true;
}
}
return false;
}
}}
Then I implemented the SpeechService and VoiceRecorder callbacks as follows:
private VoiceRecorder voiceRecorder;
private final SpeechService.Listener speechServiceListener = new SpeechService.Listener() {
@Override
public void onSpeechRecognized(final String text, final boolean isFinal) {
if (isFinal) {
System.out.println("ui thread...");
if (!TextUtils.isEmpty(text)) {
runOnUiThread(() -> {
showMessage(text);
flingAnswer(text);
});
}
}
}
@Override
public void onErrorRecognizing() {
showMessage("Please try again. Could not detect.");
}
@Override
public void onRandomStupidity() {
}
};
private SpeechService speechService;
private final VoiceRecorder.Callback voiceCallback = new VoiceRecorder.Callback() {
@Override
public void onVoiceStart() {
if (speechService != null) {
System.out.println("voice start....");
speechService.startRecognizing(voiceRecorder.getSampleRate());
}
}
@Override
public void onVoice(byte[] data, int size) {
if (speechService != null) {
speechService.recognize(data, size);
}
}
@Override
public void onVoiceEnd() {
if (speechService != null) {
speechService.finishRecognizing();
}
}
};
private final ServiceConnection serviceConnection = new ServiceConnection() {
@Override
public void onServiceConnected(ComponentName componentName, IBinder binder) {
speechService = SpeechService.from(binder);
speechService.addListener(speechServiceListener);
}
@Override
public void onServiceDisconnected(ComponentName componentName) {
speechService = null;
}
};
For voice input this is the code:
@Override
public void stopRecognizing() {
stopVoiceRecorder();
Log.e("Recording", "Stopped");
}
@Override
public void startRecognizing() {
if (permissionManager != null && permissionManager.askForPermissions()) {
startVoiceRecorder();
vibrate.vibrate(50);//Providing haptic feedback to user on press.
}
Log.e("Recording", "Started");
}
binding.imgVoice.setOnTouchListener((v, event) -> {
switch (event.getAction()) {
case MotionEvent.ACTION_UP:
System.out.println("up...");
mCallback.stopRecognizing();
binding.imgVoice
.animate()
.scaleX(1.0f)
.scaleY(1.0f);
binding.imgVoice.setVisibility(View.GONE);
binding.progressBar.setVisibility(View.VISIBLE);
break;
case MotionEvent.ACTION_DOWN:
System.out.println("down...");
binding.imgVoice
.animate()
.scaleX(1.8f)
.scaleY(1.8f);
mCallback.startRecognizing();
break;
}
return true;
});
}
When I press the mic, the event is registered as ACTION_DOWN and I start the voice recorder; on releasing the mic, the voice recorder is stopped. With ACTION_DOWN I also scale up the mic icon, which needs to be scaled back down on ACTION_UP. But the UI as a whole freezes most of the time. I find that the onNext() callback of the StreamObserver is continuously invoked before isFinal becomes true.
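One thing worth double-checking here (a sketch of the idea only, not a confirmed fix for the freeze): the SpeechService listener callbacks are invoked from the gRPC response thread, so any view work inside them has to be posted to the main thread. onSpeechRecognized above already wraps its UI work in runOnUiThread, but onErrorRecognizing calls showMessage directly:

@Override
public void onErrorRecognizing() {
    // Listener callbacks arrive on a background (gRPC) thread;
    // post UI work back to the main thread, as onSpeechRecognized does.
    runOnUiThread(() -> showMessage("Please try again. Could not detect."));
}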
private void startVoiceRecorder() {
if (voiceRecorder != null) {
voiceRecorder.stop();
}
voiceRecorder = new VoiceRecorder(voiceCallback);
voiceRecorder.start();
}
private void stopVoiceRecorder() {
if (voiceRecorder != null) {
voiceRecorder.stop();
voiceRecorder = null;
}
}
But I want the mic to scale down as soon as I release it (on the ACTION_UP event), which is not happening.
Can anyone help me with this?
Thanks in Advance.

How to track the download progress percentage of each file being downloaded with the ExoPlayer library?

I am working on a media player app using the ExoPlayer library. I have a playlist of videos, and I want to download several videos simultaneously. I did this using the ExoPlayer demo app available on GitHub. I want to show the progress of each download in the UI. For this I used the DownloadNotificationUtil.buildProgressNotification method.
@Override
protected Notification getForegroundNotification(TaskState[] taskStates) {
float totalPercentage = 0;
int downloadTaskCount = 0;
boolean allDownloadPercentagesUnknown = true;
boolean haveDownloadedBytes = false;
boolean haveDownloadTasks = false;
boolean haveRemoveTasks = false;
Log.e(TAG,"size task state: "+taskStates.length);
for (TaskState taskState : taskStates) {
Log.e(TAG,"taskId= "+taskState.taskId);
if (taskState.state != TaskState.STATE_STARTED
&& taskState.state != TaskState.STATE_COMPLETED) {
continue;
}
if (taskState.action.isRemoveAction) {
haveRemoveTasks = true;
continue;
}
haveDownloadTasks = true;
if (taskState.downloadPercentage != C.PERCENTAGE_UNSET) {
allDownloadPercentagesUnknown = false;
totalPercentage += taskState.downloadPercentage;
}
haveDownloadedBytes |= taskState.downloadedBytes > 0;
downloadTaskCount++;
}
int progress = 0;
boolean indeterminate = true;
if (haveDownloadTasks) {
progress = (int) (totalPercentage / downloadTaskCount);
indeterminate = allDownloadPercentagesUnknown && haveDownloadedBytes;
Log.e(TAG,"notifi "+progress);
}
return DownloadNotificationUtil.buildProgressNotification(
this,
R.drawable.exo_icon_play,
DOWNLOAD_CHANNEL_ID,
null,
null,
taskStates);
}
Now I can track the download progress, but I still have a problem: I can't tell which item is downloading in order to update its progress bar in the UI. Is there a unique ID for each download to recognize it? For example, the Android DownloadManager has a download ID for each downloading file, but I don't know how to handle this here.
This is the MediaDownloadService:
public class MediaDownloadService extends DownloadService {
public static String TAG="MediaDownloadService";
private static final int FOREGROUND_NOTIFICATION_ID = 1;
public MediaDownloadService() {
super(
DOWNLOAD_NOTIFICATION_ID,
DEFAULT_FOREGROUND_NOTIFICATION_UPDATE_INTERVAL,
DOWNLOAD_CHANNEL_ID,
R.string.download_channel_name);
}
@Override
protected DownloadManager getDownloadManager() {
return ((MyApplication) getApplication()).getDownloadManager();
}
@Nullable
@Override
protected Scheduler getScheduler() {
return null;
}
@Override
protected Notification getForegroundNotification(TaskState[] taskStates) {
float totalPercentage = 0;
int downloadTaskCount = 0;
boolean allDownloadPercentagesUnknown = true;
boolean haveDownloadedBytes = false;
boolean haveDownloadTasks = false;
boolean haveRemoveTasks = false;
for (TaskState taskState : taskStates) {
if (taskState.state != TaskState.STATE_STARTED
&& taskState.state != TaskState.STATE_COMPLETED) {
continue;
}
if (taskState.action.isRemoveAction) {
haveRemoveTasks = true;
continue;
}
haveDownloadTasks = true;
if (taskState.downloadPercentage != C.PERCENTAGE_UNSET) {
allDownloadPercentagesUnknown = false;
totalPercentage += taskState.downloadPercentage;
}
haveDownloadedBytes |= taskState.downloadedBytes > 0;
downloadTaskCount++;
}
int progress = 0;
boolean indeterminate = true;
if (haveDownloadTasks) {
progress = (int) (totalPercentage / downloadTaskCount);
indeterminate = allDownloadPercentagesUnknown && haveDownloadedBytes;
Log.e(TAG,"notifi "+progress);
sendIntent(progress);
}
return DownloadNotificationUtil.buildProgressNotification(
this,
R.drawable.exo_icon_play,
DOWNLOAD_CHANNEL_ID,
null,
null,
taskStates);
}
private void sendIntent(int progress){
Intent intent = new Intent(ConstantUtil.MESSAGE_PROGRESS);
intent.putExtra("progress",progress);
LocalBroadcastManager.getInstance(MediaDownloadService.this).sendBroadcast(intent);
}
@Override
protected void onTaskStateChanged(TaskState taskState) {
if (taskState.action.isRemoveAction) {
return;
}
Notification notification = null;
if (taskState.state == TaskState.STATE_COMPLETED) {
Log.e(TAG,"STATE_COMPLETED");
notification =
DownloadNotificationUtil.buildDownloadCompletedNotification(
/* context= */ this,
R.drawable.exo_controls_play,
DOWNLOAD_CHANNEL_ID,
/* contentIntent= */ null,
Util.fromUtf8Bytes(taskState.action.data));
} else if (taskState.state == TaskState.STATE_FAILED) {
Log.e(TAG,"STATE_FAILED");
notification =
DownloadNotificationUtil.buildDownloadFailedNotification(
/* context= */ this,
R.drawable.exo_controls_play,
DOWNLOAD_CHANNEL_ID,
/* contentIntent= */ null,
Util.fromUtf8Bytes(taskState.action.data));
}
int notificationId = FOREGROUND_NOTIFICATION_ID + 1 + taskState.taskId;
NotificationUtil.setNotification(this, notificationId, notification);
}
}
This is the DownloadTracker class:
public class DownloadTracker implements DownloadManager.Listener {
/** Listens for changes in the tracked downloads. */
public interface Listener {
/** Called when the tracked downloads changed. */
void onDownloadsChanged();
}
private static final String TAG = "DownloadTracker";
private final Context context;
private final DataSource.Factory dataSourceFactory;
private final TrackNameProvider trackNameProvider;
private final CopyOnWriteArraySet<Listener> listeners;
private Listener onDownloadsChanged;
private final HashMap<Uri, DownloadAction> trackedDownloadStates;
private final ActionFile actionFile;
private final Handler actionFileWriteHandler;
public DownloadTracker(
Context context,
DataSource.Factory dataSourceFactory,
File actionFile,
DownloadAction.Deserializer... deserializers) {
this.context = context.getApplicationContext();
this.dataSourceFactory = dataSourceFactory;
this.actionFile = new ActionFile(actionFile);
trackNameProvider = new DefaultTrackNameProvider(context.getResources());
listeners = new CopyOnWriteArraySet<>();
trackedDownloadStates = new HashMap<>();
HandlerThread actionFileWriteThread = new HandlerThread("DownloadTracker");
actionFileWriteThread.start();
actionFileWriteHandler = new Handler(actionFileWriteThread.getLooper());
loadTrackedActions(
deserializers.length > 0 ? deserializers : DownloadAction.getDefaultDeserializers());
}
public void addListener(Listener listener) {
listeners.add(listener);
}
public void removeListener(Listener listener) {
listeners.remove(listener);
}
public boolean isDownloaded(Uri uri) {
return trackedDownloadStates.containsKey(uri);
}
@SuppressWarnings("unchecked")
public List<StreamKey> getOfflineStreamKeys(Uri uri) {
if (!trackedDownloadStates.containsKey(uri)) {
return Collections.emptyList();
}
return trackedDownloadStates.get(uri).getKeys();
}
public int toggleDownload(Activity activity, String name, Uri uri, String extension) {
if (isDownloaded(uri)) {
Log.e(TAG,"isDownloaded");
DownloadAction removeAction =
getDownloadHelper(uri, extension).getRemoveAction(Util.getUtf8Bytes(name));
startServiceWithAction(removeAction);
return -1;
} else {
StartDownloadDialogHelper helper =
new StartDownloadDialogHelper(activity, getDownloadHelper(uri, extension), name);
helper.prepare();
return helper.getTaskId();
}
}
@Override
public void onInitialized(DownloadManager downloadManager) {
// Do nothing.
}
@Override
public void onTaskStateChanged(DownloadManager downloadManager, TaskState taskState) {
DownloadAction action = taskState.action;
Uri uri = action.uri;
if ((action.isRemoveAction && taskState.state == TaskState.STATE_COMPLETED)
|| (!action.isRemoveAction && taskState.state == TaskState.STATE_FAILED)) {
// A download has been removed, or has failed. Stop tracking it.
if (trackedDownloadStates.remove(uri) != null) {
handleTrackedDownloadStatesChanged();
}
}
}
@Override
public void onIdle(DownloadManager downloadManager) {
// Do nothing.
}
// Internal methods
private void loadTrackedActions(DownloadAction.Deserializer[] deserializers) {
try {
DownloadAction[] allActions = actionFile.load(deserializers);
for (DownloadAction action : allActions) {
trackedDownloadStates.put(action.uri, action);
}
} catch (IOException e) {
Log.e(TAG, "Failed to load tracked actions", e);
}
}
private void handleTrackedDownloadStatesChanged() {
for (Listener listener : listeners) {
listener.onDownloadsChanged();
}
final DownloadAction[] actions = trackedDownloadStates.values().toArray(new DownloadAction[0]);
Log.e(TAG,"actions: "+actions.toString());
actionFileWriteHandler.post(
() -> {
try {
actionFile.store(actions);
} catch (IOException e) {
Log.e(TAG, "Failed to store tracked actions", e);
}
});
}
private void startDownload(DownloadAction action) {
if (trackedDownloadStates.containsKey(action.uri)) {
// This content is already being downloaded. Do nothing.
Log.e(TAG,"download already exsit");
return;
}
trackedDownloadStates.put(action.uri, action);
handleTrackedDownloadStatesChanged();
startServiceWithAction(action);
}
private void startServiceWithAction(DownloadAction action) {
DownloadService.startWithAction(context, MediaDownloadService.class, action, false);
}
private DownloadHelper getDownloadHelper(Uri uri, String extension) {
int type = Util.inferContentType(uri, extension);
switch (type) {
case C.TYPE_DASH:
return new DashDownloadHelper(uri, dataSourceFactory);
case C.TYPE_SS:
return new SsDownloadHelper(uri, dataSourceFactory);
case C.TYPE_HLS:
return new HlsDownloadHelper(uri, dataSourceFactory);
case C.TYPE_OTHER:
return new ProgressiveDownloadHelper(uri);
default:
throw new IllegalStateException("Unsupported type: " + type);
}
}
private final class StartDownloadDialogHelper
implements DownloadHelper.Callback, DialogInterface.OnClickListener {
private final DownloadHelper downloadHelper;
private final String name;
private final AlertDialog.Builder builder;
private final View dialogView;
private final List<TrackKey> trackKeys;
private final ArrayAdapter<String> trackTitles;
private final ListView representationList;
private int taskId;
public StartDownloadDialogHelper(
Activity activity, DownloadHelper downloadHelper, String name) {
this.downloadHelper = downloadHelper;
this.name = name;
builder =
new AlertDialog.Builder(activity)
.setTitle(R.string.exo_download_description)
.setPositiveButton(android.R.string.ok, this)
.setNegativeButton(android.R.string.cancel, null);
// Inflate with the builder's context to ensure the correct style is used.
LayoutInflater dialogInflater = LayoutInflater.from(builder.getContext());
dialogView = dialogInflater.inflate(R.layout.start_download_dialog, null);
trackKeys = new ArrayList<>();
trackTitles =
new ArrayAdapter<>(
builder.getContext(), android.R.layout.simple_list_item_multiple_choice);
representationList = dialogView.findViewById(R.id.representation_list);
representationList.setChoiceMode(ListView.CHOICE_MODE_MULTIPLE);
representationList.setAdapter(trackTitles);
}
public void prepare() {
downloadHelper.prepare(this);
}
@Override
public void onPrepared(DownloadHelper helper) {
for (int i = 0; i < downloadHelper.getPeriodCount(); i++) {
TrackGroupArray trackGroups = downloadHelper.getTrackGroups(i);
for (int j = 0; j < trackGroups.length; j++) {
TrackGroup trackGroup = trackGroups.get(j);
for (int k = 0; k < trackGroup.length; k++) {
trackKeys.add(new TrackKey(i, j, k));
trackTitles.add(trackNameProvider.getTrackName(trackGroup.getFormat(k)));
}
}
}
if (!trackKeys.isEmpty()) {
builder.setView(dialogView);
}
builder.create().show();
}
@Override
public void onPrepareError(DownloadHelper helper, IOException e) {
Toast.makeText(
context.getApplicationContext(), R.string.download_start_error, Toast.LENGTH_LONG)
.show();
Log.e(TAG, "Failed to start download", e);
}
@Override
public void onClick(DialogInterface dialog, int which) {
ArrayList<TrackKey> selectedTrackKeys = new ArrayList<>();
for (int i = 0; i < representationList.getChildCount(); i++) {
if (representationList.isItemChecked(i)) {
selectedTrackKeys.add(trackKeys.get(i));
}
}
if (!selectedTrackKeys.isEmpty() || trackKeys.isEmpty()) {
// We have selected keys, or we're dealing with single stream content.
DownloadAction downloadAction =
downloadHelper.getDownloadAction(Util.getUtf8Bytes(name), selectedTrackKeys);
taskId=MyApplication.getInstance().getDownloadManager().handleAction(downloadAction);
startDownload(downloadAction);
}
}
}
}
In my Fragment/Activity:
/* this method will be called when user click on download button of each item */
@Override
public void onDownloadClick(LectureList lecture) {
Log.e(TAG,"onClickDownload");
downloadTracker.toggleDownload(this,lecture.getTitle_lecture(),
Uri.parse(lecture.getUrlPath()),lecture.getExtension());
}
And here is my broadcast receiver:
private BroadcastReceiver broadcastReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
Log.e(TAG,"onRecive download");
if(intent.getAction().equals(MESSAGE_PROGRESS)){
int progress = intent.getIntExtra("progress", 0);
}
}
};
In the getForegroundNotification() method you get an array of TaskState objects, each of which has a downloadPercentage member and the download Uri (taskState.action.uri), which is unique for each download task. Store these values in a map and broadcast the map.
override fun getForegroundNotification(taskStates: Array<TaskState>): Notification {
var totalPercentage = 0f
var downloadTaskCount = 0
var progressMap : HashMap<Uri, Int> = HashMap()
for (taskState in taskStates) {
if (taskState.state != TaskState.STATE_STARTED && taskState.state != TaskState.STATE_COMPLETED) {
continue
}
if (taskState.action.isRemoveAction) {
continue
}
if (taskState.downloadPercentage != C.PERCENTAGE_UNSET.toFloat()) {
totalPercentage += taskState.downloadPercentage
progressMap.put(taskState.action.uri, taskState.downloadPercentage.toInt())
}
downloadTaskCount++
}
var progress = 0
progress = (totalPercentage / downloadTaskCount).toInt()
broadcastIndividualProgress(progressMap)
return buildProgressNotification(progress)
}
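On the Java side this maps naturally onto the existing sendIntent helper, carrying the whole map instead of a single int, and the receiver can then look each item up by URI. A rough sketch (the "progressMap" extra key and the adapter call are placeholders; the map is keyed by uri.toString() because android.net.Uri is not Serializable):

private void broadcastIndividualProgress(HashMap<Uri, Integer> progressMap) {
    // Re-key by the URI string so the map can travel as a Serializable extra.
    HashMap<String, Integer> byUri = new HashMap<>();
    for (Map.Entry<Uri, Integer> entry : progressMap.entrySet()) {
        byUri.put(entry.getKey().toString(), entry.getValue());
    }
    Intent intent = new Intent(ConstantUtil.MESSAGE_PROGRESS);
    intent.putExtra("progressMap", byUri); // HashMap<String, Integer> is Serializable
    LocalBroadcastManager.getInstance(this).sendBroadcast(intent);
}

// In the Fragment/Activity receiver:
@Override
public void onReceive(Context context, Intent intent) {
    @SuppressWarnings("unchecked")
    HashMap<String, Integer> byUri =
            (HashMap<String, Integer>) intent.getSerializableExtra("progressMap");
    if (byUri == null) return;
    for (Map.Entry<String, Integer> entry : byUri.entrySet()) {
        int percent = entry.getValue();
        // e.g. adapter.updateProgress(Uri.parse(entry.getKey()), percent);
    }
}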

waitingInMainSignalCatcherLoop,Thread*=0x72c22ee000,peer=0x12d00280,"Signal Catcher"]: reacting to signal 3

Good day everyone. I would like to ask: what is the cause of this ANR? In my project I have a service which is bound in an activity. When I exit that activity, the app hangs for a moment. My thought is that the service is still running even though I unbind it in onStop() of the activity.
Here is my service class
public class SpeechService extends Service {
public interface Listener {
/**
* Called when a new piece of text was recognized by the Speech API.
*
* #param text The text.
* #param isFinal {#code true} when the API finished processing audio.
*/
void onSpeechRecognized(String text, boolean isFinal);
}
private static final String TAG = "SpeechService";
private static final String PREFS = "SpeechService";
private static final String PREF_ACCESS_TOKEN_VALUE = "access_token_value";
private static final String PREF_ACCESS_TOKEN_EXPIRATION_TIME = "access_token_expiration_time";
/** We reuse an access token if its expiration time is longer than this. */
private static final int ACCESS_TOKEN_EXPIRATION_TOLERANCE = 30 * 60 * 1000; // thirty minutes
/** We refresh the current access token before it expires. */
private static final int ACCESS_TOKEN_FETCH_MARGIN = 60 * 1000; // one minute
public static final List<String> SCOPE =
Collections.singletonList("https://www.googleapis.com/auth/cloud-platform");
private static final String HOSTNAME = "speech.googleapis.com";
private static final int PORT = 443;
private final SpeechBinder mBinder = new SpeechBinder();
private final ArrayList<Listener> mListeners = new ArrayList<>();
private volatile AccessTokenTask mAccessTokenTask;
private SpeechGrpc.SpeechStub mApi;
private static Handler mHandler;
private final StreamObserver<StreamingRecognizeResponse> mResponseObserver
= new StreamObserver<StreamingRecognizeResponse>() {
#Override
public void onNext(StreamingRecognizeResponse response) {
String text = null;
boolean isFinal = false;
if (response.getResultsCount() > 0) {
final StreamingRecognitionResult result = response.getResults(0);
isFinal = result.getIsFinal();
if (result.getAlternativesCount() > 0) {
final SpeechRecognitionAlternative alternative = result.getAlternatives(0);
text = alternative.getTranscript();
}
}
if (text != null) {
for (Listener listener : mListeners) {
listener.onSpeechRecognized(text, isFinal);
}
}
}
#Override
public void onError(Throwable t) {
Log.e(TAG, "Error calling the API.", t);
}
#Override
public void onCompleted() {
Log.i(TAG, "API completed.");
}
};
private final StreamObserver<RecognizeResponse> mFileResponseObserver
= new StreamObserver<RecognizeResponse>() {
#Override
public void onNext(RecognizeResponse response) {
String text = null;
if (response.getResultsCount() > 0) {
final SpeechRecognitionResult result = response.getResults(0);
if (result.getAlternativesCount() > 0) {
final SpeechRecognitionAlternative alternative = result.getAlternatives(0);
text = alternative.getTranscript();
}
}
if (text != null) {
for (Listener listener : mListeners) {
listener.onSpeechRecognized(text, true);
}
}
}
#Override
public void onError(Throwable t) {
Log.e(TAG, "Error calling the API.", t);
}
#Override
public void onCompleted() {
Log.i(TAG, "API completed.");
}
};
private StreamObserver<StreamingRecognizeRequest> mRequestObserver;
public static SpeechService from(IBinder binder) {
return ((SpeechBinder) binder).getService();
}
#Override
public void onCreate() {
super.onCreate();
mHandler = new Handler();
fetchAccessToken();
}
#Override
public void onDestroy() {
super.onDestroy();
mHandler.removeCallbacks(mFetchAccessTokenRunnable);
mHandler = null;
// Release the gRPC channel.
if (mApi != null) {
final ManagedChannel channel = (ManagedChannel) mApi.getChannel();
if (channel != null && !channel.isShutdown()) {
try {
channel.shutdown().awaitTermination(1, TimeUnit.SECONDS);
} catch (InterruptedException e) {
Log.e(TAG, "Error shutting down the gRPC channel.", e);
}
}
mApi = null;
}
}
private void fetchAccessToken() {
if (mAccessTokenTask != null) {
return;
}
mAccessTokenTask = new AccessTokenTask();
mAccessTokenTask.execute();
}
private String getDefaultLanguageCode() {
final Locale locale = Locale.getDefault();
final StringBuilder language = new StringBuilder(locale.getLanguage());
final String country = locale.getCountry();
if (!TextUtils.isEmpty(country)) {
language.append("-");
language.append(country);
}
return language.toString();
}
#Nullable
#Override
public IBinder onBind(Intent intent) {
return mBinder;
}
public void addListener(#NonNull Listener listener) {
mListeners.add(listener);
}
public void removeListener(#NonNull Listener listener) {
mListeners.remove(listener);
}
/**
* Starts recognizing speech audio.
*
* #param sampleRate The sample rate of the audio.
*/
public void startRecognizing(int sampleRate) {
if (mApi == null) {
Log.w(TAG, "API not ready. Ignoring the request.");
return;
}
// Configure the API
mRequestObserver = mApi.streamingRecognize(mResponseObserver);
mRequestObserver.onNext(StreamingRecognizeRequest.newBuilder()
.setStreamingConfig(StreamingRecognitionConfig.newBuilder()
.setConfig(RecognitionConfig.newBuilder()
.setLanguageCode(getDefaultLanguageCode())
.setEncoding(RecognitionConfig.AudioEncoding.LINEAR16)
.setSampleRateHertz(sampleRate)
.build())
.setInterimResults(true)
.setSingleUtterance(true)
.build())
.build());
}
/**
* Recognizes the speech audio. This method should be called every time a chunk of byte buffer
* is ready.
*
* #param data The audio data.
* #param size The number of elements that are actually relevant in the {#code data}.
*/
public void recognize(byte[] data, int size) {
if (mRequestObserver == null) {
return;
}
// Call the streaming recognition API
mRequestObserver.onNext(StreamingRecognizeRequest.newBuilder()
.setAudioContent(ByteString.copyFrom(data, 0, size))
.build());
}
/**
* Finishes recognizing speech audio.
*/
public void finishRecognizing() {
if (mRequestObserver == null) {
return;
}
mRequestObserver.onCompleted();
mRequestObserver = null;
}
/**
* Recognize all data from the specified {#link InputStream}.
*
* #param stream The audio data.
*/
public void recognizeInputStream(InputStream stream) {
try {
mApi.recognize(
RecognizeRequest.newBuilder()
.setConfig(RecognitionConfig.newBuilder()
.setEncoding(RecognitionConfig.AudioEncoding.LINEAR16)
.setLanguageCode("en-US")
.setSampleRateHertz(16000)
.build())
.setAudio(RecognitionAudio.newBuilder()
.setContent(ByteString.readFrom(stream))
.build())
.build(),
mFileResponseObserver);
} catch (IOException e) {
Log.e(TAG, "Error loading the input", e);
}
}
private class SpeechBinder extends Binder {
SpeechService getService() {
return SpeechService.this;
}
}
private final Runnable mFetchAccessTokenRunnable = new Runnable() {
#Override
public void run() {
fetchAccessToken();
}
};
private class AccessTokenTask extends AsyncTask<Void, Void, AccessToken> {
#Override
protected AccessToken doInBackground(Void... voids) {
final SharedPreferences prefs =
getSharedPreferences(PREFS, Context.MODE_PRIVATE);
String tokenValue = prefs.getString(PREF_ACCESS_TOKEN_VALUE, null);
long expirationTime = prefs.getLong(PREF_ACCESS_TOKEN_EXPIRATION_TIME, -1);
// Check if the current token is still valid for a while
if (tokenValue != null && expirationTime > 0) {
if (expirationTime
> System.currentTimeMillis() + ACCESS_TOKEN_EXPIRATION_TOLERANCE) {
return new AccessToken(tokenValue, new Date(expirationTime));
}
}
// ***** WARNING *****
// In this sample, we load the credential from a JSON file stored in a raw resource
// folder of this client app. You should never do this in your app. Instead, store
// the file in your server and obtain an access token from there.
// *******************
final InputStream stream = getResources().openRawResource(R.raw.credential);
try {
final GoogleCredentials credentials = GoogleCredentials.fromStream(stream)
.createScoped(SCOPE);
final AccessToken token = credentials.refreshAccessToken();
prefs.edit()
.putString(PREF_ACCESS_TOKEN_VALUE, token.getTokenValue())
.putLong(PREF_ACCESS_TOKEN_EXPIRATION_TIME,
token.getExpirationTime().getTime())
.apply();
return token;
} catch (IOException e) {
Log.e(TAG, "Failed to obtain access token.", e);
}
return null;
}
#Override
protected void onPostExecute(AccessToken accessToken) {
mAccessTokenTask = null;
final ManagedChannel channel = new OkHttpChannelProvider()
.builderForAddress(HOSTNAME, PORT)
.nameResolverFactory(new DnsNameResolverProvider())
.intercept(new GoogleCredentialsInterceptor(new GoogleCredentials(accessToken)
.createScoped(SCOPE)))
.build();
mApi = SpeechGrpc.newStub(channel);
// Schedule access token refresh before it expires
if (mHandler != null) {
mHandler.postDelayed(mFetchAccessTokenRunnable,
Math.max(accessToken.getExpirationTime().getTime()
- System.currentTimeMillis()
- ACCESS_TOKEN_FETCH_MARGIN, ACCESS_TOKEN_EXPIRATION_TOLERANCE));
}
}
}
/**
* Authenticates the gRPC channel using the specified {#link GoogleCredentials}.
*/
private static class GoogleCredentialsInterceptor implements ClientInterceptor {
private final Credentials mCredentials;
private Metadata mCached;
private Map<String, List<String>> mLastMetadata;
GoogleCredentialsInterceptor(Credentials credentials) {
mCredentials = credentials;
}
#Override
public <ReqT, RespT> ClientCall<ReqT, RespT> interceptCall(
final MethodDescriptor<ReqT, RespT> method, CallOptions callOptions,
final Channel next) {
return new ClientInterceptors.CheckedForwardingClientCall<ReqT, RespT>(
next.newCall(method, callOptions)) {
#Override
protected void checkedStart(Listener<RespT> responseListener, Metadata headers)
throws StatusException {
Metadata cachedSaved;
URI uri = serviceUri(next, method);
synchronized (this) {
Map<String, List<String>> latestMetadata = getRequestMetadata(uri);
if (mLastMetadata == null || mLastMetadata != latestMetadata) {
mLastMetadata = latestMetadata;
mCached = toHeaders(mLastMetadata);
}
cachedSaved = mCached;
}
headers.merge(cachedSaved);
delegate().start(responseListener, headers);
}
};
}
/**
* Generate a JWT-specific service URI. The URI is simply an identifier with enough
* information for a service to know that the JWT was intended for it. The URI will
* commonly be verified with a simple string equality check.
*/
private URI serviceUri(Channel channel, MethodDescriptor<?, ?> method)
throws StatusException {
String authority = channel.authority();
if (authority == null) {
throw Status.UNAUTHENTICATED
.withDescription("Channel has no authority")
.asException();
}
// Always use HTTPS, by definition.
final String scheme = "https";
final int defaultPort = 443;
String path = "/" + MethodDescriptor.extractFullServiceName(method.getFullMethodName());
URI uri;
try {
uri = new URI(scheme, authority, path, null, null);
} catch (URISyntaxException e) {
throw Status.UNAUTHENTICATED
.withDescription("Unable to construct service URI for auth")
.withCause(e).asException();
}
// The default port must not be present. Alternative ports should be present.
if (uri.getPort() == defaultPort) {
uri = removePort(uri);
}
return uri;
}
private URI removePort(URI uri) throws StatusException {
try {
return new URI(uri.getScheme(), uri.getUserInfo(), uri.getHost(), -1 /* port */,
uri.getPath(), uri.getQuery(), uri.getFragment());
} catch (URISyntaxException e) {
throw Status.UNAUTHENTICATED
.withDescription("Unable to construct service URI after removing port")
.withCause(e).asException();
}
}
private Map<String, List<String>> getRequestMetadata(URI uri) throws StatusException {
try {
return mCredentials.getRequestMetadata(uri);
} catch (IOException e) {
throw Status.UNAUTHENTICATED.withCause(e).asException();
}
}
private static Metadata toHeaders(Map<String, List<String>> metadata) {
Metadata headers = new Metadata();
if (metadata != null) {
for (String key : metadata.keySet()) {
Metadata.Key<String> headerKey = Metadata.Key.of(
key, Metadata.ASCII_STRING_MARSHALLER);
for (String value : metadata.get(key)) {
headers.put(headerKey, value);
}
}
}
return headers;
}
}
}
and here is my activity class
public class MainActivity extends AppCompatActivity implements MessageDialogFragment.Listener {
private static final String FRAGMENT_MESSAGE_DIALOG = "message_dialog";
private static final String STATE_RESULTS = "results";
private static final int REQUEST_RECORD_AUDIO_PERMISSION = 1;
private SpeechService mSpeechService;
private VoiceRecorder mVoiceRecorder;
private final VoiceRecorder.Callback mVoiceCallback = new VoiceRecorder.Callback() {
#Override
public void onVoiceStart() {
showStatus(true);
if (mSpeechService != null) {
mSpeechService.startRecognizing(mVoiceRecorder.getSampleRate());
}
}
#Override
public void onVoice(byte[] data, int size) {
if (mSpeechService != null) {
mSpeechService.recognize(data, size);
}
}
#Override
public void onVoiceEnd() {
showStatus(false);
if (mSpeechService != null) {
mSpeechService.finishRecognizing();
}
}
};
// Resource caches
private int mColorHearing;
private int mColorNotHearing;
// View references
private TextView mStatus;
private TextView mText, mResult;
private Button editButton, clearButton;
private SharedPreferences settings;
private final ServiceConnection mServiceConnection = new ServiceConnection() {
@Override
public void onServiceConnected(ComponentName componentName, IBinder binder) {
mSpeechService = SpeechService.from(binder);
mSpeechService.addListener(mSpeechServiceListener);
mStatus.setVisibility(View.VISIBLE);
}
@Override
public void onServiceDisconnected(ComponentName componentName) {
mSpeechService = null;
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
getSupportActionBar().setDisplayShowHomeEnabled(true);
final Resources resources = getResources();
final Resources.Theme theme = getTheme();
mColorHearing = ResourcesCompat.getColor(resources, R.color.status_hearing, theme);
mColorNotHearing = ResourcesCompat.getColor(resources, R.color.status_not_hearing, theme);
mStatus = (TextView) findViewById(R.id.status);
mText = (TextView) findViewById(R.id.text);
mResult = (TextView) findViewById(R.id.resultText);
editButton = (Button)findViewById(R.id.button1);
clearButton = (Button)findViewById(R.id.button2);
settings = getSharedPreferences("MyPreference", Context.MODE_PRIVATE);
clearButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Sounds sounds = new Sounds(getApplicationContext());
if(settings.getBoolean("muteAble", false ) == true){
sounds.playSound();
}
mResult.setText("");
}
});
editButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Sounds sounds = new Sounds(getApplicationContext());
if(settings.getBoolean("muteAble", false ) == true){
sounds.playSound();
}
Intent editIntent = new Intent(MainActivity.this, EditorActivity.class);
String forEditText = mResult.getText().toString();
editIntent.putExtra("forEdit", forEditText);
startActivity(editIntent);
}
});
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int id = item.getItemId();
if(id == android.R.id.home){
this.finish();
}
return super.onOptionsItemSelected(item);
}
@Override
protected void onStart() {
super.onStart();
// Prepare Cloud Speech API
bindService(new Intent(this, SpeechService.class), mServiceConnection, BIND_AUTO_CREATE);
// Start listening to voices
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO)
== PackageManager.PERMISSION_GRANTED) {
startVoiceRecorder();
} else if (ActivityCompat.shouldShowRequestPermissionRationale(this,
Manifest.permission.RECORD_AUDIO)) {
showPermissionMessageDialog();
} else {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.RECORD_AUDIO},
REQUEST_RECORD_AUDIO_PERMISSION);
}
}
@Override
protected void onStop() {
// Stop listening to voice
stopVoiceRecorder();
// Stop Cloud Speech API
mSpeechService.removeListener(mSpeechServiceListener);
unbindService(mServiceConnection);
mSpeechService = null;
super.onStop();
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
@NonNull int[] grantResults) {
if (requestCode == REQUEST_RECORD_AUDIO_PERMISSION) {
if (permissions.length == 1 && grantResults.length == 1
&& grantResults[0] == PackageManager.PERMISSION_GRANTED) {
startVoiceRecorder();
} else {
showPermissionMessageDialog();
}
} else {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
}
}
private void startVoiceRecorder() {
if (mVoiceRecorder != null) {
mVoiceRecorder.stop();
}
mVoiceRecorder = new VoiceRecorder(mVoiceCallback);
mVoiceRecorder.start();
}
private void stopVoiceRecorder() {
if (mVoiceRecorder != null) {
mVoiceRecorder.stop();
mVoiceRecorder = null;
}
}
private void showPermissionMessageDialog() {
MessageDialogFragment
.newInstance(getString(R.string.permission_message))
.show(getSupportFragmentManager(), FRAGMENT_MESSAGE_DIALOG);
}
private void showStatus(final boolean hearingVoice) {
runOnUiThread(new Runnable() {
@Override
public void run() {
mStatus.setTextColor(hearingVoice ? mColorHearing : mColorNotHearing);
}
});
}
@Override
public void onMessageDialogDismissed() {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.RECORD_AUDIO},
REQUEST_RECORD_AUDIO_PERMISSION);
}
private final SpeechService.Listener mSpeechServiceListener =
new SpeechService.Listener() {
@Override
public void onSpeechRecognized(final String text, final boolean isFinal) {
if (isFinal) {
mVoiceRecorder.dismiss();
}
if (mText != null && !TextUtils.isEmpty(text)) {
runOnUiThread(new Runnable() {
@Override
public void run() {
if (isFinal) {
mText.setText(null);
mResult.append(" "+text.toString());
} else {
mText.setText(text);
}
}
});
}
}
};
}
Thanks in advance for your help.

Ble with MVVM and Architecture Components, proper way?

I'm working with Bluetooth LE devices and I was thinking about my current approach and best practices. Currently I have an activity which handles the connection and the GattCallbacks, and I decided to restructure the code to get a better overview and better maintainability, because it's quite messy at the moment.
I found the BleManager from NordicSemiconductor https://github.com/NordicSemiconductor/Android-BLE-Library/
It's an abstraction of the basic steps for connecting to a BLE device; it handles the GattCallbacks and provides an appropriate interface for use from a service or a ViewModel.
I'd like to use the ViewModel approach, but I'm not so familiar with the MVC, MVP and MVVM patterns, and there are some questions I still can't answer.
This class extends the BleManager (BlinkyManager.java).
It shows how to make use of the BleManager, so I adapted the class and called it ECountBleManager.
EDIT:
For the last six days I have been doing research, especially into the MVVM pattern and the Architecture Components. Unfortunately there are still a lot of questions that I can't answer myself. But I really want to get better, so I made a draft of my current concept. I hope you can help me answer my questions and improve my project.
I'm especially interested in best practices.
Here is my draft:
And here are my class implementations:
ECountActivity.java
public class ECountActivity extends AppCompatActivity {
private ECountViewModel viewModel;
@Override
protected void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.detail_view);
// hide magnifier icon
GifImageView customLoader = findViewById(R.id.progressBar);
customLoader.setVisibility(View.GONE);
// Get additional data from previous activity
final BluetoothDevice device = getIntent().getParcelableExtra("device");
initViewModel();
viewModel.connect(device);
}
private void initViewModel() {
viewModel = ViewModelProviders.of(this).get(ECountViewModel.class);
subscribeDataStreams(viewModel);
}
private void subscribeDataStreams(ECountViewModel viewModel) {
viewModel.isDeviceReady().observe(this, deviceReady -> openOptionsFragment());
viewModel.isConnected().observe(this, status -> {
// Todo: ...
});
}
private void openOptionsFragment() {
// load options fragment
FragmentTransaction ft = getSupportFragmentManager().beginTransaction();
ft.replace(R.id.contentFragment, new OptionsFragment());
ft.commitNow();
}
}
OtaFragment.java
public class OtaFragment extends Fragment implements FolderChooserDialog.FolderCallback,
FileChooserDialog.FileCallback {
private Button partialOtaButton;
private Button fullOtaButton;
private Button submitButton;
private SeekBar mtuSeekBar;
private EditText mtuInput;
private LinearLayout stacklayout;
private Button browseAppButton;
private TextView folderPathText;
private TextView appFileNameText;
private MaterialDialog otaPrepareDialog;
private MaterialDialog otaProgressDialog;
private ECountViewModel viewModel;
private OtaViewModel otaViewModel;
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
initViewModel();
}
@Override
public View onCreateView(@NonNull LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
// inflate the layout for this fragment
View view = inflater.inflate(R.layout.ota_fragment, container, false);
initViews(view);
return view;
}
@Override
public void onFolderSelection(@NonNull FolderChooserDialog dialog, @NonNull File folder) {
final String otaFolderPath = folder.toString();
otaViewModel.setOtaFolderPath(otaFolderPath);
folderPathText.setText(otaFolderPath.substring(otaFolderPath.lastIndexOf("/")));
// enable app browse
browseAppButton.setClickable(true);
browseAppButton.setEnabled(true);
}
@Override
public void onFolderChooserDismissed(@NonNull FolderChooserDialog dialog) {}
@Override
public void onFileSelection(@NonNull FileChooserDialog dialog, @NonNull File file) {
final String otaAppFilePath = file.toString();
otaViewModel.setOtaAppFilePath(otaAppFilePath);
appFileNameText.setText(otaAppFilePath.substring(otaAppFilePath.lastIndexOf("/")));
// enable submitButton button
submitButton.setClickable(true);
submitButton.setEnabled(true);
}
@Override
public void onFileChooserDismissed(@NonNull FileChooserDialog dialog) {}
private void subscribeDataStreams(ECountViewModel viewModel) {
viewModel.isOtaMode().observe(this, otaMode -> {
otaPrepareDialog.dismiss();
initOtaProgressDialog();
otaProgressDialog.show();
// Todo: how can i get mtu?
viewModel.requestMtu(512);
});
}
private void initViewModel() {
viewModel = ViewModelProviders.of(getActivity()).get(ECountViewModel.class);
otaViewModel = ViewModelProviders.of(getActivity()).get(OtaViewModel.class);
subscribeDataStreams(viewModel);
}
private void initViews(View view) {
// get resources
final Button browseFolderButton = view.findViewById(R.id.browseFolder);
final Button cancelButton = view.findViewById(R.id.ota_cancel);
final SeekBar prioritySeekBar = view.findViewById(R.id.connection_seekBar);
partialOtaButton = view.findViewById(R.id.radio_ota);
fullOtaButton = view.findViewById(R.id.radio_ota_full);
browseAppButton = view.findViewById(R.id.browseApp);
folderPathText = view.findViewById(R.id.folderPathText);
appFileNameText = view.findViewById(R.id.appFileNameText);
stacklayout = view.findViewById(R.id.stacklayout);
submitButton = view.findViewById(R.id.ota_proceed);
mtuSeekBar = view.findViewById(R.id.mtu_seekBar);
mtuInput = view.findViewById(R.id.mtu_value);
// set initial states
mtuSeekBar.setMax(512-23);
mtuSeekBar.setProgress(244);
prioritySeekBar.setMax(2);
prioritySeekBar.setProgress(1);
browseAppButton.setClickable(false);
browseAppButton.setEnabled(false);
submitButton.setClickable(false);
submitButton.setEnabled(false);
mtuInput.setOnEditorActionListener((v, actionId, event) -> {
final Editable mtuText = mtuInput.getText();
if (mtuText != null) {
int mtu = Integer.valueOf(mtuText.toString());
if (mtu < 23) mtu = 23;
if (mtu > 512) mtu = 512;
mtuSeekBar.setProgress(mtu);
viewModel.setMtu(mtu);
}
return false;
});
mtuSeekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
@Override
public void onStartTrackingTouch(SeekBar seekBar) {}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {}
@Override
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
mtuInput.setText(String.valueOf(progress));
viewModel.setMtu(progress);
}
});
prioritySeekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener(){
@Override
public void onStartTrackingTouch(SeekBar seekBar) {}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {}
@Override
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
viewModel.setPriority(progress);
}
});
browseFolderButton.setOnClickListener(v -> new FolderChooserDialog.Builder(getActivity())
.chooseButton(R.string.positiveTextChoose)
.tag("#folder")
.show(getChildFragmentManager()));
browseAppButton.setOnClickListener(v -> new FileChooserDialog.Builder(getActivity())
.initialPath(otaViewModel.getOtaFolderPath())
.extensionsFilter(".ebl")
.tag("#app")
.show(getChildFragmentManager()));
cancelButton.setOnClickListener(v -> Log.i("ota", "cancelButton"));
submitButton.setOnClickListener(v -> {
// disable OTA submitButton button
submitButton.setClickable(false);
submitButton.setEnabled(false);
// init OTA process
viewModel.initOtaMode();
// show OTA preparing dialog
otaPrepareDialog.show();
});
fullOtaButton.setOnClickListener(v -> {
stacklayout.setVisibility(View.VISIBLE);
partialOtaButton.setBackgroundColor(getResources().getColor(R.color.colorPrimary));
fullOtaButton.setBackgroundColor(getResources().getColor(R.color.colorPrimaryDark));
});
partialOtaButton.setOnClickListener(v -> {
stacklayout.setVisibility(View.GONE);
partialOtaButton.setBackgroundColor(getResources().getColor(R.color.colorPrimaryDark));
fullOtaButton.setBackgroundColor(getResources().getColor(R.color.colorPrimary));
});
otaPrepareDialog = new MaterialDialog.Builder(getActivity())
.title(R.string.otaDialogHeaderText)
.content(R.string.waiting)
.progress(true, 0)
.progressIndeterminateStyle(true)
.build();
otaProgressDialog = new MaterialDialog.Builder(getActivity())
.title("test")
.customView(R.layout.ota_progress2, false)
.build();
}
private void initOtaProgressDialog() {
// Todo: ...
}
}
ECountViewModel.java
public class ECountViewModel extends AndroidViewModel implements ECountBleManagerCallbacks {
private final ECountBleManager eCountBleManager;
// Connection states Connecting, Connected, Disconnecting, Disconnected etc.
private final MutableLiveData<String> connectionState = new MutableLiveData<>();
// Flag to determine if the device is connected
private final MutableLiveData<Boolean> isConnected = new MutableLiveData<>();
// Flag to determine if the device is ready
private final MutableLiveData<Void> onDeviceReady = new MutableLiveData<>();
// Flag to determine if the device is in OTA mode
private final MutableLiveData<Void> onOtaMode = new MutableLiveData<>();
public LiveData<Void> isDeviceReady() {
return onDeviceReady;
}
public LiveData<Void> isOtaMode() {
return onOtaMode;
}
public LiveData<String> getConnectionState() {
return connectionState;
}
public LiveData<Boolean> isConnected() {
return isConnected;
}
public ECountViewModel(@NonNull final Application application) {
super(application);
// Initialize the manager
eCountBleManager = new ECountBleManager(getApplication());
eCountBleManager.setGattCallbacks(this);
}
/**
* Connect to peripheral
*/
public void connect(final BluetoothDevice device) {
eCountBleManager.connect(device);
}
/**
* Disconnect from peripheral
*/
private void disconnect() {
eCountBleManager.disconnect();
}
@Override
protected void onCleared() {
super.onCleared();
if (eCountBleManager.isConnected()) {
disconnect();
}
}
@Override
public void onDeviceConnecting(BluetoothDevice device) {
}
@Override
public void onDeviceConnected(BluetoothDevice device) {
isConnected.postValue(true);
}
@Override
public void onDeviceDisconnecting(BluetoothDevice device) {
isConnected.postValue(false);
}
@Override
public void onDeviceDisconnected(BluetoothDevice device) {
isConnected.postValue(false);
}
@Override
public void onLinklossOccur(BluetoothDevice device) {
isConnected.postValue(false);
}
@Override
public void onServicesDiscovered(BluetoothDevice device, boolean optionalServicesFound) {
}
@Override
public void onDeviceReady(BluetoothDevice device) {
onDeviceReady.postValue(null);
}
@Override
public void onOptionalServiceSupported(BluetoothDevice device) {
onOtaMode.postValue(null);
}
@Override
public void onBondingRequired(BluetoothDevice device) {
}
@Override
public void onBonded(BluetoothDevice device) {
}
@Override
public void onError(BluetoothDevice device, String message, int errorCode) {
}
@Override
public void onDeviceNotSupported(BluetoothDevice device) {
disconnect();
}
// delegate call from options fragment to ECountBleManager
public String getDeviceId() {
return BinaryUtils.byteArrayToHexString(eCountBleManager.getDeviceId());
}
// delegate call from ota fragment to ECountBleManager
public void setMtu(final int value) {
eCountBleManager.setMtu(value);
}
public void setPriority(final int value) {
eCountBleManager.setPriority(value);
}
ECountBleManager.java
public class ECountBleManager extends BleManager<BleManagerCallbacks> {
private static final String TAG = ECountBleManager.class.getSimpleName();
private final Handler handler;
private BluetoothGattCharacteristic authCharacteristic;
private BluetoothGattCharacteristic deviceIdCharacteristic;
private BluetoothGattCharacteristic deviceVersionCharacteristic;
private BluetoothGattCharacteristic configIdCharacteristic;
private BluetoothGattCharacteristic configTransmissionIntervalCharacteristic;
private BluetoothGattCharacteristic configKeepAliveIntervalCharacteristic;
private BluetoothGattCharacteristic configRadioModeCharacteristic;
private BluetoothGattCharacteristic configGpsCharacteristic;
private BluetoothGattCharacteristic configRadarCharacteristic;
private BluetoothGattCharacteristic configOperationModeCharacteristic;
private BluetoothGattCharacteristic configLoRaAppEuiCharacteristic;
private BluetoothGattCharacteristic configLoRaAppKeyCharacteristic;
private BluetoothGattCharacteristic configLoRaDeviceEuiCharacteristic;
private BluetoothGattCharacteristic operationCmdCharacteristic;
private BluetoothGattCharacteristic nemeusStatusCharacteristic;
private BluetoothGattCharacteristic gmrStatusCharacteristic;
private BluetoothGattCharacteristic radarStatusCharacteristic;
private BluetoothGattCharacteristic otaControlCharacteristic;
private BluetoothGattCharacteristic otaDataCharacteristic;
private byte[] configTransmissionInterval;
private byte[] configKeepAliveInterval;
private byte[] configRadioMode;
private byte[] configOperationMode;
private byte[] configId;
private byte[] deviceId;
private byte[] deviceVersion;
private byte[] configGps;
private byte[] configRadar;
private byte[] configLoRaAppEui;
private byte[] configLoRaAppKey;
private byte[] configLoRaDeviceEui;
private byte[] operationCmd;
private byte[] nemeusStatus;
private byte[] gmrStatus;
private byte[] radarStatus;
// OTA flags
private boolean isOtaProcessing = false;
private boolean isReconnectRequired = false;
private MutableLiveData<Boolean> isOtaMode = new MutableLiveData<>();
// OTA variables
private int mtu = 512;
private int priority = BluetoothGatt.CONNECTION_PRIORITY_HIGH;
private byte[] otaAppFileStream;
////////////////////////////
public ECountBleManager(Context context) {
super(context);
handler = new Handler();
}
@Override
protected BleManagerGattCallback getGattCallback() {
return gattCallback;
}
@Override
protected boolean shouldAutoConnect() {
return true;
}
/**
* BluetoothGatt callbacks for connection/disconnection, service discovery, receiving indication, etc
*/
private final BleManagerGattCallback gattCallback = new BleManagerGattCallback() {
@Override
protected void onDeviceReady() {
super.onDeviceReady();
}
@Override
protected void onOptionalServiceSupported() {
super.onOptionalServiceSupported();
isOtaMode.postValue(true);
}
@Override
protected boolean isOptionalServiceSupported(BluetoothGatt gatt) {
final BluetoothGattService otaService = gatt.getService(DC_UUID.otaService);
otaDataCharacteristic = otaService.getCharacteristic(DC_UUID.otaData);
return otaDataCharacteristic != null;
}
@Override
protected boolean isRequiredServiceSupported(BluetoothGatt gatt) {
final BluetoothGattService dcService = gatt.getService(DC_UUID.dcService);
final BluetoothGattService otaService = gatt.getService(DC_UUID.otaService);
if (dcService == null || otaService == null) return false;
authCharacteristic = dcService.getCharacteristic(DC_UUID.authentication);
deviceIdCharacteristic = dcService.getCharacteristic(DC_UUID.deviceId);
deviceVersionCharacteristic = dcService.getCharacteristic(DC_UUID.deviceVersion);
configIdCharacteristic = dcService.getCharacteristic(DC_UUID.configId);
configTransmissionIntervalCharacteristic = dcService.getCharacteristic(DC_UUID.configTransmissionInterval);
configKeepAliveIntervalCharacteristic = dcService.getCharacteristic(DC_UUID.configKeepAliveInterval);
configRadioModeCharacteristic = dcService.getCharacteristic(DC_UUID.configRadioMode);
configGpsCharacteristic = dcService.getCharacteristic(DC_UUID.configGps);
configRadarCharacteristic = dcService.getCharacteristic(DC_UUID.configRadar);
configOperationModeCharacteristic = dcService.getCharacteristic(DC_UUID.configOperationMode);
configLoRaAppEuiCharacteristic = dcService.getCharacteristic(DC_UUID.configLoRaAppEui);
configLoRaAppKeyCharacteristic = dcService.getCharacteristic(DC_UUID.configLoRaAppKey);
configLoRaDeviceEuiCharacteristic = dcService.getCharacteristic(DC_UUID.configLoRaDeviceEui);
operationCmdCharacteristic = dcService.getCharacteristic(DC_UUID.operationCmd);
nemeusStatusCharacteristic = dcService.getCharacteristic(DC_UUID.nemeusStatus);
gmrStatusCharacteristic = dcService.getCharacteristic(DC_UUID.gmrStatus);
radarStatusCharacteristic = dcService.getCharacteristic(DC_UUID.radarStatus);
otaControlCharacteristic = otaService.getCharacteristic(DC_UUID.otaControl);
return authCharacteristic != null &&
deviceIdCharacteristic != null &&
deviceVersionCharacteristic != null&&
configIdCharacteristic != null &&
configTransmissionIntervalCharacteristic != null &&
configKeepAliveIntervalCharacteristic != null &&
configRadioModeCharacteristic != null &&
configGpsCharacteristic != null &&
configRadarCharacteristic != null &&
configOperationModeCharacteristic != null &&
configLoRaAppEuiCharacteristic != null &&
configLoRaAppKeyCharacteristic != null &&
configLoRaDeviceEuiCharacteristic != null &&
operationCmdCharacteristic != null &&
nemeusStatusCharacteristic != null &&
gmrStatusCharacteristic != null &&
radarStatusCharacteristic != null &&
otaControlCharacteristic != null;
}
@Override
protected Deque<Request> initGatt(BluetoothGatt gatt) {
final LinkedList<Request> requests = new LinkedList<>();
requests.push(Request.readRequest(deviceIdCharacteristic));
requests.push(Request.readRequest(deviceVersionCharacteristic));
requests.push(Request.readRequest(configIdCharacteristic));
requests.push(Request.readRequest(configTransmissionIntervalCharacteristic));
requests.push(Request.readRequest(configKeepAliveIntervalCharacteristic));
requests.push(Request.readRequest(configRadioModeCharacteristic));
requests.push(Request.readRequest(configGpsCharacteristic));
requests.push(Request.readRequest(configRadarCharacteristic));
requests.push(Request.readRequest(configOperationModeCharacteristic));
requests.push(Request.readRequest(configLoRaAppEuiCharacteristic));
requests.push(Request.readRequest(configLoRaAppKeyCharacteristic));
requests.push(Request.readRequest(operationCmdCharacteristic));
requests.push(Request.readRequest(configLoRaDeviceEuiCharacteristic));
requests.push(Request.readRequest(nemeusStatusCharacteristic));
requests.push(Request.readRequest(gmrStatusCharacteristic));
requests.push(Request.readRequest(radarStatusCharacteristic));
// write authentication key to characteristic
requests.push(Request.writeRequest(authCharacteristic));
// perform server authentication
requests.push(Request.readRequest(authCharacteristic));
return requests;
}
@Override
protected void onDeviceDisconnected() {
authCharacteristic = null;
deviceIdCharacteristic = null;
deviceVersionCharacteristic = null;
configIdCharacteristic = null;
configTransmissionIntervalCharacteristic = null;
configKeepAliveIntervalCharacteristic = null;
configRadioModeCharacteristic = null;
configGpsCharacteristic = null;
configRadarCharacteristic = null;
configOperationModeCharacteristic = null;
configLoRaAppEuiCharacteristic = null;
configLoRaAppKeyCharacteristic = null;
configLoRaDeviceEuiCharacteristic = null;
nemeusStatusCharacteristic = null;
gmrStatusCharacteristic = null;
radarStatusCharacteristic = null;
otaDataCharacteristic = null;
}
@Override
protected void onMtuChanged(int mtu) {
super.onMtuChanged(mtu);
ECountBleManager.this.mtu = mtu;
}
@Override
protected void onCharacteristicRead(BluetoothGatt gatt, BluetoothGattCharacteristic characteristic) {
super.onCharacteristicRead(gatt, characteristic);
if (characteristic.getUuid().equals(DC_UUID.authentication)) {
byte encryptedData[];
try {
encryptedData = BinaryUtils.encryptByteArray(characteristic.getValue());
} catch (Exception e) {
e.printStackTrace();
return;
}
characteristic.setValue(encryptedData);
} else if (characteristic.getUuid().equals(DC_UUID.deviceId)) {
deviceId = characteristic.getValue();
} else if (characteristic.getUuid().equals(DC_UUID.deviceVersion)) {
deviceVersion = characteristic.getValue();
} else if (characteristic.getUuid().equals(DC_UUID.configId)) {
configId = characteristic.getValue();
} else if (characteristic.getUuid().equals(DC_UUID.configTransmissionInterval)) {
configTransmissionInterval = characteristic.getValue();
} else if (characteristic.getUuid().equals(DC_UUID.configKeepAliveInterval)) {
configKeepAliveInterval = characteristic.getValue();
} else if (characteristic.getUuid().equals(DC_UUID.configRadioMode)) {
configRadioMode = characteristic.getValue();
} else if (characteristic.getUuid().equals(DC_UUID.configGps)) {
configGps = characteristic.getValue();
} else if (characteristic.getUuid().equals(DC_UUID.configRadar)) {
configRadar = characteristic.getValue();
} else if (characteristic.getUuid().equals(DC_UUID.configOperationMode)) {
configOperationMode = characteristic.getValue();
} else if (characteristic.getUuid().equals(DC_UUID.configLoRaAppEui)) {
configLoRaAppEui = characteristic.getValue();
} else if (characteristic.getUuid().equals(DC_UUID.configLoRaAppKey)) {
configLoRaAppKey = characteristic.getValue();
} else if (characteristic.getUuid().equals(DC_UUID.configLoRaDeviceEui)) {
configLoRaDeviceEui = characteristic.getValue();
} else if (characteristic.getUuid().equals(DC_UUID.nemeusStatus)) {
nemeusStatus = characteristic.getValue();
} else if (characteristic.getUuid().equals(DC_UUID.gmrStatus)) {
gmrStatus = characteristic.getValue();
} else if (characteristic.getUuid().equals(DC_UUID.radarStatus)) {
radarStatus = characteristic.getValue();
}
}
@Override
protected void onCharacteristicWrite(BluetoothGatt gatt,
BluetoothGattCharacteristic characteristic) {
super.onCharacteristicWrite(gatt, characteristic);
if (characteristic.getUuid().equals(DC_UUID.otaControl)) {
final byte[] otaControl = characteristic.getValue();
if (otaControl.length == 1) {
// OTA client initiates the update process
if (otaControl[0] == (byte) 0x00) {
// set OTA process flag
isOtaProcessing = true;
// check whether device is in OTA mode
if (isOtaMode.getValue()) {
// request MTU size
requestMtu(mtu);
// start update process,but ensure MTU size has been requested
handler.postDelayed(() -> uploadOta(), 2000);
} else {
// reconnect to establish OTA mode
isReconnectRequired = true;
// enforces device to reconnect
gatt.disconnect();
}
}
// OTA client finishes the update process
if (otaControl[0] == (byte) 0x03) {
if (isOtaProcessing) { // if device is in OTA mode and update process was successful
isOtaProcessing = false;
disconnect();
} else { // if device is in OTA mode, but update process was not established
// enforces device to reconnect
gatt.disconnect();
}
}
}
}
}
@Override
public void onCharacteristicChanged(BluetoothGatt gatt, BluetoothGattCharacteristic characteristic) {
super.onCharacteristicChanged(gatt, characteristic);
}
};
public byte[] getDeviceId() {
return deviceId;
}
public void setDeviceId(final byte[] value) {
writeCharacteristic(deviceIdCharacteristic,
BluetoothGattCharacteristic.WRITE_TYPE_DEFAULT,
value);
}
// Todo: implement other getters and setters
// Here I have to get the otaAppFilePath which I discovered in OtaFragment
public void uploadOta(final String otaAppFilePath) {
if (otaDataCharacteristic != null) {
otaDataCharacteristic.setWriteType(BluetoothGattCharacteristic.WRITE_TYPE_NO_RESPONSE);
byte[] ebl = null;
try {
FileInputStream fileInputStream = new FileInputStream(otaAppFilePath);
int size = fileInputStream.available();
byte[] temp = new byte[size];
fileInputStream.read(temp);
fileInputStream.close();
ebl = temp;
} catch (Exception e) {
Logger.e(TAG, "Couldn't open file " + e);
}
otaAppFileStream = ebl;
pack = 0;
// start update process in another thread
Thread otaUploadThread = new Thread(() -> otaWriteDataReliable());
otaUploadThread.start();
}
}
private void writeCharacteristic(final BluetoothGattCharacteristic c,
final int writeType,
final byte[] value) {
if (c == null)
return;
c.setWriteType(writeType);
c.setValue(value);
writeCharacteristic(c); // will call the underlying API of BleManager
}
}
The code covers the basic use cases, but I'm still not sure how to wire the individual components together.
While reading about MVVM I noticed that there is always more than one possible solution/approach. I came up with the following questions:
1. Is the ECountBleManager the right place to store the values I get by calling characteristic.getValue(), and if so, should I also place the values collected in OtaFragment there (which would mean forwarding values such as mtu to the ECountBleManager)? Consider that I have to access the values collected in OtaFragment from other Fragments as well.
2. Where do I store the values from OtaFragment? In ECountViewModel, in ECountBleManager, or do I create an OtaViewModel (and if so, how can I access the ECountBleManager instance that I already created in ECountViewModel from within the OtaViewModel)?
3. How can I access mtu, priority and otaAppFile, which are set in OtaFragment, from within the ECountBleManager?
4. Do I have one ViewModel for every activity and fragment? And if so, how do I solve the problem with the ECountBleManager instance (see question 2)?
5. How does the ECountBleManager fit into the MVVM pattern? I would guess it is part of the Model, but which part: Repository, Interactor, Controller, Mediator?
Sorry for the amount of code, but as you can see I'm really trying hard and I want to get better. I hope someone can help me with my questions and with improving my code. Thanks in advance!
My two cents about BLE and architecture:
Don't mix the app architecture and the BLE layer together.
Just think of BLE as a data source / repository (based on Rx or coroutines).
Split the functionality into several parts: a BLE scanner, a BLE connector and a BLE command executor, and put them behind a BLE facade (a rough sketch of that split follows below).
The BLE layer is inherently asynchronous (and it's too complicated to make it 100% testable).
At a deeper level the BLE layer is the thread-safe handling of bytes. Don't handle it on the main thread.
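To make that split a bit more concrete, here is a minimal sketch of what a scanner/connector/command-executor facade could look like. All interface and member names are made up for illustration, and it is written in C# to match the Xamarin parts of this page; the same shape works in Java or Kotlin with callbacks, Rx or LiveData.
using System;
using System.Threading.Tasks;
// Hypothetical interfaces -- purely illustrative, not from any specific library.
public interface IBleScanner
{
    IObservable<string> ScanForDevices();                 // emits discovered device ids
}
public interface IBleConnector
{
    bool IsConnected { get; }
    Task ConnectAsync(string deviceId);
    Task DisconnectAsync();
}
public interface IBleCommandExecutor
{
    // Serializes GATT reads/writes so operations never overlap.
    Task<byte[]> ReadAsync(Guid characteristic);
    Task WriteAsync(Guid characteristic, byte[] value);
}
// The facade is the only BLE type the ViewModel (or a repository) talks to.
public class BleFacade
{
    private readonly IBleScanner _scanner;
    private readonly IBleConnector _connector;
    private readonly IBleCommandExecutor _executor;
    public BleFacade(IBleScanner scanner, IBleConnector connector, IBleCommandExecutor executor)
    {
        _scanner = scanner;
        _connector = connector;
        _executor = executor;
    }
    public async Task<byte[]> ReadCharacteristicAsync(string deviceId, Guid characteristic)
    {
        if (!_connector.IsConnected)
            await _connector.ConnectAsync(deviceId);      // connect lazily, off the main thread
        return await _executor.ReadAsync(characteristic);
    }
}
In MVVM terms this facade (or a repository wrapping it) lives in the Model layer; the ViewModel only consumes it and exposes observable state to the views, which is one way to read question 5 above.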

Xamarin.Forms DependencyService difficulty getting location

So after finally getting my head around Xamarin.Forms DependencyService I have nearly got it returning the device's current location.
My interface:
public interface ICurrentLocation
{
MyLocation SetCurrentLocation();
}
MyLocation
public class MyLocation
{
public double Latitude {get; set;}
public double Longitude{get; set;}
}
The line that calls it:
MyLocation location = DependencyService.Get<ICurrentLocation>().SetCurrentLocation();
And in the CurrentLocation class in the Android project, which implements it using the Geolocator class from Xamarin.Mobile:
[assembly: Dependency(typeof(CurrentLocation))]
namespace MyCockburn.Droid
{
public class CurrentLocation : Activity, ICurrentLocation
{
Geolocator locator;
Position position = new Position();
MyLocation location;
public MyLocation SetCurrentLocation()
{
GetPosition();
location = new MyLocation()
{
Latitude = position.Latitude,
Longitude = position.Longitude
};
return location;
}
async void GetPosition()
{
try
{
locator = new Geolocator(this) { DesiredAccuracy = 50 };
if (locator.IsListening != true)
locator.StartListening(minTime: 1000, minDistance: 0);
position = await locator.GetPositionAsync(timeout: 20000);
}
catch (Exception e)
{
Log.Debug("GeolocatorError", e.ToString());
}
}
}
}
My problem seems to be that it returns location before position holds the longitude and latitude.
I am hoping my mistake is glaringly obvious.
EDIT: the code works if I run it as a normal Android Activity.
I would make a slight modification, since best practice is to go either all async or not at all. When you try to return the result of an async method from a non-async method you can run into deadlocks. Also, since you aren't using the await keyword when calling the GetPosition method, you get back a Task but never observe when the operation completes. I suggest modifying your code as follows.
public interface ICurrentLocation {
Task<MyLocation> GetCurrentLocation();
}
public async Task<MyLocation> GetCurrentLocation()
{
var position = await GetPosition();
return new MyLocation()
{
Latitude = position.Latitude,
Longitude = position.Longitude
};
}
async Task<Position> GetPosition()
{
try
{
locator = new Geolocator(this) { DesiredAccuracy = 50 };
if (locator.IsListening != true)
locator.StartListening(minTime: 1000, minDistance: 0);
return await locator.GetPositionAsync(timeout: 20000);
}
catch (Exception e)
{
Log.Debug("GeolocatorError", e.ToString());
}
}
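With the interface returning a Task, the call in the shared Xamarin.Forms code also has to be awaited. A minimal sketch, with made-up page and handler names (MyLocation and ICurrentLocation are the types from the question, and the usual Xamarin.Forms usings are assumed):
// Shared Xamarin.Forms code
public partial class MainPage : ContentPage
{
    async void OnGetLocationClicked(object sender, EventArgs e)
    {
        // Await the platform implementation; Latitude/Longitude are only read
        // after the position has actually been acquired.
        MyLocation location = await DependencyService.Get<ICurrentLocation>().GetCurrentLocation();
        await DisplayAlert("Location", $"{location.Latitude}, {location.Longitude}", "OK");
    }
}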
You aren't waiting for the position function to finish. There are many options, and keeping it async is the best one, but if you want it synchronous then try this blocking call:
void GetPosition()
{
try
{
locator = new Geolocator(this) { DesiredAccuracy = 50 };
if (locator.IsListening != true)
locator.StartListening(minTime: 1000, minDistance: 0);
position = locator.GetPositionAsync(timeout: 20000).Result;
}
catch (Exception e)
{
Log.Debug("GeolocatorError", e.ToString());
}
}
I also recommend taking a look at Xamarin.Forms.Labs, as it already has an abstracted GPS service and a working sample that is functional on all three platforms:
https://github.com/XForms/Xamarin-Forms-Labs
Try adding the assembly attribute above the namespace and awaiting the GetPosition method.
Take a look at this image:
http://developer.xamarin.com/guides/cross-platform/xamarin-forms/dependency-service/Images/solution.png
I developed an app that gets the GPS location without problems. I believe the code below will be of great help.
You can then edit the SubmitGPSLocation function to your preference.
public async Task Run(CancellationToken token)
{
await Task.Run(async () =>
{
if (GPSService.Instance.IsListening)
{
GPSService.Instance.StopListening();
}
GPSService.Instance.StartListening(2500, 50, true);
GPSService.Instance.PositionChanged += Instance_PositionChanged;
System.Diagnostics.Debug.WriteLine(getRunningStateLocationService());
while (getRunningStateLocationService())
{
token.ThrowIfCancellationRequested();
await Task.Delay(500).ConfigureAwait(true);
}
GPSService.Instance.StopListening();
//await CrossGeolocator.Current.StopListeningAsync().ConfigureAwait(true);
GPSService.Instance.PositionChanged -= Instance_PositionChanged;
return;
}, token).ConfigureAwait(false);
}
private void Instance_PositionChanged(object sender, PositionEventArgs e)
{
try
{
isEvenCount = !isEvenCount;
if (e.Position != null)
{
var message = new LocationMessage
{
Latitude = e.Position.Latitude,
Longitude = e.Position.Longitude,
Accuracy = e.Position.Accuracy,
Speed = e.Position.Speed,
Heading = e.Position.Heading,
TimeStamp = e.Position.Timestamp.DateTime
};
SubmitGPSLocation(e).ConfigureAwait(false);
}
else
{
CrossToastPopUp.Current.ShowToastMessage("Failed to get GPS location");
}
}
catch (Exception ex)
{
CrossToastPopUp.Current.ShowToastMessage(ex.Message);
}
}
private static async Task SubmitGPSLocation(PositionEventArgs e)
{
if (!NetworkCheck.IsInternet())
{
return;
}
if (!int.TryParse(App.PhoneID, out var number))
{
return;
}
try
{
var thetrackers = Convert.ToString(Application.Current.Properties["AuthorizedTrackers"]);
GeneralUserPhoneLocation MyGeneralUserPhoneLocation = new GeneralUserPhoneLocation();
MyGeneralUserPhoneLocation.PhoneID = int.Parse(App.PhoneID);
MyGeneralUserPhoneLocation.Latitude = e.Position.Latitude.ToString("n6");
MyGeneralUserPhoneLocation.Longitude = e.Position.Longitude.ToString("n6");
MyGeneralUserPhoneLocation.Accuracy = e.Position.Accuracy;
MyGeneralUserPhoneLocation.Heading = e.Position.Heading;
MyGeneralUserPhoneLocation.Speed = e.Position.Speed;
MyGeneralUserPhoneLocation.TimeStamp = e.Position.Timestamp.DateTime;
MyGeneralUserPhoneLocation.RequestType = "N";
MyGeneralUserPhoneLocation.Comment = thetrackers;
string servicestring = JsonConvert.SerializeObject(MyGeneralUserPhoneLocation);
HttpContent theusercontent = new StringContent(servicestring, Encoding.UTF8, "application/json");
using (HttpClient client = new HttpClient())
{
client.BaseAddress = new Uri("https://mygpswebapi.com");
var response = await client.PostAsync("Home/SaveGeneralUserPhoneLocationAPP/", theusercontent).ConfigureAwait(true);
if (response.IsSuccessStatusCode)
{
}
else
{
}
}
}
catch (Exception ex)
{
CrossToastPopUp.Current.ShowToastMessage(ex.Message);
}
}
