How to create multiple instances of a service in Android?

I have created a service DownloadService1 which uses an object downloadData1.
Now I want to perform 2 download operations simultaneously, using objects downloadData1 and downloadData2, but I don't want to create an exact copy of DownloadService1, say DownloadService2, that works with downloadData2.
Is there any way to create multiple instances of a Service to achieve what I want?

AFAIK it is not possible to create multiple instances of a service; only one instance of a service will run.
Calling startService() multiple times does not result in multiple services being started.
When you call startService() there are only two possibilities:
If the service is not started previously, it will start as per the service lifecycle.
If the service is previously started, only onStartCommand() will be called again, with the intent you passed to it.
For more information, read the Service docs.
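Since only one instance ever exists, the usual pattern is to let that single instance serve several downloads concurrently, for example by handing each start request to a thread pool in onStartCommand(). A minimal sketch, assuming a hypothetical DownloadService with a "url" extra (the names are illustrative, not from the question):
import android.app.Service;
import android.content.Intent;
import android.os.IBinder;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class DownloadService extends Service {
    //one service instance, many concurrent downloads
    private final ExecutorService executor = Executors.newFixedThreadPool(2);

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        //called once per startService() call, always on the same instance
        final String url = intent.getStringExtra("url");//hypothetical extra
        executor.execute(new Runnable() {
            @Override
            public void run() {
                downloadData(url);//your download logic goes here
            }
        });
        return START_STICKY;
    }

    private void downloadData(String url) { /* ... */ }

    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }

    @Override
    public void onDestroy() {
        executor.shutdownNow();
        super.onDestroy();
    }
}
Calling startService() twice with two different urls then runs both downloads in parallel inside the single instance.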

How about you create an object that can handle multiple downloads simultaneously, and then use this object in your service? You can check out my workaround for the download:
public class MediaDownloadClient extends MediaDownloadListener implements Runnable {
private static final int DOWNLOAD_TRACKER_BREAK = 500; //ms
private static MediaDownloadClient singleton;
private final Object lock = new Object();
private OkHttpClient mHttpClient;
private Thread downloadThread;//main download thread
private ExecutorService threadPool = Executors.newCachedThreadPool();
private boolean downloading = false, running = false;
private Queue<MediaDownloadEntry> downloadQueue = new ArrayDeque<>();
private int stopDownloadId = 0;
private MediaDownloadClient() {
OkHttpClient.Builder builder = new OkHttpClient.Builder();
builder.connectTimeout(15, TimeUnit.SECONDS);//15000 with TimeUnit.SECONDS was almost certainly meant to be 15 seconds (15000 ms)
builder.readTimeout(15, TimeUnit.SECONDS);
mHttpClient = builder.build();
}
public static MediaDownloadClient getInstance() {
if (singleton == null) {
singleton = new MediaDownloadClient();
singleton.start();
}
return singleton;
}
public static boolean download(String url, String toPath) throws IOException {
if (url == null || toPath == null) return false;
OkHttpClient okHttpClient = new OkHttpClient();
Request.Builder builder = new Request.Builder().url(url);
Response response = okHttpClient.newCall(builder.build()).execute();
if (!response.isSuccessful()) return false;
BufferedInputStream bis = null;
BufferedOutputStream bos = null;
try {
bis = new BufferedInputStream(response.body().byteStream());
bos = new BufferedOutputStream(new FileOutputStream(toPath));
byte[] buffer = new byte[1024 * 200];
int read;
while ((read = bis.read(buffer)) != -1) {
bos.write(buffer, 0, read);
}
} finally {
if (bos != null) bos.close();
if (bis != null) bis.close();
}
return true;
}
public void addListener(Listener listener) {
super.addListener(listener);
}
public void removeListener(Listener listener) {
super.removeListener(listener);
}
public void queue(MediaModel item, String toPath) {
MediaDownloadEntry download = new MediaDownloadEntry();
download.setMedia(item);
//serving as downloadId.. to keep track of the download
download.downloadId = 2 + new Random().nextInt(Integer.MAX_VALUE);
download.downloadTo = toPath;
download.downloadState = MediaDownloadEntry.STATE_QUEUED;
downloadQueue.add(download);
dispatchDownloadEvent(download, MediaDownloadEntry.STATE_QUEUED);
synchronized (lock) {
lock.notify();
}
}
public void queue(String url, String fileName, String toPath, int mediaType) {
MediaDownloadEntry model = new MediaDownloadEntry();
model.dataUrl = url;
model.mediaType = mediaType;
model.fileName = fileName;
queue(model, toPath);
}
@Override
public void run() {
while (running) {
synchronized (lock) {
while (running && (downloading || downloadQueue.size() == 0)) {//also leave the wait when terminate() is called
try {
lock.wait();
} catch (Exception ignored) {
}
}
try {
MediaDownloadEntry queue = downloadQueue.poll();
if (queue != null) {
downloading = true;
download(queue);
}
} catch (Exception ignored) {
}
downloading = false;
lock.notify();
}
}
}
private void start() {
if (downloadThread == null)
downloadThread = new Thread(this);
running = true;//set before starting the thread so run() does not exit immediately
downloadThread.start();
}
private void download(MediaDownloadEntry queue) {
DownloadTracker tracker = new DownloadTracker(queue);
BufferedInputStream bis = null;
BufferedOutputStream bos = null;
try {
Request.Builder builder = new Request.Builder().url(queue.dataUrl);
Response response = mHttpClient.newCall(builder.build()).execute();
bis = new BufferedInputStream(response.body().byteStream());
bos = new BufferedOutputStream(new FileOutputStream(queue.downloadTo));
if (queue.fileSize == 0) {
String len = response.header("Content-Length", "0");
queue.fileSize = Integer.parseInt(len);
}
if (queue.fileName == null) {
queue.fileName = response.header("Content-Name", "file");
}
if (queue.mimeType == null) {
queue.mimeType = response.header("Content-Type", "media");
}
queue.downloadState = MediaDownloadEntry.STATE_START;
dispatchDownloadEvent(queue, MediaDownloadEntry.STATE_START);
downloadTracker(tracker);//start download tracking
byte[] buffer = new byte[1024 * 1000];//1MB buffer allocated
while ((tracker.read = bis.read(buffer)) != -1) {
if (stopDownloadId == queue.downloadId) {
queue.downloadState = MediaDownloadEntry.ERROR;
dispatchDownloadEvent(queue, MediaDownloadEntry.ERROR);
tracker.read = -1;//also terminates the tracker thread
break;
}
tracker.count += tracker.read;
bos.write(buffer, 0, tracker.read);
}
if (queue.downloadState != MediaDownloadEntry.ERROR) {//don't report a stopped download as completed
queue.downloadState = MediaDownloadEntry.STATE_COMPLETED;
dispatchDownloadEvent(queue, MediaDownloadEntry.STATE_COMPLETED);
}
} catch (Exception e) {
tracker.read = -1;//to terminate tracker
queue.downloadState = MediaDownloadEntry.ERROR;
dispatchDownloadEvent(queue, MediaDownloadEntry.ERROR);
} finally {
try {
if (bis != null) bis.close();
if (bos != null) bos.close();
} catch (Exception ignored) {
}
}
}
public void stopDownload(int downloadId) {
stopDownloadId = downloadId;
}
private void downloadTracker(final DownloadTracker tracker) {
threadPool.execute(new Runnable() {
@Override
public void run() {
try {
Thread.sleep(DOWNLOAD_TRACKER_BREAK); //pause to allow the first update
while (tracker.read != -1) {
tracker.download.downloadCount = tracker.count;
tracker.download.downloadState = MediaDownloadEntry.STATE_PROGRESS;
dispatchDownloadEvent(tracker.download, MediaDownloadEntry.STATE_PROGRESS);
Thread.sleep(DOWNLOAD_TRACKER_BREAK); //break for another DOWNLOAD_TRACKER_BREAK (500ms)
}
} catch (Exception ignored) {
}
}//end run
});
}
public void terminate() {
running = false;
downloadQueue.clear();//clearing instead of nulling avoids an NPE in the worker loop
synchronized (lock) {
lock.notify();//wake the worker so it can observe running == false and exit
}
}
private class DownloadTracker {
int read, count;
MediaDownloadEntry download;
DownloadTracker(MediaDownloadEntry model) {
this.download = model;
}
}
}
public class MediaDownloadEntry extends MediaModel {
public static final int ERROR = -1;
public static final int STATE_QUEUED = 0;
public static final int STATE_START = 1;
public static final int STATE_PROGRESS = 2;//was also 1, which made it indistinguishable from STATE_START
public static final int STATE_COMPLETED = 3;
public int downloadCount, downloadState;
public int downloadId;
public String downloadUrl, downloadTo;
@Override
public boolean equals(Object o) {
return o instanceof MediaDownloadEntry && ((MediaDownloadEntry) o).downloadId == downloadId;
}
}
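For completeness, here is roughly how the client above could be driven from a single Service. The MediaDownloadService name and the intent extras are hypothetical; queue() and terminate() are the methods defined above:
import android.app.Service;
import android.content.Intent;
import android.os.IBinder;

public class MediaDownloadService extends Service {
    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        //every startService() call lands here on the same instance;
        //just forward the request to the client's queue
        String url = intent.getStringExtra("url");//hypothetical extras
        String fileName = intent.getStringExtra("name");
        String toPath = intent.getStringExtra("path");
        int mediaType = intent.getIntExtra("type", 0);
        MediaDownloadClient.getInstance().queue(url, fileName, toPath, mediaType);
        return START_STICKY;
    }

    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }

    @Override
    public void onDestroy() {
        MediaDownloadClient.getInstance().terminate();
        super.onDestroy();
    }
}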

Related

Socket closes after the device goes into deep sleep - Android Xamarin

I have created a simple TCP server which I use to send notifications to an android app (xamarin).
When the app is in the foreground it works correctly, but when I sleep the device (lock it), after 1-3 minutes I get an error message - "An existing connection was forcibly closed by the remote host" - from the server, and I lose the connection.
I have created a service in the android project that takes care of the connection.
Test Server :
namespace AsyncServer
{
class Program
{
private static byte[] _buffer = new byte[2048];
private static List<connectedUser> Users = new List<connectedUser>();
private static Socket _serverSocket = new Socket
(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);
private static int port = 25565;
static void Main(string[] args)
{
SetupServer();
while (true)
{
//Console.WriteLine("pick user name");
string userText = "User";// Console.ReadLine();
Console.WriteLine("Enter");
string re = Console.ReadLine();
string json = JsonConvert.SerializeObject(new NotificationOrderModel() { User = "Server", Name = re});
byte[] buffer = Encoding.UTF8.GetBytes(json);
if (Users != null)
{
foreach (connectedUser user in Users.ToList())
if (user.Name == userText)
Send(user, buffer);
}
}
}
private static void Send(connectedUser user, byte[] buff)
{
try
{
user._clientSocket.BeginSend(buff, 0, buff.Length, SocketFlags.None, new AsyncCallback(SendCallback), user._clientSocket);
}
catch (Exception)
{
Users.Remove(user);
}
}
private static void SetupServer()
{
Console.WriteLine("setting server");
_serverSocket.Bind(new IPEndPoint(IPAddress.Any, port));
_serverSocket.Listen(5);
_serverSocket.BeginAccept(new AsyncCallback(AcceptCallback), null);
}
private static void AcceptCallback(IAsyncResult AR)
{
Socket socket = _serverSocket.EndAccept(AR);
socket.BeginReceive(_buffer, 0, _buffer.Length, SocketFlags.None, new AsyncCallback(ReceiveCallback), socket);
_serverSocket.BeginAccept(new AsyncCallback(AcceptCallback), _serverSocket);
}
private static void ReceiveCallback(IAsyncResult AR)
{
try
{
Socket socket = (Socket)AR.AsyncState;
int received = socket.EndReceive(AR);
if (received > 0)
{
byte[] dataBuf = new byte[received];
Array.Copy(_buffer, dataBuf, received);
string data = Encoding.UTF8.GetString(dataBuf);
var tmp = JsonConvert.DeserializeObject<NotificationOrderModel>(data);
if (Users != null)
{
bool flag = false;
foreach (connectedUser user in Users.ToList())
if (user._clientSocket == socket)
flag = true;
if (!flag)
{
Users.Add(new connectedUser() { Name = tmp.User, _clientSocket = socket });
Console.WriteLine("connected - " + tmp.User); // to be removed
}
}
if (tmp.isPing)
{
string json = JsonConvert.SerializeObject(new NotificationOrderModel() { User = "Server", isPing = true });
if (Users != null)
{
foreach (connectedUser user in Users.ToList())
if (user.Name == tmp.User)
Send(user, Encoding.UTF8.GetBytes(json));//reply with the ping json built above instead of echoing the raw buffer
}
}
}
socket.BeginReceive(_buffer, 0, _buffer.Length, SocketFlags.None, new AsyncCallback(ReceiveCallback), socket);
}
catch (SocketException ex)
{
//when user disconnects - closes the application
}
}
private static void SendCallback(IAsyncResult AR)
{
Socket socket = (Socket)AR.AsyncState;
socket.EndSend(AR);
}
}
class connectedUser
{
public string Name;
public Socket _clientSocket;
}
public class NotificationOrderModel //: OrderModel, ICloneable
{
public string Name;
public decimal Qty;
public string ServingPlace;
public string User;
public bool Seen = false;
public bool isPing = false;
public NotificationOrderModel()
{
}
}
}
Socket service in Android:
[Service]
[assembly: Dependency(typeof(SocketService))]
public class SocketService : Service
{
public override IBinder OnBind(Intent intent)
{
return null;
}
public override StartCommandResult OnStartCommand(Intent intent, StartCommandFlags flags, int startId)
{
Ping();
return StartCommandResult.Sticky;
}
public override void OnDestroy()
{
//if (_cts != null)
//{
// _cts.Token.ThrowIfCancellationRequested();
// _cts.Cancel();
//}
base.OnDestroy();
}
public static ObservableCollection<NotificationOrderModel> OrderList = new ObservableCollection<NotificationOrderModel>();
private static Socket _serverSocket = new Socket
(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);
private static byte[] _buffer = new byte[2048];
private static string IP = App.Settings.ServerIP;
private const int port = 25565;
private static IPEndPoint remoteEP = new IPEndPoint(IPAddress.Parse(IP), port);
public static bool Connected = false;
public static async Task<bool> Start()
{
if (!Connected)
{
//_serverSocket.ReceiveTimeout
_serverSocket.BeginConnect(remoteEP, new AsyncCallback(ConnectCallback), _serverSocket);
if (_serverSocket.Connected == true)
Connected = true;
}
try
{
if (Connected == true)
{
// Console.ReadLine();
string json = JsonConvert.SerializeObject(new NotificationOrderModel() { User = App.DataProvider.User.Username, isPing = true });
byte[] data = Encoding.UTF8.GetBytes(json); //"test"
_serverSocket.BeginSend(data, 0, data.Length, SocketFlags.None, new AsyncCallback(SendCallback), _serverSocket);
_serverSocket.BeginReceive(_buffer, 0, _buffer.Length, SocketFlags.None, new AsyncCallback(ReceiveCallback), _serverSocket);
}
}
catch (Exception)
{
Connected = false;
}
return false;
}
//initiate
//ping
private static void ConnectCallback(IAsyncResult ar)
{
try
{
Socket client = (Socket)ar.AsyncState;
client.EndConnect(ar);
//Console.Clear();
Console.WriteLine("Socket connected to {0}", client.RemoteEndPoint.ToString());
Connected = true;
}
catch (Exception e)
{
Thread.Sleep(200);
_serverSocket.BeginConnect(remoteEP, new AsyncCallback(ConnectCallback), _serverSocket);
//Console.Clear();
Console.WriteLine("Attempting to connect");
}
}
private static void ReceiveCallback(IAsyncResult AR)
{
try
{
Socket socket = (Socket)AR.AsyncState;
int received = socket.EndReceive(AR);
if (received > 0)
{
byte[] dataBuf = new byte[received];
Array.Copy(_buffer, dataBuf, received);
string data = Encoding.UTF8.GetString(dataBuf);
var tmp = JsonConvert.DeserializeObject<NotificationOrderModel>(data);
if (!tmp.isPing)
{
OrderList.Add(tmp);
// DependencyService.Get<INotification>().CreateNotification("title", "text");
}
}
socket.BeginReceive(_buffer, 0, _buffer.Length, SocketFlags.None, new AsyncCallback(ReceiveCallback), socket);
}
catch (SocketException ex)
{
//throw;
}
}
private static void SendCallback(IAsyncResult AR)
{
Socket socket = (Socket)AR.AsyncState;
socket.EndSend(AR);
}
private static async Task<bool> Initialized()
{
_serverSocket.ReceiveTimeout = -1;
_serverSocket.SendTimeout = -1;
_serverSocket.BeginConnect(remoteEP, new AsyncCallback(ConnectCallback), _serverSocket);
try
{
Thread.Sleep(1000);
_serverSocket.BeginReceive(_buffer, 0, _buffer.Length, SocketFlags.None, new AsyncCallback(ReceiveCallback), _serverSocket);
string json = JsonConvert.SerializeObject(new NotificationOrderModel() { User = App.DataProvider.User.Username, isPing = true });
byte[] data = Encoding.UTF8.GetBytes(json);
_serverSocket.BeginSend(data, 0, data.Length, SocketFlags.None, new AsyncCallback(SendCallback), _serverSocket);
}
catch (SocketException ex)
{
Connected = false;
// _serverSocket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp); ;
}
return true;
}
public static async Task<bool> Ping()
{
if (Connected != true)
return await Initialized();
else
{
try
{
string json = JsonConvert.SerializeObject(new NotificationOrderModel() { User = App.DataProvider.User.Username, isPing = true });
byte[] data = Encoding.UTF8.GetBytes(json);
_serverSocket.BeginSend(data, 0, data.Length, SocketFlags.None, new AsyncCallback(SendCallback), _serverSocket);
}
catch (Exception)
{
Connected = false;
return false;
}
}
return false;
}
}
I start the service with this:
var intent = new Intent(this, typeof(SocketService));
StartService(intent);

Android main thread UI not responding while implementing Google Speech-to-text. How to solve?

Currently, I am implementing Google Speech-to-Text in my project. The sample code I referred to is this: Click Here.
I have used the SpeechService and VoiceRecorder classes from this project.
public class SpeechService extends Service {
public static final List<String> SCOPE =
Collections.singletonList("https://www.googleapis.com/auth/cloud-platform");
private static final String TAG = "SpeechService";
private static final String PREFS = "SpeechService";
private static final String PREF_ACCESS_TOKEN_VALUE = "access_token_value";
private static final String PREF_ACCESS_TOKEN_EXPIRATION_TIME = "access_token_expiration_time";
/**
* We reuse an access token if its expiration time is longer than this.
*/
private static final int ACCESS_TOKEN_EXPIRATION_TOLERANCE = 30 * 60 * 1000; // thirty minutes
/**
* We refresh the current access token before it expires.
*/
private static final int ACCESS_TOKEN_FETCH_MARGIN = 60 * 1000; // one minute
private static final String HOSTNAME = "speech.googleapis.com";
private static final int PORT = 443;
private static Handler mHandler;
private final SpeechBinder mBinder = new SpeechBinder();
private final ArrayList<Listener> mListeners = new ArrayList<>();
private final StreamObserver<StreamingRecognizeResponse> mResponseObserver
= new StreamObserver<StreamingRecognizeResponse>() {
@Override
public void onNext(StreamingRecognizeResponse response) {
Log.e("Speech", "Recognized");
String text = null;
boolean isFinal = false;
if (response.getResultsCount() > 0) {
System.out.println("result count....."+String.valueOf(response.getResultsCount()));
final StreamingRecognitionResult result = response.getResults(0);
isFinal = result.getIsFinal();
if (result.getAlternativesCount() > 0) {
final SpeechRecognitionAlternative alternative = result.getAlternatives(0);
text = alternative.getTranscript();
}
}
if (text != null && isFinal) {
for (Listener listener : mListeners) {
listener.onSpeechRecognized(text, isFinal);
}
} else {
for (Listener listener : mListeners) {
listener.onRandomStupidity();
}
}
}
@Override
public void onError(Throwable t) {
Log.e(TAG, "Error calling the API.", t);
for(Listener listener : mListeners){
listener.onErrorRecognizing();
}
}
@Override
public void onCompleted() {
Log.i(TAG, "API completed.");
}
};
private volatile AccessTokenTask mAccessTokenTask;
private final Runnable mFetchAccessTokenRunnable = new Runnable() {
@Override
public void run() {
fetchAccessToken();
}
};
private SpeechGrpc.SpeechStub mApi;
private StreamObserver<StreamingRecognizeRequest> mRequestObserver;
public static SpeechService from(IBinder binder) {
return ((SpeechBinder) binder).getService();
}
@Override
public void onCreate() {
super.onCreate();
mHandler = new Handler();
fetchAccessToken();
}
@Override
public void onDestroy() {
super.onDestroy();
mHandler.removeCallbacks(mFetchAccessTokenRunnable);
mHandler = null;
// Release the gRPC channel.
if (mApi != null) {
final ManagedChannel channel = (ManagedChannel) mApi.getChannel();
if (channel != null && !channel.isShutdown()) {
try {
channel.shutdown().awaitTermination(5, TimeUnit.SECONDS);
} catch (InterruptedException e) {
Log.e(TAG, "Error shutting down the gRPC channel.", e);
}
}
mApi = null;
}
}
private void fetchAccessToken() {
if (mAccessTokenTask != null) {
return;
}
mAccessTokenTask = new AccessTokenTask();
mAccessTokenTask.execute();
}
private String getDefaultLanguageCode() {
final LangInnerResponse languageToLearn = MemoryCache.getLanguageToLearn();
if(languageToLearn != null) {
Log.e("Test Lang", languageToLearn.getCode());
return languageToLearn.getCode();
} else {
final Locale locale = Locale.getDefault();
final StringBuilder language = new StringBuilder(locale.getLanguage());
final String country = locale.getCountry();
if (!TextUtils.isEmpty(country)) {
language.append("-");
language.append(country);
}
return language.toString();
}
}
@Nullable
@Override
public IBinder onBind(Intent intent) {
return mBinder;
}
public void addListener(@NonNull Listener listener) {
mListeners.add(listener);
}
public void removeListener(@NonNull Listener listener) {
mListeners.remove(listener);
}
/**
* Starts recognizing speech audio.
*
* @param sampleRate The sample rate of the audio.
*/
public void startRecognizing(int sampleRate) {
if (mApi == null) {
Log.w(TAG, "API not ready. Ignoring the request.");
return;
}
System.out.println("calling api....");
// Configure the API
mRequestObserver = mApi.streamingRecognize(mResponseObserver);
mRequestObserver.onNext(StreamingRecognizeRequest.newBuilder()
.setStreamingConfig(StreamingRecognitionConfig.newBuilder()
.setConfig(RecognitionConfig.newBuilder()
.setLanguageCode(getDefaultLanguageCode())
.setEncoding(RecognitionConfig.AudioEncoding.LINEAR16)
.setSampleRateHertz(sampleRate)
.build())
.setInterimResults(true)
.setSingleUtterance(true)
.build())
.build());
}
/**
* Recognizes the speech audio. This method should be called every time a chunk of byte buffer
* is ready.
*
* @param data The audio data.
* @param size The number of elements that are actually relevant in the {@code data}.
*/
public void recognize(byte[] data, int size) {
if (mRequestObserver == null) {
return;
}
// Call the streaming recognition API
mRequestObserver.onNext(StreamingRecognizeRequest.newBuilder()
.setAudioContent(ByteString.copyFrom(data, 0, size))
.build());
}
/**
* Finishes recognizing speech audio.
*/
public void finishRecognizing() {
if (mRequestObserver == null) {
return;
}
mRequestObserver.onCompleted();
mRequestObserver = null;
}
public interface Listener {
/**
* Called when a new piece of text was recognized by the Speech API.
*
* @param text The text.
* @param isFinal {@code true} when the API finished processing audio.
*/
void onSpeechRecognized(String text, boolean isFinal);
void onErrorRecognizing();
void onRandomStupidity();
}
/**
* Authenticates the gRPC channel using the specified {@link GoogleCredentials}.
*/
private static class GoogleCredentialsInterceptor implements ClientInterceptor {
private final Credentials mCredentials;
private Metadata mCached;
private Map<String, List<String>> mLastMetadata;
GoogleCredentialsInterceptor(Credentials credentials) {
mCredentials = credentials;
}
private static Metadata toHeaders(Map<String, List<String>> metadata) {
Metadata headers = new Metadata();
if (metadata != null) {
for (String key : metadata.keySet()) {
Metadata.Key<String> headerKey = Metadata.Key.of(
key, Metadata.ASCII_STRING_MARSHALLER);
for (String value : metadata.get(key)) {
headers.put(headerKey, value);
}
}
}
return headers;
}
@Override
public <ReqT, RespT> ClientCall<ReqT, RespT> interceptCall(
final MethodDescriptor<ReqT, RespT> method, CallOptions callOptions,
final Channel next) {
return new ClientInterceptors.CheckedForwardingClientCall<ReqT, RespT>(
next.newCall(method, callOptions)) {
@Override
protected void checkedStart(Listener<RespT> responseListener, Metadata headers)
throws StatusException {
Metadata cachedSaved;
URI uri = serviceUri(next, method);
synchronized (this) {
Map<String, List<String>> latestMetadata = getRequestMetadata(uri);
if (mLastMetadata == null || mLastMetadata != latestMetadata) {
mLastMetadata = latestMetadata;
mCached = toHeaders(mLastMetadata);
}
cachedSaved = mCached;
}
headers.merge(cachedSaved);
delegate().start(responseListener, headers);
}
};
}
/**
* Generate a JWT-specific service URI. The URI is simply an identifier with enough
* information for a service to know that the JWT was intended for it. The URI will
* commonly be verified with a simple string equality check.
*/
private URI serviceUri(Channel channel, MethodDescriptor<?, ?> method)
throws StatusException {
String authority = channel.authority();
if (authority == null) {
throw Status.UNAUTHENTICATED
.withDescription("Channel has no authority")
.asException();
}
// Always use HTTPS, by definition.
final String scheme = "https";
final int defaultPort = 443;
String path = "/" + MethodDescriptor.extractFullServiceName(method.getFullMethodName());
URI uri;
try {
uri = new URI(scheme, authority, path, null, null);
} catch (URISyntaxException e) {
throw Status.UNAUTHENTICATED
.withDescription("Unable to construct service URI for auth")
.withCause(e).asException();
}
// The default port must not be present. Alternative ports should be present.
if (uri.getPort() == defaultPort) {
uri = removePort(uri);
}
return uri;
}
private URI removePort(URI uri) throws StatusException {
try {
return new URI(uri.getScheme(), uri.getUserInfo(), uri.getHost(), -1 /* port */,
uri.getPath(), uri.getQuery(), uri.getFragment());
} catch (URISyntaxException e) {
throw Status.UNAUTHENTICATED
.withDescription("Unable to construct service URI after removing port")
.withCause(e).asException();
}
}
private Map<String, List<String>> getRequestMetadata(URI uri) throws StatusException {
try {
return mCredentials.getRequestMetadata(uri);
} catch (IOException e) {
throw Status.UNAUTHENTICATED.withCause(e).asException();
}
}
}
private class SpeechBinder extends Binder {
SpeechService getService() {
return SpeechService.this;
}
}
private class CreateApiSingle implements SingleOnSubscribe<SpeechGrpc.SpeechStub> {
@Override
public void subscribe(SingleEmitter<SpeechGrpc.SpeechStub> emitter) throws Exception {
final AccessToken accessToken = generateCredentials();
final SpeechGrpc.SpeechStub api = generateApi(accessToken);
emitter.onSuccess(api);
}
private AccessToken generateCredentials() throws IOException {
final SharedPreferences prefs =
getSharedPreferences(PREFS, Context.MODE_PRIVATE);
String tokenValue = prefs.getString(PREF_ACCESS_TOKEN_VALUE, null);
long expirationTime = prefs.getLong(PREF_ACCESS_TOKEN_EXPIRATION_TIME, -1);
// Check if the current token is still valid for a while
if (tokenValue != null && expirationTime > 0) {
if (expirationTime
> System.currentTimeMillis() + ACCESS_TOKEN_EXPIRATION_TOLERANCE) {
return new AccessToken(tokenValue, new Date(expirationTime));
}
}
// ***** WARNING *****
// In this sample, we load the credential from a JSON file stored in a raw resource
// folder of this client app. You should never do this in your app. Instead, store
// the file in your server and obtain an access token from there.
// *******************
final InputStream stream = getResources().openRawResource(R.raw.credential);
final GoogleCredentials credentials = GoogleCredentials.fromStream(stream)
.createScoped(SCOPE);
final AccessToken token = credentials.refreshAccessToken();
prefs.edit()
.putString(PREF_ACCESS_TOKEN_VALUE, token.getTokenValue())
.putLong(PREF_ACCESS_TOKEN_EXPIRATION_TIME,
token.getExpirationTime().getTime())
.apply();
stream.close();
return token;
}
private SpeechGrpc.SpeechStub generateApi(AccessToken accessToken) {
final ManagedChannel channel = new OkHttpChannelProvider()
.builderForAddress(HOSTNAME, PORT)
.nameResolverFactory(new DnsNameResolverProvider())
.intercept(new GoogleCredentialsInterceptor(new GoogleCredentials(accessToken)
.createScoped(SCOPE)))
.build();
return SpeechGrpc.newStub(channel);
}
}
private class AccessTokenTask extends AsyncTask<Void, Void, AccessToken> {
@Override
protected AccessToken doInBackground(Void... voids) {
final SharedPreferences prefs =
getSharedPreferences(PREFS, Context.MODE_PRIVATE);
String tokenValue = prefs.getString(PREF_ACCESS_TOKEN_VALUE, null);
long expirationTime = prefs.getLong(PREF_ACCESS_TOKEN_EXPIRATION_TIME, -1);
// Check if the current token is still valid for a while
if (tokenValue != null && expirationTime > 0) {
if (expirationTime
> System.currentTimeMillis() + ACCESS_TOKEN_EXPIRATION_TOLERANCE) {
return new AccessToken(tokenValue, new Date(expirationTime));
}
}
// ***** WARNING *****
// In this sample, we load the credential from a JSON file stored in a raw resource
// folder of this client app. You should never do this in your app. Instead, store
// the file in your server and obtain an access token from there.
// *******************
final InputStream stream = getResources().openRawResource(R.raw.credential);
try {
final GoogleCredentials credentials = GoogleCredentials.fromStream(stream)
.createScoped(SCOPE);
final AccessToken token = credentials.refreshAccessToken();
prefs.edit()
.putString(PREF_ACCESS_TOKEN_VALUE, token.getTokenValue())
.putLong(PREF_ACCESS_TOKEN_EXPIRATION_TIME,
token.getExpirationTime().getTime())
.apply();
return token;
} catch (IOException e) {
Log.e(TAG, "Failed to obtain access token.", e);
}
return null;
}
@Override
protected void onPostExecute(AccessToken accessToken) {
mAccessTokenTask = null;
final ManagedChannel channel = new OkHttpChannelProvider()
.builderForAddress(HOSTNAME, PORT)
.nameResolverFactory(new DnsNameResolverProvider())
.intercept(new GoogleCredentialsInterceptor(new GoogleCredentials(accessToken)
.createScoped(SCOPE)))
.build();
mApi = SpeechGrpc.newStub(channel);
// Schedule access token refresh before it expires
if (mHandler != null) {
mHandler.postDelayed(mFetchAccessTokenRunnable,
Math.max(accessToken.getExpirationTime().getTime()
- System.currentTimeMillis()
- ACCESS_TOKEN_FETCH_MARGIN, ACCESS_TOKEN_EXPIRATION_TOLERANCE));
}
}
}}
public class VoiceRecorder {
private static final int[] SAMPLE_RATE_CANDIDATES = new int[]{48000, 44100};
private static final int CHANNEL = AudioFormat.CHANNEL_IN_MONO;
private static final int ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private static final int AMPLITUDE_THRESHOLD = 1500;
private static final int SPEECH_TIMEOUT_MILLIS = 2000;
private static final int MAX_SPEECH_LENGTH_MILLIS = 30 * 1000;
public static abstract class Callback {
/**
* Called when the recorder starts hearing voice.
*/
public void onVoiceStart() {
}
/**
* Called when the recorder is hearing voice.
*
* @param data The audio data in {@link AudioFormat#ENCODING_PCM_16BIT}.
* @param size The size of the actual data in {@code data}.
*/
public void onVoice(byte[] data, int size) {
}
/**
* Called when the recorder stops hearing voice.
*/
public void onVoiceEnd() {
}
}
private final Callback mCallback;
private AudioRecord mAudioRecord;
private Thread mThread;
private byte[] mBuffer;
private final Object mLock = new Object();
/** The timestamp of the last time that voice is heard. */
private long mLastVoiceHeardMillis = Long.MAX_VALUE;
/** The timestamp when the current voice is started. */
private long mVoiceStartedMillis;
public VoiceRecorder(@NonNull Callback callback) {
mCallback = callback;
}
/**
* Starts recording audio.
*
* <p>The caller is responsible for calling {@link #stop()} later.</p>
*/
public void start() {
// Stop recording if it is currently ongoing.
stop();
// Try to create a new recording session.
mAudioRecord = createAudioRecord();
if (mAudioRecord == null) {
throw new RuntimeException("Cannot instantiate VoiceRecorder");
}
// Start recording.
mAudioRecord.startRecording();
// Start processing the captured audio.
mThread = new Thread(new ProcessVoice());
mThread.start();
}
/**
* Stops recording audio.
*/
public void stop() {
synchronized (mLock) {
System.out.println("stop audio record....");
dismiss();
if (mThread != null) {
mThread.interrupt();
mThread = null;
}
if (mAudioRecord != null) {
mAudioRecord.stop();
mAudioRecord.release();
mAudioRecord = null;
}
mBuffer = null;
System.out.println("stop audio record....2");
}
}
/**
* Dismisses the currently ongoing utterance.
*/
public void dismiss() {
if (mLastVoiceHeardMillis != Long.MAX_VALUE) {
mLastVoiceHeardMillis = Long.MAX_VALUE;
mCallback.onVoiceEnd();
}
}
/**
* Retrieves the sample rate currently used to record audio.
*
* @return The sample rate of recorded audio.
*/
public int getSampleRate() {
if (mAudioRecord != null) {
return mAudioRecord.getSampleRate();
}
return 0;
}
/**
* Creates a new {@link AudioRecord}.
*
* @return A newly created {@link AudioRecord}, or null if it cannot be created (missing
* permissions?).
*/
private AudioRecord createAudioRecord() {
for (int sampleRate : SAMPLE_RATE_CANDIDATES) {
final int sizeInBytes = AudioRecord.getMinBufferSize(sampleRate, CHANNEL, ENCODING);
if (sizeInBytes == AudioRecord.ERROR_BAD_VALUE) {
continue;
}
final AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
sampleRate, CHANNEL, ENCODING, sizeInBytes);
if (audioRecord.getState() == AudioRecord.STATE_INITIALIZED) {
mBuffer = new byte[sizeInBytes];
return audioRecord;
} else {
audioRecord.release();
}
}
return null;
}
/**
* Continuously processes the captured audio and notifies {@link #mCallback} of corresponding
* events.
*/
private class ProcessVoice implements Runnable {
@Override
public void run() {
while (true) {
synchronized (mLock) {
if (Thread.currentThread().isInterrupted()) {
break;
}
final int size = mAudioRecord.read(mBuffer, 0, mBuffer.length);
final long now = System.currentTimeMillis();
if (isHearingVoice(mBuffer, size)) {
if (mLastVoiceHeardMillis == Long.MAX_VALUE) {
mVoiceStartedMillis = now;
mCallback.onVoiceStart();
}
mCallback.onVoice(mBuffer, size);
mLastVoiceHeardMillis = now;
if (now - mVoiceStartedMillis > MAX_SPEECH_LENGTH_MILLIS) {
end();
}
} else if (mLastVoiceHeardMillis != Long.MAX_VALUE) {
mCallback.onVoice(mBuffer, size);
if (now - mLastVoiceHeardMillis > SPEECH_TIMEOUT_MILLIS) {
end();
}
}
}
}
}
private void end() {
mLastVoiceHeardMillis = Long.MAX_VALUE;
mCallback.onVoiceEnd();
System.out.println("end...");
}
private boolean isHearingVoice(byte[] buffer, int size) {
for (int i = 0; i < size - 1; i += 2) {
// The buffer has LINEAR16 in little endian.
int s = buffer[i + 1];
if (s < 0) s *= -1;
s <<= 8;
s += Math.abs(buffer[i]);
if (s > AMPLITUDE_THRESHOLD) {
return true;
}
}
return false;
}
}}
Then I implemented the Speech Service & Voice Recorder callback as follows:
private VoiceRecorder voiceRecorder;
private final SpeechService.Listener speechServiceListener = new SpeechService.Listener() {
@Override
public void onSpeechRecognized(final String text, final boolean isFinal) {
if (isFinal) {
System.out.println("ui thread...");
if (!TextUtils.isEmpty(text)) {
runOnUiThread(() -> {
showMessage(text);
flingAnswer(text);
});
}
}
}
@Override
public void onErrorRecognizing() {
showMessage("Please try again. Could not detect.");
}
@Override
public void onRandomStupidity() {
}
};
private SpeechService speechService;
private final VoiceRecorder.Callback voiceCallback = new VoiceRecorder.Callback() {
@Override
public void onVoiceStart() {
if (speechService != null) {
System.out.println("voice start....");
speechService.startRecognizing(voiceRecorder.getSampleRate());
}
}
@Override
public void onVoice(byte[] data, int size) {
if (speechService != null) {
speechService.recognize(data, size);
}
}
@Override
public void onVoiceEnd() {
if (speechService != null) {
speechService.finishRecognizing();
}
}
};
private final ServiceConnection serviceConnection = new ServiceConnection() {
@Override
public void onServiceConnected(ComponentName componentName, IBinder binder) {
speechService = SpeechService.from(binder);
speechService.addListener(speechServiceListener);
}
@Override
public void onServiceDisconnected(ComponentName componentName) {
speechService = null;
}
};
For voice input this is the code:
@Override
public void stopRecognizing() {
stopVoiceRecorder();
Log.e("Recording", "Stopped");
}
@Override
public void startRecognizing() {
if (permissionManager != null && permissionManager.askForPermissions()) {
startVoiceRecorder();
vibrate.vibrate(50);//Providing haptic feedback to user on press.
}
Log.e("Recording", "Started");
}
binding.imgVoice.setOnTouchListener((v, event) -> {
switch (event.getAction()) {
case MotionEvent.ACTION_UP:
System.out.println("up...");
mCallback.stopRecognizing();
binding.imgVoice
.animate()
.scaleX(1.0f)
.scaleY(1.0f);
binding.imgVoice.setVisibility(View.GONE);
binding.progressBar.setVisibility(View.VISIBLE);
break;
case MotionEvent.ACTION_DOWN:
System.out.println("down...");
binding.imgVoice
.animate()
.scaleX(1.8f)
.scaleY(1.8f);
mCallback.startRecognizing();
break;
}
return true;
});
}
When I press the mic, the event is registered as ACTION_DOWN and I start the voice recorder; on releasing the mic, the voice recorder is stopped. Also, with ACTION_DOWN I scale up the mic icon, which needs to be scaled back down on ACTION_UP. But the UI as a whole freezes most of the time. I find that the onNext() callback of the StreamObserver is continuously invoked before isFinal becomes true.
private void startVoiceRecorder() {
if (voiceRecorder != null) {
voiceRecorder.stop();
}
voiceRecorder = new VoiceRecorder(voiceCallback);
voiceRecorder.start();
}
private void stopVoiceRecorder() {
if (voiceRecorder != null) {
voiceRecorder.stop();
voiceRecorder = null;
}
}
But I want the mic icon to scale down as soon as I release it (on the ACTION_UP event), which is not happening.
Can anyone help me with this?
Thanks in advance.
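One thing worth checking, as a sketch rather than a confirmed fix: every SpeechService.Listener callback arrives on a gRPC transport thread, so anything that touches views must be posted to the main thread. In the listener above, onErrorRecognizing() calls showMessage() directly from that background thread while onSpeechRecognized() uses runOnUiThread(); making them consistent with a single main-thread Handler could look like this (showMessage() and flingAnswer() are the question's own helpers):
import android.os.Handler;
import android.os.Looper;
import android.text.TextUtils;

//one handler for all UI work triggered by recognition callbacks
private final Handler mainHandler = new Handler(Looper.getMainLooper());

private final SpeechService.Listener speechServiceListener = new SpeechService.Listener() {
    @Override
    public void onSpeechRecognized(final String text, final boolean isFinal) {
        if (isFinal && !TextUtils.isEmpty(text)) {
            mainHandler.post(() -> {
                showMessage(text);
                flingAnswer(text);
            });
        }
    }

    @Override
    public void onErrorRecognizing() {
        //previously this touched the UI straight from the gRPC thread
        mainHandler.post(() -> showMessage("Please try again. Could not detect."));
    }

    @Override
    public void onRandomStupidity() {
    }
};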

waitingInMainSignalCatcherLoop,Thread*=0x72c22ee000,peer=0x12d00280,"Signal Catcher"]: reacting to signal 3

Good day everyone. I would like to ask: what is the cause of this ANR? In my project I have a service which is bound to an activity. When I exit that activity, the app hangs for a moment. My thought is that the service is still running even though I unbind it in onStop() of the activity.
Here is my service class
public class SpeechService extends Service {
public interface Listener {
/**
* Called when a new piece of text was recognized by the Speech API.
*
* @param text The text.
* @param isFinal {@code true} when the API finished processing audio.
*/
void onSpeechRecognized(String text, boolean isFinal);
}
private static final String TAG = "SpeechService";
private static final String PREFS = "SpeechService";
private static final String PREF_ACCESS_TOKEN_VALUE = "access_token_value";
private static final String PREF_ACCESS_TOKEN_EXPIRATION_TIME = "access_token_expiration_time";
/** We reuse an access token if its expiration time is longer than this. */
private static final int ACCESS_TOKEN_EXPIRATION_TOLERANCE = 30 * 60 * 1000; // thirty minutes
/** We refresh the current access token before it expires. */
private static final int ACCESS_TOKEN_FETCH_MARGIN = 60 * 1000; // one minute
public static final List<String> SCOPE =
Collections.singletonList("https://www.googleapis.com/auth/cloud-platform");
private static final String HOSTNAME = "speech.googleapis.com";
private static final int PORT = 443;
private final SpeechBinder mBinder = new SpeechBinder();
private final ArrayList<Listener> mListeners = new ArrayList<>();
private volatile AccessTokenTask mAccessTokenTask;
private SpeechGrpc.SpeechStub mApi;
private static Handler mHandler;
private final StreamObserver<StreamingRecognizeResponse> mResponseObserver
= new StreamObserver<StreamingRecognizeResponse>() {
@Override
public void onNext(StreamingRecognizeResponse response) {
String text = null;
boolean isFinal = false;
if (response.getResultsCount() > 0) {
final StreamingRecognitionResult result = response.getResults(0);
isFinal = result.getIsFinal();
if (result.getAlternativesCount() > 0) {
final SpeechRecognitionAlternative alternative = result.getAlternatives(0);
text = alternative.getTranscript();
}
}
if (text != null) {
for (Listener listener : mListeners) {
listener.onSpeechRecognized(text, isFinal);
}
}
}
@Override
public void onError(Throwable t) {
Log.e(TAG, "Error calling the API.", t);
}
@Override
public void onCompleted() {
Log.i(TAG, "API completed.");
}
};
private final StreamObserver<RecognizeResponse> mFileResponseObserver
= new StreamObserver<RecognizeResponse>() {
@Override
public void onNext(RecognizeResponse response) {
String text = null;
if (response.getResultsCount() > 0) {
final SpeechRecognitionResult result = response.getResults(0);
if (result.getAlternativesCount() > 0) {
final SpeechRecognitionAlternative alternative = result.getAlternatives(0);
text = alternative.getTranscript();
}
}
if (text != null) {
for (Listener listener : mListeners) {
listener.onSpeechRecognized(text, true);
}
}
}
@Override
public void onError(Throwable t) {
Log.e(TAG, "Error calling the API.", t);
}
@Override
public void onCompleted() {
Log.i(TAG, "API completed.");
}
};
private StreamObserver<StreamingRecognizeRequest> mRequestObserver;
public static SpeechService from(IBinder binder) {
return ((SpeechBinder) binder).getService();
}
@Override
public void onCreate() {
super.onCreate();
mHandler = new Handler();
fetchAccessToken();
}
@Override
public void onDestroy() {
super.onDestroy();
mHandler.removeCallbacks(mFetchAccessTokenRunnable);
mHandler = null;
// Release the gRPC channel.
if (mApi != null) {
final ManagedChannel channel = (ManagedChannel) mApi.getChannel();
if (channel != null && !channel.isShutdown()) {
try {
channel.shutdown().awaitTermination(1, TimeUnit.SECONDS);
} catch (InterruptedException e) {
Log.e(TAG, "Error shutting down the gRPC channel.", e);
}
}
mApi = null;
}
}
private void fetchAccessToken() {
if (mAccessTokenTask != null) {
return;
}
mAccessTokenTask = new AccessTokenTask();
mAccessTokenTask.execute();
}
private String getDefaultLanguageCode() {
final Locale locale = Locale.getDefault();
final StringBuilder language = new StringBuilder(locale.getLanguage());
final String country = locale.getCountry();
if (!TextUtils.isEmpty(country)) {
language.append("-");
language.append(country);
}
return language.toString();
}
@Nullable
@Override
public IBinder onBind(Intent intent) {
return mBinder;
}
public void addListener(@NonNull Listener listener) {
mListeners.add(listener);
}
public void removeListener(@NonNull Listener listener) {
mListeners.remove(listener);
}
/**
* Starts recognizing speech audio.
*
* @param sampleRate The sample rate of the audio.
*/
public void startRecognizing(int sampleRate) {
if (mApi == null) {
Log.w(TAG, "API not ready. Ignoring the request.");
return;
}
// Configure the API
mRequestObserver = mApi.streamingRecognize(mResponseObserver);
mRequestObserver.onNext(StreamingRecognizeRequest.newBuilder()
.setStreamingConfig(StreamingRecognitionConfig.newBuilder()
.setConfig(RecognitionConfig.newBuilder()
.setLanguageCode(getDefaultLanguageCode())
.setEncoding(RecognitionConfig.AudioEncoding.LINEAR16)
.setSampleRateHertz(sampleRate)
.build())
.setInterimResults(true)
.setSingleUtterance(true)
.build())
.build());
}
/**
* Recognizes the speech audio. This method should be called every time a chunk of byte buffer
* is ready.
*
* @param data The audio data.
* @param size The number of elements that are actually relevant in the {@code data}.
*/
public void recognize(byte[] data, int size) {
if (mRequestObserver == null) {
return;
}
// Call the streaming recognition API
mRequestObserver.onNext(StreamingRecognizeRequest.newBuilder()
.setAudioContent(ByteString.copyFrom(data, 0, size))
.build());
}
/**
* Finishes recognizing speech audio.
*/
public void finishRecognizing() {
if (mRequestObserver == null) {
return;
}
mRequestObserver.onCompleted();
mRequestObserver = null;
}
/**
* Recognize all data from the specified {@link InputStream}.
*
* @param stream The audio data.
*/
public void recognizeInputStream(InputStream stream) {
try {
mApi.recognize(
RecognizeRequest.newBuilder()
.setConfig(RecognitionConfig.newBuilder()
.setEncoding(RecognitionConfig.AudioEncoding.LINEAR16)
.setLanguageCode("en-US")
.setSampleRateHertz(16000)
.build())
.setAudio(RecognitionAudio.newBuilder()
.setContent(ByteString.readFrom(stream))
.build())
.build(),
mFileResponseObserver);
} catch (IOException e) {
Log.e(TAG, "Error loading the input", e);
}
}
private class SpeechBinder extends Binder {
SpeechService getService() {
return SpeechService.this;
}
}
private final Runnable mFetchAccessTokenRunnable = new Runnable() {
@Override
public void run() {
fetchAccessToken();
}
};
private class AccessTokenTask extends AsyncTask<Void, Void, AccessToken> {
@Override
protected AccessToken doInBackground(Void... voids) {
final SharedPreferences prefs =
getSharedPreferences(PREFS, Context.MODE_PRIVATE);
String tokenValue = prefs.getString(PREF_ACCESS_TOKEN_VALUE, null);
long expirationTime = prefs.getLong(PREF_ACCESS_TOKEN_EXPIRATION_TIME, -1);
// Check if the current token is still valid for a while
if (tokenValue != null && expirationTime > 0) {
if (expirationTime
> System.currentTimeMillis() + ACCESS_TOKEN_EXPIRATION_TOLERANCE) {
return new AccessToken(tokenValue, new Date(expirationTime));
}
}
// ***** WARNING *****
// In this sample, we load the credential from a JSON file stored in a raw resource
// folder of this client app. You should never do this in your app. Instead, store
// the file in your server and obtain an access token from there.
// *******************
final InputStream stream = getResources().openRawResource(R.raw.credential);
try {
final GoogleCredentials credentials = GoogleCredentials.fromStream(stream)
.createScoped(SCOPE);
final AccessToken token = credentials.refreshAccessToken();
prefs.edit()
.putString(PREF_ACCESS_TOKEN_VALUE, token.getTokenValue())
.putLong(PREF_ACCESS_TOKEN_EXPIRATION_TIME,
token.getExpirationTime().getTime())
.apply();
return token;
} catch (IOException e) {
Log.e(TAG, "Failed to obtain access token.", e);
}
return null;
}
@Override
protected void onPostExecute(AccessToken accessToken) {
mAccessTokenTask = null;
final ManagedChannel channel = new OkHttpChannelProvider()
.builderForAddress(HOSTNAME, PORT)
.nameResolverFactory(new DnsNameResolverProvider())
.intercept(new GoogleCredentialsInterceptor(new GoogleCredentials(accessToken)
.createScoped(SCOPE)))
.build();
mApi = SpeechGrpc.newStub(channel);
// Schedule access token refresh before it expires
if (mHandler != null) {
mHandler.postDelayed(mFetchAccessTokenRunnable,
Math.max(accessToken.getExpirationTime().getTime()
- System.currentTimeMillis()
- ACCESS_TOKEN_FETCH_MARGIN, ACCESS_TOKEN_EXPIRATION_TOLERANCE));
}
}
}
/**
* Authenticates the gRPC channel using the specified {@link GoogleCredentials}.
*/
private static class GoogleCredentialsInterceptor implements ClientInterceptor {
private final Credentials mCredentials;
private Metadata mCached;
private Map<String, List<String>> mLastMetadata;
GoogleCredentialsInterceptor(Credentials credentials) {
mCredentials = credentials;
}
@Override
public <ReqT, RespT> ClientCall<ReqT, RespT> interceptCall(
final MethodDescriptor<ReqT, RespT> method, CallOptions callOptions,
final Channel next) {
return new ClientInterceptors.CheckedForwardingClientCall<ReqT, RespT>(
next.newCall(method, callOptions)) {
@Override
protected void checkedStart(Listener<RespT> responseListener, Metadata headers)
throws StatusException {
Metadata cachedSaved;
URI uri = serviceUri(next, method);
synchronized (this) {
Map<String, List<String>> latestMetadata = getRequestMetadata(uri);
if (mLastMetadata == null || mLastMetadata != latestMetadata) {
mLastMetadata = latestMetadata;
mCached = toHeaders(mLastMetadata);
}
cachedSaved = mCached;
}
headers.merge(cachedSaved);
delegate().start(responseListener, headers);
}
};
}
/**
* Generate a JWT-specific service URI. The URI is simply an identifier with enough
* information for a service to know that the JWT was intended for it. The URI will
* commonly be verified with a simple string equality check.
*/
private URI serviceUri(Channel channel, MethodDescriptor<?, ?> method)
throws StatusException {
String authority = channel.authority();
if (authority == null) {
throw Status.UNAUTHENTICATED
.withDescription("Channel has no authority")
.asException();
}
// Always use HTTPS, by definition.
final String scheme = "https";
final int defaultPort = 443;
String path = "/" + MethodDescriptor.extractFullServiceName(method.getFullMethodName());
URI uri;
try {
uri = new URI(scheme, authority, path, null, null);
} catch (URISyntaxException e) {
throw Status.UNAUTHENTICATED
.withDescription("Unable to construct service URI for auth")
.withCause(e).asException();
}
// The default port must not be present. Alternative ports should be present.
if (uri.getPort() == defaultPort) {
uri = removePort(uri);
}
return uri;
}
private URI removePort(URI uri) throws StatusException {
try {
return new URI(uri.getScheme(), uri.getUserInfo(), uri.getHost(), -1 /* port */,
uri.getPath(), uri.getQuery(), uri.getFragment());
} catch (URISyntaxException e) {
throw Status.UNAUTHENTICATED
.withDescription("Unable to construct service URI after removing port")
.withCause(e).asException();
}
}
private Map<String, List<String>> getRequestMetadata(URI uri) throws StatusException {
try {
return mCredentials.getRequestMetadata(uri);
} catch (IOException e) {
throw Status.UNAUTHENTICATED.withCause(e).asException();
}
}
private static Metadata toHeaders(Map<String, List<String>> metadata) {
Metadata headers = new Metadata();
if (metadata != null) {
for (String key : metadata.keySet()) {
Metadata.Key<String> headerKey = Metadata.Key.of(
key, Metadata.ASCII_STRING_MARSHALLER);
for (String value : metadata.get(key)) {
headers.put(headerKey, value);
}
}
}
return headers;
}
}
}
and here is my activity class
public class MainActivity extends AppCompatActivity implements MessageDialogFragment.Listener {
private static final String FRAGMENT_MESSAGE_DIALOG = "message_dialog";
private static final String STATE_RESULTS = "results";
private static final int REQUEST_RECORD_AUDIO_PERMISSION = 1;
private SpeechService mSpeechService;
private VoiceRecorder mVoiceRecorder;
private final VoiceRecorder.Callback mVoiceCallback = new VoiceRecorder.Callback() {
@Override
public void onVoiceStart() {
showStatus(true);
if (mSpeechService != null) {
mSpeechService.startRecognizing(mVoiceRecorder.getSampleRate());
}
}
@Override
public void onVoice(byte[] data, int size) {
if (mSpeechService != null) {
mSpeechService.recognize(data, size);
}
}
@Override
public void onVoiceEnd() {
showStatus(false);
if (mSpeechService != null) {
mSpeechService.finishRecognizing();
}
}
};
// Resource caches
private int mColorHearing;
private int mColorNotHearing;
// View references
private TextView mStatus;
private TextView mText, mResult;
private Button editButton, clearButton;
private SharedPreferences settings;
private final ServiceConnection mServiceConnection = new ServiceConnection() {
@Override
public void onServiceConnected(ComponentName componentName, IBinder binder) {
mSpeechService = SpeechService.from(binder);
mSpeechService.addListener(mSpeechServiceListener);
mStatus.setVisibility(View.VISIBLE);
}
@Override
public void onServiceDisconnected(ComponentName componentName) {
mSpeechService = null;
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
getSupportActionBar().setDisplayShowHomeEnabled(true);
final Resources resources = getResources();
final Resources.Theme theme = getTheme();
mColorHearing = ResourcesCompat.getColor(resources, R.color.status_hearing, theme);
mColorNotHearing = ResourcesCompat.getColor(resources, R.color.status_not_hearing, theme);
mStatus = (TextView) findViewById(R.id.status);
mText = (TextView) findViewById(R.id.text);
mResult = (TextView) findViewById(R.id.resultText);
editButton = (Button)findViewById(R.id.button1);
clearButton = (Button)findViewById(R.id.button2);
settings = getSharedPreferences("MyPreference", Context.MODE_PRIVATE);
clearButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Sounds sounds = new Sounds(getApplicationContext());
if(settings.getBoolean("muteAble", false ) == true){
sounds.playSound();
}
mResult.setText("");
}
});
editButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Sounds sounds = new Sounds(getApplicationContext());
if(settings.getBoolean("muteAble", false ) == true){
sounds.playSound();
}
Intent editIntent = new Intent(MainActivity.this, EditorActivity.class);
String forEditText = mResult.getText().toString();
editIntent.putExtra("forEdit", forEditText);
startActivity(editIntent);
}
});
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int id = item.getItemId();
if(id == android.R.id.home){
this.finish();
}
return super.onOptionsItemSelected(item);
}
@Override
protected void onStart() {
super.onStart();
// Prepare Cloud Speech API
bindService(new Intent(this, SpeechService.class), mServiceConnection, BIND_AUTO_CREATE);
// Start listening to voices
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO)
== PackageManager.PERMISSION_GRANTED) {
startVoiceRecorder();
} else if (ActivityCompat.shouldShowRequestPermissionRationale(this,
Manifest.permission.RECORD_AUDIO)) {
showPermissionMessageDialog();
} else {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.RECORD_AUDIO},
REQUEST_RECORD_AUDIO_PERMISSION);
}
}
@Override
protected void onStop() {
// Stop listening to voice
stopVoiceRecorder();
// Stop Cloud Speech API
mSpeechService.removeListener(mSpeechServiceListener);
unbindService(mServiceConnection);
mSpeechService = null;
super.onStop();
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
@NonNull int[] grantResults) {
if (requestCode == REQUEST_RECORD_AUDIO_PERMISSION) {
if (permissions.length == 1 && grantResults.length == 1
&& grantResults[0] == PackageManager.PERMISSION_GRANTED) {
startVoiceRecorder();
} else {
showPermissionMessageDialog();
}
} else {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
}
}
private void startVoiceRecorder() {
if (mVoiceRecorder != null) {
mVoiceRecorder.stop();
}
mVoiceRecorder = new VoiceRecorder(mVoiceCallback);
mVoiceRecorder.start();
}
private void stopVoiceRecorder() {
if (mVoiceRecorder != null) {
mVoiceRecorder.stop();
mVoiceRecorder = null;
}
}
private void showPermissionMessageDialog() {
MessageDialogFragment
.newInstance(getString(R.string.permission_message))
.show(getSupportFragmentManager(), FRAGMENT_MESSAGE_DIALOG);
}
private void showStatus(final boolean hearingVoice) {
runOnUiThread(new Runnable() {
@Override
public void run() {
mStatus.setTextColor(hearingVoice ? mColorHearing : mColorNotHearing);
}
});
}
@Override
public void onMessageDialogDismissed() {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.RECORD_AUDIO},
REQUEST_RECORD_AUDIO_PERMISSION);
}
private final SpeechService.Listener mSpeechServiceListener =
new SpeechService.Listener() {
@Override
public void onSpeechRecognized(final String text, final boolean isFinal) {
if (isFinal) {
mVoiceRecorder.dismiss();
}
if (mText != null && !TextUtils.isEmpty(text)) {
runOnUiThread(new Runnable() {
@Override
public void run() {
if (isFinal) {
mText.setText(null);
mResult.append(" "+text.toString());
} else {
mText.setText(text);
}
}
});
}
}
};
}
Thanks in advance for your help.
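A hedged guess at the cause: SpeechService.onDestroy() runs on the main thread and blocks in channel.shutdown().awaitTermination(1, TimeUnit.SECONDS), and because the activity unbinds in onStop(), the service is destroyed exactly as you leave the screen; signal 3 ("Signal Catcher") is just the runtime dumping threads during the resulting ANR. A sketch of moving the channel shutdown off the main thread, using the same fields and names as the service above:
@Override
public void onDestroy() {
    super.onDestroy();
    mHandler.removeCallbacks(mFetchAccessTokenRunnable);
    mHandler = null;
    final SpeechGrpc.SpeechStub api = mApi;
    mApi = null;
    if (api != null) {
        //shut the gRPC channel down on a worker thread so the main
        //thread never blocks in awaitTermination() during unbind
        new Thread(new Runnable() {
            @Override
            public void run() {
                final ManagedChannel channel = (ManagedChannel) api.getChannel();
                if (channel != null && !channel.isShutdown()) {
                    try {
                        channel.shutdown().awaitTermination(1, TimeUnit.SECONDS);
                    } catch (InterruptedException e) {
                        Log.e(TAG, "Error shutting down the gRPC channel.", e);
                    }
                }
            }
        }).start();
    }
}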

Android ANR Multithreading

So I'm working on a project which needs to cut up a video into multiple frames, and save them as Bitmaps on the device.
I'm using FFmpegMediaMetadataRetriever.getFrameAtTime() to obtain the individual frames, which works, but is slow. To speed it up a bit I'm trying to implement multiple worker threads which go off and grab the frames, finally responding back to the UI via an anonymous callback.
I have a class MyVideoProcessor which handles the video processing, and this is called from my EditVideoActivity.
The threads start and begin processing, but shortly afterwards the EditVideoActivity dies (ANR).
From what I can see, there is nothing running on the UI thread (apart from at the very end, which I confirmed only runs once), so I'm not sure why the UI thread is being held up by the worker threads.
EDIT:
So I've switched out FFmpegMediaMetadataRetriever for the standard MediaMetadataRetriever and everything works. BUT I need to use FFmpegMediaMetadataRetriever, as the OPTION_CLOSEST in MMR doesn't work as it should.
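One hedged observation: newFixedThreadPool(availableProcessors) workers, each decoding with its own FFmpegMediaMetadataRetriever, can saturate every core, and the main thread then misses its input deadlines even though it runs no work of its own. A sketch of a gentler setup, reusing one retriever per worker thread and leaving a core free (the field names mirror the question's code, the wseemann.media import path is assumed; this is an assumption, not a verified fix):
import java.io.File;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import wseemann.media.FFmpegMediaMetadataRetriever;

//fewer, low-priority workers so the UI thread still gets CPU time
private final ExecutorService mProcessors = Executors.newFixedThreadPool(
        Math.max(1, Runtime.getRuntime().availableProcessors() - 1),
        new ThreadFactory() {
            @Override
            public Thread newThread(Runnable r) {
                Thread t = new Thread(r);
                t.setPriority(Thread.MIN_PRIORITY);
                return t;
            }
        });

//one retriever per worker thread, reused across frames instead of
//calling setDataSource()/release() once per extracted frame
private final ThreadLocal<FFmpegMediaMetadataRetriever> mRetrievers =
        new ThreadLocal<FFmpegMediaMetadataRetriever>() {
            @Override
            protected FFmpegMediaMetadataRetriever initialValue() {
                FFmpegMediaMetadataRetriever r = new FFmpegMediaMetadataRetriever();
                r.setDataSource(mContext.getExternalFilesDir(null)
                        + File.separator + mMyVideo.getVideo());
                return r;
            }
        };
Each ExtractImageExecutor.run() would then call mRetrievers.get().getFrameAtTime(...) instead of constructing and releasing its own retriever per frame.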
EditVideoActivity:
if (mBackgroundThread==null || !mBackgroundThread.isAlive()) {
mBackgroundThread = new Thread(mMyVideoProcessor);
mBackgroundThread.start();
}
MyVideoProcessor:
public class MyVideoProcessor implements Runnable {
private static final String TAG = MyVideoProcessor.class.getSimpleName();
private MyVideo mMyVideo;
private final Context mContext;
public static final int FRAME_CUT_DURATION = 200;
private int mStartFrom = 0;
private int mCurrentDuration = 0;
private int mVideoDuration = 0;
private ArrayList<OnFrameUpdateListener> listeners = new ArrayList<>();
private ExecutorService mProcessors = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
public MyVideoProcessor(Context context, MyVideo myVideo) {
mContext = context;
mMyVideo = myVideo;
}
public void setOnFrameUpdateListener(OnFrameUpdateListener listener) {
listeners.add(listener);
}
public int getCurrentDuration() {
return mCurrentDuration;
}
public void setStartFrom(int startFrom) {
mStartFrom = startFrom;
}
@Override
public void run() {
if (!mMyVideo.getProcessed()) {
FFmpegMediaMetadataRetriever retriever = new FFmpegMediaMetadataRetriever();
retriever.setDataSource(mContext.getExternalFilesDir(null) + File.separator + mMyVideo.getVideo());
String time = retriever.extractMetadata(FFmpegMediaMetadataRetriever.METADATA_KEY_DURATION);
retriever.release();
mVideoDuration = Integer.parseInt(time);
int i = 0;
if (mStartFrom > 0) {
Log.d(TAG,"Attempting restore");
i = mStartFrom+1;
}
for (; i < mVideoDuration; i += FRAME_CUT_DURATION) {
mProcessors.execute(new ExtractImageExecutor(i));
}
}
}
public class ExtractImageExecutor implements Runnable {
private int mTime;
public ExtractImageExecutor(int time) {
mTime = time;
}
@Override
public void run() {
FFmpegMediaMetadataRetriever retriever = new FFmpegMediaMetadataRetriever();
retriever.setDataSource(mContext.getExternalFilesDir(null) + File.separator + mMyVideo.getVideo());
mCurrentDuration = mTime;
long startTime = System.currentTimeMillis();
Bitmap bitmap = retriever.getFrameAtTime(mTime*1000, FFmpegMediaMetadataRetriever.OPTION_CLOSEST);
long endTime = System.currentTimeMillis();
Log.d(TAG, "Took: " + ((endTime - startTime) / 1000f));
if (bitmap != null) {
try {
int thisFrame = 0;
if (mTime>0) {
thisFrame = mTime/FRAME_CUT_DURATION;
}
//noinspection StringBufferReplaceableByString
StringBuilder frameFilename = new StringBuilder();
frameFilename.append("VIDEO_");
frameFilename.append(thisFrame).append("_");
frameFilename.append(new SimpleDateFormat("yyyyMMddHHmm", Locale.UK).format(new Date()));
frameFilename.append(".jpg");
File frameFile = new File(mContext.getExternalFilesDir(null), frameFilename.toString());
FileOutputStream fos = new FileOutputStream(frameFile);
// use JPEG so the data matches the .jpg extension (this previously wrote PNG data into a .jpg file)
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, fos);
fos.close();
mMyVideo.addFrame(thisFrame, frameFile);
/*for (OnFrameUpdateListener listener : listeners) {
listener.onFrameUpdate(mMyVideo);
}*/
} catch (FileNotFoundException e) {
Log.d(TAG, "File not found: " + e.getMessage());
} catch (IOException e) {
Log.d(TAG, "Error accessing file: " + e.getMessage());
}
}
retriever.release();
if ((mTime+FRAME_CUT_DURATION) > mVideoDuration) {
mMyVideo.setProcessed(true);
for (OnFrameUpdateListener listener : listeners) {
listener.onFrameUpdate(mMyVideo);
}
}
}
}
}
EditVideoActivity:
public class EditVideoActivity extends Activity {
private static final String TAG = EditVideoActivity.class.getSimpleName();
private ImageView mImageView;
private MyVideo mMyVideo;
private MyVideoProcessor mMyVideoProcessor;
private Thread mBackgroundThread;
private int mCurrentDuration = 0;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_edit_video);
String videoFilename = getIntent().getStringExtra("videoFilename");
if (videoFilename != null) {
mMyVideo = new MyVideo(MyVideo.TYPE_EXTERIOR,"TEST",new File(videoFilename));
mMyVideoProcessor = new MyVideoProcessor(this,mMyVideo);
} else {
Log.d(TAG, "There was a problem with the video file");
}
}
@Override
protected void onSaveInstanceState(Bundle outState) {
Log.d(TAG,"Saving Instance State");
outState.putParcelable("video", mMyVideo);
outState.putInt("currentDuration", mMyVideoProcessor.getCurrentDuration());
super.onSaveInstanceState(outState);
}
@Override
protected void onRestoreInstanceState(Bundle savedInstanceState) {
Log.d(TAG,"Restoring Instance State");
super.onRestoreInstanceState(savedInstanceState);
mMyVideo = (MyVideo) savedInstanceState.getParcelable("video");
mCurrentDuration = savedInstanceState.getInt("currentDuration");
}
@Override
protected void onResume() {
super.onResume();
mMyVideoProcessor = new MyVideoProcessor(this,mMyVideo);
final TextView totalFrames = (TextView) findViewById(R.id.totalFrames);
mImageView = (ImageView) findViewById(R.id.imageView2);
final SeekBar seekBar = (SeekBar) findViewById(R.id.seekBar);
final ProgressBar progressBar = (ProgressBar) findViewById(R.id.progressBar);
progressBar.animate();
seekBar.setEnabled(false);
OnFrameUpdateListener onFrameUpdateListener = new OnFrameUpdateListener() {
@Override
public void onFrameUpdate(final MyVideo myVideo) {
if (myVideo.getProcessed()) {
File lastFrame = myVideo.getLastFrame();
totalFrames.setText(String.valueOf(myVideo.getTotalFrames()));
mImageView.setImageBitmap(BitmapFactory.decodeFile(lastFrame.getAbsolutePath()));
seekBar.setEnabled(true);
progressBar.setVisibility(View.GONE);
}
}
};
mMyVideoProcessor.setOnFrameUpdateListener(onFrameUpdateListener);
if (mBackgroundThread==null || !mBackgroundThread.isAlive()) {
mBackgroundThread = new Thread(mMyVideoProcessor);
mBackgroundThread.start();
}
}
}
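One thing worth checking, offered as a sketch rather than a confirmed fix: every ExtractImageExecutor does heavy native FFmpeg decoding, and a fixed pool sized to availableProcessors() running at default priority can saturate all cores, so the main thread misses its input deadlines and the system reports an ANR. Demoting the workers to background priority (and leaving a core free) is a common mitigation. BackgroundPools and newBackgroundPool below are hypothetical names for this example; only the standard Executors and android.os.Process APIs are assumed:
// Sketch only: a thread pool whose workers run at Android background
// priority, so native frame extraction cannot starve the UI thread.
// "BackgroundPools" and "newBackgroundPool" are made-up names for this
// example; the Executors and android.os.Process calls are standard.
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;

public final class BackgroundPools {
    public static ExecutorService newBackgroundPool(int nThreads) {
        ThreadFactory factory = new ThreadFactory() {
            @Override
            public Thread newThread(final Runnable r) {
                return new Thread(new Runnable() {
                    @Override
                    public void run() {
                        // Demote this worker before it picks up any tasks.
                        android.os.Process.setThreadPriority(
                                android.os.Process.THREAD_PRIORITY_BACKGROUND);
                        r.run();
                    }
                });
            }
        };
        return Executors.newFixedThreadPool(nThreads, factory);
    }
}
In MyVideoProcessor you would then size the pool to leave a core free, e.g. mProcessors = BackgroundPools.newBackgroundPool(Math.max(1, Runtime.getRuntime().availableProcessors() - 1));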

enable auto-reboot in android on panic

I am working on a Brix Android x86 system. After inserting a module (I introduced memory-corrupting code in the module to cause a panic), it panics, but the system does not auto-reboot; it just hangs. Could you please tell me the steps for making Android auto-reboot after a panic?
Thanks and regards,
Pankaj
Depending on your requirements, you can put these pieces of code wherever you need them.
You can do this in either of these two scenarios:
Your application must be signed as a system application
// must be a system app
void reboot(Context context) {
PowerManager pm = (PowerManager) context
.getSystemService(Context.POWER_SERVICE);
pm.reboot(null);
}
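Note that PowerManager.reboot() is guarded by the android.permission.REBOOT permission, which is a signature-level permission reserved for system apps, so declaring it in the manifest only works if the app is signed with the platform key (or installed as a privileged/system app).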
Your device must be rooted
if (ShellInterface.isSuAvailable()) {
// isSuAvailable() already selects the su shell, so commands run as root
ShellInterface.runCommand("reboot");
}
ShellInterface.class
public class ShellInterface {
private static final String TAG = "ShellInterface";
private static String shell;
// uid=0(root) gid=0(root)
private static final Pattern UID_PATTERN = Pattern
.compile("^uid=(\\d+).*?");
enum OUTPUT {
STDOUT, STDERR, BOTH
}
private static final String EXIT = "exit\n";
private static final String[] SU_COMMANDS = new String[] { "su",
"/system/xbin/su", "/system/bin/su" };
private static final String[] TEST_COMMANDS = new String[] { "id",
"/system/xbin/id", "/system/bin/id" };
public static synchronized boolean isSuAvailable() {
if (shell == null) {
checkSu();
}
return shell != null;
}
public static synchronized void setShell(String shell) {
ShellInterface.shell = shell;
}
private static boolean checkSu() {
for (String command : SU_COMMANDS) {
shell = command;
if (isRootUid())
return true;
}
shell = null;
return false;
}
private static boolean isRootUid() {
String out = null;
for (String command : TEST_COMMANDS) {
out = getProcessOutput(command);
if (out != null && out.length() > 0)
break;
}
if (out == null || out.length() == 0)
return false;
Matcher matcher = UID_PATTERN.matcher(out);
if (matcher.matches()) {
if ("0".equals(matcher.group(1))) {
return true;
}
}
return false;
}
public static String getProcessOutput(String command) {
try {
return _runCommand(command, OUTPUT.STDOUT); // capture the command's stdout
} catch (IOException ignored) {
return null;
}
}
public static boolean runCommand(String command) {
try {
_runCommand(command, OUTPUT.BOTH);
return true;
} catch (IOException ignored) {
return false;
}
}
private static String _runCommand(String command, OUTPUT o)
throws IOException {
DataOutputStream os = null;
Process process = null;
try {
process = Runtime.getRuntime().exec(shell);
os = new DataOutputStream(process.getOutputStream());
InputStreamHandler sh = sinkProcessOutput(process, o);
os.writeBytes(command + '\n');
os.flush();
os.writeBytes(EXIT);
os.flush();
process.waitFor();
if (sh != null) {
String output = sh.getOutput();
Log.d(TAG, command + " output: " + output);
return output;
} else {
return null;
}
} catch (Exception e) {
final String msg = e.getMessage();
Log.e(TAG, "runCommand error: " + msg);
throw new IOException(msg);
} finally {
try {
if (os != null) {
os.close();
}
if (process != null) {
process.destroy();
}
} catch (Exception ignored) {
}
}
}
public static InputStreamHandler sinkProcessOutput(Process p, OUTPUT o) {
InputStreamHandler output = null;
switch (o) {
case STDOUT:
// capture stdout; sink stderr so the process can't block on it
output = new InputStreamHandler(p.getInputStream(), false);
new InputStreamHandler(p.getErrorStream(), true);
break;
case STDERR:
// capture stderr; sink stdout
output = new InputStreamHandler(p.getErrorStream(), false);
new InputStreamHandler(p.getInputStream(), true);
break;
case BOTH:
// sink both streams; nothing is captured
new InputStreamHandler(p.getInputStream(), true);
new InputStreamHandler(p.getErrorStream(), true);
break;
}
return output;
}
private static class InputStreamHandler extends Thread {
private final InputStream stream;
private final boolean sink;
StringBuffer output;
public String getOutput() {
return output.toString();
}
InputStreamHandler(InputStream stream, boolean sink) {
this.sink = sink;
this.stream = stream;
start();
}
@Override
public void run() {
try {
if (sink) {
while (stream.read() != -1) {
}
} else {
output = new StringBuffer();
BufferedReader b = new BufferedReader(
new InputStreamReader(stream));
String s;
while ((s = b.readLine()) != null) {
output.append(s);
}
}
} catch (IOException ignored) {
}
}
}
}
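A design note on ShellInterface: every stream of the child process is always drained by an InputStreamHandler thread, even when its contents are discarded. If stdout or stderr were left unread, a command with enough output could fill the pipe buffer and leave process.waitFor() blocked indefinitely.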
You can force a kernel panic with the echo c > /proc/sysrq-trigger command.
More debug information can be found in the kernel panic log.
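As for the auto-reboot itself: the kernel's panic timeout defaults to 0, which means "hang forever after a panic" and matches the behaviour you describe. Setting it to a non-zero number of seconds makes the kernel reboot automatically after a panic, e.g. echo 5 > /proc/sys/kernel/panic at runtime, panic=5 on the kernel command line, or CONFIG_PANIC_TIMEOUT=5 in the kernel config.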
