See the code below. PocketSphinx is configured with a keyphrase search that should trigger on the word "record". The search is then started, and talking causes onBeginningOfSpeech and onEndOfSpeech to be called, but no other listener methods ever get called, whatever I say.
public class MainActivity extends AppCompatActivity implements RecognitionListener {
private final Handler handler = new Handler ();
private SpeechRecognizer recognizer;
private final static String KEYWORD = "record";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_start);
makeButtonStartButton ();
ensureRecordAudioPermission ();
startKeywordListener ();
}
private void startKeywordListener() {
// Recognizer initialization is time-consuming and involves IO,
// so we execute it in an async task
new AsyncTask<Void, Void, Exception>() {
@Override
protected Exception doInBackground(Void... params) {
try {
Assets assets = new Assets(MainActivity.this);
File assetDir = assets.syncAssets();
setupRecognizer(assetDir);
} catch (IOException e) {
return e;
}
return null;
}
@Override
protected void onPostExecute(Exception result) {
if (result != null) {
Log.e("MainActivity", "Failed to init recognizer " + result);
} else {
startKeywordSearch ();
}
}
}.execute();
}
private void startKeywordSearch() {
Log.i("MainActivity", "Starting keyword search: " + KEYWORD);
recognizer.stop();
recognizer.startListening(KEYWORD);
}
private void setupRecognizer(File assetsDir) throws IOException {
// The recognizer can be configured to perform multiple searches
// of different kind and switch between them
recognizer = SpeechRecognizerSetup.defaultSetup()
.setAcousticModel(new File(assetsDir, "en-us-ptm"))
.setDictionary(new File(assetsDir, "cmudict-en-us.dict"))
.setRawLogDir(assetsDir) // To disable logging of raw audio comment out this call (takes a lot of space on the device)
.getRecognizer();
recognizer.addListener(this);
// Create keyword-activation search.
recognizer.addKeyphraseSearch(KEYWORD, KEYWORD);
}
private void ensureRecordAudioPermission() {
// Check if user has given permission to record audio
int permissionCheck = ContextCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.RECORD_AUDIO);
if (permissionCheck == PackageManager.PERMISSION_DENIED) {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.RECORD_AUDIO}, 1);
return;
}
}
@Override
public void onRequestPermissionsResult(int requestCode,
String[] permissions, int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == 1) {
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
startKeywordListener();
} else {
finish();
}
}
}
@Override
public void onDestroy() {
super.onDestroy();
if (recognizer != null) {
recognizer.cancel();
recognizer.shutdown();
}
}
private void makeButtonStartButton() {
findViewById(R.id.startButton).setOnClickListener(startRecording);
}
private final View.OnClickListener startRecording = new View.OnClickListener() {
@Override
public void onClick(View v) {
startActivity(new Intent(MainActivity.this, RecordingActivity.class));
}
};
@Override
public void onBeginningOfSpeech() {
Log.i ("MainActivity", "Beginning of speech detected");
}
@Override
public void onEndOfSpeech() {
Log.i ("MainActivity", "End of speech detected");
}
@Override
public void onPartialResult(Hypothesis hypothesis) {
if (hypothesis == null) return; // reject the null hypothesis (!)
Log.i ("MainActivity", "Partial result: " + hypothesis.getHypstr() + " (" + hypothesis.getProb() + ")");
if (hypothesis.getHypstr().equals(KEYWORD))
startRecording.onClick(null);
}
@Override
public void onResult(Hypothesis hypothesis) {
if (hypothesis == null) return; // reject the null hypothesis (!)
Log.i ("MainActivity", "Complete result: " + hypothesis.getHypstr() + " (" + hypothesis.getProb() + ")");
if (hypothesis.getHypstr().equals(KEYWORD))
startRecording.onClick(null);
}
@Override
public void onError(Exception e) {
Log.i ("MainActivity", "Error detected", e);
}
@Override
public void onTimeout() {
Log.i ("MainActivity", "Timeout occurred");
}
}
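One thing worth noting: the other keyphrase examples further down all set a keyword threshold, while this setup leaves it at the default. A minimal sketch of the extra builder call, assuming the same setupRecognizer method (the 1e-45f value is only a starting point and must be tuned per phrase to balance false alarms against misses):

recognizer = SpeechRecognizerSetup.defaultSetup()
        .setAcousticModel(new File(assetsDir, "en-us-ptm"))
        .setDictionary(new File(assetsDir, "cmudict-en-us.dict"))
        // Threshold to tune for the keyphrase, balancing false alarms against misses
        .setKeywordThreshold(1e-45f)
        .getRecognizer();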
Related
I want to create a voice recognition app in Android and run it as a service so I can use it even when I'm not in the app. So I looked for a reference and found a demo app on GitHub.
This is the site: https://github.com/ihrupin/SpeechRecognitionService
I downloaded the app and read the documentation. Yes, it runs well for me, and it also runs as a service, but what I really want is, for example, to say "open Facebook" and have it open the installed Facebook app.
I'm a newbie at using PocketSphinx.
This is the MainActivity
public class MainActivity extends AppCompatActivity {
private static final int PERMISSIONS_REQUEST_RECORD_AUDIO = 1;
private static final String LOG_TAG = MainActivity.class.getSimpleName();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
((Button)findViewById(R.id.btn)).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Log.i(LOG_TAG, "onClick");
int permissionCheck = ContextCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.RECORD_AUDIO);
if (permissionCheck == PackageManager.PERMISSION_DENIED) {
ActivityCompat.requestPermissions(MainActivity.this, new String[]{Manifest.permission.RECORD_AUDIO}, PERMISSIONS_REQUEST_RECORD_AUDIO);
return;
}
startService(new Intent(MainActivity.this, VoiceService.class));
}
});
}
@Override
public void onRequestPermissionsResult(int requestCode,
String[] permissions, int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == PERMISSIONS_REQUEST_RECORD_AUDIO) {
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
startService(new Intent(MainActivity.this, VoiceService.class));
} else {
finish();
}
}
}
}
This is my Service
public class VoiceService extends Service implements
RecognitionListener {
private static final String LOG_TAG = VoiceService.class.getSimpleName();
private static final String KWS_SEARCH = "wakeup";
private static final String KEYPHRASE = "lisa";
private SpeechRecognizer recognizer;
@Nullable
@Override
public IBinder onBind(Intent intent) {
return null;
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
int permissionCheck = ContextCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.RECORD_AUDIO);
if (permissionCheck == PackageManager.PERMISSION_GRANTED) {
runRecognizerSetup();
}
return super.onStartCommand(intent, flags, startId);
}
private void runRecognizerSetup() {
new AsyncTask<Void, Void, Exception>() {
@Override
protected Exception doInBackground(Void... params) {
try {
Assets assets = new Assets(VoiceService.this);
File assetDir = assets.syncAssets();
setupRecognizer(assetDir);
} catch (IOException e) {
return e;
}
return null;
}
@Override
protected void onPostExecute(Exception result) {
if (result != null) {
Log.i(LOG_TAG, "Failed to init recognizer ");
} else {
switchSearch(KWS_SEARCH);
}
}
}.execute();
}
@Override
public void onDestroy() {
super.onDestroy();
if (recognizer != null) {
recognizer.cancel();
recognizer.shutdown();
}
}
@Override
public void onPartialResult(Hypothesis hypothesis) {
if (hypothesis == null)
return;
String text = hypothesis.getHypstr();
if (text.contains(KEYPHRASE)) {
Toast.makeText(this, "onPartialResult text=" + text, Toast.LENGTH_SHORT).show();
switchSearch(KWS_SEARCH);
}
Log.i(LOG_TAG, "onPartialResult text=" +text);
}
@Override
public void onResult(Hypothesis hypothesis) {
if (hypothesis != null) {
String text = hypothesis.getHypstr();
Log.i(LOG_TAG, "onResult text=" +text);
}
}
@Override
public void onBeginningOfSpeech() {
Log.i(LOG_TAG, "onBeginningOfSpeech");
}
@Override
public void onEndOfSpeech() {
if (!recognizer.getSearchName().contains(KWS_SEARCH))
switchSearch(KWS_SEARCH);
Log.i(LOG_TAG, "onEndOfSpeech");
}
private void switchSearch(String searchName) {
Log.i(LOG_TAG, "switchSearch searchName = " + searchName);
recognizer.stop();
recognizer.startListening(searchName, 10000);
}
private void setupRecognizer(File assetsDir) throws IOException {
recognizer = SpeechRecognizerSetup.defaultSetup()
.setAcousticModel(new File(assetsDir, "en-us-ptm"))
.setDictionary(new File(assetsDir, "cmudict-en-us.dict"))
.setRawLogDir(assetsDir)
.setKeywordThreshold(1e-45f)
.setBoolean("-allphone_ci", true)
.getRecognizer();
recognizer.addListener(this);
recognizer.addKeyphraseSearch(KWS_SEARCH, KEYPHRASE);
}
@Override
public void onError(Exception error) {
Log.i(LOG_TAG, "onError " + error.getMessage());
}
@Override
public void onTimeout() {
switchSearch(KWS_SEARCH);
Log.i(LOG_TAG, "onTimeout");
}
}
This is BootReceiver
public class BootReceiver extends BroadcastReceiver {
private static final String LOG_TAG = BootReceiver.class.getSimpleName();
@Override
public void onReceive(Context context, Intent intent) {
Log.i(LOG_TAG, "onReceive");
if(intent.getAction().equals(Intent.ACTION_BOOT_COMPLETED)){
Log.i(LOG_TAG, "onReceive onBoot");
context.startService(new Intent(context, VoiceService.class));
}
}
}
I've researched this topic and found out that I must modify the grammar and the dictionary, but I don't know how to do that. Any ideas?
If you want to modify the existing grammar and add your own words, you have to change it a little bit.
Write this line in your recognizer setup method:
recognizer.addKeyphraseSearch("facebookPhrase", "Open Facebook");
To learn how to extend this example, you can read the official tutorial here:
https://cmusphinx.github.io/wiki/tutoriallm/
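Once the keyphrase is spotted, opening Facebook is an ordinary Intent launch. A minimal sketch of how the service's onPartialResult could react, assuming the phrase was registered as above; the Facebook package name com.facebook.katana is an assumption, and note that every word of a keyphrase must exist in the dictionary or the search cannot be created:

@Override
public void onPartialResult(Hypothesis hypothesis) {
    if (hypothesis == null)
        return;
    String text = hypothesis.getHypstr();
    // compare case-insensitively; the hypothesis echoes the registered phrase
    if (text.toLowerCase().contains("open facebook")) {
        // ask the package manager for the launcher intent of the (assumed) Facebook package
        Intent launch = getPackageManager().getLaunchIntentForPackage("com.facebook.katana");
        if (launch != null) {
            launch.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); // needed when starting from a Service
            startActivity(launch);
        }
    }
}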
I am trying to implement a hotword in my app which will start speech input using CMUSphinx. I copied the code from the tutorial and made some changes to start the speech input, but it is not working. My MainActivity code for the recognition looks something like this:
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
if (PrefManager.getString(Constant.ACCESS_TOKEN, null) == null) {
throw new IllegalStateException("Not signed in, Cannot access resource!");
}
clientBuilder = new ClientBuilder();
init();
int permissionCheck = ContextCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.RECORD_AUDIO);
if (permissionCheck == PackageManager.PERMISSION_DENIED) {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.RECORD_AUDIO}, PERMISSIONS_REQUEST_RECORD_AUDIO);
return;
}
runRecognizerSetup();
}
private static final String KWS_SEARCH = "wakeup";
private void runRecognizerSetup() {
// Recognizer initialization is time-consuming and involves IO,
// so we execute it in an async task
new AsyncTask<Void, Void, Exception>() {
@Override
protected Exception doInBackground(Void... params) {
try {
Assets assets = new Assets(MainActivity.this);
File assetDir = assets.syncAssets();
setupRecognizer(assetDir);
} catch (IOException e) {
return e;
}
return null;
}
@Override
protected void onPostExecute(Exception result) {
if (result != null) {
Toast.makeText(getApplicationContext(),"Failed",LENGTH_SHORT).show();
} else {
switchSearch(KWS_SEARCH);
}
}
}.execute();
}
@Override
public void onRequestPermissionsResult(int requestCode,
String[] permissions, int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == PERMISSIONS_REQUEST_RECORD_AUDIO) {
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
runRecognizerSetup();
} else {
finish();
}
}
}
/**
* In partial result we get quick updates about current hypothesis. In
* keyword spotting mode we can react here, in other modes we need to wait
* for final result in onResult.
*/
@Override
public void onPartialResult(Hypothesis hypothesis) {
if (hypothesis == null)
return;
String text = hypothesis.getHypstr();
if (text.equals(KEYPHRASE))
promptSpeechInput();
}
/**
* This callback is called when we stop the recognizer.
*/
@Override
public void onResult(Hypothesis hypothesis){
if (hypothesis != null) {
String pqr = hypothesis.getHypstr();
Toast.makeText(getApplicationContext(),pqr,LENGTH_SHORT).show();
}
}
@Override
public void onBeginningOfSpeech() {
}
/**
* We stop recognizer here to get a final result
*/
@Override
public void onEndOfSpeech() {
if (!recognizer.getSearchName().equals(KWS_SEARCH))
switchSearch(KWS_SEARCH);
}
private void switchSearch(String searchName) {
recognizer.stop();
// If we are not spotting, start listening with timeout (10000 ms or 10 seconds).
if (searchName.equals(KWS_SEARCH))
recognizer.startListening(searchName);
else
recognizer.startListening(searchName, 10000);
}
private void setupRecognizer(File assetsDir) throws IOException {
// The recognizer can be configured to perform multiple searches
// of different kind and switch between them
recognizer = SpeechRecognizerSetup.defaultSetup()
.setAcousticModel(new File(assetsDir, "en-us-ptm"))
.setDictionary(new File(assetsDir, "cmudict-en-us.dict"))
.setRawLogDir(assetsDir) // To disable logging of raw audio comment out this call (takes a lot of space on the device)
.getRecognizer();
recognizer.addListener(this);
/** In your application you might not need to add all those searches.
* They are added here for demonstration. You can leave just one.
*/
// Create keyword-activation search.
recognizer.addKeyphraseSearch(KWS_SEARCH, KEYPHRASE);
}
@Override
public void onError(Exception error) {
Toast.makeText(getApplicationContext(),"error",LENGTH_SHORT).show();
}
@Override
public void onTimeout() {
switchSearch(KWS_SEARCH);
}
My keyphrase is "HI". The problem is that whenever I run this code I get the toast message "Failed" from this line:
Toast.makeText(getApplicationContext(),"Failed",LENGTH_SHORT).show();
What could possibly be wrong with this?
I am very new to these things, so any help will be really appreciated.
Thank you.
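For diagnosing this, one option is to surface the exception that doInBackground returned instead of discarding it; a minimal sketch of a changed onPostExecute (the tag and toast text are illustrative):

@Override
protected void onPostExecute(Exception result) {
    if (result != null) {
        // log the actual cause; on this code path it is an IOException from syncAssets()/setupRecognizer()
        Log.e("MainActivity", "Failed to init recognizer", result);
        Toast.makeText(getApplicationContext(), "Failed: " + result, LENGTH_SHORT).show();
    } else {
        switchSearch(KWS_SEARCH);
    }
}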
Hi, I'm implementing continuous speech listening in my application, but when I destroy the speech recognizer in onPause I get a "not connected to the recognition service" error.
I have already read this question and the related answer, but it doesn't seem to solve the problem.
This is the code that generates the error:
// Activity code
@Override
public void onPause()
{
assistant.dismiss();
super.onPause();
}
@Override
public void onResume()
{
super.onResume();
assistant = new Assistant(this);
Log.d(TAG,"resume");
}
Assistant code:
public class Assistant extends UtteranceProgressListener implements RecognitionListener, TextToSpeech.OnInitListener
{
private static final String TAG = "Assistant" ;
private Context context ;
private Intent intent ;
private SpeechRecognizer speechRecognizer;
private TextToSpeech textToSpeech;
private static AudioManager audioManager;
private boolean isAudioMute;
String actionAnswer ;
public Assistant ( Context context )
{
this.context = context;
isAudioMute = false ;
textToSpeech = new TextToSpeech(context, this);
textToSpeech.setOnUtteranceProgressListener(this);
if ( SpeechRecognizer.isRecognitionAvailable(context))
{
intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,
context.getPackageName());
speechRecognizer = SpeechRecognizer.createSpeechRecognizer(context);
speechRecognizer.setRecognitionListener(this);
audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
}
else
{
Log.e(TAG, "speech recognizer not available");
}
}
private void startListening()
{
if ( !isAudioMute )
{
audioManager.setStreamMute(AudioManager.STREAM_MUSIC, true);
isAudioMute = true ;
}
((Activity)context).runOnUiThread(new Runnable()
{
@Override
public void run()
{
speechRecognizer.startListening(intent);
Log.d(TAG, "startlisten");
}
});
}
private void stopListening()
{
speechRecognizer.stopListening();
try
{
// wait for the annoying sound to happen, then unmute the channel.
Thread.sleep(400);
}
catch (InterruptedException e)
{
e.printStackTrace();
}
Log.d(TAG, "stopListening");
if ( isAudioMute )
{
audioManager.setStreamMute(AudioManager.STREAM_MUSIC, false);
isAudioMute = false ;
}
}
public void dismiss()
{
((Activity)context).runOnUiThread(new Runnable()
{
@Override
public void run()
{
speechRecognizer.stopListening();
speechRecognizer.cancel();
speechRecognizer.destroy();
speechRecognizer = null;
}
});
try
{
// wait for the annoying sound to happen, then unmute the channel.
Thread.sleep(400);
}
catch (InterruptedException e)
{
e.printStackTrace();
}
Log.d(TAG, "stopListening");
if ( isAudioMute )
{
audioManager.setStreamMute(AudioManager.STREAM_MUSIC, false);
isAudioMute = false ;
}
textToSpeech.stop();
textToSpeech.shutdown();
textToSpeech = null;
}
private void speakOut(String text)
{
stopListening();
if (Build.VERSION.SDK_INT >= 21 )
{
if (textToSpeech.speak(text, TextToSpeech.QUEUE_FLUSH, null, this.getClass().getName()) != TextToSpeech.SUCCESS)
Log.e(TAG, "failed to queue text " + text);
}
else
{
if (textToSpeech.speak(text.toString(), TextToSpeech.QUEUE_FLUSH, null) != TextToSpeech.SUCCESS)
Log.e(TAG, "failed to queue text " + text);
}
}
// text to speech
@Override
public void onInit(int status)
{
if (status == TextToSpeech.SUCCESS)
{
int result = textToSpeech.setLanguage(Locale.US);
if (result == TextToSpeech.LANG_MISSING_DATA
|| result == TextToSpeech.LANG_NOT_SUPPORTED)
{
Log.e(TAG, "This Language is not supported");
return;
}
speakOut("Assistant Ready");
}
else
{
Log.e(TAG, "Initialization failed!");
}
}
// SpeechRecognizer
@Override
public void onReadyForSpeech(Bundle params)
{
Log.d(TAG, "readyforspeech");
}
// SpeechRecognizer
@Override
public void onBeginningOfSpeech()
{
Log.d(TAG, "beginningofspeech");
}
// SpeechRecognizer
@Override
public void onRmsChanged(float rmsdB)
{
}
// SpeechRecognizer
@Override
public void onBufferReceived(byte[] buffer)
{
Log.d(TAG, "bufferreceived");
}
// SpeechRecognizer
@Override
public void onEndOfSpeech()
{
Log.d(TAG, "endofspeech");
}
// SpeechRecognizer
@Override
public void onError(int error)
{
Log.d("SPEECH", "onError: " + error);
switch(error)
{
case SpeechRecognizer.ERROR_AUDIO:
Log.d(TAG,"ERROR_AUDIO");
break;
case SpeechRecognizer.ERROR_CLIENT:
Log.d(TAG,"ERROR_CLIENT");
break;
case SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS:
Log.d(TAG,"ERROR_INSUFFICIENT_PERMISSIONS");
break;
case SpeechRecognizer.ERROR_NETWORK:
Log.d(TAG,"ERROR_NETWORK");
break;
case SpeechRecognizer.ERROR_NETWORK_TIMEOUT:
Log.d(TAG,"ERROR_NETWORK_TIMEOUT");
break;
case SpeechRecognizer.ERROR_NO_MATCH:
Log.d(TAG,"ERROR_NO_MATCH");
startListening();
break;
case SpeechRecognizer.ERROR_RECOGNIZER_BUSY:
Log.d(TAG,"ERROR_RECOGNIZER_BUSY");
break;
case SpeechRecognizer.ERROR_SERVER:
Log.d(TAG,"ERROR_SERVER");
break;
case SpeechRecognizer.ERROR_SPEECH_TIMEOUT:
Log.d(TAG,"ERROR_SPEECH_TIMEOUT");
startListening();
break;
default:
Log.d(TAG,"ERROR_UNKNOWN");
}
}
// SpeechRecognizer
@Override
public void onResults(Bundle results)
{
ArrayList<String> res = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
Log.i(TAG,"res: '" + res.get(0) +"'.");
actionAnswer = res.get(0);
speakOut(actionAnswer);
}
// SpeechRecognizer
@Override
public void onPartialResults(Bundle partialResults)
{
}
// SpeechRecognizer
@Override
public void onEvent(int eventType, Bundle params)
{
}
// Utterance progress listener
@Override
public void onStart(String utteranceId)
{
Log.d(TAG, "onstart");
}
// Utterance progress listener
@Override
public void onDone(String utteranceId)
{
Log.d(TAG, "ondone");
startListening();
}
// Utterance progress listener
@Override
public void onError(String utteranceId)
{
Log.d(TAG, "onerror");
}
}
Anyway, even if the error occurs, everything works fine when I reinitialize the speech recognizer, so I'm not sure whether I have to worry about the error.
Don't use speechRecognizer.stopListening() and speechRecognizer.cancel();
use speechRecognizer.destroy() instead.
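A minimal sketch of dismiss() rewritten along those lines (only the recognizer part changes; the channel unmute and TextToSpeech shutdown from the original method stay as they are):

public void dismiss()
{
    ((Activity)context).runOnUiThread(new Runnable()
    {
        @Override
        public void run()
        {
            // destroy() cancels any pending recognition and releases the
            // service connection on its own, so no stopListening()/cancel() first
            speechRecognizer.destroy();
            speechRecognizer = null;
        }
    });
    // unmute the stream and shut down textToSpeech exactly as in the original dismiss()
}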
I am using the default dictionary that comes with the pocketsphinx demo, which is good for my purposes. When a user enters a phrase, the app starts keyphrase listening, but if a word is not found in the dictionary the app crashes. The crash happens in onError() within a service. How is the error handling done? Is there any way I can catch the error? Overall, I would just like the service to call stopSelf() when an error happens so the main activity won't crash as well.
Errors:
ERROR: "kws_search.c", line 165: The word 'phonez' is missing in the dictionary
Fatal signal 11 (SIGSEGV) at 0x00000000 (code=1), thread 5389 (1994.wherephone)
Here is my service class:
public class WherePhoneService extends Service implements RecognitionListener {
private static String SettingStorage = "SavedData";
SharedPreferences settingData;
private SpeechRecognizer recognizer;
private String sInput;
private String sOutput;
private int seekVal;
private TextToSpeech reply;
private AsyncTask t;
public WherePhoneService() {
}
@Override
public IBinder onBind(Intent intent) {
// TODO: Return the communication channel to the service.
throw new UnsupportedOperationException("Not yet implemented");
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
makeText(getApplicationContext(), "onHandle start", Toast.LENGTH_SHORT).show();
getValues();
startTTS();
t = new AsyncTask<Void, Void, Exception>() {
@Override
protected Exception doInBackground(Void... params) {
try {
Assets assets = new Assets(WherePhoneService.this);
File assetDir = assets.syncAssets();
setupRecognizer(assetDir);
} catch (IOException e) {
return e;
}
return null;
}
@Override
protected void onPostExecute(Exception result) {
if (result != null) {
//((TextView) findViewById(R.id.caption_text)).setText("Failed to init recognizer " + result);
} else {
switchSearch(sInput);
}
}
}.execute();
return Service.START_STICKY;
}
private void setupRecognizer(File assetsDir) throws IOException {
recognizer = defaultSetup()
.setAcousticModel(new File(assetsDir, "en-us-ptm"))
.setDictionary(new File(assetsDir, "cmudict-en-us.dict"))
// To disable logging of raw audio comment out this call (takes a lot of space on the device)
//.setRawLogDir(assetsDir)
// Threshold to tune for keyphrase to balance between false alarms and misses
.setKeywordThreshold(1e-45f)
// Use context-independent phonetic search, context-dependent is too slow for mobile
.setBoolean("-allphone_ci", true)
.getRecognizer();
recognizer.addListener(this);
// Create keyword-activation search.
recognizer.addKeyphraseSearch(sInput, sInput);
}
private void switchSearch(String searchName) {
recognizer.stop();
// If we are not spotting, start listening with timeout (10000 ms or 10 seconds).
if (searchName.equals(sInput))
recognizer.startListening(searchName);
else
recognizer.startListening(searchName, 10000);
}
@Override
public void onBeginningOfSpeech() {
}
@Override
public void onEndOfSpeech() {
if (!recognizer.getSearchName().equals(sInput))
switchSearch(sInput);
}
@Override
public void onPartialResult(Hypothesis hypothesis) {
if (hypothesis == null)
return;
String text = hypothesis.getHypstr();
makeText(getApplicationContext(), "Partial", Toast.LENGTH_SHORT).show();
if (text.equals(sInput)) {
setVolume();
// Text to speech
reply.speak(sOutput, TextToSpeech.QUEUE_ADD, null);
switchSearch(sInput);
}
else {
makeText(getApplicationContext(), "Try again", Toast.LENGTH_SHORT).show();
switchSearch(sInput);
}
}
@Override
public void onResult(Hypothesis hypothesis) {
if (hypothesis != null) {
// restart the listener and affirm that the partial result has passed
makeText(getApplicationContext(), "end", Toast.LENGTH_SHORT).show();
//recognizer.startListening(sInput);
switchSearch(sInput);
}
}
@Override
public void onError(Exception e) {
e.printStackTrace(); // not all Android versions will print the stack trace automatically
Intent intent = new Intent ();
intent.setAction ("com.mydomain.SEND_LOG"); // see step 5.
intent.setFlags (Intent.FLAG_ACTIVITY_NEW_TASK); // required when starting from Application
startActivity (intent);
stopSelf();
}
@Override
public void onTimeout() {
switchSearch(sInput);
}
public void startTTS() {
reply = new TextToSpeech(getApplicationContext(), new TextToSpeech.OnInitListener() {
@Override
public void onInit(int status) {
if(status != TextToSpeech.ERROR){
reply.setLanguage(Locale.UK);
}
}
});
}
public void getValues() {
settingData = getBaseContext().getSharedPreferences(SettingStorage, 0);
sInput = settingData.getString("inputstring", "Where is my phone").toString().toLowerCase().replaceAll("[^\\w\\s]", "");
sOutput = settingData.getString("outputstring", "").toString().toLowerCase();
seekVal = settingData.getInt("seekval", 0);
}
public void setVolume() {
int seekValConvert = 0;
AudioManager audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
int getMaxPhoneVol = audioManager.getStreamMaxVolume(AudioManager.STREAM_MUSIC);
seekValConvert = ((seekVal * getMaxPhoneVol) / 100);
audioManager.setStreamVolume(AudioManager.STREAM_MUSIC, seekValConvert, 0);
}
@Override
public void onDestroy() {
super.onDestroy();
makeText(getApplicationContext(), "destroy", Toast.LENGTH_SHORT).show();
recognizer.cancel();
recognizer.shutdown();
t.cancel(true);
}
}
The crash is a bug in pocketsphinx-android. If you update to the latest version from GitHub, it should properly throw a RuntimeException on any errors in the addKeyphrase and setSearch methods.
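With a version that throws instead of segfaulting, the service can catch the error itself and stop, which is what the question asks for. A minimal sketch, assuming the updated library and the same fields as above:

private void setupRecognizer(File assetsDir) throws IOException {
    recognizer = defaultSetup()
            .setAcousticModel(new File(assetsDir, "en-us-ptm"))
            .setDictionary(new File(assetsDir, "cmudict-en-us.dict"))
            .setKeywordThreshold(1e-45f)
            .setBoolean("-allphone_ci", true)
            .getRecognizer();
    recognizer.addListener(this);
    try {
        recognizer.addKeyphraseSearch(sInput, sInput);
    } catch (RuntimeException e) {
        // thrown when, for example, a word of the phrase is missing from the dictionary
        Log.e("WherePhoneService", "Bad keyphrase '" + sInput + "'", e);
        stopSelf();
    }
}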
I have successfully implemented Google+ sign-in.
This is the list-people activity:
public class ListPeopleActivity extends Activity implements
PlusClient.ConnectionCallbacks, PlusClient.OnPeopleLoadedListener,
PlusClient.OnConnectionFailedListener, DialogInterface.OnCancelListener {
private static final String TAG = "ListPeopleActivity";
private static final String STATE_RESOLVING_ERROR = "resolving_error";
private static final int DIALOG_GET_GOOGLE_PLAY_SERVICES = 1;
private static final int REQUEST_CODE_SIGN_IN = 1;
private static final int REQUEST_CODE_GET_GOOGLE_PLAY_SERVICES = 2;
private ArrayAdapter mListAdapter;
private ListView mPersonListView;
private ArrayList<String> mListItems;
private PlusClient mPlusClient;
private boolean mResolvingError;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.person_list_activity);
mPlusClient = new PlusClient.Builder(this, this, this)
.setVisibleActivities(MomentUtil.VISIBLE_ACTIVITIES).build();
mListItems = new ArrayList<String>();
mListAdapter = new ArrayAdapter<String>(this,
android.R.layout.simple_list_item_1, mListItems);
mPersonListView = (ListView) findViewById(R.id.person_list);
mResolvingError = savedInstanceState != null
&& savedInstanceState.getBoolean(STATE_RESOLVING_ERROR, false);
int available = GooglePlayServicesUtil
.isGooglePlayServicesAvailable(this);
if (available != ConnectionResult.SUCCESS) {
showDialog(DIALOG_GET_GOOGLE_PLAY_SERVICES);
}
}
@Override
protected Dialog onCreateDialog(int id) {
if (id != DIALOG_GET_GOOGLE_PLAY_SERVICES) {
return super.onCreateDialog(id);
}
int available = GooglePlayServicesUtil
.isGooglePlayServicesAvailable(this);
if (available == ConnectionResult.SUCCESS) {
return null;
}
if (GooglePlayServicesUtil.isUserRecoverableError(available)) {
return GooglePlayServicesUtil.getErrorDialog(available, this,
REQUEST_CODE_GET_GOOGLE_PLAY_SERVICES, this);
}
return new AlertDialog.Builder(this)
.setMessage(R.string.plus_generic_error).setCancelable(true)
.setOnCancelListener(this).create();
}
@Override
protected void onStart() {
super.onStart();
mPlusClient.connect();
}
@Override
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
outState.putBoolean(STATE_RESOLVING_ERROR, mResolvingError);
}
@Override
protected void onStop() {
super.onStop();
mPlusClient.disconnect();
}
@Override
public void onPeopleLoaded(ConnectionResult status,
PersonBuffer personBuffer, String nextPageToken) {
switch (status.getErrorCode()) {
case ConnectionResult.SUCCESS:
mListItems.clear();
try {
int count = personBuffer.getCount();
Log.e("", "count : " + count);
for (int i = 0; i < count; i++) {
mListItems.add(personBuffer.get(i).getDisplayName());
Log.e("", "" + personBuffer.get(i).getDisplayName() + " "
+ personBuffer.get(i).getId() + " isPlusUser : "
+ personBuffer.get(i).isPlusUser()
+ " isVerified : "
+ personBuffer.get(i).isVerified()
+ " hasCircledByCount : "
+ personBuffer.get(i).hasCircledByCount()
+ " getObjectType : "
+ personBuffer.get(i).getObjectType());
}
} finally {
personBuffer.close();
}
mListAdapter.notifyDataSetChanged();
break;
case ConnectionResult.SIGN_IN_REQUIRED:
mPlusClient.disconnect();
mPlusClient.connect();
break;
default:
Log.e(TAG, "Error when listing people: " + status);
break;
}
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
switch (requestCode) {
case REQUEST_CODE_SIGN_IN:
mResolvingError = false;
handleResult(resultCode);
break;
case REQUEST_CODE_GET_GOOGLE_PLAY_SERVICES:
handleResult(resultCode);
break;
}
}
private void handleResult(int resultCode) {
if (resultCode == RESULT_OK) {
// onActivityResult is called after onStart (but onStart is not
// guaranteed to be called while signing in), so we should make
// sure we're not already connecting before we call connect again.
if (!mPlusClient.isConnecting() && !mPlusClient.isConnected()) {
mPlusClient.connect();
}
} else {
Log.e(TAG, "Unable to sign the user in.");
finish();
}
}
@Override
public void onConnected(Bundle connectionHint) {
mPersonListView.setAdapter(mListAdapter);
// mPlusClient.loadVisiblePeople(this, null);
mPlusClient.loadPeople(this, "103193341800315457743");
}
@Override
public void onDisconnected() {
mPersonListView.setAdapter(null);
mPlusClient.connect();
}
@Override
public void onConnectionFailed(ConnectionResult result) {
if (mResolvingError) {
return;
}
mPersonListView.setAdapter(null);
try {
result.startResolutionForResult(this, REQUEST_CODE_SIGN_IN);
mResolvingError = true;
} catch (IntentSender.SendIntentException e) {
// Get another pending intent to run.
mPlusClient.connect();
}
}
@Override
public void onCancel(DialogInterface dialogInterface) {
Log.e(TAG, "Unable to sign the user in.");
finish();
}
In the above code, mPlusClient.loadVisiblePeople(this, null); loads all visible (in-circle) people. I want to load one specific person who is in my circles. mPlusClient.loadPeople(this, "103193341800315457743"); can load a specific person, but how can I know whether he is in my circles or not? It returns him every time, whether he is in my circles or not.
What I actually want to know is whether that specific person is in my circles.
Try mPlusClient.loadPerson(this, "103193341800315457743");
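loadPerson() only fetches the public profile, so by itself it won't tell you about circles. One way to answer the "is he in my circles" part is to keep using loadVisiblePeople() (which returns only circled people) and search the buffer for the ID; a minimal sketch, reusing the listener shape from the activity above:

@Override
public void onPeopleLoaded(ConnectionResult status, PersonBuffer personBuffer, String nextPageToken) {
    if (status.getErrorCode() == ConnectionResult.SUCCESS) {
        try {
            boolean inMyCircles = false;
            for (int i = 0; i < personBuffer.getCount(); i++) {
                if ("103193341800315457743".equals(personBuffer.get(i).getId())) {
                    inMyCircles = true; // the ID appears among visible (circled) people
                    break;
                }
            }
            // with many contacts, also follow nextPageToken before concluding "not in circles"
            Log.i(TAG, "in my circles: " + inMyCircles);
        } finally {
            personBuffer.close();
        }
    }
}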