PocketSphinx does not recognise Indian-language words accurately - Android

I am developing an app for voice commands and have used the pocketsphinx demo. I also created a dictionary using http://www.speech.cs.cmu.edu/tools/lextool.html,
but when I speak it does not recognise the correct word; it gives another word instead. Below is my code:
package edu.cmu.pocketsphinx.demo;
import android.app.Service;
import android.content.Intent;
import android.os.AsyncTask;
import android.os.IBinder;
import android.util.Log;
import android.widget.Toast;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import edu.cmu.pocketsphinx.Assets;
import edu.cmu.pocketsphinx.Hypothesis;
import edu.cmu.pocketsphinx.RecognitionListener;
import edu.cmu.pocketsphinx.SpeechRecognizer;
import edu.cmu.pocketsphinx.SpeechRecognizerSetup;
import static android.widget.Toast.makeText;
public class FirstService extends Service implements
RecognitionListener {
/* Named searches allow to quickly reconfigure the decoder */
private static final String KWS_SEARCH = "wakeup";
// private static final String FORECAST_SEARCH = "pimple";
// private static final String DIGITS_SEARCH = "help";
// private static final String PHONE_SEARCH = "phones";
private static final String MENU_SEARCH = "menu";
/* Keyword we are looking for to activate menu */
private static final String KEYPHRASE = "hello";
/* Used to handle permission request */
private static final int PERMISSIONS_REQUEST_RECORD_AUDIO = 1;
private SpeechRecognizer recognizer;
private HashMap<String, Integer> captions;
private static String TAG = "Inchoo.net tutorial";
@Override
public IBinder onBind(Intent arg0) {
/* TODO Auto-generated method stub */
return null;
}
@Override
public void onStart(Intent intent, int startId) {
// TODO Auto-generated method stub
super.onStart(intent, startId);
Log.d(TAG, "FirstService started");
runRecognizerSetup();
// this.stopSelf();
}
@Override
public void onDestroy() {
// TODO Auto-generated method stub
super.onDestroy();
Log.d(TAG, "FirstService destroyed");
}
private void runRecognizerSetup() {
// Recognizer initialization is time-consuming and involves IO,
// so we execute it in an async task
new AsyncTask<Void, Void, Exception>() {
@Override
protected Exception doInBackground(Void... params) {
try {
Assets assets = new Assets(FirstService.this);
File assetDir = assets.syncAssets();
setupRecognizer(assetDir);
} catch (IOException e) {
return e;
}
return null;
}
@Override
protected void onPostExecute(Exception result) {
if (result != null) {
} else {
switchSearch(KWS_SEARCH);
}
}
}.execute();
}
/**
* In partial result we get quick updates about current hypothesis. In
* keyword spotting mode we can react here, in other modes we need to wait
* for final result in onResult.
*/
@Override
public void onPartialResult(Hypothesis hypothesis) {
if (hypothesis == null)
return;
String text = hypothesis.getHypstr();
Log.d("TEMP RESULT",text);
/// if (text.equals(KEYPHRASE))
switchSearch(MENU_SEARCH);
// else if (text.equals(DIGITS_SEARCH))
// switchSearch(DIGITS_SEARCH);
// else if (text.equals(PHONE_SEARCH))
// switchSearch(PHONE_SEARCH);
// else if (text.equals(FORECAST_SEARCH))
// switchSearch(FORECAST_SEARCH);
// else
//((TextView) findViewById(R.id.result_text)).setText(text);
}
/**
* This callback is called when we stop the recognizer.
*/
@Override
public void onResult(Hypothesis hypothesis) {
if (hypothesis != null) {
String text = hypothesis.getHypstr();
Log.d(" RESULT",text);
makeText(getApplicationContext(), text, Toast.LENGTH_SHORT).show();
}
}
@Override
public void onBeginningOfSpeech() {
}
/**
* We stop recognizer here to get a final result
*/
@Override
public void onEndOfSpeech() {
Log.d("getSearchName()",recognizer.getSearchName());
if (!recognizer.getSearchName().equals(KWS_SEARCH))
switchSearch(KWS_SEARCH);
}
private void switchSearch(String searchName) {
recognizer.stop();
// If we are not spotting, start listening with timeout (10000 ms or 10 seconds).
if (searchName.equals(KWS_SEARCH))
recognizer.startListening(searchName);
else
recognizer.startListening(searchName, 10000);
// String caption = getResources().getString(captions.get(searchName));
// ((TextView) findViewById(R.id.caption_text)).setText(caption);
}
private void setupRecognizer(File assetsDir) throws IOException {
// The recognizer can be configured to perform multiple searches
// of different kind and switch between them
recognizer = SpeechRecognizerSetup.defaultSetup()
.setAcousticModel(new File(assetsDir, "en-us-ptm"))
.setDictionary(new File(assetsDir, "cmudict-en-us.dict"))
.setRawLogDir(assetsDir) // To disable logging of raw audio comment out this call (takes a lot of space on the device)
.setKeywordThreshold(1e-45f) // Threshold to tune for keyphrase to balance between false alarms and misses
.setBoolean("-allphone_ci", true) // Use context-independent phonetic search, context-dependent is too slow for mobile
.getRecognizer();
recognizer.addListener(this);
/** In your application you might not need to add all those searches.
* They are added here for demonstration. You can leave just one.
*/
// Create keyword-activation search.
recognizer.addKeyphraseSearch(KWS_SEARCH, KEYPHRASE);
// Create grammar-based search for selection between demos
File menuGrammar = new File(assetsDir, "menu.gram");
recognizer.addGrammarSearch(MENU_SEARCH, menuGrammar);
// // Create grammar-based search for digit recognition
// File digitsGrammar = new File(assetsDir, "digits.gram");
// recognizer.addGrammarSearch(DIGITS_SEARCH, digitsGrammar);
//
// // Create language model search
// File languageModel = new File(assetsDir, "weather.dmp");
// recognizer.addNgramSearch(FORECAST_SEARCH, languageModel);
//
// // Phonetic search
// File phoneticModel = new File(assetsDir, "en-phone.dmp");
// recognizer.addAllphoneSearch(PHONE_SEARCH, phoneticModel);
}
@Override
public void onError(Exception error) {
}
@Override
public void onTimeout() {
Log.d("TIME OUt","TIMEOUT"+KWS_SEARCH);
switchSearch(KWS_SEARCH);
}
}

The default model was created for US English; it does not support Indian English and is not expected to have high accuracy for it.
Indian English is certainly a priority for us, but we need someone local to help us build the system. As a local speaker you could help a lot; otherwise you will have to wait until someone collects enough data.
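If your commands form a small, fixed set, one thing worth trying in the meantime is to make the decoder use the dictionary you generated with lextool and keep the search space limited to your grammar. This is only a sketch under assumptions ("custom.dict" is a placeholder for your lextool output copied into the synced assets); it does not remove the accent mismatch described above:
private void setupRecognizer(File assetsDir) throws IOException {
    recognizer = SpeechRecognizerSetup.defaultSetup()
            .setAcousticModel(new File(assetsDir, "en-us-ptm"))   // still the US English model
            .setDictionary(new File(assetsDir, "custom.dict"))    // placeholder: your lextool output
            .setKeywordThreshold(1e-45f)
            .getRecognizer();
    recognizer.addListener(this);
    // Restrict recognition to the wake phrase and a small grammar so the decoder
    // only has to choose between a few command words instead of the whole dictionary.
    recognizer.addKeyphraseSearch(KWS_SEARCH, KEYPHRASE);
    recognizer.addGrammarSearch(MENU_SEARCH, new File(assetsDir, "menu.gram"));
}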

Related

How to fix: Wrong 1st argument type. Found: 'com.harrysoft.androidbluetoothserial.demoapp.CommunicateViewModel', required: 'android.content.Context'

I'm trying to build an app which has to run in the background, so I'm using a ForegroundService. But when I write "this" in the CommunicateViewModel class, it gets underlined and shows me:
"Cannot resolve constructor
'Intent(com.harrysoft.androidbluetoothserial.demoapp.CommunicateViewModel,
java.lang.Class<com.harrysoft.androidbluetoothserial.demoapp.TimeService>)'"
and then this:
"Wrong 1st argument type. Found:
'com.harrysoft.androidbluetoothserial.demoapp.CommunicateViewModel',
required: 'android.content.Context' less... Inspection info:
startForegroundService (android.content.Context, Intent) in
ContextCompat cannot be applied to
(com.harrysoft.androidbluetoothserial.demoapp.CommunicateViewModel,
Intent)  "
How can I solve this problem?
CommunicateViewModel:
package com.harrysoft.androidbluetoothserial.demoapp;
import android.app.Application;
import android.arch.lifecycle.AndroidViewModel;
import android.arch.lifecycle.LiveData;
import android.arch.lifecycle.MutableLiveData;
import android.content.Intent;
import android.os.CountDownTimer;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.StringRes;
import android.support.v4.content.ContextCompat;
import android.text.TextUtils;
import android.util.Log;
import android.view.View;
import android.widget.Toast;
import com.harrysoft.androidbluetoothserial.BluetoothManager;
import com.harrysoft.androidbluetoothserial.SimpleBluetoothDeviceInterface;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.disposables.CompositeDisposable;
import io.reactivex.schedulers.Schedulers;
public class CommunicateViewModel extends AndroidViewModel {
// A CompositeDisposable that keeps track of all of our asynchronous tasks
private CompositeDisposable compositeDisposable = new CompositeDisposable();
// Our BluetoothManager!
private BluetoothManager bluetoothManager;
// Our Bluetooth Device! When disconnected it is null, so make sure we know that we need to deal with it potentially being null
@Nullable
private SimpleBluetoothDeviceInterface deviceInterface;
// The messages feed that the activity sees
private MutableLiveData<String> messagesData = new MutableLiveData<>();
// The connection status that the activity sees
private MutableLiveData<ConnectionStatus> connectionStatusData = new MutableLiveData<>();
// The device name that the activity sees
private MutableLiveData<String> deviceNameData = new MutableLiveData<>();
// The message in the message box that the activity sees
private MutableLiveData<String> messageData = new MutableLiveData<>();
// Our modifiable record of the conversation
private StringBuilder messages = new StringBuilder();
// Our configuration
private String deviceName;
private String mac;
// A variable to help us not double-connect
private boolean connectionAttemptedOrMade = false;
// A variable to help us not setup twice
private boolean viewModelSetup = false;
// Called by the system, this is just a constructor that matches AndroidViewModel.
public CommunicateViewModel(@NonNull Application application) {
super(application);
}
// Called in the activity's onCreate(). Checks if it has been called before, and if not, sets up the data.
// Returns true if everything went okay, or false if there was an error and therefore the activity should finish.
public boolean setupViewModel(String deviceName, String mac) {
// Check we haven't already been called
if (!viewModelSetup) {
viewModelSetup = true;
// Setup our BluetoothManager
bluetoothManager = BluetoothManager.getInstance();
if (bluetoothManager == null) {
// Bluetooth unavailable on this device :( tell the user
toast(R.string.bluetooth_unavailable);
// Tell the activity there was an error and to close
return false;
}
// Remember the configuration
this.deviceName = deviceName;
this.mac = mac;
// Tell the activity the device name so it can set the title
deviceNameData.postValue(deviceName);
// Tell the activity we are disconnected.
connectionStatusData.postValue(ConnectionStatus.DISCONNECTED);
}
// If we got this far, nothing went wrong, so return true
return true;
}
// Called when the user presses the connect button
public void toconnect(){
Intent serviceIntent = new Intent(this, TimeService.class);
serviceIntent.putExtra("inputExtra", "this can be set invissable");
ContextCompat.startForegroundService(this, serviceIntent);
connect();
}
public void connect() {
// Check we are not already connecting or connected
if (!connectionAttemptedOrMade) {
// Connect asynchronously
compositeDisposable.add(bluetoothManager.openSerialDevice(mac)
.subscribeOn(Schedulers.io())
.observeOn(AndroidSchedulers.mainThread())
.subscribe(device -> onConnected(device.toSimpleDeviceInterface()), t -> {
toast(R.string.connection_failed);
connectionAttemptedOrMade = false;
connectionStatusData.postValue(ConnectionStatus.DISCONNECTED);
}));
// Remember that we made a connection attempt.
connectionAttemptedOrMade = true;
// Tell the activity that we are connecting.
connectionStatusData.postValue(ConnectionStatus.CONNECTING);
}
}
// Called when the user presses the disconnect button
public void disconnect() {
// Check we were connected
if (connectionAttemptedOrMade && deviceInterface != null) {
connectionAttemptedOrMade = false;
// Use the library to close the connection
bluetoothManager.closeDevice(deviceInterface);
// Set it to null so no one tries to use it
deviceInterface = null;
// Tell the activity we are disconnected
connectionStatusData.postValue(ConnectionStatus.DISCONNECTED);
}
}
// Called once the library connects a bluetooth device
private void onConnected(SimpleBluetoothDeviceInterface deviceInterface) {
this.deviceInterface = deviceInterface;
if (this.deviceInterface != null) {
// We have a device! Tell the activity we are connected.
connectionStatusData.postValue(ConnectionStatus.CONNECTED);
// Setup the listeners for the interface
this.deviceInterface.setListeners(this::onMessageReceived, this::onMessageSent, t -> toast(R.string.message_send_error));
// Tell the user we are connected.
toast(R.string.connected);
// Reset the conversation
messages = new StringBuilder();
messagesData.postValue(messages.toString());
} else {
// deviceInterface was null, so the connection failed
toast(R.string.connection_failed);
connectionStatusData.postValue(ConnectionStatus.DISCONNECTED);
}
getCurrentTime();
}
// Adds a received message to the conversation
private void onMessageReceived(String message) {
messages.append(deviceName).append(": ").append(message).append('\n');
messagesData.postValue(messages.toString());
}
// Adds a sent message to the conversation
private void onMessageSent(String message) {
// Add it to the conversation
messages.append(getApplication().getString(R.string.you_sent)).append(": ").append(message).append('\n');
messagesData.postValue(messages.toString());
// Reset the message box
messageData.postValue("");
}
// Send a message
public void sendMessage(String message) {
new CountDownTimer(1000, 1000) {
public void onTick(long millisUntilFinished) {
if (deviceInterface != null && !TextUtils.isEmpty(message)) {
Log.i("info", "sendMessage: send");
}
deviceInterface.sendMessage(message);
}
public void onFinish() {
disconnect();
timer();
}
}.start();
// Check we have a connected device and the message is not empty, then send the message -----------------
}
//---------------------------------------------------------------------------MY Code
//timer
public void timer(){
new CountDownTimer(28000, 1000) {
public void onTick(long millisUntilFinished) {
Log.i("INFO", "onTick: ");
}
public void onFinish() {
connect();
}
}.start();
}
// Get Time
public void getCurrentTime() {
Calendar calendar = Calendar.getInstance();
SimpleDateFormat mdformat = new SimpleDateFormat("HHmm");
SimpleDateFormat seconds = new SimpleDateFormat("ss");
String strSec = seconds.format(calendar.getTime());
String strDate = "Time" + mdformat.format(calendar.getTime());
//if ((strSec == "29") || (strSec == "59")) {
sendMessage(strDate);
//}
}
// Called when the activity finishes - clear up after ourselves.
@Override
protected void onCleared() {
// Dispose any asynchronous operations that are running
compositeDisposable.dispose();
// Shutdown bluetooth connections
bluetoothManager.close();
}
// Helper method to create toast messages.
private void toast(@StringRes int messageResource) { Toast.makeText(getApplication(), messageResource, Toast.LENGTH_LONG).show(); }
// Getter method for the activity to use.
public LiveData<String> getMessages() { return messagesData; }
// Getter method for the activity to use.
public LiveData<ConnectionStatus> getConnectionStatus() { return connectionStatusData; }
// Getter method for the activity to use.
public LiveData<String> getDeviceName() { return deviceNameData; }
// Getter method for the activity to use.
public LiveData<String> getMessage() { return messageData; }
// An enum that is passed to the activity to indicate the current connection status
enum ConnectionStatus {
DISCONNECTED,
CONNECTING,
CONNECTED
}
}
TimeService:
package com.harrysoft.androidbluetoothserial.demoapp;
import android.app.Notification;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Intent;
import android.os.IBinder;
import android.support.v4.app.*;
import io.reactivex.annotations.Nullable;
import static com.harrysoft.androidbluetoothserial.demoapp.App.CHANNEL_ID;
public class TimeService extends Service {
@Override
public void onCreate() {
super.onCreate();
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
String input = intent.getStringExtra("inputExtra");
Intent notificationIntent = new Intent(this, MainActivity.class);
PendingIntent pendingIntent = PendingIntent.getActivity(this,
0, notificationIntent, 0);
Notification notification = new NotificationCompat.Builder(this, CHANNEL_ID)
.setContentTitle("Example Service")
.setContentText(input)
.setSmallIcon(R.drawable.ic_android)
.setContentIntent(pendingIntent)
.build();
startForeground(1, notification);
//do heavy work on a background thread
//stopSelf();
return START_NOT_STICKY;
}
@Override
public void onDestroy() {
super.onDestroy();
}
@Nullable
@Override
public IBinder onBind(Intent intent) {
return null;
}
}
You have to pass a Context as the first argument of the Intent constructor. You can retrieve it from the application object:
Intent serviceIntent = new Intent(getApplication().getApplicationContext(), TimeService.class);
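In the posted code that means changing toconnect() along these lines (a sketch only; it assumes you add import android.content.Context and leave the rest of CommunicateViewModel as posted):
public void toconnect() {
    // An AndroidViewModel is not a Context, but its Application is.
    Context context = getApplication().getApplicationContext();
    Intent serviceIntent = new Intent(context, TimeService.class);
    serviceIntent.putExtra("inputExtra", "this can be set invissable");
    ContextCompat.startForegroundService(context, serviceIntent);
    connect();
}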

Creating a VPN profile and connecting (PPTP) with username and password in Android programmatically

I want to build an app that, with a single button click, creates a VPN profile and connects to it by passing the username and password of the VPN server.
The code I have developed and modified can create and connect a VPN, but my VPN requires a username and password to be entered, so I want to do that programmatically.
Here is my project
VpnClient.java
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.net.VpnService;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
public class VpnClient extends AppCompatActivity{
public interface Prefs {
String NAME = "connection";
String SERVER_ADDRESS = "server.address";
String SERVER_PORT = "server.port";
String SHARED_SECRET = "shared.secret";
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
Button disconnect = (Button) findViewById(R.id.disconnect);
Button connect = (Button) findViewById(R.id.connect);
// final TextView serverAddress = (TextView) findViewById(R.id.address);
//final TextView serverPort = (TextView) findViewById(R.id.port);
// final TextView sharedSecret = (TextView) findViewById(R.id.secret);
final SharedPreferences prefs = getSharedPreferences(Prefs.NAME, MODE_PRIVATE);
final String serverAddress = ""; // I insert my VPN server address here, which is my.vpnserver.com
final String serverPort = ""; // As the server port I am using 1723 for a PPTP connection
connect.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
prefs.edit()
.putString(Prefs.SERVER_ADDRESS,"")
.putString(Prefs.SERVER_PORT, "")
.putString(Prefs.SHARED_SECRET, "")
.commit();
Intent intent = VpnService.prepare(VpnClient.this);
if (intent != null) {
startActivityForResult(intent, 0);
} else {
onActivityResult(0, RESULT_OK, null);
}
}
});
disconnect.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
startService(getServiceIntent().setAction(MyVpnService.ACTION_DISCONNECT));
}
});
}
@Override
protected void onActivityResult(int request, int result, Intent data) {
if (result == RESULT_OK) {
startService(getServiceIntent().setAction(MyVpnService.ACTION_CONNECT));
}
}
private Intent getServiceIntent() {
return new Intent(this, MyVpnService.class);
}
}
MyVpnService.java
import android.app.Notification;
import android.app.PendingIntent;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Build;
import android.os.Handler;
import android.os.Message;
import android.os.ParcelFileDescriptor;
import android.support.annotation.RequiresApi;
import android.support.v4.util.Pair;
import android.util.Log;
import android.widget.Toast;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.logging.LogRecord;
public class MyVpnService extends android.net.VpnService implements android.os.Handler.Callback {
private static final String TAG = MyVpnService.class.getSimpleName();
public static final String ACTION_CONNECT = "com.yaksh.vpn.START";
public static final String ACTION_DISCONNECT = "com.yaksh.vpn.STOP";
private Handler mHandler;
private static class Connection extends Pair<Thread, ParcelFileDescriptor> {
public Connection(Thread thread, ParcelFileDescriptor pfd) {
super(thread, pfd);
}
}
private final AtomicReference<Thread> mConnectingThread = new AtomicReference<>();
private final AtomicReference<Connection> mConnection = new AtomicReference<>();
private AtomicInteger mNextConnectionId = new AtomicInteger(1);
private PendingIntent mConfigureIntent;
@Override
public void onCreate() {
// The handler is only used to show messages.
if (mHandler == null) {
mHandler = new Handler(this);
}
// Create the intent to "configure" the connection (just start ToyVpnClient).
mConfigureIntent = PendingIntent.getActivity(this, 0, new Intent(this, VpnClient.class),
PendingIntent.FLAG_UPDATE_CURRENT);
}
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
if (intent != null && ACTION_DISCONNECT.equals(intent.getAction())) {
disconnect();
return START_NOT_STICKY;
} else {
connect();
return START_STICKY;
}
}
@Override
public void onDestroy() {
disconnect();
}
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
@Override
public boolean handleMessage(Message message) {
Toast.makeText(this, message.what, Toast.LENGTH_SHORT).show();
if (message.what != R.string.disconnected) {
updateForegroundNotification(message.what);
}
return true;
}
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
private void connect() {
// Become a foreground service. Background services can be VPN services too, but they can
// be killed by background check before getting a chance to receive onRevoke().
updateForegroundNotification(R.string.connecting);
mHandler.sendEmptyMessage(R.string.connecting);
// Extract information from the shared preferences.
final SharedPreferences prefs = getSharedPreferences(VpnClient.Prefs.NAME, MODE_PRIVATE);
final String server = prefs.getString(VpnClient.Prefs.SERVER_ADDRESS, "");
final byte[] secret = prefs.getString(VpnClient.Prefs.SHARED_SECRET, "").getBytes();
final int port;
try {
port = Integer.parseInt(prefs.getString(VpnClient.Prefs.SERVER_PORT, ""));
} catch (NumberFormatException e) {
Log.e(TAG, "Bad port: " + prefs.getString(VpnClient.Prefs.SERVER_PORT, null), e);
return;
}
// Kick off a connection.
startConnection(new VpnConnection(
this, mNextConnectionId.getAndIncrement(), server, port, secret));
}
private void startConnection(final VpnConnection connection) {
// Replace any existing connecting thread with the new one.
final Thread thread = new Thread(connection, "ToyVpnThread");
setConnectingThread(thread);
// Handler to mark as connected once onEstablish is called.
connection.setConfigureIntent(mConfigureIntent);
connection.setOnEstablishListener(new VpnConnection.OnEstablishListener() {
public void onEstablish(ParcelFileDescriptor tunInterface) {
mHandler.sendEmptyMessage(R.string.connected);
mConnectingThread.compareAndSet(thread, null);
setConnection(new Connection(thread, tunInterface));
}
});
thread.start();
}
private void setConnectingThread(final Thread thread) {
final Thread oldThread = mConnectingThread.getAndSet(thread);
if (oldThread != null) {
oldThread.interrupt();
}
}
private void setConnection(final Connection connection) {
final Connection oldConnection = mConnection.getAndSet(connection);
if (oldConnection != null) {
try {
oldConnection.first.interrupt();
oldConnection.second.close();
} catch (IOException e) {
Log.e(TAG, "Closing VPN interface", e);
}
}
}
private void disconnect() {
mHandler.sendEmptyMessage(R.string.disconnected);
setConnectingThread(null);
setConnection(null);
stopForeground(true);
}
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
private void updateForegroundNotification(final int message) {
startForeground(1, new Notification.Builder(this)
.setSmallIcon(R.drawable.ic_vpn)
.setContentText(getString(message))
.setContentIntent(mConfigureIntent)
.build());
}
}
VpnConnection.java
import android.app.PendingIntent;
import android.os.Build;
import android.os.ParcelFileDescriptor;
import android.support.annotation.RequiresApi;
import android.util.Log;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.net.SocketException;
import java.nio.ByteBuffer;
import java.nio.channels.DatagramChannel;
import java.util.concurrent.TimeUnit;
/**
* Created by Yaksh on 11/3/2017.
*/
public class VpnConnection implements Runnable{
/**
* Callback interface to let the {@link MyVpnService} know about new connections
* and update the foreground notification with connection status.
*/
public interface OnEstablishListener {
void onEstablish(ParcelFileDescriptor tunInterface);
}
/** Maximum packet size is constrained by the MTU, which is given as a signed short. */
private static final int MAX_PACKET_SIZE = Short.MAX_VALUE;
/** Time to wait in between losing the connection and retrying. */
private static final long RECONNECT_WAIT_MS = TimeUnit.SECONDS.toMillis(3);
/** Time between keepalives if there is no traffic at the moment.
*
* TODO: don't do this; it's much better to let the connection die and then reconnect when
* necessary instead of keeping the network hardware up for hours on end in between.
**/
private static final long KEEPALIVE_INTERVAL_MS = TimeUnit.SECONDS.toMillis(15);
/** Time to wait without receiving any response before assuming the server is gone. */
private static final long RECEIVE_TIMEOUT_MS = TimeUnit.SECONDS.toMillis(20);
/**
* Time between polling the VPN interface for new traffic, since it's non-blocking.
*
* TODO: really don't do this; a blocking read on another thread is much cleaner.
*/
private static final long IDLE_INTERVAL_MS = TimeUnit.MILLISECONDS.toMillis(100);
/**
* Number of periods of length {@link #IDLE_INTERVAL_MS} to wait before declaring the handshake a
* complete and abject failure.
*
* TODO: use a higher-level protocol; hand-rolling is a fun but pointless exercise.
*/
private static final int MAX_HANDSHAKE_ATTEMPTS = 50;
private final MyVpnService mService;
private final int mConnectionId;
private final String mServerName;
private final int mServerPort;
private final byte[] mSharedSecret;
private PendingIntent mConfigureIntent;
private OnEstablishListener mOnEstablishListener;
public VpnConnection(final MyVpnService service, final int connectionId,
final String serverName, final int serverPort, final byte[] sharedSecret) {
mService = service;
mConnectionId = connectionId;
mServerName = serverName;
mServerPort= serverPort;
mSharedSecret = sharedSecret;
}
/**
* Optionally, set an intent to configure the VPN. This is {@code null} by default.
*/
public void setConfigureIntent(PendingIntent intent) {
mConfigureIntent = intent;
}
public void setOnEstablishListener(OnEstablishListener listener) {
mOnEstablishListener = listener;
}
@RequiresApi(api = Build.VERSION_CODES.KITKAT)
@Override
public void run() {
try {
Log.i(getTag(), "Starting");
// If anything needs to be obtained using the network, get it now.
// This greatly reduces the complexity of seamless handover, which
// tries to recreate the tunnel without shutting down everything.
// In this demo, all we need to know is the server address.
final SocketAddress serverAddress = new InetSocketAddress(mServerName, mServerPort);
// We try to create the tunnel several times. A better way would be to work with
// ConnectivityManager and retry only when the network is available.
// Here we just use a counter to keep things simple.
for (int attempt = 0; attempt < 10; ++attempt) {
// Reset the counter if we were connected.
if (run(serverAddress)) {
attempt = 0;
}
// Sleep for a while. This also checks if we got interrupted.
Thread.sleep(3000);
}
Log.i(getTag(), "Giving up");
} catch (IOException | InterruptedException | IllegalArgumentException e) {
Log.e(getTag(), "Connection failed, exiting", e);
}
}
@RequiresApi(api = Build.VERSION_CODES.KITKAT)
private boolean run(SocketAddress server)
throws IOException, InterruptedException, IllegalArgumentException {
ParcelFileDescriptor iface = null;
boolean connected = false;
// Create a DatagramChannel as the VPN tunnel.
try (DatagramChannel tunnel = DatagramChannel.open()) {
// Protect the tunnel before connecting to avoid loopback.
if (!mService.protect(tunnel.socket())) {
throw new IllegalStateException("Cannot protect the tunnel");
}
// Connect to the server.
tunnel.connect(server);
// For simplicity, we use the same thread for both reading and
// writing. Here we put the tunnel into non-blocking mode.
tunnel.configureBlocking(false);
// Authenticate and configure the virtual network interface.
iface = handshake(tunnel);
// Now we are connected. Set the flag.
connected = true;
// Packets to be sent are queued in this input stream.
FileInputStream in = new FileInputStream(iface.getFileDescriptor());
// Packets received need to be written to this output stream.
FileOutputStream out = new FileOutputStream(iface.getFileDescriptor());
// Allocate the buffer for a single packet.
ByteBuffer packet = ByteBuffer.allocate(MAX_PACKET_SIZE);
// Timeouts:
// - when data has not been sent in a while, send empty keepalive messages.
// - when data has not been received in a while, assume the connection is broken.
long lastSendTime = System.currentTimeMillis();
long lastReceiveTime = System.currentTimeMillis();
// We keep forwarding packets till something goes wrong.
while (true) {
// Assume that we did not make any progress in this iteration.
boolean idle = true;
// Read the outgoing packet from the input stream.
int length = in.read(packet.array());
if (length > 0) {
// Write the outgoing packet to the tunnel.
packet.limit(length);
tunnel.write(packet);
packet.clear();
// There might be more outgoing packets.
idle = false;
lastReceiveTime = System.currentTimeMillis();
}
// Read the incoming packet from the tunnel.
length = tunnel.read(packet);
if (length > 0) {
// Ignore control messages, which start with zero.
if (packet.get(0) != 0) {
// Write the incoming packet to the output stream.
out.write(packet.array(), 0, length);
}
packet.clear();
// There might be more incoming packets.
idle = false;
lastSendTime = System.currentTimeMillis();
}
// If we are idle or waiting for the network, sleep for a
// fraction of time to avoid busy looping.
if (idle) {
Thread.sleep(IDLE_INTERVAL_MS);
final long timeNow = System.currentTimeMillis();
if (lastSendTime + KEEPALIVE_INTERVAL_MS <= timeNow) {
// We are receiving for a long time but not sending.
// Send empty control messages.
packet.put((byte) 0).limit(1);
for (int i = 0; i < 3; ++i) {
packet.position(0);
tunnel.write(packet);
}
packet.clear();
lastSendTime = timeNow;
} else if (lastReceiveTime + RECEIVE_TIMEOUT_MS <= timeNow) {
// We are sending for a long time but not receiving.
throw new IllegalStateException("Timed out");
}
}
}
} catch (SocketException e) {
Log.e(getTag(), "Cannot use socket", e);
} finally {
if (iface != null) {
try {
iface.close();
} catch (IOException e) {
Log.e(getTag(), "Unable to close interface", e);
}
}
}
return connected;
}
private ParcelFileDescriptor handshake(DatagramChannel tunnel)
throws IOException, InterruptedException {
// To build a secured tunnel, we should perform mutual authentication
// and exchange session keys for encryption. To keep things simple in
// this demo, we just send the shared secret in plaintext and wait
// for the server to send the parameters.
// Allocate the buffer for handshaking. We have a hardcoded maximum
// handshake size of 1024 bytes, which should be enough for demo
// purposes.
ByteBuffer packet = ByteBuffer.allocate(1024);
// Control messages always start with zero.
packet.put((byte) 0).put(mSharedSecret).flip();
// Send the secret several times in case of packet loss.
for (int i = 0; i < 3; ++i) {
packet.position(0);
tunnel.write(packet);
}
packet.clear();
// Wait for the parameters within a limited time.
for (int i = 0; i < MAX_HANDSHAKE_ATTEMPTS; ++i) {
Thread.sleep(IDLE_INTERVAL_MS);
// Normally we should not receive random packets. Check that the first
// byte is 0 as expected.
int length = tunnel.read(packet);
if (length > 0 && packet.get(0) == 0) {
return configure(new String(packet.array(), 1, length - 1).trim());
}
}
throw new IOException("Timed out");
}
private ParcelFileDescriptor configure(String parameters) throws IllegalArgumentException {
// Configure a builder while parsing the parameters.
MyVpnService.Builder builder = mService.new Builder();
for (String parameter : parameters.split(" ")) {
String[] fields = parameter.split(",");
try {
switch (fields[0].charAt(0)) {
case 'm':
builder.setMtu(Short.parseShort(fields[1]));
break;
case 'a':
builder.addAddress(fields[1], Integer.parseInt(fields[2]));
break;
case 'r':
builder.addRoute(fields[1], Integer.parseInt(fields[2]));
break;
case 'd':
builder.addDnsServer(fields[1]);
break;
case 's':
builder.addSearchDomain(fields[1]);
break;
}
} catch (NumberFormatException e) {
throw new IllegalArgumentException("Bad parameter: " + parameter);
}
}
// Create a new interface using the builder and save the parameters.
final ParcelFileDescriptor vpnInterface;
synchronized (mService) {
vpnInterface = builder
.setSession(mServerName)
.setConfigureIntent(mConfigureIntent)
.establish();
if (mOnEstablishListener != null) {
mOnEstablishListener.onEstablish(vpnInterface);
}
}
Log.i(getTag(), "New interface: " + vpnInterface + " (" + parameters + ")");
return vpnInterface;
}
private final String getTag() {
return VpnConnection.class.getSimpleName() + "[" + mConnectionId + "]";
}
}

Android Studio: Bluetooth Low Energy BLE Advertisements

On line 370 I need a way to look for a string of 'f's in the advertisement data from a TI CC2650. I found this template online, but I'm looking for specific advertising data. Please let me know what string I need to look at to find this.
package net.jmodwyer.beacon.beaconPoC;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.DialogFragment;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentSender;
import android.content.SharedPreferences;
import android.location.Location;
import android.os.Bundle;
import android.os.RemoteException;
import android.preference.PreferenceManager;
import android.text.util.Linkify;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ScrollView;
import android.widget.Toast;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GooglePlayServicesClient;
import com.google.android.gms.location.LocationClient;
import net.jmodwyer.ibeacon.ibeaconPoC.R;
import org.altbeacon.beacon.Beacon;
import org.altbeacon.beacon.BeaconConsumer;
import org.altbeacon.beacon.BeaconManager;
import org.altbeacon.beacon.RangeNotifier;
import org.altbeacon.beacon.Region;
import org.altbeacon.beacon.utils.UrlBeaconUrlCompressor;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
/**
* Adapted from original code written by D Young of Radius Networks.
* @author dyoung, jodwyer
*
*/
public class ScanActivity extends Activity implements BeaconConsumer,
GooglePlayServicesClient.ConnectionCallbacks,
GooglePlayServicesClient.OnConnectionFailedListener {
// Constant Declaration
private static final String PREFERENCE_SCANINTERVAL = "scanInterval";
private static final String PREFERENCE_TIMESTAMP = "timestamp";
private static final String PREFERENCE_POWER = "power";
private static final String PREFERENCE_PROXIMITY = "proximity";
private static final String PREFERENCE_RSSI = "rssi";
private static final String PREFERENCE_MAJORMINOR = "majorMinor";
private static final String PREFERENCE_UUID = "uuid";
private static final String PREFERENCE_INDEX = "index";
private static final String PREFERENCE_LOCATION = "location";
private static final String PREFERENCE_REALTIME = "realTimeLog";
private static final String MODE_SCANNING = "Stop Scanning";
private static final String MODE_STOPPED = "Start Scanning";
protected static final String TAG = "ScanActivity";
/*
* Define a request code to send to Google Play services
* This code is returned in Activity.onActivityResult
*/
private final static int
CONNECTION_FAILURE_RESOLUTION_REQUEST = 9000;
private FileHelper fileHelper;
private BeaconManager beaconManager;
private Region region;
private int eventNum = 1;
// This StringBuffer will hold the scan data for any given scan.
private StringBuffer logString;
// Preferences - will actually have a boolean value when loaded.
private Boolean index;
private Boolean location;
private Boolean uuid;
private Boolean majorMinor;
private Boolean rssi;
private Boolean proximity;
private Boolean power;
private Boolean timestamp;
private String scanInterval;
// Added following a feature request from D.Schmid.
private Boolean realTimeLog;
// LocationClient for Google Play Location Services
LocationClient locationClient;
private ScrollView scroller;
private EditText editText;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_scan);
verifyBluetooth();
PreferenceManager.setDefaultValues(this, R.xml.preferences, false);
BeaconScannerApp app = (BeaconScannerApp)this.getApplication();
beaconManager = app.getBeaconManager();
//beaconManager.setForegroundScanPeriod(10);
region = app.getRegion();
beaconManager.bind(this);
locationClient = new LocationClient(this, this, this);
fileHelper = app.getFileHelper();
scroller = (ScrollView)ScanActivity.this.findViewById(R.id.scanScrollView);
editText = (EditText)ScanActivity.this.findViewById(R.id.scanText);
// Initialise scan button.
getScanButton().setText(MODE_STOPPED);
}
@Override
public void onResume() {
super.onResume();
beaconManager.bind(this);
}
@Override
public void onPause() {
super.onPause();
// Uncommenting the following line prevents a ServiceConnection leak when using the back
// arrow in the Action Bar to come out of the file list screen. Unfortunately it also kills
// background scanning, and as I have no workaround right now I'm settling for the lesser of
// two evils.
// beaconManager.unbind(this);
}
public String getCurrentLocation() {
/** Default "error" value is set for location, will be overwritten with the correct lat and
* long values if we're able to connect to location services and get a reading.
*/
String location = "Unavailable";
if (locationClient.isConnected()) {
Location currentLocation = locationClient.getLastLocation();
if (currentLocation != null) {
location = Double.toString(currentLocation.getLatitude()) + "," +
Double.toString(currentLocation.getLongitude());
}
}
return location;
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.main_activity_actions, menu);
return super.onCreateOptionsMenu(menu);
}
@Override
public void onBeaconServiceConnect() {}
/**
*
* @param view
*/
public void onScanButtonClicked(View view) {
toggleScanState();
}
// Handle the user selecting "Settings" from the action bar.
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.Settings:
// Show settings
Intent api = new Intent(this, AppPreferenceActivity.class);
startActivityForResult(api, 0);
return true;
case R.id.action_listfiles:
// Launch list files activity
Intent fhi = new Intent(this, FileHandlerActivity.class);
startActivity(fhi);
return true;
default:
return super.onOptionsItemSelected(item);
}
}
/**
* Start and stop scanning, and toggle button label appropriately.
*/
private void toggleScanState() {
Button scanButton = getScanButton();
String currentState = scanButton.getText().toString();
if (currentState.equals(MODE_SCANNING)) {
stopScanning(scanButton);
} else {
startScanning(scanButton);
}
}
/**
* start looking for beacons.
*/
private void startScanning(Button scanButton) {
// Set UI elements to the correct state.
scanButton.setText(MODE_SCANNING);
((EditText)findViewById(R.id.scanText)).setText("");
// Reset event counter
eventNum = 1;
// Get current values for logging preferences
SharedPreferences sharedPrefs = PreferenceManager.getDefaultSharedPreferences(this);
HashMap <String, Object> prefs = new HashMap<String, Object>();
prefs.putAll(sharedPrefs.getAll());
index = (Boolean)prefs.get(PREFERENCE_INDEX);
location = (Boolean)prefs.get(PREFERENCE_LOCATION);
uuid = (Boolean)prefs.get(PREFERENCE_UUID);
majorMinor = (Boolean)prefs.get(PREFERENCE_MAJORMINOR);
rssi = (Boolean)prefs.get(PREFERENCE_RSSI);
proximity = (Boolean)prefs.get(PREFERENCE_PROXIMITY);
power = (Boolean)prefs.get(PREFERENCE_POWER);
timestamp = (Boolean)prefs.get(PREFERENCE_TIMESTAMP);
scanInterval = (String)prefs.get(PREFERENCE_SCANINTERVAL);
realTimeLog = (Boolean)prefs.get(PREFERENCE_REALTIME);
// Get current background scan interval (if specified)
if (prefs.get(PREFERENCE_SCANINTERVAL) != null) {
beaconManager.setBackgroundBetweenScanPeriod(Long.parseLong(scanInterval));
}
logToDisplay("Scanning...");
// Initialise scan log
logString = new StringBuffer();
//Start scanning again.
beaconManager.setRangeNotifier(new RangeNotifier() {
@Override
public void didRangeBeaconsInRegion(Collection<Beacon> beacons, Region region) {
if (beacons.size() > 0) {
Iterator <Beacon> beaconIterator = beacons.iterator();
while (beaconIterator.hasNext()) {
Beacon beacon = beaconIterator.next();
// Debug - logging a beacon - checking background logging is working.
System.out.println("Logging another beacon.");
logBeaconData(beacon);
}
}
}
});
try {
beaconManager.startRangingBeaconsInRegion(region);
} catch (RemoteException e) {
// TODO - OK, what now then?
}
}
/**
* Stop looking for beacons.
*/
private void stopScanning(Button scanButton) {
try {
beaconManager.stopRangingBeaconsInRegion(region);
} catch (RemoteException e) {
// TODO - OK, what now then?
}
String scanData = logString.toString();
if (scanData.length() > 0) {
// Write file
fileHelper.createFile(scanData);
// Display file created message.
Toast.makeText(getBaseContext(),
"File saved to:" + getFilesDir().getAbsolutePath(),
Toast.LENGTH_SHORT).show();
scanButton.setText(MODE_STOPPED);
} else {
// We didn't get any data, so there's no point writing an empty file.
Toast.makeText(getBaseContext(),
"No data captured during scan, output file will not be created.",
Toast.LENGTH_SHORT).show();
scanButton.setText(MODE_STOPPED);
}
}
/**
*
* @return reference to the start/stop scanning button
*/
private Button getScanButton() {
return (Button)findViewById(R.id.scanButton);
}
/**
*
* @param beacon The detected beacon
*/
private void logBeaconData(Beacon beacon) {
StringBuilder scanString = new StringBuilder();
if (index) {
scanString.append(eventNum++);
}
if (beacon.getServiceUuid() == 0xfeaa) {
if (beacon.getBeaconTypeCode() == 0x00) {
scanString.append(" Eddystone-UID -> ");
scanString.append(" Namespace : ").append(beacon.getId1());
scanString.append(" Identifier : ").append(beacon.getId2());
logEddystoneTelemetry(scanString, beacon);
} else if (beacon.getBeaconTypeCode() == 0x10) {
String url = UrlBeaconUrlCompressor.uncompress(beacon.getId1().toByteArray());
scanString.append(" Eddystone-URL -> " + url);
} else if (beacon.getBeaconTypeCode() == 0x20) {
scanString.append(" Eddystone-TLM -> ");
logEddystoneTelemetry(scanString, beacon);
}
} else {
// Just an old fashioned iBeacon or AltBeacon...
logGenericBeacon(scanString, beacon);
}
logToDisplay(scanString.toString());
scanString.append("\n");
// Code added following a feature request by D.Schmid - writes a single entry to a file
// every time a beacon is detected, the file will only ever have one entry as it will be
// recreated on each call to this method.
// Get current background scan interval (if specified)
if (realTimeLog) {
// We're in realtime logging mode, create a new log file containing only this entry.
fileHelper.createFile(scanString.toString(), "realtimelog.txt");
}
logString.append(scanString.toString());
}
/**
* Logs iBeacon & AltBeacon data.
*/
private void logGenericBeacon(StringBuilder scanString, Beacon beacon) {
// Comment stuff out for whatever reason
/*
if (location) {
scanString.append(" Location: ").append(getCurrentLocation()).append(" ");
}
*/
if (uuid) {
scanString.append(" UUID: ").append(beacon.getId1());
if (beacon.getId1().equals("ffffffff-ffff-ffff-ffff-ffffffffffff ")){
scanString.append("WE DID IT!!!!!!!!!!!");
}else{
scanString.append(" WE DID NOT DO IT =( ");
}
/*
if ((beacon.getId1()).equals ("f")){
scanString.append("WE DID IT!!!!!!!!!!!");
}else{
scanString.append(" WE DID NOT DO IT!!!!!!!!!!! ");
}
*/
}
// Making if statements to test for advertising data
/*
if (majorMinor) {
scanString.append(" Maj. Mnr.: ");
if (beacon.getId2() != null) {
scanString.append(beacon.getId2());
}
scanString.append("-");
if (beacon.getId3() != null) {
scanString.append(beacon.getId3());
}
}
if (rssi) {
scanString.append(" RSSI: ").append(beacon.getRssi());
}
if (proximity) {
scanString.append(" Proximity: ").append(BeaconHelper.getProximityString(beacon.getDistance()));
}
if (power) {
scanString.append(" Power: ").append(beacon.getTxPower());
}
if (timestamp) {
scanString.append(" Timestamp: ").append(BeaconHelper.getCurrentTimeStamp());
} */
}
private void logEddystoneTelemetry(StringBuilder scanString, Beacon beacon) {
// Do we have telemetry data?
if (beacon.getExtraDataFields().size() > 0) {
long telemetryVersion = beacon.getExtraDataFields().get(0);
long batteryMilliVolts = beacon.getExtraDataFields().get(1);
long pduCount = beacon.getExtraDataFields().get(3);
long uptime = beacon.getExtraDataFields().get(4);
scanString.append(" Telemetry version : " + telemetryVersion);
scanString.append(" Uptime (sec) : " + uptime);
scanString.append(" Battery level (mv) " + batteryMilliVolts);
scanString.append(" Tx count: " + pduCount);
}
}
/**
*
* @param line
*/
private void logToDisplay(final String line) {
runOnUiThread(new Runnable() {
public void run() {
editText.append(line + "\n");
// Temp code - don't really want to do this for every line logged, will look for a
// workaround.
Linkify.addLinks(editText, Linkify.WEB_URLS);
scroller.fullScroll(View.FOCUS_DOWN);
}
});
}
private void verifyBluetooth() {
try {
if (!BeaconManager.getInstanceForApplication(this).checkAvailability()) {
final AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("Bluetooth not enabled");
builder.setMessage("Please enable bluetooth in settings and restart this application.");
builder.setPositiveButton(android.R.string.ok, null);
builder.setOnDismissListener(new DialogInterface.OnDismissListener() {
@Override
public void onDismiss(DialogInterface dialog) {
finish();
System.exit(0);
}
});
builder.show();
}
}
catch (RuntimeException e) {
final AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("Bluetooth LE not available");
builder.setMessage("Sorry, this device does not support Bluetooth LE.");
builder.setPositiveButton(android.R.string.ok, null);
builder.setOnDismissListener(new DialogInterface.OnDismissListener() {
@Override
public void onDismiss(DialogInterface dialog) {
finish();
System.exit(0);
}
});
builder.show();
}
}
/* Location services code follows */
@Override
protected void onStart() {
super.onStart();
// Connect the client.
locationClient.connect();
}
@Override
protected void onStop() {
// Disconnect the client.
locationClient.disconnect();
super.onStop();
}
@Override
public void onConnected(Bundle dataBundle) {
// Uncomment the following line to display the connection status.
// Toast.makeText(this, "Connected", Toast.LENGTH_SHORT).show();
}
@Override
public void onDisconnected() {
// Display the connection status
Toast.makeText(this, "Disconnected. Please re-connect.",
Toast.LENGTH_SHORT).show();
}
@Override
public void onConnectionFailed(ConnectionResult connectionResult) {
/* Google Play services can resolve some errors it detects.
* If the error has a resolution, try sending an Intent to
* start a Google Play services activity that can resolve
* error.
*/
if (connectionResult.hasResolution()) {
try {
// Start an Activity that tries to resolve the error
connectionResult.startResolutionForResult(
this,
CONNECTION_FAILURE_RESOLUTION_REQUEST);
/*
* Thrown if Google Play services canceled the original
* PendingIntent
*/
} catch (IntentSender.SendIntentException e) {
// Log the error
e.printStackTrace();
}
} else {
/*
* If no resolution is available, display a dialog to the
* user with the error.
*/
Toast.makeText(getBaseContext(),
"Location services not available, cannot track device location.",
Toast.LENGTH_SHORT).show();
}
}
// Define a DialogFragment that displays the error dialog
public static class ErrorDialogFragment extends DialogFragment {
// Global field to contain the error dialog
private Dialog mDialog;
// Default constructor. Sets the dialog field to null
public ErrorDialogFragment() {
super();
mDialog = null;
}
// Set the dialog to display
public void setDialog(Dialog dialog) {
mDialog = dialog;
}
// Return a Dialog to the DialogFragment.
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
return mDialog;
}
}
/*
* Handle results returned to the FragmentActivity
* by Google Play services
*/
@Override
protected void onActivityResult(
int requestCode, int resultCode, Intent data) {
// Decide what to do based on the original request code
switch (requestCode) {
case CONNECTION_FAILURE_RESOLUTION_REQUEST :
/*
* If the result code is Activity.RESULT_OK, try
* to connect again
*/
switch (resultCode) {
case Activity.RESULT_OK :
/*
* TODO - Try the request again
*/
break;
}
}
}
}
You need to convert the identifier to a String first with .toString():
if (uuid) {
scanString.append(" UUID: ").append(beacon.getId1());
// Making if statements to look for all f's in advertising data
if (beacon.getId1().toString().equals(Str1)){
scanString.append("\nAlarm ACTIVATED\n");
}else{
scanString.append("\n Alarm NOT active\n");
}
}
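Here Str1 is assumed to be the all-'f' identifier string you are looking for, declared somewhere in scope, for example:
// Assumption: the CC2650 advertises an identifier that is all 'f's.
// Note: no trailing space, unlike the literal used earlier in the question.
String Str1 = "ffffffff-ffff-ffff-ffff-ffffffffffff";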

Pocketsphinx simply listening for one command

I have successfully created a speech-to-text app which can open activities based on recognised keywords. At the moment I need to click a button to start the speech-to-text feature; I no longer want to press the button manually and instead want a continuous listener to trigger it.
I have been looking into pocketsphinx and have added the keyphrase "listen to command". Once this keyphrase is heard, I want the button to be pressed automatically, followed by a sequence of commands I can add in the code. I do not need offline speech to text, so I am using Google's speech-to-text option, but I am planning to use pocketsphinx to trigger Google's speech-to-text feature.
Below is most of the code that I partly modified from pocketsphinx:
public class PocketSphinxActivity extends Activity implements RecognitionListener {
/* Named searches allow to quickly reconfigure the decoder */
private static final String KWS_SEARCH = "wakeup";
private static final String FORECAST_SEARCH = "forecast";
private static final String DIGITS_SEARCH = "digits";
private static final String PHONE_SEARCH = "phones";
private static final String MENU_SEARCH = "menu";
/* Keyword we are looking for to activate menu */
private static final String KEYPHRASE = "listen to command"; //adjust this keyphrase!
private SpeechRecognizer recognizer;
private HashMap < String, Integer > captions;
@Override
public void onCreate(Bundle state) {
super.onCreate(state);
// Prepare the data for UI
captions = new HashMap < String, Integer > ();
captions.put(KWS_SEARCH, R.string.kws_caption);
captions.put(MENU_SEARCH, R.string.menu_caption);
captions.put(DIGITS_SEARCH, R.string.digits_caption);
captions.put(PHONE_SEARCH, R.string.phone_caption);
captions.put(FORECAST_SEARCH, R.string.forecast_caption);
setContentView(R.layout.main);
((TextView) findViewById(R.id.caption_text))
.setText("Preparing the recognizer");
// Recognizer initialization is time-consuming and involves IO,
// so we execute it in an async task
new AsyncTask < Void, Void, Exception > () {
@Override
protected Exception doInBackground(Void...params) {
try {
Assets assets = new Assets(PocketSphinxActivity.this);
File assetDir = assets.syncAssets();
setupRecognizer(assetDir);
} catch (IOException e) {
return e;
}
return null;
}
@Override
protected void onPostExecute(Exception result) {
if (result != null) {
((TextView) findViewById(R.id.caption_text))
.setText("Failed to init recognizer " + result);
} else {
switchSearch(KWS_SEARCH);
}
}
}.execute();
}
@Override
public void onDestroy() {
super.onDestroy();
recognizer.cancel();
recognizer.shutdown();
}
/**
* In partial result we get quick updates about current hypothesis. In
* keyword spotting mode we can react here, in other modes we need to wait
* for final result in onResult.
*/
@Override
public void onPartialResult(Hypothesis hypothesis) {
if (hypothesis == null)
return;
String text = hypothesis.getHypstr();
if (text.equals(KEYPHRASE))
switchSearch(MENU_SEARCH);
else if (text.equals(DIGITS_SEARCH))
switchSearch(DIGITS_SEARCH);
else if (text.equals(PHONE_SEARCH))
switchSearch(PHONE_SEARCH);
else if (text.equals(FORECAST_SEARCH))
switchSearch(FORECAST_SEARCH);
else
((TextView) findViewById(R.id.result_text)).setText(text);
}
/**
* This callback is called when we stop the recognizer.
*/
@Override
public void onResult(Hypothesis hypothesis) {
((TextView) findViewById(R.id.result_text)).setText("");
if (hypothesis != null) {
String text = hypothesis.getHypstr();
makeText(getApplicationContext(), text, Toast.LENGTH_SHORT).show();
}
}
@Override
public void onBeginningOfSpeech() {}
/**
* We stop recognizer here to get a final result
*/
@Override
public void onEndOfSpeech() {
if (!recognizer.getSearchName().equals(KWS_SEARCH))
switchSearch(KWS_SEARCH);
}
private void switchSearch(String searchName) {
recognizer.stop();
// If we are not spotting, start listening with timeout (10000 ms or 10 seconds).
if (searchName.equals(KWS_SEARCH))
recognizer.startListening(searchName);
else
recognizer.startListening(searchName, 10000);
String caption = getResources().getString(captions.get(searchName));
((TextView) findViewById(R.id.caption_text)).setText(caption);
}
private void setupRecognizer(File assetsDir) throws IOException {
// The recognizer can be configured to perform multiple searches
// of different kind and switch between them
recognizer = defaultSetup()
.setAcousticModel(new File(assetsDir, "en-us-ptm"))
.setDictionary(new File(assetsDir, "cmudict-en-us.dict"))
// To disable logging of raw audio comment out this call (takes a lot of space on the device)
.setRawLogDir(assetsDir)
// Threshold to tune for keyphrase to balance between false alarms and misses
.setKeywordThreshold(1e-45f)
// Use context-independent phonetic search, context-dependent is too slow for mobile
.setBoolean("-allphone_ci", true)
.getRecognizer();
recognizer.addListener(this);
/** In your application you might not need to add all those searches.
* They are added here for demonstration. You can leave just one.
*/
// Create keyword-activation search.
recognizer.addKeyphraseSearch(KWS_SEARCH, KEYPHRASE);
// Create grammar-based search for selection between demos
File menuGrammar = new File(assetsDir, "menu.gram");
recognizer.addGrammarSearch(MENU_SEARCH, menuGrammar);
// Create grammar-based search for digit recognition
File digitsGrammar = new File(assetsDir, "digits.gram");
recognizer.addGrammarSearch(DIGITS_SEARCH, digitsGrammar);
// Create language model search
File languageModel = new File(assetsDir, "weather.dmp");
recognizer.addNgramSearch(FORECAST_SEARCH, languageModel);
// Phonetic search
File phoneticModel = new File(assetsDir, "en-phone.dmp");
recognizer.addAllphoneSearch(PHONE_SEARCH, phoneticModel);
}
@Override
public void onError(Exception error) {
((TextView) findViewById(R.id.caption_text)).setText(error.getMessage());
}
@Override
public void onTimeout() {
switchSearch(KWS_SEARCH);
}
I am not interested in most of the commands, I just simply want the app to listen as soon as it is opened (which it currently does) and as soon as a person says Listen to command it then presses a button which I have set an id of bVoice
If someone could kindly help me modify the above code so that I know what to delete and what to add, that would be greatly appreciated. Also note that if there is a much easier way of doing this, please feel free to share it (having a custom key phrase isn't strictly necessary either).
Something like this should work. You do not need to "push the button" at all; you can call the button's callback directly and perform the actual steps you are interested in.
public class PocketSphinxActivity extends Activity implements RecognitionListener {
private static final String KWS_SEARCH = "wakeup";
private static final String KEYPHRASE = "listen to command"; //adjust this keyphrase!
private SpeechRecognizer recognizer;
@Override
public void onCreate(Bundle state) {
super.onCreate(state);
setContentView(R.layout.main);
((TextView) findViewById(R.id.caption_text))
.setText("Preparing the recognizer");
new AsyncTask<Void, Void, Exception>() {
@Override
protected Exception doInBackground(Void... params) {
try {
Assets assets = new Assets(PocketSphinxActivity.this);
File assetDir = assets.syncAssets();
setupRecognizer(assetDir);
} catch (IOException e) {
return e;
}
return null;
}
@Override
protected void onPostExecute(Exception result) {
if (result != null) {
((TextView) findViewById(R.id.caption_text))
.setText("Failed to init recognizer " + result);
} else {
recognizer.startListening(KWS_SEARCH);
}
}
}.execute();
}
@Override
public void onDestroy() {
super.onDestroy();
recognizer.cancel();
recognizer.shutdown();
}
/**
* In partial result we get quick updates about current hypothesis. In
* keyword spotting mode we can react here, in other modes we need to wait
* for final result in onResult.
*/
@Override
public void onPartialResult(Hypothesis hypothesis) {
if (hypothesis == null)
return;
String text = hypothesis.getHypstr();
if (text.equals(KEYPHRASE)) {
recognizer.cancel();
performAction(); // <- You have to implement this
recognizer.startListening(KWS_SEARCH);
}
}
@Override
public void onResult(Hypothesis hypothesis) {}
@Override
public void onBeginningOfSpeech() {}
@Override
public void onEndOfSpeech() {}
@Override
public void onTimeout() {}
private void setupRecognizer(File assetsDir) throws IOException {
// The recognizer can be configured to perform multiple searches
// of different kind and switch between them
recognizer = defaultSetup()
.setAcousticModel(new File(assetsDir, "en-us-ptm"))
.setDictionary(new File(assetsDir, "cmudict-en-us.dict"))
.getRecognizer();
recognizer.addListener(this);
// Create keyword-activation search.
recognizer.addKeyphraseSearch(KWS_SEARCH, KEYPHRASE);
}
@Override
public void onError(Exception error) {
((TextView) findViewById(R.id.caption_text)).setText(error.getMessage());
}
public void performAction() {
// do here whatever you want
}
}
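To connect this back to the bVoice button mentioned in the question: rather than leaving performAction() empty, a minimal sketch (assuming bVoice is the id of a clickable View in your layout) could simply trigger that button's existing click handler:
public void performAction() {
    // Hypothetical body: programmatically "press" the view with id bVoice,
    // so its existing OnClickListener runs exactly as if the user had tapped it.
    findViewById(R.id.bVoice).performClick();
}
The demo updates its TextViews directly from these recognizer callbacks, which suggests they arrive on the main thread; if in doubt, wrap the call in runOnUiThread().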

Recognizing multiple keywords using PocketSphinx

I've installed the PocketSphinx demo and it works fine under Ubuntu and Eclipse, but despite trying I can't work out how I would add recognition of multiple words.
All I want is for the code to recognize single words, which I can then switch() within the code, e.g. "up", "down", "left", "right". I don't want to recognize sentences, just single words.
I would be grateful for any help on this. I have spotted other users having similar problems, but nobody seems to have the answer so far.
One thing that baffles me is why we need the "wakeup" constant at all:
private static final String KWS_SEARCH = "wakeup";
private static final String KEYPHRASE = "oh mighty computer";
.
.
.
recognizer.addKeyphraseSearch(KWS_SEARCH, KEYPHRASE);
What has wakeup got to do with anything?
I have made some progress: using addGrammarSearch I am able to use a .gram file to list my words (e.g. up, down, left, right, forwards, backwards), which seems to work well as long as I say only those particular words. However, any other word causes the system to match what was said to the "nearest" word in the grammar. Ideally I don't want recognition to occur at all if the words spoken are not in the .gram file...
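For reference, the file passed to addGrammarSearch is expected to be a JSGF grammar rather than a plain word list; a minimal sketch of such a grammar (the grammar and rule names here are only illustrative) might look like:
#JSGF V1.0;
grammar commands;
public <command> = up | down | left | right | forwards | backwards;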
Thanks to Nikolay's tip (see his answer above), I have developed the following code which works fine, and does not recognize words unless they're on the list. You can copy and paste this directly over the main class in the PocketSphinxDemo code:
public class PocketSphinxActivity extends Activity implements RecognitionListener
{
private static final String DIGITS_SEARCH = "digits";
private SpeechRecognizer recognizer;
@Override
public void onCreate(Bundle state)
{
super.onCreate(state);
setContentView(R.layout.main);
((TextView) findViewById(R.id.caption_text)).setText("Preparing the recognizer");
try
{
Assets assets = new Assets(PocketSphinxActivity.this);
File assetDir = assets.syncAssets();
setupRecognizer(assetDir);
}
catch (IOException e)
{
// oops
}
((TextView) findViewById(R.id.caption_text)).setText("Say up, down, left, right, forwards, backwards");
reset();
}
@Override
public void onPartialResult(Hypothesis hypothesis)
{
}
@Override
public void onResult(Hypothesis hypothesis)
{
((TextView) findViewById(R.id.result_text)).setText("");
if (hypothesis != null)
{
String text = hypothesis.getHypstr();
makeText(getApplicationContext(), text, Toast.LENGTH_SHORT).show();
}
}
@Override
public void onBeginningOfSpeech()
{
}
@Override
public void onEndOfSpeech()
{
reset();
}
private void setupRecognizer(File assetsDir)
{
File modelsDir = new File(assetsDir, "models");
recognizer = defaultSetup().setAcousticModel(new File(modelsDir, "hmm/en-us-semi"))
.setDictionary(new File(modelsDir, "dict/cmu07a.dic"))
.setRawLogDir(assetsDir).setKeywordThreshold(1e-20f)
.getRecognizer();
recognizer.addListener(this);
File digitsGrammar = new File(modelsDir, "grammar/digits.gram");
recognizer.addKeywordSearch(DIGITS_SEARCH, digitsGrammar);
}
private void reset()
{
recognizer.stop();
recognizer.startListening(DIGITS_SEARCH);
}
}
Your digits.gram file should be something like:
up /1e-1/
down /1e-1/
left /1e-1/
right /1e-1/
forwards /1e-1/
backwards /1e-1/
You should experiment with the thresholds between the slashes (//) for performance; 1e-1 represents 0.1, and I think the maximum is 1.0.
And it's 5.30pm so I can stop working now. Result.
You can use addKeywordSearch, which takes a file with keyphrases, one phrase per line, with a threshold for each phrase between slashes, for example:
up /1.0/
down /1.0/
left /1.0/
right /1.0/
forwards /1e-1/
The threshold must be selected to avoid false alarms.
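As a rough rule of thumb from the CMUSphinx documentation, shorter keyphrases need larger thresholds (closer to 1) to keep false alarms down, while longer keyphrases can use much smaller values (down to around 1e-50); the numbers below are only illustrative and still need tuning on test data:
up /1e-1/
listen to command /1e-25/
oh mighty computer /1e-40/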
I am working on updating Antinous' amendment to the PocketSphinx demo so that it runs in Android Studio. This is what I have so far:
//Note: change MainActivity to PocketSphinxActivity for demo use...
public class MainActivity extends Activity implements RecognitionListener {
private static final String DIGITS_SEARCH = "digits";
private SpeechRecognizer recognizer;
/* Used to handle permission request */
private static final int PERMISSIONS_REQUEST_RECORD_AUDIO = 1;
@Override
public void onCreate(Bundle state) {
super.onCreate(state);
setContentView(R.layout.main);
((TextView) findViewById(R.id.caption_text))
.setText("Preparing the recognizer");
// Check if user has given permission to record audio
int permissionCheck = ContextCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.RECORD_AUDIO);
if (permissionCheck != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.RECORD_AUDIO}, PERMISSIONS_REQUEST_RECORD_AUDIO);
return;
}
new AsyncTask<Void, Void, Exception>() {
@Override
protected Exception doInBackground(Void... params) {
try {
Assets assets = new Assets(MainActivity.this);
File assetDir = assets.syncAssets();
setupRecognizer(assetDir);
} catch (IOException e) {
return e;
}
return null;
}
@Override
protected void onPostExecute(Exception result) {
if (result != null) {
((TextView) findViewById(R.id.caption_text))
.setText("Failed to init recognizer " + result);
} else {
reset();
}
}
}.execute();
((TextView) findViewById(R.id.caption_text)).setText("Say one, two, three, four, five, six...");
}
/**
* In partial result we get quick updates about current hypothesis. In
* keyword spotting mode we can react here, in other modes we need to wait
* for final result in onResult.
*/
@Override
public void onPartialResult(Hypothesis hypothesis) {
if (hypothesis == null) {
return;
}
if (recognizer != null) {
//recognizer.rapidSphinxPartialResult(hypothesis.getHypstr());
String text = hypothesis.getHypstr();
if (text.equals(DIGITS_SEARCH)) {
recognizer.cancel();
performAction();
recognizer.startListening(DIGITS_SEARCH);
} else {
//Toast.makeText(getApplicationContext(), "Partial result = " + text, Toast.LENGTH_SHORT).show();
}
}
}
@Override
public void onResult(Hypothesis hypothesis) {
((TextView) findViewById(R.id.result_text)).setText("");
if (hypothesis != null) {
String text = hypothesis.getHypstr();
makeText(getApplicationContext(), "Hypothesis: " + text, Toast.LENGTH_SHORT).show();
} else {
makeText(getApplicationContext(), "hypothesis = null", Toast.LENGTH_SHORT).show();
}
}
@Override
public void onDestroy() {
super.onDestroy();
recognizer.cancel();
recognizer.shutdown();
}
@Override
public void onBeginningOfSpeech() {
}
@Override
public void onEndOfSpeech() {
reset();
}
@Override
public void onTimeout() {
}
private void setupRecognizer(File assetsDir) throws IOException {
// The recognizer can be configured to perform multiple searches
// of different kind and switch between them
recognizer = defaultSetup()
.setAcousticModel(new File(assetsDir, "en-us-ptm"))
.setDictionary(new File(assetsDir, "cmudict-en-us.dict"))
// .setRawLogDir(assetsDir).setKeywordThreshold(1e-20f)
.getRecognizer();
recognizer.addListener(this);
File digitsGrammar = new File(assetsDir, "digits.gram");
recognizer.addKeywordSearch(DIGITS_SEARCH, digitsGrammar);
}
private void reset(){
recognizer.stop();
recognizer.startListening(DIGITS_SEARCH);
}
@Override
public void onError(Exception error) {
((TextView) findViewById(R.id.caption_text)).setText(error.getMessage());
}
public void performAction() {
// do here whatever you want
makeText(getApplicationContext(), "performAction done... ", Toast.LENGTH_SHORT).show();
}
}
Caveat emptor: this is a work in progress. Check back later. Suggestions would be appreciated.
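One suggestion for the work in progress above: when the RECORD_AUDIO permission has not been granted yet, onCreate() returns after requesting it and the recognizer is never initialized once the user accepts. A sketch of the permission callback that restarts setup (runRecognizerSetup() here is a hypothetical helper wrapping the AsyncTask from onCreate(); it is not part of the original post):
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    if (requestCode == PERMISSIONS_REQUEST_RECORD_AUDIO) {
        if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
            runRecognizerSetup(); // hypothetical helper containing the AsyncTask shown in onCreate()
        } else {
            finish(); // no microphone access, so the recognizer cannot work
        }
    }
}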
