Multiple Voice Inputs on a screen - android

I have an activity. When the user touches the activity's layout, I want the app to ask the user for five pieces of information. To accomplish this, a series of five voice input prompts should come up, one after another. Below is the code I have for this:
package com.example.shivamgandhi.gyrosafe;
import android.content.ActivityNotFoundException;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.speech.RecognizerIntent;
import android.support.v7.app.AppCompatActivity;
import android.view.MotionEvent;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;
import android.widget.RelativeLayout;
import java.util.ArrayList;
import java.util.Locale;
public class Memory_Test1_Activity extends AppCompatActivity implements View.OnClickListener, View.OnTouchListener {
EditText ed23, ed24, ed25, ed26, ed27;
private final int REQ_CODE_SPEECH_INPUT_TOWN = 100;
private final int REQ_CODE_SPEECH_INPUT_WIN = 101;
private final int REQ_CODE_SPEECH_INPUT_MONTH = 102;
private final int REQ_CODE_SPEECH_INPUT_DAY = 103;
private final int REQ_CODE_SPEECH_INPUT_TEAM = 104;
int n = 1;
Button btnarray[] = new Button[n];
public static final String MyPREFERENCES = "MyPrefs";
SharedPreferences sharedpreferences;
RelativeLayout RelativeLayout;
int count = 0;
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.memory_test1);
Intent STSintent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
btnarray[0] = (Button)findViewById(R.id.button8);
sharedpreferences = this.getSharedPreferences(MyPREFERENCES, Context.MODE_PRIVATE);
ed23 = (EditText)findViewById(R.id.editText23);
ed24 = (EditText)findViewById(R.id.editText24);
ed25 = (EditText)findViewById(R.id.editText25);
ed26 = (EditText)findViewById(R.id.editText26);
ed27 = (EditText)findViewById(R.id.editText27);
RelativeLayout = (RelativeLayout)findViewById(R.id.RelativeLayout);
for(int i = 0; i <n; i++){
btnarray[i].setOnClickListener(this);
}
RelativeLayout.setOnTouchListener(this);
}
private void promptSpeechInput_town() {
Intent STSintent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
STSintent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
STSintent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
STSintent.putExtra(RecognizerIntent.EXTRA_PROMPT,
getString(R.string.speech_prompt));
try {
startActivityForResult(STSintent, REQ_CODE_SPEECH_INPUT_TOWN);
}
catch (ActivityNotFoundException a) {
Toast.makeText(getApplicationContext(),
getString(R.string.speech_not_supported),
Toast.LENGTH_SHORT).show();
}
}
private void promptSpeechInput_win() {
Intent STSintent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
STSintent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
STSintent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
STSintent.putExtra(RecognizerIntent.EXTRA_PROMPT,
getString(R.string.speech_prompt));
try {
startActivityForResult(STSintent, REQ_CODE_SPEECH_INPUT_WIN);
}
catch (ActivityNotFoundException a) {
Toast.makeText(getApplicationContext(),
getString(R.string.speech_not_supported),
Toast.LENGTH_SHORT).show();
}
}
private void promptSpeechInput_month() {
Intent STSintent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
STSintent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
STSintent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
STSintent.putExtra(RecognizerIntent.EXTRA_PROMPT,
getString(R.string.speech_prompt));
try {
startActivityForResult(STSintent, REQ_CODE_SPEECH_INPUT_MONTH);
}
catch (ActivityNotFoundException a) {
Toast.makeText(getApplicationContext(),
getString(R.string.speech_not_supported),
Toast.LENGTH_SHORT).show();
}
}
private void promptSpeechInput_day() {
Intent STSintent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
STSintent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
STSintent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
STSintent.putExtra(RecognizerIntent.EXTRA_PROMPT,
getString(R.string.speech_prompt));
try {
startActivityForResult(STSintent, REQ_CODE_SPEECH_INPUT_DAY);
}
catch (ActivityNotFoundException a) {
Toast.makeText(getApplicationContext(),
getString(R.string.speech_not_supported),
Toast.LENGTH_SHORT).show();
}
}
private void promptSpeechInput_team() {
Intent STSintent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
STSintent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
STSintent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
STSintent.putExtra(RecognizerIntent.EXTRA_PROMPT,
getString(R.string.speech_prompt));
try {
startActivityForResult(STSintent, REQ_CODE_SPEECH_INPUT_TEAM);
}
catch (ActivityNotFoundException a) {
Toast.makeText(getApplicationContext(),
getString(R.string.speech_not_supported),
Toast.LENGTH_SHORT).show();
}
}
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
switch (requestCode) {
case REQ_CODE_SPEECH_INPUT_TOWN: {
if (resultCode == RESULT_OK && null != data) {
ArrayList<String> result_twn = data
.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
ed23.setText(result_twn.get(0));
promptSpeechInput_win();
}
}
case REQ_CODE_SPEECH_INPUT_DAY:{
if (resultCode == RESULT_OK && null != data) {
ArrayList<String> result_day = data
.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
ed26.setText(result_day.get(0));
promptSpeechInput_team();
}
}
case REQ_CODE_SPEECH_INPUT_WIN:{
if (resultCode == RESULT_OK && null != data) {
ArrayList<String> result_win = data
.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
ed24.setText(result_win.get(0));
promptSpeechInput_month();
}
}
case REQ_CODE_SPEECH_INPUT_MONTH:{
if (resultCode == RESULT_OK && null != data) {
ArrayList<String> result_month = data
.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
ed25.setText(result_month.get(0));
promptSpeechInput_day();
}
}
case REQ_CODE_SPEECH_INPUT_TEAM:{
if (resultCode == RESULT_OK && null != data) {
ArrayList<String> result_team = data
.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
ed27.setText(result_team.get(0));
}
}
}
}
@Override
public boolean onTouch(View v, MotionEvent event){
if(v == RelativeLayout && count == 0 ){
promptSpeechInput_town();
count = 1;
return true;
}
else{
return false;
}
}
The problem I am facing is that when I touch the layout, only one of the speech prompts appears, promptSpeechInput_team. How can I make it so that each of the prompts is called?
Edit: I now have each prompt in onActivityResult calling the next one. However, the voice input still keeps going indefinitely.

You need to call promptSpeechInput_win() in the onActivityResult case for promptSpeechInput_town, and so on. Only one voice input can be acquired from the user at a time, so you should initiate the next prompt in the onActivityResult of the previous voice request.
You also need to break out of each switch case to avoid all of the cases being executed every time; see the sketch below.
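A minimal sketch of how the corrected onActivityResult could look, assuming the same request codes, prompt methods and EditText fields as in your code; each case fills its field, starts the next prompt, and ends with break so the remaining cases are not executed as well:
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (resultCode != RESULT_OK || data == null) {
        return; // the user cancelled or the recognizer returned nothing
    }
    ArrayList<String> results = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
    switch (requestCode) {
        case REQ_CODE_SPEECH_INPUT_TOWN:
            ed23.setText(results.get(0));
            promptSpeechInput_win();   // chain to the next prompt
            break;                     // break stops fall-through into the other cases
        case REQ_CODE_SPEECH_INPUT_WIN:
            ed24.setText(results.get(0));
            promptSpeechInput_month();
            break;
        case REQ_CODE_SPEECH_INPUT_MONTH:
            ed25.setText(results.get(0));
            promptSpeechInput_day();
            break;
        case REQ_CODE_SPEECH_INPUT_DAY:
            ed26.setText(results.get(0));
            promptSpeechInput_team();
            break;
        case REQ_CODE_SPEECH_INPUT_TEAM:
            ed27.setText(results.get(0)); // last prompt, nothing further to start
            break;
    }
}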

Related

How to get past information while converting speech to text in android?

I am developing an Android app and want to convert speech into text, so I am using the built-in Google speech input activity. I need the past results as well, but the text continuously gets cleared and I only get the current response. How do I handle this the same way as the Google voice keyboard, where what I say is appended to the current String instead of clearing it?
MainActivity.java
public class MainActivity extends AppCompatActivity
{
private EditText txtSpeechInput;
private ImageButton btnSpeak;
private final int REQ_CODE_SPEECH_INPUT = 100;
@Override
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
txtSpeechInput = findViewById(R.id.txtSpeechInput);
btnSpeak = (ImageButton) findViewById(R.id.btnSpeak);
btnSpeak.setOnClickListener(new View.OnClickListener()
{
@Override
public void onClick(View v)
{
promptSpeechInput();
}
});
}
private void promptSpeechInput()
{
Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
intent.putExtra(RecognizerIntent.EXTRA_PROMPT,
getString(R.string.speech_prompt));
intent.putExtra(RecognizerIntent.EXTRA_SPEECH_INPUT_MINIMUM_LENGTH_MILLIS, 20000000);
try
{
startActivityForResult(intent, REQ_CODE_SPEECH_INPUT);
}
catch (ActivityNotFoundException a)
{
Toast.makeText(getApplicationContext(),
getString(R.string.speech_not_supported),
Toast.LENGTH_SHORT).show();
}
}
protected void onActivityResult(int requestCode, int resultCode, Intent data)
{
super.onActivityResult(requestCode, resultCode, data);
switch (requestCode)
{
case REQ_CODE_SPEECH_INPUT:
{
if (resultCode == RESULT_OK && null != data)
{
final ArrayList<String> result= data
.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
txtSpeechInput.setText(result.get(0));
}
break;
}
}
}
}
If you only want to save one previously detected String, you need to make a global String variable and store the value from the results list in that variable (save the same String as you are setting on the text view). But if you want to save all of the strings, you need to make a global String ArrayList and add all of those strings to that list. Below is the code for that.
private EditText txtSpeechInput;
private ImageButton btnSpeak;
private final int REQ_CODE_SPEECH_INPUT = 100;
private List<String> previousStringList;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
previousStringList = new ArrayList<>();
txtSpeechInput = findViewById(R.id.txtSpeechInput);
btnSpeak = (ImageButton) findViewById(R.id.btnSpeak);
btnSpeak.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
promptSpeechInput();
}
});
}
private void promptSpeechInput() {
Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
intent.putExtra(RecognizerIntent.EXTRA_PROMPT,
getString(R.string.speech_prompt));
intent.putExtra(RecognizerIntent.EXTRA_SPEECH_INPUT_MINIMUM_LENGTH_MILLIS, 20000000);
try {
startActivityForResult(intent, REQ_CODE_SPEECH_INPUT);
} catch (ActivityNotFoundException a) {
Toast.makeText(getApplicationContext(),
getString(R.string.speech_not_supported),
Toast.LENGTH_SHORT).show();
}
}
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
switch (requestCode) {
case REQ_CODE_SPEECH_INPUT: {
if (resultCode == RESULT_OK && null != data) {
final ArrayList<String> result = data
.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
txtSpeechInput.setText(result.get(0));
if (result.get(0) != null) {
previousStringList.add(result.get(0));
}
}
break;
}
}
}
Hope that helps you. If you don't understand anything, feel free to ask. If you don't want to save the same String twice (an already saved string), just replace the conditional with the line of code below:
if (result.get(0) != null && !previousStringList.contains(result.get(0))) {
previousStringList.add(result.get(0));
}
The words get stored in ArrayLists.
You can see an example of the implementation here; it works fine. The app stores the words and then also performs the requested action.
https://github.com/saumyabahu/Travel-Safe/blob/master/MainActivity.java
public class MainActivity extends AppCompatActivity {
private SpeechRecognizer speechRecognizer;
private Intent intentRecognizer;
private EditText txtSpeechInput;
private ImageButton btnSpeak;
//this is the string in which words get stored and past strings with left to right cursor.
String previous = " ";
// ArrayList result = null;
private final int REQ_CODE_SPEECH_INPUT = 100;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate( savedInstanceState );
setContentView( R.layout.activity_main );
// ActivityCompat.requestPermissions( this, new String[]{Manifest.permission.RECORD_AUDIO}, PackageManager.PERMISSION_GRANTED );
txtSpeechInput = findViewById( R.id.ed );
btnSpeak = (ImageButton) findViewById( R.id.iButton );
btnSpeak.setOnClickListener( new View.OnClickListener() {
@Override
public void onClick(View v) {
promptSpeechInput();
}
} );
}
private void promptSpeechInput() {
Intent intent = new Intent( RecognizerIntent.ACTION_RECOGNIZE_SPEECH );
intent.putExtra( RecognizerIntent.EXTRA_LANGUAGE_MODEL,
RecognizerIntent.LANGUAGE_MODEL_FREE_FORM );
intent.putExtra( RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault() );
intent.putExtra( RecognizerIntent.EXTRA_PROMPT,
getString( R.string.speech_prompt ) );
intent.putExtra( RecognizerIntent.EXTRA_SPEECH_INPUT_MINIMUM_LENGTH_MILLIS, 20000000 );
try {
startActivityForResult( intent, REQ_CODE_SPEECH_INPUT );
} catch (ActivityNotFoundException a) {
Toast.makeText( getApplicationContext(),
getString( R.string.speech_not_supported ),
Toast.LENGTH_SHORT ).show();
}
}
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult( requestCode, resultCode, data );
switch (requestCode) {
case REQ_CODE_SPEECH_INPUT: {
if (resultCode == RESULT_OK && null != data) {
final ArrayList<String> result = data
.getStringArrayListExtra( RecognizerIntent.EXTRA_RESULTS );
//this is the real problem.
txtSpeechInput.setText( previous + " " + result.get( 0 ) );
previous = txtSpeechInput.getText().toString();
txtSpeechInput.setText( previous );
}
break;
}
}
}
}

Calling another class's functions from one class on button click in Android

I am making an app with various activities, and each activity uses the camera function, which is defined in another class. I want the camera class to be called in each activity when the camera button is clicked.
This is my main class:
package com.example.ishan.complainbox;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;
import java.lang.String;
public class Crime extends MainActivity implements View.OnClickListener
{
camera cam=new camera();
EditText str,city,pn,det;
Button save,pic;
crimeDBHandler dbHandler;
@Override
protected void onCreate(Bundle savedInstanceState){
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_crime);
// Get References of Views
str = (EditText) findViewById(R.id.str);
city = (EditText) findViewById(R.id.city);
pn = (EditText) findViewById(R.id.pin);
det = (EditText) findViewById(R.id.detail);
save = (Button) findViewById(R.id.save);
pic=(Button) findViewById(R.id.uploadpic);
dbHandler = new crimeDBHandler(this, null, null, 1);
}
public void onClick(View view) {
String street = str.getText().toString();
String cty = city.getText().toString();
String pin = pn.getText().toString();
String detail = det.getText().toString();
// check if any of the fields are vacant
if(street.equals("")||cty.equals("")||pin.equals("")||detail.equals(""))
{
Toast.makeText(getApplicationContext(), "Field Vacant",
Toast.LENGTH_LONG).show();
return;
}
// check if both passwords match
else
{
// Save the Data in Database
dbHandler.insertEntry(street,cty,pin,detail);
Toast.makeText(getApplicationContext(), "Complaint Successfully Filed", Toast.LENGTH_LONG).show();
}
}
};
...and this is the camera class:
package com.example.ishan.complainbox;
/**
* Created by ishan on 13/04/2017.
*/
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.ImageView;
public class camera extends MainActivity{
private int REQUEST_CAMERA = 0, SELECT_FILE = 1;
private Button btnSelect;
private ImageView ivImage;
private String userChosenTask;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_crime);
btnSelect = (Button) findViewById(R.id.uploadpic);
btnSelect.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
selectImage();
}
});
ivImage = (ImageView) findViewById(R.id.imgView);
}
@Override
public void onRequestPermissionsResult(int requestCode, String[]
permissions, int[] grantResults) {
switch (requestCode) {
case Utility.MY_PERMISSIONS_REQUEST_READ_EXTERNAL_STORAGE:
if (grantResults.length > 0 && grantResults[0] ==
PackageManager.PERMISSION_GRANTED)
{
if(userChosenTask.equals("Take Photo"))
cameraIntent();
else if(userChosenTask.equals("Choose from Library"))
galleryIntent();
} else {
}
break;
}
}
private void selectImage() {
final CharSequence[] items = { "Take Photo", "Choose from Library",
"Cancel" };
AlertDialog.Builder builder = new AlertDialog.Builder(camera.this);
builder.setTitle("Add Photo!");
builder.setItems(items, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int item) {
boolean result=Utility.checkPermission(camera.this);
if (items[item].equals("Take Photo")) {
userChosenTask ="Take Photo";
if(result)
cameraIntent();
} else if (items[item].equals("Choose from Library")) {
userChosenTask ="Choose from Library";
if(result)
galleryIntent();
} else if (items[item].equals("Cancel")) {
dialog.dismiss();
}
}
});
builder.show();
}
private void galleryIntent()
{
Intent intent = new Intent();
intent.setType("image/*");
intent.setAction(Intent.ACTION_GET_CONTENT);
startActivityForResult(Intent.createChooser(intent, "Select File"), SELECT_FILE);
}
private void cameraIntent()
{
Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
startActivityForResult(intent, REQUEST_CAMERA);
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent
data) {
super.onActivityResult(requestCode, resultCode, data);
if (resultCode == Activity.RESULT_OK) {
if (requestCode == SELECT_FILE)
onSelectFromGalleryResult(data);
else if (requestCode == REQUEST_CAMERA)
onCaptureImageResult(data);
}
}
private void onCaptureImageResult(Intent data) {
Bitmap thumbnail = (Bitmap) data.getExtras().get("data");
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
thumbnail.compress(Bitmap.CompressFormat.JPEG, 90, bytes);
File destination = new
File(Environment.getExternalStorageDirectory(),System.currentTimeMillis() +
".jpg");
FileOutputStream fo;
try {
destination.createNewFile();
fo = new FileOutputStream(destination);
fo.write(bytes.toByteArray());
fo.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
ivImage.setImageBitmap(thumbnail);
}
@SuppressWarnings("deprecation")
private void onSelectFromGalleryResult(Intent data) {
Bitmap bm=null;
if (data != null) {
try {
bm =
MediaStore.Images.Media.getBitmap(getApplicationContext().
getContentResolver(),
data.getData());
} catch (IOException e) {
e.printStackTrace();
}
}
ivImage.setImageBitmap(bm);
}
}
This can be achieved through regular inter-activity communication mechanisms like passing intents or using broadcast receivers. I would suggest using intents; refer to this basic example from the Android docs: https://developer.android.com/training/basics/firstapp/starting-activity.html
EDIT
Response to OP's question in a comment:
You have to save the image file to the file system in your camera class and pass the file name as an extra with the intent to your Crime class; a sketch is below. Since you are dealing with storage, your app's manifest will now need additional permissions. I would recommend you go through this thread: Camera is not saving after taking picture
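A rough sketch of the idea, assuming the photo is written to destination as in your onCaptureImageResult() and using a hypothetical extra key "image_path" (pick any key name and reuse it on both sides):
// In the camera class, after the photo has been saved to 'destination':
Intent intent = new Intent(camera.this, Crime.class);
intent.putExtra("image_path", destination.getAbsolutePath()); // hypothetical key name
startActivity(intent);

// In Crime (e.g. in onCreate()), read the path back and decode the image:
String imagePath = getIntent().getStringExtra("image_path");
if (imagePath != null) {
    Bitmap photo = BitmapFactory.decodeFile(imagePath); // needs android.graphics.BitmapFactory
    // use the bitmap, for example show it in an ImageView
}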

Back to the previous activity on back pressed

I have a function which displays a FileDialog and an Intent to use Bluetooth.
But when I press the back button, it returns to the previous activity, which is visible but not clickable (like the screenshot), and I have to press the back button once again.
I tried overriding onBackPressed() { finish(); }, but nothing worked properly.
MainActivity:
...
if(item == shareMenu) {
startActivity(new Intent(getBaseContext(), ShareViaBluetoothActivity.class));
}
...
ShareViaBluetoothActivity:
import android.app.Activity;
import android.bluetooth.BluetoothAdapter;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.widget.Toast;
import java.io.File;
import java.util.List;
public class ShareViaBluetoothActivity extends Activity {
private static final int DISCOVER_DURATION = 300;
private static final int REQUEST_BLU = 1;
private FileDialog fileDialog;
public File getFile() {
return file;
}
public void setFile(File file) {
this.file = file;
}
private File file;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
File mPath = new File(Environment.getExternalStorageDirectory(), "//DIR//");
fileDialog = new FileDialog(this, mPath);
fileDialog.addFileListener(new FileDialog.FileSelectedListener() {
public void fileSelected(File file) {
Log.d(getClass().getName(), "selected file " + file.toString());
setFile(file);
sendViaBluetooth();
}
});
fileDialog.showDialog();
}
public void sendViaBluetooth() {
BluetoothAdapter btAdapter = BluetoothAdapter.getDefaultAdapter();
if(btAdapter == null) {
Toast.makeText(this, "Bluetooth is not supported on this device!", Toast.LENGTH_LONG).show();
} else {
enableBluetooth();
}
}
public void enableBluetooth() {
Intent discoveryIntent = new Intent(BluetoothAdapter.ACTION_REQUEST_DISCOVERABLE);
discoveryIntent.putExtra(BluetoothAdapter.EXTRA_DISCOVERABLE_DURATION, DISCOVER_DURATION);
startActivityForResult(discoveryIntent, REQUEST_BLU);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if(resultCode == DISCOVER_DURATION && requestCode == REQUEST_BLU) {
Intent intent = new Intent();
intent.setAction(Intent.ACTION_SEND);
intent.setType("*/*");
intent.putExtra(Intent.EXTRA_STREAM, Uri.fromFile(new File(file.toString())));
intent.setPackage("com.android.bluetooth");
PackageManager pm = getPackageManager();
List<ResolveInfo> appsList = pm.queryIntentActivities(intent, 0);
if(appsList.size() > 0) {
String packageName = null;
String className = null;
boolean found = false;
for(ResolveInfo info : appsList) {
packageName = info.activityInfo.packageName;
if(packageName.equals("com.android.bluetooth")) {
className = info.activityInfo.name;
found = true;
break;
}
}
if (!found) {
Toast.makeText(this, "Bluetooth havn't been found",
Toast.LENGTH_LONG).show();
} else {
intent.setClassName(packageName, className);
startActivity(intent);
}
}
} else {
Toast.makeText(this, "Bluetooth is cancelled", Toast.LENGTH_LONG)
.show();
}
}
}
What you described is not very complete, but I think there are a few possible causes you can check:
1. using super.onBackPressed(), or returning early from your onBackPressed() override, can make a difference (see the sketch below)
2. check whether the activity's launch mode is causing this behaviour.
But you'd better post more detailed code, then I can help you further.
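For the first point, a minimal sketch of what the override could look like in ShareViaBluetoothActivity (just an illustration, not a guaranteed fix for your layout issue):
@Override
public void onBackPressed() {
    super.onBackPressed(); // the default behaviour already finishes this activity
    // If the previous activity is still not clickable, also check in the manifest
    // that this activity is not using an unexpected launchMode (singleTask/singleInstance).
}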

Would someone please straighten me out with this Android voice recognition class?

I'm analyzing this voice recognizer class and need help understanding this line in the code. What is its role? Does it create a list of all of the activities on the Android platform loaded on the device? I especially find the ResolveInfo bit confusing...
List<ResolveInfo> activities = pm.queryIntentActivities(new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH), 0);
Here is the code used in context
package com.example.voicerecognitionactivity;
import java.util.ArrayList;
import java.util.List;
import android.app.Activity;
import android.app.SearchManager;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.os.Bundle;
import android.speech.RecognizerIntent;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.Spinner;
import android.widget.Toast;
public class VoiceRecognitionActivity extends Activity {
private static final int VOICE_RECOGNITION_REQUEST_CODE = 1001;
private EditText metTextHint;
private ListView mlvTextMatches;
private Spinner msTextMatches;
private Button mbtSpeak;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
metTextHint = (EditText) findViewById(R.id.etTextHint);
mlvTextMatches = (ListView) findViewById(R.id.lvTextMatches);
msTextMatches = (Spinner) findViewById(R.id.sNoOfMatches);
mbtSpeak = (Button) findViewById(R.id.btSpeak);
}
public void checkVoiceRecognition() {
// Check if voice recognition is present
PackageManager pm = getPackageManager();
List<ResolveInfo> activities = pm.queryIntentActivities(new Intent(
RecognizerIntent.ACTION_RECOGNIZE_SPEECH), 0);
if (activities.size() == 0) {
mbtSpeak.setEnabled(false);
Toast.makeText(this, "Voice recognizer not present",
Toast.LENGTH_SHORT).show();
}
}
public void speak(View view) {
Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
// Specify the calling package to identify your application
intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, getClass()
.getPackage().getName());
// Display a hint to the user about what he should say.
intent.putExtra(RecognizerIntent.EXTRA_PROMPT, metTextHint.getText()
.toString());
// Give a hint to the recognizer about what the user is going to say
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH);
// If number of Matches is not selected then return show toast message
if (msTextMatches.getSelectedItemPosition() == AdapterView.INVALID_POSITION) {
Toast.makeText(this, "Please select No. of Matches from spinner",
Toast.LENGTH_SHORT).show();
return;
}
int noOfMatches = Integer.parseInt(msTextMatches.getSelectedItem()
.toString());
// Specify how many results you want to receive. The results will be
// sorted where the first result is the one with higher confidence.
intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, noOfMatches);
startActivityForResult(intent, VOICE_RECOGNITION_REQUEST_CODE);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == VOICE_RECOGNITION_REQUEST_CODE)
//If Voice recognition is successful then it returns RESULT_OK
if(resultCode == RESULT_OK) {
ArrayList<String> textMatchList = data
.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
if (!textMatchList.isEmpty()) {
// If first Match contains the 'search' word
// Then start web search.
if (textMatchList.get(0).contains("search")) {
String searchQuery = textMatchList.get(0).replace("search",
" ");
Intent search = new Intent(Intent.ACTION_WEB_SEARCH);
search.putExtra(SearchManager.QUERY, searchQuery);
startActivity(search);
} else {
// populate the Matches
mlvTextMatches
.setAdapter(new ArrayAdapter<String>(this,
android.R.layout.simple_list_item_1,
textMatchList));
}
}
//Result code for various error.
}else if(resultCode == RecognizerIntent.RESULT_AUDIO_ERROR){
showToastMessage("Audio Error");
}else if(resultCode == RecognizerIntent.RESULT_CLIENT_ERROR){
showToastMessage("Client Error");
}else if(resultCode == RecognizerIntent.RESULT_NETWORK_ERROR){
showToastMessage("Network Error");
}else if(resultCode == RecognizerIntent.RESULT_NO_MATCH){
showToastMessage("No Match");
}else if(resultCode == RecognizerIntent.RESULT_SERVER_ERROR){
showToastMessage("Server Error");
}
super.onActivityResult(requestCode, resultCode, data);
}
void showToastMessage(String message){
Toast.makeText(this, message, Toast.LENGTH_SHORT).show();
}
}
This, I think, is the simplest way of checking whether voice recognition is present or not:
Intent intent = new Intent(
RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, "en-US");
try {
startActivityForResult(intent, RESULT_SPEECH);
} catch (ActivityNotFoundException a) {
Toast t = Toast.makeText(getApplicationContext(),
"Opps! Your device doesn't support Speech to Text",
Toast.LENGTH_LONG);
t.show();
}
And after that:
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
switch (requestCode) {
case RESULT_SPEECH: {
if (resultCode == RESULT_OK && null != data) {
ArrayList<String> text = data
.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
et_task_notes.setText(text.get(0));
}
break;
}
}
}

How to get the result of voice recognition on EditBox?

I know there are a ton of topics and questions regarding voice recognition, and my question might be a stupid one too, but please bear with me, guys.
I need to get the result of the speech recognition into an editable text box instead of an ArrayList; the editable text box should allow the user to edit the result, just like a memo.
I found some questions like mine but I could not understand them; I am still a beginner compared to you guys.
This is the code:
public class AVRScreen extends Activity {
private static final int VOICE_RECOGNITION_REQUEST_CODE = 1001;
private ListView mlvTextMatches;
private Button mbtSpeak;
private Button reButton;
private EditText result;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.vr_screen);
Toast.makeText(this, "Press Speak! to Start Speeking",
Toast.LENGTH_LONG).show();
result = (EditText) findViewById(R.id.out_text);
mlvTextMatches = (ListView) findViewById(R.id.lvTextMatches);
mbtSpeak = (Button) findViewById(R.id.btSpeak);
reButton = (Button)findViewById(R.id.Replay1);
reButton.setOnClickListener(new OnClickListener(){
public void onClick(View v) {
startActivity(new Intent(v.getContext(),KeyBoard.class));
}
});
checkVoiceRecognition();
}
public void checkVoiceRecognition() {
// Check if voice recognition is present
PackageManager pm = getPackageManager();
List<ResolveInfo> activities = pm.queryIntentActivities(new Intent(
RecognizerIntent.ACTION_RECOGNIZE_SPEECH), 0);
if (activities.size() == 0) {
mbtSpeak.setEnabled(false);
mbtSpeak.setText("Voice recognizer not present");
Toast.makeText(this, "Voice recognizer not present",
Toast.LENGTH_SHORT).show();
}
}
public void speak(View view) {
Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, getClass()
.getPackage().getName());
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
//Start the Voice recognizer activity for the result.
startActivityForResult(intent, VOICE_RECOGNITION_REQUEST_CODE);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == VOICE_RECOGNITION_REQUEST_CODE)
//If Voice recognition is successful then it returns RESULT_OK
if(resultCode == RESULT_OK) {
ArrayList<String> textMatchList = data
.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
if (textMatchList.get(0).contains("search")) {
} else {
// populate the Matches
mlvTextMatches .setAdapter(new ArrayAdapter<String>
(this,android.R.layout.simple_list_item_1,textMatchList));
}
//Result code for various errors.
}else if(resultCode == RecognizerIntent.RESULT_AUDIO_ERROR){
showToastMessage("Audio Error");
}else if(resultCode == RecognizerIntent.RESULT_CLIENT_ERROR){
showToastMessage("Client Error");
}else if(resultCode == RecognizerIntent.RESULT_NETWORK_ERROR){
showToastMessage("Network Error");
}else if(resultCode == RecognizerIntent.RESULT_NO_MATCH){
showToastMessage("No Match");
}else if(resultCode == RecognizerIntent.RESULT_SERVER_ERROR){
showToastMessage("Server Error");
}
super.onActivityResult(requestCode, resultCode, data);
}
/**
* Helper method to show the toast message
**/
void showToastMessage(String message){
Toast.makeText(this, message, Toast.LENGTH_SHORT).show();
}
}
This is the code after editing:
public class AVRScreen extends Activity {
private static final int VOICE_RECOGNITION_REQUEST_CODE = 1001;
private Button mbtSpeak;
private Button reButton;
private EditText myEditText;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.vr_screen);
Toast.makeText(this, "Press Speak! to Start Speeking",
Toast.LENGTH_LONG).show();
myEditText = (EditText) findViewById(R.id.out_text);
mbtSpeak = (Button) findViewById(R.id.btSpeak);
reButton = (Button)findViewById(R.id.Replay1);
reButton.setOnClickListener(new OnClickListener(){
public void onClick(View v) {
startActivity(new Intent(v.getContext(),KeyBoard.class));
}
});
checkVoiceRecognition();
}
public void checkVoiceRecognition() {
// Check if voice recognition is present
PackageManager pm = getPackageManager();
List<ResolveInfo> activities = pm.queryIntentActivities(new Intent(
RecognizerIntent.ACTION_RECOGNIZE_SPEECH), 0);
if (activities.size() == 0) {
mbtSpeak.setEnabled(false);
mbtSpeak.setText("Voice recognizer not present");
Toast.makeText(this, "Voice recognizer not present",
Toast.LENGTH_SHORT).show();
}
}
public void speak(View view) {
Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, getClass()
.getPackage().getName());
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
//Start the Voice recognizer activity for the result.
startActivityForResult(intent, VOICE_RECOGNITION_REQUEST_CODE);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == VOICE_RECOGNITION_REQUEST_CODE)
//If Voice recognition is successful then it returns RESULT_OK
if(resultCode == RESULT_OK) {
ArrayList<String> textMatchList = data
.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
if (textMatchList.get(0).contains("search")) {
} else {
// populate the Matches
myEditText.setText(textMatchList.toString());
// if the above does not look good
// for (String match : textMatchList) {
// myEditText.append(match + "\n"); // or whatever separator you want
// }
}
}
The second try is:
} else {
// populate the Matches
//myEditText.setText(textMatchList.toString());
// if the above does not look good
for (String match : textMatchList) {
myEditText.append(match + "\n"); // or whatever separator you want
}
}
}
if(resultCode == RESULT_OK) {
ArrayList<String> textMatchList = data
.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
if (textMatchList.get(0).contains("search")) {
} else {
// populate the Matches
result.setText(textMatchList.toString());
// if the above does not look good
// for (String match : textMatchList) {
// result.append(match + "\n"); // or whatever separator you want
// }
}
