TextToSpeech not playing audio in Android release build

I'm seeing strange behavior from my text-to-speech engine on Android:
When I test text to speech on my Galaxy S5, everything is fine; audio plays in both Turkish and German.
On some other phones (an LG, for example) TextToSpeech works too, except in the following case:
Export the app (build the APK) and install it manually on the phone
Switch to the Turkish language (the German language always works!)
The problem is that I get no error message; the TTS seems to initialize normally.
Any hint would be really appreciated!
Here is my code:
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getActionBar().setLogo(R.drawable.anne_eli_icons_pfeile_zurueck_weiss_17px);
getActionBar().setHomeButtonEnabled(true);
textToSpeech = new TextToSpeech(this, this);
}
@Override
public void onInit(int status) {
if (status == TextToSpeech.SUCCESS) {
log("onInit()");
int result = textToSpeech.setLanguage(new Locale(getTransLanguage()));
log("result:" + result);
textToSpeech.setSpeechRate(1.2f); // set speech speed rate
if (result == TextToSpeech.LANG_MISSING_DATA || result == TextToSpeech.LANG_NOT_SUPPORTED) {
Tools.showToast(this, "Language " + getTransLanguage() + " is not supported! Error code: " + result);
Intent installIntent = new Intent();
installIntent.setAction(TextToSpeech.Engine.ACTION_INSTALL_TTS_DATA);
startActivity(installIntent);
} else {
speechEnabled = true;
}
} else {
speechEnabled = false;
Tools.showToast(this, "Text to speech initialization failed!");
}
}

Before that, check whether a TTS service is available:
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.speech.tts.TextToSpeech;
/**
* This class demonstrates checking for a TTS engine, and if one is
* available it will speak some text.
*/
public class Main extends Activity implements TextToSpeech.OnInitListener
{
private TextToSpeech mTts;
// This code can be any value you want; it's just a request code used to match the result.
private static final int MY_DATA_CHECK_CODE = 1234;
/**
* Called when the activity is first created.
*/
@Override
public void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
// Fire off an intent to check if a TTS engine is installed
Intent checkIntent = new Intent();
checkIntent.setAction(TextToSpeech.Engine.ACTION_CHECK_TTS_DATA);
startActivityForResult(checkIntent, MY_DATA_CHECK_CODE);
}
/**
* Executed when the TTS engine has been initialized. Some static text is spoken via TTS here.
* @param i the initialization status
*/
public void onInit(int i)
{
mTts.speak("Hello folks, welcome to my little demo on Text To Speech.",
TextToSpeech.QUEUE_FLUSH, // Drop all pending entries in the playback queue.
null);
}
/**
* This is the callback from the TTS engine check, if a TTS is installed we
* create a new TTS instance (which in turn calls onInit), if not then we will
* create an intent to go off and install a TTS engine
* @param requestCode int Request code returned from the check for TTS engine.
* @param resultCode int Result code returned from the check for TTS engine.
* @param data Intent Intent returned from the TTS check.
*/
public void onActivityResult(int requestCode, int resultCode, Intent data)
{
if (requestCode == MY_DATA_CHECK_CODE)
{
if (resultCode == TextToSpeech.Engine.CHECK_VOICE_DATA_PASS)
{
// success, create the TTS instance
mTts = new TextToSpeech(this, this);
}
else
{
// missing data, install it
Intent installIntent = new Intent();
installIntent.setAction(
TextToSpeech.Engine.ACTION_INSTALL_TTS_DATA);
startActivity(installIntent);
}
}
}
/**
* Be kind, once you've finished with the TTS engine, shut it down so other
* applications can use it without us interfering with it :)
*/
@Override
public void onDestroy()
{
// Don't forget to shutdown!
if (mTts != null)
{
mTts.stop();
mTts.shutdown();
}
super.onDestroy();
}
}
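If the German voice always works but the Turkish one fails only on certain devices, the most likely cause is that the TTS engine selected on those devices simply has no Turkish voice data installed. Beyond the ACTION_CHECK_TTS_DATA check above, you can also ask the engine explicitly; a minimal sketch (not from the original post), assuming getTransLanguage() returns the ISO code "tr":
// Sketch only: call after onInit() has reported TextToSpeech.SUCCESS.
// Requires java.util.Locale and android.speech.tts.TextToSpeech imports.
private boolean isTurkishVoiceInstalled(TextToSpeech tts) {
    // new Locale("tr", "TR") assumes the Turkish language code used elsewhere in the app.
    int availability = tts.isLanguageAvailable(new Locale("tr", "TR"));
    // LANG_AVAILABLE (0) or better means the voice can be used;
    // LANG_MISSING_DATA (-1) / LANG_NOT_SUPPORTED (-2) mean it cannot.
    return availability >= TextToSpeech.LANG_AVAILABLE;
}
If this returns false on the affected device, the fix is on the device side (install Turkish voice data for whichever TTS engine is selected in the system settings) rather than in the app.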

Related

Detect and capture barcode automatically when it is read by camera

I used this android-vision project to scan a barcode. When the camera detects the barcode, I am currently required to manually tap to capture it. But, I want to change the code a little bit so that it is captured automatically when the barcode is detected. How can I do this? This is my BarcodeCaptureActivity class:
package com.example.fs.barcodescanner;
import android.Manifest;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.pm.PackageManager;
import android.hardware.Camera;
import android.os.Build;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.design.widget.Snackbar;
import android.support.v4.app.ActivityCompat;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;
import android.view.View;
import android.view.WindowId;
import android.widget.Toast;
import com.example.fs.barcodescanner.ui.camera.CameraSource;
import com.example.fs.barcodescanner.ui.camera.CameraSourcePreview;
import com.example.fs.barcodescanner.ui.camera.GraphicOverlay;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GoogleApiAvailability;
import com.google.android.gms.common.api.CommonStatusCodes;
import com.google.android.gms.vision.MultiProcessor;
import com.google.android.gms.vision.barcode.Barcode;
import com.google.android.gms.vision.barcode.BarcodeDetector;
import java.io.IOException;
public final class BarcodeCaptureActivity extends AppCompatActivity {
private static final String TAG = "Barcode-reader";
// intent request code to handle updating play services if needed.
private static final int RC_HANDLE_GMS = 9001;
// permission request codes need to be < 256
private static final int RC_HANDLE_CAMERA_PERM = 2;
// constants used to pass extra data in the intent
public static final String AutoFocus = "AutoFocus";
public static final String UseFlash = "UseFlash";
public static final String BarcodeObject = "Barcode";
private CameraSource mCameraSource;
private CameraSourcePreview mPreview;
private GraphicOverlay<BarcodeGraphic> mGraphicOverlay;
// helper objects for detecting taps and pinches.
private ScaleGestureDetector scaleGestureDetector;
private GestureDetector gestureDetector;
private WindowId.FocusObserver fo;
/**
* Initializes the UI and creates the detector pipeline.
*/
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
setContentView(R.layout.activity_barcode_capture);
mPreview = (CameraSourcePreview) findViewById(R.id.preview);
mGraphicOverlay = (GraphicOverlay<BarcodeGraphic>) findViewById(R.id.graphicOverlay);
// read parameters from the intent used to launch the activity.
boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, false);
boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);
// Check for the camera permission before accessing the camera. If the
// permission is not granted yet, request permission.
int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
if (rc == PackageManager.PERMISSION_GRANTED) {
createCameraSource(autoFocus, useFlash);
} else {
requestCameraPermission();
}
gestureDetector = new GestureDetector(this, new CaptureGestureListener());
scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());
Snackbar.make(mGraphicOverlay, "Tap to capture. Pinch/Stretch to zoom",
Snackbar.LENGTH_LONG)
.show();
}
/**
* Handles the requesting of the camera permission. This includes
* showing a "Snackbar" message of why the permission is needed then
* sending the request.
*/
private void requestCameraPermission() {
Log.w(TAG, "Camera permission is not granted. Requesting permission");
final String[] permissions = new String[]{Manifest.permission.CAMERA};
if (!ActivityCompat.shouldShowRequestPermissionRationale(this,
Manifest.permission.CAMERA)) {
ActivityCompat.requestPermissions(this, permissions, RC_HANDLE_CAMERA_PERM);
return;
}
final Activity thisActivity = this;
View.OnClickListener listener = new View.OnClickListener() {
@Override
public void onClick(View view) {
ActivityCompat.requestPermissions(thisActivity, permissions,
RC_HANDLE_CAMERA_PERM);
}
};
Snackbar.make(mGraphicOverlay, R.string.permission_camera_rationale,
Snackbar.LENGTH_INDEFINITE)
.setAction(R.string.ok, listener)
.show();
}
@Override
public boolean onTouchEvent(MotionEvent e) {
boolean b = scaleGestureDetector.onTouchEvent(e);
System.out.println("scale gesture"+b);
boolean c = gestureDetector.onTouchEvent(e);
System.out.println("tap gesture"+c);
return b || c || super.onTouchEvent(e);
}
/**
* Creates and starts the camera. Note that this uses a higher resolution in comparison
* to other detection examples to enable the barcode detector to detect small barcodes
* at long distances.
*
* Suppressing InlinedApi since there is a check that the minimum version is met before using
* the constant.
*/
@SuppressLint("InlinedApi")
private void createCameraSource(boolean autoFocus, boolean useFlash) {
Context context = getApplicationContext();
// A barcode detector is created to track barcodes. An associated multi-processor instance
// is set to receive the barcode detection results, track the barcodes, and maintain
// graphics for each barcode on screen. The factory is used by the multi-processor to
// create a separate tracker instance for each barcode.
BarcodeDetector barcodeDetector = new BarcodeDetector.Builder(context).build();
BarcodeTrackerFactory barcodeFactory = new BarcodeTrackerFactory(mGraphicOverlay);
barcodeDetector.setProcessor(
new MultiProcessor.Builder<>(barcodeFactory).build());
if (!barcodeDetector.isOperational()) {
// Note: The first time that an app using the barcode or face API is installed on a
// device, GMS will download native libraries to the device in order to do detection.
// Usually this completes before the app is run for the first time. But if that
// download has not yet completed, then the above call will not detect any barcodes
// and/or faces.
//
// isOperational() can be used to check if the required native libraries are currently
// available. The detectors will automatically become operational once the library
// downloads complete on device.
Log.w(TAG, "Detector dependencies are not yet available.");
// Check for low storage. If there is low storage, the native library will not be
// downloaded, so detection will not become operational.
IntentFilter lowstorageFilter = new IntentFilter(Intent.ACTION_DEVICE_STORAGE_LOW);
boolean hasLowStorage = registerReceiver(null, lowstorageFilter) != null;
if (hasLowStorage) {
Toast.makeText(this, R.string.low_storage_error, Toast.LENGTH_LONG).show();
Log.w(TAG, getString(R.string.low_storage_error));
}
}
// Creates and starts the camera. Note that this uses a higher resolution in comparison
// to other detection examples to enable the barcode detector to detect small barcodes
// at long distances.
CameraSource.Builder builder = new CameraSource.Builder(getApplicationContext(), barcodeDetector)
.setFacing(CameraSource.CAMERA_FACING_BACK)
.setRequestedPreviewSize(1600, 1024)
.setRequestedFps(15.0f);
// make sure that auto focus is an available option
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
builder = builder.setFocusMode(
autoFocus ? Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE : null);
}
mCameraSource = builder
.setFlashMode(useFlash ? Camera.Parameters.FLASH_MODE_TORCH : null)
.build();
}
/**
* Restarts the camera.
*/
@Override
protected void onResume() {
super.onResume();
startCameraSource();
}
/**
* Stops the camera.
*/
@Override
protected void onPause() {
super.onPause();
if (mPreview != null) {
mPreview.stop();
}
}
/**
* Releases the resources associated with the camera source, the associated detectors, and the
* rest of the processing pipeline.
*/
@Override
protected void onDestroy() {
super.onDestroy();
if (mPreview != null) {
mPreview.release();
}
}
/**
* Callback for the result from requesting permissions. This method
* is invoked for every call on {@link #requestPermissions(String[], int)}.
* <p>
* <strong>Note:</strong> It is possible that the permissions request interaction
* with the user is interrupted. In this case you will receive empty permissions
* and results arrays which should be treated as a cancellation.
* </p>
*
* @param requestCode The request code passed in {@link #requestPermissions(String[], int)}.
* @param permissions The requested permissions. Never null.
* @param grantResults The grant results for the corresponding permissions
* which is either {@link PackageManager#PERMISSION_GRANTED}
* or {@link PackageManager#PERMISSION_DENIED}. Never null.
* @see #requestPermissions(String[], int)
*/
@Override
public void onRequestPermissionsResult(int requestCode,
@NonNull String[] permissions,
@NonNull int[] grantResults) {
if (requestCode != RC_HANDLE_CAMERA_PERM) {
Log.d(TAG, "Got unexpected permission result: " + requestCode);
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
return;
}
if (grantResults.length != 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
Log.d(TAG, "Camera permission granted - initialize the camera source");
// we have permission, so create the camerasource
boolean autoFocus = getIntent().getBooleanExtra(AutoFocus,false);
boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);
createCameraSource(autoFocus, useFlash);
return;
}
Log.e(TAG, "Permission not granted: results len = " + grantResults.length +
" Result code = " + (grantResults.length > 0 ? grantResults[0] : "(empty)"));
DialogInterface.OnClickListener listener = new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
finish();
}
};
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("Multitracker sample")
.setMessage(R.string.no_camera_permission)
.setPositiveButton(R.string.ok, listener)
.show();
}
/**
* Starts or restarts the camera source, if it exists. If the camera source doesn't exist yet
* (e.g., because onResume was called before the camera source was created), this will be called
* again when the camera source is created.
*/
private void startCameraSource() throws SecurityException {
// check that the device has play services available.
int code = GoogleApiAvailability.getInstance().isGooglePlayServicesAvailable(
getApplicationContext());
if (code != ConnectionResult.SUCCESS) {
Dialog dlg =
GoogleApiAvailability.getInstance().getErrorDialog(this, code, RC_HANDLE_GMS);
dlg.show();
}
if (mCameraSource != null) {
try {
mPreview.start(mCameraSource, mGraphicOverlay);
} catch (IOException e) {
Log.e(TAG, "Unable to start camera source.", e);
mCameraSource.release();
mCameraSource = null;
}
}
}
/**
* onTap is called to capture the oldest barcode currently detected and
* return it to the caller.
*
* @param rawX - the raw position of the tap
* @param rawY - the raw position of the tap.
* @return true if the activity is ending.
*/
private boolean onTap(float rawX, float rawY) {
//TODO: use the tap position to select the barcode.
BarcodeGraphic graphic = mGraphicOverlay.getFirstGraphic();
Barcode barcode = null;
if (graphic != null) {
barcode = graphic.getBarcode();
if (barcode != null) {
Intent data = new Intent();
data.putExtra(BarcodeObject, barcode);
setResult(CommonStatusCodes.SUCCESS, data);
finish();
}
else {
Log.d(TAG, "barcode data is null");
}
}
else {
Log.d(TAG,"no barcode detected");
}
return barcode != null;
}
private class CaptureGestureListener extends GestureDetector.SimpleOnGestureListener {
@Override
public boolean onSingleTapConfirmed(MotionEvent e) {
System.out.println(e.getRawX());
System.out.println(e.getRawY());
return onTap(e.getRawX(), e.getRawY()) || super.onSingleTapConfirmed(e);
}
}
private class ScaleListener implements ScaleGestureDetector.OnScaleGestureListener {
/**
* Responds to scaling events for a gesture in progress.
* Reported by pointer motion.
*
* @param detector The detector reporting the event - use this to
* retrieve extended info about event state.
* @return Whether or not the detector should consider this event
* as handled. If an event was not handled, the detector
* will continue to accumulate movement until an event is
* handled. This can be useful if an application, for example,
* only wants to update scaling factors if the change is
* greater than 0.01.
*/
@Override
public boolean onScale(ScaleGestureDetector detector) {
return false;
}
/**
* Responds to the beginning of a scaling gesture. Reported by
* new pointers going down.
*
* @param detector The detector reporting the event - use this to
* retrieve extended info about event state.
* @return Whether or not the detector should continue recognizing
* this gesture. For example, if a gesture is beginning
* with a focal point outside of a region where it makes
* sense, onScaleBegin() may return false to ignore the
* rest of the gesture.
*/
@Override
public boolean onScaleBegin(ScaleGestureDetector detector) {
return true;
}
/**
* Responds to the end of a scale gesture. Reported by existing
* pointers going up.
* <p/>
* Once a scale has ended, {@link ScaleGestureDetector#getFocusX()}
* and {@link ScaleGestureDetector#getFocusY()} will return focal point
* of the pointers remaining on the screen.
*
* @param detector The detector reporting the event - use this to
* retrieve extended info about event state.
*/
@Override
public void onScaleEnd(ScaleGestureDetector detector) {
mCameraSource.doZoom(detector.getScaleFactor());
}
}
}
Use interfaces.
Steps:
Create an interface, like this:
public interface QRCodeDetectedInterface {
void onQRCodeDetected();
}
Implement it in BarcodeCaptureActivity, like this:
public final class BarcodeCaptureActivity extends AppCompatActivity implements QRCodeDetectedInterface{
@Override
public void onQRCodeDetected() {
BarcodeGraphic graphic = mGraphicOverlay.getFirstGraphic();
Barcode barcode = null;
if (graphic != null) {
barcode = graphic.getBarcode();
if (barcode != null) {
Intent data = new Intent();
data.putExtra(BarcodeObject, barcode);
setResult(CommonStatusCodes.SUCCESS, data);
finish();
}
else {
Log.d(TAG, "barcode data is null");
}
}
else {
Log.d(TAG,"no barcode detected");
}
}
}
And use it in the BarcodeGraphic.java file, like this:
private Context context;
private QRCodeDetectedInterface mCallback;
BarcodeGraphic(GraphicOverlay overlay) {
super(overlay);
this.context = overlay.getContext();
mCallback = (BarcodeCaptureActivity) context;
}
And then:
@Override
public void draw(Canvas canvas) {
mCallback.onQRCodeDetected();
}
As @Lynx said, you need to create an interface. I needed something similar, so I created an interface that listens for when the camera tracks a barcode and implemented it in the activity that should be notified.
This is the interface I created in the BarcodeGraphicTracker.java class:
public interface BarcodeDetectorListener{
//event call back
void onObjectDetected(Barcode data);
}
@Override
public void onNewItem(int id, Barcode item) {
mGraphic.setId(id);
if(mBarcodeDetectorListener == null) return;
mBarcodeDetectorListener.onObjectDetected(item);
mBarcodeDetectorListener = null;
}
In BarcodeScannerActivity:
@Override
public void onObjectDetected(Barcode data) {
//do something with the barcode data here
Intent mIntent = new Intent();
mIntent.putExtra(BarcodeObject, data);
setResult(CommonStatusCodes.SUCCESS, mIntent);
finish();
}
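The snippets above leave out how the listener actually reaches the tracker. Below is a minimal sketch of one way to pass it through the factory; the extra constructor parameter and the three-argument BarcodeGraphicTracker constructor are assumptions for illustration, not part of the official sample:
// Hypothetical wiring: the factory hands the activity's listener to each new tracker.
import com.google.android.gms.vision.MultiProcessor;
import com.google.android.gms.vision.Tracker;
import com.google.android.gms.vision.barcode.Barcode;

class BarcodeTrackerFactory implements MultiProcessor.Factory<Barcode> {
    private final GraphicOverlay<BarcodeGraphic> mGraphicOverlay;
    private final BarcodeGraphicTracker.BarcodeDetectorListener mListener;

    BarcodeTrackerFactory(GraphicOverlay<BarcodeGraphic> overlay,
                          BarcodeGraphicTracker.BarcodeDetectorListener listener) {
        mGraphicOverlay = overlay;
        mListener = listener;
    }

    @Override
    public Tracker<Barcode> create(Barcode barcode) {
        BarcodeGraphic graphic = new BarcodeGraphic(mGraphicOverlay);
        // BarcodeGraphicTracker is assumed to store the listener and call
        // onObjectDetected(item) from onNewItem(), as shown above.
        return new BarcodeGraphicTracker(mGraphicOverlay, graphic, mListener);
    }
}
In BarcodeCaptureActivity the factory would then be created as new BarcodeTrackerFactory(mGraphicOverlay, this) once the activity implements BarcodeDetectorListener.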
I have uploaded a sample to GitHub. Please find it here.
package com.example.fs.barcodescanner;
import android.Manifest;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.pm.PackageManager;
import android.hardware.Camera;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.design.widget.Snackbar;
import android.support.v4.app.ActivityCompat;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;
import android.view.View;
import android.widget.Toast;
import com.example.fs.barcodescanner.ui.camera.CameraSource;
import com.example.fs.barcodescanner.ui.camera.CameraSourcePreview;
import com.example.fs.barcodescanner.ui.camera.GraphicOverlay;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GoogleApiAvailability;
import com.google.android.gms.common.api.CommonStatusCodes;
import com.google.android.gms.vision.MultiProcessor;
import com.google.android.gms.vision.barcode.Barcode;
import com.google.android.gms.vision.barcode.BarcodeDetector;
import java.io.IOException;
public final class BarcodeCaptureActivity extends AppCompatActivity {
private static final String TAG = "Barcode-reader";
// intent request code to handle updating play services if needed.
private static final int RC_HANDLE_GMS = 9001;
// permission request codes need to be < 256
private static final int RC_HANDLE_CAMERA_PERM = 2;
// constants used to pass extra data in the intent
public static final String AutoFocus = "AutoFocus";
public static final String UseFlash = "UseFlash";
public static final String BarcodeObject = "Barcode";
private CameraSource mCameraSource;
private CameraSourcePreview mPreview;
private GraphicOverlay<BarcodeGraphic> mGraphicOverlay;
// helper objects for detecting taps and pinches.
private ScaleGestureDetector scaleGestureDetector;
private GestureDetector gestureDetector;
/**
* Initializes the UI and creates the detector pipeline.
*/
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
setContentView(R.layout.activity_barcode_capture);
mPreview = (CameraSourcePreview) findViewById(R.id.preview);
mGraphicOverlay = (GraphicOverlay<BarcodeGraphic>) findViewById(R.id.graphicOverlay);
// read parameters from the intent used to launch the activity.
boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, false);
boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);
// Check for the camera permission before accessing the camera. If the
// permission is not granted yet, request permission.
int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
if (rc == PackageManager.PERMISSION_GRANTED) {
createCameraSource();
} else {
requestCameraPermission();
}
gestureDetector = new GestureDetector(this, new CaptureGestureListener());
scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());
Snackbar.make(mGraphicOverlay, "Tap to capture. Pinch/Stretch to zoom",
Snackbar.LENGTH_LONG)
.show();
}
/**
* Handles the requesting of the camera permission. This includes
* showing a "Snackbar" message of why the permission is needed then
* sending the request.
*/
private void requestCameraPermission() {
Log.w(TAG, "Camera permission is not granted. Requesting permission");
final String[] permissions = new String[]{Manifest.permission.CAMERA};
if (!ActivityCompat.shouldShowRequestPermissionRationale(this,
Manifest.permission.CAMERA)) {
ActivityCompat.requestPermissions(this, permissions, RC_HANDLE_CAMERA_PERM);
return;
}
final Activity thisActivity = this;
View.OnClickListener listener = new View.OnClickListener() {
@Override
public void onClick(View view) {
ActivityCompat.requestPermissions(thisActivity, permissions,
RC_HANDLE_CAMERA_PERM);
}
};
Snackbar.make(mGraphicOverlay, R.string.permission_camera_rationale,
Snackbar.LENGTH_INDEFINITE)
.setAction(R.string.ok, listener)
.show();
}
@Override
public boolean onTouchEvent(MotionEvent e) {
boolean b = scaleGestureDetector.onTouchEvent(e);
System.out.println("scale gesture" + b);
boolean c = gestureDetector.onTouchEvent(e);
System.out.println("tap gesture" + c);
return b || c || super.onTouchEvent(e);
}
/**
* Creates and starts the camera. Note that this uses a higher resolution in comparison
* to other detection examples to enable the barcode detector to detect small barcodes
* at long distances.
* <p>
* Suppressing InlinedApi since there is a check that the minimum version is met before using
* the constant.
*/
@SuppressLint("InlinedApi")
private void createCameraSource() {
Context context = getApplicationContext();
// A barcode detector is created to track barcodes. An associated multi-processor instance
// is set to receive the barcode detection results, track the barcodes, and maintain
// graphics for each barcode on screen. The factory is used by the multi-processor to
// create a separate tracker instance for each barcode.
BarcodeDetector barcodeDetector = new BarcodeDetector.Builder(context).build();
BarcodeTrackerFactory barcodeFactory = new BarcodeTrackerFactory(mGraphicOverlay);
barcodeDetector.setProcessor(
new MultiProcessor.Builder<>(barcodeFactory).build());
if (!barcodeDetector.isOperational()) {
// Note: The first time that an app using the barcode or face API is installed on a
// device, GMS will download native libraries to the device in order to do detection.
// Usually this completes before the app is run for the first time. But if that
// download has not yet completed, then the above call will not detect any barcodes
// and/or faces.
//
// isOperational() can be used to check if the required native libraries are currently
// available. The detectors will automatically become operational once the library
// downloads complete on device.
Log.w(TAG, "Detector dependencies are not yet available.");
// Check for low storage. If there is low storage, the native library will not be
// downloaded, so detection will not become operational.
IntentFilter lowstorageFilter = new IntentFilter(Intent.ACTION_DEVICE_STORAGE_LOW);
boolean hasLowStorage = registerReceiver(null, lowstorageFilter) != null;
if (hasLowStorage) {
Toast.makeText(this, R.string.low_storage_error, Toast.LENGTH_LONG).show();
Log.w(TAG, getString(R.string.low_storage_error));
}
}
// Creates and starts the camera. Note that this uses a higher resolution in comparison
// to other detection examples to enable the barcode detector to detect small barcodes
// at long distances.
CameraSource.Builder builder = new CameraSource.Builder(getApplicationContext(), barcodeDetector)
.setFacing(CameraSource.CAMERA_FACING_BACK)
.setRequestedPreviewSize(1600, 1024)
.setRequestedFps(15.0f)
.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
// make sure that auto focus is an available option
// if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
// builder = builder.setFocusMode(
// autoFocus ? Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE : null);
// }
mCameraSource = builder
//.setFlashMode(useFlash ? Camera.Parameters.FLASH_MODE_TORCH : null)
.build();
mCameraSource.autoFocus(new CameraSource.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success) {
System.out.println("Hello");
}
});
}
/**
* Restarts the camera.
*/
@Override
protected void onResume() {
super.onResume();
startCameraSource();
}
/**
* Stops the camera.
*/
@Override
protected void onPause() {
super.onPause();
if (mPreview != null) {
mPreview.stop();
}
}
/**
* Releases the resources associated with the camera source, the associated detectors, and the
* rest of the processing pipeline.
*/
@Override
protected void onDestroy() {
super.onDestroy();
if (mPreview != null) {
mPreview.release();
}
}
/**
* Callback for the result from requesting permissions. This method
* is invoked for every call on {@link #requestPermissions(String[], int)}.
* <p>
* <strong>Note:</strong> It is possible that the permissions request interaction
* with the user is interrupted. In this case you will receive empty permissions
* and results arrays which should be treated as a cancellation.
* </p>
*
* @param requestCode The request code passed in {@link #requestPermissions(String[], int)}.
* @param permissions The requested permissions. Never null.
* @param grantResults The grant results for the corresponding permissions
* which is either {@link PackageManager#PERMISSION_GRANTED}
* or {@link PackageManager#PERMISSION_DENIED}. Never null.
* @see #requestPermissions(String[], int)
*/
@Override
public void onRequestPermissionsResult(int requestCode,
@NonNull String[] permissions,
@NonNull int[] grantResults) {
if (requestCode != RC_HANDLE_CAMERA_PERM) {
Log.d(TAG, "Got unexpected permission result: " + requestCode);
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
return;
}
if (grantResults.length != 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
Log.d(TAG, "Camera permission granted - initialize the camera source");
// we have permission, so create the camerasource
boolean autoFocus = getIntent().getBooleanExtra(AutoFocus,false);
boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);
createCameraSource();
return;
}
Log.e(TAG, "Permission not granted: results len = " + grantResults.length +
" Result code = " + (grantResults.length > 0 ? grantResults[0] : "(empty)"));
DialogInterface.OnClickListener listener = new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
finish();
}
};
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("Multitracker sample")
.setMessage(R.string.no_camera_permission)
.setPositiveButton(R.string.ok, listener)
.show();
}
/**
* Starts or restarts the camera source, if it exists. If the camera source doesn't exist yet
* (e.g., because onResume was called before the camera source was created), this will be called
* again when the camera source is created.
*/
private void startCameraSource() throws SecurityException {
// check that the device has play services available.
int code = GoogleApiAvailability.getInstance().isGooglePlayServicesAvailable(
getApplicationContext());
if (code != ConnectionResult.SUCCESS) {
Dialog dlg =
GoogleApiAvailability.getInstance().getErrorDialog(this, code, RC_HANDLE_GMS);
dlg.show();
}
if (mCameraSource != null) {
try {
mPreview.start(mCameraSource, mGraphicOverlay);
} catch (IOException e) {
Log.e(TAG, "Unable to start camera source.", e);
mCameraSource.release();
mCameraSource = null;
}
}
}
/**
* onTap is called to capture the oldest barcode currently detected and
* return it to the caller.
*
* @param rawX - the raw position of the tap
* @param rawY - the raw position of the tap.
* @return true if the activity is ending.
*/
private boolean onTap(float rawX, float rawY) {
//TODO: use the tap position to select the barcode.
BarcodeGraphic graphic = mGraphicOverlay.getFirstGraphic();
Barcode barcode = null;
if (graphic != null) {
barcode = graphic.getBarcode();
if (barcode != null) {
Intent data = new Intent();
data.putExtra(BarcodeObject, barcode);
setResult(CommonStatusCodes.SUCCESS, data);
finish();
} else {
Log.d(TAG, "barcode data is null");
}
} else {
Log.d(TAG, "no barcode detected");
}
return barcode != null;
}
private class CaptureGestureListener extends GestureDetector.SimpleOnGestureListener {
@Override
public boolean onSingleTapConfirmed(MotionEvent e) {
System.out.println(e.getRawX());
System.out.println(e.getRawY());
return onTap(e.getRawX(), e.getRawY()) || super.onSingleTapConfirmed(e);
}
}
private class ScaleListener implements ScaleGestureDetector.OnScaleGestureListener {
/**
* Responds to scaling events for a gesture in progress.
* Reported by pointer motion.
*
* @param detector The detector reporting the event - use this to
* retrieve extended info about event state.
* @return Whether or not the detector should consider this event
* as handled. If an event was not handled, the detector
* will continue to accumulate movement until an event is
* handled. This can be useful if an application, for example,
* only wants to update scaling factors if the change is
* greater than 0.01.
*/
@Override
public boolean onScale(ScaleGestureDetector detector) {
return false;
}
/**
* Responds to the beginning of a scaling gesture. Reported by
* new pointers going down.
*
* @param detector The detector reporting the event - use this to
* retrieve extended info about event state.
* @return Whether or not the detector should continue recognizing
* this gesture. For example, if a gesture is beginning
* with a focal point outside of a region where it makes
* sense, onScaleBegin() may return false to ignore the
* rest of the gesture.
*/
@Override
public boolean onScaleBegin(ScaleGestureDetector detector) {
return true;
}
/**
* Responds to the end of a scale gesture. Reported by existing
* pointers going up.
* <p/>
* Once a scale has ended, {@link ScaleGestureDetector#getFocusX()}
* and {@link ScaleGestureDetector#getFocusY()} will return focal point
* of the pointers remaining on the screen.
*
* @param detector The detector reporting the event - use this to
* retrieve extended info about event state.
*/
@Override
public void onScaleEnd(ScaleGestureDetector detector) {
mCameraSource.doZoom(detector.getScaleFactor());
}
}
}

Integrate Mobile Vision Barcode API into android app [closed]

Closed. This question needs to be more focused. It is not currently accepting answers.
Closed 7 years ago.
I am developing a barcode scanner app on Android. Google has released the native Mobile Vision API (link) for Android barcodes, so I want to use the native barcode API instead of a third-party library.
So to sum up my problem:
1. How do I integrate the Barcode API into my Android app?
2. After integrating it, how do I call the barcode scanner and get back the scan result?
Note: a step-by-step guide would be helpful.
I integrated the code into my app successfully. I am posting the question and answer so that it will be helpful to others. Any other answers would be appreciated. Thanks in advance. :)
Prerequisite:
• Android 4.2.2+
• Latest Android SDK
• Google Play Services 7.8+ (Rev. 26.0 in SDK)
• compile 'com.android.support:appcompat-v7:22.0.0'
• compile 'com.google.android.gms:play-services:7.8+'
Download the sample code from this link and import the barcode-reader project into Android Studio. Add Google Play services 7.8+ and the support library mentioned above to your app's Gradle file. Then modify the BarcodeTrackerFactory.java and MultiTrackerActivity.java classes.
BarcodeTrackerFactory.java
import android.app.Activity;
import android.content.Intent;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.RectF;
import android.util.Log;
import com.google.android.gms.samples.vision.face.multitracker.ui.camera.GraphicOverlay;
import com.google.android.gms.vision.MultiProcessor;
import com.google.android.gms.vision.Tracker;
import com.google.android.gms.vision.barcode.Barcode;
/**
* Factory for creating a tracker and associated graphic to be associated with a new barcode. The
* multi-processor uses this factory to create barcode trackers as needed -- one for each barcode.
*/
class BarcodeTrackerFactory implements MultiProcessor.Factory<Barcode> {
public Activity context;
private GraphicOverlay mGraphicOverlay;
BarcodeTrackerFactory(GraphicOverlay graphicOverlay) {
mGraphicOverlay = graphicOverlay;
}
BarcodeTrackerFactory(GraphicOverlay graphicOverlay ,Activity ctx) {
mGraphicOverlay = graphicOverlay;
this.context =ctx;
}
@Override
public Tracker<Barcode> create(Barcode barcode) {
BarcodeGraphic graphic = new BarcodeGraphic(mGraphicOverlay,context);
return new GraphicTracker<>(mGraphicOverlay, graphic);
}
}
/**
* Graphic instance for rendering barcode position, size, and ID within an associated graphic
* overlay view.
*/
class BarcodeGraphic extends TrackedGraphic<Barcode> {
private static final int COLOR_CHOICES[] = {
Color.BLUE,
Color.CYAN,
Color.GREEN
};
private static int mCurrentColorIndex = 0;
private Paint mRectPaint;
private Paint mTextPaint;
private volatile Barcode mBarcode;
private Activity context;
private boolean isFirstScandetaction=true;
BarcodeGraphic(GraphicOverlay overlay ,Activity context) {
super(overlay);
this.context=context;
mCurrentColorIndex = (mCurrentColorIndex + 1) % COLOR_CHOICES.length;
final int selectedColor = COLOR_CHOICES[mCurrentColorIndex];
mRectPaint = new Paint();
mRectPaint.setColor(selectedColor);
mRectPaint.setStyle(Paint.Style.STROKE);
mRectPaint.setStrokeWidth(4.0f);
mTextPaint = new Paint();
mTextPaint.setColor(selectedColor);
mTextPaint.setTextSize(36.0f);
}
/**
* Updates the barcode instance from the detection of the most recent frame. Invalidates the
* relevant portions of the overlay to trigger a redraw.
*/
void updateItem(Barcode barcode) {
mBarcode = barcode;
if (isFirstScandetaction) {
postInvalidate();
if (null != mBarcode) {
isFirstScandetaction = false;
Intent intent = new Intent();
intent.putExtra("SCAN_RESULT", barcode.rawValue.toString());
intent.putExtra("SCAN_RESULT_FORMAT",String.valueOf( barcode.format));
context.setResult(Activity.RESULT_OK, intent);
context.finish();
}
}
}
/**
* Draws the barcode annotations for position, size, and raw value on the supplied canvas.
*/
@Override
public void draw(Canvas canvas) {
Barcode barcode = mBarcode;
if (barcode == null) {
return;
}
// Draws the bounding box around the barcode.
RectF rect = new RectF(barcode.getBoundingBox());
rect.left = translateX(rect.left);
rect.top = translateY(rect.top);
rect.right = translateX(rect.right);
rect.bottom = translateY(rect.bottom);
canvas.drawRect(rect, mRectPaint);
// Draws a label at the bottom of the barcode to indicate the barcode value that was detected.
canvas.drawText(barcode.rawValue, rect.left, rect.bottom, mTextPaint);
Log.v("On Draw", "called");
}
}
MultiTrackerActivity.java
import java.io.IOException;
import android.Manifest;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.support.design.widget.Snackbar;
import android.support.v4.app.ActivityCompat;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GoogleApiAvailability;
import com.google.android.gms.samples.vision.face.multitracker.ui.camera.CameraSourcePreview;
import com.google.android.gms.samples.vision.face.multitracker.ui.camera.GraphicOverlay;
import com.google.android.gms.vision.CameraSource;
import com.google.android.gms.vision.MultiProcessor;
import com.google.android.gms.vision.barcode.BarcodeDetector;
/**
* Activity for the multi-tracker app. This app detects faces and barcodes with the rear facing
* camera, and draws overlay graphics to indicate the position, size, and ID of each face and
* barcode.
*/
public final class MultiTrackerActivity extends AppCompatActivity {
private static final String TAG = "MultiTracker";
private static final int RC_HANDLE_GMS = 9001;
// permission request codes need to be < 256
private static final int RC_HANDLE_CAMERA_PERM = 2;
private CameraSource mCameraSource = null;
private CameraSourcePreview mPreview;
private GraphicOverlay mGraphicOverlay;
/**
* Initializes the UI and creates the detector pipeline.
*/
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
setContentView(R.layout.main);
mPreview = (CameraSourcePreview) findViewById(R.id.preview);
mGraphicOverlay = (GraphicOverlay) findViewById(R.id.faceOverlay);
// Check for the camera permission before accessing the camera. If the
// permission is not granted yet, request permission.
int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
if (rc == PackageManager.PERMISSION_GRANTED) {
createCameraSource();
} else {
requestCameraPermission();
}
}
/**
* Handles the requesting of the camera permission. This includes
* showing a "Snackbar" message of why the permission is needed then
* sending the request.
*/
private void requestCameraPermission() {
Log.w(TAG, "Camera permission is not granted. Requesting permission");
final String[] permissions = new String[]{Manifest.permission.CAMERA};
if (!ActivityCompat.shouldShowRequestPermissionRationale(this,
Manifest.permission.CAMERA)) {
ActivityCompat.requestPermissions(this, permissions, RC_HANDLE_CAMERA_PERM);
return;
}
final Activity thisActivity = this;
View.OnClickListener listener = new View.OnClickListener() {
@Override
public void onClick(View view) {
ActivityCompat.requestPermissions(thisActivity, permissions,
RC_HANDLE_CAMERA_PERM);
}
};
Snackbar.make(mGraphicOverlay, R.string.permission_camera_rationale,
Snackbar.LENGTH_INDEFINITE)
.setAction(R.string.ok, listener)
.show();
}
/**
* Creates and starts the camera. Note that this uses a higher resolution in comparison
* to other detection examples to enable the barcode detector to detect small barcodes
* at long distances.
*/
private void createCameraSource() {
Context context = getApplicationContext();
// A barcode detector is created to track barcodes. An associated multi-processor instance
// is set to receive the barcode detection results, track the barcodes, and maintain
// graphics for each barcode on screen. The factory is used by the multi-processor to
// create a separate tracker instance for each barcode.
BarcodeDetector barcodeDetector = new BarcodeDetector.Builder(context).build();
BarcodeTrackerFactory barcodeFactory = new BarcodeTrackerFactory(mGraphicOverlay,this);
barcodeDetector.setProcessor(
new MultiProcessor.Builder<>(barcodeFactory).build());
if (!barcodeDetector.isOperational()) {
// Note: The first time that an app using the barcode or face API is installed on a
// device, GMS will download native libraries to the device in order to do detection.
// Usually this completes before the app is run for the first time. But if that
// download has not yet completed, then the above call will not detect any barcodes
// and/or faces.
//
// isOperational() can be used to check if the required native libraries are currently
// available. The detectors will automatically become operational once the library
// downloads complete on device.
Log.w(TAG, "Detector dependencies are not yet available.");
}
// Creates and starts the camera. Note that this uses a higher resolution in comparison
// to other detection examples to enable the barcode detector to detect small barcodes
// at long distances.
mCameraSource = new CameraSource.Builder(getApplicationContext(), barcodeDetector)
.setFacing(CameraSource.CAMERA_FACING_BACK)
.setRequestedPreviewSize(1600, 1024)
.setRequestedFps(15.0f)
.build();
}
/**
* Restarts the camera.
*/
@Override
protected void onResume() {
super.onResume();
startCameraSource();
}
/**
* Stops the camera.
*/
@Override
protected void onPause() {
super.onPause();
mPreview.stop();
}
/**
* Releases the resources associated with the camera source, the associated detectors, and the
* rest of the processing pipeline.
*/
@Override
protected void onDestroy() {
super.onDestroy();
if (mCameraSource != null) {
mCameraSource.release();
}
}
/**
* Callback for the result from requesting permissions. This method
* is invoked for every call on {@link #requestPermissions(String[], int)}.
* <p>
* <strong>Note:</strong> It is possible that the permissions request interaction
* with the user is interrupted. In this case you will receive empty permissions
* and results arrays which should be treated as a cancellation.
* </p>
*
* @param requestCode The request code passed in {@link #requestPermissions(String[], int)}.
* @param permissions The requested permissions. Never null.
* @param grantResults The grant results for the corresponding permissions
* which is either {@link PackageManager#PERMISSION_GRANTED}
* or {@link PackageManager#PERMISSION_DENIED}. Never null.
* @see #requestPermissions(String[], int)
*/
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
if (requestCode != RC_HANDLE_CAMERA_PERM) {
Log.d(TAG, "Got unexpected permission result: " + requestCode);
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
return;
}
if (grantResults.length != 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
Log.d(TAG, "Camera permission granted - initialize the camera source");
// we have permission, so create the camerasource
createCameraSource();
return;
}
Log.e(TAG, "Permission not granted: results len = " + grantResults.length +
" Result code = " + (grantResults.length > 0 ? grantResults[0] : "(empty)"));
DialogInterface.OnClickListener listener = new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
finish();
}
};
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("Multitracker sample")
.setMessage(R.string.no_camera_permission)
.setPositiveButton(R.string.ok, listener)
.show();
}
/**
* Starts or restarts the camera source, if it exists. If the camera source doesn't exist yet
* (e.g., because onResume was called before the camera source was created), this will be called
* again when the camera source is created.
*/
private void startCameraSource() {
// check that the device has play services available.
int code = GoogleApiAvailability.getInstance().isGooglePlayServicesAvailable(
getApplicationContext());
if (code != ConnectionResult.SUCCESS) {
Dialog dlg =
GoogleApiAvailability.getInstance().getErrorDialog(this, code, RC_HANDLE_GMS);
dlg.show();
}
if (mCameraSource != null) {
try {
mPreview.start(mCameraSource, mGraphicOverlay);
} catch (IOException e) {
Log.e(TAG, "Unable to start camera source.", e);
mCameraSource.release();
mCameraSource = null;
}
}
}
}
Now, to launch the scanner and receive the result:
BarCodeReader.java
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.FragmentActivity;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.Toast;
public class BarCodeReader extends FragmentActivity {
String QR_CODE = "256";
private Button btnscan;
@Override
protected void onCreate(Bundle bundle) {
super.onCreate(bundle);
setContentView(R.layout.scanlayout);
btnscan = (Button) findViewById(R.id.scanbtn);
btnscan.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent(BarCodeReader.this,
MultiTrackerActivity.class);
startActivityForResult(intent, 0);
}
});
}
public void onActivityResult(int requestCode, int resultCode, Intent intent) {
if (requestCode == 0) {
if (resultCode == RESULT_OK) {
String contents = intent.getStringExtra("SCAN_RESULT");
String format = intent.getStringExtra("SCAN_RESULT_FORMAT");
if (format.equals(QR_CODE)) {
Toast.makeText(
BarCodeReader.this,
"content is : " + contents + " \n Format is: "
+ format, Toast.LENGTH_LONG).show();
} else {
Toast.makeText(BarCodeReader.this, "wrong format",
Toast.LENGTH_LONG).show();
}
} else if (resultCode == RESULT_CANCELED) {
// To Handle cancel
Log.i("App", "Scan unsuccessful");
}
}
}
}
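A small note on the hard-coded QR_CODE = "256": the value compared here is the numeric barcode format constant from the Mobile Vision API, so the check can be written against Barcode.QR_CODE instead of a magic string. A sketch, assuming the same intent extras as above:
// Sketch: compare the scanned format against the library constant rather than "256".
// Requires: import com.google.android.gms.vision.barcode.Barcode;
int format = Integer.parseInt(intent.getStringExtra("SCAN_RESULT_FORMAT"));
if (format == Barcode.QR_CODE) { // Barcode.QR_CODE has the value 256
    // handle QR code contents
} else {
    // some other barcode format
}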

Speech recognition as a background service in PhoneGap/Cordova

I am developing a speech recognition system on Android and have succeeded, but the Google Voice dialog box keeps annoying me and I want to get rid of it; i.e., I want to run voice recognition in the background as a service. I know there are many SO posts on this with answers, but the code in every one of them is native Java, and I develop with PhoneGap, so I am not able to port it. My code is below; any help would be appreciated! I use this plugin with PhoneGap. Thanks in advance.
package com.phonegap.plugins.speech;
import java.util.ArrayList;
import java.util.Locale;
import org.json.JSONArray;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.CallbackContext;
import android.util.Log;
import android.app.Activity;
import android.content.Intent;
import android.speech.RecognizerIntent;
/**
* Style and such borrowed from the TTS and PhoneListener plugins
*/
public class SpeechRecognizer extends CordovaPlugin {
private static final String LOG_TAG = SpeechRecognizer.class.getSimpleName();
private static int REQUEST_CODE = 1001;
private CallbackContext callbackContext;
private LanguageDetailsChecker languageDetailsChecker;
//@Override
public boolean execute(String action, JSONArray args, CallbackContext callbackContext) {
Boolean isValidAction = true;
this.callbackContext= callbackContext;
// Action selector
if ("startRecognize".equals(action)) {
// recognize speech
startSpeechRecognitionActivity(args);
} else if ("getSupportedLanguages".equals(action)) {
getSupportedLanguages();
} else {
// Invalid action
this.callbackContext.error("Unknown action: " + action);
isValidAction = false;
}
return isValidAction;
}
// Get the list of supported languages
private void getSupportedLanguages() {
if (languageDetailsChecker == null){
languageDetailsChecker = new LanguageDetailsChecker(callbackContext);
}
// Create and launch get languages intent
Intent detailsIntent = new Intent(RecognizerIntent.ACTION_GET_LANGUAGE_DETAILS);
cordova.getActivity().sendOrderedBroadcast(detailsIntent, null, languageDetailsChecker, null, Activity.RESULT_OK, null, null);
}
/**
* Fire an intent to start the speech recognition activity.
*
* @param args Argument array with the following string args: [req code][number of matches][prompt string]
*/
private void startSpeechRecognitionActivity(JSONArray args) {
int maxMatches = 0;
String prompt = "";
String language = Locale.getDefault().toString();
try {
if (args.length() > 0) {
// Maximum number of matches, 0 means the recognizer decides
String temp = args.getString(0);
maxMatches = Integer.parseInt(temp);
}
if (args.length() > 1) {
// Optional text prompt
prompt = args.getString(1);
}
if (args.length() > 2) {
// Optional language specified
language = args.getString(2);
}
}
catch (Exception e) {
Log.e(LOG_TAG, String.format("startSpeechRecognitionActivity exception: %s", e.toString()));
}
// Create the intent and set parameters
Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
if (maxMatches > 0)
intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxMatches);
if (!prompt.equals(""))
intent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);
cordova.startActivityForResult(this, intent, REQUEST_CODE);
}
/**
* Handle the results from the recognition activity.
*/
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
if (resultCode == Activity.RESULT_OK) {
// Fill the list view with the strings the recognizer thought it could have heard
ArrayList<String> matches = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
returnSpeechResults(matches);
}
else {
// Failure - Let the caller know
this.callbackContext.error(Integer.toString(resultCode));
}
super.onActivityResult(requestCode, resultCode, data);
}
private void returnSpeechResults(ArrayList<String> matches) {
JSONArray jsonMatches = new JSONArray(matches);
this.callbackContext.success(jsonMatches);
}
}
Because your need is a little different from the functionality provided by the plugin, it seems that you would need to write your own plugin. You can of course use the plugin sources (especially plugin.xml, SpeechRecognizer.js and SpeechRecognizer.java) as an example of how to implement it in general.
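If you do write your own plugin, one way to avoid the Google Voice dialog is to use the framework's android.speech.SpeechRecognizer class directly instead of launching ACTION_RECOGNIZE_SPEECH as an activity. A rough sketch of the relevant part of such a plugin; everything except the framework classes (SpeechRecognizer, RecognitionListener, RecognizerIntent) and Cordova's CallbackContext is a placeholder, and the RECORD_AUDIO permission must be declared in the manifest:
// Sketch only: listens without showing any dialog. Must run on the main thread.
import android.content.Intent;
import android.os.Bundle;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import java.util.ArrayList;

// ...inside the CordovaPlugin subclass:
private SpeechRecognizer recognizer;

private void startBackgroundRecognition(final CallbackContext callbackContext) {
    cordova.getActivity().runOnUiThread(new Runnable() {
        @Override
        public void run() {
            recognizer = SpeechRecognizer.createSpeechRecognizer(cordova.getActivity());
            recognizer.setRecognitionListener(new RecognitionListener() {
                @Override public void onResults(Bundle results) {
                    ArrayList<String> matches =
                            results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
                    callbackContext.success(new org.json.JSONArray(matches));
                }
                @Override public void onError(int error) {
                    callbackContext.error(error);
                }
                // Remaining RecognitionListener callbacks left empty for brevity.
                @Override public void onReadyForSpeech(Bundle params) {}
                @Override public void onBeginningOfSpeech() {}
                @Override public void onRmsChanged(float rmsdB) {}
                @Override public void onBufferReceived(byte[] buffer) {}
                @Override public void onEndOfSpeech() {}
                @Override public void onPartialResults(Bundle partialResults) {}
                @Override public void onEvent(int eventType, Bundle params) {}
            });
            Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                    RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
            recognizer.startListening(intent);
        }
    });
}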

Android app Address family not supported by protocol error?

my code:
private static final String TAG = "TextToSpeechDemo";
private TextToSpeech mTts;
private Button mAgainButton;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.speak);
TextToSpeech mTts = new TextToSpeech(this,this);
// Initialize text-to-speech. This is an asynchronous operation.
// The OnInitListener (second argument) is called after initialization completes.
mTts = new TextToSpeech(this,
this // TextToSpeech.OnInitListener
);
// The button is disabled in the layout.
// It will be enabled upon initialization of the TTS engine.
mAgainButton = (Button) findViewById(R.id.again_button);
mAgainButton.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
sayHello();
}
});
} // end of onCreate
@Override
public void onInit( int status) {
// TODO Auto-generated method stub
// status can be either TextToSpeech.SUCCESS or TextToSpeech.ERROR.
if (status == TextToSpeech.SUCCESS) {
// Set preferred language to US english.
// Note that a language may not be available, and the result will indicate this.
int result = mTts.setLanguage(Locale.US);
// Try this someday for some interesting results.
// int result mTts.setLanguage(Locale.FRANCE);
if (result == TextToSpeech.LANG_MISSING_DATA ||
result == TextToSpeech.LANG_NOT_SUPPORTED) {
// Language data is missing or the language is not supported.
Log.e(TAG, "Language is not available.");
} else {
// Check the documentation for other possible result codes.
// For example, the language may be available for the locale,
// but not for the specified country and variant.
// The TTS engine has been successfully initialized.
// Allow the user to press the button for the app to speak again.
mAgainButton.setEnabled(true);
// Greet the user.
sayHello();
}
}
else {
// Initialization failed.
Log.e(TAG, "Could not initialize TextToSpeech.");
}
}
private void sayHello() {
Bundle extras = getIntent().getExtras();
String filename = extras.getString("filename");
String filecontent = extras.getString("filecontent");
mTts.speak(filecontent,
TextToSpeech.QUEUE_FLUSH, // Drop all pending entries in the playback queue.
null);
}
It is showing this error: 03-03 12:01:21.778: D/SntpClient(61): request time failed: java.net.SocketException: Address family not supported by protocol
I would take a guess at this. I think, String filename = extras.getString("filename"); should be changed to String filename = extras.getString("file://" + "filename");
Please post relevant logcat output to help everyone understand the problem.

Failure in resolving TextToSpeech class for Android Development

I'm new to Android development, Eclipse and Java (done mostly .Net and IVR programming up till now), so when I tried to compile and run a sample app I found for TTS on the Droid, I wasn't surprised that I got a runtime error right away. The error is:
dalvikvm Failed resolving com/sample/TTSapp/AndroidTTSapp; interface 4 'android/speech/tts/TextToSpeech$OnInitListner;'
I suppose the OnInitListener interface must be in one of the classes that was installed with the Android SDK (release 1.6 R1, I believe), but I'm not sure how to import the associated class into the current program. I can't find a speech/tts/TextToSpeech directory anywhere on my system. Do I need to download this directory from somewhere? Following is the Java source code for the demo TTS program I'm trying to run:
package com.sample.TTSApp;
import android.app.Activity;
import android.os.Bundle;
import android.speech.tts.TextToSpeech;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import com.sample.TTSApp.R;
import java.util.Locale;
import java.util.Random;
public class AndroidTTSapp extends Activity implements
TextToSpeech.OnInitListener
{
private static final String TAG = "TextToSpeechDemo";
private TextToSpeech mTts;
private Button mAgainButton;
@Override
public void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
// Initialize text-to-speech. This is an asynchronous operation.
// The OnInitListener (second argument) is called after initialization completes.
// Instantiate TextToSpeech.OnInitListener
mTts = new TextToSpeech(this, this);
mAgainButton = (Button) findViewById(R.id.again_button);
mAgainButton.setOnClickListener(new View.OnClickListener()
{
public void onClick(View v)
{
sayHello();
}
});
}
@Override
public void onDestroy()
{ // Don't forget to shutdown!
if (mTts != null)
{
mTts.stop();
mTts.shutdown();
}
super.onDestroy();
}
// Implements TextToSpeech.OnInitListener.
public void onInit(int status)
{
// status can be either TextToSpeech.SUCCESS or TextToSpeech.ERROR.
if (status == TextToSpeech.SUCCESS)
{
int result = mTts.setLanguage(Locale.US);
if (result == TextToSpeech.LANG_MISSING_DATA || result == TextToSpeech.LANG_NOT_SUPPORTED)
{
// Language data is missing or the language is not supported.
Log.e(TAG, "Language is not available.");
}
else
{
// Check the documentation for other possible result codes. For example, the language may be available for the locale
// but not for the specified country and variant.
// The TTS engine has been successfully initialized. Allow the user to press the button for the app to speak again.
mAgainButton.setEnabled(true);
// Greet the user
sayHello();
}
}
else
{
// Initialization failed.
Log.e(TAG, "Could not initialize TextToSpeech.");
}
};
private static final Random RANDOM = new Random();
private static final String[] HELLOS =
{
"Hello World", "This is Text to speech demo by Zahid Shaikh"
};
private void sayHello()
{
// Select a random hello.
int i =0;
int helloLength = HELLOS.length;
String hello = HELLOS[i];
i++;
if(i == helloLength) i =0;
mTts.speak(hello,TextToSpeech.QUEUE_FLUSH,null);
}
}
Thanks in advance for any assistance anyone can give a beginner like myself.
Don Tilley
On the device or emulator, the TTS language files (*.bin) must be present in /system/tts/lang_pico/.
Here is an example of initializing TTS:
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
........
........
initTTS();
}
private void initTTS() {
Intent checkIntent = new Intent();
checkIntent.setAction(TextToSpeech.Engine.ACTION_CHECK_TTS_DATA);
startActivityForResult(checkIntent, MY_DATA_CHECK_CODE);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if(requestCode == MY_DATA_CHECK_CODE) {
if(resultCode == TextToSpeech.Engine.CHECK_VOICE_DATA_PASS) {
mTts = new TextToSpeech(this, this);
} else {
Intent installIntent = new Intent();
installIntent.setAction(TextToSpeech.Engine.ACTION_INSTALL_TTS_DATA);
startActivity(installIntent);
}
}
}
public void onInit(int status) {
if(status == TextToSpeech.SUCCESS) {
int result = mTts.setLanguage(Locale.US);
if(result == TextToSpeech.LANG_AVAILABLE
|| result == TextToSpeech.LANG_COUNTRY_AVAILABLE) {
mTts.speak("Start system", TextToSpeech.QUEUE_FLUSH, null);
}
}
}
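Note that the snippet above assumes a few members are already declared in the activity (and that the activity implements TextToSpeech.OnInitListener, so that new TextToSpeech(this, this) compiles). A minimal sketch of those assumed declarations:
// Assumed declarations for the example above; the request-code value is arbitrary.
private TextToSpeech mTts;
private static final int MY_DATA_CHECK_CODE = 1234;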
