Unable to read output from barcode scanner - android

I want to put the result from the barcode scanner into the EditText, but it is not displaying in the EditText. May I know what the problem is and how to solve it?
This is my code:
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import android.widget.EditText;
import android.widget.Toast;
import com.google.zxing.Result;
import me.dm7.barcodescanner.zxing.ZXingScannerView;
public class ScannerActivity extends AppCompatActivity {
private ZXingScannerView scannerView;
private EditText ScanBarcode;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_scanner);
ScanBarcode = (EditText)findViewById(R.id.editscanbar);
}
public void scanCode(View view){
scannerView = new ZXingScannerView(this);
scannerView.setResultHandler(new ZXingScannerResultHandler());
setContentView(scannerView);
scannerView.startCamera();
}
@Override
public void onPause(){
super.onPause();
scannerView.stopCamera();
}
class ZXingScannerResultHandler implements ZXingScannerView.ResultHandler{
@Override
public void handleResult(Result result){
String resultCode = result.getText().toString();
ScanBarcode.setText(resultCode);
Toast.makeText(ScannerActivity.this, resultCode,
Toast.LENGTH_SHORT).show();
setContentView(R.layout.activity_scanner);
scannerView.stopCamera();
}
}
}
I've changed it as shown below and there is still nothing showing up in the EditText:
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.EditText;
import com.google.zxing.Result;
import me.dm7.barcodescanner.zxing.ZXingScannerView;
public class ScannerActivity extends AppCompatActivity implements
ZXingScannerView.ResultHandler {
private ZXingScannerView mScannerView;
private EditText ScanBarcode;
@Override
public void onCreate(Bundle State) {
super.onCreate(State);
mScannerView = new ZXingScannerView(this);
setContentView(R.layout.activity_scanner);
ScanBarcode = (EditText)findViewById(R.id.editscanbar);
}
public void scanCode(View view){
mScannerView.setResultHandler(this);
setContentView(mScannerView);
mScannerView.startCamera();
}
@Override
public void onResume(){
super.onResume();
mScannerView.setResultHandler(this);
mScannerView.startCamera();
}
@Override
public void onPause(){
super.onPause();
mScannerView.stopCamera();
}
@Override
public void handleResult(Result rawResult){
ScanBarcode.setText(rawResult.getText());
setContentView(R.layout.activity_scanner);
mScannerView.stopCamera();
}
}

You should implement the ResultHandler in your Activity, override the callback method, and set the result of the scan in your EditText there. For example:
public class SimpleScannerActivity extends Activity implements ZXingScannerView.ResultHandler {
private ZXingScannerView mScannerView;
private EditText editText;
@Override
public void onCreate(Bundle state) {
super.onCreate(state);
mScannerView = new ZXingScannerView(this); // Programmatically initialize the scanner view
setContentView(mScannerView); // Set the scanner view as the content view
editText = (EditText) findViewById(R.id.myedittext); // assign the field, not a new local variable
}
@Override
public void onResume() {
super.onResume();
mScannerView.setResultHandler(this); // Register ourselves as a handler for scan results.
mScannerView.startCamera(); // Start camera on resume
}
@Override
public void onPause() {
super.onPause();
mScannerView.stopCamera(); // Stop camera on pause
}
@Override
public void handleResult(Result rawResult) {
// Do something with the result here
editText.setText(rawResult.getText());
Log.v(TAG, rawResult.getText()); // Prints scan results
Log.v(TAG, rawResult.getBarcodeFormat().toString()); // Prints the scan format (qrcode, pdf417 etc.)
// If you would like to resume scanning, call this method below:
mScannerView.resumeCameraPreview(this);
}
}
Hope it helps!
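If the second attempt in the question still shows nothing, one likely cause is that handleResult calls setContentView(R.layout.activity_scanner) again: that inflates a brand-new EditText, while the ScanBarcode field still points at the old, detached one. A minimal sketch of a handleResult that looks the view up again after switching layouts (assuming the same layout and ids as in the question):
@Override
public void handleResult(Result rawResult) {
    mScannerView.stopCamera();
    // setContentView inflates a fresh view hierarchy, so find the EditText again
    setContentView(R.layout.activity_scanner);
    ScanBarcode = (EditText) findViewById(R.id.editscanbar);
    ScanBarcode.setText(rawResult.getText());
}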

Related

Method loadVideoFromAsset must be called from UI thread

I am trying to implement a 360 video viewer in my project but I am getting an error for the line:
mVrVideoView.loadVideoFromAsset("sea.mp4", options);
This is the error
Method loadVideoFromAsset must be called from the UI thread, currently inferred
thread is worker
Following is my code:
package com.example.jal.jp;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.View;
import android.widget.Button;
import android.widget.SeekBar;
import com.google.vr.sdk.widgets.video.VrVideoEventListener;
import com.google.vr.sdk.widgets.video.VrVideoView;
import java.io.IOException;
public abstract class VR_Video extends AppCompatActivity implements SeekBar.OnSeekBarChangeListener {
private VrVideoView mVrVideoView;
private SeekBar mSeekBar;
private Button mVolumeButton;
private boolean mIsPaused;
private boolean mIsMuted;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_vr__video);
initViews();
}
public void onPlayPausePressed() {
}
public void onVolumeToggleClicked() {
}
@Override
public void onProgressChanged(SeekBar seekBar, int i, boolean b) {
}
private void initViews() {
mVrVideoView = (VrVideoView) findViewById(R.id.video_view);
mSeekBar = (SeekBar) findViewById(R.id.seek_bar);
mVolumeButton = (Button) findViewById(R.id.btn_volume);
mVrVideoView.setEventListener(new ActivityEventListener());
mSeekBar.setOnSeekBarChangeListener(this);
mVolumeButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
onVolumeToggleClicked();
}
});
}
class VideoLoaderTask extends AsyncTask<Void, Void, Boolean> {
@Override
protected Boolean doInBackground(Void... voids) {
try {
VrVideoView.Options options = new VrVideoView.Options();
options.inputType = VrVideoView.Options.TYPE_MONO;
mVrVideoView.loadVideoFromAsset("sea.mp4", options);
} catch( IOException e ) {
//Handle exception
}
return true;
}
}
public void playPause() {
}
@Override
protected void onPause() {
super.onPause();
mVrVideoView.pauseRendering();
mIsPaused = true;
}
@Override
protected void onResume() {
super.onResume();
mVrVideoView.resumeRendering();
mIsPaused = false;
}
@Override
protected void onDestroy() {
mVrVideoView.shutdown();
super.onDestroy();
}
private class ActivityEventListener extends VrVideoEventListener {
@Override
public void onLoadSuccess() {
super.onLoadSuccess();
}
@Override
public void onLoadError(String errorMessage) {
super.onLoadError(errorMessage);
}
@Override
public void onClick() {
super.onClick();
}
@Override
public void onNewFrame() {
super.onNewFrame();
}
@Override
public void onCompletion() {
super.onCompletion();
}
}
}
Please help. I tried my best but couldn't fix it.
Remove the AsyncTask implementation and call the required methods from the UI thread.
Use the code below:
private void initViews() {
mVrVideoView = (VrVideoView) findViewById(R.id.video_view);
mSeekBar = (SeekBar) findViewById(R.id.seek_bar);
mVolumeButton = (Button) findViewById(R.id.btn_volume);
mVrVideoView.setEventListener(new ActivityEventListener());
try {
VrVideoView.Options options = new VrVideoView.Options();
options.inputType = VrVideoView.Options.TYPE_MONO;
mVrVideoView.loadVideoFromAsset("sea.mp4", options);
} catch( IOException e ) {
//Handle exception
}
mSeekBar.setOnSeekBarChangeListener(this);
mVolumeButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
onVolumeToggleClicked();
}
});
}
I don't know much about VrVideoView, but I see in your code that you are trying to access some of the views in the doInBackground method. doInBackground runs on a separate thread from the UI thread so that complex and time-consuming tasks do not block the UI. You can move the work you are doing in doInBackground into the onCreate method, or, if you still need to use doInBackground, you can access the UI thread inside it using runOnUiThread as follows:
runOnUiThread(new Runnable() {
@Override
public void run() {
//your code here
}
});
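For example, keeping the question's VideoLoaderTask, the load call could be posted back to the UI thread like this (a sketch that assumes the same mVrVideoView field and "sea.mp4" asset as in the question):
class VideoLoaderTask extends AsyncTask<Void, Void, Boolean> {
    @Override
    protected Boolean doInBackground(Void... voids) {
        // loadVideoFromAsset must run on the UI thread, so post it there
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                try {
                    VrVideoView.Options options = new VrVideoView.Options();
                    options.inputType = VrVideoView.Options.TYPE_MONO;
                    mVrVideoView.loadVideoFromAsset("sea.mp4", options);
                } catch (IOException e) {
                    // Handle exception
                }
            }
        });
        return true;
    }
}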

How to set up the library in Android Studio

I am trying to set up the WaveInApp library http://www.materialup.com/posts/waveinapp
I have declared all the required things. The app is working, but the background wave is not. As a beginner in Android development, I am not able to set up the wave function.
I am able to set up all the basic things, including the media player.
What I cannot set up is the speech recognition handler.
Here is what I have done:
import android.content.Context;
import android.media.MediaPlayer;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import android.widget.ImageButton;
import android.widget.Toast;
import com.cleveroad.audiovisualization.AudioVisualization;
import com.cleveroad.audiovisualization.DbmHandler;
import com.cleveroad.audiovisualization.SpeechRecognizerDbmHandler;
import com.cleveroad.audiovisualization.VisualizerDbmHandler;
public class MainActivity extends AppCompatActivity {
private AudioVisualization audioVisualization;
private Context context;
private ImageButton button1,button2;
private MediaPlayer mediaPlayer;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
audioVisualization = (AudioVisualization)findViewById(R.id.visualizer_view);
button1=(ImageButton)findViewById(R.id.imageButton);
button2=(ImageButton)findViewById(R.id.imageButton2);
mediaPlayer =MediaPlayer.create(this,R.raw.song);
button2.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Toast.makeText(getApplicationContext(),"playing",Toast.LENGTH_SHORT).show();
mediaPlayer.start();
}
});
button1.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Toast.makeText(getApplicationContext(),"pause",Toast.LENGTH_SHORT).show();
mediaPlayer.pause();
VisualizerDbmHandler vizualizerHandler = DbmHandler.Factory.newVisualizerHandler(getContext(), 0);
audioVisualization.linkTo(vizualizerHandler);
// set speech recognizer handler
SpeechRecognizerDbmHandler speechRecHandler = DbmHandler.Factory.newSpeechRecognizerHandler(context);
speechRecHandler.innerRecognitionListener(...);
audioVisualization.linkTo(speechRecHandler);
}
});
}
@Override
public void onResume() {
super.onResume();
audioVisualization.onResume();
}
@Override
public void onPause() {
audioVisualization.onPause();
super.onPause();
}
}
So how do I set up the above handler? I am not able to connect the audio visualization view to the audio output. How do I use the following methods with the media player?
// set speech recognizer handler
SpeechRecognizerDbmHandler speechRecHandler = DbmHandler.Factory.newSpeechRecognizerHandler(context);
speechRecHandler.innerRecognitionListener(...);
audioVisualization.linkTo(speechRecHandler);
// set audio visualization handler. This will REPLACE previously set speech recognizer handler
VisualizerDbmHandler vizualizerHandler = DbmHandler.Factory.newVisualizerHandler(getContext(), 0);
audioVisualization.linkTo(vizualizerHandler);
Except for this, everything is working. Any hint or advice will be helpful.
I tried to search and found this:
import android.content.Intent;
import android.os.Bundle;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import com.cleveroad.audiovisualization.AudioVisualization;
import com.cleveroad.audiovisualization.DbmHandler;
import com.cleveroad.audiovisualization.SpeechRecognizerDbmHandler;
public class SpeechRecognitionFragment extends Fragment {
public static SpeechRecognitionFragment newInstance() {
return new SpeechRecognitionFragment();
}
private AudioVisualization audioVisualization;
private Button btnRecognize;
private SpeechRecognizerDbmHandler handler;
private boolean recognizing;
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.fragment_gles, container, false);
audioVisualization = (AudioVisualization) view.findViewById(R.id.visualizer_view);
btnRecognize = (Button) view.findViewById(R.id.btn_recognize);
return view;
}
@Override
public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
btnRecognize.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (recognizing) {
handler.stopListening();
} else {
Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, getContext().getPackageName());
handler.startListening(intent);
}
btnRecognize.setEnabled(false);
}
});
handler = DbmHandler.Factory.newSpeechRecognizerHandler(getContext());
handler.innerRecognitionListener(new SimpleRecognitionListener() {
@Override
public void onReadyForSpeech(Bundle params) {
super.onReadyForSpeech(params);
onStartRecognizing();
}
@Override
public void onResults(Bundle results) {
super.onResults(results);
onStopRecognizing();
}
@Override
public void onError(int error) {
super.onError(error);
onStopRecognizing();
}
});
audioVisualization.linkTo(handler);
}
private void onStopRecognizing() {
recognizing = false;
btnRecognize.setText(R.string.start_recognition);
btnRecognize.setEnabled(true);
}
private void onStartRecognizing() {
btnRecognize.setText(R.string.stop_recognition);
btnRecognize.setEnabled(true);
recognizing = true;
}
@Override
public void onDestroyView() {
audioVisualization.release();
super.onDestroyView();
}
private static class SimpleRecognitionListener implements RecognitionListener {
@Override
public void onReadyForSpeech(Bundle params) {
}
@Override
public void onBeginningOfSpeech() {
}
@Override
public void onRmsChanged(float rmsdB) {
}
@Override
public void onBufferReceived(byte[] buffer) {
}
@Override
public void onEndOfSpeech() {
}
@Override
public void onError(int error) {
}
@Override
public void onResults(Bundle results) {
}
@Override
public void onPartialResults(Bundle partialResults) {
}
@Override
public void onEvent(int eventType, Bundle params) {
}
}
}
from https://github.com/Cleveroad/WaveInApp/blob/master/app/src/main/java/com/cleveroad/example/SpeechRecognitionFragment.java
So can someone tell me why a fragment is used here and how I can set this up in my main activity?
I know I am late to post this answer, but for future use try this.
Check my code and you will find your answer.
First of all, you have to make some changes in your XML for the library:
<com.cleveroad.audiovisualization.GLAudioVisualizationView
android:id="@+id/visualizer_view"
android:layout_width="match_parent"
android:layout_height="match_parent"
app:av_bubblesSize="@dimen/bubble_size"
app:av_bubblesRandomizeSizes= "true"
app:av_wavesHeight="@dimen/wave_height"
app:av_wavesFooterHeight="@dimen/footer_height"
app:av_wavesCount="7"
app:av_layersCount="4" />
then
private VisualizerDbmHandler handler;
MediaPlayer mp ;
private AudioVisualization audioVisualization;
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
View ve = inflater.inflate(R.layout.cat, container, false);
ibCapture = (ImageButton) ve.findViewById(R.id.ibCapture);
audioVisualization = (AudioVisualization)ve.findViewById(R.id.visualizer_view);
hell();
return ve;
}
ibCapture.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
if (bool) {
bool = false;
mp = MediaPlayer.create(getContext(), R.raw.blackcat);
mp.setLooping(true);
handler = VisualizerDbmHandler.Factory.newVisualizerHandler(getContext(),mp);
audioVisualization.linkTo(handler);
mp.start();
((home)getActivity()).vis();
} else {
bool = true;
((home)getActivity()).visgone();
stopPlaying();
}
}
});
Replace
audioVisualization = (AudioVisualization) glAudioVisualizationView;
With
audioVisualization = (AudioVisualization)findViewById(R.id.glAudioVisualizationView);
glAudioVisualizationView should be the id of the AudioVisualization view in your layout file.
Define it:
private AudioVisualization audioVisualization;
Initialize it:
audioVisualization = (AudioVisualization) view.findViewById(R.id.visualizer_view);
For speech, use the code below:
SpeechRecognizerDbmHandler speechRecHandler = DbmHandler.Factory.newSpeechRecognizerHandler(getContext());
speechRecHandler.innerRecognitionListener();
audioVisualization.linkTo(speechRecHandler);
Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, getContext().getPackageName());
speechRecHandler.startListening(intent);
For media on your device, use the code below (from Sandeep Singh's answer):
MediaPlayer mp = MediaPlayer.create(getContext(), R.raw.blackcat);
mp.setLooping(true);
handler = VisualizerDbmHandler.Factory.newVisualizerHandler(getContext(),mp);
audioVisualization.linkTo(handler);
mp.start();

SharedPreferences in Non-Activity

Hi, I need to get a String from an Activity into another class. I need the String VideoID in the Test2 class. I wanted to use SharedPreferences, but this does not work. How can I use SharedPreferences here? Or is there any other way I can get the String? Thanks.
In my Activity I have this Button:
// Video Button
final Button videobutton = (Button) findViewById(R.id.videobutton);
videobutton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// Video ID save
SharedPreferences.Editor editor = getSharedPreferences("VideoID_saver", MODE_PRIVATE).edit();
editor.putString("VideoID", current_video);
editor.commit();
Intent OpenTest = new Intent(Questions.this, Test2.class);
startActivity(OpenTest);
}
});
The Test2.class looks like this:
import android.content.SharedPreferences;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
import com.google.android.youtube.player.YouTubeBaseActivity;
import com.google.android.youtube.player.YouTubeInitializationResult;
import com.google.android.youtube.player.YouTubePlayer;
import com.google.android.youtube.player.YouTubePlayer.ErrorReason;
import com.google.android.youtube.player.YouTubePlayer.PlaybackEventListener;
import com.google.android.youtube.player.YouTubePlayer.PlayerStateChangeListener;
import com.google.android.youtube.player.YouTubePlayer.Provider;
import com.google.android.youtube.player.YouTubePlayerView;
public class Test2 extends YouTubeBaseActivity implements YouTubePlayer.OnInitializedListener {
public static final String API_KEY = "AIzaSyCb2pCHbqhBDKaYzFvU7g1SBG14YrM3XWE";
//Test
SharedPreferences prefs = getSharedPreferences("VideoID_saver", MODE_PRIVATE);
String testtext = prefs.getString("VideoID", "");
//http://youtu.be/<VIDEO_ID>
public final String VIDEO_ID = testtext;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_test2);
/** Initializing YouTube player view **/
YouTubePlayerView youTubePlayerView = (YouTubePlayerView) findViewById(R.id.youtube_player);
youTubePlayerView.initialize(API_KEY, this);
}
@Override
public void onInitializationFailure(Provider provider, YouTubeInitializationResult result) {
Toast.makeText(this, "Failured to Initialize!", Toast.LENGTH_LONG).show();
}
@Override
public void onInitializationSuccess(Provider provider, YouTubePlayer player, boolean wasRestored) {
/** add listeners to YouTubePlayer instance **/
player.setPlayerStateChangeListener(playerStateChangeListener);
player.setPlaybackEventListener(playbackEventListener);
/** Start buffering **/
if (!wasRestored) {
player.cueVideo(VIDEO_ID);
}
}
private PlaybackEventListener playbackEventListener = new PlaybackEventListener() {
@Override
public void onBuffering(boolean arg0) {}
@Override
public void onPaused() {}
@Override
public void onPlaying() {}
@Override
public void onSeekTo(int arg0) {}
@Override
public void onStopped() {}
};
private PlayerStateChangeListener playerStateChangeListener = new PlayerStateChangeListener() {
@Override
public void onAdStarted() {}
@Override
public void onError(ErrorReason arg0) {}
@Override
public void onLoaded(String arg0) {}
@Override
public void onLoading() {}
@Override
public void onVideoEnded() {}
@Override
public void onVideoStarted() {}
};
}
You're probably somehow accessing different SharedPreferences instances. It's easier to make sure this doesn't happen using the getDefaultSharedPreferences() method:
In your Activity:
PreferenceManager.getDefaultSharedPreferences(this)
.edit()
.putString("VideoID", current_video)
.apply();
In your Test2 onCreate method:
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
String testtext = prefs.getString("VideoID", "");
A couple of things. Test2 seems to be inheriting from an Activity, so I'm not sure why your title says Non-Activity. The SharedPreferences reading and writing look OK, but reading the SharedPreferences in a field initializer, before the Activity's onCreate has had a chance to run, is too early; move that read into onCreate, as sketched below.
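A minimal sketch of that change, assuming the same preference file name and key as in the question:
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_test2);
    SharedPreferences prefs = getSharedPreferences("VideoID_saver", MODE_PRIVATE);
    String videoId = prefs.getString("VideoID", ""); // read here, not in a field initializer
    // use videoId where VIDEO_ID was used before
}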
Your goal is to get a string from the button's Activity to Test2. It seems like it would be better to avoid SharedPreferences entirely and pass the string to the next Activity directly via putExtra/getExtras.
Like so:
Intent OpenTest = new Intent(Questions.this, Test2.class);
OpenTest.putExtra("VideoID", current_video);
startActivity(OpenTest);
Then extract it in your new Activity's onCreate method:
//http://youtu.be/<VIDEO_ID>
public String VIDEO_ID; // not final, since it is assigned in onCreate
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//Test
Intent intent = getIntent();
VIDEO_ID = intent.getExtras().getString("VideoID");
}

Radius Network Beacon Library 2.0 (AltBeacon) unable to detect Beacons

Hello, I'm currently using the Radius Networks Beacon SDK but am unable to detect my beacons.
In didRangeBeaconsInRegion(Collection beacons, Region region) the collection size is 0.
Please help!
RangingActivity Code-
import java.util.Collection;
import android.app.Activity;
import android.os.Bundle;
import android.os.RemoteException;
import android.util.Log;
import android.widget.EditText;
import org.altbeacon.beacon.AltBeacon;
import org.altbeacon.beacon.Beacon;
import org.altbeacon.beacon.BeaconConsumer;
import org.altbeacon.beacon.BeaconManager;
import org.altbeacon.beacon.BeaconParser;
import org.altbeacon.beacon.RangeNotifier;
import org.altbeacon.beacon.Region;
public class RangingActivity extends Activity implements BeaconConsumer {
protected static final String TAG = "RangingActivity";
private BeaconManager beaconManager = BeaconManager.getInstanceForApplication(this);
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_ranging);
beaconManager.bind(this);
beaconManager.debug = true;
}
@Override
protected void onDestroy() {
super.onDestroy();
beaconManager.unbind(this);
}
@Override
protected void onPause() {
super.onPause();
}
@Override
protected void onResume() {
super.onResume();
}
@Override
public void onBeaconServiceConnect() {
beaconManager.setRangeNotifier(new RangeNotifier() {
@Override
public void didRangeBeaconsInRegion(Collection<Beacon> beacons, Region region) {
if (beacons.size() > 0) {
EditText editText = (EditText)RangingActivity.this
.findViewById(R.id.rangingText);
Beacon firstBeacon = beacons.iterator().next();
logToDisplay("The first beacon "+firstBeacon.toString()+" is about "+firstBeacon.getDistance()+" meters away.");
}
}
});
try {
beaconManager.startRangingBeaconsInRegion(new Region("myRangingUniqueId", null, null, null));
//beaconManager.updateScanPeriods();
} catch (RemoteException e) { }
}
private void logToDisplay(final String line) {
runOnUiThread(new Runnable() {
public void run() {
EditText editText = (EditText)RangingActivity.this
.findViewById(R.id.rangingText);
editText.append(line+"\n");
}
});
}
}
You need a custom parser to properly recognize those beacons.
Take a look at this answer:
Is this the correct layout to detect iBeacons with AltBeacon's Android Beacon Library?
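For reference, registering a beacon layout is usually done in onCreate before binding. A sketch; the layout string here is the one commonly used for iBeacon-format advertisements and is an assumption, not something taken from this question:
beaconManager.getBeaconParsers().add(new BeaconParser()
    .setBeaconLayout("m:2-3=0215,i:4-19,i:20-21,i:22-23,p:24-24"));
beaconManager.bind(this);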
You need to call onBeaconServiceConnect() in your onCreate function:
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_ranging);
beaconManager.bind(this);
beaconManager.debug = true;
onBeaconServiceConnect();
}

Android TTS onUtteranceCompleted callback isn't getting called

I am trying, unsuccessfully, to get the Android TTS API to read my "utterance" and then call the onUtteranceCompleted() listener. I've registered my TTS object and it returns SUCCESS, so I can't figure out for the life of me why my callback isn't getting called.
I've tried searching for help, but it seems others have difficulty with this too. Am I missing something simple?
Thanks for any help you can offer.
package com.test.mytts;
import java.util.HashMap;
import android.app.Activity;
import android.media.AudioManager;
import android.os.Bundle;
import android.speech.tts.TextToSpeech;
import android.speech.tts.TextToSpeech.OnInitListener;
import android.speech.tts.TextToSpeech.OnUtteranceCompletedListener;
import android.widget.TextView;
import android.widget.Toast;
public class MyTTS extends Activity implements OnInitListener, OnUtteranceCompletedListener
{
TextView tv;
private TextToSpeech _tts;
@Override
public void onCreate(Bundle savedInstanceState)
{
tv = new TextView(this);
tv.setText("MyTTS: ");
super.onCreate(savedInstanceState);
setContentView(tv);
_tts = new TextToSpeech(this, this);
}
@Override
public void onInit(int status)
{
HashMap<String, String> myHashAlarm = new HashMap<String, String>();
myHashAlarm.put(TextToSpeech.Engine.KEY_PARAM_STREAM, String.valueOf(AudioManager.STREAM_NOTIFICATION));
myHashAlarm.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, "test");
if (status == TextToSpeech.SUCCESS)
{
Toast.makeText(this, "Trying to speak...", Toast.LENGTH_SHORT).show();
int result = _tts.setOnUtteranceCompletedListener(this);
tv.append(String.valueOf(result));
_tts.setSpeechRate((float) .5);
_tts.speak("Testing one, two, three", TextToSpeech.QUEUE_ADD, myHashAlarm);
}
else
Toast.makeText(this, "Failed to initialize TTS.", Toast.LENGTH_SHORT).show();
}
@Override
public void onUtteranceCompleted(String utteranceId)
{
Toast.makeText(this, "onUtteranceCompleted", Toast.LENGTH_SHORT).show();
}
@Override
public void onDestroy()
{
super.onDestroy();
_tts.shutdown();
}
}
Call setOnUtteranceCompletedListener inside the onInit function of the TTS object.
If you want to make any changes to the UI when onUtteranceCompleted is called, put that code inside a runOnUiThread call.
And do remember to add the HashMap param value when calling the speak() function.
Example:
TextToSpeech tts= new TextToSpeech(context, new OnInitListener() {
@Override
public void onInit(int status) {
mTts.setOnUtteranceCompletedListener(new OnUtteranceCompletedListener() {
@Override
public void onUtteranceCompleted(String utteranceId) {
runOnUiThread(new Runnable() {
@Override
public void run() {
//UI changes
}
});
}
});
}
});
HashMap<String, String> params = new HashMap<String, String>();
params.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID,"stringId");
tts.speak("Text to Speak",TextToSpeech.QUEUE_FLUSH, params);
I believe that unless you specify an utterance id, like:
map.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, utteranceId);
your utterance-completed method will not be called.
In this case, map is the HashMap you pass to the engine when you speak.
This will work for you on API level >= 15:
import java.util.HashMap;
import java.util.Locale;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.speech.tts.TextToSpeech;
import android.speech.tts.TextToSpeech.OnInitListener;
import android.speech.tts.UtteranceProgressListener;
import android.util.Log;
import android.view.Menu;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;
public class MainActivity extends Activity implements OnInitListener{
private static final int CHECK_TTS_DATA = 0X123;
protected static final String TAG = MainActivity.class.getSimpleName();
private TextToSpeech textToSpeech;
private Button buttonSayIt;
private EditText editTextTts;
String tts;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
buttonSayIt=(Button) findViewById(R.id.buttonSayIt);
editTextTts=(EditText) findViewById(R.id.editTextTts);
buttonSayIt.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
tts=editTextTts.getText().toString();
Log.d(TAG, tts);
speach(tts,"you_utterance_id");
}
});
//check for TTs data
Intent checkTtsDataIntent=new Intent();
checkTtsDataIntent.setAction(TextToSpeech.Engine.ACTION_CHECK_TTS_DATA);
startActivityForResult(checkTtsDataIntent, CHECK_TTS_DATA);
}
protected void speach(String tts,String utteranceId) {
HashMap<String, String> params = new HashMap<String, String>();
params.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID,utteranceId);
textToSpeech.speak(tts,TextToSpeech.QUEUE_FLUSH,params);
}
@Override
public void onInit(int status) {
if(status==TextToSpeech.SUCCESS){
if(textToSpeech.isLanguageAvailable(Locale.US)==TextToSpeech.LANG_AVAILABLE){
textToSpeech.setLanguage(Locale.US);
}
}else if(status==TextToSpeech.ERROR){
Toast.makeText(this, "Sorry Text To Speach faild", Toast.LENGTH_SHORT).show();
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if(requestCode==CHECK_TTS_DATA){
if(resultCode==TextToSpeech.Engine.CHECK_VOICE_DATA_PASS){
textToSpeech=new TextToSpeech(this, this);
textToSpeech.setOnUtteranceProgressListener(utteranceProgressListener);
}else{
Intent installTtsIntent=new Intent();
installTtsIntent.setAction(TextToSpeech.Engine.ACTION_INSTALL_TTS_DATA);
startActivity(installTtsIntent);
}
}
}
UtteranceProgressListener utteranceProgressListener=new UtteranceProgressListener() {
@Override
public void onStart(String utteranceId) {
Log.d(TAG, "onStart ( utteranceId :"+utteranceId+" ) ");
}
@Override
public void onError(String utteranceId) {
Log.d(TAG, "onError ( utteranceId :"+utteranceId+" ) ");
}
@Override
public void onDone(String utteranceId) {
Log.d(TAG, "onDone ( utteranceId :"+utteranceId+" ) ");
}
};
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
}
In case anybody is still finding it difficult, here is a code snippet:
textToSpeech=new TextToSpeech(this, new TextToSpeech.OnInitListener() {
@Override
public void onInit(int status) {
if (status==TextToSpeech.SUCCESS){
int result=textToSpeech.setLanguage(Locale.ENGLISH);
if (result==TextToSpeech.LANG_MISSING_DATA||result==TextToSpeech.LANG_NOT_SUPPORTED){
Log.i("TextToSpeech","Language Not Supported");
}
textToSpeech.setOnUtteranceProgressListener(new UtteranceProgressListener() {
@Override
public void onStart(String utteranceId) {
Log.i("TextToSpeech","On Start");
}
@Override
public void onDone(String utteranceId) {
Log.i("TextToSpeech","On Done");
}
@Override
public void onError(String utteranceId) {
Log.i("TextToSpeech","On Error");
}
});
}else {
Log.i("TextToSpeech","Initialization Failed");
}
}
});
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
textToSpeech.speak(text,TextToSpeech.QUEUE_FLUSH,null,TextToSpeech.ACTION_TTS_QUEUE_PROCESSING_COMPLETED);
}
