Speech recognizer via WCF service with Xamarin Android app client

I would like to write a WCF service that uses the Microsoft.Speech.Recognition library to provide speech-to-text. Here is my service code:
public class Rozpoznawacz : IRozpoznawacz
{
public void AudioToText(Stream audioStr)
{
SpeechRecognitionEngine _sre = new SpeechRecognitionEngine(new System.Globalization.CultureInfo("pl-PL"));
// Create a simple grammar that recognizes the words
Choices words = new Choices();
// Add the words to be recognised
words.Add("red");
words.Add("green");
words.Add("blue");
words.Add("yellow");
words.Add("orange");
words.Add("Dzień dobry");
words.Add("Chrząszcz");
words.Add("Brzmi");
words.Add("w");
words.Add("trzcinie");
words.Add("Wystaw fakturę");
words.Add("Stefan Burczymucha");
GrammarBuilder gb = new GrammarBuilder();
gb.Culture = new System.Globalization.CultureInfo("pl-PL");
gb.Append(words);
// Create the actual Grammar instance, and then load it into the speech recognizer.
Grammar g = new Grammar(gb);
_sre.LoadGrammar(g);
// Register a handler for the SpeechRecognized event.
_sre.SpeechRecognized +=
new EventHandler<SpeechRecognizedEventArgs>(sre_SpeechRecognized);
//_sre.SetInputToDefaultAudioDevice();
_sre.SetInputToWaveStream(audioStr);
_sre.RecognizeAsync(RecognizeMode.Multiple);
}
void sre_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
{
string rozpoznanie = "";
rozpoznanie += e.Result.Text;
using (StreamWriter outfile = new StreamWriter(@"C:\Test.txt"))
{
outfile.Write(rozpoznanie);
}
}
public string Test(string query)
{
return string.Format("Przyjęto: {0}", query);
}
}
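A note on the service side: as written, AudioToText only starts asynchronous recognition and writes whatever gets recognized to C:\Test.txt on the server, so the caller never receives the transcription. Purely as a sketch (reusing the grammar setup above, and assuming the operation contract were changed to return a string; AudioToTextSync is an illustrative name), a synchronous variant could look like this:
public string AudioToTextSync(Stream audioStr)
{
    using (var sre = new SpeechRecognitionEngine(new System.Globalization.CultureInfo("pl-PL")))
    {
        var words = new Choices("red", "green", "blue", "Dzień dobry", "Wystaw fakturę");
        var gb = new GrammarBuilder { Culture = new System.Globalization.CultureInfo("pl-PL") };
        gb.Append(words);
        sre.LoadGrammar(new Grammar(gb));
        // SetInputToWaveStream expects PCM WAV data, not the 3GPP/AMR that MediaRecorder produces.
        sre.SetInputToWaveStream(audioStr);
        // Recognize() blocks until one phrase is recognized; it returns null on silence/timeout.
        RecognitionResult result = sre.Recognize();
        return result != null ? result.Text : string.Empty;
    }
}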
Next, I tried to write an Android app with a service reference. I can record voice, but I don't know how to send the audio file to the web service host. Here is my non-working code in the Android app:
using System;
using Android.App;
using Android.Content;
using Android.Runtime;
using Android.Views;
using Android.Widget;
using Android.OS;
using Android.Media;
using System.IO;
namespace RozpoznawanieMowyZdalne.Adroid
{
[Activity(Label = "RozpoznawanieMowyZdalne.Adroid", MainLauncher = true, Icon = "@drawable/icon")]
public class MainActivity : Activity
{
int count = 1;
MediaRecorder recorder;
MediaPlayer player;
Button btnStart;
Button btnStop;
string path = "/sdcard/test.3gpp";
private RozpoznawaczService.Rozpoznawacz client;
private TextView aLabel;
byte[] audioByte;
protected override void OnCreate(Bundle bundle)
{
base.OnCreate(bundle);
// Set our view from the "main" layout resource
SetContentView(Resource.Layout.Main);
// Get our button from the layout resource,
// and attach an event to it
btnStart = FindViewById<Button>(Resource.Id.btnStart);
btnStop = FindViewById<Button>(Resource.Id.btnStop);
btnStart.Click += delegate
{
client.TestAsync("Android");
btnStop.Enabled = !btnStop.Enabled;
btnStart.Enabled = !btnStart.Enabled;
recorder.SetAudioSource(AudioSource.VoiceRecognition);
recorder.SetOutputFormat(OutputFormat.ThreeGpp);
recorder.SetAudioEncoder(AudioEncoder.AmrNb);
recorder.SetOutputFile(path);
recorder.Prepare();
recorder.Start();
Toast.MakeText(this, "Rozpoczęto nagrywanie", ToastLength.Long).Show();
};
btnStop.Click += delegate
{
btnStop.Enabled = !btnStop.Enabled;
recorder.Stop();
Toast.MakeText(this, "Zakończono nagrywanie", ToastLength.Long).Show();
recorder.Reset();
player.SetDataSource(path);
player.Prepare();
player.Start();
File.WriteAllBytes(path, audioByte);
client.AudioToTextAsync(audioByte);
};
InitializeServiceClient();
}
protected override void OnResume()
{
base.OnResume();
recorder = new MediaRecorder();
player = new MediaPlayer();
player.Completion += (sender, e) =>
{
player.Reset();
btnStart.Enabled = !btnStart.Enabled;
};
}
protected override void OnPause()
{
base.OnPause();
player.Release();
recorder.Release();
player.Dispose();
recorder.Dispose();
player = null;
recorder = null;
}
private void InitializeServiceClient()
{
client = new RozpoznawaczService.Rozpoznawacz();
client.TestCompleted += client_TestCompleted;
client.AudioToTextCompleted += client_AudioToTextCompleted;
aLabel = FindViewById<TextView>(Resource.Id.textViewTest);
}
void client_AudioToTextCompleted(object sender, System.ComponentModel.AsyncCompletedEventArgs e)
{
string msg = null;
if (e.Error != null)
{
msg = e.Error.Message;
}
else if (e.Cancelled)
{
msg = "Request was cancelled.";
}
else
{
//msg = e.Result;
}
RunOnUiThread(() => aLabel.Text = "Wyslane");
}
void client_TestCompleted(object sender, RozpoznawaczService.TestCompletedEventArgs e)
{
string msg = null;
if (e.Error != null)
{
msg = e.Error.Message;
}
else if (e.Cancelled)
{
msg = "Request was cancelled.";
}
else
{
msg = e.Result;
}
RunOnUiThread(() => aLabel.Text = msg);
}
}
}
How can I send my audio file to my web service?
P.S. File.WriteAllBytes(path, audioByte); does not work in the Android app...
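A likely reason the last part fails is that audioByte is never assigned: MediaRecorder has already written the recording to path, so the file should be read back rather than overwritten with an empty buffer. Here is a minimal sketch of the btnStop handler (playback omitted for brevity), assuming the generated proxy exposes AudioToTextAsync(byte[]) as in the code above:
btnStop.Click += delegate
{
    btnStop.Enabled = false;
    recorder.Stop();
    recorder.Reset();
    Toast.MakeText(this, "Zakończono nagrywanie", ToastLength.Long).Show();

    // MediaRecorder wrote the 3GPP file to 'path'; read it back into memory.
    byte[] audioBytes = File.ReadAllBytes(path);

    // The proxy generated for a Stream parameter usually takes a byte[].
    // Note that the service expects WAV/PCM input, so the AMR-encoded 3GPP
    // recording would still need to be converted before recognition can succeed.
    client.AudioToTextAsync(audioBytes);
};
If reading or writing the file fails with a permission error, it may also be worth requesting the external-storage permissions and using a path from GetExternalFilesDir instead of the hard-coded /sdcard/test.3gpp.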

Related

Can I auto-create subtitles from an audio file using MediaPlayer?

Can I create an .srt file from an mp3 programmatically? Below is my code:
holder.llView.setOnClickListener(new View.OnClickListener() {
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
@Override
public void onClick(View v) {
MediaPlayer mediaPlayer = new MediaPlayer();
try {
mediaPlayer.setDataSource(filePathList.get(position));
mediaPlayer.prepare();
//mediaPlayer.addTimedTextSource(subTitleSrc, MediaPlayer.MEDIA_MIMETYPE_TEXT_SUBRIP);
//int textTrackIndex = findTrackIndexFor(
//MediaPlayer.TrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT,
//mediaPlayer.getTrackInfo());
//if (textTrackIndex >= 0) {
// mediaPlayer.selectTrack(textTrackIndex);
//} else {
// Log.w("test", "Cannot find text track!");
//}
// if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
// mediaPlayer.setOnTimedTextListener(new MediaPlayer.OnTimedTextListener() {
//@Override
//public void onTimedText(final MediaPlayer mediaPlayer, final TimedText timedText) {
//if (timedText != null) {
//Log.d("test", "subtitle: " + timedText.getText());
// }
// }
// });
// }
mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
} catch (IOException e) {
e.printStackTrace();
}
mediaPlayer.start();
}
});
Below is the code for the ffmpeg command:
String cmd= "ffmpeg -i"+ filePathList.get(position).substring(filePathList.get(position).lastIndexOf("/")+1);
FFmpeg ffmpeg = FFmpeg.getInstance(CountryListActivity.this);
try {
// to execute "ffmpeg -version" command you just need to pass "-version"
ffmpeg.execute(new String[]{cmd}, new ExecuteBinaryResponseHandler() {
@Override
public void onStart() {
Log.e("!!!!!!!!!",""+filePathList.get(position));
}
@Override
public void onProgress(String message) {
Log.e("message..............",""+message);
}
@Override
public void onFailure(String message) {
Log.e("messageeeeeeeee..............",""+message);
}
@Override
public void onSuccess(String message) {
Log.e("messageeeeeeewwwwwwwwwwwee..............",""+message);
}
@Override
public void onFinish() {
Log.e("messagewwwewreeeeeeee..............","");
}
});
} catch (FFmpegCommandAlreadyRunningException e) {
// Handle if FFmpeg is already running
e.printStackTrace();
}
What you are basically looking for is speech-to-text. As you mentioned in your question, you already have recorded audio, so you can use Google's Speech-to-Text API to get text from the audio. Here is sample code from Google:
// Imports the Google Cloud client library
import com.google.cloud.speech.v1.RecognitionAudio;
import com.google.cloud.speech.v1.RecognitionConfig;
import com.google.cloud.speech.v1.RecognitionConfig.AudioEncoding;
import com.google.cloud.speech.v1.RecognizeResponse;
import com.google.cloud.speech.v1.SpeechClient;
import com.google.cloud.speech.v1.SpeechRecognitionAlternative;
import com.google.cloud.speech.v1.SpeechRecognitionResult;
import com.google.protobuf.ByteString;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
public class QuickstartSample {
/**
* Demonstrates using the Speech API to transcribe an audio file.
*/
public static void main(String... args) throws Exception {
// Instantiates a client
try (SpeechClient speechClient = SpeechClient.create()) {
// The path to the audio file to transcribe
String fileName = "./resources/audio.raw";
// Reads the audio file into memory
Path path = Paths.get(fileName);
byte[] data = Files.readAllBytes(path);
ByteString audioBytes = ByteString.copyFrom(data);
// Builds the sync recognize request
RecognitionConfig config = RecognitionConfig.newBuilder()
.setEncoding(AudioEncoding.LINEAR16)
.setSampleRateHertz(16000)
.setLanguageCode("en-US")
.build();
RecognitionAudio audio = RecognitionAudio.newBuilder()
.setContent(audioBytes)
.build();
// Performs speech recognition on the audio file
RecognizeResponse response = speechClient.recognize(config, audio);
List<SpeechRecognitionResult> results = response.getResultsList();
for (SpeechRecognitionResult result : results) {
// There can be several alternative transcripts for a given chunk of speech. Just use the
// first (most likely) one here.
SpeechRecognitionAlternative alternative = result.getAlternativesList().get(0);
System.out.printf("Transcription: %s%n", alternative.getTranscript());
}
}
}
}
To add it to your Android project, you can follow the Google Cloud client library setup instructions, or you can download the JAR directly.

MediaPlayer - Sounds Stop Playing

I am creating a class which loads up a few sounds. However, isPlaying keeps throwing an exception after a while, and then that particular sound stops playing permanently, while the other sounds keep playing fine.
public class MySound {
int m_IdMyId;
int m_ResId;
boolean m_IsLoaded;
MediaPlayer m_Media;
public MySound(int idMyId, int resId){
m_IdMyId = idMyId;
m_ResId = resId;
m_IsLoaded = false;
m_Media = null;
}
}
Here m_IdMyId is just an id for my game, m_ResId is something like R.raw.mysound1, m_IsLoaded is (I think) automatically set to true since I am loading synchronously, and m_Media is the MediaPlayer object.
I am calling stop() very regularly, as it is a game and I need to check every second or so to make sure certain sounds are stopped. It is here that it throws an exception when snd.m_Media.isPlaying() is called.
I cannot seem to access e to see what the error is.
Also I would like to know how I can set m_IsLoaded correctly. How do I know when the sound is fully loaded and ready to use?
Here is my management class:
public class MySoundManager {
MainActivity m_Context;
ArrayList<MySound> mySounds;
public MySoundManager(MainActivity context) {
m_Context = context;
mySounds = new ArrayList<MySound>();
mySounds.add(new MySound(8, R.raw.mysound1));
mySounds.add(new MySound(10, R.raw.mysound2));
mySounds.add(new MySound(22, R.raw.mysound3));
mySounds.add(new MySound(100, R.raw.click));
mySounds.add(new MySound(101, R.raw.error));
for(MySound mysound : mySounds) {
mysound.m_Media = MediaPlayer.create(m_Context, mysound.m_ResId); // no need to call prepare(); create() does that for you
mysound.m_IsLoaded = true;
}
}
// I call this when the main thread calls onResume
public void onResume(){
for(MySound mysound : mySounds) {
if(mysound.m_Media == null) {
mysound.m_Media = MediaPlayer.create(m_Context, mysound.m_ResId); // no need to call prepare(); create() does that for you
mysound.m_IsLoaded = true;
}
}
}
// I call this when the main thread calls onPause
public void onPause(){
for(MySound mysound : mySounds) {
if(mysound.m_Media != null) {
mysound.m_Media.stop();
mysound.m_Media.release();
mysound.m_Media = null;
}
}
}
public boolean IsAllLoaded(){
for(MySound mysound : mySounds) {
if(!mysound.m_IsLoaded) return false;
}
return true;
}
public MySound FindMySoundByIdMyId(int idMyId){
try {
for(MySound mysound : mySounds) {
if (mysound.m_IdMyId == idMyId) return mysound;
}
}catch(Exception e) {
MySound snd;
snd = null; // ToDo
}
return null;
}
public void play(int idMyId){
MySound snd;
try{
if((snd = FindMySoundByIdMyId(idMyId)) != null)
snd.m_Media.start();
}catch(IllegalStateException e) {
snd = null; // ToDo
}
}
public void pause(int idMyId){
MySound snd;
try{
if((snd = FindMySoundByIdMyId(idMyId)) != null &&
snd.m_Media.isPlaying())
snd.m_Media.pause();
}catch(IllegalStateException e) {
snd = null; // ToDo
}
}
public void pauseAll(){
try{
for (MySound mysound : mySounds) {
if(mysound.m_Media.isPlaying())
mysound.m_Media.pause();
}
}catch(IllegalStateException e) {
MySound snd;
snd = null; // ToDo
}
}
public boolean isPlaying(int idMyId, MySound[] fill){
MySound snd;
fill[0] = null;
try{
if((snd = FindMySoundByIdMyId(idMyId)) != null){
fill[0] = snd;
return snd.m_Media.isPlaying();
}
}catch(IllegalStateException e) {
snd = null; // ToDo
}
return false;
}
public void stop(int idMyId){
MySound snd;
try{
if((snd = FindMySoundByIdMyId(idMyId)) != null &&
snd.m_Media.isPlaying())
snd.m_Media.stop();
}catch(IllegalStateException e) {
snd = null; // ToDo
}
}
// The str is in the format
// number id, 1 = on 0 = off,dont play if this id playing;
public void PlaySound(String str) {
boolean isplaying;
int i, len, id, idDontPlay, milliNow;
String[] strARR = str.split(";");
String[] strARR2;
Integer[] tmpIntARR;
ArrayList<Integer[]> onARR = new ArrayList<Integer[]>();
ArrayList<Integer> offARR = new ArrayList<Integer>();
MySound snd;
for (i = 0, len = strARR.length; i < len; i++) {
if(strARR[i].length() <= 0) continue;
if((strARR2 = strARR[i].split(",")) != null &&
strARR2.length >= 3 &&
strARR2[0].length() > 0 &&
strARR2[1].length() > 0 &&
strARR2[2].length() > 0){
id = Integer.parseInt(strARR2[0]);
idDontPlay = Integer.parseInt(strARR2[2]);
tmpIntARR = new Integer[2];
tmpIntARR[0] = id;
tmpIntARR[1] = idDontPlay;
if(Integer.parseInt(strARR2[1]) == 1){
onARR.add(tmpIntARR);
} else offARR.add(id);
}
}
// Turn off all sounds that need to be turned off
for (i=0,len=offARR.size();i<len;i++) {
id = offARR.get(i);
stop(id);
}
// Turn all sounds that need to be turned on,
// but only if the sound that blocks a new sound is not playing
for (i=0,len=onARR.size();i<len;i++) {
tmpIntARR = onARR.get(i);
id = tmpIntARR[0];
idDontPlay = tmpIntARR[1];
// We dont play if the idDontPlay sound is already playing
if((snd = FindMySoundByIdMyId(idDontPlay)) != null &&
snd.m_Media.isPlaying())
continue;
if((snd = FindMySoundByIdMyId(id)) != null){
isplaying = snd.m_Media.isPlaying();
milliNow = snd.m_Media.getCurrentPosition();
if(milliNow > (snd.m_Media.getDuration() - 1000) ||
(!isplaying && milliNow > 0)){
snd.m_Media.seekTo(0); // Half a second inside
}
if(!isplaying) snd.m_Media.start();
}
}
}
}
Creating a MediaPlayer instance for every sound is not good practice when you need low latency, especially for short clips. MediaPlayer is meant for longer clips such as music files; it uses a large buffer, and a larger buffer means higher latency. There is also the AudioFocus mechanism on Android, which may interfere with your playback session. So I strongly recommend using SoundPool to play short clips such as game sounds.
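To illustrate that recommendation, here is a minimal SoundPool sketch. It is shown in C# with the Xamarin.Android bindings to match the client code earlier on this page (the equivalent Java calls are SoundPool.Builder, load() and play()); it assumes API 21+, a raw resource named click, that the code runs inside an Activity, and that the binding exposes the load-complete listener as the LoadComplete event:
var attrs = new AudioAttributes.Builder()
    .SetUsage(AudioUsageKind.Game)
    .SetContentType(AudioContentType.Sonification)
    .Build();
var soundPool = new SoundPool.Builder()
    .SetMaxStreams(4)
    .SetAudioAttributes(attrs)
    .Build();

int clickId = 0;
soundPool.LoadComplete += (sender, e) =>
{
    // Fires once per loaded sample; this is the place to set an "is loaded" flag.
    soundPool.Play(clickId, 1f, 1f, 1 /*priority*/, 0 /*loop*/, 1f /*rate*/);
};
// Load() returns an id immediately, but the sample is only playable
// once the load-complete callback has fired.
clickId = soundPool.Load(this, Resource.Raw.click, 1);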

Streaming video to Android Device in Google VR Cardboard in Unity

I have been trying to make a VR video player app for Google Cardboard on Android, where the video can either be streamed or downloaded. The video works fine in the Editor but does not work on my phone. I am using Unity 5.6.1f1 on Windows 10; the phone is a Moto G4 Plus running Nougat.
Here are the scripts used to stream or download the video:
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
public class GazeButton : MonoBehaviour {
public static bool toStream;
//private bool buttonPress;
public Image streamImg;
public Image downImg;
public Slider progress;
public Text txt;
//public WWW url;
//public GvrVideoPlayerTexture
private PlayVideo thePlayVideo;
public Camera theCamera;
// Use this for initialization
void Start () {
if (theCamera == null)
theCamera = GameObject.Find ("Main Camera").GetComponent<Camera> ();
if(streamImg == null)
streamImg = GameObject.Find ("StreamImage").GetComponent<Image>();
if(downImg == null)
downImg = GameObject.Find ("DownImage").GetComponent<Image>();
streamImg.color = Color.green;
downImg.color = Color.red;
if (progress == null)
progress = GameObject.Find ("ProgressSlider").GetComponent<Slider> ();
progress.value = 0;
progress.gameObject.SetActive (false);
if (txt == null)
txt = GameObject.Find ("GuideText").GetComponent<Text>();
thePlayVideo = FindObjectOfType<PlayVideo> ();
}
// Update is called once per frame
void Update () {
if (progress.IsActive()) {
progress.value += 1;
if (progress.value >= progress.maxValue /*&& buttonPress*/) {
if (toStream) {
streamImg.color = Color.gray;
streamImg.gameObject.SetActive (false);
downImg.gameObject.SetActive (false);
progress.gameObject.SetActive (false);
txt.gameObject.SetActive (false);
//FlipCameraView ();
thePlayVideo.Stream ();
} else {
downImg.color = Color.gray;
streamImg.gameObject.SetActive (false);
downImg.gameObject.SetActive (false);
progress.gameObject.SetActive (false);
txt.gameObject.SetActive (false);
//FlipCameraView ();
thePlayVideo.Download ();
}
}
}
}
public void StreamButtonDown(){
streamImg.color = Color.blue;
toStream = true;
//buttonPress = true;
progress.gameObject.SetActive (true);
progress.value = 0;
}
public void DownButtonDown(){
downImg.color = Color.blue;
toStream = false;
//buttonPress = true;
progress.gameObject.SetActive (true);
progress.value = 0;
}
public void StreamButtonUp(){
streamImg.color = Color.green;
//buttonPress = false;
progress.gameObject.SetActive (false);
}
public void DownButtonUp(){
downImg.color = Color.red;
//buttonPress = false;
progress.gameObject.SetActive (false);
}
public bool GetCondition(){
return toStream;
}
}
And this is the script used to actually stream the video:
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Video;
public class PlayVideo : MonoBehaviour {
//public GameObject theSphere;
private VideoPlayer theVideoPlayer;
//private VideoSource theVideoSource;
private AudioSource theAudioSource;
public GazeButton theGazeButton;
// Use this for initialization
void Start () {
/*if (theSphere == null)
theSphere = GameObject.Find ("Sphere");*/
theGazeButton = GetComponent<GazeButton> ();
}
// Update is called once per frame
void Update () {
if (theVideoPlayer != null) {
if (/*(!theGazeButton.GetCondition ()) &&*/ theVideoPlayer.isPrepared) {
theVideoPlayer.Play ();
theAudioSource.Play ();
}
}
}
public void RealStart(){
theVideoPlayer = gameObject.AddComponent<VideoPlayer> ();
//theVideoSource = gameObject.AddComponent<VideoSource> ();
theAudioSource = gameObject.AddComponent<AudioSource> ();
theVideoPlayer.source = VideoSource.Url;
theVideoPlayer.url = "https://<SOME LINK>.mp4";
//theSphere.AddComponent<VideoPlayer>(theVideoPlayer);
theVideoPlayer.audioOutputMode = VideoAudioOutputMode.AudioSource;
theVideoPlayer.EnableAudioTrack (0, true);
theVideoPlayer.SetTargetAudioSource (0, theAudioSource);
}
public void Stream(){
RealStart ();
theVideoPlayer.playOnAwake = true;
theVideoPlayer.Play ();
theAudioSource.Play ();
}
public void Download(){
RealStart ();
theVideoPlayer.playOnAwake = false;
theVideoPlayer.Prepare ();
}
}
I can't, for the life of me, understand why the video runs perfectly in the Editor and not on the phone. Please help.
Use a modern Unity editor (at least 2017.4 LTS) and install the latest Google VR SDK for Unity. There, look at the VideoDemo.scene file to see how video is played in Google Cardboard with Unity.
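Independently of the SDK demo, two things stand out in the PlayVideo script above: Stream() calls Play() before the URL has been prepared, and Update() keeps calling Play() every frame once isPrepared is true. Here is a minimal sketch using the VideoPlayer.prepareCompleted callback (available since VideoPlayer was introduced in Unity 5.6); the URL is a placeholder:
using UnityEngine;
using UnityEngine.Video;
public class PreparedPlayback : MonoBehaviour {
    void Start () {
        var player = gameObject.AddComponent<VideoPlayer> ();
        var audio = gameObject.AddComponent<AudioSource> ();
        player.source = VideoSource.Url;
        player.url = "https://example.com/video.mp4"; // placeholder
        player.audioOutputMode = VideoAudioOutputMode.AudioSource;
        player.EnableAudioTrack (0, true);
        player.SetTargetAudioSource (0, audio);
        // Start playback only after preparation has finished, instead of
        // polling isPrepared every frame in Update().
        player.prepareCompleted += vp => vp.Play ();
        player.Prepare ();
    }
}
When streaming from a URL on the device, it is also worth confirming that Internet Access is set to Require in the Android Player Settings.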

NFC tag (NfcA) scan only works from the second time onward

I wrote a custom plugin to read blocks of data from an NfcA (i.e. non-NDEF) tag. It seems to work fine, but only after the second scan. I am using the Activity intent to obtain "NfcAdapter.EXTRA_TAG" and later use it for reading the values. I am also updating the intent in onNewIntent(). onNewIntent() gets called after the second scan, and from then on I get results every time. But on the first scan onNewIntent() does not get called, so I end up using the Activity's original intent, which does not carry "NfcAdapter.EXTRA_TAG", and I get null. Please see my code below.
SE_NfcA.java (my native plugin code):
@Override
public boolean execute(String action, JSONArray args, CallbackContext callbackContext) throws JSONException {
String Result = "";
String TypeOfTalking = "";
if (action.contains("TalkToNFC"))
{
JSONObject arg_object = args.getJSONObject(0);
TypeOfTalking = arg_object.getString("type");
if(TypeOfTalking != "")
{
if (TypeOfTalking.contains("readBlock"))
{
if(TypeOfTalking.contains("#"))
{
try
{
String[] parts = TypeOfTalking.split("#");
int index = Integer.parseInt(parts[1]);
Result = Readblock(cordova.getActivity().getIntent(),(byte)index);
callbackContext.success(Result);
}
catch(Exception e)
{
callbackContext.error("Exception Reading "+ TypeOfTalking + "due to "+ e.toString());
return false;
}
}
}
else
{
return false;
}
}
else
{
return false;
}
}
else
{
return false;
}
return true;
}
@Override
public void onNewIntent(Intent intent) {
ShowAlert("onNewIntent called");
Tag tagFromIntent = intent.getParcelableExtra(NfcAdapter.EXTRA_TAG);
super.onNewIntent(intent);
getActivity().setIntent(intent);
savedTag = tagFromIntent;
savedIntent = intent;
}
@Override
public void onPause(boolean multitasking) {
Log.d(TAG, "onPause " + getActivity().getIntent());
super.onPause(multitasking);
if (multitasking) {
// nfc can't run in background
stopNfc();
}
}
@Override
public void onResume(boolean multitasking) {
Log.d(TAG, "onResume " + getActivity().getIntent());
super.onResume(multitasking);
startNfc();
}
public String Readblock(Intent Intent,byte block) throws IOException{
byte[] response = new byte[]{};
if(Intent != null)
{
Tag myTag = Intent.getParcelableExtra(NfcAdapter.EXTRA_TAG);
if(savedTag != null)
myTag = savedTag;
if(myTag != null)
{
try{
Reader nTagReader = new Reader(myTag);
nTagReader.close();
nTagReader.connect();
nTagReader.SectorSelect(Sector.Sector0);
response = nTagReader.fast_read(block, block);
nTagReader.close();
return ConvertH(response);
}catch(Exception e){
ShowAlert(e.toString());
}
}
else
ShowAlert("myTag is null.");
}
return null;
}
private void createPendingIntent() {
if (pendingIntent == null) {
Activity activity = getActivity();
Intent intent = new Intent(activity, activity.getClass());
intent.addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP| Intent.FLAG_ACTIVITY_CLEAR_TOP);
pendingIntent = PendingIntent.getActivity(activity, 0, intent, 0);
}
}
private void startNfc() {
createPendingIntent(); // onResume can call startNfc before execute
getActivity().runOnUiThread(new Runnable() {
public void run() {
NfcAdapter nfcAdapter = NfcAdapter.getDefaultAdapter(getActivity());
if (nfcAdapter != null && !getActivity().isFinishing()) {
try {
nfcAdapter.enableForegroundDispatch(getActivity(), getPendingIntent(), getIntentFilters(), getTechLists());
if (p2pMessage != null) {
nfcAdapter.setNdefPushMessage(p2pMessage, getActivity());
}
} catch (IllegalStateException e) {
// issue 110 - user exits app with home button while nfc is initializing
Log.w(TAG, "Illegal State Exception starting NFC. Assuming application is terminating.");
}
}
}
});
}
private void stopNfc() {
Log.d(TAG, "stopNfc");
getActivity().runOnUiThread(new Runnable() {
public void run() {
NfcAdapter nfcAdapter = NfcAdapter.getDefaultAdapter(getActivity());
if (nfcAdapter != null) {
try {
nfcAdapter.disableForegroundDispatch(getActivity());
} catch (IllegalStateException e) {
// issue 125 - user exits app with back button while nfc
Log.w(TAG, "Illegal State Exception stopping NFC. Assuming application is terminating.");
}
}
}
});
}
private Activity getActivity() {
return this.cordova.getActivity();
}
private PendingIntent getPendingIntent() {
return pendingIntent;
}
private IntentFilter[] getIntentFilters() {
return intentFilters.toArray(new IntentFilter[intentFilters.size()]);
}
private String[][] getTechLists() {
//noinspection ToArrayCallWithZeroLengthArrayArgument
return techLists.toArray(new String[0][0]);
}
}
My index.js file
nfc.addTagDiscoveredListener(
function(nfcEvent){
console.log(nfcEvent.tag.id);
alert(nfcEvent.tag.id);
window.echo("readBlock#88");//call to plugin
},
function() {
alert("Listening for NFC tags.");
},
function() {
alert("NFC activation failed.");
}
);
SE_NfcA.js (plugin interface for interaction between index.js and SE_NfcA.java):
window.echo = function(natureOfTalk)
{
alert("Inside JS Interface, arg =" + natureOfTalk);
cordova.exec(function(result){alert("Result is : "+result);},
function(error){alert("Some Error happened : "+ error);},
"SE_NfcA","TalkToNFC",[{"type": natureOfTalk}]);
};
I guess I have messed up the Intent/Activity lifecycle somewhere. Please help. TIA!
I found a tweak/hack and made it work.
Before making any call to read or write, I make one dummy Initialize call:
window.echo("Initialize");
window.echo("readBlock#88");//call to plugin to read.
And in the native code of the plugin, on receiving the "Initialize" token, I make a startNfc() call:
else if(TypeOfTalking.equalsIgnoreCase("Initialize"))
{
startNfc();
}

How to send and receive Voice Stream using RTP

I am new to SIP calling with RTP; I am trying to send and receive voice streams over RTP for a SIP call. I have connected two emulators and can send an INVITE and INVITE-ACK using JAIN-SIP. After I receive the ACK I want to start RTP for media streaming. I use RtpPacket to send and receive media, building the RTP header from fields like these:
byte Version;
boolean Padding;
boolean Extension;
byte CC;
boolean Marker;
byte PayloadType;
short SequenceNumber;
int TimeStamp;
Please give me some ideas and links to resources where I can find an answer.
This can be achieved in a simpler manner:
AudioManager audio = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
audio.setMode(AudioManager.MODE_IN_COMMUNICATION);
audioGroup = new AudioGroup();
audioGroup.setMode(AudioGroup.MODE_ECHO_SUPPRESSION);
audioStream = new AudioStream(InetAddress.getByAddress(getLocalIPAddress()));
audioStream.setCodec(AudioCodec.PCMU);
audioStream.setMode(RtpStream.MODE_NORMAL);
audioStream.associate(InetAddress.getByName(SipStackAndroid.getRemoteIp()), REMOTE_PORT);
audioStream.join(audioGroup);
We send and receive RTP data using RTPPacket:
import javax.media.rtp.*;
import javax.media.rtp.rtcp.*;
import javax.media.rtp.event.*;
import javax.media.*;
import javax.media.protocol.*;
import java.net.InetAddress;
import javax.media.format.AudioFormat;
import com.sun.media.ui.*;
import java.util.Vector;
public class RTPSourceStream<RTPPlayerWindow> implements ReceiveStreamListener,
ControllerListener {
#SuppressWarnings("rawtypes")
Vector playerlist = new Vector();
#SuppressWarnings("deprecation")
SessionManager mgr = null;
boolean terminatedbyClose = false;
@SuppressWarnings("deprecation")
public SessionManager createManager(String address,
String sport,
String sttl,
boolean listener,
boolean sendlistener) {
return createManager(address,
new Integer(sport).intValue(),
new Integer(sttl).intValue(),
listener,
sendlistener);
}
@SuppressWarnings("deprecation")
public SessionManager createManager(String address,
int port,
int ttl,
boolean listener,
boolean sendlistener) {
mgr = (SessionManager) new com.sun.media.rtp.RTPSessionMgr();
if (mgr == null) return null;
mgr.addFormat(new AudioFormat(AudioFormat.DVI_RTP, 44100, 4, 1), 18);
if (listener) mgr.addReceiveStreamListener(this);
// if (sendlistener) new RTPSendStreamWindow(mgr);
// ask RTPSM to generate the local participants CNAME
String cname = mgr.generateCNAME();
String username = null;
try {
username = System.getProperty("user.name");
} catch (SecurityException e) {
username = "jmf-user";
}
// create our local Session Address
SessionAddress localaddr = new SessionAddress();
try {
InetAddress destaddr = InetAddress.getByName(address);
SessionAddress sessaddr = new SessionAddress(destaddr,
port,
destaddr,
port + 1);
SourceDescription[] userdesclist = new SourceDescription[] {
new SourceDescription(SourceDescription
.SOURCE_DESC_EMAIL,
"jmf-user#sun.com",
1,
false),
new SourceDescription(SourceDescription
.SOURCE_DESC_CNAME,
cname,
1,
false),
new
SourceDescription(SourceDescription.SOURCE_DESC_TOOL, "JMF RTP Player v2.0",
1,
false)
};
mgr.initSession(localaddr,
userdesclist,
0.05,
0.25);
mgr.startSession(sessaddr, ttl, null);
} catch (Exception e) {
System.err.println(e.getMessage());
return null;
}
return mgr;
}
public void update(ReceiveStreamEvent event) {
Player newplayer = null;
RTPPacket playerWindow = null;
// find the sourceRTPSM for this event
SessionManager source = (SessionManager) event.getSource();
// create a new player if a new recvstream is detected
if (event instanceof NewReceiveStreamEvent) {
String cname = "Java Media Player";
ReceiveStream stream = null;
try {
// get a handle over the ReceiveStream
stream = ((NewReceiveStreamEvent) event)
.getReceiveStream();
Participant part = stream.getParticipant();
if (part != null) cname = part.getCNAME();
// get a handle over the ReceiveStream datasource
DataSource dsource = stream.getDataSource();
// create a player by passing datasource to the
// Media Manager
newplayer = Manager.createPlayer(dsource);
System.out.println("created player " + newplayer);
} catch (Exception e) {
System.err.println("NewReceiveStreamEvent exception " +
e.getMessage());
return;
}
if (newplayer == null) return;
playerlist.addElement(newplayer);
newplayer.addControllerListener(this);
// send this player to player GUI
playerWindow = new RTPPacket(newplayer, cname);
}
}
public void controllerUpdate(ControllerEvent evt) {
// get a handle over controller, remove it from the player
// list.
// if player list is empty, close the sesssion manager.
if ((evt instanceof ControllerClosedEvent) ||
(evt instanceof ControllerErrorEvent) ||
(evt instanceof DeallocateEvent)) {
Player p = (Player) evt.getSourceController();
if (!terminatedbyClose) {
if (playerlist.contains(p))
playerlist.removeElement(p);
if ((playerlist.size() == 0) && (mgr != null))
mgr.closeSession("All players are closed");
}
}
}
public void closeManager() {
terminatedbyClose = true;
// first close all the players
for (int i = 0; i < playerlist.size(); i++) {
((Player) playerlist.elementAt(i)).close();
}
if (mgr != null) {
mgr.closeSession("RTP Session Terminated");
mgr = null;
}
}
class RTPPacket extends RTPSourceStream {
public RTPPacket(Player newplayer, String cname) {
// TODO Auto-generated constructor stub
}
}
}
