MediaPlayer.Prepare() throws an exception when I try to go to the next track - Android

If I replace the Prepare() call with PrepareAsync(), no exception is thrown, but the media does not play.
If I use Prepare(), an exception is thrown.
Please help me.
OnCreate(....)
    // Media
    player = new MediaPlayer ();
    player.Stop ();
public void StartMedia(string url_string)
{
    Load_Data ();
    seekBar.Max = player.Duration;
    player.Stop ();
    player.Reset ();
    player.SetAudioStreamType (Stream.Music);
    player.SetDataSource (url_string);
    player.Prepare ();
    player.Start ();
    imgPlayorPause.SetImageResource (Resource.Drawable.ic_pause_black_36dp);
    //UpdatedTimerTask ();
    UpdateProcessBar ();
}
public void NextTracks(int position)
{
    if (Isrepeat == false) {
        if (position >= mListData.Count ()) {
            mPosition = 1;
        } else {
            mPosition++;
        }
    } else {
        mPosition = position;
    }
    StartMedia (mListData [mPosition].stream_url + "?client_id=" + clienId);
}
I have tried several approaches, but without success.

player.Prepared += (object sender, EventArgs e) =>
{
    player.Start ();
};
Subscribing to the Prepared event like this, together with PrepareAsync(), works and solves the problem.
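For reference, here is the same fix written against the underlying native Android API, as a minimal Java sketch (Xamarin's PrepareAsync()/Prepared are thin wrappers over prepareAsync()/setOnPreparedListener(); the TrackPlayer class and field names are illustrative, not from the question):
import android.media.AudioManager;
import android.media.MediaPlayer;
import java.io.IOException;

public class TrackPlayer {
    private final MediaPlayer player = new MediaPlayer();

    // Streams a URL without blocking the UI thread: prepareAsync() returns
    // immediately and onPrepared() fires once buffering has finished.
    public void startMedia(String url) throws IOException {
        player.reset();                          // back to the idle state, whatever came before
        player.setAudioStreamType(AudioManager.STREAM_MUSIC);
        player.setDataSource(url);
        player.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
            @Override
            public void onPrepared(MediaPlayer mp) {
                mp.start();                      // safe: the player is prepared now
            }
        });
        player.prepareAsync();                   // never call start() before this has completed
    }
}
Note that reading the duration for the seek bar is only valid once the player is prepared, so that line also belongs in the Prepared handler.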

Related

MediaPlayer - Sounds Stop Playing

I am creating a class which loads up a few sounds. However, isPlaying keeps throwing an exception after a while, and then that particular sound permanently stops playing, while other sounds keep playing OK.
public class MySound {
    int m_IdMyId;
    int m_ResId;
    boolean m_IsLoaded;
    MediaPlayer m_Media;

    public MySound(int idMyId, int resId) {
        m_IdMyId = idMyId;
        m_ResId = resId;
        m_IsLoaded = false;
        m_Media = null;
    }
}
In this class, m_IdMyId is just an id for my game and m_ResId is something like R.raw.mysound1. m_IsLoaded is, I think, fine to set to true immediately, as I am loading synchronously. m_Media is the MediaPlayer object.
I am calling stop() very regularly, as it is a game and I need to check every second or so to make sure certain sounds are stopped. This is where the exception is thrown, when snd.m_Media.isPlaying() is called.
I cannot seem to access e to see what the error is.
Also I would like to know how I can set m_IsLoaded correctly. How do I know when the sound is fully loaded and ready to use?
Here is my management class:
public class MySoundManager {
    MainActivity m_Context;
    ArrayList<MySound> mySounds;

    public MySoundManager(MainActivity context) {
        m_Context = context;
        mySounds = new ArrayList<MySound>();
        mySounds.add(new MySound(8, R.raw.mysound1));
        mySounds.add(new MySound(10, R.raw.mysound2));
        mySounds.add(new MySound(22, R.raw.mysound3));
        mySounds.add(new MySound(100, R.raw.click));
        mySounds.add(new MySound(101, R.raw.error));
        for (MySound mysound : mySounds) {
            mysound.m_Media = MediaPlayer.create(m_Context, mysound.m_ResId); // no need to call prepare(); create() does that for you
            mysound.m_IsLoaded = true;
        }
    }

    // I call this when the main thread calls onResume
    public void onResume() {
        for (MySound mysound : mySounds) {
            if (mysound.m_Media == null) {
                mysound.m_Media = MediaPlayer.create(m_Context, mysound.m_ResId); // no need to call prepare(); create() does that for you
                mysound.m_IsLoaded = true;
            }
        }
    }

    // I call this when the main thread calls onPause
    public void onPause() {
        for (MySound mysound : mySounds) {
            if (mysound.m_Media != null) {
                mysound.m_Media.stop();
                mysound.m_Media.release();
                mysound.m_Media = null;
            }
        }
    }
    public boolean IsAllLoaded() {
        for (MySound mysound : mySounds) {
            if (!mysound.m_IsLoaded) return false;
        }
        return true;
    }

    public MySound FindMySoundByIdMyId(int idMyId) {
        try {
            for (MySound mysound : mySounds) {
                if (mysound.m_IdMyId == idMyId) return mysound;
            }
        } catch (Exception e) {
            MySound snd;
            snd = null; // ToDo
        }
        return null;
    }
    public void play(int idMyId) {
        MySound snd;
        try {
            if ((snd = FindMySoundByIdMyId(idMyId)) != null)
                snd.m_Media.start();
        } catch (IllegalStateException e) {
            snd = null; // ToDo
        }
    }

    public void pause(int idMyId) {
        MySound snd;
        try {
            if ((snd = FindMySoundByIdMyId(idMyId)) != null &&
                snd.m_Media.isPlaying())
                snd.m_Media.pause();
        } catch (IllegalStateException e) {
            snd = null; // ToDo
        }
    }

    public void pauseAll() {
        try {
            for (MySound mysound : mySounds) {
                if (mysound.m_Media.isPlaying())
                    mysound.m_Media.pause();
            }
        } catch (IllegalStateException e) {
            MySound snd;
            snd = null; // ToDo
        }
    }

    public boolean isPlaying(int idMyId, MySound[] fill) {
        MySound snd;
        fill[0] = null;
        try {
            if ((snd = FindMySoundByIdMyId(idMyId)) != null) {
                fill[0] = snd;
                return snd.m_Media.isPlaying();
            }
        } catch (IllegalStateException e) {
            snd = null; // ToDo
        }
        return false;
    }

    public void stop(int idMyId) {
        MySound snd;
        try {
            if ((snd = FindMySoundByIdMyId(idMyId)) != null &&
                snd.m_Media.isPlaying())
                snd.m_Media.stop();
        } catch (IllegalStateException e) {
            snd = null; // ToDo
        }
    }
    // The str is in the format:
    // number id, 1 = on / 0 = off, don't play if this id is playing;
    public void PlaySound(String str) {
        boolean isplaying;
        int i, len, id, idDontPlay, milliNow;
        String[] strARR = str.split(";");
        String[] strARR2;
        Integer[] tmpIntARR;
        ArrayList<Integer[]> onARR = new ArrayList<Integer[]>();
        ArrayList<Integer> offARR = new ArrayList<Integer>();
        MySound snd;
        for (i = 0, len = strARR.length; i < len; i++) {
            if (strARR[i].length() <= 0) continue;
            if ((strARR2 = strARR[i].split(",")) != null &&
                strARR2.length >= 3 &&
                strARR2[0].length() > 0 &&
                strARR2[1].length() > 0 &&
                strARR2[2].length() > 0) {
                id = Integer.parseInt(strARR2[0]);
                idDontPlay = Integer.parseInt(strARR2[2]);
                tmpIntARR = new Integer[2];
                tmpIntARR[0] = id;
                tmpIntARR[1] = idDontPlay;
                if (Integer.parseInt(strARR2[1]) == 1) {
                    onARR.add(tmpIntARR);
                } else offARR.add(id);
            }
        }
        // Turn off all sounds that need to be turned off
        for (i = 0, len = offARR.size(); i < len; i++) {
            id = offARR.get(i);
            stop(id);
        }
        // Turn on all sounds that need to be turned on,
        // but only if the sound that blocks a new sound is not playing
        for (i = 0, len = onARR.size(); i < len; i++) {
            tmpIntARR = onARR.get(i);
            id = tmpIntARR[0];
            idDontPlay = tmpIntARR[1];
            // We don't play if the idDontPlay sound is already playing
            if ((snd = FindMySoundByIdMyId(idDontPlay)) != null &&
                snd.m_Media.isPlaying())
                continue;
            if ((snd = FindMySoundByIdMyId(id)) != null) {
                isplaying = snd.m_Media.isPlaying();
                milliNow = snd.m_Media.getCurrentPosition();
                if (milliNow > (snd.m_Media.getDuration() - 1000) ||
                    (!isplaying && milliNow > 0)) {
                    snd.m_Media.seekTo(0); // rewind to the start
                }
                if (!isplaying) snd.m_Media.start();
            }
        }
    }
}
Creating a MediaPlayer instance for every sound is not good practice when you want low latency, especially for short clips. MediaPlayer is meant for longer clips such as music files; it uses a large buffer, and a larger buffer means higher latency. There is also the AudioFocus mechanism on Android, which may interfere with your sound-playing session. So I strongly recommend you use SoundPool to play short clips like game sounds.
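To illustrate, a minimal SoundPool sketch (the GameSounds class and id mapping are illustrative; only the resource ids are taken from the question). The OnLoadCompleteListener also answers the earlier question of knowing when a sound is fully loaded:
import android.content.Context;
import android.media.AudioManager;
import android.media.SoundPool;
import android.util.SparseIntArray;

public class GameSounds {
    private final SoundPool pool = new SoundPool(5, AudioManager.STREAM_MUSIC, 0);
    private final SparseIntArray samples = new SparseIntArray(); // game id -> SoundPool sample id
    private int loadedCount = 0;

    public GameSounds(Context context) {
        // Fires once per load(); status 0 means that sample is ready to play.
        pool.setOnLoadCompleteListener(new SoundPool.OnLoadCompleteListener() {
            @Override
            public void onLoadComplete(SoundPool soundPool, int sampleId, int status) {
                if (status == 0) loadedCount++;
            }
        });
        samples.put(100, pool.load(context, R.raw.click, 1));
        samples.put(101, pool.load(context, R.raw.error, 1));
    }

    public boolean isAllLoaded() {
        return loadedCount == samples.size();
    }

    public void play(int idMyId) {
        int sampleId = samples.get(idMyId);                 // 0 if the id is unknown
        if (sampleId != 0) pool.play(sampleId, 1f, 1f, 1, 0, 1f); // L/R volume, priority, no loop, normal rate
    }
}
Unlike MediaPlayer, a SoundPool stream never throws IllegalStateException for being in the wrong state; stopping or playing an unknown stream id is simply a no-op.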

Streaming video to Android Device in Google VR Cardboard in Unity

I have been trying to make a VR video player app for Google Cardboard on Android, where video can either be streamed or downloaded. The video works fine in the editor but does not work on my phone. I am using Unity 5.6.1f1 on Windows 10; the phone is a Moto G4 Plus running Nougat.
Here are the scripts used to stream or download the video:
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;

public class GazeButton : MonoBehaviour {

    public static bool toStream;
    //private bool buttonPress;
    public Image streamImg;
    public Image downImg;
    public Slider progress;
    public Text txt;
    //public WWW url;
    //public GvrVideoPlayerTexture
    private PlayVideo thePlayVideo;
    public Camera theCamera;

    // Use this for initialization
    void Start () {
        if (theCamera == null)
            theCamera = GameObject.Find ("Main Camera").GetComponent<Camera> ();
        if (streamImg == null)
            streamImg = GameObject.Find ("StreamImage").GetComponent<Image> ();
        if (downImg == null)
            downImg = GameObject.Find ("DownImage").GetComponent<Image> ();
        streamImg.color = Color.green;
        downImg.color = Color.red;
        if (progress == null)
            progress = GameObject.Find ("ProgressSlider").GetComponent<Slider> ();
        progress.value = 0;
        progress.gameObject.SetActive (false);
        if (txt == null)
            txt = GameObject.Find ("GuideText").GetComponent<Text> ();
        thePlayVideo = FindObjectOfType<PlayVideo> ();
    }

    // Update is called once per frame
    void Update () {
        if (progress.IsActive ()) {
            progress.value += 1;
            if (progress.value >= progress.maxValue /*&& buttonPress*/) {
                if (toStream) {
                    streamImg.color = Color.gray;
                    streamImg.gameObject.SetActive (false);
                    downImg.gameObject.SetActive (false);
                    progress.gameObject.SetActive (false);
                    txt.gameObject.SetActive (false);
                    //FlipCameraView ();
                    thePlayVideo.Stream ();
                } else {
                    downImg.color = Color.gray;
                    streamImg.gameObject.SetActive (false);
                    downImg.gameObject.SetActive (false);
                    progress.gameObject.SetActive (false);
                    txt.gameObject.SetActive (false);
                    //FlipCameraView ();
                    thePlayVideo.Download ();
                }
            }
        }
    }

    public void StreamButtonDown () {
        streamImg.color = Color.blue;
        toStream = true;
        //buttonPress = true;
        progress.gameObject.SetActive (true);
        progress.value = 0;
    }

    public void DownButtonDown () {
        downImg.color = Color.blue;
        toStream = false;
        //buttonPress = true;
        progress.gameObject.SetActive (true);
        progress.value = 0;
    }

    public void StreamButtonUp () {
        streamImg.color = Color.green;
        //buttonPress = false;
        progress.gameObject.SetActive (false);
    }

    public void DownButtonUp () {
        downImg.color = Color.red;
        //buttonPress = false;
        progress.gameObject.SetActive (false);
    }

    public bool GetCondition () {
        return toStream;
    }
}
And this is used to actually stream the video:
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Video;

public class PlayVideo : MonoBehaviour {

    //public GameObject theSphere;
    private VideoPlayer theVideoPlayer;
    //private VideoSource theVideoSource;
    private AudioSource theAudioSource;
    public GazeButton theGazeButton;

    // Use this for initialization
    void Start () {
        /*if (theSphere == null)
            theSphere = GameObject.Find ("Sphere");*/
        theGazeButton = GetComponent<GazeButton> ();
    }

    // Update is called once per frame
    void Update () {
        if (theVideoPlayer != null) {
            if (/*(!theGazeButton.GetCondition ()) &&*/ theVideoPlayer.isPrepared) {
                theVideoPlayer.Play ();
                theAudioSource.Play ();
            }
        }
    }

    public void RealStart () {
        theVideoPlayer = gameObject.AddComponent<VideoPlayer> ();
        //theVideoSource = gameObject.AddComponent<VideoSource> ();
        theAudioSource = gameObject.AddComponent<AudioSource> ();
        theVideoPlayer.source = VideoSource.Url;
        theVideoPlayer.url = "https://<SOME LINK>.mp4";
        //theSphere.AddComponent<VideoPlayer>(theVideoPlayer);
        theVideoPlayer.audioOutputMode = VideoAudioOutputMode.AudioSource;
        theVideoPlayer.EnableAudioTrack (0, true);
        theVideoPlayer.SetTargetAudioSource (0, theAudioSource);
    }

    public void Stream () {
        RealStart ();
        theVideoPlayer.playOnAwake = true;
        theVideoPlayer.Play ();
        theAudioSource.Play ();
    }

    public void Download () {
        RealStart ();
        theVideoPlayer.playOnAwake = false;
        theVideoPlayer.Prepare ();
    }
}
I can't, for the life of me, understand why the video runs perfectly in the editor and not on the phone. Please help.
Use a modern Unity editor (at least 2017.4 LTS) and install the latest Google VR SDK for Unity. Then look at the VideoDemo scene it ships with to see video playing in Google Cardboard from Unity.

Cannot close turn-based multiplayer match with Google Play Games

I have a problem closing my game after the last player closes the last match. My turn scheme is:
Player A
Player B
Player B
Player A
Player A
Player B
The game works well, but in turn "6", when player B tries to close the match, player A always sees the match as "my turn" and not as "completed".
Here is the code that rules turns and game end:
@Override
public void onGameEnd(final NMXGameData updatedData) {
    super.onGameEnd(updatedData);
    if (updatedData.getMatchNumber() == NMXGameConfig.MATCHES) {
        boolean iWin = updatedData.getResultPoints()[1] > updatedData.getResultPoints()[0];
        boolean tie = updatedData.getResultPoints()[1] == updatedData.getResultPoints()[0];
        ParticipantResult opponentParticipantResult;
        ParticipantResult myParticipantResult;
        if (tie) {
            opponentParticipantResult = new ParticipantResult(getOpponentId(), ParticipantResult.MATCH_RESULT_TIE, 1);
            myParticipantResult = new ParticipantResult(getCurrentPlayerId(), ParticipantResult.MATCH_RESULT_TIE, 1);
        } else {
            if (iWin) {
                opponentParticipantResult = new ParticipantResult(getOpponentId(), ParticipantResult.MATCH_RESULT_LOSS, 2);
                myParticipantResult = new ParticipantResult(getCurrentPlayerId(), ParticipantResult.MATCH_RESULT_WIN, 1);
            } else {
                opponentParticipantResult = new ParticipantResult(getOpponentId(), ParticipantResult.MATCH_RESULT_WIN, 1);
                myParticipantResult = new ParticipantResult(getCurrentPlayerId(), ParticipantResult.MATCH_RESULT_LOSS, 2);
            }
        }
        ArrayList<ParticipantResult> participantResultArrayList = new ArrayList<>();
        participantResultArrayList.add(opponentParticipantResult);
        participantResultArrayList.add(myParticipantResult);
        Games.TurnBasedMultiplayer.finishMatch(getApiClient(), match.getMatchId(), new Gson().toJson(updatedData).getBytes(), opponentParticipantResult, myParticipantResult).setResultCallback(new ResultCallback<TurnBasedMultiplayer.UpdateMatchResult>() {
            @Override
            public void onResult(TurnBasedMultiplayer.UpdateMatchResult updateMatchResult) {
                finish();
            }
        });
    } else if (updatedData.getMatchNumber() < NMXGameConfig.MATCHES) {
        if (getNextPlayerIndex(updatedData.getMatchNumber()) != getNextPlayerIndex(updatedData.getMatchNumber() - 1)) {
            Games.TurnBasedMultiplayer.takeTurn(getApiClient(), match.getMatchId(), new Gson().toJson(updatedData).getBytes(), getNextParticipantId());
        } else {
            Games.TurnBasedMultiplayer.takeTurn(getApiClient(), match.getMatchId(), new Gson().toJson(updatedData).getBytes(), getCurrentPlayerId());
            startActivity(startNewOnlineGameIntent(this, updatedData, match.getMatchId()));
        }
        finish();
    }
}

private String getCurrentPlayerId() {
    return match.getParticipantId(Games.Players.getCurrentPlayerId(getApiClient()));
}

private String getOpponentId() {
    for (String id : match.getParticipantIds()) {
        if (!id.equals(getCurrentPlayerId())) {
            return id;
        }
    }
    return null;
}

private int getNextPlayerIndex(int nextRoundIndex) {
    nextRoundIndex = nextRoundIndex + 1;
    return (nextRoundIndex / 2) % 2;
}
I finally figured it out.
I don't know if this is the desired behavior, but when, in round 6, player_B calls:
Games.TurnBasedMultiplayer.finishMatch(getApiClient(), match.getMatchId(), new Gson().toJson(updatedData).getBytes(), opponentParticipantResult, myParticipantResult).setResultCallback(new ResultCallback<TurnBasedMultiplayer.UpdateMatchResult>() {
    @Override
    public void onResult(TurnBasedMultiplayer.UpdateMatchResult updateMatchResult) {
        finish();
    }
});
the turn goes to player_A, who sees that match as "my turn" and not as "completed". At this point player A must call Games.TurnBasedMultiplayer.finishMatch(getApiClient(), match.getMatchId()) (without playing a real game), and then the match is completed for both players.
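For completeness, a minimal sketch of the acknowledging side. The callback is from OnTurnBasedMatchUpdateReceivedListener, and getApiClient() is assumed to be the same helper as in the question:
import com.google.android.gms.games.Games;
import com.google.android.gms.games.multiplayer.turnbased.TurnBasedMatch;

@Override
public void onTurnBasedMatchReceived(TurnBasedMatch match) {
    // The opponent has already called finishMatch(), so the match arrives
    // as MATCH_STATUS_COMPLETE but still shows up under "my turn" until we
    // acknowledge it with our own finishMatch() call (no game data needed).
    if (match.getStatus() == TurnBasedMatch.MATCH_STATUS_COMPLETE) {
        Games.TurnBasedMultiplayer.finishMatch(getApiClient(), match.getMatchId());
    }
}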

NFC tag (NfcA) scan works only from the second time

I wrote a custom Cordova plugin to read blocks of data from an NfcA (i.e. non-NDEF) tag. It seems to work fine, but only from the second scan onwards. I use the activity intent to obtain the "NfcAdapter.EXTRA_TAG" extra and later use it for reading the values. I also update the intent in onNewIntent(). onNewIntent() gets called from the second scan onwards, and from then on I get a result every time. But on the first scan onNewIntent() is not called, so I end up using the activity's original intent, which does not carry "NfcAdapter.EXTRA_TAG", and I get null. Please see my code below.
SE_NfcA.java (my native plugin code):
@Override
public boolean execute(String action, JSONArray args, CallbackContext callbackContext) throws JSONException {
    String Result = "";
    String TypeOfTalking = "";
    if (action.contains("TalkToNFC")) {
        JSONObject arg_object = args.getJSONObject(0);
        TypeOfTalking = arg_object.getString("type");
        if (!TypeOfTalking.isEmpty()) {
            if (TypeOfTalking.contains("readBlock")) {
                if (TypeOfTalking.contains("#")) {
                    try {
                        String[] parts = TypeOfTalking.split("#");
                        int index = Integer.parseInt(parts[1]);
                        Result = Readblock(cordova.getActivity().getIntent(), (byte) index);
                        callbackContext.success(Result);
                    } catch (Exception e) {
                        callbackContext.error("Exception reading " + TypeOfTalking + " due to " + e.toString());
                        return false;
                    }
                }
            } else {
                return false;
            }
        } else {
            return false;
        }
    } else {
        return false;
    }
    return true;
}
@Override
public void onNewIntent(Intent intent) {
    ShowAlert("onNewIntent called");
    Tag tagFromIntent = intent.getParcelableExtra(NfcAdapter.EXTRA_TAG);
    super.onNewIntent(intent);
    getActivity().setIntent(intent);
    savedTag = tagFromIntent;
    savedIntent = intent;
}
@Override
public void onPause(boolean multitasking) {
    Log.d(TAG, "onPause " + getActivity().getIntent());
    super.onPause(multitasking);
    if (multitasking) {
        // nfc can't run in background
        stopNfc();
    }
}

@Override
public void onResume(boolean multitasking) {
    Log.d(TAG, "onResume " + getActivity().getIntent());
    super.onResume(multitasking);
    startNfc();
}
public String Readblock(Intent intent, byte block) throws IOException {
    byte[] response = new byte[]{};
    if (intent != null) {
        Tag myTag = intent.getParcelableExtra(NfcAdapter.EXTRA_TAG);
        if (savedTag != null)
            myTag = savedTag;
        if (myTag != null) {
            try {
                Reader nTagReader = new Reader(myTag);
                nTagReader.close();
                nTagReader.connect();
                nTagReader.SectorSelect(Sector.Sector0);
                response = nTagReader.fast_read(block, block);
                nTagReader.close();
                return ConvertH(response);
            } catch (Exception e) {
                ShowAlert(e.toString());
            }
        } else {
            ShowAlert("myTag is null.");
        }
    }
    return null;
}
private void createPendingIntent() {
    if (pendingIntent == null) {
        Activity activity = getActivity();
        Intent intent = new Intent(activity, activity.getClass());
        intent.addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP | Intent.FLAG_ACTIVITY_CLEAR_TOP);
        pendingIntent = PendingIntent.getActivity(activity, 0, intent, 0);
    }
}

private void startNfc() {
    createPendingIntent(); // onResume can call startNfc before execute
    getActivity().runOnUiThread(new Runnable() {
        public void run() {
            NfcAdapter nfcAdapter = NfcAdapter.getDefaultAdapter(getActivity());
            if (nfcAdapter != null && !getActivity().isFinishing()) {
                try {
                    nfcAdapter.enableForegroundDispatch(getActivity(), getPendingIntent(), getIntentFilters(), getTechLists());
                    if (p2pMessage != null) {
                        nfcAdapter.setNdefPushMessage(p2pMessage, getActivity());
                    }
                } catch (IllegalStateException e) {
                    // issue 110 - user exits app with home button while nfc is initializing
                    Log.w(TAG, "Illegal State Exception starting NFC. Assuming application is terminating.");
                }
            }
        }
    });
}

private void stopNfc() {
    Log.d(TAG, "stopNfc");
    getActivity().runOnUiThread(new Runnable() {
        public void run() {
            NfcAdapter nfcAdapter = NfcAdapter.getDefaultAdapter(getActivity());
            if (nfcAdapter != null) {
                try {
                    nfcAdapter.disableForegroundDispatch(getActivity());
                } catch (IllegalStateException e) {
                    // issue 125 - user exits app with back button while nfc
                    Log.w(TAG, "Illegal State Exception stopping NFC. Assuming application is terminating.");
                }
            }
        }
    });
}

private Activity getActivity() {
    return this.cordova.getActivity();
}

private PendingIntent getPendingIntent() {
    return pendingIntent;
}

private IntentFilter[] getIntentFilters() {
    return intentFilters.toArray(new IntentFilter[intentFilters.size()]);
}

private String[][] getTechLists() {
    //noinspection ToArrayCallWithZeroLengthArrayArgument
    return techLists.toArray(new String[0][0]);
}
My index.js file
nfc.addTagDiscoveredListener(
    function (nfcEvent) {
        console.log(nfcEvent.tag.id);
        alert(nfcEvent.tag.id);
        window.echo("readBlock#88"); // call to plugin
    },
    function () {
        alert("Listening for NFC tags.");
    },
    function () {
        alert("NFC activation failed.");
    }
);
SE_NfcA.js (plugin interface between index.js and SE_NfcA.java):
window.echo = function (natureOfTalk) {
    alert("Inside JS Interface, arg =" + natureOfTalk);
    cordova.exec(function (result) { alert("Result is : " + result); },
                 function (error) { alert("Some Error happened : " + error); },
                 "SE_NfcA", "TalkToNFC", [{ "type": natureOfTalk }]);
};
I guess I have messed up the intent/activity lifecycle. Please help. TIA!
I found a tweak/hack that made it work.
Before making any call to read or write, I make one dummy Initialize call:
window.echo("Initialize");
window.echo("readBlock#88"); // call to plugin to read
And in the plugin's native code, on receiving the "Initialize" token, I make a startNfc() call:
else if (TypeOfTalking.equalsIgnoreCase("Initialize")) {
    startNfc();
}
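An alternative to the dummy call, sketched below: CordovaPlugin has a pluginInitialize() hook that runs when the plugin is instantiated, so foreground dispatch can be armed before the first scan instead of from the first read call. This assumes the plugin is created at app startup (e.g. <param name="onload" value="true" /> in plugin.xml); startNfc() and savedTag are the same members as in the question:
import android.nfc.NfcAdapter;
import android.nfc.Tag;
import org.apache.cordova.CordovaPlugin;

public class SE_NfcA extends CordovaPlugin {

    private Tag savedTag;

    // Runs once when Cordova instantiates the plugin, before any execute()
    // call, so NFC foreground dispatch is active for the very first scan.
    @Override
    protected void pluginInitialize() {
        startNfc();
        // If the app itself was launched by a tag discovery, the tag rides in
        // on the launch intent rather than on a later onNewIntent() delivery.
        Tag tag = cordova.getActivity().getIntent().getParcelableExtra(NfcAdapter.EXTRA_TAG);
        if (tag != null) {
            savedTag = tag;
        }
    }

    private void startNfc() {
        // identical to startNfc() in the question: enableForegroundDispatch(...)
    }
}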

Speech recognizer as a WCF service with a Xamarin Android app client

I would like to write a WCF service that uses the Microsoft.Speech.Recognition library to build a speech-to-text service. Here is my service code:
public class Rozpoznawacz : IRozpoznawacz
{
    public void AudioToText(Stream audioStr)
    {
        SpeechRecognitionEngine _sre = new SpeechRecognitionEngine(new System.Globalization.CultureInfo("pl-PL"));
        // Create a simple grammar that recognizes the words
        Choices words = new Choices();
        // Add the words to be recognised
        words.Add("red");
        words.Add("green");
        words.Add("blue");
        words.Add("yellow");
        words.Add("orange");
        words.Add("Dzień dobry");
        words.Add("Chrząszcz");
        words.Add("Brzmi");
        words.Add("w");
        words.Add("trzcinie");
        words.Add("Wystaw fakturę");
        words.Add("Stefan Burczymucha");
        GrammarBuilder gb = new GrammarBuilder();
        gb.Culture = new System.Globalization.CultureInfo("pl-PL");
        gb.Append(words);
        // Create the actual Grammar instance, and then load it into the speech recognizer.
        Grammar g = new Grammar(gb);
        _sre.LoadGrammar(g);
        // Register a handler for the SpeechRecognized event.
        _sre.SpeechRecognized +=
            new EventHandler<SpeechRecognizedEventArgs>(sre_SpeechRecognized);
        //_sre.SetInputToDefaultAudioDevice();
        _sre.SetInputToWaveStream(audioStr);
        _sre.RecognizeAsync(RecognizeMode.Multiple);
    }

    void sre_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
    {
        string rozpoznanie = "";
        rozpoznanie += e.Result.Text;
        using (StreamWriter outfile = new StreamWriter(@"C:\Test.txt"))
        {
            outfile.Write(rozpoznanie);
        }
    }

    public string Test(string query)
    {
        return string.Format("Przyjęto: {0}", query);
    }
}
Next, I tried to write an Android app with a service reference. I record the voice and want to send it to the web service host, but I don't know how I should send the audio file. Here is my non-working Android app code:
using System;
using Android.App;
using Android.Content;
using Android.Runtime;
using Android.Views;
using Android.Widget;
using Android.OS;
using Android.Media;
using System.IO;

namespace RozpoznawanieMowyZdalne.Adroid
{
    [Activity(Label = "RozpoznawanieMowyZdalne.Adroid", MainLauncher = true, Icon = "@drawable/icon")]
    public class MainActivity : Activity
    {
        int count = 1;
        MediaRecorder recorder;
        MediaPlayer player;
        Button btnStart;
        Button btnStop;
        string path = "/sdcard/test.3gpp";
        private RozpoznawaczService.Rozpoznawacz client;
        private TextView aLabel;
        byte[] audioByte;

        protected override void OnCreate(Bundle bundle)
        {
            base.OnCreate(bundle);

            // Set our view from the "main" layout resource
            SetContentView(Resource.Layout.Main);

            // Get our button from the layout resource,
            // and attach an event to it
            btnStart = FindViewById<Button>(Resource.Id.btnStart);
            btnStop = FindViewById<Button>(Resource.Id.btnStop);

            btnStart.Click += delegate
            {
                client.TestAsync("Android");
                btnStop.Enabled = !btnStop.Enabled;
                btnStart.Enabled = !btnStart.Enabled;
                recorder.SetAudioSource(AudioSource.VoiceRecognition);
                recorder.SetOutputFormat(OutputFormat.ThreeGpp);
                recorder.SetAudioEncoder(AudioEncoder.AmrNb);
                recorder.SetOutputFile(path);
                recorder.Prepare();
                recorder.Start();
                Toast.MakeText(this, "Rozpoczęto nagrywanie", ToastLength.Long).Show();
            };

            btnStop.Click += delegate
            {
                btnStop.Enabled = !btnStop.Enabled;
                recorder.Stop();
                Toast.MakeText(this, "Zakończono nagrywanie", ToastLength.Long).Show();
                recorder.Reset();
                player.SetDataSource(path);
                player.Prepare();
                player.Start();
                File.WriteAllBytes(path, audioByte);
                client.AudioToTextAsync(audioByte);
            };

            InitializeServiceClient();
        }

        protected override void OnResume()
        {
            base.OnResume();
            recorder = new MediaRecorder();
            player = new MediaPlayer();
            player.Completion += (sender, e) =>
            {
                player.Reset();
                btnStart.Enabled = !btnStart.Enabled;
            };
        }

        protected override void OnPause()
        {
            base.OnPause();
            player.Release();
            recorder.Release();
            player.Dispose();
            recorder.Dispose();
            player = null;
            recorder = null;
        }

        private void InitializeServiceClient()
        {
            client = new RozpoznawaczService.Rozpoznawacz();
            client.TestCompleted += client_TestCompleted;
            client.AudioToTextCompleted += client_AudioToTextCompleted;
            aLabel = FindViewById<TextView>(Resource.Id.textViewTest);
        }

        void client_AudioToTextCompleted(object sender, System.ComponentModel.AsyncCompletedEventArgs e)
        {
            string msg = null;
            if (e.Error != null)
            {
                msg = e.Error.Message;
            }
            else if (e.Cancelled)
            {
                msg = "Request was cancelled.";
            }
            else
            {
                //msg = e.Result;
            }
            RunOnUiThread(() => aLabel.Text = "Wyslane");
        }

        void client_TestCompleted(object sender, RozpoznawaczService.TestCompletedEventArgs e)
        {
            string msg = null;
            if (e.Error != null)
            {
                msg = e.Error.Message;
            }
            else if (e.Cancelled)
            {
                msg = "Request was cancelled.";
            }
            else
            {
                msg = e.Result;
            }
            RunOnUiThread(() => aLabel.Text = msg);
        }
    }
}
How can I send my audio file to my web service?
PS: File.WriteAllBytes(path, audioByte); does not work in the Android app...
