I have been trying to make a VR video player app for Google Cardboard on Android, where the video can either be streamed or downloaded. The video works fine in the editor but does not work on my phone. I am using Unity 5.6.1f1 on Windows 10; the phone is a Moto G4 Plus running Nougat.
Here are the scripts used to stream or download the video:
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;

public class GazeButton : MonoBehaviour {

    public static bool toStream;
    //private bool buttonPress;
    public Image streamImg;
    public Image downImg;
    public Slider progress;
    public Text txt;
    //public WWW url;
    //public GvrVideoPlayerTexture
    private PlayVideo thePlayVideo;
    public Camera theCamera;

    // Use this for initialization
    void Start () {
        if (theCamera == null)
            theCamera = GameObject.Find ("Main Camera").GetComponent<Camera> ();
        if (streamImg == null)
            streamImg = GameObject.Find ("StreamImage").GetComponent<Image> ();
        if (downImg == null)
            downImg = GameObject.Find ("DownImage").GetComponent<Image> ();
        streamImg.color = Color.green;
        downImg.color = Color.red;
        if (progress == null)
            progress = GameObject.Find ("ProgressSlider").GetComponent<Slider> ();
        progress.value = 0;
        progress.gameObject.SetActive (false);
        if (txt == null)
            txt = GameObject.Find ("GuideText").GetComponent<Text> ();
        thePlayVideo = FindObjectOfType<PlayVideo> ();
    }

    // Update is called once per frame
    void Update () {
        if (progress.IsActive ()) {
            progress.value += 1;
            if (progress.value >= progress.maxValue /*&& buttonPress*/) {
                if (toStream) {
                    streamImg.color = Color.gray;
                    streamImg.gameObject.SetActive (false);
                    downImg.gameObject.SetActive (false);
                    progress.gameObject.SetActive (false);
                    txt.gameObject.SetActive (false);
                    //FlipCameraView ();
                    thePlayVideo.Stream ();
                } else {
                    downImg.color = Color.gray;
                    streamImg.gameObject.SetActive (false);
                    downImg.gameObject.SetActive (false);
                    progress.gameObject.SetActive (false);
                    txt.gameObject.SetActive (false);
                    //FlipCameraView ();
                    thePlayVideo.Download ();
                }
            }
        }
    }

    public void StreamButtonDown () {
        streamImg.color = Color.blue;
        toStream = true;
        //buttonPress = true;
        progress.gameObject.SetActive (true);
        progress.value = 0;
    }

    public void DownButtonDown () {
        downImg.color = Color.blue;
        toStream = false;
        //buttonPress = true;
        progress.gameObject.SetActive (true);
        progress.value = 0;
    }

    public void StreamButtonUp () {
        streamImg.color = Color.green;
        //buttonPress = false;
        progress.gameObject.SetActive (false);
    }

    public void DownButtonUp () {
        downImg.color = Color.red;
        //buttonPress = false;
        progress.gameObject.SetActive (false);
    }

    public bool GetCondition () {
        return toStream;
    }
}
And this is the script used to actually stream the video:
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Video;

public class PlayVideo : MonoBehaviour {

    //public GameObject theSphere;
    private VideoPlayer theVideoPlayer;
    //private VideoSource theVideoSource;
    private AudioSource theAudioSource;
    public GazeButton theGazeButton;

    // Use this for initialization
    void Start () {
        /*if (theSphere == null)
            theSphere = GameObject.Find ("Sphere");*/
        theGazeButton = GetComponent<GazeButton> ();
    }

    // Update is called once per frame
    void Update () {
        if (theVideoPlayer != null) {
            if (/*(!theGazeButton.GetCondition ()) &&*/ theVideoPlayer.isPrepared) {
                theVideoPlayer.Play ();
                theAudioSource.Play ();
            }
        }
    }

    public void RealStart () {
        theVideoPlayer = gameObject.AddComponent<VideoPlayer> ();
        //theVideoSource = gameObject.AddComponent<VideoSource> ();
        theAudioSource = gameObject.AddComponent<AudioSource> ();
        theVideoPlayer.source = VideoSource.Url;
        theVideoPlayer.url = "https://<SOME LINK>.mp4";
        //theSphere.AddComponent<VideoPlayer>(theVideoPlayer);
        theVideoPlayer.audioOutputMode = VideoAudioOutputMode.AudioSource;
        theVideoPlayer.EnableAudioTrack (0, true);
        theVideoPlayer.SetTargetAudioSource (0, theAudioSource);
    }

    public void Stream () {
        RealStart ();
        theVideoPlayer.playOnAwake = true;
        theVideoPlayer.Play ();
        theAudioSource.Play ();
    }

    public void Download () {
        RealStart ();
        theVideoPlayer.playOnAwake = false;
        theVideoPlayer.Prepare ();
    }
}
I can't, for the life of me, understand why the video runs perfectly in the Editor and not on the phone. Please help.
Use a modern Unity editor (at least 2017.4 LTS) and install the latest Google VR SDK for Unity. There, look at the VideoDemo.scene file to see video playback in Google Cardboard from Unity.
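Whichever SDK version you end up on, it helps to surface what the VideoPlayer is actually doing on the device. A minimal diagnostic sketch (assuming the same AddComponent-style setup and placeholder URL as in the question): subscribe to prepareCompleted and errorReceived instead of polling isPrepared every frame, and watch adb logcat for the error text.

using UnityEngine;
using UnityEngine.Video;

// Diagnostic sketch: start playback only once the player reports it is
// prepared, and log any device-side failure through errorReceived.
public class VideoDiagnostics : MonoBehaviour {

    void Start () {
        var player = gameObject.AddComponent<VideoPlayer> ();
        player.source = VideoSource.Url;
        player.url = "https://<SOME LINK>.mp4"; // placeholder URL from the question

        // Fires once the stream is buffered and decodable; no per-frame polling.
        player.prepareCompleted += vp => vp.Play ();

        // On Android this usually names the codec or URL problem.
        player.errorReceived += (vp, message) => Debug.LogError ("VideoPlayer error: " + message);

        player.Prepare ();
    }
}

A common cause of editor-versus-device differences is the codec: the editor can decode formats the phone's hardware decoder cannot, so an H.264 baseline MP4 is the safest test asset.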
Related
I am creating a class which loads up a few sounds. However, isPlaying keeps throwing an exception after a while, and then that particular sound stops playing permanently, while other sounds keep playing fine.
public class MySound {
    int m_IdMyId;
    int m_ResId;
    boolean m_IsLoaded;
    MediaPlayer m_Media;

    public MySound(int idMyId, int resId) {
        m_IdMyId = idMyId;
        m_ResId = resId;
        m_IsLoaded = false;
        m_Media = null;
    }
}
Here, m_IdMyId is just an id for my game, and m_ResId is something like R.raw.mysound1. m_IsLoaded can, I think, simply be set to true, as I am loading synchronously. m_Media is the MediaPlayer object.
I am calling stop() very regularly, as it is a game and I need to check every second or so to make sure certain sounds are stopped. It is here that an exception is thrown when snd.m_Media.isPlaying() is called.
I cannot seem to inspect e to see what the error is.
Also, I would like to know how I can set m_IsLoaded correctly. How do I know when a sound is fully loaded and ready to use?
Here is my management class:
public class MySoundManager {
    MainActivity m_Context;
    ArrayList<MySound> mySounds;

    public MySoundManager(MainActivity context) {
        m_Context = context;
        mySounds = new ArrayList<MySound>();
        mySounds.add(new MySound(8, R.raw.mysound1));
        mySounds.add(new MySound(10, R.raw.mysound2));
        mySounds.add(new MySound(22, R.raw.mysound3));
        mySounds.add(new MySound(100, R.raw.click));
        mySounds.add(new MySound(101, R.raw.error));
        for (MySound mysound : mySounds) {
            mysound.m_Media = MediaPlayer.create(m_Context, mysound.m_ResId); // no need to call prepare(); create() does that for you
            mysound.m_IsLoaded = true;
        }
    }

    // I call this when the main thread calls onResume
    public void onResume() {
        for (MySound mysound : mySounds) {
            if (mysound.m_Media == null) {
                mysound.m_Media = MediaPlayer.create(m_Context, mysound.m_ResId); // no need to call prepare(); create() does that for you
                mysound.m_IsLoaded = true;
            }
        }
    }

    // I call this when the main thread calls onPause
    public void onPause() {
        for (MySound mysound : mySounds) {
            if (mysound.m_Media != null) {
                mysound.m_Media.stop();
                mysound.m_Media.release();
                mysound.m_Media = null;
            }
        }
    }

    public boolean IsAllLoaded() {
        for (MySound mysound : mySounds) {
            if (!mysound.m_IsLoaded) return false;
        }
        return true;
    }

    public MySound FindMySoundByIdMyId(int idMyId) {
        try {
            for (MySound mysound : mySounds) {
                if (mysound.m_IdMyId == idMyId) return mysound;
            }
        } catch (Exception e) {
            MySound snd;
            snd = null; // ToDo
        }
        return null;
    }

    public void play(int idMyId) {
        MySound snd;
        try {
            if ((snd = FindMySoundByIdMyId(idMyId)) != null)
                snd.m_Media.start();
        } catch (IllegalStateException e) {
            snd = null; // ToDo
        }
    }

    public void pause(int idMyId) {
        MySound snd;
        try {
            if ((snd = FindMySoundByIdMyId(idMyId)) != null &&
                    snd.m_Media.isPlaying())
                snd.m_Media.pause();
        } catch (IllegalStateException e) {
            snd = null; // ToDo
        }
    }

    public void pauseAll() {
        try {
            for (MySound mysound : mySounds) {
                if (mysound.m_Media.isPlaying())
                    mysound.m_Media.pause();
            }
        } catch (IllegalStateException e) {
            MySound snd;
            snd = null; // ToDo
        }
    }

    public boolean isPlaying(int idMyId, MySound[] fill) {
        MySound snd;
        fill[0] = null;
        try {
            if ((snd = FindMySoundByIdMyId(idMyId)) != null) {
                fill[0] = snd;
                return snd.m_Media.isPlaying();
            }
        } catch (IllegalStateException e) {
            snd = null; // ToDo
        }
        return false;
    }

    public void stop(int idMyId) {
        MySound snd;
        try {
            if ((snd = FindMySoundByIdMyId(idMyId)) != null &&
                    snd.m_Media.isPlaying())
                snd.m_Media.stop();
        } catch (IllegalStateException e) {
            snd = null; // ToDo
        }
    }

    // The str is in the format
    // number id, 1 = on 0 = off, dont play if this id playing;
    public void PlaySound(String str) {
        boolean isplaying;
        int i, len, id, idDontPlay, milliNow;
        String[] strARR = str.split(";");
        String[] strARR2;
        Integer[] tmpIntARR;
        ArrayList<Integer[]> onARR = new ArrayList<Integer[]>();
        ArrayList<Integer> offARR = new ArrayList<Integer>();
        MySound snd;
        for (i = 0, len = strARR.length; i < len; i++) {
            if (strARR[i].length() <= 0) continue;
            if ((strARR2 = strARR[i].split(",")) != null &&
                    strARR2.length >= 3 &&
                    strARR2[0].length() > 0 &&
                    strARR2[1].length() > 0 &&
                    strARR2[2].length() > 0) {
                id = Integer.parseInt(strARR2[0]);
                idDontPlay = Integer.parseInt(strARR2[2]);
                tmpIntARR = new Integer[2];
                tmpIntARR[0] = id;
                tmpIntARR[1] = idDontPlay;
                if (Integer.parseInt(strARR2[1]) == 1) {
                    onARR.add(tmpIntARR);
                } else offARR.add(id);
            }
        }
        // Turn off all sounds that need to be turned off
        for (i = 0, len = offARR.size(); i < len; i++) {
            id = offARR.get(i);
            stop(id);
        }
        // Turn on all sounds that need to be turned on,
        // but only if the sound that blocks a new sound is not playing
        for (i = 0, len = onARR.size(); i < len; i++) {
            tmpIntARR = onARR.get(i);
            id = tmpIntARR[0];
            idDontPlay = tmpIntARR[1];
            // We dont play if the idDontPlay sound is already playing
            if ((snd = FindMySoundByIdMyId(idDontPlay)) != null &&
                    snd.m_Media.isPlaying())
                continue;
            if ((snd = FindMySoundByIdMyId(id)) != null) {
                isplaying = snd.m_Media.isPlaying();
                milliNow = snd.m_Media.getCurrentPosition();
                if (milliNow > (snd.m_Media.getDuration() - 1000) ||
                        (!isplaying && milliNow > 0)) {
                    snd.m_Media.seekTo(0);
                }
                if (!isplaying) snd.m_Media.start();
            }
        }
    }
}
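To make the string format concrete: given the ids registered in the constructor, the call below starts sound 8 unless sound 10 is already playing, and stops sound 22.

mySoundManager.PlaySound("8,1,10;22,0,0;");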
Creating a MediaPlayer instance for every sound is not good practice when you want low latency, especially for short clips. MediaPlayer is meant for longer clips such as music files; it uses a large buffer, and a larger buffer means higher latency. There is also the AudioFocus mechanism on Android, which may interfere with your playback session. So I strongly recommend you use SoundPool to play short clips like game sounds.
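For reference, a minimal SoundPool setup could look like this (a sketch, assuming API 21+ and Java 8; the resource ids are the ones from the question). The load-complete callback also answers the "fully loaded" question directly: a status of 0 there is the signal that a clip is ready to play.

import android.content.Context;
import android.media.AudioAttributes;
import android.media.SoundPool;

public class GameSounds {
    private SoundPool soundPool;
    private int clickId;
    private boolean clickLoaded = false;

    public GameSounds(Context context) {
        AudioAttributes attrs = new AudioAttributes.Builder()
                .setUsage(AudioAttributes.USAGE_GAME)
                .setContentType(AudioAttributes.CONTENT_TYPE_SONIFICATION)
                .build();
        soundPool = new SoundPool.Builder()
                .setMaxStreams(4) // how many clips may overlap
                .setAudioAttributes(attrs)
                .build();
        // Loading is asynchronous; this callback is the reliable way to set
        // an "is loaded" flag instead of assuming loading has finished.
        soundPool.setOnLoadCompleteListener((pool, sampleId, status) -> {
            if (sampleId == clickId && status == 0) clickLoaded = true;
        });
        clickId = soundPool.load(context, R.raw.click, 1);
    }

    public void playClick() {
        if (clickLoaded) {
            // left/right volume, priority, no loop, normal playback rate
            soundPool.play(clickId, 1f, 1f, 1, 0, 1f);
        }
    }
}

SoundPool keeps decoded samples in memory, so there is no prepared/stopped state machine to trip over and no IllegalStateException from playback-state queries.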
If I replace the prepare() method with prepareAsync(), no error is thrown, but the media will not play.
If I use prepare(), errors are thrown.
Please help me.
OnCreate(....)

    //Media
    player = new MediaPlayer ();
    player.Stop ();

public void StartMedia (string url_string)
{
    Load_Data ();
    seekBar.Max = player.Duration;
    player.Stop ();
    player.Reset ();
    player.SetAudioStreamType (Stream.Music);
    player.SetDataSource (url_string);
    player.Prepare ();
    player.Start ();
    imgPlayorPause.SetImageResource (Resource.Drawable.ic_pause_black_36dp);
    //UpdatedTimerTask ();
    UpdateProcessBar ();
}
public void NextTracks (int positon)
{
    if (Isrepeat == false) {
        if (positon >= mListData.Count ()) {
            mPosition = 1;
        } else {
            mPosition++;
        }
    } else {
        mPosition = positon;
    }
    StartMedia (mListData [mPosition].stream_url + "?client_id=" + clienId);
}
I tried several approaches that did not work. This one does:
player.Prepared += (object sender, EventArgs e) =>
{
    player.Start ();
};
This is fine and solves the problem.
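Put together, the streaming path can be shaped like this (a sketch based on StartMedia above: wire Prepared before calling PrepareAsync, and only read Duration or call Start once the event has fired):

public void StartMedia (string url_string)
{
    player.Reset ();
    player.SetAudioStreamType (Stream.Music);
    player.SetDataSource (url_string);

    // PrepareAsync() returns immediately; calling Start() right here would
    // throw, because the player is not yet in the prepared state.
    player.Prepared += OnPrepared;
    player.PrepareAsync ();
}

void OnPrepared (object sender, EventArgs e)
{
    player.Prepared -= OnPrepared; // avoid stacking handlers across tracks
    seekBar.Max = player.Duration; // Duration is only valid once prepared
    player.Start ();
    imgPlayorPause.SetImageResource (Resource.Drawable.ic_pause_black_36dp);
    UpdateProcessBar ();
}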
I have a problem closing my game after the last player closes the last match. My turn scheme is:
Player A
Player B
Player B
Player A
Player A
Player B
The game works well, but in turn 6, when player B tries to close the match, player A always sees the match as "my turn" and not as "completed".
Here is the code that rules turns and game end:
@Override
public void onGameEnd(final NMXGameData updatedData) {
    super.onGameEnd(updatedData);
    if (updatedData.getMatchNumber() == NMXGameConfig.MATCHES) {
        boolean iWin = updatedData.getResultPoints()[1] > updatedData.getResultPoints()[0];
        boolean tile = updatedData.getResultPoints()[1] == updatedData.getResultPoints()[0];
        ParticipantResult opponentParticipantResult;
        ParticipantResult myParticipantResult;
        if (tile) {
            opponentParticipantResult = new ParticipantResult(getOpponentId(), ParticipantResult.MATCH_RESULT_TIE, 1);
            myParticipantResult = new ParticipantResult(getCurrentPlayerId(), ParticipantResult.MATCH_RESULT_TIE, 1);
        } else {
            if (iWin) {
                opponentParticipantResult = new ParticipantResult(getOpponentId(), ParticipantResult.MATCH_RESULT_LOSS, 2);
                myParticipantResult = new ParticipantResult(getCurrentPlayerId(), ParticipantResult.MATCH_RESULT_WIN, 1);
            } else {
                opponentParticipantResult = new ParticipantResult(getOpponentId(), ParticipantResult.MATCH_RESULT_WIN, 1);
                myParticipantResult = new ParticipantResult(getCurrentPlayerId(), ParticipantResult.MATCH_RESULT_LOSS, 2);
            }
        }
        ArrayList<ParticipantResult> participantResultArrayList = new ArrayList<>();
        participantResultArrayList.add(opponentParticipantResult);
        participantResultArrayList.add(myParticipantResult);
        Games.TurnBasedMultiplayer.finishMatch(getApiClient(), match.getMatchId(), new Gson().toJson(updatedData).getBytes(), opponentParticipantResult, myParticipantResult).setResultCallback(new ResultCallback<TurnBasedMultiplayer.UpdateMatchResult>() {
            @Override
            public void onResult(TurnBasedMultiplayer.UpdateMatchResult updateMatchResult) {
                finish();
            }
        });
    } else if (updatedData.getMatchNumber() < NMXGameConfig.MATCHES) {
        if (getNextPlayerIndex(updatedData.getMatchNumber()) != getNextPlayerIndex(updatedData.getMatchNumber() - 1)) {
            Games.TurnBasedMultiplayer.takeTurn(getApiClient(), match.getMatchId(), new Gson().toJson(updatedData).getBytes(), getNextParticipantId());
        } else {
            Games.TurnBasedMultiplayer.takeTurn(getApiClient(), match.getMatchId(), new Gson().toJson(updatedData).getBytes(), getCurrentPlayerId());
            startActivity(startNewOnlineGameIntent(this, updatedData, match.getMatchId()));
        }
        finish();
    }
}

private String getCurrentPlayerId() {
    return match.getParticipantId(Games.Players.getCurrentPlayerId(getApiClient()));
}

private String getOpponentId() {
    for (String id : match.getParticipantIds()) {
        if (!id.equals(getCurrentPlayerId())) {
            return id;
        }
    }
    return null;
}

private int getNextPlayerIndex(int nextRoundIndex) {
    nextRoundIndex = nextRoundIndex + 1;
    return (nextRoundIndex / 2) % 2;
}
I finally figured it out.
I don't know if this is the desired behavior, but when, in round 6, player_B calls:
Games.TurnBasedMultiplayer.finishMatch(getApiClient(), match.getMatchId(), new Gson().toJson(updatedData).getBytes(), opponentParticipantResult, myParticipantResult).setResultCallback(new ResultCallback<TurnBasedMultiplayer.UpdateMatchResult>() {
    @Override
    public void onResult(TurnBasedMultiplayer.UpdateMatchResult updateMatchResult) {
        finish();
    }
});
the turn goes to player_A, who sees the match as "my turn". At this point player A must call Games.TurnBasedMultiplayer.finishMatch(getApiClient(), match.getMatchId()) (without playing a real turn), and the game is completed for both players.
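On player A's side this can be handled wherever the updated match arrives, for example (a sketch against the same GoogleApiClient-based Play Games API as above; handleMatch is a hypothetical helper):

// Called with the match player A receives, e.g. from onTurnBasedMatchReceived
// or after loading it from the inbox.
private void handleMatch(TurnBasedMatch match) {
    if (match.getStatus() == TurnBasedMatch.MATCH_STATUS_COMPLETE
            && match.getTurnStatus() == TurnBasedMatch.MATCH_TURN_STATUS_MY_TURN) {
        // The opponent already reported the results; player A only has to
        // acknowledge so the match shows as "completed" for both players.
        Games.TurnBasedMultiplayer.finishMatch(getApiClient(), match.getMatchId());
        return;
    }
    // ... otherwise take a normal turn ...
}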
I would like to write a WCF service that uses the Microsoft.Speech.Recognition library to provide speech-to-text. Here is my service code:
public class Rozpoznawacz : IRozpoznawacz
{
    public void AudioToText(Stream audioStr)
    {
        SpeechRecognitionEngine _sre = new SpeechRecognitionEngine(new System.Globalization.CultureInfo("pl-PL"));
        // Create a simple grammar that recognizes the words
        Choices words = new Choices();
        // Add the words to be recognised
        words.Add("red");
        words.Add("green");
        words.Add("blue");
        words.Add("yellow");
        words.Add("orange");
        words.Add("Dzień dobry");
        words.Add("Chrząszcz");
        words.Add("Brzmi");
        words.Add("w");
        words.Add("trzcinie");
        words.Add("Wystaw fakturę");
        words.Add("Stefan Burczymucha");
        GrammarBuilder gb = new GrammarBuilder();
        gb.Culture = new System.Globalization.CultureInfo("pl-PL");
        gb.Append(words);
        // Create the actual Grammar instance, and then load it into the speech recognizer.
        Grammar g = new Grammar(gb);
        _sre.LoadGrammar(g);
        // Register a handler for the SpeechRecognized event.
        _sre.SpeechRecognized +=
            new EventHandler<SpeechRecognizedEventArgs>(sre_SpeechRecognized);
        //_sre.SetInputToDefaultAudioDevice();
        _sre.SetInputToWaveStream(audioStr);
        _sre.RecognizeAsync(RecognizeMode.Multiple);
    }

    void sre_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
    {
        string rozpoznanie = "";
        rozpoznanie += e.Result.Text;
        using (StreamWriter outfile = new StreamWriter(@"C:\Test.txt"))
        {
            outfile.Write(rozpoznanie);
        }
    }

    public string Test(string query)
    {
        return string.Format("Przyjęto: {0}", query);
    }
}
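An aside on the service itself: RecognizeAsync returns immediately, so recognition may still be running when the WCF operation completes, and the result only ever lands in C:\Test.txt. If the goal is to hand the text back to the caller, the blocking Recognize() is easier to reason about. A sketch reusing the grammar setup above (note it changes the operation contract from void to string, so the service reference would have to be regenerated):

public string AudioToText(Stream audioStr)
{
    SpeechRecognitionEngine sre = new SpeechRecognitionEngine(new System.Globalization.CultureInfo("pl-PL"));
    // ... build and load the same Grammar as above ...
    sre.SetInputToWaveStream(audioStr);

    // Recognize() blocks until the first result; it returns null on
    // silence or when nothing in the grammar matched.
    RecognitionResult result = sre.Recognize();
    return result != null ? result.Text : string.Empty;
}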
Next, I tried to write an Android app with a service reference. I tried to record voice and send it to the web service host, but I don't know how I should send the audio file. Here is my non-working code in the Android app:
using System;
using Android.App;
using Android.Content;
using Android.Runtime;
using Android.Views;
using Android.Widget;
using Android.OS;
using Android.Media;
using System.IO;

namespace RozpoznawanieMowyZdalne.Adroid
{
    [Activity(Label = "RozpoznawanieMowyZdalne.Adroid", MainLauncher = true, Icon = "@drawable/icon")]
    public class MainActivity : Activity
    {
        int count = 1;
        MediaRecorder recorder;
        MediaPlayer player;
        Button btnStart;
        Button btnStop;
        string path = "/sdcard/test.3gpp";
        private RozpoznawaczService.Rozpoznawacz client;
        private TextView aLabel;
        byte[] audioByte;

        protected override void OnCreate(Bundle bundle)
        {
            base.OnCreate(bundle);

            // Set our view from the "main" layout resource
            SetContentView(Resource.Layout.Main);

            // Get our button from the layout resource,
            // and attach an event to it
            btnStart = FindViewById<Button>(Resource.Id.btnStart);
            btnStop = FindViewById<Button>(Resource.Id.btnStop);

            btnStart.Click += delegate
            {
                client.TestAsync("Android");
                btnStop.Enabled = !btnStop.Enabled;
                btnStart.Enabled = !btnStart.Enabled;
                recorder.SetAudioSource(AudioSource.VoiceRecognition);
                recorder.SetOutputFormat(OutputFormat.ThreeGpp);
                recorder.SetAudioEncoder(AudioEncoder.AmrNb);
                recorder.SetOutputFile(path);
                recorder.Prepare();
                recorder.Start();
                Toast.MakeText(this, "Rozpoczęto nagrywanie", ToastLength.Long).Show();
            };

            btnStop.Click += delegate
            {
                btnStop.Enabled = !btnStop.Enabled;
                recorder.Stop();
                Toast.MakeText(this, "Zakończono nagrywanie", ToastLength.Long).Show();
                recorder.Reset();
                player.SetDataSource(path);
                player.Prepare();
                player.Start();
                File.WriteAllBytes(path, audioByte);
                client.AudioToTextAsync(audioByte);
            };

            InitializeServiceClient();
        }

        protected override void OnResume()
        {
            base.OnResume();
            recorder = new MediaRecorder();
            player = new MediaPlayer();
            player.Completion += (sender, e) =>
            {
                player.Reset();
                btnStart.Enabled = !btnStart.Enabled;
            };
        }

        protected override void OnPause()
        {
            base.OnPause();
            player.Release();
            recorder.Release();
            player.Dispose();
            recorder.Dispose();
            player = null;
            recorder = null;
        }

        private void InitializeServiceClient()
        {
            client = new RozpoznawaczService.Rozpoznawacz();
            client.TestCompleted += client_TestCompleted;
            client.AudioToTextCompleted += client_AudioToTextCompleted;
            aLabel = FindViewById<TextView>(Resource.Id.textViewTest);
        }

        void client_AudioToTextCompleted(object sender, System.ComponentModel.AsyncCompletedEventArgs e)
        {
            string msg = null;
            if (e.Error != null)
            {
                msg = e.Error.Message;
            }
            else if (e.Cancelled)
            {
                msg = "Request was cancelled.";
            }
            else
            {
                //msg = e.Result;
            }
            RunOnUiThread(() => aLabel.Text = "Wyslane");
        }

        void client_TestCompleted(object sender, RozpoznawaczService.TestCompletedEventArgs e)
        {
            string msg = null;
            if (e.Error != null)
            {
                msg = e.Error.Message;
            }
            else if (e.Cancelled)
            {
                msg = "Request was cancelled.";
            }
            else
            {
                msg = e.Result;
            }
            RunOnUiThread(() => aLabel.Text = msg);
        }
    }
}
How can I send my audio file to my web service?
PS. File.WriteAllBytes(path, audioByte); doesn't work in the Android app...
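One concrete problem in the code above: audioByte is never filled, and File.WriteAllBytes(path, audioByte) overwrites the fresh recording with that empty array. Reading the finished file into the array before calling the service should get the bytes across (a sketch using the same fields as above):

btnStop.Click += delegate
{
    btnStop.Enabled = !btnStop.Enabled;
    recorder.Stop ();
    recorder.Reset ();
    Toast.MakeText (this, "Zakończono nagrywanie", ToastLength.Long).Show ();

    // Read the finished 3GPP recording instead of overwriting it.
    audioByte = File.ReadAllBytes (path);
    client.AudioToTextAsync (audioByte);
};

Note that SetInputToWaveStream on the service side expects WAV data, while MediaRecorder produces AMR audio in a 3GPP container here, so the recording would still have to be converted (or captured as PCM/WAV) before the recognizer can read it.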
I am new to SIP calls using RTP. I am trying to send and receive voice streams over RTP for a SIP call. I have connected two emulators and am able to send an INVITE and INVITE-ACK using JAIN-SIP. After I get the ACK I want to start RTP for media streaming. I use an RtpPacket function to send and receive media, with all the RTP header fields, like this:
byte Version;
boolean Padding;
boolean Extension;
byte CC;
boolean Marker;
byte PayloadType;
short SequenceNumber;
int TimeStamp;
Please give me some ideas, and links where I can find an answer.
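For reference, those fields map onto the fixed 12-byte RTP header defined in RFC 3550. Here is a sketch of packing one packet manually; the ssrc parameter is an assumption (a random per-session source id), since it is not among the fields listed above:

import java.nio.ByteBuffer;

// Packs the fixed 12-byte RTP header (RFC 3550) followed by the payload.
// ByteBuffer's default big-endian order matches network byte order.
static byte[] buildRtpPacket(byte payloadType, short sequenceNumber,
                             int timeStamp, int ssrc, byte[] payload) {
    ByteBuffer buf = ByteBuffer.allocate(12 + payload.length);
    buf.put((byte) 0x80);                 // V=2, P=0, X=0, CC=0
    buf.put((byte) (payloadType & 0x7F)); // M=0 plus the 7-bit payload type
    buf.putShort(sequenceNumber);         // increments by 1 per packet sent
    buf.putInt(timeStamp);                // advances by samples per packet
    buf.putInt(ssrc);                     // random per-session source id
    buf.put(payload);                     // the encoded audio frame
    return buf.array();
}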
This can be achieved in a simpler manner:
AudioManager audio = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
audio.setMode(AudioManager.MODE_IN_COMMUNICATION);
audioGroup = new AudioGroup();
audioGroup.setMode(AudioGroup.MODE_ECHO_SUPPRESSION);
audioStream = new AudioStream(InetAddress.getByAddress(getLocalIPAddress()));
audioStream.setCodec(AudioCodec.PCMU);
audioStream.setMode(RtpStream.MODE_NORMAL);
audioStream.associate(InetAddress.getByName(SipStackAndroid.getRemoteIp()), REMOTE_PORT);
audioStream.join(audioGroup);
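This path needs the android.permission.INTERNET and android.permission.RECORD_AUDIO permissions in the manifest, and the session should be torn down when the call ends, roughly like this (a sketch using the android.net.rtp teardown methods):

// Tear the RTP session down when the SIP call ends.
audioGroup.clear();                      // detach every stream from the group
audioStream.release();                   // free the underlying RTP socket
audio.setMode(AudioManager.MODE_NORMAL); // restore the normal audio mode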
We send and receive RTP data using RTPPacket:
import javax.media.rtp.*;
import javax.media.rtp.rtcp.*;
import javax.media.rtp.event.*;
import javax.media.*;
import javax.media.protocol.*;
import java.net.InetAddress;
import javax.media.format.AudioFormat;
import com.sun.media.ui.*;
import java.util.Vector;

public class RTPSourceStream<RTPPlayerWindow> implements ReceiveStreamListener, ControllerListener {

    @SuppressWarnings("rawtypes")
    Vector playerlist = new Vector();
    @SuppressWarnings("deprecation")
    SessionManager mgr = null;
    boolean terminatedbyClose = false;

    @SuppressWarnings("deprecation")
    public SessionManager createManager(String address,
                                        String sport,
                                        String sttl,
                                        boolean listener,
                                        boolean sendlistener) {
        return createManager(address,
                             new Integer(sport).intValue(),
                             new Integer(sttl).intValue(),
                             listener,
                             sendlistener);
    }

    @SuppressWarnings("deprecation")
    public SessionManager createManager(String address,
                                        int port,
                                        int ttl,
                                        boolean listener,
                                        boolean sendlistener) {
        mgr = (SessionManager) new com.sun.media.rtp.RTPSessionMgr();
        if (mgr == null) return null;
        mgr.addFormat(new AudioFormat(AudioFormat.DVI_RTP, 44100, 4, 1), 18);
        if (listener) mgr.addReceiveStreamListener(this);
        // if (sendlistener) new RTPSendStreamWindow(mgr);
        // ask RTPSM to generate the local participant's CNAME
        String cname = mgr.generateCNAME();
        String username = null;
        try {
            username = System.getProperty("user.name");
        } catch (SecurityException e) {
            username = "jmf-user";
        }
        // create our local session address
        SessionAddress localaddr = new SessionAddress();
        try {
            InetAddress destaddr = InetAddress.getByName(address);
            SessionAddress sessaddr = new SessionAddress(destaddr, port, destaddr, port + 1);
            SourceDescription[] userdesclist = new SourceDescription[] {
                new SourceDescription(SourceDescription.SOURCE_DESC_EMAIL, "jmf-user@sun.com", 1, false),
                new SourceDescription(SourceDescription.SOURCE_DESC_CNAME, cname, 1, false),
                new SourceDescription(SourceDescription.SOURCE_DESC_TOOL, "JMF RTP Player v2.0", 1, false)
            };
            mgr.initSession(localaddr, userdesclist, 0.05, 0.25);
            mgr.startSession(sessaddr, ttl, null);
        } catch (Exception e) {
            System.err.println(e.getMessage());
            return null;
        }
        return mgr;
    }

    public void update(ReceiveStreamEvent event) {
        Player newplayer = null;
        RTPPacket playerWindow = null;
        // find the source RTPSM for this event
        SessionManager source = (SessionManager) event.getSource();
        // create a new player if a new receive stream is detected
        if (event instanceof NewReceiveStreamEvent) {
            String cname = "Java Media Player";
            ReceiveStream stream = null;
            try {
                // get a handle over the ReceiveStream
                stream = ((NewReceiveStreamEvent) event).getReceiveStream();
                Participant part = stream.getParticipant();
                if (part != null) cname = part.getCNAME();
                // get a handle over the ReceiveStream datasource
                DataSource dsource = stream.getDataSource();
                // create a player by passing the datasource to the Media Manager
                newplayer = Manager.createPlayer(dsource);
                System.out.println("created player " + newplayer);
            } catch (Exception e) {
                System.err.println("NewReceiveStreamEvent exception " + e.getMessage());
                return;
            }
            if (newplayer == null) return;
            playerlist.addElement(newplayer);
            newplayer.addControllerListener(this);
            // send this player to the player GUI
            playerWindow = new RTPPacket(newplayer, cname);
        }
    }

    public void controllerUpdate(ControllerEvent evt) {
        // get a handle over the controller and remove it from the player list;
        // if the player list is empty, close the session manager.
        if ((evt instanceof ControllerClosedEvent) ||
                (evt instanceof ControllerErrorEvent) ||
                (evt instanceof DeallocateEvent)) {
            Player p = (Player) evt.getSourceController();
            if (!terminatedbyClose) {
                if (playerlist.contains(p))
                    playerlist.removeElement(p);
                if ((playerlist.size() == 0) && (mgr != null))
                    mgr.closeSession("All players are closed");
            }
        }
    }

    public void closeManager() {
        terminatedbyClose = true;
        // first close all the players
        for (int i = 0; i < playerlist.size(); i++) {
            ((Player) playerlist.elementAt(i)).close();
        }
        if (mgr != null) {
            mgr.closeSession("RTP Session Terminated");
            mgr = null;
        }
    }

    class RTPPacket extends RTPSourceStream {
        public RTPPacket(Player newplayer, String cname) {
            // TODO Auto-generated constructor stub
        }
    }
}
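A minimal way to drive the class above might be (a sketch; the multicast address, port, and TTL are placeholders):

RTPSourceStream rtp = new RTPSourceStream();
// listen on 224.1.1.1:22224 with TTL 1 and register the receive listener
rtp.createManager("224.1.1.1", "22224", "1", true, false);
// ... receive and play until done ...
rtp.closeManager();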