Android: download and play a video file simultaneously using VideoView

I start downloading a file and begin playing it while the download is still in progress. If the file is small (less than 15 MB), the source below downloads and plays the video perfectly.
If the file is big (50 MB or more), playback still starts. In my case the file is 50 MB: the VideoView starts playing once about 10 MB has been downloaded. After that first 10 MB has been played, it waits for the upcoming bytes, but when they are downloaded only the audio plays; the video frame is never updated.
public class VideoDemo extends Activity {
private MediaController ctlr;
VideoView videoView = null;
Context context = null;
long totalRead = 0;
int bytesToRead = 50 * 1024;
private int mPlayerPosition;
private File mBufferFile;
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
getWindow().setFormat(PixelFormat.TRANSLUCENT);
setContentView(R.layout.main);
videoView = (VideoView) findViewById(R.id.videoview);
ctlr = new MediaController(this);
ctlr.setMediaPlayer(videoView);
videoView.setMediaController(ctlr);
videoView.requestFocus();
new GetYoutubeFile().start();
}
private class GetYoutubeFile extends Thread {
private String mUrl;
private String mFile;
public GetYoutubeFile() {
}
@Override
public void run() {
super.run();
try {
File bufferingDir = new File(
Environment.getExternalStorageDirectory()
+ "/YoutubeBuff");
InputStream stream = getAssets().open("famous.3gp");
if (stream == null)
throw new RuntimeException("stream is null");
File temp = File.createTempFile("test", "mp4");
System.out.println("hi");
temp.deleteOnExit();
String tempPath = temp.getAbsolutePath();
File bufferFile = File.createTempFile("test", "mp4");
BufferedOutputStream bufferOS = new BufferedOutputStream(
new FileOutputStream(bufferFile));
InputStream is = getAssets().open("famous.3gp");
BufferedInputStream bis = new BufferedInputStream(is, 2048);
byte[] buffer = new byte[16384];
int numRead;
boolean started = false;
while ((numRead = bis.read(buffer)) != -1) {
bufferOS.write(buffer, 0, numRead);
bufferOS.flush();
totalRead += numRead;
if (totalRead > 120000 && !started) {
Log.e("Player", "BufferHIT:StartPlay");
setSourceAndStartPlay(bufferFile);
started = true;
}
}
mBufferFile = bufferFile;
} catch (MalformedURLException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
}
public void setSourceAndStartPlay(File bufferFile) {
try {
mPlayerPosition=videoView.getCurrentPosition();
videoView.setVideoPath(bufferFile.getAbsolutePath());
videoView.start();
} catch (IllegalArgumentException e) {
e.printStackTrace();
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
}
public void onCompletion(MediaPlayer mp) {
mPlayerPosition = mp.getCurrentPosition();
try {
mp.reset();
videoView.setVideoPath(new File("mnt/sdcard/YoutubeBuff/"
+ mBufferFile).getAbsolutePath());
mp.seekTo(mPlayerPosition);
videoView.start();
} catch (IllegalArgumentException e) {
e.printStackTrace();
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
}

@Karthick
Your code looks OK. Just register an OnCompletionListener and an OnErrorListener on your VideoView:
videoView.setOnCompletionListener(this);
videoView.setOnErrorListener(this);
Once the media player has played through the buffered portion of the stream, it will either call onCompletion or report an error ("Video can't be played"). Both cases can be handled by seeking to the saved position again and resuming playback from the same buffer file.
(Please make sure the video bitrate and the download transfer rate are roughly equivalent, so that buffering and playback can run simultaneously.)
The listener implementations could look like this:
@Override
public boolean onError(MediaPlayer mp, int what, int extra) {
// TODO Auto-generated method stub
mPlayerErrorPos = mp.getCurrentPosition();
mPlayerPosition=mPlayerCompletionPos > mPlayerErrorPos ? mPlayerCompletionPos : mPlayerErrorPos;
try {
mp.reset();
videoView.setVideoPath(video.getAbsolutePath());
videoView.seekTo(mPlayerPosition);
videoView.start();
} catch (IllegalArgumentException e) {
e.printStackTrace();
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
return true;
}
@Override
public void onCompletion(MediaPlayer mp) {
mPlayerCompletionPos = mp.getCurrentPosition();
}
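For context, a minimal sketch of how the enclosing Activity could be wired up so these callbacks actually fire, assuming it implements both listener interfaces and keeps the position fields used above (mPlayerCompletionPos, mPlayerErrorPos and video mirror the snippets in this answer; everything else is illustrative, not from the original post):
public class VideoDemo extends Activity
        implements MediaPlayer.OnCompletionListener, MediaPlayer.OnErrorListener {

    private VideoView videoView;
    private int mPlayerPosition;       // last position to resume from
    private int mPlayerCompletionPos;  // saved in onCompletion
    private int mPlayerErrorPos;       // saved in onError
    private File video;                // the buffer file being downloaded into

    @Override
    public void onCreate(Bundle icicle) {
        super.onCreate(icicle);
        setContentView(R.layout.main);
        videoView = (VideoView) findViewById(R.id.videoview);
        // Register the callbacks so playback can be resumed when the
        // player runs out of buffered data.
        videoView.setOnCompletionListener(this);
        videoView.setOnErrorListener(this);
    }

    // onError(...) and onCompletion(...) as shown above.
}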

Related

com.googlecode.mp4parser fails for mp3 audio file?

I am using the com.googlecode.mp4parser library to merge audio files. I have an external mp3 audio file that I store in raw resources. This file fails to merge with the following exception; below is my code.
Reading the file from the raw folder:
InputStream is = context.getResources().openRawResource(R.raw.my_mp3_file);
OutputStream output = null;
try {
File file = new File(context.getFilesDir(), "silence.mp3");
if(!file.exists()) {
file.createNewFile();
}
output = new FileOutputStream(file);
byte[] buffer = new byte[4 * 1024]; // or other buffer size
int read;
while ((read = is.read(buffer)) != -1) {
output.write(buffer, 0, read);
}
output.flush();
output.close();
fileReference= file;
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace(); // handle exception, define IOException and others
} finally {
try {
is.close();
} catch (IOException e) {
e.printStackTrace();
}
}
Code that reads the movie (which is failing):
if(fileReference.exists()) {
Movie m = new MovieCreator().build(fileReference.getAbsolutePath());
}
While getting this Movie m, my code fails with the following exception:
java.lang.NullPointerException: Attempt to invoke virtual method 'java.util.List com.coremedia.iso.boxes.MovieBox.getBoxes(java.lang.Class)' on a null object reference
It works for some mp3 files but fails for raw resource files. What's wrong here?
Here are my conclusion and solution after a lot of research:
For merging audio and video with MP4Parser, the audio must be in an MP4 container, so use a .m4a file rather than an .mp3.
String root = Environment.getExternalStorageDirectory().toString();
String audio = root + "/" + "tests.m4a";
String video = root + "/" + "output.mp4";
String output = root + "/" + "aud_vid.mp4";
mux(video, audio, output);
and here is the method
public boolean mux(String videoFile, String audioFile, String outputFile) {
Movie video;
try {
video = new MovieCreator().build(videoFile);
} catch (RuntimeException e) {
e.printStackTrace();
return false;
} catch (IOException e) {
e.printStackTrace();
return false;
}
Movie audio;
try {
audio = new MovieCreator().build(audioFile);
} catch (IOException e) {
e.printStackTrace();
return false;
} catch (NullPointerException e) {
e.printStackTrace();
return false;
}
Track audioTrack = audio.getTracks().get(0);
video.addTrack(audioTrack);
Container out = new DefaultMp4Builder().build(video);
FileOutputStream fos;
try {
fos = new FileOutputStream(outputFile);
} catch (FileNotFoundException e) {
e.printStackTrace();
return false;
}
BufferedWritableFileByteChannel byteBufferByteChannel = new BufferedWritableFileByteChannel(fos);
try {
out.writeContainer(byteBufferByteChannel);
byteBufferByteChannel.close();
fos.close();
} catch (IOException e) {
e.printStackTrace();
return false;
}
return true;
}
private static class BufferedWritableFileByteChannel implements WritableByteChannel {
private static final int BUFFER_CAPACITY = 1000000;
private boolean isOpen = true;
private final OutputStream outputStream;
private final ByteBuffer byteBuffer;
private final byte[] rawBuffer = new byte[BUFFER_CAPACITY];
private BufferedWritableFileByteChannel(OutputStream outputStream) {
this.outputStream = outputStream;
this.byteBuffer = ByteBuffer.wrap(rawBuffer);
}
@Override
public int write(ByteBuffer inputBuffer) throws IOException {
int inputBytes = inputBuffer.remaining();
if (inputBytes > byteBuffer.remaining()) {
dumpToFile();
byteBuffer.clear();
if (inputBytes > byteBuffer.remaining()) {
throw new BufferOverflowException();
}
}
byteBuffer.put(inputBuffer);
return inputBytes;
}
@Override
public boolean isOpen() {
return isOpen;
}
@Override
public void close() throws IOException {
dumpToFile();
isOpen = false;
}
private void dumpToFile() {
try {
outputStream.write(rawBuffer, 0, byteBuffer.position());
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
It seems this issue happens because the library authors forgot to handle that NullPointerException case. After several hours diving into the code base I finally found a workaround, and it works fine; you can try this:
Movie movie;
try{
movie = MovieCreator.build(videoPath);
}catch(NullPointerException e){
Log.d("AsyncTask", "Catch null getMovieBoxes");
// MovieCreator.build() threw before assigning movie, so create it here and
// populate it manually from the track boxes.
movie = new Movie();
FileDataSourceImpl fileDataSource = new FileDataSourceImpl(new File(videoPath));
IsoFile isoFile = new IsoFile(fileDataSource);
List<TrackBox> trackBoxes = isoFile.getBoxes(TrackBox.class);
for (TrackBox trackBox : trackBoxes) {
SchemeTypeBox schm = Path.getPath(trackBox, "mdia[0]/minf[0]/stbl[0]/stsd[0]/enc.[0]/sinf[0]/schm[0]");
if (schm != null && (schm.getSchemeType().equals("cenc") || schm.getSchemeType().equals("cbc1"))) {
movie.addTrack(new CencMp4TrackImplImpl(fileDataSource.toString() + "[" + trackBox.getTrackHeaderBox().getTrackId() + "]", trackBox));
} else {
movie.addTrack(new Mp4TrackImpl(fileDataSource.toString() + "[" + trackBox.getTrackHeaderBox().getTrackId() + "]" , trackBox));
}
}
}
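For completeness, a hedged sketch of what could follow this recovery path: once movie has its tracks, it can be written out with DefaultMp4Builder just as in the mux() method earlier in this thread (the output path below is only a placeholder):
// Assumes `movie` was populated by the fallback above; the output path is hypothetical.
Container out = new DefaultMp4Builder().build(movie);
FileOutputStream fos = new FileOutputStream("/sdcard/recovered.mp4");
try {
    out.writeContainer(fos.getChannel());   // FileChannel is a WritableByteChannel
} finally {
    fos.close();
}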

How to stream audio?

I have a ListView, a MediaPlayer, and a MediaController. I'd like the MediaPlayer to stream music from a URL when I tap an item in the ListView.
My problem is that the MediaPlayer plays only one song. When I tap the second item while the first item's audio is playing, it does not play the second item's audio. Strangely (I don't know the cause), the MediaPlayer does switch to the second audio once the MediaController hides.
I want the MediaPlayer to stop the current track and play the new one even while the MediaController is showing and audio is playing.
I tried two patterns, but both gave the same result. Please tell me what is wrong (and sorry for my bad English). First pattern:
public void onItemClick(AdapterView<?> parent, View view, int position,
long id) {
switch (position) {
case 0:
try {
mp.reset();
mp.setDataSource(url);
mp.prepare();
} catch (IllegalArgumentException e) {
//
e.printStackTrace();
} catch (IllegalStateException e) {
//
e.printStackTrace();
} catch (IOException e) {
//
e.printStackTrace();
}
break;
case 1:
try {
mp.reset();
mp.setDataSource(url2);
mp.prepare();
} catch (IllegalArgumentException e) {
//
e.printStackTrace();
} catch (IllegalStateException e) {
//
e.printStackTrace();
} catch (IOException e) {
//
e.printStackTrace();
}
break;
case 2:
try {
mp.reset();
mp.setDataSource(url3);
mp.prepare();
} catch (IllegalArgumentException e) {
//
e.printStackTrace();
} catch (IllegalStateException e) {
//
e.printStackTrace();
} catch (IOException e) {
//
e.printStackTrace();
}
break;
}
Second pattern:
public void onItemClick(AdapterView<?> parent, View view, int position,
long id) {
try {
mp.reset();
mp.setDataSource(url[position]);
mp.prepare();
} catch (IllegalArgumentException e) {
//
e.printStackTrace();
} catch (IllegalStateException e) {
//
e.printStackTrace();
} catch (IOException e) {
//
e.printStackTrace();
}
The OnPreparedListener:
mp.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
@Override
public void onPrepared(MediaPlayer mediaPlayer) {
handler.post(new Runnable() {
@Override
public void run() {
controller.setEnabled(true);
controller.show(mp.getDuration());
mp.start();
try {
Method m = android.widget.MediaController.class.getDeclaredMethod("updatePausePlay");
m.setAccessible(true);
m.invoke(controller);
} catch (Exception e) {
}
}
});
}
});
}
onCreate
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_songs);
ItemBean sample1 = new ItemBean();
sample1.setName("sample1");
ItemBean sample2 = new ItemBean();
sample2.setName("sample2");
ItemBean sample3 = new ItemBean();
sample3.setName("sample3");
List<ItemBean> list = new ArrayList<ItemBean>();
list.add(sample1);
list.add(sample2);
list.add(sample3);
sampleurl = getResources().getStringArray(R.array.sampleurl);
mp = new MediaPlayer();
controller = new MediaController(this);
controller.setAnchorView(findViewById(R.id.mediaController));
controller.setMediaPlayer(this);
mp.setAudioStreamType(AudioManager.STREAM_MUSIC);
samplelist = (ListView) findViewById(R.id.song_list);
SonglistAdapter_test adapter = new SonglistAdapter_test(getApplicationContext(),list);
samplelist.setAdapter(adapter);
samplelist.setOnItemClickListener(this);
}
Do you call mcontroller = new MediaController(context); and doClean() somewhere? Most likely the controller needs to be reset before you set a new media source. Add this before your handler.post(new Runnable() {:
doClean();
mcontroller = new MediaController(getActivity()); // or this, depending on what your context is
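A minimal sketch of how the item click could look with that reset in place, assuming doClean() is a hypothetical helper that releases the old controller (as suggested above) and that the Activity implements MediaController.MediaPlayerControl as in the onCreate() shown earlier:
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
    try {
        mp.reset();                       // drop whatever was playing before
        doClean();                        // hypothetical: tear down the old controller
        controller = new MediaController(this);
        controller.setAnchorView(findViewById(R.id.mediaController));
        controller.setMediaPlayer(this);
        mp.setDataSource(url[position]);  // url[] as in the second pattern above
        // The OnPreparedListener from the question (which calls mp.start() and
        // shows the controller) should already be registered on mp.
        mp.prepareAsync();                // avoids blocking the UI thread
    } catch (IOException e) {
        e.printStackTrace();
    }
}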

Play midi file in an asynctask on Android

Here is how my app works: after the app receives the music, it first saves the file to the SD card and then plays it.
I tried to play the music with an AsyncTask called from a class (not an Activity, but a handler). However, the music plays for only 1-2 seconds. Here is the code that starts the AsyncTask:
fos = new FileOutputStream(file);
fos.write(receivedMusicPayload);
fos.flush();
fos.close();
PlayMusicManager pmm = new PlayMusicManager(qrC);
pmm.execute();
and here is the PlayMusicManager class:
public class PlayMusicManager extends AsyncTask<Void, Void, Void> {
private QRConnection qrC;
public PlayMusicManager(QRConnection qrC) {
this.qrC = qrC;
}
@Override
protected Void doInBackground(Void... params) {
MediaPlayer mediaPlayer = new MediaPlayer();
File dir = Environment.getExternalStorageDirectory();
File file = new File(dir, "music.mid");
if (file.exists()) // check if file exist
{
FileInputStream fis;
try {
fis = new FileInputStream(file);
FileDescriptor fd = fis.getFD();
mediaPlayer.setDataSource( fd);
mediaPlayer.prepare();
mediaPlayer.start();
} catch (IllegalArgumentException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IllegalStateException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} else {
qrC.getQrActivity().showResult("No such file");
}
return null;
}
@Override
protected void onPostExecute(Void parms) {
qrC.getQrActivity().showResult("Music Done.");
}
Thanks for your help!
You need to wait until the file has finished playing.
Because mediaPlayer is created as a local variable, it is released as soon as doInBackground() returns.
You have two options:
1) Make mediaPlayer a field of a class that lives long enough.
2) Or keep doInBackground() alive by polling the player with isPlaying(). Something like this:
while (mediaPlayer.isPlaying()) {
    try { Thread.sleep(100); } catch (InterruptedException ignored) { }
}
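And a minimal sketch of option 1, keeping the player as a field and releasing it when playback completes (the field sits on the task here for brevity; holding it on a longer-lived object such as the Activity would be even safer; constructor and onPostExecute() are omitted):
public class PlayMusicManager extends AsyncTask<Void, Void, Void> {
    // A field rather than a local, so the player outlives doInBackground().
    private MediaPlayer mediaPlayer;

    @Override
    protected Void doInBackground(Void... params) {
        mediaPlayer = new MediaPlayer();
        File file = new File(Environment.getExternalStorageDirectory(), "music.mid");
        try {
            FileInputStream fis = new FileInputStream(file);
            mediaPlayer.setDataSource(fis.getFD());
            fis.close();
            mediaPlayer.prepare();
            // Release the player only after the whole file has been played.
            mediaPlayer.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
                @Override
                public void onCompletion(MediaPlayer mp) {
                    mp.release();
                }
            });
            mediaPlayer.start();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return null;
    }
}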

Android how to record music from media player

I'm working on a project where I'm playing music from a URL using Android's built-in MediaPlayer. What I want to achieve is to save the streamed data to a file on the SD card. I've tried doing this with Android's built-in MediaRecorder, but it records everything around the phone, not only the sound coming from the MediaPlayer.
So my question is: what is the best way to achieve this?
Here is an example I've already tested, but I can't play the resulting mp3 file afterwards to check whether everything went OK:
Log.e("URL AGAIN","url : "+url);
try {
if(!isrecording){
URL urlStream = new URL(url);
InputStream inputStream = urlStream.openStream();
Log.d("", "urlStream.openStream()");
String filename = Environment.getExternalStorageDirectory().getAbsolutePath();
filename += "/deliciousradio.mp3";
File outputSource= new File(filename);
fileOutputStream = new FileOutputStream(outputSource);
Log.d("", "FileOutputStream: " + outputSource);
int bytesRead = -1;
isrecording = true;
byte[] buffer = new byte[30 * 1024];
while ((bytesRead = inputStream.read(buffer)) > 0) {
byte[] buffer2 = new byte[bytesRead];
fileOutputStream.write(buffer2);
Log.d("","bytes size :"+buffer2.length);
Log.d("","bytesRead : "+bytesRead);
}
} else if(isrecording){
fileOutputStream.close();
}
} catch(Exception e){}
The problem here is that I'm receiving 30 as the length of buffer2, and I cannot understand why.
Thanks for any kind of help!
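One hedged observation on the copy loop above: buffer2 is allocated with the right size but never filled, so only zero bytes end up in the file, which would explain the unplayable output. A minimal corrected loop might look like this:
byte[] buffer = new byte[30 * 1024];
int bytesRead;
while ((bytesRead = inputStream.read(buffer)) > 0) {
    // Write the bytes that were actually read, straight from `buffer`.
    fileOutputStream.write(buffer, 0, bytesRead);
}
fileOutputStream.flush();
fileOutputStream.close();
inputStream.close();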
Button recstart, recstop,replay;
//onCreate
File f;
File externalStorage;
String path = "";
MediaRecorder recorder;
Timer time;
String filename1 = "";
boolean s = false;
externalStorage = Environment.getExternalStorageDirectory();
String sdCardPath = externalStorage.getAbsolutePath();
recorder = new MediaRecorder();
path = sdCardPath + "/";
recstart.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
// TODO Auto-generated method stub
String state = android.os.Environment.getExternalStorageState();
if (!state.equals(android.os.Environment.MEDIA_MOUNTED)) {
try {
throw new IOException("SD Card is not mounted. It is "
+ state + ".");
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
Date dt = new Date();
int hours = dt.getHours();
int minutes = dt.getMinutes();
int seconds = dt.getSeconds();
String curTime = hours + "_" + minutes + "_" + seconds;
filename1 = "phonecall_at" + curTime + ".mp4";
if (recorder == null) {
recorder = new MediaRecorder();
}
f = new File(path, filename1);
try {
s = f.createNewFile();
} catch (IOException e1) {
// TODO Auto-generated catch block
// Toast.makeText(AudioRecording.this,"hiiiii...."+e1.getMessage(),2000).show();
e1.printStackTrace();
}
// MediaRecorder.AudioSource.VOICE_CALL +
// MediaRecorder.AudioSource.MIC
// recorder.setAudioSource(MediaRecorder.AudioSource.VOICE_UPLINK
// + MediaRecorder.AudioSource.VOICE_DOWNLINK );
if (s == true) {
recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
recorder.setOutputFile(f.getAbsolutePath());
// record.setText("stop");
// record.setBackgroundColor( Color.BLUE);
// Toast.makeText(AudioRecording.this, String.valueOf(s),
// 3000).show();
try {
recorder.prepare();
Toast.makeText(AudioRecording.this, "Recording starts",
5000).show();
} catch (IllegalStateException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
Log.e(".................................",
"" + e.toString());
}
recorder.start();
} else {
Toast.makeText(getApplicationContext(),
"No space Left on device", 2000).show();
}
/*
* try { recorder.prepare(); Toast.makeText(Recordingvoice
* .this,"Recording starts",5000).show(); recorder.start();
*
*
* } catch (IllegalStateException e) { // TODO Auto-generated
* catch block e.printStackTrace(); } catch (IOException e) { //
* TODO Auto-generated catch block // e.printStackTrace(); }
*/
}
});
recstop.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
// TODO Auto-generated method stub
if (s == true) {
Toast.makeText(AudioRecording.this, "Recording stopped",
2000).show();
if (recorder != null) {
recorder.stop();
recorder.release();
}
recorder = null;
// time.cancel();
// uploadCode();
}
}
});
replay.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
// TODO Auto-generated method stub
if (f.exists()) {
Toast showMsg = Toast.makeText(getApplicationContext(), "Playing", Toast.LENGTH_SHORT);
showMsg.show();
String path = f.getAbsolutePath();
Uri myUri = Uri.parse(path);
MediaPlayer mp = new MediaPlayer();
mp.setLooping(false);
mp = MediaPlayer.create(AudioRecording.this, myUri);
mp.start();
}
}
});

Receiving Info of a ShoutCast stream on Android

I am currently making an app to go with my online radio site. I am coding it with Android 2.2 (API 8) and I have the Shoutcast stream working with two buttons.
Here is the code in my main class:
public class GrooveOfMusicRadioActivity extends Activity {
/** Called when the activity is first created. */
MediaPlayer mediaPlayer;
Button start, stop;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
start = (Button) findViewById(R.id.button1);
stop = (Button) findViewById(R.id.button2);
start.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
// TODO Auto-generated method stub
mediaPlayer.start();
}
});
stop.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
// TODO Auto-generated method stub
mediaPlayer.pause();
}
});
String url = "http://67.212.165.106:8161"; // your URL here
mediaPlayer = new MediaPlayer();
mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
mediaPlayer.setAudioStreamType(AudioManager.STREAM_NOTIFICATION);
try {
mediaPlayer.setDataSource(url);
} catch (IllegalArgumentException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IllegalStateException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
mediaPlayer.prepare();
} catch (IllegalStateException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
So I was wondering: how do I receive the stream title, song, artist, etc. and make it appear?
The main XML is in a RelativeLayout.
Thanks, I am a total noob when it comes to programming.
Thanks, Mark :)
I just had to get the metadata myself. I basically did the same thing as in Pulling Track Info From an Audio Stream Using PHP. A lot of the data is in the headers, so you can use those, but all I wanted was the stream title, so that's what I got.
Activity mainAct = this;
public void getNowPlaying(View v) {
Log.w("getNowPlaying", "fired");
new Thread(new Runnable() {
public void run() {
String title = null, djName = null;
try {
URL updateURL = new URL(YOUR_STREAM_URL_HERE);
URLConnection conn = updateURL.openConnection();
conn.setRequestProperty("Icy-MetaData", "1");
int interval = Integer.valueOf(conn.getHeaderField("icy-metaint")); // You can get more headers if you wish. There is other useful data.
InputStream is = conn.getInputStream();
int skipped = 0;
while (skipped < interval) {
skipped += is.skip(interval - skipped);
}
int metadataLength = is.read() * 16;
byte[] bytes = new byte[metadataLength];
int offset = 0;
// Keep reading until the whole metadata block has been received.
while (offset < metadataLength) {
int bytesRead = is.read(bytes, offset, metadataLength - offset);
if (bytesRead == -1) break;
offset += bytesRead;
}
String metaData = new String(bytes).trim();
title = metaData.substring(metaData.indexOf("StreamTitle='") + 13, metaData.indexOf(" / ", metaData.indexOf("StreamTitle='"))).trim();
djName = metaData.substring(metaData.indexOf(" / ", metaData.indexOf("StreamTitle='")) + 3, metaData.indexOf("';", metaData.indexOf("StreamTitle='"))).trim();
Log.w("metadata", metaData);
is.close();
} catch (MalformedURLException e) { e.printStackTrace();
} catch (IOException e) { e.printStackTrace(); }
final String titleFin = title;
final String djNameFin = djName;
mainAct.runOnUiThread(new Runnable() {
public void run() {
Toast.makeText(mainAct, titleFin + "\n" + djNameFin, Toast.LENGTH_SHORT).show();
}
});
}
}).start();
}
What you're using to play the stream has no knowledge of (and doesn't care about) the metadata. You're going to have to deal with that separately.
See these posts for something you can easily adapt to Android:
Pulling Track Info From an Audio Stream Using PHP
http://www.smackfu.com/stuff/programming/shoutcast.html
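If a station doesn't embed the " / "-separated DJ name that the metadata-parsing snippet earlier in this thread assumes, a more defensive way to pull just the StreamTitle out of the metadata block is a small regex. This is only a sketch built on the metaData string from the answer above (it needs java.util.regex.Pattern and java.util.regex.Matcher):
// Hedged sketch: extract StreamTitle='...' from the raw ICY metadata string.
Pattern p = Pattern.compile("StreamTitle='([^']*)';");
Matcher m = p.matcher(metaData);
String streamTitle = m.find() ? m.group(1) : null;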
