Need a simple example for audio recording - android

I am in need of a simple audio recording and playing example using AudioRecorder in Android. I tried MediaRecorder and it works fine.

You mean AudioRecord? Search e.g. "AudioRecord.OnRecordPositionUpdateListener" using Google Code Search. Btw, AudioRecord does recording, not playing.
See also:
Improve Android Audio Recording quality?
Android AudioRecord class - process live mic audio quickly, set up callback function
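If it helps, here is a minimal sketch of wiring up AudioRecord with that listener; the sample rate, channel configuration, and notification period below are assumed values rather than anything from the question:

// Minimal sketch: AudioRecord with an OnRecordPositionUpdateListener (assumed parameters).
int sampleRate = 8000;
int minBuf = AudioRecord.getMinBufferSize(sampleRate,
        AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
final AudioRecord record = new AudioRecord(MediaRecorder.AudioSource.MIC,
        sampleRate, AudioFormat.CHANNEL_IN_MONO,
        AudioFormat.ENCODING_PCM_16BIT, minBuf * 4);

record.setPositionNotificationPeriod(sampleRate / 2); // roughly every half second of audio
record.setRecordPositionUpdateListener(new AudioRecord.OnRecordPositionUpdateListener() {
    @Override
    public void onPeriodicNotification(AudioRecord recorder) {
        // the record head has advanced by another notification period
    }

    @Override
    public void onMarkerReached(AudioRecord recorder) {
        // the marker set with setNotificationMarkerPosition() has been reached
    }
});

record.startRecording();
// ...read from 'record' in a loop, then record.stop() and record.release() when done.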

Here is sample code for recording audio with AudioRecord.
private Runnable recordRunnable = new Runnable() {
@Override
public void run() {
byte[] audiodata = new byte[mBufferSizeInBytes];
int readsize = 0;
Log.d(TAG, "start to record");
// start the audio recording
try {
mAudioRecord.startRecording();
} catch (IllegalStateException ex) {
ex.printStackTrace();
}
// in the loop to read data from audio and save it to file.
while (mInRecording == true) {
readsize = mAudioRecord.read(audiodata, 0, mBufferSizeInBytes);
if (AudioRecord.ERROR_INVALID_OPERATION != readsize
&& mFos != null) {
try {
mFos.write(audiodata, 0, readsize); // write only the bytes actually read
} catch (IOException e) {
e.printStackTrace();
}
}
}
// stop recording
try {
mAudioRecord.stop();
} catch (IllegalStateException ex) {
ex.printStackTrace();
}
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
mRecordLogTextView.append("\n Audio finishes recording");
}
});
// close the file
try {
if (mFos != null)
mFos.close();
} catch (IOException e) {
e.printStackTrace();
}
}
};
Then you need two buttons (or one button that serves different functions at different times) to start and stop the recording thread.
mRecordStartButton = (Button) rootView
.findViewById(R.id.audio_record_start);
mRecordStartButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// initialize the audio source
int recordChannel = getChoosedSampleChannelForRecord();
int recordFrequency = getChoosedSampleFrequencyForRecord();
int recordBits = getChoosedSampleBitsForRecord();
Log.d(TAG, "recordBits = " + recordBits);
mRecordChannel = getChoosedSampleChannelForSave();
mRecordBits = getChoosedSampleBitsForSave();
mRecordFrequency = recordFrequency;
// set up the audio source : get the buffer size for audio
// record.
int minBufferSizeInBytes = AudioRecord.getMinBufferSize(
recordFrequency, recordChannel, recordBits);
if(AudioRecord.ERROR_BAD_VALUE == minBufferSizeInBytes){
mRecordLogTextView.setText("Configuration Error");
return;
}
int bufferSizeInBytes = minBufferSizeInBytes * 4;
// create AudioRecord object
mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
recordFrequency, recordChannel, recordBits,
bufferSizeInBytes);
// calculate the buffer size used in the file operation.
mBufferSizeInBytes = minBufferSizeInBytes * 2;
// reset the save file setup
String rawFilePath = WaveFileWrapper
.getRawFilePath(RAW_PCM_FILE_NAME);
try {
File file = new File(rawFilePath);
if (file.exists()) {
file.delete();
}
mFos = new FileOutputStream(file);
} catch (Exception e) {
e.printStackTrace();
}
if (mInRecording == false) {
mRecordThread = new Thread(recordRunnable);
mRecordThread.setName("Demo.AudioRecord");
mRecordThread.start();
mRecordLogTextView.setText(" Audio starts recording");
mInRecording = true;
// enable the stop button
mRecordStopButton.setEnabled(true);
// disable the start button
mRecordStartButton.setEnabled(false);
}
// show the log info
String audioInfo = " Audio Information : \n"
+ " sample rate = " + mRecordFrequency + "\n"
+ " channel = " + mRecordChannel + "\n"
+ " sample byte = " + mRecordBits;
mRecordLogTextView.setText(audioInfo);
}
});
mRecordStopButton = (Button) rootView
.findViewById(R.id.audio_record_stop);
mRecordStopButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
if (mInRecording == false) {
Log.d(TAG, "current NOT in Record");
} else {
// stop recording
if (mRecordThread != null) {
Log.d(TAG, "mRecordThread is not null");
mInRecording = false;
Log.d(TAG, "set mInRecording to false");
try {
mRecordThread.join(TIMEOUT_FOR_RECORD_THREAD_JOIN);
Log.d(TAG, "record thread joins here");
} catch (InterruptedException e) {
e.printStackTrace();
}
mRecordThread = null;
// re-enable the start button
mRecordStartButton.setEnabled(true);
// disable the stop button
mRecordStopButton.setEnabled(false);
} else {
Log.d(TAG, "mRecordThread is null");
}
}
}
});
Then you can save the PCM data into a WAV file, for example as sketched below.
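The answer stops at raw PCM. If the WaveFileWrapper helper mentioned above is not available to you, a standard 44-byte RIFF/WAVE header can be prepended along these lines; this is a minimal sketch, and the method name and parameters are illustrative:

// Sketch: wrap a raw PCM file in a 44-byte WAV (RIFF) header.
// Needs java.io.* and java.nio.ByteBuffer / java.nio.ByteOrder.
private static void writeWavFile(File rawPcm, File wavOut,
        int sampleRate, int channels, int bitsPerSample) throws IOException {
    long dataLen = rawPcm.length();
    int byteRate = sampleRate * channels * bitsPerSample / 8;

    ByteBuffer header = ByteBuffer.allocate(44).order(ByteOrder.LITTLE_ENDIAN);
    header.put("RIFF".getBytes("US-ASCII"));
    header.putInt((int) (36 + dataLen));                      // RIFF chunk size
    header.put("WAVE".getBytes("US-ASCII"));
    header.put("fmt ".getBytes("US-ASCII"));
    header.putInt(16);                                        // fmt chunk size for PCM
    header.putShort((short) 1);                               // audio format 1 = PCM
    header.putShort((short) channels);
    header.putInt(sampleRate);
    header.putInt(byteRate);
    header.putShort((short) (channels * bitsPerSample / 8));  // block align
    header.putShort((short) bitsPerSample);
    header.put("data".getBytes("US-ASCII"));
    header.putInt((int) dataLen);                             // PCM data size

    FileInputStream in = new FileInputStream(rawPcm);
    FileOutputStream out = new FileOutputStream(wavOut);
    try {
        out.write(header.array());
        byte[] buf = new byte[4096];
        int n;
        while ((n = in.read(buf)) > 0) {
            out.write(buf, 0, n);
        }
    } finally {
        in.close();
        out.close();
    }
}

The sampleRate, channels and bitsPerSample arguments must correspond to the sample rate, channel count and bits per sample actually used when capturing the PCM data.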

Related

Record Audio when api.ai process start listening

I am using the api.ai (Google Dialogflow) service to detect intents in my chat application. I also want to record an audio file and send it to the backend server.
I am able to do this with the following code snippet, but the problem is that my application works properly on a Vivo V9 (SDK 27) and a Poco F1 (SDK 28),
but crashes when I run it on the emulator.
I debugged the code and found that if I comment out the recorder.startListening() method, my application works fine on all devices, but then I am not able to record the audio file.
Here is my code snippet:
public class ChatActivity extends AppCompatActivity implements View.OnClickListener, AIListener{
private AIService aiService;
private static final int REQUEST_INTERNET = 200;
AudioRecord recorder;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_chat);
//GCP STRAMING INITIALIZATION
final AIConfiguration config = new AIConfiguration("******************",
AIConfiguration.SupportedLanguages.English,
AIConfiguration.RecognitionEngine.System);
aiService = AIService.getService(this, config);
aiService.setListener(this);
}
//GCP STREAMING CODE
public void createFile() {
//Creating file
File dir = Environment.getExternalStorageDirectory();
try {
audiofile = File.createTempFile("sound", ".wav", dir);
} catch (IOException e) {
Log.e("I", "external storage access error");
}
}
@Override
public void onResult(AIResponse response) {
Result result = response.getResult();
Log.d("I", "Query: " + result.getResolvedQuery() +
"\nAction: " + result.getAction() +
"\nParameters: " + parameterString);
}
@Override
public void onError(AIError error) {
Log.d("Error", error + "");
aiService.cancel();
}
@Override
public void onAudioLevel(float level) {
}
@Override
public void onListeningStarted() {
try {
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
RECORDER_SAMPLERATE, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING, bufferSize);
int i = recorder.getState();
if (i == 1)
Thread.sleep(1000);
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
@Override
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
} catch (Exception e) {
e.printStackTrace();
}
}
@Override
public void onListeningCanceled() {
try {
aiService.cancel();
} catch (Exception e) {
e.printStackTrace();
}
}
@Override
public void onListeningFinished() {
if (null != recorder) {
isRecording = false;
int i = recorder.getState();
if (i == 1) {
recorder.stop();
}
recorder.release();
recorder = null;
recordingThread = null;
}
copyWaveFile(getTempFilename(), getFilename());
deleteTempFile();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
try {
File initialFile = new File(audioPath);
// File initialFile = new File("/storage/sdcard1/download/01 - Baarish (128 Kbps) - DownloadMing.SE.mp3");
InputStream targetStream = new FileInputStream(initialFile);
byte[] buffer = new byte[targetStream.available()];
int bytesRead;
while ((bytesRead = targetStream.read(buffer)) > 0) {
baos.write(buffer, 0, bytesRead);
}
} catch (Exception e) {
e.printStackTrace();
}
//upload to server
uploadRecording(baos.toByteArray());
}
}
I want to use the api.ai service and also record an audio file (.wav format) at the same time.
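For what it's worth, the snippet references several fields that are not shown (RECORDER_SAMPLERATE, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING, bufferSize, isRecording, recordingThread). A typical set of definitions that would make the AudioRecord constructor call valid looks like the sketch below; the concrete values are assumptions, not taken from the question:

// Assumed definitions for the fields used but not shown in the snippet above.
private static final int RECORDER_SAMPLERATE = 44100;
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_MONO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;

// AudioRecord requires a buffer at least as large as getMinBufferSize() for these parameters.
private final int bufferSize = AudioRecord.getMinBufferSize(
        RECORDER_SAMPLERATE, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);

private volatile boolean isRecording = false;
private Thread recordingThread;

Note also that the system speech recognizer used by AIService and a separate AudioRecord may both try to open the microphone at the same time; on some devices and on the emulator only one client can hold the audio input, which is worth checking given that the crash only appears when startListening() is enabled.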

How to record live streamed video from rtsp url in android

I am developing an Android application in which I need to record live streamed video. I am using the RTSP protocol and the FFmpeg library to stream and record video. Live streaming works properly, but video recording does not. I think the problem is the URL connection I use here to record video, but I don't know what the correct connection method is. I have searched a lot about this but can't find anything. If anybody knows, please help me.
This is my startRecording() method
private void startRecording() {
try {
URL url = new URL(path);
URLConnection urlConnection1 = url.openConnection();
in1 = new BufferedInputStream(urlConnection1.getInputStream());
} catch (IOException e) {
e.printStackTrace();
}
try {
mIn = new MjpegInputStream(in1,MIN_ARRAY_LENGTH);
mIn.resetFrameCount();
// return new MjpegInputStream(in, MIN_ARRAY_LENGTH);
recorder.start();
calltimer();
audioStatus = Micstatus.equals("1");
startTime = 0;
completedFrames = 0;
audioCompleted = 0;
isRecording = true;
if (isJelliBean) {
resetTextureLayout();
}
trial.setCanZoom(isRecording);
setMenuEnabled();
threadVideo = new VideoRecording();
threadVideo.start();
if (audioStatus) {
threadAudio = new AudioRecordRunnables();
threadAudio.start();
}
} catch (Exception e) {
try {
isRecording = false;
trial.setCanZoom(isRecording);
setMenuEnabled();
Toast.makeText(getApplicationContext(), "Try again1", Toast.LENGTH_SHORT).show();
} catch (Exception e1) {
}
}
}
This is my VideoRecording class
private class VideoRecording extends Thread implements Runnable {
public void run() {
try {
int i = -1;
int cc = 0;
int completed = 0;
FileOutputStream out;
RecordingHelper recordingHelper;
try {
Thread.sleep(VIDEO_FRAME_RATE < 6 ? 2000 : 300);
} catch (Exception e) {
}
String file = getCacheDir() + "/temp.jpg";
opencv_core.IplImage iplImage;
runOnUiThread(videoTimeRunnable);
while (isRecording || completed < mIn.totalFrames || true) {
try {
if (cc < 10) {
cc++;
}
if (i > (MIN_ARRAY_LENGTH - 2)) {
i = -1;
}
recordingHelper = mIn.datas[i + 1];
if (recordingHelper != null && recordingHelper.length > 1) {
i++;
completed++;
if (startTime == 0) {
startTime = System.currentTimeMillis();
}
completedFrames++;
out = new FileOutputStream(file);
out.write(recordingHelper.data);
out.flush();
out.close();
Log.e("recording", "=" + recordingHelper.rotaion);
iplImage = cvLoadImage(file, 1);
OpenCVFrameConverter.ToIplImage grabberConverter = new OpenCVFrameConverter.ToIplImage();
Frame frame = grabberConverter.convert(iplImage);
recorder.record(frame);
opencv_core.cvReleaseImage(iplImage);
mIn.datas[i] = null;
totalFramesRecordedByActivity++;
runOnUiThread(videoTimeRunnable);
} else if (!isRecording && startTime > 0) {
break;
} else if (cc > 5 && mIn.totalFrames < 1) {
break;
}
} catch (Exception e) {
}
}
new File(file).delete();
endTime = System.currentTimeMillis();
videoThreadFinished = true;
finalizeRecording();
} catch (Throwable t) {
finishThis();
}
}
}

Android: Reliable Audio Recording, All Devices

Can someone please share with me a RELIABLE way to record audio across all devices using MediaRecorder? I'm simply trying to record a low-bitrate AMR-format audio file, which according to Google is standard across all devices. That's a bunch of crap.
In my experience, there are many off-brand devices, tablets, etc. that will fail horribly when you use the default AudioEncoder.AMR_NB. My current workaround is to use reflection to poll which encoders are in the superclass, then loop through each one with an error listener to see which one doesn't fail. Not only is this ungraceful, but it doesn't catch all devices. I have also tried setting the AudioEncoder and OutputFormat options to default (constant 0), and this fails horribly on some devices as well.
Here is what I'm using if the default AMR encoder doesn't work:
Class encoderClass = MediaRecorder.AudioEncoder.class;
Field[] encoders = encoderClass.getFields();
Then I loop through each encoder, setting an error listener. If it ends successfully, I save it as the default encoder in settings.
for (int i = j; i < encoders.length; i++) {
try {
int enc = encoders[i].getInt(null);
recorder.reset();
recorder.setAudioSource(AudioSource.MIC);
recorder.setOutputFormat(OutputFormat.THREE_GPP);
recorder.setAudioEncoder(enc); //testing the encoder const here
recorder.setOutputFile(amrPath);
recorder.setMaxDuration(3000);
recorder.setOnInfoListener(new OnInfoListener() {
I continue the loop if the listener catches an error:
if (arg1 == MediaRecorder.MEDIA_RECORDER_ERROR_UNKNOWN) {
This technique works for most devices. What about the rest? I still have devices that fall through the cracks, and frankly I'd like something RELIABLE for nearly all devices.
Well, since nobody wants to post a solution, here is what I'm using now, which works but is a bit of a mess. I start with a setupAudio() method that tries three common audio encoder and container setups. This works for most devices. If it doesn't, it falls back to an additional method, setupAltAudio(), which cycles through the encoder values listed for the device and tries each one. I'm sure someone will chime in and say "why not use OnErrorListener()?" That doesn't work for many devices, as they throw weird, non-fatal errors, and if I reacted to those I could be stopping a valid recording setup.
Errors that are generally non-recoverable happen while setting up the MediaRecorder, so I messily catch the setAudioEncoder(), prepare() and start() methods. If an exception is thrown there, I don't have a valid audio recording setup. I have not cleaned up this code yet, and it has some elements that can be improved. Once the audio encoder succeeds, I save the encoder and container values to settings and re-run the setupAudio() method. This time it grabs those settings and goes directly to startRecording(). So in all, I try the most common MediaRecorder setups first, then use reflection to cycle through each encoder as a trial-and-error method.
EDIT:
setupAltAudio() is missing one detail: the primary loop variable (i) needs to be initialized to the audioLoop value stored in settings, which keeps track of which encoder was last tested.
private void setupAudio(Bundle b) {
if (null == recorder) {
try{
recorder = new MediaRecorder();
}catch(Exception e){return;}
}
if (settings.getInt("audioEncoder", -1) > -1) {
if(null==b){
seconds = 60;
}else{
seconds = b.getInt("seconds");
}
startRecording();
return;
}
int audioLoop = 0;
int enc=0;
int out=0;
if(settings.getInt("audioLoop", 0)>0){
audioLoop = settings.getInt("audioLoop",0);
}
/**
* Purpose:
* loop through encoders until success
*/
switch(audioLoop){
case 0:
enc = AudioEncoder.AMR_NB;
out = OutputFormat.THREE_GPP;
break;
case 1:
enc = AudioEncoder.AMR_NB;
out = OutputFormat.DEFAULT;
break;
case 2:
enc = AudioEncoder.DEFAULT;
out = OutputFormat.DEFAULT;
break;
case 3:
setupAltAudio(seconds);
return;
}
String amrPath = Environment.getExternalStorageDirectory()
.getAbsolutePath() + "/data/temp";
if(!new File(amrPath).exists()){
new File(amrPath).mkdirs();
}
amrPath += "/test.3gp";
try{
recorder.reset();
recorder.setAudioSource(AudioSource.MIC);
recorder.setOutputFormat(out);
recorder.setAudioEncoder(enc);
recorder.setOutputFile(amrPath);
recorder.setMaxDuration(5000);
recorder.prepare();
recorder.start();
SharedPreferences.Editor editor = settings.edit();
editor.putInt("audioEncoder", enc);
editor.putInt("audioContainer", out);
editor.commit();
setupAudio(b);
return;
}catch(Exception e){
e.printStackTrace();
int count = settings.getInt("audioLoop", 0);
count++;
SharedPreferences.Editor editor = settings.edit();
editor.putInt("audioLoop", count);
editor.commit();
setupAudio(b);
return;
}
}
private void setupAltAudio(int seconds){
Class encoderClass = null;
Field[] encoders=null;
try{
encoderClass = MediaRecorder.AudioEncoder.class;
encoders = encoderClass.getFields();
}catch(Exception e){}
File tempDir = new File(Environment.getExternalStorageDirectory().getAbsolutePath() + "/data/tmp");
if(!tempDir.exists()){
tempDir.mkdirs();
}
int enc = 0;
int container = 0;
for(int i = 0; i < encoders.length; i++){
try{
enc = encoders[i].getInt(null);
}catch(Exception e){
continue;
}
recorder.reset();
recorder.setAudioSource(AudioSource.MIC);
try{
recorder.setOutputFormat(OutputFormat.THREE_GPP);
container = OutputFormat.THREE_GPP;
}catch(Exception e){
recorder.setOutputFormat(OutputFormat.DEFAULT);
container = OutputFormat.DEFAULT;
}
recorder.setAudioEncoder(enc);
recorder.setOutputFile(amrPath);
recorder.setMaxDuration(seconds*1000);
recorder.setOnInfoListener(new OnInfoListener() {
public void onInfo(MediaRecorder arg0, int arg1, int arg2) {
if (arg1 == MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED) {
try{
recorder.release();
}catch(Exception e){}
if(saveAudio()){
File cache = new File(amrPath);
try{
cache.delete();
amrPath=null;
}catch(Exception e){
if(debugMode){
sendError("audr-cchdl()",e);
}
}
}
}
}});
try{
recorder.prepare();
recorder.start();
SharedPreferences.Editor editor = settings.edit();
editor.putInt("audioEncoder", enc);
editor.putInt("audioContainer", container);
editor.commit();
}catch(Exception e){
recorder.release();
continue;
}
}
}
private void startRecording() {
if (!storageAvailable()) {
stopMe();
return;
}
try {
int audioEncoder = settings.getInt("audioEncoder", 1);
int audioContainer = settings.getInt("audioContainer",1);
String stamp = String.valueOf(System.currentTimeMillis());
String filePath = Environment.getExternalStorageDirectory()
.getAbsolutePath() + "/data/temp/";
File fileDir = new File(filePath);
if (!fileDir.exists()) {
fileDir.mkdirs();
}
amrPath = filePath + stamp + ".3gp";
recorder = new MediaRecorder();
recorder.reset();
recorder.setAudioSource(AudioSource.MIC);
recorder.setOutputFormat(audioContainer);
recorder.setAudioEncoder(audioEncoder);
recorder.setOutputFile(amrPath);
recorder.setMaxDuration(seconds * 1000);
recorder.setOnInfoListener(new OnInfoListener() {
public void onInfo(MediaRecorder arg0, int arg1, int arg2) {
if (arg1 == MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED) {
try {
recorder.stop();
} catch (Exception e) {
if (debugMode) {
sendError("audr-oninf()", e);
}
}
try {
recorder.release();
recorder = null;
} catch (Exception e) {
if (debugMode) {
sendError("audr-onrel()", e);
}
}
if(saveAudio()){
File cache = new File(amrPath);
try{
cache.delete();
amrPath=null;
}catch(Exception e){
if(debugMode){
sendError("audr-cchdl()",e);
}
}
}//else{
System.out.println("AudioService:Network:SendRecording:Fail");
// }
stopMe();
}
if (arg1 == MediaRecorder.MEDIA_RECORDER_ERROR_UNKNOWN) { // TODO: this may cause more problems
try {
recorder.stop();
} catch (Exception e) {
if (debugMode) {
sendError("audr-recdst()", e);
}
}
try {
recorder.release();
recorder = null;
if(new File(amrPath).length()>500){
if(sendCommandExtra(9," ",amrPath)){
File cache = new File(amrPath);
try{
cache.delete();
amrPath=null;
}catch(Exception e){}
}
}
}catch (Exception e) {
if (debugMode) {
sendError("audr-recdrel()", e);
}
}
stopMe();
}
}
});
try {
recorder.prepare();
recorder.start();
} catch (Exception e) {
if (debugMode) {
sendError("audr-prpst()", e);
}
recorder.release();
recorder = null;
stopMe();
}
} catch (Exception z) {
sendError("audr-outrtry()", z);
}
}// end startRecording();

Recognize Stop AudioTrack

How can I detect when an AudioTrack has finished playing? It's not simple!
I need to play one audio buffer immediately after the other (enqueue them), and this code is not working...
When I press twice, the first track does not end, and the two sounds play at the same time...
private void getNextAudio() {
try{
byte[] buffer = playlist.poll();
if (buffer != null) {
AudioTrack myatrack = getAudioTrack(buffer);
myatrack.setPlaybackPositionUpdateListener( new OnPlaybackPositionUpdateListener() {
public void onPeriodicNotification(AudioTrack track) {
}
public void onMarkerReached(AudioTrack track) {
Log.d(TAG, "onMarker - estado: " + track.getPlayState());
if(track.getPlayState() == AudioTrack.PLAYSTATE_STOPPED){
track.flush();
track.stop();
track.release();
getNextAudio();
}
}
});
if (myatrack != null) {
// play the audio.
myatrack.play();
}
}else{
Log.d(TAG,"Finish");
}
}catch(NoSuchElementException ex){
Log.d(TAG,"No such element");
}
}
Thanks again!
Mateus
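One thing worth checking in the snippet above: onMarkerReached() only fires if a notification marker has actually been set on the track. A minimal sketch, assuming a 16-bit mono PCM AudioTrack created from the buffer (so 2 bytes per frame), would be:

// Sketch: make onMarkerReached() fire when playback reaches the end of the written data.
AudioTrack track = getAudioTrack(buffer);           // the question's own helper
int frameCount = buffer.length / 2;                 // bytes -> frames for 16-bit mono PCM
track.setNotificationMarkerPosition(frameCount);    // marker at the last frame
track.setPlaybackPositionUpdateListener(new AudioTrack.OnPlaybackPositionUpdateListener() {
    @Override
    public void onPeriodicNotification(AudioTrack t) {
    }

    @Override
    public void onMarkerReached(AudioTrack t) {
        // playback reached the end of the buffer: release this track and start the next one
        t.stop();
        t.release();
        getNextAudio();
    }
});
track.play();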

Live streaming in android

I need to play live streamed video. I am trying to play the video using MediaPlayer and a SurfaceView. The URL is redirected from HTTP with a .mov extension to RTSP with a .3gp extension. The video plays fine on OS v2.3.x, but it does not play on any other OS version from 2.1 onward. Any suggestions?
public class StreamingAudioActivity extends Activity {
/** Called when the activity is first created. */
private Button streamButton;
private ImageButton playButton;
private TextView textStreamed;
private boolean isPlaying;
private StreamingMediaPlayer audioStreamer;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
// }
//
// private void initControls() {
textStreamed = (TextView) findViewById(R.id.text_kb_streamed);
streamButton = (Button) findViewById(R.id.button_stream);
streamButton.setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
Log.v("click","streaming on");
startStreamingAudio();
}
});
playButton = (ImageButton) findViewById(R.id.button_play);
playButton.setEnabled(false);
playButton.setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
if (audioStreamer.getMediaPlayer().isPlaying()) {
audioStreamer.getMediaPlayer().pause();
playButton.setImageResource(R.drawable.button_play);
} else {
audioStreamer.getMediaPlayer().start();
audioStreamer.startPlayProgressUpdater();
playButton.setImageResource(R.drawable.button_pause);
}
isPlaying = !isPlaying;
}
});
}
private void startStreamingAudio() {
try {
final ProgressBar progressBar = (ProgressBar) findViewById(R.id.progress_bar);
if (audioStreamer != null) {
audioStreamer.interrupt();
}
audioStreamer = new StreamingMediaPlayer(this, textStreamed,
playButton, streamButton, progressBar);
Log.v("aaa", "sdaa");
// audioStreamer.startStreaming("http://www.pocketjourney.com/downloads/pj/tutorials/audio.mp3",1717,
// 214);
audioStreamer.startStreaming(
"http://vprbbc.streamguys.net:80/vprbbc24.mp3",
1717, 216);
// http://rkmania.me/playlistAction.php?action=addTrack&trck=115976
streamButton.setEnabled(false);
} catch (IOException e) {
Log.e(getClass().getName(), "Error starting to stream audio.", e);
}
}
public class StreamingMediaPlayer {
private static final int INTIAL_KB_BUFFER = 96*10/8;//assume 96kbps*10secs/8bits per byte
private TextView textStreamed;
private ImageButton playButton;
private ProgressBar progressBar;
// Track for display by progressBar
private long mediaLengthInKb, mediaLengthInSeconds;
private int totalKbRead = 0;
// Create Handler to call View updates on the main UI thread.
private final Handler handler = new Handler();
private MediaPlayer mediaPlayer;
private File downloadingMediaFile;
private boolean isInterrupted;
private Context context;
private int counter = 0;
public StreamingMediaPlayer(Context context,TextView textStreamed, ImageButton playButton, Button streamButton,ProgressBar progressBar)
{
this.context = context;
this.textStreamed = textStreamed;
this.playButton = playButton;
this.progressBar = progressBar;
}
/**
* Progressively download the media to a temporary location and update the MediaPlayer as new content becomes available.
*/
public void startStreaming(final String mediaUrl, long mediaLengthInKb, long mediaLengthInSeconds) throws IOException {
this.mediaLengthInKb = mediaLengthInKb;
this.mediaLengthInSeconds = mediaLengthInSeconds;
Runnable r = new Runnable() {
public void run() {
try {
downloadAudioIncrement(mediaUrl);
} catch (IOException e) {
Log.e(getClass().getName(), "Unable to initialize the MediaPlayer for fileUrl=" + mediaUrl, e);
return;
}
}
};
new Thread(r).start();
}
/**
* Download the url stream to a temporary location and then call the setDataSource
* for that local file
*/
public void downloadAudioIncrement(String mediaUrl) throws IOException {
URLConnection cn = new URL(mediaUrl).openConnection();
cn.connect();
InputStream stream = cn.getInputStream();
if (stream == null) {
Log.e(getClass().getName(), "Unable to create InputStream for mediaUrl:" + mediaUrl);
}
downloadingMediaFile = new File(context.getCacheDir(),"downloadingMedia.dat");
// Just in case a prior deletion failed because our code crashed or something, we also delete any previously
// downloaded file to ensure we start fresh. If you use this code, always delete
// no longer used downloads else you'll quickly fill up your hard disk memory. Of course, you can also
// store any previously downloaded file in a separate data cache for instant replay if you wanted as well.
if (downloadingMediaFile.exists()) {
downloadingMediaFile.delete();
}
FileOutputStream out = new FileOutputStream(downloadingMediaFile);
byte buf[] = new byte[16384];
int totalBytesRead = 0, incrementalBytesRead = 0;
do {
int numread = stream.read(buf);
if (numread <= 0)
break;
out.write(buf, 0, numread);
totalBytesRead += numread;
incrementalBytesRead += numread;
totalKbRead = totalBytesRead/1000;
testMediaBuffer();
fireDataLoadUpdate();
} while (validateNotInterrupted());
stream.close();
if (validateNotInterrupted()) {
fireDataFullyLoaded();
}
}
private boolean validateNotInterrupted() {
if (isInterrupted) {
if (mediaPlayer != null) {
mediaPlayer.pause();
//mediaPlayer.release();
}
return false;
} else {
return true;
}
}
/**
* Test whether we need to transfer buffered data to the MediaPlayer.
* Interacting with the MediaPlayer on a non-main UI thread can cause crashes, so perform this using a Handler.
*/
private void testMediaBuffer() {
Runnable updater = new Runnable() {
public void run() {
if (mediaPlayer == null) {
// Only create the MediaPlayer once we have the minimum buffered data
if ( totalKbRead >= INTIAL_KB_BUFFER) {
try {
startMediaPlayer();
} catch (Exception e) {
Log.e(getClass().getName(), "Error copying buffered conent.", e);
}
}
} else if ( mediaPlayer.getDuration() - mediaPlayer.getCurrentPosition() <= 1000 ){
// NOTE: The media player has stopped at the end so transfer any existing buffered data
// We test for < 1second of data because the media player can stop when there is still
// a few milliseconds of data left to play
transferBufferToMediaPlayer();
}
}
};
handler.post(updater);
}
private void startMediaPlayer() {
try {
File bufferedFile = new File(context.getCacheDir(),"playingMedia" + (counter++) + ".dat");
// We double buffer the data to avoid potential read/write errors that could happen if the
// download thread attempted to write at the same time the MediaPlayer was trying to read.
// For example, we can't guarantee that the MediaPlayer won't open a file for playing and leave it locked while
// the media is playing. This would permanently deadlock the file download. To avoid such a deadlock,
// we move the currently loaded data to a temporary buffer file that we start playing while the remaining
// data downloads.
moveFile(downloadingMediaFile,bufferedFile);
Log.e(getClass().getName(),"Buffered File path: " + bufferedFile.getAbsolutePath());
Log.e(getClass().getName(),"Buffered File length: " + bufferedFile.length()+"");
mediaPlayer = createMediaPlayer(bufferedFile);
// We have pre-loaded enough content and started the MediaPlayer so update the buttons & progress meters.
mediaPlayer.start();
startPlayProgressUpdater();
playButton.setEnabled(true);
} catch (IOException e) {
Log.e(getClass().getName(), "Error initializing the MediaPlayer.", e);
return;
}
}
private MediaPlayer createMediaPlayer(File mediaFile)
throws IOException {
MediaPlayer mPlayer = new MediaPlayer();
mPlayer.setOnErrorListener(
new MediaPlayer.OnErrorListener() {
public boolean onError(MediaPlayer mp, int what, int extra) {
Log.e(getClass().getName(), "Error in MediaPlayer: (" + what +") with extra (" +extra +")" );
return false;
}
});
// It appears that for security/permission reasons, it is better to pass a FileDescriptor rather than a direct path to the File.
// Also I have seen errors such as "PVMFErrNotSupported" and "Prepare failed.: status=0x1" if a file path String is passed to
// setDataSource(). So unless otherwise noted, we use a FileDescriptor here.
FileInputStream fis = new FileInputStream(mediaFile);
mPlayer.setDataSource(fis.getFD());
mPlayer.prepare();
return mPlayer;
}
/**
* Transfer buffered data to the MediaPlayer.
* NOTE: Interacting with a MediaPlayer on a non-main UI thread can cause thread-lock and crashes so
* this method should always be called using a Handler.
*/
private void transferBufferToMediaPlayer() {
try {
// First determine if we need to restart the player after transferring data...e.g. perhaps the user pressed pause
boolean wasPlaying = mediaPlayer.isPlaying();
int curPosition = mediaPlayer.getCurrentPosition();
// Copy the currently downloaded content to a new buffered File. Store the old File for deleting later.
File oldBufferedFile = new File(context.getCacheDir(),"playingMedia" + counter + ".dat");
File bufferedFile = new File(context.getCacheDir(),"playingMedia" + (counter++) + ".dat");
// This may be the last buffered File so ask that it be delete on exit. If it's already deleted, then this won't mean anything. If you want to
// keep and track fully downloaded files for later use, write caching code and please send me a copy.
bufferedFile.deleteOnExit();
moveFile(downloadingMediaFile,bufferedFile);
// Pause the current player now as we are about to create and start a new one. So far (Android v1.5),
// this always happens so quickly that the user never realized we've stopped the player and started a new one
mediaPlayer.pause();
// Create a new MediaPlayer rather than try to re-prepare the prior one.
mediaPlayer = createMediaPlayer(bufferedFile);
mediaPlayer.seekTo(curPosition);
// Restart if at end of prior buffered content or mediaPlayer was previously playing.
// NOTE: We test for < 1second of data because the media player can stop when there is still
// a few milliseconds of data left to play
boolean atEndOfFile = mediaPlayer.getDuration() - mediaPlayer.getCurrentPosition() <= 1000;
if (wasPlaying || atEndOfFile){
mediaPlayer.start();
}
// Lastly delete the previously playing buffered File as it's no longer needed.
oldBufferedFile.delete();
}catch (Exception e) {
Log.e(getClass().getName(), "Error updating to newly loaded content.", e);
}
}
private void fireDataLoadUpdate() {
Runnable updater = new Runnable() {
public void run() {
textStreamed.setText((totalKbRead + " Kb read"));
float loadProgress = ((float)totalKbRead/(float)mediaLengthInKb);
progressBar.setSecondaryProgress((int)(loadProgress*100));
}
};
handler.post(updater);
}
private void fireDataFullyLoaded() {
Runnable updater = new Runnable() {
public void run() {
transferBufferToMediaPlayer();
// Delete the downloaded File as it's now been transferred to the currently playing buffer file.
downloadingMediaFile.delete();
textStreamed.setText(("Audio full loaded: " + totalKbRead + " Kb read"));
}
};
handler.post(updater);
}
public MediaPlayer getMediaPlayer() {
return mediaPlayer;
}
public void startPlayProgressUpdater() {
float progress = (((float)mediaPlayer.getCurrentPosition()/1000)/mediaLengthInSeconds);
progressBar.setProgress((int)(progress*100));
if (mediaPlayer.isPlaying()) {
Runnable notification = new Runnable() {
public void run() {
startPlayProgressUpdater();
}
};
handler.postDelayed(notification,1000);
}
}
public void interrupt() {
playButton.setEnabled(false);
isInterrupted = true;
validateNotInterrupted();
}
/**
* Move the file in oldLocation to newLocation.
*/
public void moveFile(File oldLocation, File newLocation)
throws IOException {
if ( oldLocation.exists( )) {
BufferedInputStream reader = new BufferedInputStream( new FileInputStream(oldLocation) );
BufferedOutputStream writer = new BufferedOutputStream( new FileOutputStream(newLocation, false));
try {
byte[] buff = new byte[8192];
int numChars;
while ( (numChars = reader.read( buff, 0, buff.length ) ) != -1) {
writer.write( buff, 0, numChars );
}
} catch( IOException ex ) {
throw new IOException("IOException when transferring " + oldLocation.getPath() + " to " + newLocation.getPath());
} finally {
try {
if ( reader != null ){
writer.close();
reader.close();
}
} catch( IOException ex ){
Log.e(getClass().getName(),"Error closing files when transferring " + oldLocation.getPath() + " to " + newLocation.getPath() );
}
}
} else {
throw new IOException("Old location does not exist when transferring " + oldLocation.getPath() + " to " + newLocation.getPath() );
}
}
}
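For the RTSP URL in the question itself (as opposed to the HTTP audio streaming helper above), it may be worth first trying to let the platform handle the stream directly. A minimal sketch using VideoView is shown below; the layout id and URL are placeholders, and whether playback works still depends on the device's RTSP/codec support, which varies between OS versions:

// Minimal sketch: play an RTSP stream with VideoView (placeholder id and URL).
final VideoView videoView = (VideoView) findViewById(R.id.video_view);
videoView.setVideoURI(Uri.parse("rtsp://example.com/stream.3gp"));
videoView.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
    @Override
    public void onPrepared(MediaPlayer mp) {
        videoView.start();
    }
});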
