Decibel Meter using Media Recorder - android

I was referring to some code that I found previously and tried it myself. It works, however the decibel value measured by the code is extremely high even in a quiet room: it ranged from 0 to 30000. I was expecting the decibel level to be around 30 ~ 40 when I am in a quiet room. Can someone please tell me what's wrong with the code? Maybe the algorithm in the code is wrong, because "soundDB()" is never used. The decibel shown in the app comes from "getAmplitudeEMA()" instead.
import android.app.Activity;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Handler;
import android.util.Log;
import android.widget.TextView;
public class Noise extends Activity {
TextView mStatusView;
MediaRecorder mRecorder;
Thread runner;
private static double mEMA = 0.0;
static final private double EMA_FILTER = 0.6;
final Runnable updater = new Runnable(){
public void run(){
updateTv();
};
};
final Handler mHandler = new Handler();
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.noiselevel);
mStatusView = (TextView) findViewById(R.id.status);
if (runner == null)
{
runner = new Thread(){
public void run()
{
while (runner != null)
{
try
{
Thread.sleep(1000);
Log.i("Noise", "Tock");
} catch (InterruptedException e) { };
mHandler.post(updater);
}
}
};
runner.start();
Log.d("Noise", "start runner()");
}
}
public void onResume()
{
super.onResume();
startRecorder();
}
public void onPause()
{
super.onPause();
stopRecorder();
}
public void startRecorder(){
if (mRecorder == null){
mRecorder = new MediaRecorder();
mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
mRecorder.setOutputFile("/dev/null");
try {
mRecorder.prepare();
}catch (java.io.IOException ioe) {
android.util.Log.e("[Monkey]", "IOException: " +
android.util.Log.getStackTraceString(ioe));
}catch (java.lang.SecurityException e) {
android.util.Log.e("[Monkey]", "SecurityException: " +
android.util.Log.getStackTraceString(e));
}
try{
mRecorder.start();
}catch (java.lang.SecurityException e) {
android.util.Log.e("[Monkey]", "SecurityException: " +
android.util.Log.getStackTraceString(e));
}
//mEMA = 0.0;
}
}
public void stopRecorder() {
if (mRecorder != null) {
mRecorder.stop();
mRecorder.release();
mRecorder = null;
}
}
public void updateTv(){
mStatusView.setText(Double.toString((getAmplitudeEMA())) + " dB");
}
public double soundDb(double ampl){
return 20 * Math.log10(getAmplitudeEMA() / ampl);
}
public double getAmplitude() {
if (mRecorder != null)
return (mRecorder.getMaxAmplitude());
else
return 0;
}
public double getAmplitudeEMA() {
double amp = getAmplitude();
mEMA = EMA_FILTER * amp + (1.0 - EMA_FILTER) * mEMA;
return mEMA;
}
}

The problem is the following line:
mStatusView.setText(Double.toString((getAmplitudeEMA())) + " dB");
You are setting as text the value returned from getAmplitudeEMA() but this is not a dB value, it's the amplitude returned by mRecorder.getMaxAmplitude() which is then modified by getAmplitudeEMA().
To "convert" amplitude to dB you need to include your soundDb(double ampl) method in this way:
public void updateTv(){
mStatusView.setText(Double.toString(soundDb(getAmplitudeEMA()))+ "dB");
}

You have to call the function soundDb, which transforms the amplitude into dB FS.
mStatusView.setText(soundDb(32767.0) + " dB");

Related

getAmplitude isn't working

When I run it, the TextView shows the value "0.0".
I got the code mostly from this web site.
I added the following permission:
<uses-permission android:name="android.permission.RECORD_AUDIO" />
I also granted the microphone permission under Settings → Applications → Mic,
but it still returns the value "0.0".
How does getAmplitude() really work?
Can someone explain?
// Smoothing factor for the amplitude EMA (0..1; larger reacts faster).
static final private double EMA_FILTER = 0.6;
// Shows the current smoothed amplitude reading.
TextView text;
// Start/stop toggle button ("BAŞLAT"/"DURDUR").
Button buton;
// Main-thread handler used to re-post the sampling runnable.
Handler handler;
// True while a measurement is running.
boolean state = false;
#Override
protected void onCreate(Bundle savedInstanceState){
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_karpuz_sec);
text = (TextView) findViewById(R.id.text);
buton = (Button) findViewById(R.id.button);
handler = new Handler();
buton.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View view) {
if(!state){
try {
start();
} catch (IOException e) {
e.printStackTrace();
}
}
else
stop();
}
});
}
Runnable runnable = new Runnable() {
#Override
public void run() {
while (!state){
text.setText(Double.toString(getAmplitudeEMA()));;
handler.postDelayed(this,100);
}
}
};
// Active recorder while a measurement runs; null otherwise.
private MediaRecorder mRecorder = null;
// Exponential moving average of the scaled amplitude.
private double mEMA = 0.0;
// Starts a throw-away recording to /dev/null so getMaxAmplitude() can be
// polled; no-op if already running. Flips state, updates the button label
// and shows an initial reading.
public void start() throws IOException {
if (mRecorder == null) {
mRecorder = new MediaRecorder();
mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
// Audio is discarded; only the amplitude side channel is wanted.
mRecorder.setOutputFile("/dev/null");
mRecorder.prepare();
mRecorder.start();
// Reset the moving average for the new session.
mEMA = 0.0;
state = true;
Toast.makeText(getApplicationContext(),"Ses Ölçümü Başladı",Toast.LENGTH_SHORT).show();
buton.setText("DURDUR");
// NOTE(review): getMaxAmplitude() returns 0 on its first call after start,
// so this initial reading is always 0.0.
text.setText(Double.toString(getAmplitudeEMA()));
}
}
/** Stops an active measurement, tears down the recorder and resets the UI. */
public void stop() {
    if (mRecorder == null) {
        return; // nothing is recording
    }
    mRecorder.stop();
    mRecorder.release();
    mRecorder = null;
    state = false;
    Toast.makeText(getApplicationContext(), "Ses Ölçümü Durdu", Toast.LENGTH_SHORT).show();
    buton.setText("BAŞLAT");
}
/** Peak amplitude since the last poll, scaled by an empirical 2700.0 divisor;
 *  0 when no recorder is active. */
public double getAmplitude() {
    return (mRecorder == null) ? 0 : mRecorder.getMaxAmplitude() / 2700.0;
}
// Folds the newest amplitude sample into the running exponential moving average.
public double getAmplitudeEMA() {
double amp = getAmplitude();
mEMA = EMA_FILTER * amp + (1.0 - EMA_FILTER) * mEMA;
return mEMA;
}
}
From the MediaRecorder documentation, getMaxAmplitude() states:
Returns the maximum absolute amplitude that was sampled since the last call to this method. Call this only after the setAudioSource().
Returns : the maximum absolute amplitude measured since the last call, or 0 when called for the first time
This is why you get 0 the first time you call it in your onCreate().
Request it on a regular basis if you want as this is what suggests the presence of your Runnable, here is an example scheduling your getAmplitudeEMA() every 500ms :
private Timer mTimer = new Timer();
.....
mTimer.scheduleAtFixedRate(new TimerTask() {
public void run() {
runOnUiThread(new Runnable() {
@Override
public void run() {
text.setText(Double.toString(getAmplitudeEMA()));
}
});
}
}, 0, 500);
And in stop() / onPause() :
mTimer.cancel();
mTimer.purge();

MediaPlayer get max aplitude

I want to implement a VU meter in my recording app when recording AND playing. I know how to do it when recording, but the problem appears when playing. How can I grab the max amplitude at a given point in time from android MediaPlayer? I know there is a way because I saw some widget that do the same when playing some music on my device. I don't want to use android Visualizer for rendering, I want to make my own VU meter to work for devices with OS 2.3+. Basically, I need getMaxAmplitude for MediaPlayer.
//first of all import this library
import android.media.MediaRecorder;
// Recorder that supplies getMaxAmplitude(); null while not recording.
private MediaRecorder mRecorder = null;
// Peak amplitude since the last call, scaled by an empirical 2700.0 divisor.
// Returns 0 before start(), after stop(), and on the very first call.
public double getAmplitude() {
if (mRecorder != null)
return (mRecorder.getMaxAmplitude()/2700.0);
else
return 0;
}
//if you need further detail here is my class which is doing same thing
package com.spaidevelopers.noisealert;
import java.io.IOException;
import android.media.MediaRecorder;
public class SoundMeter {
    // Measures the ambient sound level by recording to /dev/null and polling
    // MediaRecorder.getMaxAmplitude(), smoothed with an exponential moving average.
    static final private double EMA_FILTER = 0.6;
    private MediaRecorder mRecorder = null;
    private double mEMA = 0.0;

    /** Starts the throw-away recording; no-op if already started. */
    public void start() {
        if (mRecorder == null) {
            mRecorder = new MediaRecorder();
            mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
            mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
            mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
            mRecorder.setOutputFile("/dev/null");
            try {
                mRecorder.prepare();
            } catch (IllegalStateException e) {
                // FIX: the original swallowed the exception and fell through to
                // mRecorder.start(), which throws IllegalStateException after a
                // failed prepare(). Clean up so a later start() can retry.
                e.printStackTrace();
                mRecorder.release();
                mRecorder = null;
                return;
            } catch (IOException e) {
                e.printStackTrace();
                mRecorder.release();
                mRecorder = null;
                return;
            }
            mRecorder.start();
            // Fresh session: reset the moving average.
            mEMA = 0.0;
        }
    }

    /** Stops and releases the recorder; safe to call when not recording. */
    public void stop() {
        if (mRecorder != null) {
            mRecorder.stop();
            mRecorder.release();
            mRecorder = null;
        }
    }

    /** Peak amplitude since the last poll, scaled by an empirical 2700.0 divisor. */
    public double getAmplitude() {
        if (mRecorder != null)
            return (mRecorder.getMaxAmplitude() / 2700.0);
        else
            return 0;
    }

    /** Exponential moving average of getAmplitude() to damp jitter. */
    public double getAmplitudeEMA() {
        double amp = getAmplitude();
        mEMA = EMA_FILTER * amp + (1.0 - EMA_FILTER) * mEMA;
        return mEMA;
    }
}

Change back ground color according to Voice intensity

Currently my Code is detecting voice intensity (using media recorder) i want to change background color to white when there is no voice , when user speak then background color must b light or dark according to voice intensity
here is my code im having problem to make color light and dark according to voice intensity .
// Posted once a second by the background thread: refreshes the amplitude
// TextView, then darkens the background when the reading exceeds 1000.
final Runnable updater = new Runnable(){
public void run(){
updateTv();
TextView tv = (TextView)findViewById(R.id.status);
// NOTE(review): parseInt assumes updateTv() wrote a bare integer; this will
// throw NumberFormatException if the text ever carries a unit suffix or has
// not been set yet — verify the layout's initial text.
int tvStatus= Integer.parseInt(tv.getText().toString());
if(tvStatus > 1000)
updateBackground();
else
mScreen.setBackgroundColor(Color.WHITE);
};
};
// Handler bound to the UI thread so the background poller can touch views.
final Handler mHandler = new Handler();
// Wires up the views and starts a background thread that posts the updater
// to the main thread once per second.
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
mStatusView = (TextView) findViewById(R.id.status);
mScreen = (LinearLayout) findViewById(R.id.myScreen);
if (runner == null)
{
runner = new Thread(){
public void run()
{
// Loop until runner is cleared (never cleared in the code shown —
// NOTE(review): the thread outlives the Activity; verify shutdown).
while (runner != null)
{
try
{
Thread.sleep(1000);
Log.i("Noise", "Tock");
} catch (InterruptedException e) { };
mHandler.post(updater);
}
}
};
runner.start();
Log.d("Noise", "start runner()");
}
}
/**
 * FIX: the original declared "int color" but never assigned it, which does not
 * compile ("variable color might not have been initialized"), and the Random
 * was unused. Map the amplitude onto a grayscale instead: white at silence,
 * black at full scale, so louder input gives a darker background.
 */
private void updateBackground()
{
    final int MAX_RGB = 255;
    final int MAX_AMPLITUDE = 32767; // getMaxAmplitude() full scale (16-bit)
    int ampl = getAmplitude();
    if (ampl > MAX_AMPLITUDE)
        ampl = MAX_AMPLITUDE;
    int shade = MAX_RGB - (int) ((float) ampl / MAX_AMPLITUDE * MAX_RGB);
    mScreen.setBackgroundColor(Color.rgb(shade, shade, shade));
}
// Resume sampling when the Activity returns to the foreground.
public void onResume()
{
super.onResume();
startRecorder();
}
// Release the microphone whenever the Activity leaves the foreground.
public void onPause()
{
super.onPause();
stopRecorder();
}
// Configures a throw-away recording to /dev/null so getMaxAmplitude() works;
// no-op if a recorder already exists.
public void startRecorder(){
if (mRecorder == null)
{
mRecorder = new MediaRecorder();
mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
// Audio data is discarded; only the amplitude side channel is used.
mRecorder.setOutputFile("/dev/null");
try
{
mRecorder.prepare();
}catch (java.io.IOException ioe) {
android.util.Log.e("[Monkey]", "IOException: " + android.util.Log.getStackTraceString(ioe));
}catch (java.lang.SecurityException e) {
android.util.Log.e("[Monkey]", "SecurityException: " + android.util.Log.getStackTraceString(e));
}
// NOTE(review): start() is attempted even if prepare() failed above, which
// would throw IllegalStateException (uncaught here) — verify error handling.
try
{
mRecorder.start();
}catch (java.lang.SecurityException e) {
android.util.Log.e("[Monkey]", "SecurityException: " + android.util.Log.getStackTraceString(e));
}
//mEMA = 0.0;
}
}
// Stops and releases the recorder; safe to call when not recording.
public void stopRecorder() {
if (mRecorder != null) {
mRecorder.stop();
mRecorder.release();
mRecorder = null;
}
}
// Shows the current raw amplitude (0..32767) in the status TextView; the
// updater runnable parses this text back as an integer, so no unit suffix.
public void updateTv(){
mStatusView.setText(Integer.toString((getAmplitude())));
}
// NOTE(review): unused in this class, and the parameter is used as the
// *denominator* (reference) while a fresh EMA sample is the numerator —
// presumably this was meant to convert an amplitude to dB against a fixed
// reference; verify the intended semantics before relying on it.
public double soundDb(double ampl){
return 20 * Math.log10(getAmplitudeEMA() / ampl);
}
/** Peak amplitude since the last poll; 0 when no recorder is active. */
public int getAmplitude() {
    return (mRecorder == null) ? 0 : mRecorder.getMaxAmplitude();
}
Your current implementation of updateBackground() uses color without initializing it:
private void updateBackground() {
int ampl = (int) getAmplitude();
// BUG: 'color' is declared but never assigned, so the method does not compile.
int color;
// Also unused.
Random rnd = new Random();
mScreen.setBackgroundColor(color);
}
If the minimum amplitude is 0 and the maximum amplitude is MAX_AMPLITUDE, and if you want white to represent minimum amplitude, and black, maximum amplitude, then something like this should do the trick:
// Grayscale mapping: white (255,255,255) at amplitude 0, black at full scale.
private static final int MAX_RGB = 255;
// 16-bit full scale of MediaRecorder.getMaxAmplitude().
private static final int MAX_AMPLITUDE = 32767;
private void updateBackground() {
float amplF = (float) getAmplitude();
int ampl = MAX_RGB - (int) (amplF / MAX_AMPLITUDE * MAX_RGB);
mScreen.setBackgroundColor(Color.rgb(ampl, ampl, ampl));
}
If you find that the highest amplitude values you see in practice are significantly lower than 32767, you can account for this with:
private static final int MAX_RGB = 255;
private static final int int MAX_AMPLITUDE = 1500; // Set to some reasonable value
private void updateBackground() {
int actual = getAmplitude();
if (actual > MAX_AMPLITUDE)
actual = MAX_AMPLITUDE;
float amplF = (float) actual;
int ampl = MAX_RGB - (int) (amplF / MAX_AMPLITUDE * MAX_RGB);
mScreen.setBackgroundColor(Color.rgb(ampl, ampl, ampl));
}
If you do that, it would probably be a good idea to make MAX_AMPLITUDE no longer a constant, and make it configurable by offering a "calibrate" option, where users can make whatever they consider to be a loud noise.

Problem with AudioRecord class

I am recording audio using the AudioRecord class. I want to record audio into a particular file in my assets folder or resource folder. I think there is no problem with the recording, but while reading the buffer it shows a problem (it throws a NullPointerException). Can anyone suggest what the problem might be?
You cannot save a file inside the assets folder. The assets folder is read-only; instead, you will have to save the file in the internal or external storage of your device.
Below there is code to record the media file.
package com.example.media.record;
import java.io.File;
import java.io.IOException;
import android.app.Activity;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnCompletionListener;
import android.media.MediaPlayer.OnErrorListener;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.Handler.Callback;
import android.os.Message;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.ProgressBar;
import android.widget.Toast;
public class MediaRecorderActivity extends Activity implements OnClickListener {
Button btnPlay;
Button btnRecord;
ProgressBar progress;
MediaPlayer mPlayer;
MediaRecorder mRecorder;
String mFileName;
boolean mStartRecording = true;
boolean mStartPlaying = true;
Thread mThreadProgress;
int duration = 1;
private void onRecord(boolean start) {
if(start) {
startRecording();
}else {
stopRecording();
}
}
private void onPlay(boolean start) {
if(start) {
startPlaying();
}else {
stopPlaying();
}
}
private void startRecording() {
mRecorder = new MediaRecorder();
mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mRecorder.setOutputFile(mFileName);
mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
mRecorder.setOnErrorListener(errorListenerForRecorder);
try {
mRecorder.prepare();
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
try {
mRecorder.start();
} catch (Exception e) {
Toast.makeText(getApplicationContext(), "Error :: " + e.getMessage(), Toast.LENGTH_LONG).show();
}
}
private void stopRecording() {
if(mRecorder != null) {
mRecorder.stop();
mRecorder.release();
mRecorder = null;
}
}
private void startPlaying() {
mPlayer = new MediaPlayer();
try {
mPlayer.setDataSource(mFileName);
mPlayer.setOnCompletionListener(completionListener);
mPlayer.setOnErrorListener(errorListenerForPlayer);
mPlayer.prepare();
mPlayer.start();
} catch (IllegalArgumentException e) {
e.printStackTrace();
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
private void stopPlaying() {
if(mPlayer != null) {
mPlayer.stop();
mPlayer.release();
mPlayer = null;
}
}
OnCompletionListener completionListener = new OnCompletionListener() {
#Override
public void onCompletion(MediaPlayer mp) {
btnRecord.setEnabled(true);
btnPlay.setText("Start playing");
mStartPlaying = !mStartPlaying;
}
};
OnErrorListener errorListenerForPlayer = new OnErrorListener() {
#Override
public boolean onError(MediaPlayer mp, int what, int extra) {
Toast.makeText(getApplicationContext(), "Error during playing file", 3000).show();
return false;
}
};
android.media.MediaRecorder.OnErrorListener errorListenerForRecorder = new android.media.MediaRecorder.OnErrorListener() {
#Override
public void onError(MediaRecorder mr, int what, int extra) {
Toast.makeText(getApplicationContext(), "Error during recoding file", 3000).show();
}
};
/** Called when the activity is first created. */
#Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
btnPlay = (Button)findViewById(R.id.btnPlay);
btnRecord = (Button)findViewById(R.id.btnRecord);
progress = (ProgressBar)findViewById(R.id.progressRecorder);
mFileName = Environment.getExternalStorageDirectory().getAbsolutePath();
mFileName += "/audiorecordtest.3gp";
File file = new File(mFileName);
if(!file.exists()) btnPlay.setEnabled(false);
btnPlay.setOnClickListener(this);
btnRecord.setOnClickListener(this);
}
#Override
protected void onPause() {
super.onPause();
if(mRecorder != null) {
mRecorder.stop();
}
if(mPlayer != null) {
mPlayer.pause();
}
}
#Override
protected void onResume() {
super.onResume();
if(mRecorder != null) {
mRecorder.start();
}
if(mPlayer != null) {
mPlayer.start();
}
}
#Override
protected void onStop() {
super.onStop();
if(mRecorder != null) {
mRecorder.stop();
}
if(mPlayer != null) {
mPlayer.stop();
}
}
#Override
protected void onDestroy() {
super.onDestroy();
if(mRecorder != null) {
mRecorder.release();
mRecorder = null;
}
if(mPlayer != null) {
mPlayer.release();
mPlayer = null;
}
}
#Override
public void onClick(View v) {
if(v == btnPlay) {
onPlay(mStartPlaying);
if(mStartPlaying) {
duration = mPlayer.getDuration();
mThreadProgress = new ThreadProgress();
mThreadProgress.start();
((Button)v).setText("Stop Playing");
btnRecord.setEnabled(false);
}
else {
((Button)v).setText("Start Playing");
btnRecord.setEnabled(true);
if(mThreadProgress != null && !mThreadProgress.isAlive()) mThreadProgress.stop();
// t.interrupt();
}
mStartPlaying = !mStartPlaying;
} else if(v == btnRecord) {
onRecord(mStartRecording);
if(mStartRecording) {
mThreadProgress = new ThreadProgress();
mThreadProgress.start();
((Button)v).setText("Stop Recording");
btnPlay.setEnabled(false);
// t.start();
}
else {
((Button)v).setText("Start Recording");
btnPlay.setEnabled(true);
// t.interrupt();
if(mThreadProgress != null && !mThreadProgress.isAlive()) mThreadProgress.stop();
}
mStartRecording = !mStartRecording;
}
}
Handler handler = new Handler(new Callback() {
#Override
public boolean handleMessage(final Message msg) {
if(msg.what == 0) {
runOnUiThread(new Runnable() {
public void run() {
progress.setProgress(msg.arg1);
}
});
}
return false;
}
});
public class ThreadProgress extends Thread implements Runnable {
public int i = 0;
#Override
public void run() {
while((!this.isInterrupted() && mPlayer != null && mPlayer.isPlaying()) || (!this.isInterrupted() && mRecorder != null)) {
try {
if(duration == 1) i+=1;
else i += 100000 /duration;
Message message = new Message();
message.what = 0;
message.arg1 = i;
handler.sendMessage(message);
Thread.sleep(1000);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
}
}
This is the example you can record audio as well as play audio
You can store recorded file in following places
1) File directory of your app
2) External directory(SD card)
3) Network

How to put media controller in streaming video?

I am using SurfaceView to play streaming video and Media Player for streaming video. Can any one tell me how can I place media controller in surfaceview?
Here's my code:
package com.menu.donationvideos;
import android.app.Activity;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnBufferingUpdateListener;
import android.media.MediaPlayer.OnCompletionListener;
import android.media.MediaPlayer.OnPreparedListener;
import android.media.MediaPlayer.OnVideoSizeChangedListener;
import android.os.Bundle;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.widget.MediaController;
import android.widget.MediaController.MediaPlayerControl;
import android.widget.Toast;
import android.widget.VideoView;
import com.menu.R;
public class VideoPlay extends Activity implements OnBufferingUpdateListener, OnCompletionListener,
OnPreparedListener, OnVideoSizeChangedListener, SurfaceHolder.Callback
{
private static final String TAG = "MediaPlayerDemo";
private int mVideoWidth;
private int mVideoHeight;
SurfaceHolder holder;
VideoInfo videoInfo;
String pos,videoURL;
private MediaPlayer mMediaPlayer;
private SurfaceView mPreview;
private String path;
private boolean mIsVideoSizeKnown = false;
private boolean mIsVideoReadyToBePlayed = false;
public void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.video_play);
Bundle bundle=new Bundle();
bundle=this.getIntent().getExtras();
pos=bundle.getString("position");
videoInfo=VideoList.m_video.get(Integer.parseInt(pos));
videoURL=videoInfo.getVideoFile();
try
{
mPreview = (SurfaceView)findViewById(R.id.mySurfaceView);
holder=mPreview.getHolder();
holder.addCallback(this);
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
catch(Exception e)
{
e.printStackTrace();
}
}
private void playVideo()
{
doCleanUp();
try
{
path = videoURL;
if (path == "")
{
Toast.makeText(VideoPlay.this,"URL Not found", Toast.LENGTH_LONG).show();
}
mMediaPlayer = new MediaPlayer();
mMediaPlayer.setDataSource(path);
mMediaPlayer.setDisplay(holder);
mMediaPlayer.prepare();
mMediaPlayer.setOnBufferingUpdateListener(this);
mMediaPlayer.setOnCompletionListener(this);
mMediaPlayer.setOnPreparedListener(this);
mMediaPlayer.setScreenOnWhilePlaying(true);
mMediaPlayer.setOnVideoSizeChangedListener(this);
mMediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
}
catch (Exception e)
{
e.printStackTrace();
}
}
public void onBufferingUpdate(MediaPlayer arg0, int percent)
{
Log.d(TAG,"Buffering:"+percent);
}
public void onCompletion(MediaPlayer arg0)
{
Log.d(TAG, "onCompletion called");
finish();
}
public void onVideoSizeChanged(MediaPlayer mp, int width, int height)
{
Log.v(TAG, "onVideoSizeChanged called");
if (width == 0 || height == 0)
{
Log.e(TAG, "invalid video width(" + width + ") or height(" + height + ")");
return;
}
mIsVideoSizeKnown = true;
mVideoWidth = width;
mVideoHeight = height;
if (mIsVideoReadyToBePlayed && mIsVideoSizeKnown)
{
startVideoPlayback();
}
}
public void onPrepared(MediaPlayer mediaplayer)
{
Log.d(TAG, "onPrepared called");
mIsVideoReadyToBePlayed = true;
if (mIsVideoReadyToBePlayed && mIsVideoSizeKnown)
{
startVideoPlayback();
}
}
public void surfaceChanged(SurfaceHolder surfaceholder, int i, int j, int k)
{
Log.d(TAG, "surfaceChanged called");
}
public void surfaceDestroyed(SurfaceHolder surfaceholder)
{
Log.d(TAG, "surfaceDestroyed called");
}
public void surfaceCreated(SurfaceHolder holder)
{
Log.d(TAG, "surfaceCreated called");
playVideo();
}
#Override
protected void onPause()
{
super.onPause();
releaseMediaPlayer();
doCleanUp();
}
#Override
protected void onDestroy()
{
super.onDestroy();
releaseMediaPlayer();
doCleanUp();
}
private void releaseMediaPlayer()
{
if (mMediaPlayer != null)
{
mMediaPlayer.release();
mMediaPlayer = null;
}
}
private void doCleanUp()
{
mVideoWidth = 0;
mVideoHeight = 0;
mIsVideoReadyToBePlayed = false;
mIsVideoSizeKnown = false;
}
private void startVideoPlayback()
{
Log.v(TAG, "startVideoPlayback");
holder.setFixedSize(mVideoWidth, mVideoHeight);
mMediaPlayer.start();
}
}
// Download loop: appends chunks from the network stream until 100 KB have been
// read and the player reports at least 90 s of media, then tears down.
do {
numread = stream.read(buf);
if (numread <= 0)
break;
totalBytesRead += numread;
totalKbRead = totalBytesRead/1000;
Log.e(getClass().getName(),"Buffered byte: " +totalBytesRead+"");
if(totalBytesRead>=100000){
if(mediaPlayer!=null){
int dura = mediaPlayer.getDuration();
if(mediaPlayer.getDuration()>=90000){
isInterrupted =false;
stream.close();
// BUG: flush() is called AFTER close() — on a closed stream it throws
// IOException (and any buffered bytes are already gone). flush() must
// come before close(). 'dura' above is also computed but never used.
out.close();
out.flush();
break;
}
}
}
This is my code, but I realized that the media player doesn't start until the streaming is in progress.
I would suggest you take a look at the source code of VideoView, which combines everything in one place (or use VideoView instead of MediaPlayer itself).
Hey Nirav, try this code for media streaming:
package com.pocketjourney.media;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import android.content.Context;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.os.Handler;
import android.util.Log;
import android.widget.Button;
import android.widget.ImageButton;
import android.widget.ProgressBar;
import android.widget.TextView;
/**
* MediaPlayer does not yet support streaming from external URLs so this class provides a pseudo-streaming function
* by downloading the content incrementally & playing as soon as we get enough audio in our temporary storage.
*/
public class StreamingMediaPlayer {
private static final int INTIAL_KB_BUFFER = 96*10/8;//assume 96kbps*10secs/8bits per byte
private TextView textStreamed;
private ImageButton playButton;
private ProgressBar progressBar;
// Track for display by progressBar
private long mediaLengthInKb, mediaLengthInSeconds;
private int totalKbRead = 0;
// Create Handler to call View updates on the main UI thread.
private final Handler handler = new Handler();
private MediaPlayer mediaPlayer;
private File downloadingMediaFile;
private boolean isInterrupted;
private Context context;
private int counter = 0;
public StreamingMediaPlayer(Context context,TextView textStreamed, ImageButton playButton, Button streamButton,ProgressBar progressBar)
{
this.context = context;
this.textStreamed = textStreamed;
this.playButton = playButton;
this.progressBar = progressBar;
}
/**
* Progressivly download the media to a temporary location and update the MediaPlayer as new content becomes available.
*/
public void startStreaming(final String mediaUrl, long mediaLengthInKb, long mediaLengthInSeconds) throws IOException {
this.mediaLengthInKb = mediaLengthInKb;
this.mediaLengthInSeconds = mediaLengthInSeconds;
Runnable r = new Runnable() {
public void run() {
try {
downloadAudioIncrement(mediaUrl);
} catch (IOException e) {
Log.e(getClass().getName(), "Unable to initialize the MediaPlayer for fileUrl=" + mediaUrl, e);
return;
}
}
};
new Thread(r).start();
}
/**
* Download the url stream to a temporary location and then call the setDataSource
* for that local file
*/
public void downloadAudioIncrement(String mediaUrl) throws IOException {
URLConnection cn = new URL(mediaUrl).openConnection();
cn.connect();
InputStream stream = cn.getInputStream();
if (stream == null) {
Log.e(getClass().getName(), "Unable to create InputStream for mediaUrl:" + mediaUrl);
}
downloadingMediaFile = new File(context.getCacheDir(),"downloadingMedia.dat");
// Just in case a prior deletion failed because our code crashed or something, we also delete any previously
// downloaded file to ensure we start fresh. If you use this code, always delete
// no longer used downloads else you'll quickly fill up your hard disk memory. Of course, you can also
// store any previously downloaded file in a separate data cache for instant replay if you wanted as well.
if (downloadingMediaFile.exists()) {
downloadingMediaFile.delete();
}
FileOutputStream out = new FileOutputStream(downloadingMediaFile);
byte buf[] = new byte[16384];
int totalBytesRead = 0, incrementalBytesRead = 0;
do {
int numread = stream.read(buf);
if (numread <= 0)
break;
out.write(buf, 0, numread);
totalBytesRead += numread;
incrementalBytesRead += numread;
totalKbRead = totalBytesRead/1000;
testMediaBuffer();
fireDataLoadUpdate();
} while (validateNotInterrupted());
stream.close();
if (validateNotInterrupted()) {
fireDataFullyLoaded();
}
}
private boolean validateNotInterrupted() {
if (isInterrupted) {
if (mediaPlayer != null) {
mediaPlayer.pause();
//mediaPlayer.release();
}
return false;
} else {
return true;
}
}
/**
* Test whether we need to transfer buffered data to the MediaPlayer.
* Interacting with MediaPlayer on non-main UI thread can causes crashes to so perform this using a Handler.
*/
private void testMediaBuffer() {
Runnable updater = new Runnable() {
public void run() {
if (mediaPlayer == null) {
// Only create the MediaPlayer once we have the minimum buffered data
if ( totalKbRead >= INTIAL_KB_BUFFER) {
try {
startMediaPlayer();
} catch (Exception e) {
Log.e(getClass().getName(), "Error copying buffered conent.", e);
}
}
} else if ( mediaPlayer.getDuration() - mediaPlayer.getCurrentPosition() <= 1000 ){
// NOTE: The media player has stopped at the end so transfer any existing buffered data
// We test for < 1second of data because the media player can stop when there is still
// a few milliseconds of data left to play
transferBufferToMediaPlayer();
}
}
};
handler.post(updater);
}
private void startMediaPlayer() {
try {
File bufferedFile = new File(context.getCacheDir(),"playingMedia" + (counter++) + ".dat");
// We double buffer the data to avoid potential read/write errors that could happen if the
// download thread attempted to write at the same time the MediaPlayer was trying to read.
// For example, we can't guarantee that the MediaPlayer won't open a file for playing and leave it locked while
// the media is playing. This would permanently deadlock the file download. To avoid such a deadloack,
// we move the currently loaded data to a temporary buffer file that we start playing while the remaining
// data downloads.
moveFile(downloadingMediaFile,bufferedFile);
Log.e(getClass().getName(),"Buffered File path: " + bufferedFile.getAbsolutePath());
Log.e(getClass().getName(),"Buffered File length: " + bufferedFile.length()+"");
mediaPlayer = createMediaPlayer(bufferedFile);
// We have pre-loaded enough content and started the MediaPlayer so update the buttons & progress meters.
mediaPlayer.start();
startPlayProgressUpdater();
playButton.setEnabled(true);
} catch (IOException e) {
Log.e(getClass().getName(), "Error initializing the MediaPlayer.", e);
return;
}
}
private MediaPlayer createMediaPlayer(File mediaFile)
throws IOException {
MediaPlayer mPlayer = new MediaPlayer();
mPlayer.setOnErrorListener(
new MediaPlayer.OnErrorListener() {
public boolean onError(MediaPlayer mp, int what, int extra) {
Log.e(getClass().getName(), "Error in MediaPlayer: (" + what +") with extra (" +extra +")" );
return false;
}
});
// It appears that for security/permission reasons, it is better to pass a FileDescriptor rather than a direct path to the File.
// Also I have seen errors such as "PVMFErrNotSupported" and "Prepare failed.: status=0x1" if a file path String is passed to
// setDataSource(). So unless otherwise noted, we use a FileDescriptor here.
FileInputStream fis = new FileInputStream(mediaFile);
mPlayer.setDataSource(fis.getFD());
mPlayer.prepare();
return mPlayer;
}
/**
* Transfer buffered data to the MediaPlayer.
* NOTE: Interacting with a MediaPlayer on a non-main UI thread can cause thread-lock and crashes so
* this method should always be called using a Handler.
*/
private void transferBufferToMediaPlayer() {
try {
// First determine if we need to restart the player after transferring data...e.g. perhaps the user pressed pause
boolean wasPlaying = mediaPlayer.isPlaying();
int curPosition = mediaPlayer.getCurrentPosition();
// Copy the currently downloaded content to a new buffered File. Store the old File for deleting later.
File oldBufferedFile = new File(context.getCacheDir(),"playingMedia" + counter + ".dat");
File bufferedFile = new File(context.getCacheDir(),"playingMedia" + (counter++) + ".dat");
// This may be the last buffered File so ask that it be delete on exit. If it's already deleted, then this won't mean anything. If you want to
// keep and track fully downloaded files for later use, write caching code and please send me a copy.
bufferedFile.deleteOnExit();
moveFile(downloadingMediaFile,bufferedFile);
// Pause the current player now as we are about to create and start a new one. So far (Android v1.5),
// this always happens so quickly that the user never realized we've stopped the player and started a new one
mediaPlayer.pause();
// Create a new MediaPlayer rather than try to re-prepare the prior one.
mediaPlayer = createMediaPlayer(bufferedFile);
mediaPlayer.seekTo(curPosition);
// Restart if at end of prior buffered content or mediaPlayer was previously playing.
// NOTE: We test for < 1second of data because the media player can stop when there is still
// a few milliseconds of data left to play
boolean atEndOfFile = mediaPlayer.getDuration() - mediaPlayer.getCurrentPosition() <= 1000;
if (wasPlaying || atEndOfFile){
mediaPlayer.start();
}
// Lastly delete the previously playing buffered File as it's no longer needed.
oldBufferedFile.delete();
}catch (Exception e) {
Log.e(getClass().getName(), "Error updating to newly loaded content.", e);
}
}
private void fireDataLoadUpdate() {
Runnable updater = new Runnable() {
public void run() {
textStreamed.setText((totalKbRead + " Kb read"));
float loadProgress = ((float)totalKbRead/(float)mediaLengthInKb);
progressBar.setSecondaryProgress((int)(loadProgress*100));
}
};
handler.post(updater);
}
private void fireDataFullyLoaded() {
Runnable updater = new Runnable() {
public void run() {
transferBufferToMediaPlayer();
// Delete the downloaded File as it's now been transferred to the currently playing buffer file.
downloadingMediaFile.delete();
textStreamed.setText(("Audio full loaded: " + totalKbRead + " Kb read"));
}
};
handler.post(updater);
}
public MediaPlayer getMediaPlayer() {
return mediaPlayer;
}
public void startPlayProgressUpdater() {
float progress = (((float)mediaPlayer.getCurrentPosition()/1000)/mediaLengthInSeconds);
progressBar.setProgress((int)(progress*100));
if (mediaPlayer.isPlaying()) {
Runnable notification = new Runnable() {
public void run() {
startPlayProgressUpdater();
}
};
handler.postDelayed(notification,1000);
}
}
public void interrupt() {
playButton.setEnabled(false);
isInterrupted = true;
validateNotInterrupted();
}
/**
* Move the file in oldLocation to newLocation.
*/
public void moveFile(File oldLocation, File newLocation)
throws IOException {
if ( oldLocation.exists( )) {
BufferedInputStream reader = new BufferedInputStream( new FileInputStream(oldLocation) );
BufferedOutputStream writer = new BufferedOutputStream( new FileOutputStream(newLocation, false));
try {
byte[] buff = new byte[8192];
int numChars;
while ( (numChars = reader.read( buff, 0, buff.length ) ) != -1) {
writer.write( buff, 0, numChars );
}
} catch( IOException ex ) {
throw new IOException("IOException when transferring " + oldLocation.getPath() + " to " + newLocation.getPath());
} finally {
try {
if ( reader != null ){
writer.close();
reader.close();
}
} catch( IOException ex ){
Log.e(getClass().getName(),"Error closing files when transferring " + oldLocation.getPath() + " to " + newLocation.getPath() );
}
}
} else {
throw new IOException("Old location does not exist when transferring " + oldLocation.getPath() + " to " + newLocation.getPath() );
}
}
}

Categories

Resources