How to detect when an AudioTrack has finished playing? It's not as simple as it sounds!
I need to play one audio buffer immediately after the other (a queue), but this code is not working.
When I trigger it twice, the first track never ends and both sounds play at the same time.
private void getNextAudio() {
    try {
        byte[] buffer = playlist.poll();
        if (buffer != null) {
            AudioTrack myatrack = getAudioTrack(buffer);
            myatrack.setPlaybackPositionUpdateListener(new OnPlaybackPositionUpdateListener() {
                public void onPeriodicNotification(AudioTrack track) {
                }
                public void onMarkerReached(AudioTrack track) {
                    Log.d(TAG, "onMarker - state: " + track.getPlayState());
                    if (track.getPlayState() == AudioTrack.PLAYSTATE_STOPPED) {
                        track.flush();
                        track.stop();
                        track.release();
                        getNextAudio();
                    }
                }
            });
            if (myatrack != null) {
                // play the audio
                myatrack.play();
            }
        } else {
            Log.d(TAG, "Finish");
        }
    } catch (NoSuchElementException ex) {
        Log.d(TAG, "No such element");
    }
}
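For reference, onMarkerReached is only delivered once a notification marker has been set with setNotificationMarkerPosition(); if getAudioTrack() does not already do that, a minimal sketch of the missing step (assuming 16-bit mono PCM, so one frame is two bytes) would be:

AudioTrack myatrack = getAudioTrack(buffer);
// Sketch only: place the end-of-playback marker at the last frame of this buffer.
// Redundant if getAudioTrack() already calls setNotificationMarkerPosition().
int frames = buffer.length / 2; // bytes -> frames for 16-bit mono PCM
myatrack.setNotificationMarkerPosition(frames);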
Thanks again!
Mateus
Related
I have a difficult issue with audio recording on Android. I use AudioTrack to play back my voice while I speak into my phone. I want to record only part of my voice, starting when I press the record button. For example, I may speak for 10 seconds, but I only want to record while the record button is held, say from the 3rd to the 8th second. However, the phone needs to keep playing my voice the whole time I am speaking (from the 1st to the 10th second).
Currently, I use a thread to play my sound, as in the following code. I created a flag in the recording thread to decide when to record: when I press the record button the flag is set to true, and when I press stop it is set to false and the data is written to a file.
public class AudioSoundThread extends Thread {
private short[] audioBuffer;
private boolean isRecording = false;
private boolean isSounding = true;
private AudioRecordingHandler handler = null;
private AudioRecord record;
private AudioTrack track;
public AudioSoundThread(AudioTrack mtrack,AudioRecord mrecord,short[] maudioBuffer, AudioRecordingHandler handler) {
this.handler = handler;
this.audioBuffer = maudioBuffer;
this.record=mrecord;
this.track=mtrack;
}
@Override
public void run() {
record.startRecording();
DataOutputStream output =null;
if(isRecording){
output = prepareWriting();
if (output == null) { return; }
}
track.play();
///////////Play during recording
int readSize =0;
while (isSounding) {
readSize=record.read(audioBuffer, 0, audioBuffer.length);
if ((readSize == AudioRecord.ERROR_INVALID_OPERATION) ||
(readSize == AudioRecord.ERROR_BAD_VALUE) ||
(readSize <= 0)) {
continue;
}
if(AudioRecord.ERROR_INVALID_OPERATION != readSize){
track.write(audioBuffer, 0, readSize);
}
if(isRecording)
write(output,readSize);
//Finished to write
if(!isRecording&&output!=null)
{
finishWriting(output);
File waveFile = getFile("wav");
try {
rawToWave(mRecording, waveFile);
deleteTempFile(mRecording);
} catch (IOException e) {
Log.e("Error writing file : ", e.getMessage());
}
}
}
record.stop();
record.release();
}
public synchronized void stopSound() {
isSounding = false;
}
public synchronized void startSound() {
isSounding = true;
}
public synchronized void startRecordingFlag() {
isRecording = true;
}
public synchronized void stopRecording() {
isRecording = false;
}
private void finishWriting(DataOutputStream out) {
try {
out.close();
} catch (IOException e) {
e.printStackTrace();
if (handler != null) {
handler.onRecordingError();
}
}
}
private DataOutputStream prepareWriting() {
if (mRecording.exists()) { mRecording.delete(); }
DataOutputStream out = null;
try {
out = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(mRecording)));
} catch (FileNotFoundException e) {
e.printStackTrace();
if (handler != null) {
handler.onRecordingError();
}
}
return out;
}
In my main activity, I have buttons to play my sound and to start and stop recording:
private AudioSoundThread recordingThread;
//Play button
btnPlay = (Button) findViewById(R.id.btnPlay);
btnPlay.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
startSounding();
}
});
//Record button
btnRecord = (Button) findViewById(R.id.btnRecord);
btnRecord.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
//record();
recordingThread.startRecordingFlag();
}
});
//Stop Record button
btnStopRecord = (Button) findViewById(R.id.btnStopRecord);
btnStopRecord .setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
//record();
recordingThread.stopRecording();
}
});
private void startSounding() {
soundingThread = new AudioSoundThread(track,mRecorder,mBuffer,new AudioRecordingHandler() {
});
soundingThread.start();
}
However, my scheme does not work. I think the flag is not reaching the thread. Could you look at my code and suggest a solution?
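For what it's worth, one thing that stands out in the run() loop above: prepareWriting() is only called once, before track.play(), so if isRecording becomes true later, output is still null and nothing gets written. A minimal sketch of opening and closing the stream lazily inside the loop (reusing your helper names, which I am assuming behave as shown in your class) might look like this:

// Sketch: open the output stream when the flag turns on, close it when it turns off.
DataOutputStream output = null;
while (isSounding) {
    int readSize = record.read(audioBuffer, 0, audioBuffer.length);
    if (readSize <= 0) {
        continue;
    }
    track.write(audioBuffer, 0, readSize);   // keep playing regardless of the flag
    if (isRecording && output == null) {
        output = prepareWriting();           // record button pressed: start a new take
    }
    if (isRecording && output != null) {
        write(output, readSize);             // append this chunk to the file
    }
    if (!isRecording && output != null) {
        finishWriting(output);               // stop button pressed: finalize the file
        output = null;
    }
}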
I am developing an Android app and want to accomplish the feature below.
I want to use my phone's built-in mic to record and, at the same time, have the recorded audio played back through either the phone's speakers or headphones.
Is this feasible? If yes, please help me with it.
Here is a simple recording and playback application.
It uses Android AudioRecord and AudioTrack.
Design:
The recorded audio is written to a buffer and played back from the same buffer. This mechanism runs in a loop (on an Android thread) controlled by buttons.
Code
private String TAG = "AUDIO_RECORD_PLAYBACK";
private boolean isRunning = true;
private Thread m_thread; /* Thread for running the Loop */
private AudioRecord recorder = null;
private AudioTrack track = null;
int bufferSize = 320; /* Buffer for recording data */
byte buffer[] = new byte[bufferSize];
/* Method to Enable/Disable Buttons */
private void enableButton(int id,boolean isEnable){
((Button)findViewById(id)).setEnabled(isEnable);
}
The GUI has two buttons, START and STOP.
Enable the buttons:
enableButton(R.id.StartButton,true);
enableButton(R.id.StopButton,false);
/* Assign Button Click Handlers */
((Button)findViewById(R.id.StartButton)).setOnClickListener(btnClick);
((Button)findViewById(R.id.StopButton)).setOnClickListener(btnClick);
Map the START and STOP buttons to an OnClickListener:
private View.OnClickListener btnClick = new View.OnClickListener() {
@Override
public void onClick(View v) {
switch(v.getId()){
case R.id.StartButton:
{
Log.d(TAG, "======== Start Button Pressed ==========");
isRunning = true;
do_loopback(isRunning);
enableButton(R.id.StartButton,false);
enableButton(R.id.StopButton,true);
break;
}
case R.id.StopButton:
{
Log.d(TAG, "======== Stop Button Pressed ==========");
isRunning = false;
do_loopback(isRunning);
enableButton(R.id.StopButton,false);
enableButton(R.id.StartButton,true);
break;
}
}
}
};
Start the Thread:
private void do_loopback(final boolean flag)
{
m_thread = new Thread(new Runnable() {
public void run() {
run_loop(flag);
}
});
m_thread.start();
}
Method for Initializing AudioRecord and AudioTrack:
public AudioTrack findAudioTrack (AudioTrack track)
{
Log.d(TAG, "===== Initializing AudioTrack API ====");
int m_bufferSize = AudioTrack.getMinBufferSize(8000,
AudioFormat.CHANNEL_OUT_MONO,
AudioFormat.ENCODING_PCM_16BIT);
if (m_bufferSize != AudioTrack.ERROR_BAD_VALUE)
{
track = new AudioTrack(AudioManager.STREAM_MUSIC, 8000,
AudioFormat.CHANNEL_OUT_MONO,
AudioFormat.ENCODING_PCM_16BIT, m_bufferSize,
AudioTrack.MODE_STREAM);
if (track.getState() == AudioTrack.STATE_UNINITIALIZED) {
Log.e(TAG, "===== AudioTrack Uninitialized =====");
return null;
}
}
return track;
}
public AudioRecord findAudioRecord (AudioRecord recorder)
{
Log.d(TAG, "===== Initializing AudioRecord API =====");
int m_bufferSize = AudioRecord.getMinBufferSize(8000,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT);
if (m_bufferSize != AudioRecord.ERROR_BAD_VALUE)
{
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC, 8000,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT, m_bufferSize);
if (recorder.getState() == AudioRecord.STATE_UNINITIALIZED) {
Log.e(TAG, "====== AudioRecord UnInitilaised ====== ");
return null;
}
}
return recorder;
}
The values for findAudioRecord or findAudioTrack can change based on the device.
Please refer to this question.
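As a sketch of what "can change based on the device" means in practice, you could probe a few common sample rates until getMinBufferSize() accepts one (the candidate list below is an assumption, not exhaustive):

/* Sketch: return the first sample rate this device accepts for mono 16-bit capture, or -1. */
private int findSupportedSampleRate() {
    int[] candidateRates = { 8000, 11025, 16000, 22050, 44100, 48000 };
    for (int rate : candidateRates) {
        int size = AudioRecord.getMinBufferSize(rate,
                AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
        if (size > 0) {           // ERROR and ERROR_BAD_VALUE are negative
            return rate;
        }
    }
    return -1;                    // no supported rate found
}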
Code for Running the loop:
public void run_loop (boolean isRunning)
{
/** == If Stop Button is pressed == **/
if (isRunning == false) {
Log.d(TAG, "===== Stop Button is pressed ===== ");
if (AudioRecord.STATE_INITIALIZED == recorder.getState()){
recorder.stop();
recorder.release();
}
if (AudioTrack.STATE_INITIALIZED == track.getState()){
track.stop();
track.release();
}
return;
}
/** ======= Initialize AudioRecord and AudioTrack ======== **/
recorder = findAudioRecord(recorder);
if (recorder == null) {
Log.e(TAG, "======== findAudioRecord : Returned Error! =========== ");
return;
}
track = findAudioTrack(track);
if (track == null) {
Log.e(TAG, "======== findAudioTrack : Returned Error! ========== ");
return;
}
if ((AudioRecord.STATE_INITIALIZED == recorder.getState()) &&
(AudioTrack.STATE_INITIALIZED == track.getState()))
{
recorder.startRecording();
Log.d(TAG, "========= Recorder Started... =========");
track.play();
Log.d(TAG, "========= Track Started... =========");
}
else
{
Log.d(TAG, "==== Initilazation failed for AudioRecord or AudioTrack =====");
return;
}
/** ------------------------------------------------------ **/
/* Recording and Playing in chunks of 320 bytes */
bufferSize = 320;
while (isRunning == true)
{
/* Read & Write to the Device */
recorder.read(buffer, 0, bufferSize);
track.write(buffer, 0, bufferSize);
}
Log.i(TAG, "Loopback exit");
return;
}
Please include the following in AndroidManifest.xml
<uses-permission android:name="android.permission.RECORD_AUDIO" > </uses-permission>
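On Android 6.0 (API 23) and later, RECORD_AUDIO is also a runtime permission, so the manifest entry alone is not enough. A minimal sketch of the runtime request (the request code 1 is arbitrary, and the support-library helpers are assumed to be on the classpath):

// Sketch: ask for RECORD_AUDIO at runtime before starting the loopback.
if (ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO)
        != PackageManager.PERMISSION_GRANTED) {
    ActivityCompat.requestPermissions(this,
            new String[] { Manifest.permission.RECORD_AUDIO },
            1); // handle the outcome in onRequestPermissionsResult()
}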
The above procedure is also possible by writing to and reading from a file using the same APIs.
Why use AudioRecord over MediaRecorder? See here.
The code is tested (on a Google Nexus 5) and works perfectly.
Note: please add some error-checking code for recorder.read and track.write in case they fail. The same applies to findAudioRecord and findAudioTrack.
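A minimal sketch of that error-checking inside the loop, using only the return values of read() and write():

while (isRunning == true)
{
    int read = recorder.read(buffer, 0, bufferSize);
    if (read == AudioRecord.ERROR_INVALID_OPERATION || read == AudioRecord.ERROR_BAD_VALUE) {
        Log.e(TAG, "recorder.read failed: " + read);
        break;
    }
    int written = track.write(buffer, 0, read);
    if (written < 0) {            // write() also returns negative error codes
        Log.e(TAG, "track.write failed: " + written);
        break;
    }
}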
First, in the onCreate method, create a MediaRecorder object and the path to the file where you want to save the recorded data.
String outputFile = Environment.getExternalStorageDirectory().
getAbsolutePath() + "/myrecording.3gp"; // Define outputFile outside onCreate method
MediaRecorder myAudioRecorder = new MediaRecorder(); // Define this outside onCreate method
myAudioRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
myAudioRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
myAudioRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
myAudioRecorder.setOutputFile(outputFile);
These three functions can be called from any button, to start the recording, stop it, and play it back:
public void start(View view){
try {
myAudioRecorder.prepare();
myAudioRecorder.start();
} catch (IllegalStateException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
start.setEnabled(false);
stop.setEnabled(true);
Toast.makeText(getApplicationContext(), "Recording started", Toast.LENGTH_LONG).show();
}
public void stop(View view){
myAudioRecorder.stop();
myAudioRecorder.release();
myAudioRecorder = null;
stop.setEnabled(false);
play.setEnabled(true);
Toast.makeText(getApplicationContext(), "Audio recorded successfully",
Toast.LENGTH_LONG).show();
}
public void play(View view) throws IllegalArgumentException,
SecurityException, IllegalStateException, IOException{
MediaPlayer m = new MediaPlayer();
m.setDataSource(outputFile);
m.prepare();
m.start();
Toast.makeText(getApplicationContext(), "Playing audio", Toast.LENGTH_LONG).show();
}
As I read in the developer documentation here, Android supports the RTSP protocol (for real-time streaming) and also the HTTP/HTTPS Live Streaming draft protocol.
There is also an example here. You will need basic knowledge of a streaming server, such as Red5 or Wowza.
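As a minimal sketch of the client side (the URL below is a placeholder, and the server setup is out of scope), MediaPlayer can play an RTSP or HTTP(S) stream directly:

MediaPlayer player = new MediaPlayer();
try {
    // Placeholder URL: replace with your Red5/Wowza (or other) stream address.
    player.setDataSource("rtsp://example.com/live/stream");
    player.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
        @Override
        public void onPrepared(MediaPlayer mp) {
            mp.start();           // start only once preparation/buffering is done
        }
    });
    player.prepareAsync();        // asynchronous prepare is preferred for network sources
} catch (IOException e) {
    Log.e("Streaming", "Could not open stream", e);
}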
I'm building, inside my existing app, a player using the AudioTrack class in MODE_STATIC, because I want to implement time-stretch and loop-point features.
The code is OK for start() and stop(), but when paused, if I try to resume by calling play() again, the progress bar stays fixed and no audio is played.
Now, from the docs:
public void pause(): Pauses the playback of the audio data. Data that has not been played back will not be discarded. Subsequent calls to play() will play this data back. See flush() to discard this data.
It seems easy enough to understand, but there is something that escapes me.
Can someone help me?
Is it necessary to create boolean variables like start, play, pause, stopAudio, etc.?
If so, what is the point of the methods inherited from the AudioTrack class?
In MODE_STREAM I have implemented the project using such boolean variables, but I need MODE_STATIC.
This is the code, thanks:
Button playpause, stop;
SeekBar posBar;
int sliderval=0;
int headerOffset = 0x2C;
File file =new File(Environment.getExternalStorageDirectory(), "raw.pcm");
int fileSize = (int) file.length();
int dataSize = fileSize-headerOffset ;
byte[] dataArray = new byte[dataSize];
int posValue;
int dataBytesRead = initializeTrack();
AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 44100,
AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT, dataBytesRead , AudioTrack.MODE_STATIC);
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
playpause= (Button)(findViewById(R.id.playpause));
stop= (Button)(findViewById(R.id.stop));
posBar=(SeekBar)findViewById(R.id.posBar);
// create a listener for the slider bar;
OnSeekBarChangeListener listener = new OnSeekBarChangeListener() {
public void onStopTrackingTouch(SeekBar seekBar) { }
public void onStartTrackingTouch(SeekBar seekBar) { }
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
if (fromUser) { sliderval = progress;}
}
};
// set the listener on the slider
posBar.setOnSeekBarChangeListener(listener); }
public void toggleButtonSound(View button)
{
switch (button.getId())
{
case R.id.playpause:
play();
break;
case R.id.stop:
stop();
break;
}
}
private void stop() {
    // Note: getPlayState() (not getState()) is what the PLAYSTATE_* constants compare against.
    if (audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING ||
            audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PAUSED ||
            audioTrack.getPlayState() == AudioTrack.PLAYSTATE_STOPPED) {
        audioTrack.stop();
        resetPlayer();
    }
}
Context context;
private double actualPos=0;
public void pause() {}
public void play()
{
if (audioTrack.getPlayState()==AudioTrack.PLAYSTATE_PLAYING)
{ //Log.i("", "Play pressed in state "+audioTrack.getPlayState());
audioTrack.pause();
}
else if (audioTrack.getPlayState()==AudioTrack.PLAYSTATE_PAUSED)
{ //Log.i("", "Play pressed in state "+audioTrack.getPlayState());
audioTrack.play();
}
else if (audioTrack.getPlayState()==AudioTrack.PLAYSTATE_STOPPED)
{ //Log.i("", "Play pressed in state "+audioTrack.getPlayState());
audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 44100, AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT, dataSize, AudioTrack.MODE_STATIC);
audioTrack.write(dataArray, 0, dataBytesRead);
audioTrack.play();
}
posBar.setMax((int) (dataBytesRead/2)); // Set the maximum range of the seek bar (in frames)
audioTrack.setNotificationMarkerPosition((int) (dataSize/2));
audioTrack.setPositionNotificationPeriod(1000);
audioTrack.setPlaybackPositionUpdateListener(new OnPlaybackPositionUpdateListener() {
@Override
public void onPeriodicNotification(AudioTrack track) {
posBar.setProgress(audioTrack.getPlaybackHeadPosition());
Log.i("", " " + audioTrack.getPlaybackHeadPosition() + " " + dataBytesRead/2);
}
@Override
public void onMarkerReached(AudioTrack track) {
Log.i("", " End reached ");
audioTrack.pause();
audioTrack.flush();
audioTrack.release();
posBar.setProgress(0);
resetPlayer();}
});
}
private int initializeTrack() {
InputStream is;
BufferedInputStream bis;
DataInputStream dis;
int temp = 0;
try {
is = new FileInputStream(file);
bis = new BufferedInputStream(is);
dis = new DataInputStream(bis);
temp = dis.read(dataArray, 0, dataSize);
dis.close();
bis.close();
is.close();
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return temp;
}
public void resetPlayer() {
audioTrack.flush();
audioTrack.release();
posBar.setProgress(0);
sliderval=0;
}
From what I can see, you implemented AudioTrack in such a way that even while it is paused, the contents of the file keep being uploaded to the AudioTrack.
I don't know how it handles that, but in my case I also pause the data upload to the AudioTrack, like this:
while (byteOffset < fileLengh) {
if(isPaused)
continue;
ret = in.read(byteData, 0, byteCount);
if (ret != -1) { // Write the byte array to the track
audioTrack.write(byteData, 0, ret);
byteOffset += ret;
} else
break;
}
So when I unpause the AudioTrack, the file-uploading while loop resumes too. I guess that's it. I also have to mention that even when the AudioTrack is playing, the following checks:
if (audioTrack.getPlayState()==AudioTrack.PLAYSTATE_PLAYING)
and
if (audioTrack.getPlayState()==AudioTrack.PLAYSTATE_PAUSED)
don't work for me; getPlayState() always returns 1 (AudioTrack.PLAYSTATE_STOPPED), no matter whether it is playing or has been paused.
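If getPlayState() is unreliable on a device, one fallback (purely a sketch, and essentially the boolean-flag approach the question asks about) is to mirror the state yourself around each call:

// Sketch: track the play/pause state in a field instead of trusting getPlayState().
private boolean isPlayingFlag = false;

private void togglePlayPause() {
    if (isPlayingFlag) {
        audioTrack.pause();
        isPlayingFlag = false;
    } else {
        audioTrack.play();        // resumes from the paused position
        isPlayingFlag = true;
    }
}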
I am stuck on a weird problem. I have an activity with a MediaPlayer that should play a just-recorded audio file. At first the MediaPlayer is initialized fine and the file can be played.
When I rotate the screen, the activity is destroyed and then reinitialized for the new orientation. Therefore, I re-initialize the MediaPlayer too.
This works a couple of times, but at some point mediaPlayer.setDataSource() throws a NullPointerException because the file is suddenly gone. Sadly, I haven't seen any other errors in the logs.
Here are some code snippets:
player creation:
/**
* Creates and initializes the player with the proper file.
*/
private void createPlayer() {
synchronized (playerMutex) {
player = new MediaPlayer();
player.setLooping(false);
player.setOnPreparedListener(this);
player.setOnErrorListener(this);
player.setOnCompletionListener(this);
}
readGreeting();
}
player initialization:
isPrepared = false;
try {
final File file = new File(audioFilename);
in = new FileInputStream(file);
synchronized (playerMutex) {
player.setDataSource(in.getFD());
}
// using a runnable instead of prepareAsync to not accidentally call pause on media player while preparing
Runnable preparer = new Runnable() {
@Override
public void run() {
try {
synchronized (playerMutex) {
if (player != null) {
player.prepare();
}
}
} catch (Exception ex) {
Log.e(TAG, "Error preparing player for file " + file.getAbsolutePath(), ex);
}
}
};
new Thread(preparer).start();
} catch (Exception ex) {
btnPlayback.setEnabled(false);
Log.e(TAG, "Error preparing player", ex);
} finally {
if (in != null) {
try {
in.close();
} catch (IOException e) {
Log.e(TAG, "initPlayer: ", e);
}
}
}
saving instance state...
@Override
protected void onPause() {
synchronized (playerMutex) {
if (isPlaying()) {
getIntent().putExtra(EXTRA_KEY_SEEK, player.getCurrentPosition());
pause();
}
}
setAudioModeBackToNormal();
super.onPause();
}
private void pause() {
synchronized (playerMutex) {
if (isPlaying()) {
player.pause();
}
}
btnPlayback.setVisibility(View.VISIBLE);
btnPause.setVisibility(View.GONE);
}
@Override
protected void onSaveInstanceState(Bundle outState) {
final Bundle extras = getIntent().getExtras();
outState.putBundle("extras", extras);
super.onSaveInstanceState(outState);
}
@Override
protected void onRestoreInstanceState(Bundle savedInstanceState) {
super.onRestoreInstanceState(savedInstanceState);
getIntent().putExtras(savedInstanceState.getBundle("extras"));
}
cleanup:
private void stopPlayerAndFreeResources() {
synchronized (playerMutex) {
isPrepared = false;
if (player != null) {
player.stop();
player.release();
player = null;
}
}
if (in != null) {
try {
in.close();
in = null;
} catch (IOException e) {
Log.e(TAG, "Unexpected error", e);
}
}
}
Maybe I'm looking at the problem from the wrong angle and it has nothing to do with the player. Has anybody ever had issues with disappearing files?
I had playlist files disappear once. The cause turned out to be certain media players that had an option to "manage" my playlists. In this case, manage meant deleting the playlists I already had :(
I need a simple audio recording and playing example using AudioRecorder in Android. I tried MediaRecorder; it works fine.
You mean AudioRecord? Search e.g. "AudioRecord.OnRecordPositionUpdateListener" using Google Code Search. Btw, AudioRecord does recording, not playing.
See also:
Improve Android Audio Recording quality?
Android AudioRecord class - process live mic audio quickly, set up callback function
Here is sample code for audio recording.
private Runnable recordRunnable = new Runnable() {
@Override
public void run() {
byte[] audiodata = new byte[mBufferSizeInBytes];
int readsize = 0;
Log.d(TAG, "start to record");
// start the audio recording
try {
mAudioRecord.startRecording();
} catch (IllegalStateException ex) {
ex.printStackTrace();
}
// in the loop to read data from audio and save it to file.
while (mInRecording == true) {
readsize = mAudioRecord.read(audiodata, 0, mBufferSizeInBytes);
if (AudioRecord.ERROR_INVALID_OPERATION != readsize
&& mFos != null) {
try {
mFos.write(audiodata, 0, readsize); // write only the bytes actually read
} catch (IOException e) {
e.printStackTrace();
}
}
}
// stop recording
try {
mAudioRecord.stop();
} catch (IllegalStateException ex) {
ex.printStackTrace();
}
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
mRecordLogTextView.append("\n Audio finishes recording");
}
});
// close the file
try {
if (mFos != null)
mFos.close();
} catch (IOException e) {
e.printStackTrace();
}
}
};
Then you need two buttons (or one button that serves different functions at different times) to start and stop the recording thread.
mRecordStartButton = (Button) rootView
.findViewById(R.id.audio_record_start);
mRecordStartButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// initialize the audio source
int recordChannel = getChoosedSampleChannelForRecord();
int recordFrequency = getChoosedSampleFrequencyForRecord();
int recordBits = getChoosedSampleBitsForRecord();
Log.d(TAG, "recordBits = " + recordBits);
mRecordChannel = getChoosedSampleChannelForSave();
mRecordBits = getChoosedSampleBitsForSave();
mRecordFrequency = recordFrequency;
// set up the audio source : get the buffer size for audio
// record.
int minBufferSizeInBytes = AudioRecord.getMinBufferSize(
recordFrequency, recordChannel, recordBits);
if(AudioRecord.ERROR_BAD_VALUE == minBufferSizeInBytes){
mRecordLogTextView.setText("Configuration Error");
return;
}
int bufferSizeInBytes = minBufferSizeInBytes * 4;
// create AudioRecord object
mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
recordFrequency, recordChannel, recordBits,
bufferSizeInBytes);
// calculate the buffer size used in the file operation.
mBufferSizeInBytes = minBufferSizeInBytes * 2;
// reset the save file setup
String rawFilePath = WaveFileWrapper
.getRawFilePath(RAW_PCM_FILE_NAME);
try {
File file = new File(rawFilePath);
if (file.exists()) {
file.delete();
}
mFos = new FileOutputStream(file);
} catch (Exception e) {
e.printStackTrace();
}
if (mInRecording == false) {
mRecordThread = new Thread(recordRunnable);
mRecordThread.setName("Demo.AudioRecord");
mRecordThread.start();
mRecordLogTextView.setText(" Audio starts recording");
mInRecording = true;
// enable the stop button
mRecordStopButton.setEnabled(true);
// disable the start button
mRecordStartButton.setEnabled(false);
}
// show the log info
String audioInfo = " Audio Information : \n"
+ " sample rate = " + mRecordFrequency + "\n"
+ " channel = " + mRecordChannel + "\n"
+ " sample byte = " + mRecordBits;
mRecordLogTextView.setText(audioInfo);
}
});
mRecordStopButton = (Button) rootView
.findViewById(R.id.audio_record_stop);
mRecordStopButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
if (mInRecording == false) {
Log.d(TAG, "current NOT in Record");
} else {
// stop recording
if (mRecordThread != null) {
Log.d(TAG, "mRecordThread is not null");
mInRecording = false;
Log.d(TAG, "set mInRecording to false");
try {
mRecordThread.join(TIMEOUT_FOR_RECORD_THREAD_JOIN);
Log.d(TAG, "record thread joins here");
} catch (InterruptedException e) {
e.printStackTrace();
}
mRecordThread = null;
// re-enable the start button
mRecordStartButton.setEnabled(true);
// disable the stop button
mRecordStopButton.setEnabled(false);
} else {
Log.d(TAG, "mRecordThread is null");
}
}
}
});
Then you can save the PCM data into a WAV file.
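A WAV file is just the raw PCM data prefixed with a 44-byte RIFF header. Below is a minimal sketch of writing such a file for 16-bit mono PCM (the helper name and the idea of keeping all PCM in one byte array are assumptions for illustration; it uses java.nio.ByteBuffer):

/* Sketch: prepend a standard 44-byte RIFF/WAVE header to raw 16-bit mono PCM data. */
private void writeWavFile(File wavFile, byte[] pcmData, int sampleRate) throws IOException {
    int channels = 1;
    int bitsPerSample = 16;
    int byteRate = sampleRate * channels * bitsPerSample / 8;
    int blockAlign = channels * bitsPerSample / 8;

    ByteBuffer header = ByteBuffer.allocate(44).order(ByteOrder.LITTLE_ENDIAN);
    header.put("RIFF".getBytes("US-ASCII"));
    header.putInt(36 + pcmData.length);        // chunk size = file size minus first 8 bytes
    header.put("WAVE".getBytes("US-ASCII"));
    header.put("fmt ".getBytes("US-ASCII"));
    header.putInt(16);                         // fmt chunk size for PCM
    header.putShort((short) 1);                // audio format 1 = uncompressed PCM
    header.putShort((short) channels);
    header.putInt(sampleRate);
    header.putInt(byteRate);
    header.putShort((short) blockAlign);
    header.putShort((short) bitsPerSample);
    header.put("data".getBytes("US-ASCII"));
    header.putInt(pcmData.length);

    FileOutputStream out = new FileOutputStream(wavFile);
    try {
        out.write(header.array());
        out.write(pcmData);
    } finally {
        out.close();
    }
}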