I am trying to use AudioRecord to record a WAV file for a certain interval of time (corresponding to a certain number of samples). To stop the recording I used Timer().schedule(delay), but no matter what value that delay has, the file is always the same size and not readable.
public class Record extends Activity {
private static final int RECORDER_SAMPLERATE = 44100;
int compt=0;
int duration=1;
//int numSample= duration*RECORDER_SAMPLERATE;
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_MONO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private AudioRecord recorder = null;
private volatile Thread recordingThread=null;
private volatile boolean isRecording = false;
int bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE,RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
long mySubChunk1Size = 16;
short myBitsPerSample= 16;
int myFormat = 1;
int myChannels = 1;
long myByteRate =RECORDER_SAMPLERATE* myChannels * myBitsPerSample/8;
int myBlockAlign = myChannels * myBitsPerSample/8;
Handler handler;
String filePath = Environment.getExternalStorageDirectory().getAbsolutePath()+"/recordedSound"+compt+".wav";
DataOutputStream dd;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_raw_rec);
try
{
try {
startRec();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
new Timer().schedule(new TimerTask()
{
@Override
public void run()
{
recordingThread.interrupt();
recordingThread=null;
isRecording=false;
recorder.stop();
recorder.release();
recorder = null;
}
}
,5000); //execute after 5 seconds of recording
}//end try
catch(IllegalStateException e) {
e.printStackTrace();
}
}
private void startRec() throws IOException{
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,RECORDER_SAMPLERATE, RECORDER_CHANNELS,RECORDER_AUDIO_ENCODING, bufferSize);
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
private void writeToFile() {
// Write the output audio in byte
byte[] data= new byte[bufferSize];
while (isRecording) {
try{
recorder.read(data, 0, bufferSize);
Log.d("recorder value", "value"+recorder);
}
catch (NullPointerException e)
{
e.printStackTrace();
}
try {
// // writes the data to file from buffer
// // stores the voice buffer
dd=new DataOutputStream( new FileOutputStream(filePath));
dd.writeBytes("RIFF");
dd.writeInt(0); // Final file size not known yet, write 0
dd.writeBytes("WAVE");
dd.writeBytes("fmt ");
dd.writeInt(Integer.reverseBytes(16)); // Sub-chunk size, 16 for PCM
dd.writeShort(Short.reverseBytes((short) 1)); // AudioFormat, 1 for PCM
dd.writeShort(Short.reverseBytes( (short) myChannels));// Number of channels, 1 for mono, 2 for stereo
dd.writeInt(Integer.reverseBytes(RECORDER_SAMPLERATE)); // Sample rate
dd.writeInt(Integer.reverseBytes( (RECORDER_SAMPLERATE*myBitsPerSample*myChannels/8))); // Byte rate, SampleRate*NumberOfChannels*BitsPerSample/8
dd.writeShort(Short.reverseBytes((short) (myChannels*myBitsPerSample/8))); // Block align, NumberOfChannels*BitsPerSample/8
dd.writeShort(Short.reverseBytes( myBitsPerSample)); // Bits per sample
dd.writeBytes("data");
dd.write(data);
//dd.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
What am I doing wrong?
You're creating a new WAV file during every iteration of the while loop in writeToFile.
A better approach would be to write just the PCM data buffers (without any header) to a temporary file, and make sure you only create the FileOutputStream once. Then, when the recording has ended, generate the RIFF header and write it to the destination WAV file, followed by the contents of your temporary file.
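A minimal sketch of that post-processing step, assuming the raw 16-bit PCM has already been written to a temporary file (the method name and parameters here are illustrative, not from the question):

private void rawToWave(File rawFile, File wavFile, int sampleRate, short channels, short bitsPerSample) throws IOException {
    // Read back the raw PCM captured during recording.
    byte[] rawData = new byte[(int) rawFile.length()];
    try (DataInputStream in = new DataInputStream(new FileInputStream(rawFile))) {
        in.readFully(rawData);
    }
    // Write the RIFF/WAVE header, then the PCM data. DataOutputStream writes
    // big-endian, so reverseBytes() produces the little-endian fields WAV expects.
    try (DataOutputStream out = new DataOutputStream(new FileOutputStream(wavFile))) {
        int byteRate = sampleRate * channels * bitsPerSample / 8;
        out.writeBytes("RIFF");
        out.writeInt(Integer.reverseBytes(36 + rawData.length)); // total chunk size
        out.writeBytes("WAVE");
        out.writeBytes("fmt ");
        out.writeInt(Integer.reverseBytes(16));                  // fmt sub-chunk size for PCM
        out.writeShort(Short.reverseBytes((short) 1));           // audio format 1 = PCM
        out.writeShort(Short.reverseBytes(channels));
        out.writeInt(Integer.reverseBytes(sampleRate));
        out.writeInt(Integer.reverseBytes(byteRate));
        out.writeShort(Short.reverseBytes((short) (channels * bitsPerSample / 8))); // block align
        out.writeShort(Short.reverseBytes(bitsPerSample));
        out.writeBytes("data");
        out.writeInt(Integer.reverseBytes(rawData.length));      // data sub-chunk size
        out.write(rawData);
    }
}

Called once after recorder.stop(), e.g. rawToWave(tempPcmFile, new File(filePath), RECORDER_SAMPLERATE, (short) 1, (short) 16), this produces a file that players can open.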
I'm developing an Android app (compileSdkVersion 23) that records audio using AudioRecord; the reason for using it is to get the frequency from an FFT in real time.
Besides that, I also need to save the recorded sound so I can check it later (tracking the frequency is unnecessary in that step).
How do I save the recorded sound to a file when using AudioRecord on Android?
Also, am I using AudioRecord correctly?
Here is the code:
public class MainActivity extends Activity {
int frequency = 8000;
int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
AudioRecord audioRecord;
RecordAudio recordTask;
int blockSize;// = 256;
boolean started = false;
boolean CANCELLED_FLAG = false;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
blockSize = 256;
final Button btRec = (Button) findViewById(R.id.btRec);
btRec.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (started == true) {
//started = false;
CANCELLED_FLAG = true;
//recordTask.cancel(true);
try{
audioRecord.stop();
}
catch(IllegalStateException e){
Log.e("Stop failed", e.toString());
}
btRec.setText("Start");
// canvasDisplaySpectrum.drawColor(Color.BLACK);
}
else {
started = true;
CANCELLED_FLAG = false;
btRec.setText("Stop");
recordTask = new RecordAudio();
recordTask.execute();
}
}
});
}
private class RecordAudio extends AsyncTask<Void, double[], Boolean> {
@Override
protected Boolean doInBackground(Void... params) {
int bufferSize = AudioRecord.getMinBufferSize(frequency,
channelConfiguration, audioEncoding);
audioRecord = new AudioRecord(
MediaRecorder.AudioSource.DEFAULT, frequency,
channelConfiguration, audioEncoding, bufferSize);
int bufferReadResult;
short[] buffer = new short[blockSize];
double[] toTransform = new double[blockSize];
try {
audioRecord.startRecording();
} catch (IllegalStateException e) {
Log.e("Recording failed", e.toString());
}
while (started) {
if (isCancelled() || (CANCELLED_FLAG == true)) {
started = false;
//publishProgress(cancelledResult);
Log.d("doInBackground", "Cancelling the RecordTask");
break;
} else {
bufferReadResult = audioRecord.read(buffer, 0, blockSize);
for (int i = 0; i < blockSize && i < bufferReadResult; i++) {
toTransform[i] = (double) buffer[i] / 32768.0; // signed 16 bit
}
//transformer.ft(toTransform);
//publishProgress(toTransform);
}
}
return true;
}
}
}
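For the saving part of the question, one option (a sketch only, assuming java.nio.ByteBuffer is imported and 16-bit mono PCM as above; the Audio_Record answers further down show the same idea in full) is to append each read buffer to a FileOutputStream inside the capture loop:

// Hypothetical helper for MainActivity: read from an already-started AudioRecord
// and append the samples as little-endian 16-bit PCM while they are also available
// for the FFT. The resulting file is headerless PCM, not a playable WAV.
private void captureToFile(AudioRecord rec, File outFile, int blockSize) throws IOException {
    short[] buffer = new short[blockSize];
    try (FileOutputStream os = new FileOutputStream(outFile)) {
        while (started && !CANCELLED_FLAG) {
            int read = rec.read(buffer, 0, blockSize);
            if (read <= 0) break;                        // error code or nothing read
            ByteBuffer bb = ByteBuffer.allocate(read * 2).order(ByteOrder.LITTLE_ENDIAN);
            for (int i = 0; i < read; i++) {
                bb.putShort(buffer[i]);                  // raw sample for the file
                // buffer[i] / 32768.0 would still feed toTransform[] here
            }
            os.write(bb.array(), 0, read * 2);
        }
    }
}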
You have to download your file and save it in the cache; then, for any request, check whether the cached file is available: if it is, use it, otherwise request a new file.
For complete help, look into one of my answers: Download and cache media files
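As a rough sketch of that check-the-cache-first idea (the file name, context, and helper methods here are placeholders, not from the answer):

// Hypothetical cache lookup: use the local copy if present, otherwise fetch it first.
File cached = new File(context.getCacheDir(), fileName);
if (cached.exists() && cached.length() > 0) {
    useMediaFile(cached);      // placeholder for whatever consumes the file
} else {
    downloadTo(cached);        // placeholder for the actual download code
    useMediaFile(cached);
}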
I want to capture audio from an Android device. My code below seems to successfully create a WAV file on the SD card, but it cannot be played. I tried to play it with different media players, but none of them work. There must be an issue in my code that is causing this problem.
Code:
public class MainActivity extends ActionBarActivity {
private static final String LOG_TAG = "AudioRecordTest";
static final int AUDIO_PORT = 2048;
static final int SAMPLE_RATE = 8000;
static final int SAMPLE_INTERVAL = 20; // milliseconds
static final int SAMPLE_SIZE = 2; // bytes per sample
static final int BUF_SIZE = SAMPLE_INTERVAL * SAMPLE_INTERVAL * SAMPLE_SIZE * 2;
private static int[] mSampleRates = new int[]{44100, 44056, 47250, 48000, 22050, 16000, 11025, 8000};
private Thread recordingThread = null;
private boolean isRecording = false;
int BufferElements2Rec = 1024; // want to play 2048 (2K) since 2 bytes we
// use only 1024
int BytesPerElement = 2; // 2 bytes in 16bit format
private int bufferSize;
private AudioRecord recorder;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
startRecording();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
private void startRecording() {
recorder = findAudioRecord();
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
// convert short to byte
private byte[] short2byte(short[] sData) {
int shortArrsize = sData.length;
byte[] bytes = new byte[shortArrsize * 2];
for (int i = 0; i < shortArrsize; i++) {
bytes[i * 2] = (byte) (sData[i] & 0x00FF);
bytes[(i * 2) + 1] = (byte) (sData[i] >> 8);
sData[i] = 0;
}
return bytes;
}
public AudioRecord findAudioRecord() {
for (int rate : mSampleRates) {
for (short audioFormat : new short[]{AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT}) {
for (short channelConfig : new short[]{AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_IN_STEREO}) {
try {
Log.d(LOG_TAG, "Attempting rate " + rate + "Hz, bits: " + audioFormat + ", channel: "
+ channelConfig);
bufferSize = AudioRecord.getMinBufferSize(rate, channelConfig, audioFormat);
if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
// check if we can instantiate and have a success
recorder = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, rate, channelConfig, audioFormat, bufferSize);
if (recorder.getState() == AudioRecord.STATE_INITIALIZED)
return recorder;
}
} catch (Exception e) {
Log.e(LOG_TAG, rate + "Exception, keep trying.", e);
}
}
}
}
return null;
}
private void writeAudioDataToFile() {
/*// Write the output audio in byte
short sData[] = new short[BufferElements2Rec];
while (isRecording) {
// gets the voice output from microphone to byte format
recorder.read(sData, 0, BufferElements2Rec);
System.out.println("Short wirting to file" + sData.toString());
// // stores the voice buffer
byte bData[] = short2byte(sData);
sendLiveAudio(bData);
}*/
String filePath = "/sdcard/test.wav";
short sData[] = new short[bufferSize / 2];
FileOutputStream os = null;
try {
os = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
while (isRecording) {
// gets the voice output from microphone to byte format
recorder.read(sData, 0, bufferSize / 2);
Log.d("eray", "Short wirting to file" + sData.toString());
try {
// // writes the data to file from buffer
// // stores the voice buffer
byte bData[] = short2byte(sData);
os.write(bData, 0, bufferSize);
} catch (IOException e) {
e.printStackTrace();
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
private void stopRecording() {
// stops the recording activity
if (null != recorder) {
isRecording = false;
recorder.stop();
recorder.release();
recorder = null;
recordingThread = null;
}
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
@Override
public void onBackPressed() {
super.onBackPressed();
stopRecording();
}
}
Try this:
public class Audio_Record extends Activity {
private static final int RECORDER_SAMPLERATE = 8000;
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_MONO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private AudioRecord recorder = null;
private Thread recordingThread = null;
private boolean isRecording = false;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
setButtonHandlers();
enableButtons(false);
int bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE,
RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
}
private void setButtonHandlers() {
((Button) findViewById(R.id.btnStart)).setOnClickListener(btnClick);
((Button) findViewById(R.id.btnStop)).setOnClickListener(btnClick);
}
private void enableButton(int id, boolean isEnable) {
((Button) findViewById(id)).setEnabled(isEnable);
}
private void enableButtons(boolean isRecording) {
enableButton(R.id.btnStart, !isRecording);
enableButton(R.id.btnStop, isRecording);
}
int BufferElements2Rec = 1024; // want to play 2048 (2K) since 2 bytes we use only 1024
int BytesPerElement = 2; // 2 bytes in 16bit format
private void startRecording() {
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
RECORDER_SAMPLERATE, RECORDER_CHANNELS,
RECORDER_AUDIO_ENCODING, BufferElements2Rec * BytesPerElement);
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
//convert short to byte
private byte[] short2byte(short[] sData) {
int shortArrsize = sData.length;
byte[] bytes = new byte[shortArrsize * 2];
for (int i = 0; i < shortArrsize; i++) {
bytes[i * 2] = (byte) (sData[i] & 0x00FF);
bytes[(i * 2) + 1] = (byte) (sData[i] >> 8);
sData[i] = 0;
}
return bytes;
}
private void writeAudioDataToFile() {
// Write the output audio in byte
String filePath = "/sdcard/voice8K16bitmono.pcm";
short sData[] = new short[BufferElements2Rec];
FileOutputStream os = null;
try {
os = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
while (isRecording) {
// gets the voice output from microphone to byte format
recorder.read(sData, 0, BufferElements2Rec);
System.out.println("Short writing to file" + sData.toString());
try {
// // writes the data to file from buffer
// // stores the voice buffer
byte bData[] = short2byte(sData);
os.write(bData, 0, BufferElements2Rec * BytesPerElement);
} catch (IOException e) {
e.printStackTrace();
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
private void stopRecording() {
// stops the recording activity
if (null != recorder) {
isRecording = false;
recorder.stop();
recorder.release();
recorder = null;
recordingThread = null;
}
}
private View.OnClickListener btnClick = new View.OnClickListener() {
public void onClick(View v) {
switch (v.getId()) {
case R.id.btnStart: {
enableButtons(true);
startRecording();
break;
}
case R.id.btnStop: {
enableButtons(false);
stopRecording();
break;
}
}
}
};
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
finish();
}
return super.onKeyDown(keyCode, event);
}
}
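Note that the file this writes is headerless PCM (hence the .pcm extension), so ordinary media players still will not open it. A sketch of playing it back with AudioTrack, assuming the same 8 kHz mono 16-bit format used above and a background thread:

private void playPcm(String path) throws IOException {
    int minBuf = AudioTrack.getMinBufferSize(8000,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, 8000,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
            minBuf, AudioTrack.MODE_STREAM);
    track.play();
    try (FileInputStream in = new FileInputStream(path)) {
        byte[] buf = new byte[minBuf];
        int n;
        while ((n = in.read(buf)) > 0) {
            track.write(buf, 0, n);   // blocking write of the raw samples
        }
    }
    track.stop();
    track.release();
}

Alternatively, prepend a RIFF/WAVE header (as sketched after the first answer on this page) to turn the raw file into a proper .wav.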
I am developing a simple producer-consumer example. One thread records audio samples using the AudioRecord class and writes them into a buffer. The second one just reads the buffer and does nothing. When the user wants to stop recording, the first thread writes special characters into the buffer as an indicator for the other thread that reading is over. Here is my code:
public class SpellCollectorActivity extends Activity implements OnClickListener{
private ArrayBlockingQueue<byte[] > audioq;
boolean needToBeStopped = false;
Button generate, action;
private MyRecorder rec;
private MyReader mr;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
action = (Button) findViewById(R.id.actionButton);
action.setOnClickListener(this);
needToBeStopped = false;
audioq = new ArrayBlockingQueue<byte[]>(CAPACITY);
}
public void onClick(View arg0){
switch(arg0.getId()){
case R.id.generateButton:
generateContentToSpell();
break;
case R.id.actionButton:
if(needToBeStopped){
rec.stopThread();
needToBeStopped = false;
action.setText(this.getString(R.string.start));
}else{
rec = new MyRecorder(audioq);
mr = new MyReader(audioq);
rec.start();
mr.start();
needToBeStopped = true;
action.setText(this.getString(R.string.stop));
}
break;
}
}
private class MyRecorder extends Thread{
private static final int freq = 22050;
private static final int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
private static final int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
private final BlockingQueue<byte[] > myRecAudioq;
private AudioRecord recorder;
private boolean recording = false;
int bufferSize;
/* constructor */
public MyRecorder(BlockingQueue<byte[]> q ){
bufferSize = AudioRecord.getMinBufferSize(freq, channelConfiguration, audioEncoding);
myRecAudioq = q;
}
public void run(){
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
freq, channelConfiguration,
audioEncoding, 3*bufferSize);
recorder.startRecording();
recording = true;
byte[] buffer = new byte[bufferSize];
while(recording){
int readBufferSize = recorder.read(buffer, 0, bufferSize);
if(readBufferSize>0){
try {
myRecAudioq.put(buffer);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
}
public void stopThread(){
recorder.stop();
recording = false;
byte[] buffer = new byte[bufferSize];
for(int i=0;i<bufferSize;i++){
buffer[i] =(byte) 0xff;
}
try {
myRecAudioq.put(buffer);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
private class MyReader extends Thread{
private final BlockingQueue<byte[]> bq;
private static final int freq = 22050;
private static final int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
private static final int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
private int counter = 0;
public MyReader(BlockingQueue<byte[]> q){
bq = q;
}
public void run(){
int buffSize = AudioRecord.getMinBufferSize(freq, channelConfiguration, audioEncoding);
byte[] compareBuffer= new byte[buffSize];
for(int i=0;i<buffSize;i++){
compareBuffer[i] = (byte)0xff;
}
boolean reading = true;
byte[] buffer = null;
do{
try {
buffer = bq.take();
reading = buffer.equals(compareBuffer);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
if(reading){
int c=1;
}
}while(!reading);
int a=5;
}
}
}
I tried to put a breakpoint in the MyReader class at the line
int c=1;
but it's never reached. I wrote this code using this example. What could be the problem?
The value of 'reading' is a comparison between two byte[] references.
That can only be true if they are the same object (the same pointer, if you will), i.e. if you had first assigned buffer = compareBuffer.
What you actually want is to compare all elements of the buffers, for example using the java.util.Arrays class:
reading = Arrays.equals(compareBuffer, buffer);
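Applied to the reader loop in the question, a sketch (assuming java.util.Arrays is imported) would be:

// Reader loop adjusted to compare contents rather than references.
byte[] buffer;
boolean sentinelSeen = false;
do {
    try {
        buffer = bq.take();
        sentinelSeen = Arrays.equals(buffer, compareBuffer); // true only for the 0xff sentinel
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
} while (!sentinelSeen);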
I've been working on an app that plays back audio from the microphone in real time.
It sets up an AudioRecord, which initializes without any errors. However, read operations just return a bunch of zeros, or a lot of numbers close to the maximum value of short. I'm really stuck; it would be very kind if anyone could help me. This is my code:
public class AudioIn extends Thread {
public static final int ERROR_RECORD_INIT = -1;
public static final int ERROR_RECORD_NOTIFICATION = -2;
public static final int ERROR_RECORD_READ = -3;
public static final int SUCCESS = 0;
public static final int audioFrequency = 44100;
public static final int channelConfig = AudioFormat.CHANNEL_IN_MONO;
public static final int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
final int ShortsReadPerCycle = 1024;
private boolean capture = true;
private AudioRecord recorder;
private int effectiveCaptureBufferSize;
private short[] buffer;
private AudioInHandler handler;
public AudioIn()
{
int minDeviceBuffer = AudioRecord.getMinBufferSize(audioFrequency, channelConfig, audioFormat);
Log.d("AudioIn", "Minimum device capture buffer is: " + Integer.toString(minDeviceBuffer) + " bytes");
effectiveCaptureBufferSize = minDeviceBuffer;
Log.d("AudioIn", "Setting capture buffer size to " + effectiveCaptureBufferSize + " bytes");
}
public void close()
{
capture = false;
}
public int samplesPerBuffer()
{
return effectiveCaptureBufferSize / 2;
}
@Override
public void run() {
// TODO Auto-generated method stub
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_AUDIO);
try
{
recorder = new AudioRecord(AudioSource.MIC, audioFrequency, channelConfig, audioFormat, effectiveCaptureBufferSize);
}
catch (Throwable t)
{
handler.onError(ERROR_RECORD_INIT);
return;
}
recorder.startRecording();
while(capture)
{
buffer = new short[ShortsReadPerCycle];
int shortsRead = recorder.read(buffer, 0, buffer.length);
if (shortsRead < 0)
{
new Runnable() {
@Override
public void run() {
// TODO Auto-generated method stub
handler.onError(ERROR_RECORD_READ);
}
}.run();
this.close();
}
else
{
new Runnable() {
@Override
public void run() {
// TODO Auto-generated method stub
handler.onNewData(buffer);
}
}.run();
}
}
Log.d("AudioIn.run()", "Stopping AudioRecord...");
recorder.stop();
Log.d("AudioIn.run()", "Stopped AudioRecord, now releasing...");
recorder.release();
Log.d("AudioIn.run()", "AudioRecord released");
recorder = null;
}
public void setHandler(AudioInHandler handler) {
this.handler = handler;
}
}
Calling read() on AudioRecord once does not ensure that the whole short[] is filled. You should check the returned "shortsRead" value for the actual number of shorts read, and keep calling read() until the short[] is filled. Sample code is given below.
private void readFully(short[] data, int off, int length) {
// mRec is the AudioRecord instance being read from; keep calling read()
// until 'length' shorts have actually been delivered (read() may return fewer,
// and a robust version should also bail out on a negative error code).
int read;
while (length > 0) {
read = mRec.read(data, off, length);
length -= read;
off += read;
}
}
In your recording loop
while (!released) {
// fill the pktBuf
short[] pktBuf = new short[pktSize];
readFully(pktBuf, 0, pktSize);
// Do something
}
This way, every time a call to read() is made, we increment the offset by "read" and decrement the remaining length by "read", and keep reading until the remaining length reaches 0. You will then get a short[] filled with recorded audio data.
Similarly, when you are writing data into an AudioTrack, you have to do the same thing ("writeFully()") to ensure the whole short[] is written into the AudioTrack.
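A sketch of that writeFully() counterpart, assuming an AudioTrack field named mTrack in MODE_STREAM (not defined in the answer):

private void writeFully(short[] data, int off, int length) {
    // Keep writing until the whole buffer has been consumed by the AudioTrack.
    while (length > 0) {
        int written = mTrack.write(data, off, length);
        if (written < 0) break;   // stop on an error code
        length -= written;
        off += written;
    }
}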
Hope it helps.
I am designing an Android app and I need to use the AudioRecord class to record the user's sound. After some research (that didn't provide enough information) and a few failed attempts, I was wondering if anyone could help me by posting an example (code) of how to capture high-quality sound using AudioRecord. I would really appreciate it. Thank you.
Here I am posting some example code which records good-quality sound using the AudioRecord API.
Note: if you use the emulator, the sound quality will not be very good, because we are using an 8 kHz sample rate, which is the only rate the emulator supports. On a device, use a 44.1 kHz sample rate for better quality.
public class Audio_Record extends Activity {
private static final int RECORDER_SAMPLERATE = 8000;
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_MONO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private AudioRecord recorder = null;
private Thread recordingThread = null;
private boolean isRecording = false;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
setButtonHandlers();
enableButtons(false);
int bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE,
RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
}
private void setButtonHandlers() {
((Button) findViewById(R.id.btnStart)).setOnClickListener(btnClick);
((Button) findViewById(R.id.btnStop)).setOnClickListener(btnClick);
}
private void enableButton(int id, boolean isEnable) {
((Button) findViewById(id)).setEnabled(isEnable);
}
private void enableButtons(boolean isRecording) {
enableButton(R.id.btnStart, !isRecording);
enableButton(R.id.btnStop, isRecording);
}
int BufferElements2Rec = 1024; // want to play 2048 (2K) since 2 bytes we use only 1024
int BytesPerElement = 2; // 2 bytes in 16bit format
private void startRecording() {
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
RECORDER_SAMPLERATE, RECORDER_CHANNELS,
RECORDER_AUDIO_ENCODING, BufferElements2Rec * BytesPerElement);
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
//convert short to byte
private byte[] short2byte(short[] sData) {
int shortArrsize = sData.length;
byte[] bytes = new byte[shortArrsize * 2];
for (int i = 0; i < shortArrsize; i++) {
bytes[i * 2] = (byte) (sData[i] & 0x00FF);
bytes[(i * 2) + 1] = (byte) (sData[i] >> 8);
sData[i] = 0;
}
return bytes;
}
private void writeAudioDataToFile() {
// Write the output audio in byte
String filePath = "/sdcard/voice8K16bitmono.pcm";
short sData[] = new short[BufferElements2Rec];
FileOutputStream os = null;
try {
os = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
while (isRecording) {
// gets the voice output from microphone to byte format
recorder.read(sData, 0, BufferElements2Rec);
System.out.println("Short writing to file" + sData.toString());
try {
// // writes the data to file from buffer
// // stores the voice buffer
byte bData[] = short2byte(sData);
os.write(bData, 0, BufferElements2Rec * BytesPerElement);
} catch (IOException e) {
e.printStackTrace();
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
private void stopRecording() {
// stops the recording activity
if (null != recorder) {
isRecording = false;
recorder.stop();
recorder.release();
recorder = null;
recordingThread = null;
}
}
private View.OnClickListener btnClick = new View.OnClickListener() {
public void onClick(View v) {
switch (v.getId()) {
case R.id.btnStart: {
enableButtons(true);
startRecording();
break;
}
case R.id.btnStop: {
enableButtons(false);
stopRecording();
break;
}
}
}
};
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
finish();
}
return super.onKeyDown(keyCode, event);
}
}
For more detail, try this AUDIORECORD BLOG.
Here is an end-to-end solution I implemented for streaming Android microphone audio to a server for playback: Android AudioRecord to Server over UDP Playback Issues
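As a rough sketch of the sending side of such a setup (the address, port, and buffer size are placeholders; this needs the INTERNET permission and must run on a background thread):

private void streamToServer(AudioRecord recorder) throws IOException {
    DatagramSocket socket = new DatagramSocket();
    InetAddress server = InetAddress.getByName("192.168.0.10"); // placeholder address
    byte[] buf = new byte[1024];
    while (isRecording) {
        int n = recorder.read(buf, 0, buf.length);
        if (n <= 0) break;                                      // error or no data
        socket.send(new DatagramPacket(buf, n, server, 50005)); // placeholder port
    }
    socket.close();
}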