Android - How to convert raw audio data to WAV?

I have implemented audio recording in my Android app. The app works fine on Linux, but when I run the same app on Mac OS X and record audio, it crashes.
private AudioRecordingThread recordingThread;
recordingThread = new AudioRecordingThread(fileName,
new AudioRecordingHandler() {
@Override
public void onFftDataCapture(final byte[] bytes) {
runOnUiThread(new Runnable() {
public void run() {
if (visualizerView != null) {
visualizerView.updateVisualizerFFT(bytes);
}
}
});
}
@Override
public void onRecordSuccess() {
}
@Override
public void onRecordingError() {
runOnUiThread(new Runnable() {
public void run() {
recordStop();
NotificationUtils.showInfoDialog(
ActivityCropImage.this,
"Error in saving");
}
});
}
@Override
public void onRecordSaveError() {
runOnUiThread(new Runnable() {
public void run() {
recordStop();
NotificationUtils.showInfoDialog(
ActivityCropImage.this,
"Error in recording");
}
});
}
});
PcmAudioHelper.convertRawToWav(WavAudioFormat.mono16Bit(SAMPLING_RATE), file_raw, file_wav);
This is the line where my app crashes. I am using the https://github.com/steelkiwi/AndroidRecording library in my project.
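A defensive sketch (not from the library's documentation; variable names reused from the snippet above): wrap the crashing call so the real cause shows up in logcat instead of killing the app.
// Sketch: log whatever the conversion throws (missing raw file, unwritable
// output path, unsupported sample rate, ...) instead of crashing.
try {
    PcmAudioHelper.convertRawToWav(WavAudioFormat.mono16Bit(SAMPLING_RATE), file_raw, file_wav);
} catch (Exception e) {
    Log.e("AudioRecording", "RAW -> WAV conversion failed for " + file_raw, e);
}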

package com.record.util;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Environment;
import android.text.format.Time;
import android.util.Log;
import java.io.BufferedOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
public class RecordWavMaster {
private static final int samplingRates[] = {16000, 11025, 11000, 8000, 6000};
public static int SAMPLE_RATE = 16000;
private AudioRecord mRecorder;
private File mRecording;
private short[] mBuffer;
private String audioFilePath;
private boolean mIsRecording = false;
private String RECORD_WAV_PATH = Environment.getExternalStorageDirectory() + File.separator + "AudioRecord";
/* Initializing AudioRecording MIC */
public RecordWavMaster() {
initRecorder();
}
/* Get Supported Sample Rate */
public static int getValidSampleRates() {
for (int rate : samplingRates) {
int bufferSize = AudioRecord.getMinBufferSize(rate, AudioFormat.CHANNEL_CONFIGURATION_DEFAULT, AudioFormat.ENCODING_PCM_16BIT);
if (bufferSize > 0) {
return rate;
}
}
return SAMPLE_RATE;
}
/* Start AudioRecording */
public void recordWavStart() {
mIsRecording = true;
mRecorder.startRecording();
mRecording = getFile("raw");
startBufferedWrite(mRecording);
}
/* Stop AudioRecording */
public String recordWavStop() {
try {
mIsRecording = false;
mRecorder.stop();
File waveFile = getFile("wav");
rawToWave(mRecording, waveFile);
Log.e("path_audioFilePath",audioFilePath);
return audioFilePath;
} catch (Exception e) {
Log.e("Error saving file : ", e.getMessage());
}
return null;
}
/* Release device MIC */
public void releaseRecord() {
mRecorder.release();
}
/* Initializing AudioRecording MIC */
private void initRecorder() {
SAMPLE_RATE = getValidSampleRates();
int bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT);
mBuffer = new short[bufferSize];
mRecorder = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT, bufferSize);
new File(RECORD_WAV_PATH).mkdir();
}
/* Writing RAW file */
private void startBufferedWrite(final File file) {
new Thread(new Runnable() {
@Override
public void run() {
DataOutputStream output = null;
try {
output = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(file)));
while (mIsRecording) {
double sum = 0;
int readSize = mRecorder.read(mBuffer, 0, mBuffer.length);
for (int i = 0; i < readSize; i++) {
output.writeShort(mBuffer[i]);
sum += mBuffer[i] * mBuffer[i];
}
if (readSize > 0) {
final double amplitude = sum / readSize;
}
}
} catch (IOException e) {
Log.e("Error writing file : ", e.getMessage());
} finally {
if (output != null) {
try {
output.flush();
} catch (IOException e) {
Log.e("Error writing file : ", e.getMessage());
} finally {
try {
output.close();
} catch (IOException e) {
Log.e("Error writing file : ", e.getMessage());
}
}
}
}
}
}).start();
}
/* Converting RAW format To WAV Format*/
private void rawToWave(final File rawFile, final File waveFile) throws IOException {
byte[] rawData = new byte[(int) rawFile.length()];
DataInputStream input = null;
try {
input = new DataInputStream(new FileInputStream(rawFile));
input.readFully(rawData); // read() may return fewer bytes; readFully fills the whole array
} finally {
if (input != null) {
input.close();
}
}
DataOutputStream output = null;
try {
output = new DataOutputStream(new FileOutputStream(waveFile));
// WAVE header
writeString(output, "RIFF"); // chunk id
writeInt(output, 36 + rawData.length); // chunk size
writeString(output, "WAVE"); // format
writeString(output, "fmt "); // subchunk 1 id
writeInt(output, 16); // subchunk 1 size
writeShort(output, (short) 1); // audio format (1 = PCM)
writeShort(output, (short) 1); // number of channels
writeInt(output, SAMPLE_RATE); // sample rate
writeInt(output, SAMPLE_RATE * 2); // byte rate
writeShort(output, (short) 2); // block align
writeShort(output, (short) 16); // bits per sample
writeString(output, "data"); // subchunk 2 id
writeInt(output, rawData.length); // subchunk 2 size
// Audio data (conversion big endian -> little endian)
short[] shorts = new short[rawData.length / 2];
ByteBuffer.wrap(rawData).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(shorts);
ByteBuffer bytes = ByteBuffer.allocate(shorts.length * 2);
for (short s : shorts) {
bytes.putShort(s);
}
output.write(bytes.array());
} finally {
if (output != null) {
output.close();
rawFile.delete();
}
}
}
/* Get file name */
private File getFile(final String suffix) {
Time time = new Time();
time.setToNow();
audioFilePath = time.format("%Y%m%d%H%M%S");
return new File(RECORD_WAV_PATH, time.format("%Y%m%d%H%M%S") + "." + suffix);
}
private void writeInt(final DataOutputStream output, final int value) throws IOException {
output.write(value >> 0);
output.write(value >> 8);
output.write(value >> 16);
output.write(value >> 24);
}
private void writeShort(final DataOutputStream output, final short value) throws IOException {
output.write(value >> 0);
output.write(value >> 8);
}
private void writeString(final DataOutputStream output, final String value) throws IOException {
for (int i = 0; i < value.length(); i++) {
output.write(value.charAt(i));
}
}
public String getFileName (final String time_suffix) {
return (RECORD_WAV_PATH + File.separator + time_suffix + ".wav"); // match the path produced by getFile()
}
public Boolean getRecordingState () {
if( mRecorder.getRecordingState() == AudioRecord.RECORDSTATE_STOPPED) {
return false;
}
return true;
}
}
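A minimal usage sketch for this class (the Activity and button wiring are assumed, not part of the original):
// Hypothetical wiring from an Activity or a button handler.
RecordWavMaster recorder = new RecordWavMaster();     // opens the MIC

recorder.recordWavStart();                            // buffered raw PCM goes to .../AudioRecord/<timestamp>.raw

String timestamp = recorder.recordWavStop();          // converts the capture to <timestamp>.wav
if (timestamp != null) {
    Log.d("RecordWavMaster", "Saved " + recorder.getFileName(timestamp));
}

recorder.releaseRecord();                             // release the MIC when completely done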
Declare the following permissions in the manifest:
<!--Permission record-->
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />

Related

How to record audio with different frequency?

I am trying to record audio at different sample rates. I have tried to implement it with the following code, but it's not working (the app unexpectedly stops).
//importing packages
import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.view.KeyEvent;
import android.view.View;
import android.widget.Button;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
public class MainActivity extends Activity {
private static final int RECORDER_SAMPLERATE = 44100;
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_MONO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private AudioRecord recorder = null;
private Thread recordingThread = null;
private boolean isRecording = false;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
setButtonHandlers();//handling buttons
enableButtons(false);
int bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE,
RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
}
private void setButtonHandlers() {
((Button) findViewById(R.id.Start)).setOnClickListener(btnClick);
((Button) findViewById(R.id.Stop)).setOnClickListener(btnClick);
}
private void enableButton(int id, boolean isEnable) {
((Button) findViewById(id)).setEnabled(isEnable);
}
private void enableButtons(boolean isRecording) {
enableButton(R.id.Start, !isRecording);
enableButton(R.id.Stop, isRecording);
}
int BufferElements2Rec = 1024; // 1024 shorts per read = 2048 bytes at 2 bytes per 16-bit sample
int BytesPerElement = 2; // 2 bytes in 16bit format
public void Record()
{
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
RECORDER_SAMPLERATE, RECORDER_CHANNELS,
RECORDER_AUDIO_ENCODING, BufferElements2Rec * BytesPerElement);
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
private byte[] short2byte(short[] sData) {
int shortArrsize = sData.length;
byte[] bytes = new byte[shortArrsize * 2];
for (int i = 0; i < shortArrsize; i++) {
bytes[i * 2] = (byte) (sData[i] & 0x00FF);
bytes[(i * 2) + 1] = (byte) (sData[i] >> 8);
sData[i] = 0;
}
return bytes;
}
private void writeAudioDataToFile() {
// Write the output audio in byte
String filePath = Environment.getExternalStorageDirectory().getAbsolutePath()+"/myrec.pcm";
short sData[] = new short[BufferElements2Rec];
FileOutputStream os = null;
try {
os = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
while (isRecording) {
// gets the voice output from microphone to byte format
recorder.read(sData, 0, BufferElements2Rec);
System.out.println("Short wirting to file" + sData.toString());
try {
// // writes the data to file from buffer
// // stores the voice buffer
byte bData[] = short2byte(sData);
os.write(bData, 0, BufferElements2Rec * BytesPerElement);
} catch (IOException e) {
e.printStackTrace();
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
public void Record_Stop()
{
if (null != recorder) {
isRecording = false;
recorder.stop();
recorder.release();
recorder = null;
recordingThread = null;
}
}
private View.OnClickListener btnClick = new View.OnClickListener() {
public void onClick(View v) {
switch (v.getId()) {
case R.id.Start: {
enableButtons(true);
Record();
break;
}
case R.id.Stop: {
enableButtons(false);
Record_Stop();
break;
}
}
}
};
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
finish();
}
return super.onKeyDown(keyCode, event);
}
}
As soon as I press the Start button, the app stops unexpectedly:
com.example.kaushal.learningfft E/AndroidRuntime: FATAL EXCEPTION: main
Process: com.example.kaushal.learningfft, PID: 4705
java.lang.IllegalStateException: startRecording() called on an uninitialized AudioRecord.
    at android.media.AudioRecord.startRecording(AudioRecord.java:976)
    at com.example.kaushal.learningfft.MainActivity.Record(MainActivity.java:57)
    at com.example.kaushal.learningfft.MainActivity$2.onClick(MainActivity.java:126)
    at android.view.View.performClick(View.java:5610)
    at android.view.View$PerformClick.run(View.java:22265)
    at android.os.Handler.handleCallback(Handler.java:751)
    at android.os.Handler.dispatchMessage(Handler.java:95)
    at android.os.Looper.loop(Looper.java:154)
    at android.app.ActivityThread.main(ActivityThread.java:6077)
    at java.lang.reflect.Method.invoke(Native Method)
    at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:865)
    at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:755)
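The trace shows startRecording() being called on an uninitialized AudioRecord; a common cause is an unsupported parameter combination or a RECORD_AUDIO permission that was never granted at runtime. A defensive sketch (constants reused from the code above, not a fix confirmed by the original poster) that validates the recorder before starting it:
// Sketch: validate the buffer size and recorder state before startRecording().
int minBufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
if (minBufferSize == AudioRecord.ERROR_BAD_VALUE) {
    Log.e("Record", "Unsupported sample rate / channel / encoding combination");
    return;
}
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC, RECORDER_SAMPLERATE, RECORDER_CHANNELS,
        RECORDER_AUDIO_ENCODING, Math.max(minBufferSize, BufferElements2Rec * BytesPerElement));
if (recorder.getState() != AudioRecord.STATE_INITIALIZED) {
    Log.e("Record", "AudioRecord failed to initialize (check the RECORD_AUDIO permission)");
    return;
}
recorder.startRecording();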

Is it possible to record the voice from Bluetooth headset and play on Android speaker simultaneously?

Is it possible to record voice from a Bluetooth headset and play it on the Android speaker simultaneously? I have finished recording audio from the headset with the code below, and I am now on the second step: playing that sound through the Android speaker. Please help me resolve it. Thank you so much.
_audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
_audioManager.startBluetoothSco();
_recorder = new MediaRecorder();
_recorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
_recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
_recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
_recorder.setOutputFile(file.toString());
_recorder.prepare();
_recorder.start();
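One thing to note: startBluetoothSco() is asynchronous, so the recorder above may start before the SCO link is actually up. A sketch (receiver wiring assumed; startRecorder() is a hypothetical helper holding the MediaRecorder setup above) that waits for the connected state first:
// Sketch: register for SCO state updates and only start recording once connected.
IntentFilter filter = new IntentFilter(AudioManager.ACTION_SCO_AUDIO_STATE_UPDATED);
registerReceiver(new BroadcastReceiver() {
    @Override
    public void onReceive(Context context, Intent intent) {
        int state = intent.getIntExtra(AudioManager.EXTRA_SCO_AUDIO_STATE, -1);
        if (state == AudioManager.SCO_AUDIO_STATE_CONNECTED) {
            unregisterReceiver(this);
            startRecorder();  // hypothetical: runs the _recorder setup shown above
        }
    }
}, filter);
_audioManager.startBluetoothSco();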
Recording using AudioRecord
public class MainActivity extends Activity {
public static final int SAMPLE_RATE = 16000;
private AudioRecord mRecorder;
private File mRecording;
private short[] mBuffer;
private final String startRecordingLabel = "Start recording";
private final String stopRecordingLabel = "Stop recording";
private boolean mIsRecording = false;
private ProgressBar mProgressBar;
float iGain = 1.0f;
CheckBox gain;
Button showPref;
OnBluetoothRecording bluetoothRecording;
protected int bitsPerSamples = 16;
@Override
public void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.fragment_main);
initRecorder();
Button bluetooth = (Button)findViewById(R.id.blue);
showPref = (Button)findViewById(R.id.showPreferece);
gain = (CheckBox) findViewById(R.id.checkBox1);
mProgressBar = (ProgressBar) findViewById(R.id.progressBar);
showPref.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
startActivity(new Intent(getApplicationContext(),BluetoothPreferenceActivity.class));
}
});
final Button button = (Button) findViewById(R.id.start);
button.setText(startRecordingLabel);
bluetooth.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
Intent i = new Intent("");
}
});
gain.setOnCheckedChangeListener(new OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView,
boolean isChecked) {
if (gain.isChecked()) {
iGain = 5.0f;
} else {
iGain = 2.0f;
}
}
});
button.setOnClickListener(new OnClickListener() {
@Override
public void onClick(final View v) {
BluetoothRecordingManager.checkAndRecord(getApplicationContext(), new OnBluetoothRecording() {
@Override
public void onStartRecording(boolean state, boolean bluetoothFlag) {
Log.d("CallBack","starting Recording");
if (!mIsRecording) {
button.setText(stopRecordingLabel);
mIsRecording = true;
mRecorder.startRecording();
mRecording = getFile("raw");
startBufferedWrite(mRecording);
} else {
button.setText(startRecordingLabel);
mIsRecording = false;
mRecorder.stop();
File waveFile = getFile("wav");
try {
rawToWave(mRecording, waveFile);
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
}
Toast.makeText(MainActivity.this,
"Recorded to " + waveFile.getName(),
Toast.LENGTH_SHORT).show();
}
}
@Override
public void onCancelRecording() {
}
}, true);
}
});
}
@Override
public void onDestroy() {
mRecorder.release();
super.onDestroy();
}
private void initRecorder() {
int bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
mBuffer = new short[bufferSize];
mRecorder = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT,
bufferSize);
}
private void startBufferedWrite(final File file) {
new Thread(new Runnable() {
@Override
public void run() {
DataOutputStream output = null;
try {
output = new DataOutputStream(new BufferedOutputStream(
new FileOutputStream(file)));
while (mIsRecording) {
double sum = 0;
int readSize = mRecorder.read(mBuffer, 0,
mBuffer.length);
final int bytesPerSample = bitsPerSamples / 8;
final int emptySpace = 64 - bitsPerSamples;
int byteIndex = 0;
int byteIndex2 = 0;
int temp = 0;
int mLeftTemp = 0;
int mRightTemp = 0;
int a = 0;
int x = 0;
for (int frameIndex = 0; frameIndex < readSize; frameIndex++) {
for (int c = 0; c < 1; c++) {
if (iGain != 1) {
long accumulator = 0;
for (int b = 0; b < bytesPerSample; b++) {
accumulator += ((long) (mBuffer[byteIndex++] & 0xFF)) << (b * 8 + emptySpace);
}
double sample = ((double) accumulator / (double) Long.MAX_VALUE);
sample *= iGain;
int intValue = (int) ((double) sample * (double) Integer.MAX_VALUE);
for (int i = 0; i < bytesPerSample; i++) {
mBuffer[i + byteIndex2] = (byte) (intValue >>> ((i + 2) * 8) & 0xff);
}
byteIndex2 += bytesPerSample;
}
}// end for(channel)
// mBuffer[frameIndex] *=iGain;
if (mBuffer[frameIndex] > 32765) {
mBuffer[frameIndex] = 32767;
} else if (mBuffer[frameIndex] < -32767) {
mBuffer[frameIndex] = -32767;
}
output.writeShort(mBuffer[frameIndex]);
sum += mBuffer[frameIndex] * mBuffer[frameIndex];
}
if (readSize > 0) {
final double amplitude = sum / readSize;
mProgressBar.setProgress((int) Math.sqrt(amplitude));
}
}
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
} finally {
mProgressBar.setProgress(0);
if (output != null) {
try {
output.flush();
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
} finally {
try {
output.close();
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
}
}
}
}
}
}).start();
}
private void rawToWave(final File rawFile, final File waveFile)
throws IOException {
byte[] rawData = new byte[(int) rawFile.length()];
DataInputStream input = null;
try {
input = new DataInputStream(new FileInputStream(rawFile));
input.readFully(rawData); // read() may return fewer bytes; readFully fills the whole array
} finally {
if (input != null) {
input.close();
}
}
DataOutputStream output = null;
try {
output = new DataOutputStream(new FileOutputStream(waveFile));
// WAVE header
// see http://ccrma.stanford.edu/courses/422/projects/WaveFormat/
writeString(output, "RIFF"); // chunk id
writeInt(output, 36 + rawData.length); // chunk size
writeString(output, "WAVE"); // format
writeString(output, "fmt "); // subchunk 1 id
writeInt(output, 16); // subchunk 1 size
writeShort(output, (short) 1); // audio format (1 = PCM)
writeShort(output, (short) 1); // number of channels
writeInt(output, SAMPLE_RATE); // sample rate
writeInt(output, SAMPLE_RATE * 2); // byte rate
writeShort(output, (short) 2); // block align
writeShort(output, (short) 16); // bits per sample
writeString(output, "data"); // subchunk 2 id
writeInt(output, rawData.length); // subchunk 2 size
// Audio data (conversion big endian -> little endian)
short[] shorts = new short[rawData.length / 2];
ByteBuffer.wrap(rawData).order(ByteOrder.LITTLE_ENDIAN)
.asShortBuffer().get(shorts);
ByteBuffer bytes = ByteBuffer.allocate(shorts.length * 2);
for (short s : shorts) {
// Apply Gain
/*
* s *= iGain; if(s>32767) { s=32767; } else if(s<-32768) {
* s=-32768; }
*/
bytes.putShort(s);
}
output.write(bytes.array());
} finally {
if (output != null) {
output.close();
}
}
}
private File getFile(final String suffix) {
Time time = new Time();
time.setToNow();
return new File(Environment.getExternalStorageDirectory(),
time.format("%Y%m%d%H%M%S") + "." + suffix);
}
private void writeInt(final DataOutputStream output, final int value)
throws IOException {
output.write(value >> 0);
output.write(value >> 8);
output.write(value >> 16);
output.write(value >> 24);
}
private void writeShort(final DataOutputStream output, final short value)
throws IOException {
output.write(value >> 0);
output.write(value >> 8);
}
private void writeString(final DataOutputStream output, final String value)
throws IOException {
for (int i = 0; i < value.length(); i++) {
output.write(value.charAt(i));
}
}
}

How to record audio using AudioRecorder in Android

I want to capture audio from an Android device. My code below seems to successfully create a WAV file on the SD card, but it cannot be played. I tried to play it with several different media players, but none of them work, so there must be an issue in my code causing this.
Code:
public class MainActivity extends ActionBarActivity {
private static final String LOG_TAG = "AudioRecordTest";
static final int AUDIO_PORT = 2048;
static final int SAMPLE_RATE = 8000;
static final int SAMPLE_INTERVAL = 20; // milliseconds
static final int SAMPLE_SIZE = 2; // bytes per sample
static final int BUF_SIZE = SAMPLE_INTERVAL * SAMPLE_INTERVAL * SAMPLE_SIZE * 2;
private static int[] mSampleRates = new int[]{44100, 44056, 47250, 48000, 22050, 16000, 11025, 8000};
private Thread recordingThread = null;
private boolean isRecording = false;
int BufferElements2Rec = 1024; // 1024 shorts per read = 2048 bytes at 2 bytes per 16-bit sample
int BytesPerElement = 2; // 2 bytes in 16bit format
private int bufferSize;
private AudioRecord recorder;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
startRecording();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
private void startRecording() {
recorder = findAudioRecord();
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
// convert short to byte
private byte[] short2byte(short[] sData) {
int shortArrsize = sData.length;
byte[] bytes = new byte[shortArrsize * 2];
for (int i = 0; i < shortArrsize; i++) {
bytes[i * 2] = (byte) (sData[i] & 0x00FF);
bytes[(i * 2) + 1] = (byte) (sData[i] >> 8);
sData[i] = 0;
}
return bytes;
}
public AudioRecord findAudioRecord() {
for (int rate : mSampleRates) {
for (short audioFormat : new short[]{AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT}) {
for (short channelConfig : new short[]{AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_IN_STEREO}) {
try {
Log.d(LOG_TAG, "Attempting rate " + rate + "Hz, bits: " + audioFormat + ", channel: "
+ channelConfig);
bufferSize = AudioRecord.getMinBufferSize(rate, channelConfig, audioFormat);
if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
// check if we can instantiate and have a success
recorder = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, rate, channelConfig, audioFormat, bufferSize);
if (recorder.getState() == AudioRecord.STATE_INITIALIZED)
return recorder;
}
} catch (Exception e) {
Log.e(LOG_TAG, rate + "Exception, keep trying.", e);
}
}
}
}
return null;
}
private void writeAudioDataToFile() {
/*// Write the output audio in byte
short sData[] = new short[BufferElements2Rec];
while (isRecording) {
// gets the voice output from microphone to byte format
recorder.read(sData, 0, BufferElements2Rec);
System.out.println("Short wirting to file" + sData.toString());
// // stores the voice buffer
byte bData[] = short2byte(sData);
sendLiveAudio(bData);
}*/
String filePath = "/sdcard/test.wav";
short sData[] = new short[bufferSize / 2];
FileOutputStream os = null;
try {
os = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
while (isRecording) {
// gets the voice output from microphone to byte format
recorder.read(sData, 0, bufferSize / 2);
Log.d("eray", "Short wirting to file" + sData.toString());
try {
// // writes the data to file from buffer
// // stores the voice buffer
byte bData[] = short2byte(sData);
os.write(bData, 0, bufferSize);
} catch (IOException e) {
e.printStackTrace();
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
private void stopRecording() {
// stops the recording activity
if (null != recorder) {
isRecording = false;
recorder.stop();
recorder.release();
recorder = null;
recordingThread = null;
}
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
@Override
public void onBackPressed() {
super.onBackPressed();
stopRecording();
}
}
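A side note on the code above: writeAudioDataToFile() writes bare PCM samples with no RIFF/WAVE header, so naming the file test.wav does not make it a playable WAV; a 44-byte header like the one in the rawToWave() methods elsewhere on this page has to be prepended. One quick way to verify the capture itself is to play the raw samples back with AudioTrack; a sketch, assuming 16-bit mono and whatever sample rate findAudioRecord() actually selected:
// Sketch: play headerless 16-bit mono PCM through AudioTrack to verify the capture.
// Needs android.media.AudioTrack, android.media.AudioManager and java.io.FileInputStream.
private void playRawPcm(String path, int sampleRate) throws IOException {
    int minBuf = AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, minBuf, AudioTrack.MODE_STREAM);
    track.play();
    FileInputStream in = new FileInputStream(path);
    try {
        byte[] buffer = new byte[minBuf];
        int read;
        while ((read = in.read(buffer)) > 0) {
            track.write(buffer, 0, read);  // blocks until the samples are queued for playback
        }
    } finally {
        in.close();
        track.stop();
        track.release();
    }
}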
Try this:
public class Audio_Record extends Activity {
private static final int RECORDER_SAMPLERATE = 8000;
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_MONO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private AudioRecord recorder = null;
private Thread recordingThread = null;
private boolean isRecording = false;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
setButtonHandlers();
enableButtons(false);
int bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE,
RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
}
private void setButtonHandlers() {
((Button) findViewById(R.id.btnStart)).setOnClickListener(btnClick);
((Button) findViewById(R.id.btnStop)).setOnClickListener(btnClick);
}
private void enableButton(int id, boolean isEnable) {
((Button) findViewById(id)).setEnabled(isEnable);
}
private void enableButtons(boolean isRecording) {
enableButton(R.id.btnStart, !isRecording);
enableButton(R.id.btnStop, isRecording);
}
int BufferElements2Rec = 1024; // 1024 shorts per read = 2048 bytes at 2 bytes per 16-bit sample
int BytesPerElement = 2; // 2 bytes in 16bit format
private void startRecording() {
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
RECORDER_SAMPLERATE, RECORDER_CHANNELS,
RECORDER_AUDIO_ENCODING, BufferElements2Rec * BytesPerElement);
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
//convert short to byte
private byte[] short2byte(short[] sData) {
int shortArrsize = sData.length;
byte[] bytes = new byte[shortArrsize * 2];
for (int i = 0; i < shortArrsize; i++) {
bytes[i * 2] = (byte) (sData[i] & 0x00FF);
bytes[(i * 2) + 1] = (byte) (sData[i] >> 8);
sData[i] = 0;
}
return bytes;
}
private void writeAudioDataToFile() {
// Write the output audio in byte
String filePath = "/sdcard/voice8K16bitmono.pcm";
short sData[] = new short[BufferElements2Rec];
FileOutputStream os = null;
try {
os = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
while (isRecording) {
// gets the voice output from microphone to byte format
recorder.read(sData, 0, BufferElements2Rec);
System.out.println("Short wirting to file" + sData.toString());
try {
// // writes the data to file from buffer
// // stores the voice buffer
byte bData[] = short2byte(sData);
os.write(bData, 0, BufferElements2Rec * BytesPerElement);
} catch (IOException e) {
e.printStackTrace();
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
private void stopRecording() {
// stops the recording activity
if (null != recorder) {
isRecording = false;
recorder.stop();
recorder.release();
recorder = null;
recordingThread = null;
}
}
private View.OnClickListener btnClick = new View.OnClickListener() {
public void onClick(View v) {
switch (v.getId()) {
case R.id.btnStart: {
enableButtons(true);
startRecording();
break;
}
case R.id.btnStop: {
enableButtons(false);
stopRecording();
break;
}
}
}
};
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
finish();
}
return super.onKeyDown(keyCode, event);
}
}

Button.onTouch() not being called

I have a button, btnSpeak, which I find by ID. I then try to set its OnTouchListener:
package com.ctc.android.widget;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetAddress;
import java.net.Socket;
import java.util.ArrayList;
import java.util.List;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaPlayer;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnTouchListener;
import android.widget.Button;
import com.ctc.android.widget.ImageMap.Area;
public class ImageMapTestActivity extends Activity{
/********** Dan's Variables *********/
private static final String LOG_TAG = "AudioRecordTest";
private static String mFileName = null;
private MediaRecorder mRecorder = null;
private MediaPlayer mPlayer = null;
boolean isRecording = false;
boolean isStreaming = false;
private String hostIP = "192.168.0.14";
private static final int IDLE_EMPTY_BUFFER = 0;
private static final int IDLE_BUFFER_READY = 1;
private static final int RECORDING_STATE = 2;
private static final int PREVIEW_STATE = 3;
private static final int PLAYOUT_STATE = 4;
private int currentState = IDLE_EMPTY_BUFFER;
private static InetAddress multicastaddress = null;
private List<String> IPs = new ArrayList<String>();
private ArrayList<InetAddress> SelectedIPs = new ArrayList<InetAddress>();
private boolean listUpdated = false;
/********** ****** *********/
ImageMap mImageMap;
String strCoordinates;
int intSelectedAreaID;
Button btnPlaySiren;
// Button btnSpeak;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
// find the image map in the view
mImageMap = (ImageMap)findViewById(R.id.map);
btnPlaySiren = (Button)findViewById(R.id.btnPlaySiren);
Button btnSpeak = (Button)findViewById(R.id.btnSpeak);
btnSpeak.setOnTouchListener(new OnTouchListener(){
public boolean onTouch(View v, MotionEvent event){
switch(event.getAction())
{
case MotionEvent.ACTION_DOWN:
startStreamingRun();
break;
case MotionEvent.ACTION_UP:
stopStreamingRun();
break;
}
return true;
}
});
// add a click handler to react when areas are tapped
mImageMap.addOnImageMapClickedHandler(new ImageMap.OnImageMapClickedHandler() {
@Override
public void onImageMapClicked(int id) {
// when the area is tapped, show the name in a
// text bubble
intSelectedAreaID = id;
mImageMap.showBubble(id);
// TODO: Change colour of selected Area.
// TODO: Change state of selected Area.
}
@Override
public void onBubbleClicked(int id) {
// react to info bubble for area being tapped
}
});
// TODO: Set bitmap as Area decoration for each area added.
// TODO: Add an onClickListener for each area here.
ArrayList<Area> mAreas = mImageMap.GetAllAreas();
for(Area objArea : mAreas){
Bitmap objBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.remvox_green);
objArea.setBitmap(objBitmap);
Log.v("Area ID: :", Integer.toString(objArea._id)); // 07-30 14:40:05.409: V/Area ID: :(28449): 2131230738
} // End of for(Area...)
} // End of onCreate(Bundle ...)
public void btnPlaySirenClicked(View v){
if(v.getId() == R.id.btnPlaySiren){
Log.v("Siren Button Clicked", "Playing siren");
Thread thread = new Thread(new Runnable() {
public void run() {
playOverSocket(); // To resolve "Network on main thread" error!
}
});
thread.start();
}
}
private void playOverSocket() {
Log.e("AudioRecord", "Top of network play");
/*****
// Get the file we want to playback.
File file = new File("android.resource://com.ctc.android.widget.ImageMapTestActivity/res/raw/british"); // Do not add extension (file type)
File objSirenFile = new File(Environment.getExternalStorageDirectory().getAbsolutePath() + "/reverseme.pcm");
// Get the length of the audio stored in the file (16 bit so 2 bytes per
// short)
// and create a short array to store the recorded audio.
int musicLength = (int) (file.length() / 2);
// short[] music = new short[musicLength];
* *****/
try {
// Create a DataInputStream to read the audio data back from the
// saved file.
InputStream is = getResources().openRawResource(R.raw.british);
//InputStream objSiren = new FileInputStream(file);
BufferedInputStream bis = new BufferedInputStream(is);
DataInputStream dis = new DataInputStream(bis);
Socket s = null;
try {
s = new Socket("192.168.1.101", 6666); // hostIP - Hard coded until implemented.
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
BufferedOutputStream buff = new BufferedOutputStream(
s.getOutputStream()); // out1 is the socket's outputStream
DataOutputStream dataOutputStreamInstance = new DataOutputStream(
buff);
// Read the file into the music array.
short lastByte = dis.readShort();
while(lastByte != -1)
{
dataOutputStreamInstance.writeChar(lastByte);
lastByte = dis.readShort();
}
dataOutputStreamInstance.flush();
dataOutputStreamInstance.close();
buff.flush();
buff.close();
s.close();
// Close the input streams.
dis.close();
} catch (Throwable t) {
Log.e("AudioTrack", "Playback Failed" + t.getMessage());
}
}
public void btnRecordClicked(View v){
//Button btnStopRecordToggle = (Button) findViewById(R.id.btnRecord);
if(v.getId() == R.id.btnRecord){
Log.v("Record Button Clicked", "Recording");
Thread thread = new Thread(new Runnable() {
public void run() {
record();
}
});
isRecording = true;
thread.start();
}
}
private void record() {
Log.v("AudioRecord", "Top of Record");
int frequency = 11025;
int channelConfiguration = AudioFormat.CHANNEL_IN_MONO;
int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
File file = new File(Environment.getExternalStorageDirectory()
.getAbsolutePath() + "/reverseme.pcm");
// Delete any previous recording.
if (file.exists())
file.delete();
// Create the new file.
try {
file.createNewFile();
} catch (IOException e) {
throw new IllegalStateException("Failed to create "
+ file.toString());
}
try {
// Create a DataOuputStream to write the audio data into the saved
// file.
OutputStream os = new FileOutputStream(file);
BufferedOutputStream bos = new BufferedOutputStream(os);
DataOutputStream dos = new DataOutputStream(bos);
// Create a new AudioRecord object to record the audio.
int bufferSize = 8 * 1024;// AudioRecord.getMinBufferSize(frequency,
// channelConfiguration, audioEncoding);
AudioRecord audioRecord = new AudioRecord(
MediaRecorder.AudioSource.MIC, frequency,
channelConfiguration, audioEncoding, bufferSize);
short[] buffer = new short[2048];
audioRecord.startRecording();
while (isRecording) {
int bufferReadResult = audioRecord.read(buffer, 0, 2048);
for (int i = 0; i < bufferReadResult; i++)
dos.writeShort(buffer[i]);
}
audioRecord.stop();
dos.close();
Log.e("AudioRecord", "Recording Finished");
} catch (Throwable t) {
Log.e("AudioRecord", "Recording Failed");
Log.e("AudioRecord Error", t.getLocalizedMessage());
}
}
private void playRecordedOverSocket(InetAddress objAddress) {
Log.e("AudioRecord", "Top of network play recorded");
// Get the file we want to playback.
File file = new File(Environment.getExternalStorageDirectory()
.getAbsolutePath() + "/reverseme.pcm");
// Get the length of the audio stored in the file (16 bit so 2 bytes per
// short)
// and create a short array to store the recorded audio.
int musicLength = (int) (file.length() / 2);
// short[] music = new short[musicLength];
try {
// Create a DataInputStream to read the audio data back from the
// saved file.
InputStream is = new FileInputStream(file);
BufferedInputStream bis = new BufferedInputStream(is);
DataInputStream dis = new DataInputStream(bis);
Socket s = new Socket(objAddress, 6666);
BufferedOutputStream buff = new BufferedOutputStream(
s.getOutputStream()); // out1 is the socket's outputStream
DataOutputStream dataOutputStreamInstance = new DataOutputStream(
buff);
// Read the file into the music array.
for (int i = 0; i < musicLength; i++) {
// music[i] =
dataOutputStreamInstance.writeChar(dis.readShort());
}
dataOutputStreamInstance.flush();
dataOutputStreamInstance.close();
buff.flush();
buff.close();
s.close();
// Close the input streams.
dis.close();
} catch (Throwable t) {
Log.e("AudioTrack", "Playback Failed" + t.getMessage());
}
}
public void btnRecordedMessageClicked(View v)
{
for(final InetAddress oneSpeaker : mImageMap.arrSelectedAddresses)
{
Thread thread = new Thread(new Runnable() {
public void run() {
playRecordedOverSocket(oneSpeaker);
}
});
thread.start();
}
}
public void btnStopClicked(View v)
{
isRecording = false;
}
public void btnSelectAllClicked(View v)
{
for(Area objOneArea : mImageMap.mAreaList)
{
objOneArea.blnIsSelected = false;
objOneArea.touched(SelectedIPs);
}
mImageMap.mBubbleMap.clear();
mImageMap.invalidate();
}
public void btnCallClicked(View v)
{
}
/*
*
* Multi-Cast Streaming implementation
*
*/
private void startStreamingRun() {
Thread thread = new Thread(new Runnable() {
public void run() {
doTestStream();
//doMCastStream();
}
});
// isStreaming = true;
thread.start();
}
private void stopStreamingRun() {
isStreaming = false;
}
private void doTestStream() {
int frequency = 11025;
int channelConfiguration = AudioFormat.CHANNEL_IN_MONO;
int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
try {
// Create a DataOuputStream to write the audio data into the saved
// file.
Socket s = new Socket(hostIP, 6666);
BufferedOutputStream bos = new BufferedOutputStream(
s.getOutputStream()); // out1 is the socket's outputStream
DataOutputStream dos = new DataOutputStream(bos);
// Create a new AudioRecord object to record the audio.
int bufferSize = 8 * 1024;// AudioRecord.getMinBufferSize(frequency,
// channelConfiguration, audioEncoding);
AudioRecord audioRecord = new AudioRecord(
MediaRecorder.AudioSource.MIC, frequency,
channelConfiguration, audioEncoding, bufferSize);
//short[] buffer = new short[bufferSize];
byte[] buffer = new byte[bufferSize];
audioRecord.startRecording();
byte[] MiniBuff = new byte[2];
while (isStreaming) {
int bufferReadResult = audioRecord.read(buffer, 0, bufferSize);
for (int i = 0; i < bufferReadResult; i++) {
MiniBuff[1] = buffer[i];
i++;
MiniBuff[0] = buffer[i];
dos.write(MiniBuff);
}
/*
for (int i = 0; i < bufferReadResult; i++) {
short val=(short)( ((buffer[i+1]&0xFF)<<8) | (buffer[i]&0xFF) );
i++;
dos.writeShort(val);
}
*/
//dos.write(buffer[i]);
dos.flush();
}
dos.flush();
dos.close();
s.close();
// Close the input streams.
audioRecord.stop();
Log.e("AudioRecord", "Streaming Finished");
} catch (Throwable t) {
Log.e("AudioRecord", "Streaming Failed");
Log.e("AudioRecord Error", t.getLocalizedMessage());
}
}
} // End of class
This is done in the onCreate method of the activity, but when the activity runs and the button is touched, the event is never triggered and I can't see where I am going wrong. Not sure if it's a case of not being able to see the wood for the trees.
I copied your class, commenting out the parts I don't have (owner classes, your own files...).
I made a test main.xml and everything works. Have you tried a Project > Clean? Maybe the ID of your btnSpeak is not up to date.
main.xml:
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical"
android:layout_width="fill_parent"
android:layout_height="fill_parent">
<Button android:id="#+id/btnPlaySiren" android:layout_width="wrap_content" android:layout_height="wrap_content" android:text="Siren" />
<Button android:id="#+id/btnSpeak" android:layout_width="wrap_content" android:layout_height="wrap_content" android:text="Speak" />
<Button android:id="#+id/btnRecord" android:layout_width="wrap_content" android:layout_height="wrap_content" android:text="Record" />
</LinearLayout>

Recording .Wav with Android AudioRecorder

I have read a lot of pages about Android's AudioRecorder. You can see a list of them below the question.
I'm trying to record audio with AudioRecorder, but it's not working well.
public class MainActivity extends Activity {
AudioRecord ar = null;
int buffsize = 0;
int blockSize = 256;
boolean isRecording = false;
private Thread recordingThread = null;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
}
public void baslat(View v)
{
// when click to START
buffsize = AudioRecord.getMinBufferSize(44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
ar = new AudioRecord(MediaRecorder.AudioSource.MIC, 44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, buffsize);
ar.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
public void durdur(View v)
{
// When click to STOP
ar.stop();
isRecording = false;
}
private void writeAudioDataToFile() {
// Write the output audio in byte
String filePath = "/sdcard/voice8K16bitmono.wav";
short sData[] = new short[buffsize/2];
FileOutputStream os = null;
try {
os = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
while (isRecording) {
// gets the voice output from microphone to byte format
ar.read(sData, 0, buffsize/2);
Log.d("eray","Short wirting to file" + sData.toString());
try {
// // writes the data to file from buffer
// // stores the voice buffer
byte bData[] = short2byte(sData);
os.write(bData, 0, buffsize);
} catch (IOException e) {
e.printStackTrace();
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
private byte[] short2byte(short[] sData) {
int shortArrsize = sData.length;
byte[] bytes = new byte[shortArrsize * 2];
for (int i = 0; i < shortArrsize; i++) {
bytes[i * 2] = (byte) (sData[i] & 0x00FF);
bytes[(i * 2) + 1] = (byte) (sData[i] >> 8);
sData[i] = 0;
}
return bytes;
}
It creates a .wav file, but when I try to listen to it, it won't open; I get a "file not supported" error. I've tried to play the file with quite a few media player applications.
NOTE: I have to use AudioRecorder instead of MediaRecorder because my app will be doing other processing while recording (displaying an equalizer).
Here is the list of pages that I've read about this subject:
http://developer.android.com/reference/android/media/AudioRecord.html#read(short[],%20int,%20int)
Android AudioRecord example
http://audiorecordandroid.blogspot.in
AudioRecord object not initializing
Recording a wav file from the mic in Android - problems
http://i-liger.com/article/android-wav-audio-recording
Creating a WAV file from raw PCM data using the Android SDK
Capturing Sound for Analysis and Visualizing Frequencies in Android
There are a lot of different ways to go about this. I've tried lots of them but nothing works for me. I've been working on this problem for about 6 hours now so I would appreciate a definitive answer, ideally some sample code.
I wrote a simple (by which you should read, not to professional standards) class to do this yesterday, and it works.
private class Wave {
private final int LONGINT = 4;
private final int SMALLINT = 2;
private final int INTEGER = 4;
private final int ID_STRING_SIZE = 4;
private final int WAV_RIFF_SIZE = LONGINT + ID_STRING_SIZE;
private final int WAV_FMT_SIZE = (4 * SMALLINT) + (INTEGER * 2) + LONGINT + ID_STRING_SIZE;
private final int WAV_DATA_SIZE = ID_STRING_SIZE + LONGINT;
private final int WAV_HDR_SIZE = WAV_RIFF_SIZE + ID_STRING_SIZE + WAV_FMT_SIZE + WAV_DATA_SIZE;
private final short PCM = 1;
private final int SAMPLE_SIZE = 2;
int cursor, nSamples;
byte[] output;
public Wave(int sampleRate, short nChannels, short[] data, int start, int end) {
nSamples = end - start + 1;
cursor = 0;
output = new byte[nSamples * SMALLINT + WAV_HDR_SIZE];
buildHeader(sampleRate, nChannels);
writeData(data, start, end);
}
// ------------------------------------------------------------
private void buildHeader(int sampleRate, short nChannels) {
write("RIFF");
write(output.length);
write("WAVE");
writeFormat(sampleRate, nChannels);
}
// ------------------------------------------------------------
public void writeFormat(int sampleRate, short nChannels) {
write("fmt ");
write(WAV_FMT_SIZE - WAV_DATA_SIZE);
write(PCM);
write(nChannels);
write(sampleRate);
write(nChannels * sampleRate * SAMPLE_SIZE);
write((short) (nChannels * SAMPLE_SIZE));
write((short) 16);
}
// ------------------------------------------------------------
public void writeData(short[] data, int start, int end) {
write("data");
write(nSamples * SMALLINT);
for (int i = start; i <= end; write(data[i++])) ;
}
// ------------------------------------------------------------
private void write(byte b) {
output[cursor++] = b;
}
// ------------------------------------------------------------
private void write(String id) {
if (id.length() != ID_STRING_SIZE)
Utils.logError("String " + id + " must have four characters.");
else {
for (int i = 0; i < ID_STRING_SIZE; ++i) write((byte) id.charAt(i));
}
}
// ------------------------------------------------------------
private void write(int i) {
write((byte) (i & 0xFF));
i >>= 8;
write((byte) (i & 0xFF));
i >>= 8;
write((byte) (i & 0xFF));
i >>= 8;
write((byte) (i & 0xFF));
}
// ------------------------------------------------------------
private void write(short i) {
write((byte) (i & 0xFF));
i >>= 8;
write((byte) (i & 0xFF));
}
// ------------------------------------------------------------
public boolean wroteToFile(String filename) {
boolean ok = false;
try {
File path = new File(getFilesDir(), filename);
FileOutputStream outFile = new FileOutputStream(path);
outFile.write(output);
outFile.close();
ok = true;
} catch (FileNotFoundException e) {
e.printStackTrace();
ok = false;
} catch (IOException e) {
ok = false;
e.printStackTrace();
}
return ok;
}
}
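A usage sketch (not part of the original answer), assuming 16-bit mono samples already captured into a short[] at 44.1 kHz, and that this runs inside the activity declaring the inner Wave class, since wroteToFile() saves under getFilesDir():
// Hypothetical usage of the Wave class above.
short[] samples = new short[44100];                               // one second of mono audio from AudioRecord
Wave wave = new Wave(44100, (short) 1, samples, 0, samples.length - 1);
boolean ok = wave.wroteToFile("recording.wav");                   // written under getFilesDir()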
Hope this helps
PcmAudioHelper solved my problem. I'll modify this answer and explain it, but first I have to run some tests on this class.
You might find OMRECORDER helpful for recording in .WAV format.
In case .aac works for you, check out this WhatsappAudioRecorder:
On startRecording button click:
Initialise a new thread.
Create a file with the .aac extension.
Create an output stream for the file.
Set the output.
Set the listener and execute the thread.
On stop click:
Interrupt the thread and the audio will be saved in the file.
Here is the full gist for reference:
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.os.Build;
import android.util.Log;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
public class AudioRecordThread implements Runnable {
private static final String TAG = AudioRecordThread.class.getSimpleName();
private static final int SAMPLE_RATE = 44100;
private static final int SAMPLE_RATE_INDEX = 4;
private static final int CHANNELS = 1;
private static final int BIT_RATE = 32000;
private final int bufferSize;
private final MediaCodec mediaCodec;
private final AudioRecord audioRecord;
private final OutputStream outputStream;
private OnRecorderFailedListener onRecorderFailedListener;
AudioRecordThread(OutputStream outputStream, OnRecorderFailedListener onRecorderFailedListener) throws IOException {
this.bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
this.audioRecord = createAudioRecord(this.bufferSize);
this.mediaCodec = createMediaCodec(this.bufferSize);
this.outputStream = outputStream;
this.onRecorderFailedListener = onRecorderFailedListener;
this.mediaCodec.start();
try {
audioRecord.startRecording();
} catch (Exception e) {
Log.w(TAG, e);
mediaCodec.release();
throw new IOException(e);
}
}
@Override
public void run() {
if (onRecorderFailedListener != null) {
Log.d(TAG, "onRecorderStarted");
onRecorderFailedListener.onRecorderStarted();
}
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
ByteBuffer[] codecInputBuffers = mediaCodec.getInputBuffers();
ByteBuffer[] codecOutputBuffers = mediaCodec.getOutputBuffers();
try {
while (!Thread.interrupted()) {
boolean success = handleCodecInput(audioRecord, mediaCodec, codecInputBuffers, Thread.currentThread().isAlive());
if (success)
handleCodecOutput(mediaCodec, codecOutputBuffers, bufferInfo, outputStream);
}
} catch (IOException e) {
Log.w(TAG, e);
} finally {
mediaCodec.stop();
audioRecord.stop();
mediaCodec.release();
audioRecord.release();
try {
outputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
private boolean handleCodecInput(AudioRecord audioRecord,
MediaCodec mediaCodec, ByteBuffer[] codecInputBuffers,
boolean running) throws IOException {
byte[] audioRecordData = new byte[bufferSize];
int length = audioRecord.read(audioRecordData, 0, audioRecordData.length);
if (length == AudioRecord.ERROR_BAD_VALUE ||
length == AudioRecord.ERROR_INVALID_OPERATION ||
length != bufferSize) {
if (length != bufferSize) {
if (onRecorderFailedListener != null) {
Log.d(TAG, "length != BufferSize calling onRecordFailed");
onRecorderFailedListener.onRecorderFailed();
}
return false;
}
}
int codecInputBufferIndex = mediaCodec.dequeueInputBuffer(10 * 1000);
if (codecInputBufferIndex >= 0) {
ByteBuffer codecBuffer = codecInputBuffers[codecInputBufferIndex];
codecBuffer.clear();
codecBuffer.put(audioRecordData);
mediaCodec.queueInputBuffer(codecInputBufferIndex, 0, length, 0, running ? 0 : MediaCodec.BUFFER_FLAG_END_OF_STREAM);
}
return true;
}
private void handleCodecOutput(MediaCodec mediaCodec,
ByteBuffer[] codecOutputBuffers,
MediaCodec.BufferInfo bufferInfo,
OutputStream outputStream)
throws IOException {
int codecOutputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
while (codecOutputBufferIndex != MediaCodec.INFO_TRY_AGAIN_LATER) {
if (codecOutputBufferIndex >= 0) {
ByteBuffer encoderOutputBuffer = codecOutputBuffers[codecOutputBufferIndex];
encoderOutputBuffer.position(bufferInfo.offset);
encoderOutputBuffer.limit(bufferInfo.offset + bufferInfo.size);
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != MediaCodec.BUFFER_FLAG_CODEC_CONFIG) {
byte[] header = createAdtsHeader(bufferInfo.size - bufferInfo.offset);
outputStream.write(header);
byte[] data = new byte[encoderOutputBuffer.remaining()];
encoderOutputBuffer.get(data);
outputStream.write(data);
}
encoderOutputBuffer.clear();
mediaCodec.releaseOutputBuffer(codecOutputBufferIndex, false);
} else if (codecOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
codecOutputBuffers = mediaCodec.getOutputBuffers();
}
codecOutputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
}
}
private byte[] createAdtsHeader(int length) {
int frameLength = length + 7;
byte[] adtsHeader = new byte[7];
adtsHeader[0] = (byte) 0xFF; // Sync Word
adtsHeader[1] = (byte) 0xF1; // MPEG-4, Layer (0), No CRC
adtsHeader[2] = (byte) ((MediaCodecInfo.CodecProfileLevel.AACObjectLC - 1) << 6);
adtsHeader[2] |= (((byte) SAMPLE_RATE_INDEX) << 2);
adtsHeader[2] |= (((byte) CHANNELS) >> 2);
adtsHeader[3] = (byte) (((CHANNELS & 3) << 6) | ((frameLength >> 11) & 0x03));
adtsHeader[4] = (byte) ((frameLength >> 3) & 0xFF);
adtsHeader[5] = (byte) (((frameLength & 0x07) << 5) | 0x1f);
adtsHeader[6] = (byte) 0xFC;
return adtsHeader;
}
private AudioRecord createAudioRecord(int bufferSize) {
AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT, bufferSize * 10);
if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
Log.d(TAG, "Unable to initialize AudioRecord");
throw new RuntimeException("Unable to initialize AudioRecord");
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
if (android.media.audiofx.NoiseSuppressor.isAvailable()) {
android.media.audiofx.NoiseSuppressor noiseSuppressor = android.media.audiofx.NoiseSuppressor
.create(audioRecord.getAudioSessionId());
if (noiseSuppressor != null) {
noiseSuppressor.setEnabled(true);
}
}
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
if (android.media.audiofx.AutomaticGainControl.isAvailable()) {
android.media.audiofx.AutomaticGainControl automaticGainControl = android.media.audiofx.AutomaticGainControl
.create(audioRecord.getAudioSessionId());
if (automaticGainControl != null) {
automaticGainControl.setEnabled(true);
}
}
}
return audioRecord;
}
private MediaCodec createMediaCodec(int bufferSize) throws IOException {
MediaCodec mediaCodec = MediaCodec.createEncoderByType("audio/mp4a-latm");
MediaFormat mediaFormat = new MediaFormat();
mediaFormat.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
mediaFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, SAMPLE_RATE);
mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, CHANNELS);
mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, bufferSize);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
mediaFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
try {
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
} catch (Exception e) {
Log.w(TAG, e);
mediaCodec.release();
throw new IOException(e);
}
return mediaCodec;
}
interface OnRecorderFailedListener {
void onRecorderFailed();
void onRecorderStarted();
}
}
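A minimal wiring sketch (file location and listener body are assumptions, and exception handling for the constructor's IOException is omitted): run the runnable on its own thread, and interrupt that thread to stop, which lets the finally block in run() flush and release everything:
// Hypothetical usage of AudioRecordThread.
File out = new File(getExternalFilesDir(null), "recording.aac");
AudioRecordThread recorderRunnable = new AudioRecordThread(new FileOutputStream(out),
        new AudioRecordThread.OnRecorderFailedListener() {
            @Override public void onRecorderStarted() { Log.d("Recorder", "started"); }
            @Override public void onRecorderFailed() { Log.d("Recorder", "failed"); }
        });
Thread recorderThread = new Thread(recorderRunnable);
recorderThread.start();       // encodes AAC/ADTS frames to the output stream

// ... later, on the stop button:
recorderThread.interrupt();   // the run() loop exits and releases the codec and recorder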
I would add this as a comment but I don't yet have enough Stack Overflow rep points...
Opiatefuchs's link takes you to sample code that shows you the exact header formatting necessary to create a .wav file. I've been all over that code myself. Very helpful.
First, you need to know that a WAV file has a format header, so you can't just write the raw data into a .wav file.
Second, the WAV header includes the length of the file, so you need to write the header after recording.
My solution is: use AudioRecord to record to a PCM file.
byte[] audiodata = new byte[bufferSizeInBytes];
FileOutputStream fos = null;
int readsize = 0;
try {
fos = new FileOutputStream(pcmFileName, true);
} catch (FileNotFoundException e) {
Log.e("AudioRecorder", e.getMessage());
}
status = Status.STATUS_START;
while (status == Status.STATUS_START && audioRecord != null) {
readsize = audioRecord.read(audiodata, 0, bufferSizeInBytes);
if (AudioRecord.ERROR_INVALID_OPERATION != readsize && fos != null) {
try {
if (readsize > 0 && readsize <= audiodata.length) {
fos.write(audiodata, 0, readsize);
}
} catch (IOException e) {
Log.e("AudioRecorder", e.getMessage());
}
}
}
try {
if (fos != null) {
fos.close();
}
} catch (IOException e) {
Log.e("AudioRecorder", e.getMessage());
}
Then convert it to a WAV file:
byte buffer[] = null;
int TOTAL_SIZE = 0;
File file = new File(pcmPath);
if (!file.exists()) {
return false;
}
TOTAL_SIZE = (int) file.length();
WaveHeader header = new WaveHeader();
header.fileLength = TOTAL_SIZE + (44 - 8);
header.FmtHdrLeth = 16;
header.BitsPerSample = 16;
header.Channels = 1;
header.FormatTag = 0x0001;
header.SamplesPerSec = 8000;
header.BlockAlign = (short) (header.Channels * header.BitsPerSample / 8);
header.AvgBytesPerSec = header.BlockAlign * header.SamplesPerSec;
header.DataHdrLeth = TOTAL_SIZE;
byte[] h = null;
try {
h = header.getHeader();
} catch (IOException e1) {
Log.e("PcmToWav", e1.getMessage());
return false;
}
if (h.length != 44)
return false;
File destfile = new File(destinationPath);
if (destfile.exists())
destfile.delete();
try {
buffer = new byte[1024 * 4]; // Length of All Files, Total Size
InputStream inStream = null;
OutputStream ouStream = null;
ouStream = new BufferedOutputStream(new FileOutputStream(
destinationPath));
ouStream.write(h, 0, h.length);
inStream = new BufferedInputStream(new FileInputStream(file));
int size = inStream.read(buffer);
while (size != -1) {
ouStream.write(buffer, 0, size); // write only the bytes actually read, not the whole buffer
size = inStream.read(buffer);
}
inStream.close();
ouStream.close();
} catch (FileNotFoundException e) {
Log.e("PcmToWav", e.getMessage());
return false;
} catch (IOException ioe) {
Log.e("PcmToWav", ioe.getMessage());
return false;
}
if (deletePcmFile) {
file.delete();
}
Log.i("PcmToWav", "makePCMFileToWAVFile success!" + new SimpleDateFormat("yyyy-MM-dd hh:mm").format(new Date()));
return true;
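The WaveHeader class used above is not shown in this answer. A stand-in sketch of what its getHeader() presumably produces, assuming the standard 44-byte canonical PCM header built from the same field values:
// Hypothetical stand-in for WaveHeader.getHeader(): the canonical 44-byte
// little-endian PCM WAV header, using the same fields as set above.
static byte[] buildWavHeader(int dataLength, int sampleRate, short channels, short bitsPerSample) {
    short blockAlign = (short) (channels * bitsPerSample / 8);
    int byteRate = blockAlign * sampleRate;
    java.nio.ByteBuffer b = java.nio.ByteBuffer.allocate(44).order(java.nio.ByteOrder.LITTLE_ENDIAN);
    b.put("RIFF".getBytes()).putInt(dataLength + 36).put("WAVE".getBytes());
    b.put("fmt ".getBytes()).putInt(16)             // fmt chunk size for PCM
     .putShort((short) 1)                           // format tag: PCM
     .putShort(channels).putInt(sampleRate)
     .putInt(byteRate).putShort(blockAlign).putShort(bitsPerSample);
    b.put("data".getBytes()).putInt(dataLength);    // data chunk size
    return b.array();
}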
