I'm trying to generate a wav file with ulaw encoding from raw pcm data.
I have tried some of the solutions I found on Google and here, but I can't seem to make the audio play back cleanly. It's noisy, with a sort of screeching sound; maybe the audio is playing too fast, I'm not sure.
So, here is the code that I have tried so far.
I have an AudioRecorder.java that handles recording and saving the data as a raw PCM file, converting the raw PCM data to ulaw-encoded data, creating the WAV header, and then combining both streams of data and saving them as one .wav file.
public class AudioRecorder {
private static final String TAG = "AudioRecorder";
private int audioInput = MediaRecorder.AudioSource.MIC;
private int audioSampleRate = 8000; //frequency which ranges from 8K Hz to 44.1K Hz
private int audioChannel = AudioFormat.CHANNEL_IN_MONO;
private int audioEncode = AudioFormat.ENCODING_PCM_16BIT;
private int bufferSizeInBytes = 0;
private AudioRecord audioRecord;
private Status status = Status.STATUS_NO_READY;
protected String pcmFileName;
private int currentPosition = 0;
private int lastVolumn = 0;
private FileOutputStream fosPcm = null;
public AudioRecorder() {
pcmFileName = AudioFileUtils.getPcmFileAbsolutePath(RECORDED_FILE_NAME);
status = Status.STATUS_READY;
}
public void setAudioInput(int audioInput) {
this.audioInput = audioInput;
}
public void setAudioSampleRate(int audioSampleRate) {
this.audioSampleRate = audioSampleRate;
}
public void setAudioChannel(int audioChannel) {
this.audioChannel = audioChannel;
}
/**
* This method is to start recording using AudioRecord, also has NoiseSuppressor and AutomaticGainControl enabled
*/
public void startRecord() {
bufferSizeInBytes = AudioRecord.getMinBufferSize(audioSampleRate,
audioChannel, audioEncode);
audioRecord = new AudioRecord(audioInput, audioSampleRate, audioChannel, audioEncode, bufferSizeInBytes);
if (status == Status.STATUS_NO_READY) {
throw new IllegalStateException("not init");
}
if (status == Status.STATUS_START) {
throw new IllegalStateException("is recording ");
}
Log.d("AudioRecorder", "===startRecord===" + audioRecord.getState());
audioRecord.startRecording();
new Thread(new Runnable() {
@Override
public void run() {
NoiseSuppressor noiseSuppressor = NoiseSuppressor.create(audioRecord.getAudioSessionId());
if (noiseSuppressor != null) {
noiseSuppressor.setEnabled(true);
}
AutomaticGainControl automaticGainControl = AutomaticGainControl.create(audioRecord.getAudioSessionId());
if (automaticGainControl != null) {
automaticGainControl.setEnabled(true);
}
recordToFile();
}
}).start();
}
public void stop() {
if (status != Status.STATUS_START) {
throw new IllegalStateException("not recording");
} else {
stopRecorder();
// convertPCmFile();
makeDestFile();
status = Status.STATUS_READY;
}
}
private void convertPCmFile() {
File file = new File(AudioFileUtils.getPcmFileAbsolutePath(RECORDED_GREETING_FILE_NAME)); // for ex. path= "/sdcard/samplesound.pcm" or "/sdcard/samplesound.wav"
byte[] byteData = new byte[(int) file.length()];
try {
FileInputStream in = new FileInputStream(file);
in.read(byteData);
in.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
// Set and push to audio track..
int intSize = android.media.AudioTrack.getMinBufferSize(audioSampleRate, AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT);
AudioTrack at = new AudioTrack(AudioManager.STREAM_MUSIC, audioSampleRate, AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT, intSize, AudioTrack.MODE_STREAM);
if (at != null) {
at.play();
// Write the byte array to the track
at.write(byteData, 0, byteData.length);
at.stop();
at.release();
} else
Log.d("TCAudio", "audio track is not initialised ");
}
private void makeDestFile() {
new Thread() {
@Override
public void run() {
File file = new File(pcmFileName);
try {
//Step 1: create input stream from generated pcm audio data
byte[] pcmData = new byte[(int) file.length()];
FileInputStream inputStream1 = new FileInputStream(file);
int readBytes = inputStream1.read(pcmData);
inputStream1.close();
//Step 2: calculate the size that has to be sent to constructor which is half the size of actual pcm audio data
int size = UlawEncoderInputStream.maxAbsPcm(pcmData, 0, pcmData.length / 2);
//Step 3: send the input stream as well as the size to the constructor
FileInputStream inputStream2 = new FileInputStream(file);
UlawEncoderInputStream ulawEncoderInputStream = new UlawEncoderInputStream(inputStream2, size);
//Step 4: create byte[] with size of half of bytes of pcm audio data
byte[] ulawData = new byte[pcmData.length / 2];
//Step 5: call read from UlawEncoderInputStream with above pcmData which is newly created
int nRead;
nRead = ulawEncoderInputStream.read(ulawData);
//Step 6: create wav header
byte[] wavHeader = wavFileHeader(ulawData.length, ulawData.length + 36, audioSampleRate, audioChannel, audioSampleRate);
//Step 7: combine wav header and encodedUlawBuffer in one byte[]
byte[] allByteArray = new byte[wavHeader.length + ulawData.length];
ByteBuffer buff = ByteBuffer.wrap(allByteArray);
buff.put(wavHeader);
buff.put(ulawData);
//Step 8 : writing the combined data into a new file
OutputStream outputStream = new FileOutputStream(new File(AudioFileUtils.getWavFileAbsolutePath(RECORDED_FILE_NAME)));
outputStream.write(allByteArray);
outputStream.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
releaseRecorder();
}
}.run();
}
private byte[] wavFileHeader(long totalAudioLen, long totalDataLen, long longSampleRate,
int channels, long byteRate) {
byte[] header = new byte[44];
header[0] = 'R'; // RIFF/WAVE header
header[1] = 'I';
header[2] = 'F';
header[3] = 'F';
header[4] = (byte) (totalDataLen & 0xff);
header[5] = (byte) ((totalDataLen >> 8) & 0xff);
header[6] = (byte) ((totalDataLen >> 16) & 0xff);
header[7] = (byte) ((totalDataLen >> 24) & 0xff);
header[8] = 'W';
header[9] = 'A';
header[10] = 'V';
header[11] = 'E';
header[12] = 'f'; // 'fmt ' chunk
header[13] = 'm';
header[14] = 't';
header[15] = ' ';
header[16] = 16; // 4 bytes: size of 'fmt ' chunk
header[17] = 0;
header[18] = 0;
header[19] = 0;
header[20] = 7; // format = 7, for ulaw
header[21] = 0;
header[22] = (byte) (channels & 0xff);
header[23] = (byte) ((channels >> 8) & 0xFF);
header[24] = (byte) (longSampleRate & 0xff);
header[25] = (byte) ((longSampleRate >> 8) & 0xff);
header[26] = (byte) ((longSampleRate >> 16) & 0xff);
header[27] = (byte) ((longSampleRate >> 24) & 0xff);
header[28] = (byte) (byteRate & 0xff);
header[29] = (byte) ((byteRate >> 8) & 0xff);
header[30] = (byte) ((byteRate >> 16) & 0xff);
header[31] = (byte) ((byteRate >> 24) & 0xff);
header[32] = (byte) ((channels * 8) / 8);//
// block align
header[33] = 0;
header[34] = 8; // bits per sample
header[35] = 0;
header[36] = 'd';
header[37] = 'a';
header[38] = 't';
header[39] = 'a';
header[40] = (byte) (totalAudioLen & 0xff);
header[41] = (byte) ((totalAudioLen >> 8) & 0xff);
header[42] = (byte) ((totalAudioLen >> 16) & 0xff);
header[43] = (byte) ((totalAudioLen >> 24) & 0xff);
return header;
}
public void release() {
stopRecorder();
releaseRecorder();
status = Status.STATUS_READY;
}
private void releaseRecorder() {
if (audioRecord != null) {
audioRecord.release();
audioRecord = null;
}
}
private void stopRecorder() {
if (audioRecord != null) {
try {
audioRecord.stop();
} catch (Exception e) {
e.printStackTrace();
}
}
}
/**
* linear PCM data recorded to file
*/
private void recordToFile() {
byte[] audiodata = new byte[bufferSizeInBytes];
int readsize = 0;
try {
fosPcm = new FileOutputStream(pcmFileName, true);
} catch (FileNotFoundException e) {
Log.e("AudioRecorder", e.getMessage());
}
status = Status.STATUS_START;
while (status == Status.STATUS_START && audioRecord != null) {
readsize = audioRecord.read(audiodata, 0, bufferSizeInBytes);
if (AudioRecord.ERROR_INVALID_OPERATION != readsize && fosPcm != null) {
try {
//get the volumn 1--10
int sum = 0;
for (int i = 0; i < readsize; i++) {
sum += Math.abs(audiodata[i]);
}
if (readsize > 0) {
int raw = sum / readsize;
lastVolumn = raw > 32 ? raw - 32 : 0;
Log.i(TAG, "writeDataTOFile: volumn -- " + raw + " / lastvolumn -- " + lastVolumn);
}
if (readsize > 0 && readsize <= audiodata.length)
fosPcm.write(audiodata, 0, readsize);
} catch (IOException e) {
Log.e("AudioRecorder", e.getMessage());
}
}
}
try {
if (fosPcm != null) {
fosPcm.close();
}
} catch (IOException e) {
Log.e("AudioRecorder", e.getMessage());
}
}
public Status getStatus() {
return status;
}
public enum Status {
STATUS_NO_READY,
STATUS_READY,
STATUS_START,
STATUS_PAUSE,
STATUS_STOP
}
}
I have AudioFileUtils.java, which just builds the file path and appends the extension before saving:
public class AudioFileUtils {
private static String rootPath = "audiorecord";
private final static String AUDIO_PCM_BASEPATH = "/" + rootPath + "/pcm/";
private final static String AUDIO_WAV_BASEPATH = "/" + rootPath + "/wav/";
private static void setRootPath(String rootPath) {
AudioFileUtils.rootPath = rootPath;
}
public static String getPcmFileAbsolutePath(String fileName) {
if (TextUtils.isEmpty(fileName)) {
throw new NullPointerException("fileName isEmpty");
}
String mAudioRawPath = "";
if (!fileName.endsWith(".pcm")) {
fileName = fileName + ".pcm";
}
String fileBasePath = SampleApp.getInstance().getApplicationContext().getExternalFilesDir(null) + AUDIO_PCM_BASEPATH;
File file = new File(fileBasePath);
if (!file.exists()) {
file.mkdirs();
}
mAudioRawPath = fileBasePath + fileName;
return mAudioRawPath;
}
public static String getWavFileAbsolutePath(String fileName) {
if (fileName == null) {
throw new NullPointerException("fileName can't be null");
}
String mAudioWavPath = "";
if (!fileName.endsWith(".wav")) {
fileName = fileName + ".wav";
}
String fileBasePath = SampleApp.getInstance().getApplicationContext().getExternalFilesDir(null) + AUDIO_WAV_BASEPATH;
CoxApplication.getInstance().getAccountManager().setMessageFilePath(fileBasePath);
File file = new File(fileBasePath);
if (!file.exists()) {
file.mkdirs();
}
mAudioWavPath = fileBasePath + fileName;
return mAudioWavPath;
}
}
I am using UlawEncoderInputStream.java, which I found here with some changes; it can be found below.
public class UlawEncoderInputStream extends InputStream {
private final static String TAG = "UlawEncoderInputStream";
private final static int MAX_ULAW = 8192;
private final static int SCALE_BITS = 16;
private InputStream mIn;
private int mMax = 0;
// this buffer needs to be LARGER than the largest possible file size for
// a 30 second PCM file recorded at either 8000 Hz or 44.1 KHz.
// If it is smaller, the file will be cut off.
private final byte[] mBuf = new byte[1048576];
private int mBufCount = 0; // should be 0 or 1
private final byte[] mOneByte = new byte[1];
public static void encode(byte[] pcmBuf, int pcmOffset,
byte[] ulawBuf, int ulawOffset, int length, int max) {
// from 'ulaw' in wikipedia
// +8191 to +8159 0x80
// +8158 to +4063 in 16 intervals of 256 0x80 + interval number
// +4062 to +2015 in 16 intervals of 128 0x90 + interval number
// +2014 to +991 in 16 intervals of 64 0xA0 + interval number
// +990 to +479 in 16 intervals of 32 0xB0 + interval number
// +478 to +223 in 16 intervals of 16 0xC0 + interval number
// +222 to +95 in 16 intervals of 8 0xD0 + interval number
// +94 to +31 in 16 intervals of 4 0xE0 + interval number
// +30 to +1 in 15 intervals of 2 0xF0 + interval number
// 0 0xFF
// -1 0x7F
// -31 to -2 in 15 intervals of 2 0x70 + interval number
// -95 to -32 in 16 intervals of 4 0x60 + interval number
// -223 to -96 in 16 intervals of 8 0x50 + interval number
// -479 to -224 in 16 intervals of 16 0x40 + interval number
// -991 to -480 in 16 intervals of 32 0x30 + interval number
// -2015 to -992 in 16 intervals of 64 0x20 + interval number
// -4063 to -2016 in 16 intervals of 128 0x10 + interval number
// -8159 to -4064 in 16 intervals of 256 0x00 + interval number
// -8192 to -8160 0x00
// set scale factors
if (max <= 0) max = MAX_ULAW;
int coef = MAX_ULAW * (1 << SCALE_BITS) / max;
for (int i = 0; i < length; i++) {
int pcm = (0xff & pcmBuf[pcmOffset++]) + (pcmBuf[pcmOffset++] << 8);
pcm = (pcm * coef) >> SCALE_BITS;
int ulaw;
if (pcm >= 0) {
ulaw = pcm <= 0 ? 0xff :
pcm <= 30 ? 0xf0 + (( 30 - pcm) >> 1) :
pcm <= 94 ? 0xe0 + (( 94 - pcm) >> 2) :
pcm <= 222 ? 0xd0 + (( 222 - pcm) >> 3) :
pcm <= 478 ? 0xc0 + (( 478 - pcm) >> 4) :
pcm <= 990 ? 0xb0 + (( 990 - pcm) >> 5) :
pcm <= 2014 ? 0xa0 + ((2014 - pcm) >> 6) :
pcm <= 4062 ? 0x90 + ((4062 - pcm) >> 7) :
pcm <= 8158 ? 0x80 + ((8158 - pcm) >> 8) :
0x80;
} else {
ulaw = -1 <= pcm ? 0x7f :
-31 <= pcm ? 0x70 + ((pcm - -31) >> 1) :
-95 <= pcm ? 0x60 + ((pcm - -95) >> 2) :
-223 <= pcm ? 0x50 + ((pcm - -223) >> 3) :
-479 <= pcm ? 0x40 + ((pcm - -479) >> 4) :
-991 <= pcm ? 0x30 + ((pcm - -991) >> 5) :
-2015 <= pcm ? 0x20 + ((pcm - -2015) >> 6) :
-4063 <= pcm ? 0x10 + ((pcm - -4063) >> 7) :
-8159 <= pcm ? 0x00 + ((pcm - -8159) >> 8) :
0x00;
}
ulawBuf[ulawOffset++] = (byte)ulaw;
}
}
/**
* Compute the maximum of the absolute value of the pcm samples.
* The return value can be used to set ulaw encoder scaling.
* @param pcmBuf array containing 16 bit pcm data.
* @param offset offset of start of 16 bit pcm data.
* @param length number of pcm samples (not number of input bytes)
* @return maximum abs of pcm data values
*/
public static int maxAbsPcm(byte[] pcmBuf, int offset, int length) {
int max = 0;
for (int i = 0; i < length; i++) {
int pcm = (0xff & pcmBuf[offset++]) + (pcmBuf[offset++] << 8);
if (pcm < 0) pcm = -pcm;
if (pcm > max) max = pcm;
}
return max;
}
/**
* Create an InputStream which takes 16 bit pcm data and produces ulaw data.
* @param in InputStream containing 16 bit pcm data.
* @param max pcm value corresponding to maximum ulaw value.
*/
public UlawEncoderInputStream(InputStream in, int max) {
mIn = in;
mMax = max;
}
@Override
public int read(byte[] buf, int offset, int length) throws IOException {
if (mIn == null) throw new IllegalStateException("not open");
// return at least one byte, but try to fill 'length'
while (mBufCount < 2) {
int n = mIn.read(mBuf, mBufCount, Math.min(length * 2, mBuf.length - mBufCount));
if (n == -1) return -1;
mBufCount += n;
}
// compand data
int n = Math.min(mBufCount / 2, length);
encode(mBuf, 0, buf, offset, n, mMax);
// move data to bottom of mBuf
mBufCount -= n * 2;
for (int i = 0; i < mBufCount; i++) mBuf[i] = mBuf[i + n * 2];
return n;
}
/*public byte[] getUpdatedBuffer(){
return mBuf;
}*/
@Override
public int read(byte[] buf) throws IOException {
return read(buf, 0, buf.length);
}
@Override
public int read() throws IOException {
int n = read(mOneByte, 0, 1);
if (n == -1) return -1;
return 0xff & (int)mOneByte[0];
}
@Override
public void close() throws IOException {
if (mIn != null) {
InputStream in = mIn;
mIn = null;
in.close();
}
}
@Override
public int available() throws IOException {
return (mIn.available() + mBufCount) / 2;
}
}
After doing all of this, as I said, the voice is screechy and noisy. I'm not sure where I am going wrong.
Any help is appreciated! Thank you in advance.
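For reference, a minimal sketch (an assumption about the intended output, not code from the post above) of how the wavFileHeader() call could be parameterised for a mono 8 kHz ulaw stream: the fmt chunk normally carries the actual channel count (1) rather than the AudioFormat.CHANNEL_IN_MONO constant, a byte rate of sampleRate * channels * 1 byte per ulaw sample, and 8 bits per sample.
// Hypothetical parameters for an 8 kHz, mono, 8-bit ulaw WAV (format code 7)
int numChannels = 1; // actual channel count, not AudioFormat.CHANNEL_IN_MONO
long byteRate = audioSampleRate * numChannels; // 1 byte per ulaw sample -> 8000 bytes/s
byte[] wavHeader = wavFileHeader(ulawData.length, // 'data' chunk size
ulawData.length + 36, // RIFF chunk size
audioSampleRate, numChannels, byteRate);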
Please help me figure out how I am recording sound.
I want to record via a service, but this produces jerks in the sound while recording. I have buttons in an activity from which I start and stop recording.
The same code works absolutely fine if I do it in an activity.
The service I am using is below:
public class ServiceMusic extends Service {
private static String LOG_TAG = "BoundService";
private static final int RECORDER_BPP = 16;
private static final String AUDIO_RECORDER_FILE_EXT_WAV = ".wav";
private static final String AUDIO_RECORDER_FOLDER = "AudioRecorder";
private static final String AUDIO_RECORDER_TEMP_FILE = "record_temp.raw";
private static final int RECORDER_SAMPLERATE = 8000;
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_STEREO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private AudioRecord recorder = null;
private int bufferSize = 0;
private Thread recordingThread = null;
private boolean isRecording = false;
private static ServiceMusic self = null;
@Override
public void onCreate() {
super.onCreate();
Log.v(LOG_TAG, "in onCreate");
}
public int onStartCommand(Intent intent, int flags, int startId)
{
Log.i("onStartComman?d", "onStartCommand");
self = ServiceMusic.this;
self.bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE,RECORDER_CHANNELS,RECORDER_AUDIO_ENCODING);
self.recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,RECORDER_SAMPLERATE, RECORDER_CHANNELS,RECORDER_AUDIO_ENCODING, bufferSize);
TelephonyManager mgr=(TelephonyManager) getSystemService(TELEPHONY_SERVICE);
if (mgr!=null)
{
mgr.listen(phoneStateListener, PhoneStateListener.LISTEN_CALL_STATE);
}
return START_REDELIVER_INTENT;
}
@Override
public void onRebind(Intent intent) {
Log.v(LOG_TAG, "in onRebind");
super.onRebind(intent);
}
@Override
public boolean onUnbind(Intent intent) {
Log.v(LOG_TAG, "in onUnbind");
return true;
}
@Override
public void onDestroy() {
super.onDestroy();
Log.v(LOG_TAG, "in onDestroy");
// mChronometer.stop();
}
private String getFilename(){
String filepath = Environment.getExternalStorageDirectory().getPath();
File file = new File(filepath,AUDIO_RECORDER_FOLDER);
if(!file.exists()){
file.mkdirs();
}
return (file.getAbsolutePath() + "/" + "Ravindra" + AUDIO_RECORDER_FILE_EXT_WAV);
}
private String getTempFilename(){
String filepath = Environment.getExternalStorageDirectory().getPath();
File file = new File(filepath,AUDIO_RECORDER_FOLDER);
if(!file.exists()){
file.mkdirs();
}
File tempFile = new File(filepath,AUDIO_RECORDER_TEMP_FILE);
if(tempFile.exists())
tempFile.delete();
return (file.getAbsolutePath() + "/" + AUDIO_RECORDER_TEMP_FILE);
}
void startRecording(final boolean b){
if (self.recorder==null) {
//Log.i("startRecording", "iffffffffffffffffffff");
self.recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,RECORDER_SAMPLERATE, RECORDER_CHANNELS,RECORDER_AUDIO_ENCODING, bufferSize);
}
self.recorder.startRecording();
isRecording = true;
self.recordingThread = new Thread(new Runnable() {
@Override
public void run() {
writeAudioDataToFile(b);
}
},"AudioRecorder Thread");
self.recordingThread.start();
}
void stopRecording(boolean b){
Log.i("stopRecording", "stopRecording");
if(self.recorder != null){
isRecording = false;
self.recorder.stop();
self.recorder.release();
self.recorder = null;
self.recordingThread = null;
Log.i("stopRecording", "nulll");
}
Log.i("stopRecording", "outer");
if(b == true){
Log.i("stopRecording", "true");
copyWaveFile(getTempFilename(),getFilename());
deleteTempFile();
}
}
private void deleteTempFile() {
File file = new File(getTempFilename());
file.delete();
}
private void writeAudioDataToFile(boolean b){
byte data[] = new byte[bufferSize];
String filename = getTempFilename();
FileOutputStream os = null;
try {
os = new FileOutputStream(filename,b);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
int read = 0;
if(os != null){
while(isRecording){
read = self.recorder.read(data, 0, bufferSize);
if(AudioRecord.ERROR_INVALID_OPERATION != read){
try {
os.write(data);
} catch (IOException e) {
e.printStackTrace();
}
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
private void copyWaveFile(String inFilename,String outFilename){
FileInputStream in = null;
FileOutputStream out = null;
long totalAudioLen = 0;
long totalDataLen = totalAudioLen + 44;
long longSampleRate = RECORDER_SAMPLERATE;
int channels = 2;
long byteRate = RECORDER_BPP * RECORDER_SAMPLERATE * channels/8;
byte[] data = new byte[bufferSize];
try {
in = new FileInputStream(inFilename);
out = new FileOutputStream(outFilename);
totalAudioLen = in.getChannel().size();
totalDataLen = totalAudioLen + 44;
//AppLog.logString("File size: " + totalDataLen);
WriteWaveFileHeader(out, totalAudioLen, totalDataLen,
longSampleRate, channels, byteRate);
while(in.read(data) != -1){
out.write(data);
}
in.close();
out.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
private void WriteWaveFileHeader(
FileOutputStream out, long totalAudioLen,
long totalDataLen, long longSampleRate, int channels,
long byteRate) throws IOException {
byte[] header = new byte[44];
header[0] = 'R'; // RIFF/WAVE header
header[1] = 'I';
header[2] = 'F';
header[3] = 'F';
header[4] = (byte) (totalDataLen & 0xff);
header[5] = (byte) ((totalDataLen >> 8) & 0xff);
header[6] = (byte) ((totalDataLen >> 16) & 0xff);
header[7] = (byte) ((totalDataLen >> 24) & 0xff);
header[8] = 'W';
header[9] = 'A';
header[10] = 'V';
header[11] = 'E';
header[12] = 'f'; // 'fmt ' chunk
header[13] = 'm';
header[14] = 't';
header[15] = ' ';
header[16] = 16; // 4 bytes: size of 'fmt ' chunk
header[17] = 0;
header[18] = 0;
header[19] = 0;
header[20] = 1; // format = 1
header[21] = 0;
header[22] = (byte) channels;
header[23] = 0;
header[24] = (byte) (longSampleRate & 0xff);
header[25] = (byte) ((longSampleRate >> 8) & 0xff);
header[26] = (byte) ((longSampleRate >> 16) & 0xff);
header[27] = (byte) ((longSampleRate >> 24) & 0xff);
header[28] = (byte) (byteRate & 0xff);
header[29] = (byte) ((byteRate >> 8) & 0xff);
header[30] = (byte) ((byteRate >> 16) & 0xff);
header[31] = (byte) ((byteRate >> 24) & 0xff);
header[32] = (byte) (2 * 16 / 8); // block align
header[33] = 0;
header[34] = RECORDER_BPP; // bits per sample
header[35] = 0;
header[36] = 'd';
header[37] = 'a';
header[38] = 't';
header[39] = 'a';
header[40] = (byte) (totalAudioLen & 0xff);
header[41] = (byte) ((totalAudioLen >> 8) & 0xff);
header[42] = (byte) ((totalAudioLen >> 16) & 0xff);
header[43] = (byte) ((totalAudioLen >> 24) & 0xff);
out.write(header, 0, 44);
}
Acquire a wakelock when you start recording, and release it when you stop. Otherwise the device may go to sleep. For an example, see line 820 onwards of the android sound recorder app https://android.googlesource.com/platform/packages/apps/SoundRecorder/+/master/src/com/android/soundrecorder/SoundRecorder.java
(You may consider using a partial wake lock instead of a screen dim wake lock in the code above, if you are running in a background service)
See http://developer.android.com/reference/android/os/PowerManager.html
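For illustration, a minimal sketch of acquiring and releasing a partial wake lock around recording in a service (the helper class and tag name below are made up, not taken from the linked SoundRecorder source):
import android.content.Context;
import android.os.PowerManager;
public class RecorderWakeLockHelper {
private PowerManager.WakeLock wakeLock;
// Call when recording starts (e.g. from the service's startRecording())
void acquire(Context context) {
PowerManager pm = (PowerManager) context.getSystemService(Context.POWER_SERVICE);
// A partial wake lock keeps the CPU running while the screen is allowed to turn off
wakeLock = pm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "AudioRecorder:recording");
wakeLock.acquire();
}
// Call when recording stops (e.g. from the service's stopRecording())
void release() {
if (wakeLock != null && wakeLock.isHeld()) {
wakeLock.release();
wakeLock = null;
}
}
}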
I have two methods in my activity: one is setOnClickListener() and the other is setOnLongClickListener(),
both for the same button that is used to start audio recording.
Now I don't know how to handle the condition: if I use setOnClickListener() then recording should run for 1 minute and stop automatically, and if I use setOnLongClickListener() then recording should run for 2 minutes and then stop automatically.
buttonStart.setOnLongClickListener(new OnLongClickListener() {
@Override
public boolean onLongClick(View v) {
Toast.makeText(Record_Audio.this,
"Start Recording With LongClick", Toast.LENGTH_SHORT)
.show();
enableButtons(true);
startRecording();
return true;
}
});
private View.OnClickListener btnClick = new View.OnClickListener() {
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.btnStart: {
Toast.makeText(Record_Audio.this, "Start Recording",
Toast.LENGTH_SHORT).show();
enableButtons(true);
startRecording();
break;
}
}
}
};
private void startRecording() {
displayAlertDialog();
}
private void displayAlertDialog() {
AlertDialog.Builder alertDialog = new AlertDialog.Builder(
Record_Audio.this);
alertDialog.setTitle("Would you Like to save your Recording");
alertDialog.setMessage("Enter Audio Name");
alertDialog.setIcon(R.drawable.save_icon);
final EditText editTextAudioName = new EditText(Record_Audio.this);
RelativeLayout.LayoutParams lp = new RelativeLayout.LayoutParams(
RelativeLayout.LayoutParams.MATCH_PARENT,
RelativeLayout.LayoutParams.MATCH_PARENT);
editTextAudioName.setLayoutParams(lp);
alertDialog.setView(editTextAudioName);
alertDialog.setPositiveButton("Save",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
Code.audioName = editTextAudioName.getText().toString()
.trim();
recorder = new MediaRecorder();
recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
recorder.setOutputFormat(output_formats[currentFormat]);
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
recorder.setOutputFile(getFilename());
recorder.setOnErrorListener(errorListener);
recorder.setOnInfoListener(infoListener);
try {
recorder.prepare();
recorder.start();
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
myChronometer.setBase(SystemClock.elapsedRealtime());
myChronometer.start();
}
});
alertDialog.setNegativeButton("Cancel",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
// Write your code here to execute after dialog
dialog.cancel();
}
});
alertDialog.show();
}
I know I have to use setMaxDuration for a particular time interval, but I don't know how to use it with these two methods.
Thanks in advance.
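As a hedged sketch (not from the original post), setMaxDuration() can be combined with MediaRecorder's OnInfoListener so that the click listener passes 60000 ms and the long-click listener passes 120000 ms; the durationMs parameter and the THREE_GPP output format below are illustrative, while recorder and getFilename() are the members shown above.
// Illustrative: call startRecording(60000) from onClick and startRecording(120000) from onLongClick
private void startRecording(int durationMs) {
recorder = new MediaRecorder();
recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
recorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP); // assumed output format
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
recorder.setOutputFile(getFilename());
recorder.setMaxDuration(durationMs); // call after setOutputFormat() but before prepare()
recorder.setOnInfoListener(new MediaRecorder.OnInfoListener() {
@Override
public void onInfo(MediaRecorder mr, int what, int extra) {
if (what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED) {
// Recording stops automatically when the limit is reached; just clean up here
mr.release();
}
}
});
try {
recorder.prepare();
recorder.start();
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}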
Anyhow, the MediaRecorder API is not a good way to record audio; use the AudioRecord API instead.
Let me explain.
1. This is the class for recording audio:
public class Taukyrecorder {
private static final int RECORDER_BPP = 16;
private static final String AUDIO_RECORDER_FILE_EXT_WAV = ".mp3";
private static final String AUDIO_RECORDER_FOLDER = "/'/'";
private static final String AUDIO_RECORDER_TEMP_FILE = "record_temp.raw";
private static final int RECORDER_SAMPLERATE = 44100;
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_STEREO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private AudioRecord recorder = null;
private int bufferSize = 0;
private Thread recordingThread = null;
public static boolean isRecording = false;
int numCrossing, p;
public int frequency;
int blockSize = 3500;
private Context mcontext;
public static String final_sound_path = null;
Handler handler;
RecorderListener mylistener;
public Taukyrecorder(Context cntxt) {
// TODO Auto-generated constructor stub
mcontext = cntxt;
// Log.i("Recorder", "helllllo");
InitRecord();
}
public void RegisterListener(RecorderListener listener) {
this.mylistener = listener;
}
public void InitRecord()
{
handler = new Handler();
bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE,
RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
RECORDER_SAMPLERATE, RECORDER_CHANNELS,
RECORDER_AUDIO_ENCODING, bufferSize);
recorder.setRecordPositionUpdateListener(
new OnRecordPositionUpdateListener() {
@Override
public void onPeriodicNotification(AudioRecord recorder) {
// TODO Auto-generated method stub
}
@Override
public void onMarkerReached(AudioRecord recorder) {
// TODO Auto-generated method stub
}
}, handler);
}
// Get the file for saving sound into the folder
public File GetFileTOwriteSound() {
File tempPicFile = null;
String ext_storage_state = Environment.getExternalStorageState();
File mediaStorage = new File(Environment.getExternalStorageDirectory()
+ "/TAUKY/SOUNDS");
if (ext_storage_state.equalsIgnoreCase(Environment.MEDIA_MOUNTED)) {
if (!mediaStorage.exists()) {
mediaStorage.mkdirs();
} else {
// do nothing
}
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss")
.format(new Date());
tempPicFile = new File(mediaStorage.getPath() + File.separator
+ "SOUND_" + timeStamp + ".mp3");
} else {
Toast.makeText(mcontext, "NO SDCARD MOUNTED", 1).show();
}
return tempPicFile;
}
private String getTempFilename() {
String filepath = Environment.getExternalStorageDirectory().getPath();
File file = new File(filepath, AUDIO_RECORDER_FOLDER);
if (!file.exists()) {
file.mkdirs();
}
File tempFile = new File(filepath, AUDIO_RECORDER_TEMP_FILE);
if (tempFile.exists())
tempFile.delete();
return (file.getAbsolutePath() + "/" + AUDIO_RECORDER_TEMP_FILE);
}
public void startRecording() {
//InitRecord();
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
@Override
public void run() {
writedataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
private void writedataToFile() {
byte data[] = new byte[bufferSize];
String filename = getTempFilename();
FileOutputStream os = null;
try {
os = new FileOutputStream(filename);
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
int read = 0;
short[] buffer = new short[blockSize];
double[] toTransform = new double[blockSize];
if (null != os) {
while (isRecording) {
read = recorder.read(data, 0, bufferSize);
if (AudioRecord.ERROR_INVALID_OPERATION != read) {
try {
os.write(data);
} catch (IOException e) {
e.printStackTrace();
}
//int bufferReadResult = recorder.read(buffer, 0, blockSize);
//byte[] bData= {};
short[] sData= new short[data.length/2];
// to turn bytes to shorts as either big endian or little endian.
ByteBuffer.wrap(data).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(sData);
for (int i = 0; i < blockSize && i < read; i++) {
toTransform[i] = sData[i] / 32768.0;
}
mylistener.Updatevalues(toTransform);
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
public void stopRecording() {
String getfilename = GetFileTOwriteSound().getAbsolutePath();
final_sound_path = getfilename;
if (null != recorder) {
isRecording = false;
recorder.stop();
recorder.release();
recorder = null;
recordingThread = null;
}
copyWaveFile(getTempFilename(), getfilename);
deleteTempFile();
}
private void deleteTempFile() {
File file = new File(getTempFilename());
file.delete();
}
private void copyWaveFile(String inFilename, String outFilename) {
FileInputStream in = null;
FileOutputStream out = null;
long totalAudioLen = 0;
long totalDataLen = totalAudioLen + 36;
long longSampleRate = RECORDER_SAMPLERATE;
int channels = 2;
long byteRate = RECORDER_BPP * RECORDER_SAMPLERATE * channels / 8;
byte[] data = new byte[bufferSize];
try {
in = new FileInputStream(inFilename);
out = new FileOutputStream(outFilename);
totalAudioLen = in.getChannel().size();
totalDataLen = totalAudioLen + 36;
// /AppLog.logString("File size: " + totalDataLen);
WriteWaveFileHeader(out, totalAudioLen, totalDataLen,
longSampleRate, channels, byteRate);
while (in.read(data) != -1) {
out.write(data);
}
in.close();
out.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
private void WriteWaveFileHeader(FileOutputStream out, long totalAudioLen,
long totalDataLen, long longSampleRate, int channels, long byteRate)
throws IOException {
byte[] header = new byte[44];
header[0] = 'R'; // RIFF/WAVE header
header[1] = 'I';
header[2] = 'F';
header[3] = 'F';
header[4] = (byte) (totalDataLen & 0xff);
header[5] = (byte) ((totalDataLen >> 8) & 0xff);
header[6] = (byte) ((totalDataLen >> 16) & 0xff);
header[7] = (byte) ((totalDataLen >> 24) & 0xff);
header[8] = 'W';
header[9] = 'A';
header[10] = 'V';
header[11] = 'E';
header[12] = 'f'; // 'fmt ' chunk
header[13] = 'm';
header[14] = 't';
header[15] = ' ';
header[16] = 16; // 4 bytes: size of 'fmt ' chunk
header[17] = 0;
header[18] = 0;
header[19] = 0;
header[20] = 1; // format = 1
header[21] = 0;
header[22] = (byte) channels;
header[23] = 0;
header[24] = (byte) (longSampleRate & 0xff);
header[25] = (byte) ((longSampleRate >> 8) & 0xff);
header[26] = (byte) ((longSampleRate >> 16) & 0xff);
header[27] = (byte) ((longSampleRate >> 24) & 0xff);
header[28] = (byte) (byteRate & 0xff);
header[29] = (byte) ((byteRate >> 8) & 0xff);
header[30] = (byte) ((byteRate >> 16) & 0xff);
header[31] = (byte) ((byteRate >> 24) & 0xff);
header[32] = (byte) (2 * 16 / 8); // block align
header[33] = 0;
header[34] = RECORDER_BPP; // bits per sample
header[35] = 0;
header[36] = 'd';
header[37] = 'a';
header[38] = 't';
header[39] = 'a';
header[40] = (byte) (totalAudioLen & 0xff);
header[41] = (byte) ((totalAudioLen >> 8) & 0xff);
header[42] = (byte) ((totalAudioLen >> 16) & 0xff);
header[43] = (byte) ((totalAudioLen >> 24) & 0xff);
out.write(header, 0, 44);
}
}
Start and stop the recording from MainActivity:
Taukyrecorder taukyrecorder = new Taukyrecorder(mcontext);
taukyrecorder.startRecording();
//For stop
taukyrecorder.stopRecording();
If you want to stop automatically after a few seconds, create one thread.
public class StopRecord extends Thread {
int i = 0;
@Override
public void run() {
// TODO Auto-generated method stub
super.run();
handler.post(new Runnable() {
@Override
public void run() {
// TODO Auto-generated method stub
taukyrecorder.stopRecording();
}
});
}
}
Then call the thread using a handler:
Handler mhandler = new Handler();
mhandler.postDelayed(mRunnable, 1000); // put your delay in milliseconds here
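Equivalently (a minimal sketch, not from the original answer), the delayed stop can be posted directly as a Runnable, which avoids the extra Thread subclass; the 60000 ms value is illustrative and taukyrecorder is the instance created above.
// Schedule the stop directly with a Runnable
Handler stopHandler = new Handler();
stopHandler.postDelayed(new Runnable() {
@Override
public void run() {
if (Taukyrecorder.isRecording) {
taukyrecorder.stopRecording();
}
}
}, 60000); // delay in milliseconds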
I am recording audio with the AudioRecord class. Now I want to convert the raw audio file to *.flac format. I convert the *.raw file to WAV the following way:
private void copyWaveFile(String inFilename,String outFilename){
FileInputStream in = null;
FileOutputStream out = null;
long totalAudioLen = 0;
long totalDataLen = totalAudioLen + 36;
long longSampleRate = sampleRate;
int channels = 2;
long byteRate = RECORDER_BPP * sampleRate * channels/8;
byte[] data_pcm = new byte[mAudioBufferSize];
try {
in = new FileInputStream(inFilename);
out = new FileOutputStream(outFilename);
totalAudioLen = in.getChannel().size();
totalDataLen = totalAudioLen + 36;
Log.i(TAG,"File size: " + totalDataLen);
WriteWaveFileHeader(out, totalAudioLen, totalDataLen,
longSampleRate, channels, byteRate);
while(in.read(data_pcm) != -1){
out.write(data_pcm);
}
in.close();
out.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
This piece of code is responsible for the file header
private void WriteWaveFileHeader(
FileOutputStream out, long totalAudioLen,
long totalDataLen, long longSampleRate, int channels,
long byteRate) throws IOException {
byte[] header = new byte[44];
header[0] = 'R'; // RIFF/WAVE header
header[1] = 'I';
header[2] = 'F';
header[3] = 'F';
header[4] = (byte) (totalDataLen & 0xff);
header[5] = (byte) ((totalDataLen >> 8) & 0xff);
header[6] = (byte) ((totalDataLen >> 16) & 0xff);
header[7] = (byte) ((totalDataLen >> 24) & 0xff);
header[8] = 'W';
header[9] = 'A';
header[10] = 'V';
header[11] = 'E';
header[12] = 'f'; // 'fmt ' chunk
header[13] = 'm';
header[14] = 't';
header[15] = ' ';
header[16] = 16; // 4 bytes: size of 'fmt ' chunk
header[17] = 0;
header[18] = 0;
header[19] = 0;
header[20] = 1; // format = 1
header[21] = 0;
header[22] = (byte) channels;
header[23] = 0;
header[24] = (byte) (longSampleRate & 0xff);
header[25] = (byte) ((longSampleRate >> 8) & 0xff);
header[26] = (byte) ((longSampleRate >> 16) & 0xff);
header[27] = (byte) ((longSampleRate >> 24) & 0xff);
header[28] = (byte) (byteRate & 0xff);
header[29] = (byte) ((byteRate >> 8) & 0xff);
header[30] = (byte) ((byteRate >> 16) & 0xff);
header[31] = (byte) ((byteRate >> 24) & 0xff);
header[32] = (byte) (2 * 16 / 8); // block align
header[33] = 0;
header[34] = RECORDER_BPP; // bits per sample
header[35] = 0;
header[36] = 'd';
header[37] = 'a';
header[38] = 't';
header[39] = 'a';
header[40] = (byte) (totalAudioLen & 0xff);
header[41] = (byte) ((totalAudioLen >> 8) & 0xff);
header[42] = (byte) ((totalAudioLen >> 16) & 0xff);
header[43] = (byte) ((totalAudioLen >> 24) & 0xff);
out.write(header, 0, 44);
}
I do not understand what the parameters of the *.flac file should be.
You need an encoder to convert pcm data to flac format. You cannot just change the header and expect the content to work as flac.
Android (at least till 4.1) does not include a FLAC encoder, although there is a decoder supported from 3.1 onwards (Source: http://developer.android.com/guide/appendix/media-formats.html).
I do not have direct experience, but have seen people use ffmpeg as a flac encoder. This project audioboo-android, which contains the native libFLAC/libFLAC++ encoder, looks interesting.
So, from Android 4.1 onwards you can do it as follows.
Initializing:
MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
MediaFormat format = new MediaFormat();
format.setString(MediaFormat.KEY_MIME, "audio/flac");
format.setInteger(MediaFormat.KEY_BIT_RATE, 64000);
format.setInteger(MediaFormat.KEY_SAMPLE_RATE, SAMPLE_RATE);
format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, NUM_CHANNELS);
String codecname = mcl.findEncoderForFormat(format);
Log.w(TAG, "Codec: "+codecname);
MediaCodec codec = null;
try
{
codec = MediaCodec.createByCodecName(codecname);
} catch (IOException e)
{
e.printStackTrace();
}
codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
MediaFormat outputFormat = codec.getOutputFormat(); // option B
long usec = 1000000L * FRAME_SIZE / SAMPLE_RATE; // duration of one frame in microseconds
MediaCodec.BufferInfo bufinfo = new MediaCodec.BufferInfo();
bufinfo.set(0, FRAME_SIZE * NUM_CHANNELS * 2, usec, 0);
codec.start();
byte[] inBuf = new byte[FRAME_SIZE * NUM_CHANNELS * 2];
byte[] encBuf = new byte[10240];
In the recorder loop:
int encoded = 0;
int inputBufferId = codec.dequeueInputBuffer(1000);
if (inputBufferId >= 0) {
ByteBuffer inputBuffer = codec.getInputBuffer(inputBufferId);
// fill inputBuffer with valid data
inputBuffer.put(inBuf, 0, inBuf.length);
codec.queueInputBuffer(inputBufferId, 0, inBuf.length, usec, 0);
}
int outputBufferId = codec.dequeueOutputBuffer(bufinfo, 1000);
if (outputBufferId >= 0) {
ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufferId);
MediaFormat bufferFormat = codec.getOutputFormat(outputBufferId); // option A
// bufferFormat is identical to outputFormat
// outputBuffer is ready to be processed or rendered.
outputBuffer.rewind();
encoded = outputBuffer.remaining();
outputBuffer.get(encBuf, 0, encoded);
codec.releaseOutputBuffer(outputBufferId, false);
} else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// Subsequent data will conform to new format.
// Can ignore if using getOutputFormat(outputBufferId)
outputFormat = codec.getOutputFormat(); // option B
}
if (encoded > 0)
{
// Process data in encBuf
}
Here's a pure java FLAC encoder: http://javaflacencoder.sourceforge.net
Some of the classes use the javax apis, but they can be safely deleted without affecting the main encoder classes.
Here's some sample code. The record object is of type AudioRecord
try {
// Path to write files to
String path = Environment.getExternalStoragePublicDirectory("/test").getAbsolutePath();
String fileName = name+".flac";
String externalStorage = path;
File file = new File(externalStorage + File.separator + fileName);
// if file doesnt exists, then create it
if (!file.exists()) {
file.createNewFile();
}
short sData[] = new short[BufferElements2Rec];
FileOutputStream os = null;
try {
os = new FileOutputStream(file);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
FLACEncoder flacEncoder = new FLACEncoder();
StreamConfiguration streamConfiguration = new StreamConfiguration(1,StreamConfiguration.MIN_BLOCK_SIZE,StreamConfiguration.MAX_BLOCK_SIZE,44100,16);
FLACFileOutputStream flacOut = new FLACFileOutputStream(os);
flacEncoder.setStreamConfiguration(streamConfiguration);
flacEncoder.setOutputStream(flacOut);
flacEncoder.openFLACStream();
record.startRecording();
int totalSamples = 0;
while (isRecording) {
record.read(sData, 0, BufferElements2Rec);
totalSamples+=BufferElements2Rec;
flacEncoder.addSamples(short2int(sData),BufferElements2Rec);
flacEncoder.encodeSamples(BufferElements2Rec, false);
}
int available = flacEncoder.samplesAvailableToEncode();
while(flacEncoder.encodeSamples(available,true) < available) {
available = flacEncoder.samplesAvailableToEncode();
}
try {
flacOut.close();
} catch (IOException e) {
e.printStackTrace();
}
record.stop();
} catch(IOException ex) {
ex.printStackTrace();
}
record.release();
record = null;
}
For converting the short data into int data:
private int[] short2int(short[] sData) {
int length = sData.length;
int[] iData = new int[length];
for(int i=0;i<length;i++) {
iData[i] = sData[i];
}
return iData;
}
Based on https://github.com/nieldeokar/WhatsappAudioRecorder/blob/master/app/src/main/java/com/nieldeokar/whatsappaudiorecorder/recorder/AudioRecordThread.java
My solution for saving the recording to an .m4a file while speech recognition is running:
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.os.Handler;
import android.os.Looper;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.Collection;
import java.util.HashSet;
import timber.log.Timber;
public class SpeechRecognizer {
private static final int CHANNELS = 1;
private static final int BIT_RATE = 32000;
private static final int SAMPLE_RATE = 44100;
private static final int SAMPLE_RATE_INDEX = 4;
protected static final String TAG = SpeechRecognizer.class.getSimpleName();
public int bufferSize;
public final Collection<RecognitionListener> listeners = new HashSet();
public final Handler mainHandler = new Handler(Looper.getMainLooper());
public final Recognizer recognizer;
private Thread recognizerThread;
public final AudioRecord recorder;
private SoundAmplitudeCallback soundAmplitudeCallback;
private File recordFile = null;
private boolean isRecordingToFileEnabled = false;
private boolean isRecordingToFilePrepared = false;
private boolean isContinueRecordingToFile = false;
public interface SoundAmplitudeCallback {
void onAmplitude(int amplitude);
}
public void setSoundAmplitudeCallback(SoundAmplitudeCallback callback) {
soundAmplitudeCallback = callback;
}
public SpeechRecognizer(Mabcd model) throws IOException {
this.recognizer = new Recognizer(model, SAMPLE_RATE);
this.bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
this.recorder = createAudioRecorder(this.bufferSize);
if (this.recorder.getState() == 0) {
this.recorder.release();
throw new IOException("Failed to initialize recorder. Microphone might be already in use.");
}
}
public void addListener(RecognitionListener listener) {
synchronized (this.listeners) {
this.listeners.add(listener);
}
}
public void removeListener(RecognitionListener listener) {
synchronized (this.listeners) {
this.listeners.remove(listener);
}
}
public boolean startListening() {
if (this.recognizerThread != null) {
return false;
}
this.recognizerThread = new RecognizerThread(this);
this.recognizerThread.start();
return true;
}
public boolean startListening(int timeout) {
if (this.recognizerThread != null) {
return false;
}
this.recognizerThread = new RecognizerThread(timeout);
this.recognizerThread.start();
return true;
}
private boolean stopRecognizerThread() {
if (this.recognizerThread == null) {
return false;
}
try {
this.recognizerThread.interrupt();
this.recognizerThread.join();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
this.recognizerThread = null;
return true;
}
public void startRecordToFile(File fileRecord) {
this.recordFile = fileRecord;
isRecordingToFileEnabled = true;
}
public void resumeRecordToFile(File fileRecord) {
this.recordFile = fileRecord;
isContinueRecordingToFile = true;
isRecordingToFileEnabled = true;
isRecordingToFilePrepared = false;
}
public void stopRecordToFile() {
isRecordingToFileEnabled = false;
isRecordingToFilePrepared = false;
isContinueRecordingToFile = false;
}
public boolean stop() {
boolean result = stopRecognizerThread();
if (result) {
this.mainHandler.post(new ResultEvent(this.recognizer.Rabcd(), true));
}
return result;
}
public boolean cancel() {
boolean result = stopRecognizerThread();
this.recognizer.Rabcd();
return result;
}
public void shutdown() {
this.recorder.release();
}
private final class RecognizerThread extends Thread {
private static final int NO_TIMEOUT = -1;
private int remainingSamples;
private int timeoutSamples;
VoiceRecorder voiceRecorder = null;
public RecognizerThread(int timeout) {
if (timeout != NO_TIMEOUT) {
this.timeoutSamples = (SpeechRecognizer.SAMPLE_RATE * timeout) / 1000;
} else {
this.timeoutSamples = NO_TIMEOUT;
}
this.remainingSamples = this.timeoutSamples;
}
public RecognizerThread(SpeechRecognizer speechRecognizer) {
this(NO_TIMEOUT);
}
public void run() {
voiceRecorder = new VoiceRecorder();
SpeechRecognizer.this.recorder.startRecording();
if (SpeechRecognizer.this.recorder.getRecordingState() == AudioRecord.RECORDSTATE_STOPPED) {
SpeechRecognizer.this.recorder.stop();
SpeechRecognizer.this.mainHandler.post(new OnErrorEvent(new IOException("Failed to start recording. Microphone might be already in use.")));
return;
}
byte[] buffer = new byte[SpeechRecognizer.this.bufferSize];
while (!interrupted() && (this.timeoutSamples == NO_TIMEOUT || this.remainingSamples > 0)) {
int nread = SpeechRecognizer.this.recorder.read(buffer, 0, buffer.length);
if (soundAmplitudeCallback != null) {
int max = 0;
for (short s : buffer) {
if (Math.abs(s) > max) {
max = Math.abs(s);
}
}
soundAmplitudeCallback.onAmplitude(max);
}
if (nread < 0) {
throw new RuntimeException("error reading audio buffer");
}
voiceRecorder.recording(nread, buffer);
if (SpeechRecognizer.this.recognizer.Aabcd(buffer, nread)) {
SpeechRecognizer.this.mainHandler.post(new ResultEvent(SpeechRecognizer.this.recognizer.Rabcd(), true));
} else {
SpeechRecognizer.this.mainHandler.post(new ResultEvent(SpeechRecognizer.this.recognizer.Pabcd(), false));
}
if (this.timeoutSamples != NO_TIMEOUT) {
this.remainingSamples -= nread;
}
}
voiceRecorder.shutdown();
SpeechRecognizer.this.recorder.stop();
SpeechRecognizer.this.mainHandler.removeCallbacksAndMessages((Object) null);
if (this.timeoutSamples != NO_TIMEOUT && this.remainingSamples <= 0) {
SpeechRecognizer.this.mainHandler.post(new TimeoutEvent());
}
}
}
/*
* Voice Recorder to file
* */
private class VoiceRecorder{
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
FileOutputStream fileOutputStream = null;
MediaCodec mediaCodec = null;
void recording(int nread, byte[] buffer){
/*step 1 prepare file*/
if (isRecordingToFileEnabled && !isRecordingToFilePrepared) {
//if we continue recording not create new file
if (recordFile == null) {
throw new IllegalArgumentException("Record file is null");
}
try {
fileOutputStream = new FileOutputStream(recordFile, isContinueRecordingToFile);
} catch (FileNotFoundException e) {
throw new RuntimeException(e);
}
if (mediaCodec == null){
try {
mediaCodec = createMediaCodec(bufferSize);
mediaCodec.start();
Timber.d("mediaCodec.start()");
} catch (IOException e) {
e.printStackTrace();
}
}
isRecordingToFilePrepared = true;
}
/*prepare file*/
/*step 2 recording*/
if (isRecordingToFileEnabled && isRecordingToFilePrepared) {
try {
if (fileOutputStream != null){
boolean success = handleCodecInput(nread, buffer, mediaCodec, Thread.currentThread().isAlive());
if (success)
handleCodecOutput(mediaCodec, bufferInfo, fileOutputStream);
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/*recording*/
/*step 3 finish recording and save to file*/
if (!isRecordingToFileEnabled && fileOutputStream != null) {
try {
VoiceRecorder.this.shutdown();
fileOutputStream.flush();
fileOutputStream.close();
fileOutputStream = null;
Timber.d("Finishing file");
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/*finish recording and save to file*/
}
void shutdown(){
if (mediaCodec != null){
mediaCodec.stop();
mediaCodec.release();
mediaCodec = null;
}
}
}
private abstract class RecognitionEvent implements Runnable {
public abstract void execute(RecognitionListener recognitionListener);
private RecognitionEvent() {
}
public void run() {
for (RecognitionListener listener : (RecognitionListener[]) SpeechRecognizer.this.listeners.toArray(new RecognitionListener[0])) {
execute(listener);
}
}
}
private class ResultEvent extends RecognitionEvent {
private final boolean finalResult;
protected final String hypothesis;
ResultEvent(String hypothesis2, boolean finalResult2) {
super();
this.hypothesis = hypothesis2;
this.finalResult = finalResult2;
}
public void execute(RecognitionListener listener) {
if (this.finalResult) {
listener.onResult(this.hypothesis);
} else {
listener.onPartialResult(this.hypothesis);
}
}
}
private class OnErrorEvent extends RecognitionEvent {
private final Exception exception;
OnErrorEvent(Exception exception2) {
super();
this.exception = exception2;
}
public void execute(RecognitionListener listener) {
listener.onError(this.exception);
}
}
private class TimeoutEvent extends RecognitionEvent {
private TimeoutEvent() {
super();
}
public void execute(RecognitionListener listener) {
listener.onTimeout();
}
}
private AudioRecord createAudioRecorder(int bufferSize) {
AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.VOICE_RECOGNITION, SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize * 10);
if (android.media.audiofx.NoiseSuppressor.isAvailable()) {
android.media.audiofx.NoiseSuppressor noiseSuppressor = android.media.audiofx.NoiseSuppressor
.create(recorder.getAudioSessionId());
if (noiseSuppressor != null) {
noiseSuppressor.setEnabled(true);
}
}
if (android.media.audiofx.AutomaticGainControl.isAvailable()) {
android.media.audiofx.AutomaticGainControl automaticGainControl = android.media.audiofx.AutomaticGainControl
.create(recorder.getAudioSessionId());
if (automaticGainControl != null) {
automaticGainControl.setEnabled(true);
}
}
return recorder;
}
private MediaCodec createMediaCodec(int bufferSize) throws IOException {
MediaCodec mediaCodec = MediaCodec.createEncoderByType("audio/mp4a-latm");
MediaFormat mediaFormat = new MediaFormat();
mediaFormat.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
mediaFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, SAMPLE_RATE);
mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, CHANNELS);
mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, bufferSize);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
mediaFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
try {
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
} catch (Exception e) {
Timber.tag(TAG).w(e);
mediaCodec.release();
throw new IOException(e);
}
return mediaCodec;
}
private boolean handleCodecInput(int length,
byte[] buffer,
MediaCodec mediaCodec,
boolean running) {
if (length == AudioRecord.ERROR_BAD_VALUE ||
length == AudioRecord.ERROR_INVALID_OPERATION ||
length != bufferSize) {
if (length != bufferSize) {
Timber.tag(TAG).d( "length != BufferSize calling onRecordFailed");
// if (onRecorderFailedListener != null) {
// Log.d(TAG, "length != BufferSize calling onRecordFailed");
// onRecorderFailedListener.onRecorderFailed();
// }
return false;
}
}
int codecInputBufferIndex = mediaCodec.dequeueInputBuffer(10 * 1000);
if (codecInputBufferIndex >= 0) {
ByteBuffer codecBuffer = mediaCodec.getInputBuffer(codecInputBufferIndex);
codecBuffer.clear();
codecBuffer.put(buffer);
mediaCodec.queueInputBuffer(codecInputBufferIndex, 0, length, 0, running ? 0 : MediaCodec.BUFFER_FLAG_END_OF_STREAM);
}
return true;
}
private void handleCodecOutput(MediaCodec mediaCodec,
MediaCodec.BufferInfo bufferInfo,
OutputStream outputStream) throws IOException {
int codecOutputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
while (codecOutputBufferIndex != MediaCodec.INFO_TRY_AGAIN_LATER) {
if (codecOutputBufferIndex >= 0) {
ByteBuffer encoderOutputBuffer = mediaCodec.getOutputBuffer(codecOutputBufferIndex);
encoderOutputBuffer.position(bufferInfo.offset);
encoderOutputBuffer.limit(bufferInfo.offset + bufferInfo.size);
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != MediaCodec.BUFFER_FLAG_CODEC_CONFIG) {
byte[] header = createAdtsHeader(bufferInfo.size - bufferInfo.offset);
outputStream.write(header);
byte[] data = new byte[encoderOutputBuffer.remaining()];
encoderOutputBuffer.get(data);
outputStream.write(data);
}
encoderOutputBuffer.clear();
mediaCodec.releaseOutputBuffer(codecOutputBufferIndex, false);
}
codecOutputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
}
}
private byte[] createAdtsHeader(int length) {
int frameLength = length + 7;
byte[] adtsHeader = new byte[7];
adtsHeader[0] = (byte) 0xFF; // Sync Word
adtsHeader[1] = (byte) 0xF1; // MPEG-4, Layer (0), No CRC
adtsHeader[2] = (byte) ((MediaCodecInfo.CodecProfileLevel.AACObjectLC - 1) << 6);
adtsHeader[2] |= (((byte) SAMPLE_RATE_INDEX) << 2);
adtsHeader[2] |= (((byte) CHANNELS) >> 2);
adtsHeader[3] = (byte) (((CHANNELS & 3) << 6) | ((frameLength >> 11) & 0x03));
adtsHeader[4] = (byte) ((frameLength >> 3) & 0xFF);
adtsHeader[5] = (byte) (((frameLength & 0x07) << 5) | 0x1f);
adtsHeader[6] = (byte) 0xFC;
return adtsHeader;
}
}
I have written code to record an audio file using AudioRecord, and while writing the file to the SD card I make two versions.
Version 1
The recorded file is saved on the SD card as it is.
Version 2
I apply a gain to the recorded file and save it on the SD card.
This works great on Sony Ericsson phones, and the audio volume is boosted to a great extent.
But I am struggling to make it work on Samsung devices.
When I play the recorded file it sounds like Talking Tom :P
Initially I thought the Samsung device did not like the combination I had used to create the AudioRecord.
So I used the following approach, in which I loop through the available configurations and use the best one to initialize the AudioRecord.
public AudioRecord findAudioRecord() {
for (int rate: mSampleRates) {
for (short audioFormat: new short[] {
AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT
}) {
for (short channelConfig: new short[] {
AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_IN_STEREO
}) {
try {
Log.i("vipul", "Attempting rate " + rate + "Hz, bits: " + audioFormat + ", channel: " + channelConfig);
int bufferSize = AudioRecord.getMinBufferSize(rate, channelConfig, audioFormat);
if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
// check if we can instantiate and have a success
AudioRecord recorder = new AudioRecord(
AudioSource.DEFAULT, rate, channelConfig, audioFormat, bufferSize);
if (recorder.getState() == AudioRecord.STATE_INITIALIZED) return recorder;
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
return null;
}
Below is the code that works well on Sony phones but struggles to work on Samsung devices.
public class EnvironmentRecorder extends Activity implements OnClickListener {
private static final int RECORDER_BPP = 16;
private static final String AUDIO_RECORDER_FILE_EXT_WAV = ".wav";
private static final String AUDIO_RECORDER_FOLDER = "MyRecorder";
private static final String AUDIO_RECORDER_TEMP_FILE = "record_temp.raw";
private static final int RECORDER_SAMPLERATE = 44100;
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_STEREO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private Button start, stop;
private AudioRecord recorder = null;
private int bufferSize = 0;
private Thread recordingThread = null;
private boolean isRecording = false;
private static int[] mSampleRates = new int[] {
8000, 11025, 22050, 44100
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
start = (Button) findViewById(R.id.start);
stop = (Button) findViewById(R.id.stop);
start.setOnClickListener(this);
stop.setOnClickListener(this);
}
@Override
public void onClick(View view) {
switch (view.getId()) {
case R.id.start:
startRecord();
break;
case R.id.stop:
stopRecording();
break;
}
}
public EnvironmentRecorder() {
try {
bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
} catch (Exception e) {
e.printStackTrace();
}
}
private String getFilename1() {
String filepath = Environment.getExternalStorageDirectory().getPath();
File file = new File(filepath, AUDIO_RECORDER_FOLDER);
if (!file.exists()) {
file.mkdirs();
}
return (file.getAbsolutePath() + "/" + "NotGained" + AUDIO_RECORDER_FILE_EXT_WAV);
}
private String getFilename2() {
String filepath = Environment.getExternalStorageDirectory().getPath();
File file = new File(filepath, AUDIO_RECORDER_FOLDER);
if (!file.exists()) {
file.mkdirs();
}
return (file.getAbsolutePath() + "/" + "Gained" + AUDIO_RECORDER_FILE_EXT_WAV);
}
private String getTempFilename() {
String filepath = Environment.getExternalStorageDirectory().getPath();
File file = new File(filepath, AUDIO_RECORDER_FOLDER);
if (!file.exists()) {
file.mkdirs();
}
File tempFile = new File(filepath, AUDIO_RECORDER_TEMP_FILE);
if (tempFile.exists()) tempFile.delete();
return (file.getAbsolutePath() + "/" + AUDIO_RECORDER_TEMP_FILE);
}
public AudioRecord findAudioRecord() {
for (int rate: mSampleRates) {
for (short audioFormat: new short[] {
AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT
}) {
for (short channelConfig: new short[] {
AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_IN_STEREO
}) {
try {
Log.v("vipul", "Attempting rate " + rate + "Hz, bits: " + audioFormat + ", channel: " + channelConfig);
int bufferSize = AudioRecord.getMinBufferSize(rate, channelConfig, audioFormat);
if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
// check if we can instantiate and have a success
AudioRecord recorder = new AudioRecord(
AudioSource.DEFAULT, rate, channelConfig, audioFormat, bufferSize);
if (recorder.getState() == AudioRecord.STATE_INITIALIZED) return recorder;
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
return null;
}
public void startRecord() {
/*
* recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
* RECORDER_SAMPLERATE, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING,
* bufferSize);
*/
recorder = findAudioRecord();
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
@Override
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
private void writeAudioDataToFile() {
byte data[] = new byte[bufferSize];
String filename = getTempFilename();
FileOutputStream os = null;
try {
os = new FileOutputStream(filename);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
int read = 0;
if (null != os) {
while (isRecording) {
read = recorder.read(data, 0, bufferSize);
if (AudioRecord.ERROR_INVALID_OPERATION != read) {
try {
os.write(data, 0, read);
} catch (IOException e) {
e.printStackTrace();
}
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
public void stopRecording() {
if (null != recorder) {
isRecording = false;
recorder.stop();
recorder.release();
recorder = null;
recordingThread = null;
copyWaveFile(getTempFilename(), getFilename1(), getFilename2());
deleteTempFile();
}
}
private void deleteTempFile() {
File file = new File(getTempFilename());
file.delete();
}
private void copyWaveFile(String inFilename, String outFileName1, String outFileName2) {
FileInputStream in = null;
FileOutputStream out1 = null, out2 = null;
long totalAudioLen = 0;
long totalDataLen = totalAudioLen + 36;
long longSampleRate = RECORDER_SAMPLERATE;
int channels = 2;
long byteRate = RECORDER_BPP * RECORDER_SAMPLERATE * channels / 8;
byte[] data = new byte[bufferSize];
try {
in = new FileInputStream(inFilename);
out1 = new FileOutputStream(outFileName1);
out2 = new FileOutputStream(outFileName2);
totalAudioLen = in.getChannel().size();
totalDataLen = totalAudioLen + 36;
WriteWaveFileHeader(out1, totalAudioLen, totalDataLen, longSampleRate, channels, byteRate);
WriteWaveFileHeader(out2, totalAudioLen, totalDataLen, longSampleRate, channels, byteRate);
int bytesRead;
while ((bytesRead = in.read(data)) != -1) {
out1.write(data, 0, bytesRead); // Writing Non-Gained Data
float rGain = 2.5f;
for (int i = 0; i < bytesRead / 2; i++) {
short curSample = getShort(data[i * 2], data[i * 2 + 1]);
if (rGain != 1) {
// apply gain
curSample *= rGain;
// convert back from short sample that was "gained" to
// byte data
byte[] a = getByteFromShort(curSample);
// modify buffer to contain the gained sample
data[i * 2] = a[0];
data[i * 2 + 1] = a[1];
}
}
out2.write(data, 0, bytesRead); // Writing Gained Data
}
out1.close();
out2.close();
in.close();
Toast.makeText(this, "Done!!", Toast.LENGTH_LONG).show();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
private short getShort(byte argB1, byte argB2) {
return (short)((argB1 & 0xff) | (argB2 << 8));
}
private byte[] getByteFromShort(short x) {
// variant 1 - noise
byte[] a = new byte[2];
a[0] = (byte)(x & 0xff);
a[1] = (byte)((x >> 8) & 0xff);
// variant 2 - noise and almost broke my ears - very loud
// ByteBuffer buffer = ByteBuffer.allocate(2);
// buffer.putShort(x);
// buffer.flip();
return a;
}
private void WriteWaveFileHeader(FileOutputStream out, long totalAudioLen, long totalDataLen, long longSampleRate, int channels, long byteRate)
throws IOException {
byte[] header = new byte[44];
header[0] = 'R';
header[1] = 'I';
header[2] = 'F';
header[3] = 'F';
header[4] = (byte)(totalDataLen & 0xff);
header[5] = (byte)((totalDataLen >> 8) & 0xff);
header[6] = (byte)((totalDataLen >> 16) & 0xff);
header[7] = (byte)((totalDataLen >> 24) & 0xff);
header[8] = 'W';
header[9] = 'A';
header[10] = 'V';
header[11] = 'E';
header[12] = 'f';
header[13] = 'm';
header[14] = 't';
header[15] = ' ';
header[16] = 16;
header[17] = 0;
header[18] = 0;
header[19] = 0;
header[20] = 1;
header[21] = 0;
header[22] = (byte) channels;
header[23] = 0;
header[24] = (byte)(longSampleRate & 0xff);
header[25] = (byte)((longSampleRate >> 8) & 0xff);
header[26] = (byte)((longSampleRate >> 16) & 0xff);
header[27] = (byte)((longSampleRate >> 24) & 0xff);
header[28] = (byte)(byteRate & 0xff);
header[29] = (byte)((byteRate >> 8) & 0xff);
header[30] = (byte)((byteRate >> 16) & 0xff);
header[31] = (byte)((byteRate >> 24) & 0xff);
header[32] = (byte)(2 * 16 / 8);
header[33] = 0;
header[34] = RECORDER_BPP;
header[35] = 0;
header[36] = 'd';
header[37] = 'a';
header[38] = 't';
header[39] = 'a';
header[40] = (byte)(totalAudioLen & 0xff);
header[41] = (byte)((totalAudioLen >> 8) & 0xff);
header[42] = (byte)((totalAudioLen >> 16) & 0xff);
header[43] = (byte)((totalAudioLen >> 24) & 0xff);
out.write(header, 0, 44);
}
}
I would like to know if I need to add any extra lines of code to make my AudioRecord work reliably on Samsung devices.
We're also struggling with audio recording on some Samsung Android devices. Unfortunately it seems to be very broken, as even different revisions of the same phone model behave differently with the same codebase.
Here are my current findings, hoping you find something useful:
1. Broken Initialization:
Unfortunately, the strategy you are using to query for valid recording configurations will fail at least on Samsung Galaxy Young and Ace models running Android 2.3.
The problem is that some invalid AudioRecord configurations, instead of simply failing, will completely brick the audio capture subsystem if tried. You'll need to reset the phone to recover from that state.
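For illustration, here is a sketch of a more conservative probe that only tries a small whitelist of configurations and releases every instance that fails to initialize (the rate list and the name findSafeAudioRecord are assumptions for this example, not values documented by Samsung or Android):
// Sketch: probe only a short, conservative list of configurations and always
// release instances that did not reach STATE_INITIALIZED.
private static final int[] SAFE_RATES = { 8000, 16000, 44100, 48000 };

public AudioRecord findSafeAudioRecord() {
    for (int rate : SAFE_RATES) {
        int bufferSize = AudioRecord.getMinBufferSize(rate,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        if (bufferSize == AudioRecord.ERROR_BAD_VALUE) {
            continue; // do not even construct an AudioRecord for this rate
        }
        AudioRecord candidate = null;
        try {
            candidate = new AudioRecord(MediaRecorder.AudioSource.MIC, rate,
                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
            if (candidate.getState() == AudioRecord.STATE_INITIALIZED) {
                return candidate;
            }
        } catch (IllegalArgumentException e) {
            // the device rejected the parameters outright
        }
        if (candidate != null) {
            candidate.release(); // never leave a half-initialized instance behind
        }
    }
    return null;
}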
2. Inconsistent Sampling-Rate support along revisions of same phone model
On an older Galaxy Ace phone, recording @ 11025Hz, 16-bit mono will succeed. On newer Ace revisions, this AudioRecord configuration will be accepted as valid, but the resulting recording will be distorted, with a "chipmunk" effect. A very popular guitar tuner app that has hardcoded this sampling rate fails to give proper tuning readings on these phones precisely because of this problem!
3. Extremely low volume audio capture on some configurations.
In Galaxy Young and Galaxy Ace, recording from the mic or default audio source @ 44,100Hz (the supposedly canonical rate where everything should work fine) produces an undistorted, but extremely low-volume recording. I haven't yet found a way to fix this other than software amplification (which is the equivalent of magnifying a very low-res image, with the consequent "jaggedness" of the result).
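Software amplification here boils down to multiplying each 16-bit sample by a gain factor and clamping the result to the valid range; a minimal sketch (the helper name applyGain is illustrative):
// Sketch: amplify 16-bit PCM samples in place, clamping to avoid wrap-around
// distortion when a sample overflows the short range.
private static void applyGain(short[] samples, int count, float gain) {
    for (int i = 0; i < count; i++) {
        int amplified = (int) (samples[i] * gain);
        if (amplified > Short.MAX_VALUE) {
            amplified = Short.MAX_VALUE;
        } else if (amplified < Short.MIN_VALUE) {
            amplified = Short.MIN_VALUE;
        }
        samples[i] = (short) amplified;
    }
}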
4. Failure to support the canonical 44,100Hz sampling rate on every audio capture source.
In Galaxy Young and Galaxy Ace, recording from the Camcorder source fails @ 44,100Hz (again, the configuration will be accepted as valid), producing complete garbage. However, recording @ 8,000Hz, 16,000Hz and 48,000Hz works fine and produces a recording with very acceptable volume levels. What is frustrating is that according to the Android documentation, 44,100Hz is a sampling rate all devices SHOULD support.
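One possible workaround is to special-case the Camcorder source and fall back through the rates observed to work; a sketch (the fallback list and the method name openCamcorderRecorder are illustrative, based only on the observations above):
// Sketch: skip 44,100Hz for the Camcorder source on these models and try the
// rates that were observed to work, releasing any instance that fails.
private static final int[] CAMCORDER_FALLBACK_RATES = { 48000, 16000, 8000 };

private AudioRecord openCamcorderRecorder() {
    for (int rate : CAMCORDER_FALLBACK_RATES) {
        int bufferSize = AudioRecord.getMinBufferSize(rate,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        if (bufferSize == AudioRecord.ERROR_BAD_VALUE) continue;
        AudioRecord candidate = new AudioRecord(MediaRecorder.AudioSource.CAMCORDER, rate,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
        if (candidate.getState() == AudioRecord.STATE_INITIALIZED) {
            return candidate;
        }
        candidate.release();
    }
    return null;
}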
5. OpenSL does not fix any of the problems reported.
Working with the NDK and OpenSL produces the same results described above. It seems that the AudioRecord class is simply wrapping calls to OpenSL, and the problem is either hardware-based or buried at a lower-level tier in the kernel code.
This situation is very unfortunate indeed, as these models are becoming very popular - at least in Mexico.
Good luck - and please report if you had better luck working with these phones.
=)
Audio gain control: to increase the amplitude of the audio you need to calculate a gain factor and multiply every captured sample by it. The following code does that. P.S. Ignore the unrelated code.
public class MainActivity extends Activity {
public static final int SAMPLE_RATE = 16000;
private AudioRecord mRecorder;
private File mRecording;
private short[] mBuffer;
private final String startRecordingLabel = "Start recording";
private final String stopRecordingLabel = "Stop recording";
private boolean mIsRecording = false;
private ProgressBar mProgressBar;
float iGain = 1.0f;
CheckBox gain;
protected int bitsPerSamples = 16;
@Override
public void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.fragment_main);
initRecorder();
Button bluetooth = (Button)findViewById(R.id.blue);
gain = (CheckBox) findViewById(R.id.checkBox1);
mProgressBar = (ProgressBar) findViewById(R.id.progressBar);
final Button button = (Button) findViewById(R.id.start);
button.setText(startRecordingLabel);
bluetooth.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
Intent i = new Intent("");
}
});
gain.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView,
boolean isChecked) {
if (gain.isChecked()) {
iGain = 5.0f;
} else {
iGain = 2.0f;
}
}
});
button.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(final View v) {
if (!mIsRecording) {
button.setText(stopRecordingLabel);
mIsRecording = true;
mRecorder.startRecording();
mRecording = getFile("raw");
startBufferedWrite(mRecording);
} else {
button.setText(startRecordingLabel);
mIsRecording = false;
mRecorder.stop();
File waveFile = getFile("wav");
try {
rawToWave(mRecording, waveFile);
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
}
Toast.makeText(MainActivity.this,
"Recorded to " + waveFile.getName(),
Toast.LENGTH_SHORT).show();
}
}
});
}
@Override
public void onDestroy() {
mRecorder.release();
super.onDestroy();
}
private void initRecorder() {
int bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
mBuffer = new short[bufferSize];
mRecorder = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT,
bufferSize);
}
private void startBufferedWrite(final File file) {
new Thread(new Runnable() {
@Override
public void run() {
DataOutputStream output = null;
try {
output = new DataOutputStream(new BufferedOutputStream(
new FileOutputStream(file)));
while (mIsRecording) {
double sum = 0;
int readSize = mRecorder.read(mBuffer, 0,
mBuffer.length);
final int bytesPerSample = bitsPerSamples / 8;
final int emptySpace = 64 - bitsPerSamples;
int byteIndex = 0;
int byteIndex2 = 0;
int temp = 0;
int mLeftTemp = 0;
int mRightTemp = 0;
int a = 0;
int x = 0;
for (int frameIndex = 0; frameIndex < readSize; frameIndex++) {
for (int c = 0; c < 1; c++) {
if (iGain != 1) {
long accumulator = 0;
for (int b = 0; b < bytesPerSample; b++) {
accumulator += ((long) (mBuffer[byteIndex++] & 0xFF)) << (b * 8 + emptySpace);
}
double sample = ((double) accumulator / (double) Long.MAX_VALUE);
sample *= iGain;
int intValue = (int) ((double) sample * (double) Integer.MAX_VALUE);
for (int i = 0; i < bytesPerSample; i++) {
mBuffer[i + byteIndex2] = (byte) (intValue >>> ((i + 2) * 8) & 0xff);
}
byteIndex2 += bytesPerSample;
}
}// end for(channel)
// mBuffer[frameIndex] *=iGain;
if (mBuffer[frameIndex] > 32765) {
mBuffer[frameIndex] = 32767;
} else if (mBuffer[frameIndex] < -32767) {
mBuffer[frameIndex] = -32767;
}
output.writeShort(mBuffer[frameIndex]);
sum += mBuffer[frameIndex] * mBuffer[frameIndex];
}
if (readSize > 0) {
final double amplitude = sum / readSize;
mProgressBar.setProgress((int) Math.sqrt(amplitude));
}
}
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
} finally {
mProgressBar.setProgress(0);
if (output != null) {
try {
output.flush();
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
} finally {
try {
output.close();
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
}
}
}
}
}
}).start();
}
private void rawToWave(final File rawFile, final File waveFile)
throws IOException {
byte[] rawData = new byte[(int) rawFile.length()];
DataInputStream input = null;
try {
input = new DataInputStream(new FileInputStream(rawFile));
input.readFully(rawData);
} finally {
if (input != null) {
input.close();
}
}
DataOutputStream output = null;
try {
output = new DataOutputStream(new FileOutputStream(waveFile));
// WAVE header
// see http://ccrma.stanford.edu/courses/422/projects/WaveFormat/
writeString(output, "RIFF"); // chunk id
writeInt(output, 36 + rawData.length); // chunk size
writeString(output, "WAVE"); // format
writeString(output, "fmt "); // subchunk 1 id
writeInt(output, 16); // subchunk 1 size
writeShort(output, (short) 1); // audio format (1 = PCM)
writeShort(output, (short) 1); // number of channels
writeInt(output, SAMPLE_RATE); // sample rate
writeInt(output, SAMPLE_RATE * 2); // byte rate
writeShort(output, (short) 2); // block align
writeShort(output, (short) 16); // bits per sample
writeString(output, "data"); // subchunk 2 id
writeInt(output, rawData.length); // subchunk 2 size
// Audio data (conversion big endian -> little endian)
short[] shorts = new short[rawData.length / 2];
ByteBuffer.wrap(rawData).order(ByteOrder.LITTLE_ENDIAN)
.asShortBuffer().get(shorts);
ByteBuffer bytes = ByteBuffer.allocate(shorts.length * 2);
for (short s : shorts) {
// Apply Gain
/*
* s *= iGain; if(s>32767) { s=32767; } else if(s<-32768) {
* s=-32768; }
*/
bytes.putShort(s);
}
output.write(bytes.array());
} finally {
if (output != null) {
output.close();
}
}
}
private File getFile(final String suffix) {
Time time = new Time();
time.setToNow();
return new File(Environment.getExternalStorageDirectory(),
time.format("%Y%m%d%H%M%S") + "." + suffix);
}
private void writeInt(final DataOutputStream output, final int value)
throws IOException {
output.write(value >> 0);
output.write(value >> 8);
output.write(value >> 16);
output.write(value >> 24);
}
private void writeShort(final DataOutputStream output, final short value)
throws IOException {
output.write(value >> 0);
output.write(value >> 8);
}
private void writeString(final DataOutputStream output, final String value)
throws IOException {
for (int i = 0; i < value.length(); i++) {
output.write(value.charAt(i));
}
}
}
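As a sanity check on the header written in rawToWave() above: the byte rate and block align fields must stay consistent with the actual sample rate, channel count and bit depth, and a mismatch between header and data is a common cause of playback that sounds too fast or distorted. A small sketch that derives them instead of hardcoding (helper names are illustrative):
// Sketch: compute the fmt-chunk fields from the recording parameters so the
// header always matches the captured data.
private static int byteRate(int sampleRate, int channels, int bitsPerSample) {
    return sampleRate * channels * (bitsPerSample / 8); // e.g. 16000 * 1 * 2 = 32000
}

private static short blockAlign(int channels, int bitsPerSample) {
    return (short) (channels * (bitsPerSample / 8));    // e.g. 1 * 2 = 2
}
For the 16,000Hz mono, 16-bit configuration used above, this gives a byte rate of 32,000 and a block align of 2, matching the values hardcoded in rawToWave().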