I'm trying to record (capture) audio data to a new .wav file. I'm using Oboe (C++) to buffer the audio, following this example code.
This is how I capture audio data from the mixer into the recordBuffer array:
void Mixer::renderAudio(int16_t *audioData, int32_t numFrames) {
    int32_t count = numFrames * kChannelCount;
    // Zero out the incoming container array
    for (int j = 0; j < count; ++j) {
        audioData[j] = 0;
    }
    for (int i = 0; i < mNextFreeTrackIndex; ++i) {
        mTracks[i]->renderAudio(mixingBuffer, numFrames);
        for (int j = 0; j < count; ++j) {
            // volume, recording, recordFrames and recordBuffer are member fields
            int16_t data = mixingBuffer[j];
            data *= volume;
            audioData[j] += data;
            if (recording && recordFrames < kMaxRecordSize) {
                if (data != 0)
                    recordBuffer[recordFrames++] = data;
            }
        }
    }
}
Then I stop the recording and return the short array to the Java code:
jshortArray Mixer::getRecordingData(JNIEnv *env) {
    recording = false;
    jshortArray result = env->NewShortArray(recordFrames);
    env->SetShortArrayRegion(result, 0, recordFrames, recordBuffer);
    return result;
}
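For context, here is how the Java side might declare and use that native method; the class and library names below are illustrative assumptions, not the actual project code:

public class NativeMixer {
    static {
        System.loadLibrary("mixer"); // assumed name of the C++ library holding Mixer
    }

    // bound to Mixer::getRecordingData through JNI
    public static native short[] getRecordingData();
}

// Usage: hand the captured samples to the Wave class shown below.
short[] samples = NativeMixer.getRecordingData();
Wave wave = new Wave(samples, 0, samples.length - 1);
wave.wroteToFile();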
Then, in the Java code, I create the .wav file:
public class Wave {
    private final int LONGINT = 4;
    private final int SMALLINT = 2;
    private final int INTEGER = 4;
    private final int ID_STRING_SIZE = 4;
    private final int WAV_RIFF_SIZE = LONGINT + ID_STRING_SIZE;
    private final int WAV_FMT_SIZE = (4 * SMALLINT) + (INTEGER * 2) + LONGINT + ID_STRING_SIZE;
    private final int WAV_DATA_SIZE = ID_STRING_SIZE + LONGINT;
    private final int WAV_HDR_SIZE = WAV_RIFF_SIZE + ID_STRING_SIZE + WAV_FMT_SIZE + WAV_DATA_SIZE;
    private final short PCM = 1;
    private final int SAMPLE_SIZE = 2;

    int cursor, nSamples;
    byte[] output;
    int sampleRate = 48000;
    short channels = 2;

    public Wave(short[] data, int start, int end) {
        nSamples = end - start + 1;
        cursor = 0;
        output = new byte[nSamples * SMALLINT + WAV_HDR_SIZE];
        buildHeader(sampleRate, channels);
        writeData(data, start, end);
    }

    private void buildHeader(int sampleRate, short nChannels) {
        write("RIFF");
        write(output.length); // note: the RIFF chunk size field is conventionally file length - 8
        write("WAVE");
        writeFormat(sampleRate, nChannels);
    }

    public void writeFormat(int sampleRate, short nChannels) {
        write("fmt ");
        write(WAV_FMT_SIZE - WAV_DATA_SIZE); // 16 bytes for PCM
        write(PCM);
        write(nChannels);
        write(sampleRate);
        write(nChannels * sampleRate * SAMPLE_SIZE); // byte rate
        write((short) (nChannels * SAMPLE_SIZE));    // block align
        write((short) 16);                           // bits per sample
    }

    public void writeData(short[] data, int start, int end) {
        write("data");
        write(nSamples * SMALLINT);
        for (int i = start; i <= end; i++) {
            write(data[i]);
        }
    }

    private void write(byte b) {
        output[cursor++] = b;
    }

    private void write(String id) {
        if (id.length() == ID_STRING_SIZE) {
            for (int i = 0; i < ID_STRING_SIZE; ++i) {
                write((byte) id.charAt(i));
            }
        }
    }

    private void write(int i) {
        write((byte) (i & 0xFF)); i >>= 8;
        write((byte) (i & 0xFF)); i >>= 8;
        write((byte) (i & 0xFF)); i >>= 8;
        write((byte) (i & 0xFF));
    }

    private void write(short i) {
        write((byte) (i & 0xFF)); i >>= 8;
        write((byte) (i & 0xFF));
    }

    public boolean wroteToFile() {
        boolean ok;
        try {
            File path = FileManager.generateNewFile(sampleRate + " " + channels);
            FileOutputStream outFile = new FileOutputStream(path);
            outFile.write(output);
            outFile.close();
            ok = true;
        } catch (FileNotFoundException e) {
            e.printStackTrace();
            ok = false;
        } catch (IOException e) {
            ok = false;
            e.printStackTrace();
        }
        return ok;
    }
}
The results are pretty close, but the quality is very bad. Does anyone know what I'm doing wrong?
By the way, it only works well if I play one sample at a time.
Here is a way to record an Oboe input stream to a .wav file in C++ for Android:
https://github.com/reuniware/OboeAudioRecorder/blob/master/app/src/main/cpp/OboeAudioRecorder.cpp
I get two different audio samples from two sources.
For microphone sound:
audioRecord =
new AudioRecord(MediaRecorder.AudioSource.DEFAULT, 44100, AudioFormat.CHANNEL_IN_STEREO, AudioFormat.ENCODING_PCM_16BIT,
(AudioRecord.getMinBufferSize(44100, AudioFormat.CHANNEL_IN_STEREO, AudioFormat.ENCODING_PCM_16BIT)*5));
For internal sound:
audioRecord = new AudioRecord.Builder()
.setAudioPlaybackCaptureConfig(config)
.setAudioFormat(new AudioFormat.Builder()
.setEncoding(AudioFormat.ENCODING_PCM_16BIT)
.setSampleRate(44100)
.setChannelMask(AudioFormat.CHANNEL_IN_STEREO)
.build())
.setBufferSizeInBytes((AudioRecord.getMinBufferSize(44100, AudioFormat.CHANNEL_IN_STEREO, AudioFormat.ENCODING_PCM_16BIT)*5))
.build();
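The config object here is an AudioPlaybackCaptureConfiguration. For completeness, a minimal sketch of building one; this requires API 29+ and a MediaProjection obtained from the screen-capture permission flow, so the mediaProjection variable below is an assumption:

AudioPlaybackCaptureConfiguration config =
        new AudioPlaybackCaptureConfiguration.Builder(mediaProjection)
                .addMatchingUsage(AudioAttributes.USAGE_MEDIA) // capture media playback
                .addMatchingUsage(AudioAttributes.USAGE_GAME)  // and game audio
                .build();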
For reading from the audioRecord object, we create individual frame objects (custom objects called Frame):
private ByteBuffer pcmBuffer = ByteBuffer.allocateDirect(4096);

private Frame read() {
    pcmBuffer.rewind();
    int size = audioRecord.read(pcmBuffer, pcmBuffer.remaining());
    if (size <= 0) {
        return null;
    }
    return new Frame(pcmBuffer.array(), pcmBuffer.arrayOffset(), size);
}
We create two separate linked lists to hold the frames returned by the read function:
private LinkedList<Frame> internalAudioQueue = new LinkedList<>();
private LinkedList<Frame> microphoneAudioQueue = new LinkedList<>();
public void onFrameReceived(Frame frame, boolean isInternalAudio) {
    if (isInternalAudio) {
        internalAudioQueue.add(frame);
    } else {
        microphoneAudioQueue.add(frame);
    }
    checkAndPoll();
}
Every time we add a frame to one of the lists, we call the following checkAndPoll() function and, depending on the case, pass a frame to the audioEncoder:
public void checkAndPoll() {
    Frame frame1 = internalAudioQueue.poll();
    Frame frame2 = microphoneAudioQueue.poll();
    if (frame1 == null && frame2 != null) {
        audioEncoder.inputPCMData(frame2);
    } else if (frame1 != null && frame2 == null) {
        audioEncoder.inputPCMData(frame1);
    } else if (frame1 != null && frame2 != null) {
        Frame frame = new Frame(
                PCMUtil.mix(frame1.getBuffer(), frame2.getBuffer(),
                        frame1.getSize(), frame2.getSize(), false),
                frame1.getOrientation(), frame1.getSize());
        audioEncoder.inputPCMData(frame);
    }
}
We then mix the audio samples (as byte buffers) from the two sources in the following way, adapted from Hendrik's answer at this link:
public static byte[] mix(final byte[] a, final byte[] b, final boolean bigEndian) {
    final byte[] aa;
    final byte[] bb;
    final int length = Math.max(a.length, b.length);
    // ensure same lengths
    if (a.length != b.length) {
        aa = new byte[length];
        bb = new byte[length];
        System.arraycopy(a, 0, aa, 0, a.length);
        System.arraycopy(b, 0, bb, 0, b.length);
    } else {
        aa = a;
        bb = b;
    }
    // convert to samples
    final int[] aSamples = toSamples(aa, bigEndian);
    final int[] bSamples = toSamples(bb, bigEndian);
    // mix by adding
    final int[] mix = new int[aSamples.length];
    for (int i = 0; i < mix.length; i++) {
        mix[i] = aSamples[i] + bSamples[i];
        // enforce min and max (may introduce clipping)
        mix[i] = Math.min(Short.MAX_VALUE, mix[i]);
        mix[i] = Math.max(Short.MIN_VALUE, mix[i]);
    }
    // convert back to bytes
    return toBytes(mix, bigEndian);
}
private static int[] toSamples(final byte[] byteSamples, final boolean bigEndian) {
    final int bytesPerChannel = 2;
    final int length = byteSamples.length / bytesPerChannel;
    if ((length % 2) != 0) throw new IllegalArgumentException("For 16 bit audio, length must be even: " + length);
    final int[] samples = new int[length];
    for (int sampleNumber = 0; sampleNumber < length; sampleNumber++) {
        final int sampleOffset = sampleNumber * bytesPerChannel;
        final int sample = bigEndian
                ? byteToIntBigEndian(byteSamples, sampleOffset, bytesPerChannel)
                : byteToIntLittleEndian(byteSamples, sampleOffset, bytesPerChannel);
        samples[sampleNumber] = sample;
    }
    return samples;
}

private static byte[] toBytes(final int[] intSamples, final boolean bigEndian) {
    final int bytesPerChannel = 2;
    final int length = intSamples.length * bytesPerChannel;
    final byte[] bytes = new byte[length];
    for (int sampleNumber = 0; sampleNumber < intSamples.length; sampleNumber++) {
        final byte[] b = bigEndian
                ? intToByteBigEndian(intSamples[sampleNumber], bytesPerChannel)
                : intToByteLittleEndian(intSamples[sampleNumber], bytesPerChannel);
        System.arraycopy(b, 0, bytes, sampleNumber * bytesPerChannel, bytesPerChannel);
    }
    return bytes;
}

// from https://github.com/hendriks73/jipes/blob/master/src/main/java/com/tagtraum/jipes/audio/AudioSignalSource.java#L238
private static int byteToIntLittleEndian(final byte[] buf, final int offset, final int bytesPerSample) {
    int sample = 0;
    for (int byteIndex = 0; byteIndex < bytesPerSample; byteIndex++) {
        final int aByte = buf[offset + byteIndex] & 0xff;
        sample += aByte << (8 * byteIndex);
    }
    return (short) sample;
}

// from https://github.com/hendriks73/jipes/blob/master/src/main/java/com/tagtraum/jipes/audio/AudioSignalSource.java#L247
private static int byteToIntBigEndian(final byte[] buf, final int offset, final int bytesPerSample) {
    int sample = 0;
    for (int byteIndex = 0; byteIndex < bytesPerSample; byteIndex++) {
        final int aByte = buf[offset + byteIndex] & 0xff;
        sample += aByte << (8 * (bytesPerSample - byteIndex - 1));
    }
    return (short) sample;
}

private static byte[] intToByteLittleEndian(final int sample, final int bytesPerSample) {
    byte[] buf = new byte[bytesPerSample];
    for (int byteIndex = 0; byteIndex < bytesPerSample; byteIndex++) {
        buf[byteIndex] = (byte) ((sample >>> (8 * byteIndex)) & 0xFF);
    }
    return buf;
}

private static byte[] intToByteBigEndian(final int sample, final int bytesPerSample) {
    byte[] buf = new byte[bytesPerSample];
    for (int byteIndex = 0; byteIndex < bytesPerSample; byteIndex++) {
        buf[byteIndex] = (byte) ((sample >>> (8 * (bytesPerSample - byteIndex - 1))) & 0xFF);
    }
    return buf;
}
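As a quick sanity check of mix() under these conventions (the byte values are worked out by hand, and each buffer holds two samples so the even-length check in toSamples passes):

byte[] a = {(byte) 0xE8, 0x03, 0x30, 0x75}; // little-endian samples 1000, 30000
byte[] b = {(byte) 0xD0, 0x07, 0x10, 0x27}; // little-endian samples 2000, 10000
byte[] mixed = mix(a, b, false);
// mixed now encodes 3000 and 32767; the second sum (40000) is clipped.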
The mixed samples I am getting have both distortion and noise, and I cannot figure out what needs to be done to remove them. Any help here is appreciated.
Thanks in advance!
I think if you're mixing, you should take the (weighted) average of both. If you've got samples of 128 and 128, the result should still be 128, not 256, which could be out of range. So just change your code to:
// mix by averaging
final int[] mix = new int[aSamples.length];
for (int i = 0; i < mix.length; i++) {
    // the average of two in-range samples is always in range
    mix[i] = (aSamples[i] + bSamples[i]) >> 1;
}
Does that work for you?
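If the two sources shouldn't contribute equally, the same loop can apply explicit weights. A small variant; the 0.7/0.3 split is an arbitrary assumption, and the weights should sum to 1.0 to keep the result in range:

// weighted average instead of the equal split above
final double weightA = 0.7, weightB = 0.3; // assumed weights, summing to 1.0
final int[] mix = new int[aSamples.length];
for (int i = 0; i < mix.length; i++) {
    mix[i] = (int) (aSamples[i] * weightA + bSamples[i] * weightB);
}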
Is it possible to record voice from a Bluetooth headset and play it on the Android speaker simultaneously? I have finished recording audio from the headset with the code below, and I am now working on the second step: playing that sound through the Android speaker. Please help me resolve it. Thank you so much.
_audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
_audioManager.startBluetoothSco();
_recorder = new MediaRecorder();
_recorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
_recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
_recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
_recorder.setOutputFile(file.toString());
_recorder.prepare();
_recorder.start();
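MediaRecorder writes to a file and cannot be played back live, so for the second step a common approach is an AudioRecord-to-AudioTrack loop. A hedged sketch; the 8000 Hz mono format (typical for narrowband SCO) and the isLooping control flag are assumptions:

int sampleRate = 8000; // SCO links are commonly narrowband
int minBuf = AudioRecord.getMinBufferSize(sampleRate,
        AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
        sampleRate, AudioFormat.CHANNEL_IN_MONO,
        AudioFormat.ENCODING_PCM_16BIT, minBuf * 2);
AudioTrack player = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
        AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
        minBuf * 2, AudioTrack.MODE_STREAM);
byte[] buffer = new byte[minBuf];
recorder.startRecording();
player.play();
while (isLooping) { // assumed flag toggled from the UI
    int read = recorder.read(buffer, 0, buffer.length);
    if (read > 0) {
        player.write(buffer, 0, read); // copy mic/SCO input straight to output
    }
}
recorder.stop();
player.stop();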
Recording using AudioRecord
public class MainActivity extends Activity {
public static final int SAMPLE_RATE = 16000;
private AudioRecord mRecorder;
private File mRecording;
private short[] mBuffer;
private final String startRecordingLabel = "Start recording";
private final String stopRecordingLabel = "Stop recording";
private boolean mIsRecording = false;
private ProgressBar mProgressBar;
float iGain = 1.0f;
CheckBox gain;
Button showPref;
OnBluetoothRecording bluetoothRecording;
protected int bitsPerSamples = 16;
@Override
public void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.fragment_main);
initRecorder();
Button bluetooth = (Button)findViewById(R.id.blue);
showPref = (Button)findViewById(R.id.showPreferece);
gain = (CheckBox) findViewById(R.id.checkBox1);
mProgressBar = (ProgressBar) findViewById(R.id.progressBar);
showPref.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
startActivity(new Intent(getApplicationContext(),BluetoothPreferenceActivity.class));
}
});
final Button button = (Button) findViewById(R.id.start);
button.setText(startRecordingLabel);
bluetooth.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
Intent i = new Intent("");
}
});
gain.setOnCheckedChangeListener(new OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView,
boolean isChecked) {
if (gain.isChecked()) {
iGain = 5.0f;
} else {
iGain = 2.0f;
}
}
});
button.setOnClickListener(new OnClickListener() {
@Override
public void onClick(final View v) {
BluetoothRecordingManager.checkAndRecord(getApplicationContext(), new OnBluetoothRecording() {
@Override
public void onStartRecording(boolean state, boolean bluetoothFlag) {
Log.d("CallBack","starting Recording");
if (!mIsRecording) {
button.setText(stopRecordingLabel);
mIsRecording = true;
mRecorder.startRecording();
mRecording = getFile("raw");
startBufferedWrite(mRecording);
} else {
button.setText(startRecordingLabel);
mIsRecording = false;
mRecorder.stop();
File waveFile = getFile("wav");
try {
rawToWave(mRecording, waveFile);
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
}
Toast.makeText(MainActivity.this,
"Recorded to " + waveFile.getName(),
Toast.LENGTH_SHORT).show();
}
}
@Override
public void onCancelRecording() {
}
}, true);
}
});
}
@Override
public void onDestroy() {
mRecorder.release();
super.onDestroy();
}
private void initRecorder() {
int bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
mBuffer = new short[bufferSize];
mRecorder = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT,
bufferSize);
}
private void startBufferedWrite(final File file) {
new Thread(new Runnable() {
@Override
public void run() {
DataOutputStream output = null;
try {
output = new DataOutputStream(new BufferedOutputStream(
new FileOutputStream(file)));
while (mIsRecording) {
double sum = 0;
int readSize = mRecorder.read(mBuffer, 0,
mBuffer.length);
final int bytesPerSample = bitsPerSamples / 8;
final int emptySpace = 64 - bitsPerSamples;
int byteIndex = 0;
int byteIndex2 = 0;
int temp = 0;
int mLeftTemp = 0;
int mRightTemp = 0;
int a = 0;
int x = 0;
for (int frameIndex = 0; frameIndex < readSize; frameIndex++) {
for (int c = 0; c < 1; c++) {
if (iGain != 1) {
long accumulator = 0;
for (int b = 0; b < bytesPerSample; b++) {
accumulator += ((long) (mBuffer[byteIndex++] & 0xFF)) << (b * 8 + emptySpace);
}
double sample = ((double) accumulator / (double) Long.MAX_VALUE);
sample *= iGain;
int intValue = (int) ((double) sample * (double) Integer.MAX_VALUE);
for (int i = 0; i < bytesPerSample; i++) {
mBuffer[i + byteIndex2] = (byte) (intValue >>> ((i + 2) * 8) & 0xff);
}
byteIndex2 += bytesPerSample;
}
}// end for(channel)
// mBuffer[frameIndex] *=iGain;
if (mBuffer[frameIndex] > 32765) {
mBuffer[frameIndex] = 32767;
} else if (mBuffer[frameIndex] < -32767) {
mBuffer[frameIndex] = -32767;
}
output.writeShort(mBuffer[frameIndex]);
sum += mBuffer[frameIndex] * mBuffer[frameIndex];
}
if (readSize > 0) {
final double amplitude = sum / readSize;
mProgressBar.setProgress((int) Math.sqrt(amplitude));
}
}
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
} finally {
mProgressBar.setProgress(0);
if (output != null) {
try {
output.flush();
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
} finally {
try {
output.close();
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
}
}
}
}
}
}).start();
}
private void rawToWave(final File rawFile, final File waveFile)
throws IOException {
byte[] rawData = new byte[(int) rawFile.length()];
DataInputStream input = null;
try {
input = new DataInputStream(new FileInputStream(rawFile));
input.read(rawData);
} finally {
if (input != null) {
input.close();
}
}
DataOutputStream output = null;
try {
output = new DataOutputStream(new FileOutputStream(waveFile));
// WAVE header
// see http://ccrma.stanford.edu/courses/422/projects/WaveFormat/
writeString(output, "RIFF"); // chunk id
writeInt(output, 36 + rawData.length); // chunk size
writeString(output, "WAVE"); // format
writeString(output, "fmt "); // subchunk 1 id
writeInt(output, 16); // subchunk 1 size
writeShort(output, (short) 1); // audio format (1 = PCM)
writeShort(output, (short) 1); // number of channels
writeInt(output, SAMPLE_RATE); // sample rate
writeInt(output, SAMPLE_RATE * 2); // byte rate
writeShort(output, (short) 2); // block align
writeShort(output, (short) 16); // bits per sample
writeString(output, "data"); // subchunk 2 id
writeInt(output, rawData.length); // subchunk 2 size
// Audio data (conversion big endian -> little endian)
short[] shorts = new short[rawData.length / 2];
ByteBuffer.wrap(rawData).order(ByteOrder.LITTLE_ENDIAN)
.asShortBuffer().get(shorts);
ByteBuffer bytes = ByteBuffer.allocate(shorts.length * 2);
for (short s : shorts) {
// Apply Gain
/*
* s *= iGain; if(s>32767) { s=32767; } else if(s<-32768) {
* s=-32768; }
*/
bytes.putShort(s);
}
output.write(bytes.array());
} finally {
if (output != null) {
output.close();
}
}
}
private File getFile(final String suffix) {
Time time = new Time();
time.setToNow();
return new File(Environment.getExternalStorageDirectory(),
time.format("%Y%m%d%H%M%S") + "." + suffix);
}
private void writeInt(final DataOutputStream output, final int value)
throws IOException {
output.write(value >> 0);
output.write(value >> 8);
output.write(value >> 16);
output.write(value >> 24);
}
private void writeShort(final DataOutputStream output, final short value)
throws IOException {
output.write(value >> 0);
output.write(value >> 8);
}
private void writeString(final DataOutputStream output, final String value)
throws IOException {
for (int i = 0; i < value.length(); i++) {
output.write(value.charAt(i));
}
}
}
I'm recording sound with AudioRecord in PCM16LE format, 8000 Hz, 1 channel. It records fine on Android versions 2.3.3-4.4.4, but records strange, intermittent sound on the Android L (5.0) Developer Preview (on a Nexus 5, a Nexus 7, and the emulator).
Here is a sample of the recorded sound (the first half is the recording, the second half is playback):
https://www.dropbox.com/s/3wcgufua5pphwtt/android_l_sound_record_error.m4a?dl=0
I tried playing the recorded sound at different sample rates (4000, 16000) and as 8-bit, but the sound stays intermittent. What could the problem be with this sound?
I'm using this AudioRecordTask to record audio, with getAudioRecord() initializing the input (no errors are returned during operation, and I receive audio chunks sized equal to the internalBufferSize value):
public final int SAMPLING_RATE = 8000;
private AudioRecord getAudioRecord() {
int internalBufferSize = AudioRecord.getMinBufferSize(SAMPLING_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT); //returns 640
internalBufferSize = 8000; //also tried returned value (640) and values 2560, 30000 - no changes
final int SOURCE;
if (Build.VERSION.SDK_INT < 11) {
SOURCE = MediaRecorder.AudioSource.MIC;
} else {
SOURCE = MediaRecorder.AudioSource.VOICE_COMMUNICATION;
}
AudioRecord record = new AudioRecord(SOURCE,
SAMPLING_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT,
internalBufferSize);
int state = record.getState();
if (state != AudioRecord.STATE_INITIALIZED) {
try {
record.release();
} catch (Exception e) {
}
return null;
}
if (record.getState() == android.media.AudioRecord.STATE_INITIALIZED) {
record.startRecording();
} else {
record.release();
return null;
}
return record;
}
private class AudioRecordTask extends AsyncTask<Void, Void, Void> {
final int PARTIAL_BUFFER_SIZE = SAMPLING_RATE;
final int NECESSARY_BUFFER_SIZE = 15 * PARTIAL_BUFFER_SIZE * Short.SIZE / 8;
final int FULL_BUFFER_SIZE = NECESSARY_BUFFER_SIZE * 2; //XXX: * 2 for the case when system returns more data than needed
short[] mBuffer;
int mTotalSize;
int mTotalSizeInBytes;
boolean mResult;
private Object mLock = new Object();
@Override
protected void onPreExecute()
{
mIsRecording = true;
mBuffer = new short[FULL_BUFFER_SIZE];
mTotalSize = 0;
mTotalSizeInBytes = 0;
mResult = false;
}
@Override
protected Void doInBackground(Void... arg0) {
synchronized (mLock) {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
AudioRecord record = getAudioRecord();
if (record == null) {
mResult = false;
return null;
}
for (int i = 0; i < 15 * 100; i++) { //XXX: * 100 to record enough data (system can return lesser than needed)
int datalen = record.read(mBuffer, mTotalSize, PARTIAL_BUFFER_SIZE);
if (datalen > 0) {
mTotalSize += datalen;
mTotalSizeInBytes = mTotalSize*2;
} else {
Log.w("", "error " + datalen + " in AudioRecord.read");
}
if (isCancelled() || mTotalSizeInBytes > NECESSARY_BUFFER_SIZE) {
break;
}
}
if (record.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
record.stop();
}
record.release();
mResult = true;
return null;
}
}
@Override
protected void onPostExecute(Void r) {
synchronized (mLock) {
mIsRecording = false;
fin();
}
}
@Override
protected void onCancelled() {
//XXX: on old Androids (e.g. 2.3.3) onCancelled being called while doInBackground is still running
synchronized (mLock) {
mIsRecording = false;
if (mAbort) {
return;
}
fin();
}
}
private void fin() {
if (mResult && mTotalSizeInBytes > 0) {
sendRecordedAudioToServer(mBuffer, mTotalSize, mTotalSizeInBytes);
} else {
showError(null);
}
}
}
It's a bug in the Android L Developer Preview: https://code.google.com/p/android-developer-preview/issues/detail?id=1492
AudioRecord.read with a short[] buffer argument returns its result in bytes instead of shorts.
As a workaround, use AudioRecord.read with a byte[] buffer.
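A minimal sketch of that workaround inside the read loop above; the field names are reused from the question, and the exact integration point is an assumption:

// Read into a byte[] so the return value is unambiguously a byte count,
// then reinterpret the little-endian PCM16 bytes as shorts ourselves.
byte[] byteBuffer = new byte[PARTIAL_BUFFER_SIZE * 2];
int bytesRead = record.read(byteBuffer, 0, byteBuffer.length);
if (bytesRead > 0) {
    ByteBuffer.wrap(byteBuffer, 0, bytesRead)
            .order(ByteOrder.LITTLE_ENDIAN)
            .asShortBuffer()
            .get(mBuffer, mTotalSize, bytesRead / 2);
    mTotalSize += bytesRead / 2;
    mTotalSizeInBytes = mTotalSize * 2;
}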
I am confused. I read here that Android has no support for gain control (the article is dated 2013/09/21), yet I found the AutomaticGainControl class among the audio effects. What does "software gain control" mean? Is there any difference?
Is it used by default with AudioRecord, or do I have to enable it myself?
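For what it's worth: AutomaticGainControl (in android.media.audiofx) is a per-session effect that you attach yourself; as far as I know it is not guaranteed to be enabled by default, and availability varies by device. A minimal sketch of enabling it on an existing AudioRecord, where the recorder variable is assumed:

if (AutomaticGainControl.isAvailable()) {
    AutomaticGainControl agc =
            AutomaticGainControl.create(recorder.getAudioSessionId());
    if (agc != null) {
        agc.setEnabled(true); // returns SUCCESS or an error code
    }
}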
Audio gain control
To increase the amplitude of the audio, you need to calculate a gain factor and multiply every captured sample by it. The following code does that.
P.S. Ignore the unrelated code.
public class MainActivity extends Activity {
public static final int SAMPLE_RATE = 16000;
private AudioRecord mRecorder;
private File mRecording;
private short[] mBuffer;
private final String startRecordingLabel = "Start recording";
private final String stopRecordingLabel = "Stop recording";
private boolean mIsRecording = false;
private ProgressBar mProgressBar;
float iGain = 1.0f;
CheckBox gain;
protected int bitsPerSamples = 16;
@Override
public void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.fragment_main);
initRecorder();
Button bluetooth = (Button)findViewById(R.id.blue);
gain = (CheckBox) findViewById(R.id.checkBox1);
mProgressBar = (ProgressBar) findViewById(R.id.progressBar);
final Button button = (Button) findViewById(R.id.start);
button.setText(startRecordingLabel);
bluetooth.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
Intent i = new Intent("");
}
});
gain.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView,
boolean isChecked) {
if (gain.isChecked()) {
iGain = 5.0f;
} else {
iGain = 2.0f;
}
}
});
button.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(final View v) {
if (!mIsRecording) {
button.setText(stopRecordingLabel);
mIsRecording = true;
mRecorder.startRecording();
mRecording = getFile("raw");
startBufferedWrite(mRecording);
} else {
button.setText(startRecordingLabel);
mIsRecording = false;
mRecorder.stop();
File waveFile = getFile("wav");
try {
rawToWave(mRecording, waveFile);
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
}
Toast.makeText(MainActivity.this,
"Recorded to " + waveFile.getName(),
Toast.LENGTH_SHORT).show();
}
}
});
}
@Override
public void onDestroy() {
mRecorder.release();
super.onDestroy();
}
private void initRecorder() {
int bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
mBuffer = new short[bufferSize];
mRecorder = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT,
bufferSize);
}
private void startBufferedWrite(final File file) {
new Thread(new Runnable() {
@Override
public void run() {
DataOutputStream output = null;
try {
output = new DataOutputStream(new BufferedOutputStream(
new FileOutputStream(file)));
while (mIsRecording) {
double sum = 0;
int readSize = mRecorder.read(mBuffer, 0,
mBuffer.length);
final int bytesPerSample = bitsPerSamples / 8;
final int emptySpace = 64 - bitsPerSamples;
int byteIndex = 0;
int byteIndex2 = 0;
int temp = 0;
int mLeftTemp = 0;
int mRightTemp = 0;
int a = 0;
int x = 0;
for (int frameIndex = 0; frameIndex < readSize; frameIndex++) {
for (int c = 0; c < 1; c++) {
if (iGain != 1) {
long accumulator = 0;
for (int b = 0; b < bytesPerSample; b++) {
accumulator += ((long) (mBuffer[byteIndex++] & 0xFF)) << (b * 8 + emptySpace);
}
double sample = ((double) accumulator / (double) Long.MAX_VALUE);
sample *= iGain;
int intValue = (int) ((double) sample * (double) Integer.MAX_VALUE);
for (int i = 0; i < bytesPerSample; i++) {
mBuffer[i + byteIndex2] = (byte) (intValue >>> ((i + 2) * 8) & 0xff);
}
byteIndex2 += bytesPerSample;
}
}// end for(channel)
// mBuffer[frameIndex] *=iGain;
if (mBuffer[frameIndex] > 32765) {
mBuffer[frameIndex] = 32767;
} else if (mBuffer[frameIndex] < -32767) {
mBuffer[frameIndex] = -32767;
}
output.writeShort(mBuffer[frameIndex]);
sum += mBuffer[frameIndex] * mBuffer[frameIndex];
}
if (readSize > 0) {
final double amplitude = sum / readSize;
mProgressBar.setProgress((int) Math.sqrt(amplitude));
}
}
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
} finally {
mProgressBar.setProgress(0);
if (output != null) {
try {
output.flush();
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
} finally {
try {
output.close();
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
}
}
}
}
}
}).start();
}
private void rawToWave(final File rawFile, final File waveFile)
throws IOException {
byte[] rawData = new byte[(int) rawFile.length()];
DataInputStream input = null;
try {
input = new DataInputStream(new FileInputStream(rawFile));
input.read(rawData);
} finally {
if (input != null) {
input.close();
}
}
DataOutputStream output = null;
try {
output = new DataOutputStream(new FileOutputStream(waveFile));
// WAVE header
// see http://ccrma.stanford.edu/courses/422/projects/WaveFormat/
writeString(output, "RIFF"); // chunk id
writeInt(output, 36 + rawData.length); // chunk size
writeString(output, "WAVE"); // format
writeString(output, "fmt "); // subchunk 1 id
writeInt(output, 16); // subchunk 1 size
writeShort(output, (short) 1); // audio format (1 = PCM)
writeShort(output, (short) 1); // number of channels
writeInt(output, SAMPLE_RATE); // sample rate
writeInt(output, SAMPLE_RATE * 2); // byte rate
writeShort(output, (short) 2); // block align
writeShort(output, (short) 16); // bits per sample
writeString(output, "data"); // subchunk 2 id
writeInt(output, rawData.length); // subchunk 2 size
// Audio data (conversion big endian -> little endian)
short[] shorts = new short[rawData.length / 2];
ByteBuffer.wrap(rawData).order(ByteOrder.LITTLE_ENDIAN)
.asShortBuffer().get(shorts);
ByteBuffer bytes = ByteBuffer.allocate(shorts.length * 2);
for (short s : shorts) {
// Apply Gain
/*
* s *= iGain; if(s>32767) { s=32767; } else if(s<-32768) {
* s=-32768; }
*/
bytes.putShort(s);
}
output.write(bytes.array());
} finally {
if (output != null) {
output.close();
}
}
}
private File getFile(final String suffix) {
Time time = new Time();
time.setToNow();
return new File(Environment.getExternalStorageDirectory(),
time.format("%Y%m%d%H%M%S") + "." + suffix);
}
private void writeInt(final DataOutputStream output, final int value)
throws IOException {
output.write(value >> 0);
output.write(value >> 8);
output.write(value >> 16);
output.write(value >> 24);
}
private void writeShort(final DataOutputStream output, final short value)
throws IOException {
output.write(value >> 0);
output.write(value >> 8);
}
private void writeString(final DataOutputStream output, final String value)
throws IOException {
for (int i = 0; i < value.length(); i++) {
output.write(value.charAt(i));
}
}
}
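The byte-accumulator arithmetic inside startBufferedWrite is hard to follow for 16-bit samples that already sit in a short[]. A simpler per-sample version of the same idea, assuming multiply-and-clip is all that is intended:

// Scale each 16-bit sample by iGain and clip to the valid short range.
for (int i = 0; i < readSize; i++) {
    int scaled = (int) (mBuffer[i] * iGain);
    if (scaled > Short.MAX_VALUE) scaled = Short.MAX_VALUE;
    if (scaled < Short.MIN_VALUE) scaled = Short.MIN_VALUE;
    mBuffer[i] = (short) scaled;
}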
I got help from the book Pro Android Media...
Here is the code:
public class MicMeter extends Activity implements OnClickListener {
RecordAudio recordTask;
int blocksize = 256;
int frequency = 8000;
int channelConfig = AudioFormat.CHANNEL_CONFIGURATION_MONO;
int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
TextView txt;
Button start;
boolean started = false;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_mic_meter);
start = (Button)findViewById(R.id.button1);
txt = (TextView)findViewById(R.id.textView1);
start.setOnClickListener(this);
}
private class RecordAudio extends AsyncTask <Void,double[],Void>{
@Override
protected Void doInBackground(Void... params) {
try{
int bufferSize = AudioRecord.getMinBufferSize(frequency,channelConfig,audioEncoding);
AudioRecord audioRecord = new AudioRecord( MediaRecorder.AudioSource.MIC, frequency, channelConfig, audioEncoding, bufferSize);
short[] buffer = new short[blocksize];
double[] meter = new double[blocksize];
audioRecord.startRecording();
while(started){
int bufferReadResult = audioRecord.read(buffer, 0, blocksize);
for (int i = 0; i < blocksize && i < bufferReadResult; i++) {
meter[i] = (double) buffer[i] / 32768.0; // signed 16 bit
}
publishProgress(meter);
}
audioRecord.stop();
}catch (Throwable t) {
Log.e("AudioRecord","RecordingFail");
}
return null;
}
@Override
protected void onProgressUpdate(double[]... meter) {
for (int i = 0; i < meter[0].length; i++) {
double helper = i;
txt.setText(Double.toString(helper));
}
}
}
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
if(started){
recordTask.cancel(true);
}else{
started = true;
recordTask = new RecordAudio();
recordTask.execute();
}
}
}
When I press the button, it shows 255.0 and then it doesn't respond...
Is there any way to fix it?
Is there a better version of this?
Thanks.
for (int i = 0; i < meter[0].length; i++) {
    double helper = i;
    txt.setText(Double.toString(helper));
setText overwrites the old value, so only the last call will show. That last call sets the text to helper, which will always be the final loop index, meter[0].length - 1 (255 for your block size of 256). Since that's a fixed number, it won't change.
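One way to make the meter respond, sketched under the assumption that a single level per block is wanted: compute the RMS of the block and call setText once per update.

@Override
protected void onProgressUpdate(double[]... meter) {
    double sum = 0;
    for (double sample : meter[0]) {
        sum += sample * sample;
    }
    double rms = Math.sqrt(sum / meter[0].length);
    txt.setText(Double.toString(rms)); // one update per block, not per sample
}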