A simple Real-Time Mic Meter in Android - android

I have helped from the book Pro Android media...
Here is the code:
public class MicMeter extends Activity implements OnClickListener {
RecordAudio recordTask;
int blocksize = 256;
int frequency = 8000;
int channelConfig = AudioFormat.CHANNEL_CONFIGURATION_MONO;
int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
TextView txt;
Button start;
boolean started = false;
#Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_mic_meter);
start = (Button)findViewById(R.id.button1);
txt = (TextView)findViewById(R.id.textView1);
start.setOnClickListener(this);
}
private class RecordAudio extends AsyncTask <Void,double[],Void>{
#Override
protected Void doInBackground(Void... params) {
try{
int bufferSize = AudioRecord.getMinBufferSize(frequency,channelConfig,audioEncoding);
AudioRecord audioRecord = new AudioRecord( MediaRecorder.AudioSource.MIC, frequency, channelConfig, audioEncoding, bufferSize);
short[] buffer = new short[blocksize];
double[] meter = new double[blocksize];
audioRecord.startRecording();
while(started){
int bufferReadResult = audioRecord.read(buffer, 0, blocksize);
for (int i = 0; i < blocksize && i < bufferReadResult; i++) {
meter[i] = (double) buffer[i] / 32768.0; // signed 16 bit
}
publishProgress(meter);
}
audioRecord.stop();
}catch (Throwable t) {
Log.e("AudioRecord","RecordingFail");
}
return null;
}
#Override
protected void onProgressUpdate(double[]... meter) {
for(int i = 0 ; i < meter[0].length ; i++){
double[] helper = meter[i];
txt.setText(Double.toString(helper));
}
}
}
#Override
public void onClick(View v) {
// TODO Auto-generated method stub
if(started){
recordTask.cancel(true);
}else{
started = true;
recordTask = new RecordAudio();
recordTask.execute();
}
}
}
When I press the button, it shows 255.0 and then the app stops responding.
Is there a way to fix this?
Is there a better version of this code?
Thanks.

for(int i = 0 ; i < meter[0].length ; i++){
double helper = i;
txt.setText(Double.toString(helper));
setText overwrites the old value. So only the last call will show. The last call sets it to helper, which will always be meter[0].length. Since that's a fixed number, it won't change.

Related

How to capture audio data and save to wav file?

I'm trying to record (Capture) audio data to a new wav file.
I'm using Oboe c++ for buffering the audio according to this Example code
This is how I capture audio data from the mixer to RecordBuffer array:
// Mixes all active tracks into audioData and, while recording, captures the
// final mixed frames into recordBuffer.
//
// BUG FIX vs. original:
//  * capture used to happen inside the per-track loop, so with more than one
//    track each track's partial contribution was recorded separately;
//  * zero-valued samples were skipped (if (data != 0)), which dropped silence
//    and destroyed the timing of the recorded waveform.
// Capture now happens once per output sample, after all tracks are mixed,
// and every sample (including silence) is kept.
void Mixer::renderAudio(int16_t *audioData, int32_t numFrames) {
    int32_t count = numFrames * kChannelCount;
    // Zero out the incoming container array.
    for (int j = 0; j < count; ++j) {
        audioData[j] = 0;
    }
    // Mix every track (with gain applied) into the output buffer.
    for (int i = 0; i < mNextFreeTrackIndex; ++i) {
        mTracks[i]->renderAudio(mixingBuffer, numFrames);
        for (int j = 0; j < count; ++j) {
            data = mixingBuffer[j];
            data *= volume;
            audioData[j] += data;
        }
    }
    // Capture the fully mixed output, keeping zero samples so timing is preserved.
    if (recording) {
        for (int j = 0; j < count && recordFrames < kMaxRecordSize; ++j) {
            recordBuffer[recordFrames++] = audioData[j];
        }
    }
}
Stop record and get the short array to Java code
// Stops capturing and hands the recorded samples to Java as a new jshortArray
// of exactly recordFrames elements.
// NOTE(review): recordFrames is not reset here, so a later recording session
// would append after (or overflow past) the old data — confirm the caller
// resets it before recording again.
jshortArray Mixer::getRecordingData(JNIEnv *env) {
    recording = false;
    jshortArray result = env->NewShortArray(recordFrames);
    env->SetShortArrayRegion(result, 0, recordFrames, recordBuffer);
    return result;
}
Then in Java code, I create the wav file:
/**
 * Builds a 16-bit PCM WAV file image in memory from a slice of samples and
 * can flush it to disk. Layout: RIFF header, "fmt " subchunk, "data" subchunk.
 */
public class Wave {
    private final int LONGINT = 4;
    private final int SMALLINT = 2;
    private final int INTEGER = 4;
    private final int ID_STRING_SIZE = 4;
    private final int WAV_RIFF_SIZE = LONGINT + ID_STRING_SIZE;
    private final int WAV_FMT_SIZE = (4 * SMALLINT) + (INTEGER * 2) + LONGINT + ID_STRING_SIZE;
    private final int WAV_DATA_SIZE = ID_STRING_SIZE + LONGINT;
    private final int WAV_HDR_SIZE = WAV_RIFF_SIZE + ID_STRING_SIZE + WAV_FMT_SIZE + WAV_DATA_SIZE;
    private final short PCM = 1;       // WAVE format tag for uncompressed PCM
    private final int SAMPLE_SIZE = 2; // bytes per 16-bit sample
    int cursor, nSamples;              // write position into output; sample count
    byte[] output;                     // complete file image (header + data)
    int sampleRate = 48000;
    short channels = 2;

    /** Builds the in-memory WAV image from data[start..end] (inclusive). */
    public Wave(short[] data, int start, int end) {
        nSamples = end - start + 1;
        cursor = 0;
        output = new byte[nSamples * SMALLINT + WAV_HDR_SIZE];
        buildHeader(sampleRate, channels);
        writeData(data, start, end);
    }

    /** Writes "RIFF" + chunk size + "WAVE" + the format subchunk. */
    private void buildHeader(int sampleRate, short nChannels) {
        write("RIFF");
        // BUG FIX: the RIFF chunk size excludes the "RIFF" id and the size
        // field itself (8 bytes); the original wrote the full file length,
        // which makes strict players reject or mis-read the file.
        write(output.length - (ID_STRING_SIZE + LONGINT));
        write("WAVE");
        writeFormat(sampleRate, nChannels);
    }

    /** Writes the 16-byte PCM "fmt " subchunk. */
    public void writeFormat(int sampleRate, short nChannels) {
        write("fmt ");
        write(WAV_FMT_SIZE - WAV_DATA_SIZE);          // subchunk 1 size = 16 for PCM
        write(PCM);                                   // audio format
        write(nChannels);                             // channel count
        write(sampleRate);                            // sample rate
        write(nChannels * sampleRate * SAMPLE_SIZE);  // byte rate
        write((short) (nChannels * SAMPLE_SIZE));     // block align
        write((short) 16);                            // bits per sample
    }

    /** Writes the "data" subchunk header followed by the samples. */
    public void writeData(short[] data, int start, int end) {
        write("data");
        write(nSamples * SMALLINT); // subchunk 2 size in bytes
        for (int i = start; i <= end; i++) {
            write(data[i]);
        }
    }

    private void write(byte b) {
        output[cursor++] = b;
    }

    private void write(String id) {
        // BUG FIX: a wrong-length id used to be silently skipped, which would
        // shift every following header field; fail loudly instead.
        if (id.length() != ID_STRING_SIZE) {
            throw new IllegalArgumentException("chunk id must be 4 chars: " + id);
        }
        for (int i = 0; i < ID_STRING_SIZE; ++i) {
            write((byte) id.charAt(i));
        }
    }

    /** Little-endian 32-bit write, as the WAV format requires. */
    private void write(int i) {
        write((byte) (i & 0xFF)); i >>= 8;
        write((byte) (i & 0xFF)); i >>= 8;
        write((byte) (i & 0xFF)); i >>= 8;
        write((byte) (i & 0xFF));
    }

    /** Little-endian 16-bit write. */
    private void write(short i) {
        write((byte) (i & 0xFF)); i >>= 8;
        write((byte) (i & 0xFF));
    }

    /**
     * Flushes the image to a file named by FileManager; returns true on
     * success, false on any I/O failure (already printed to the log).
     */
    public boolean wroteToFile() {
        boolean ok;
        try {
            File path = FileManager.generateNewFile(String.valueOf(sampleRate + " " + channels));
            FileOutputStream outFile = new FileOutputStream(path);
            outFile.write(output);
            outFile.close();
            ok = true;
        } catch (FileNotFoundException e) {
            e.printStackTrace();
            ok = false;
        } catch (IOException e) {
            ok = false;
            e.printStackTrace();
        }
        return ok;
    }
}
The results are pretty close, but the quality is very bad.
Does anyone know what am I doing wrong?
By the way, it only works well if I play one sample at a time.
Here is a way to record a Oboe input stream to a .Wav file in C++ for Android :
https://github.com/reuniware/OboeAudioRecorder/blob/master/app/src/main/cpp/OboeAudioRecorder.cpp

How to save recorded sound to file by using AudioRecord in android?

I'm developing an android (compileSdkVersion 23) app to record audio by using AudioRecord and the reason of using it is to get frequency after FFT in real time.
In addition to that frequency analysis, I need to save the recorded sound so I can check it (in that step, tracking the frequency is unnecessary).
How to save recorded sound to file by using the AudioRecord in android?
Thus, am I using the AudioRecord correctly?
Here is code:
public class MainActivity extends Activity {
int frequency = 8000;
int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
AudioRecord audioRecord;
RecordAudio recordTask;
int blockSize;// = 256;
boolean started = false;
boolean CANCELLED_FLAG = false;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
blockSize = 256;
final Button btRec = (Button) findViewById(R.id.btRec);
btRec.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View v) {
if (started == true) {
//started = false;
CANCELLED_FLAG = true;
//recordTask.cancel(true);
try{
audioRecord.stop();
}
catch(IllegalStateException e){
Log.e("Stop failed", e.toString());
}
btRec.setText("Start");
// canvasDisplaySpectrum.drawColor(Color.BLACK);
}
else {
started = true;
CANCELLED_FLAG = false;
btRec.setText("Stop");
recordTask = new RecordAudio();
recordTask.execute();
}
}
});
}
private class RecordAudio extends AsyncTask<Void, double[], Boolean> {
#Override
protected Boolean doInBackground(Void... params) {
int bufferSize = AudioRecord.getMinBufferSize(frequency,
channelConfiguration, audioEncoding);
audioRecord = new AudioRecord(
MediaRecorder.AudioSource.DEFAULT, frequency,
channelConfiguration, audioEncoding, bufferSize);
int bufferReadResult;
short[] buffer = new short[blockSize];
double[] toTransform = new double[blockSize];
try {
audioRecord.startRecording();
} catch (IllegalStateException e) {
Log.e("Recording failed", e.toString());
}
while (started) {
if (isCancelled() || (CANCELLED_FLAG == true)) {
started = false;
//publishProgress(cancelledResult);
Log.d("doInBackground", "Cancelling the RecordTask");
break;
} else {
bufferReadResult = audioRecord.read(buffer, 0, blockSize);
for (int i = 0; i < blockSize && i < bufferReadResult; i++) {
toTransform[i] = (double) buffer[i] / 32768.0; // signed 16 bit
}
//transformer.ft(toTransform);
//publishProgress(toTransform);
}
}
return true;
}
}
}
You have to download your file and save it in the cache; then, for every request, check whether the cached file is available — use it if so, otherwise request a new file.
For complete help look into one of my answer Download and cache media files

Is it possible to record the voice from Bluetooth headset and play on Android speaker simultaneously?

Is it possible to record the voice from Bluetooth headset and play on Android speaker simultaneously? I finished to record audio from handset device by this code. And I am doing the second step- play this sound in android speaker. Please help me to resolve it. Thank you so much
// Route phone audio through the Bluetooth SCO link, then record from the
// default audio source with MediaRecorder into an AMR-NB/MP4 file.
_audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
_audioManager.startBluetoothSco(); // opens the SCO audio connection to the headset
_recorder = new MediaRecorder();
_recorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
_recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
_recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
_recorder.setOutputFile(file.toString());
_recorder.prepare(); // must be called before start()
_recorder.start();
Recording using AudioRecord
/**
 * Records microphone audio (optionally routed through a Bluetooth recording
 * manager), streams the raw 16-bit samples to a file while driving a level
 * bar, and converts the raw capture to a WAV file on stop.
 *
 * NOTE: the '#Override' tokens below are how this page was scraped; in the
 * real source they read '@Override'.
 */
public class MainActivity extends Activity {
    public static final int SAMPLE_RATE = 16000;
    private AudioRecord mRecorder;
    private File mRecording;          // raw PCM capture file
    private short[] mBuffer;          // read buffer sized to AudioRecord's minimum
    private final String startRecordingLabel = "Start recording";
    private final String stopRecordingLabel = "Stop recording";
    private boolean mIsRecording = false; // run condition for the writer thread
    private ProgressBar mProgressBar; // shows RMS amplitude while recording
    float iGain = 1.0f;               // software gain factor
    CheckBox gain;                    // toggles between two gain presets
    Button showPref;
    OnBluetoothRecording bluetoothRecording;
    protected int bitsPerSamples = 16;

    #Override
    public void onCreate(final Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.fragment_main);
        initRecorder();
        Button bluetooth = (Button)findViewById(R.id.blue);
        showPref = (Button)findViewById(R.id.showPreferece);
        gain = (CheckBox) findViewById(R.id.checkBox1);
        mProgressBar = (ProgressBar) findViewById(R.id.progressBar);
        showPref.setOnClickListener(new OnClickListener() {
            #Override
            public void onClick(View v) {
                startActivity(new Intent(getApplicationContext(),BluetoothPreferenceActivity.class));
            }
        });
        final Button button = (Button) findViewById(R.id.start);
        button.setText(startRecordingLabel);
        bluetooth.setOnClickListener(new OnClickListener() {
            #Override
            public void onClick(View v) {
                // TODO Auto-generated method stub
                Intent i = new Intent("");
            }
        });
        gain.setOnCheckedChangeListener(new OnCheckedChangeListener() {
            #Override
            public void onCheckedChanged(CompoundButton buttonView,
                boolean isChecked) {
                // Checked = stronger boost; note the unchecked branch still
                // doubles the signal rather than restoring unity gain.
                if (gain.isChecked()) {
                    iGain = 5.0f;
                } else {
                    iGain = 2.0f;
                }
            }
        });
        button.setOnClickListener(new OnClickListener() {
            #Override
            public void onClick(final View v) {
                // Ask the Bluetooth manager for routing, then toggle recording
                // from its callback.
                BluetoothRecordingManager.checkAndRecord(getApplicationContext(), new OnBluetoothRecording() {
                    #Override
                    public void onStartRecording(boolean state, boolean bluetoothFlag) {
                        Log.d("CallBack","starting Recording");
                        if (!mIsRecording) {
                            // Start: begin capture and stream samples to a ".raw" file.
                            button.setText(stopRecordingLabel);
                            mIsRecording = true;
                            mRecorder.startRecording();
                            mRecording = getFile("raw");
                            startBufferedWrite(mRecording);
                        } else {
                            // Stop: end capture and convert the raw file to WAV.
                            button.setText(startRecordingLabel);
                            mIsRecording = false;
                            mRecorder.stop();
                            File waveFile = getFile("wav");
                            try {
                                rawToWave(mRecording, waveFile);
                            } catch (IOException e) {
                                Toast.makeText(MainActivity.this, e.getMessage(),
                                    Toast.LENGTH_SHORT).show();
                            }
                            Toast.makeText(MainActivity.this,
                                "Recorded to " + waveFile.getName(),
                                Toast.LENGTH_SHORT).show();
                        }
                    }
                    #Override
                    public void onCancelRecording() {
                    }
                }, true);
            }
        });
    }

    #Override
    public void onDestroy() {
        // Free the native recorder when the activity goes away.
        mRecorder.release();
        super.onDestroy();
    }

    /** Creates the AudioRecord and its read buffer (mono 16-bit PCM). */
    private void initRecorder() {
        int bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        mBuffer = new short[bufferSize];
        mRecorder = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT,
            bufferSize);
    }

    /**
     * Spawns a thread that reads from mRecorder while mIsRecording is true,
     * optionally applies gain, writes each sample (big-endian) to 'file', and
     * updates the progress bar with the block's RMS amplitude.
     */
    private void startBufferedWrite(final File file) {
        new Thread(new Runnable() {
            #Override
            public void run() {
                DataOutputStream output = null;
                try {
                    output = new DataOutputStream(new BufferedOutputStream(
                        new FileOutputStream(file)));
                    while (mIsRecording) {
                        double sum = 0; // accumulates squared samples for RMS
                        int readSize = mRecorder.read(mBuffer, 0,
                            mBuffer.length);
                        final int bytesPerSample = bitsPerSamples / 8;
                        final int emptySpace = 64 - bitsPerSamples;
                        int byteIndex = 0;
                        int byteIndex2 = 0;
                        int temp = 0;
                        int mLeftTemp = 0;
                        int mRightTemp = 0;
                        int a = 0;
                        int x = 0;
                        for (int frameIndex = 0; frameIndex < readSize; frameIndex++) {
                            for (int c = 0; c < 1; c++) {
                                if (iGain != 1) {
                                    // NOTE(review): this gain path indexes mBuffer (a
                                    // short[]) as if each element held a single byte
                                    // (& 0xFF drops the high byte and the sign), then
                                    // casts the result back through (byte). The
                                    // arithmetic looks incorrect for 16-bit samples —
                                    // verify against the original source.
                                    long accumulator = 0;
                                    for (int b = 0; b < bytesPerSample; b++) {
                                        accumulator += ((long) (mBuffer[byteIndex++] & 0xFF)) << (b * 8 + emptySpace);
                                    }
                                    double sample = ((double) accumulator / (double) Long.MAX_VALUE);
                                    sample *= iGain;
                                    int intValue = (int) ((double) sample * (double) Integer.MAX_VALUE);
                                    for (int i = 0; i < bytesPerSample; i++) {
                                        mBuffer[i + byteIndex2] = (byte) (intValue >>> ((i + 2) * 8) & 0xff);
                                    }
                                    byteIndex2 += bytesPerSample;
                                }
                            }// end for(channel)
                            // mBuffer[frameIndex] *=iGain;
                            // Clamp to the signed 16-bit range before writing.
                            if (mBuffer[frameIndex] > 32765) {
                                mBuffer[frameIndex] = 32767;
                            } else if (mBuffer[frameIndex] < -32767) {
                                mBuffer[frameIndex] = -32767;
                            }
                            // DataOutputStream writes big-endian; rawToWave()
                            // byte-swaps back to little-endian for the WAV file.
                            output.writeShort(mBuffer[frameIndex]);
                            sum += mBuffer[frameIndex] * mBuffer[frameIndex];
                        }
                        if (readSize > 0) {
                            final double amplitude = sum / readSize;
                            mProgressBar.setProgress((int) Math.sqrt(amplitude)); // RMS level
                        }
                    }
                } catch (IOException e) {
                    // NOTE(review): this Toast is created on the worker thread —
                    // normally Toasts must run on the UI thread; confirm.
                    Toast.makeText(MainActivity.this, e.getMessage(),
                        Toast.LENGTH_SHORT).show();
                } finally {
                    mProgressBar.setProgress(0);
                    if (output != null) {
                        try {
                            output.flush();
                        } catch (IOException e) {
                            Toast.makeText(MainActivity.this, e.getMessage(),
                                Toast.LENGTH_SHORT).show();
                        } finally {
                            try {
                                output.close();
                            } catch (IOException e) {
                                Toast.makeText(MainActivity.this, e.getMessage(),
                                    Toast.LENGTH_SHORT).show();
                            }
                        }
                    }
                }
            }
        }).start();
    }

    /**
     * Wraps the raw big-endian PCM capture in a little-endian WAV container
     * (mono, 16-bit, SAMPLE_RATE).
     */
    private void rawToWave(final File rawFile, final File waveFile)
        throws IOException {
        byte[] rawData = new byte[(int) rawFile.length()];
        DataInputStream input = null;
        try {
            input = new DataInputStream(new FileInputStream(rawFile));
            // NOTE(review): a single read() is not guaranteed to fill the
            // whole array — confirm this is acceptable for these file sizes.
            input.read(rawData);
        } finally {
            if (input != null) {
                input.close();
            }
        }
        DataOutputStream output = null;
        try {
            output = new DataOutputStream(new FileOutputStream(waveFile));
            // WAVE header
            // see http://ccrma.stanford.edu/courses/422/projects/WaveFormat/
            writeString(output, "RIFF"); // chunk id
            writeInt(output, 36 + rawData.length); // chunk size
            writeString(output, "WAVE"); // format
            writeString(output, "fmt "); // subchunk 1 id
            writeInt(output, 16); // subchunk 1 size
            writeShort(output, (short) 1); // audio format (1 = PCM)
            writeShort(output, (short) 1); // number of channels
            writeInt(output, SAMPLE_RATE); // sample rate
            writeInt(output, SAMPLE_RATE * 2); // byte rate
            writeShort(output, (short) 2); // block align
            writeShort(output, (short) 16); // bits per sample
            writeString(output, "data"); // subchunk 2 id
            writeInt(output, rawData.length); // subchunk 2 size
            // Audio data (conversion big endian -> little endian): reading the
            // big-endian raw bytes through a little-endian view and writing
            // them back big-endian swaps each sample's bytes exactly once.
            short[] shorts = new short[rawData.length / 2];
            ByteBuffer.wrap(rawData).order(ByteOrder.LITTLE_ENDIAN)
                .asShortBuffer().get(shorts);
            ByteBuffer bytes = ByteBuffer.allocate(shorts.length * 2);
            for (short s : shorts) {
                // Apply Gain
                /*
                 * s *= iGain; if(s>32767) { s=32767; } else if(s<-32768) {
                 * s=-32768; }
                 */
                bytes.putShort(s);
            }
            output.write(bytes.array());
        } finally {
            if (output != null) {
                output.close();
            }
        }
    }

    /** Timestamp-named file on external storage with the given extension. */
    private File getFile(final String suffix) {
        Time time = new Time();
        time.setToNow();
        return new File(Environment.getExternalStorageDirectory(),
            time.format("%Y%m%d%H%M%S") + "." + suffix);
    }

    /** Little-endian 32-bit write (OutputStream.write keeps the low 8 bits). */
    private void writeInt(final DataOutputStream output, final int value)
        throws IOException {
        output.write(value >> 0);
        output.write(value >> 8);
        output.write(value >> 16);
        output.write(value >> 24);
    }

    /** Little-endian 16-bit write. */
    private void writeShort(final DataOutputStream output, final short value)
        throws IOException {
        output.write(value >> 0);
        output.write(value >> 8);
    }

    /** Writes the string's chars as single bytes (ASCII chunk ids). */
    private void writeString(final DataOutputStream output, final String value)
        throws IOException {
        for (int i = 0; i < value.length(); i++) {
            output.write(value.charAt(i));
        }
    }
}

Gain control in Android

I am confused. I read here that Android has no support for gain control (this article is dated 2013/09/21). and I found AutomaticGainControl class in AudioEffect. what does "software gain control means"? any difference?
Is it used by default while using AudioRecord or i have to call it?
Audio gain control
To increase the amplitude of the audio, you need to calculate the gain factor and multiply every captured sample by it. The following code does that.
P.S. Ignore the unrelated code
/**
 * Records microphone audio, applies a software gain factor to the captured
 * samples, streams them to a raw file while driving a level bar, and converts
 * the raw capture to a WAV file on stop. (Near-duplicate of the class above,
 * without the Bluetooth recording manager.)
 *
 * NOTE: the '#Override' tokens below are how this page was scraped; in the
 * real source they read '@Override'.
 */
public class MainActivity extends Activity {
    public static final int SAMPLE_RATE = 16000;
    private AudioRecord mRecorder;
    private File mRecording;          // raw PCM capture file
    private short[] mBuffer;          // read buffer sized to AudioRecord's minimum
    private final String startRecordingLabel = "Start recording";
    private final String stopRecordingLabel = "Stop recording";
    private boolean mIsRecording = false; // run condition for the writer thread
    private ProgressBar mProgressBar; // shows RMS amplitude while recording
    float iGain = 1.0f;               // software gain factor
    CheckBox gain;                    // toggles between two gain presets
    protected int bitsPerSamples = 16;

    #Override
    public void onCreate(final Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.fragment_main);
        initRecorder();
        Button bluetooth = (Button)findViewById(R.id.blue);
        gain = (CheckBox) findViewById(R.id.checkBox1);
        mProgressBar = (ProgressBar) findViewById(R.id.progressBar);
        final Button button = (Button) findViewById(R.id.start);
        button.setText(startRecordingLabel);
        bluetooth.setOnClickListener(new View.OnClickListener() {
            #Override
            public void onClick(View v) {
                // TODO Auto-generated method stub
                Intent i = new Intent("");
            }
        });
        gain.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
            #Override
            public void onCheckedChanged(CompoundButton buttonView,
                boolean isChecked) {
                // Checked = stronger boost; note the unchecked branch still
                // doubles the signal rather than restoring unity gain.
                if (gain.isChecked()) {
                    iGain = 5.0f;
                } else {
                    iGain = 2.0f;
                }
            }
        });
        button.setOnClickListener(new View.OnClickListener() {
            #Override
            public void onClick(final View v) {
                if (!mIsRecording) {
                    // Start: begin capture and stream samples to a ".raw" file.
                    button.setText(stopRecordingLabel);
                    mIsRecording = true;
                    mRecorder.startRecording();
                    mRecording = getFile("raw");
                    startBufferedWrite(mRecording);
                } else {
                    // Stop: end capture and convert the raw file to WAV.
                    button.setText(startRecordingLabel);
                    mIsRecording = false;
                    mRecorder.stop();
                    File waveFile = getFile("wav");
                    try {
                        rawToWave(mRecording, waveFile);
                    } catch (IOException e) {
                        Toast.makeText(MainActivity.this, e.getMessage(),
                            Toast.LENGTH_SHORT).show();
                    }
                    Toast.makeText(MainActivity.this,
                        "Recorded to " + waveFile.getName(),
                        Toast.LENGTH_SHORT).show();
                }
            }
        });
    }

    #Override
    public void onDestroy() {
        // Free the native recorder when the activity goes away.
        mRecorder.release();
        super.onDestroy();
    }

    /** Creates the AudioRecord and its read buffer (mono 16-bit PCM). */
    private void initRecorder() {
        int bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        mBuffer = new short[bufferSize];
        mRecorder = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT,
            bufferSize);
    }

    /**
     * Spawns a thread that reads from mRecorder while mIsRecording is true,
     * optionally applies gain, writes each sample (big-endian) to 'file', and
     * updates the progress bar with the block's RMS amplitude.
     */
    private void startBufferedWrite(final File file) {
        new Thread(new Runnable() {
            #Override
            public void run() {
                DataOutputStream output = null;
                try {
                    output = new DataOutputStream(new BufferedOutputStream(
                        new FileOutputStream(file)));
                    while (mIsRecording) {
                        double sum = 0; // accumulates squared samples for RMS
                        int readSize = mRecorder.read(mBuffer, 0,
                            mBuffer.length);
                        final int bytesPerSample = bitsPerSamples / 8;
                        final int emptySpace = 64 - bitsPerSamples;
                        int byteIndex = 0;
                        int byteIndex2 = 0;
                        int temp = 0;
                        int mLeftTemp = 0;
                        int mRightTemp = 0;
                        int a = 0;
                        int x = 0;
                        for (int frameIndex = 0; frameIndex < readSize; frameIndex++) {
                            for (int c = 0; c < 1; c++) {
                                if (iGain != 1) {
                                    // NOTE(review): this gain path indexes mBuffer (a
                                    // short[]) as if each element held a single byte
                                    // (& 0xFF drops the high byte and the sign), then
                                    // casts the result back through (byte). The
                                    // arithmetic looks incorrect for 16-bit samples —
                                    // verify against the original source.
                                    long accumulator = 0;
                                    for (int b = 0; b < bytesPerSample; b++) {
                                        accumulator += ((long) (mBuffer[byteIndex++] & 0xFF)) << (b * 8 + emptySpace);
                                    }
                                    double sample = ((double) accumulator / (double) Long.MAX_VALUE);
                                    sample *= iGain;
                                    int intValue = (int) ((double) sample * (double) Integer.MAX_VALUE);
                                    for (int i = 0; i < bytesPerSample; i++) {
                                        mBuffer[i + byteIndex2] = (byte) (intValue >>> ((i + 2) * 8) & 0xff);
                                    }
                                    byteIndex2 += bytesPerSample;
                                }
                            }// end for(channel)
                            // mBuffer[frameIndex] *=iGain;
                            // Clamp to the signed 16-bit range before writing.
                            if (mBuffer[frameIndex] > 32765) {
                                mBuffer[frameIndex] = 32767;
                            } else if (mBuffer[frameIndex] < -32767) {
                                mBuffer[frameIndex] = -32767;
                            }
                            // DataOutputStream writes big-endian; rawToWave()
                            // byte-swaps back to little-endian for the WAV file.
                            output.writeShort(mBuffer[frameIndex]);
                            sum += mBuffer[frameIndex] * mBuffer[frameIndex];
                        }
                        if (readSize > 0) {
                            final double amplitude = sum / readSize;
                            mProgressBar.setProgress((int) Math.sqrt(amplitude)); // RMS level
                        }
                    }
                } catch (IOException e) {
                    // NOTE(review): this Toast is created on the worker thread —
                    // normally Toasts must run on the UI thread; confirm.
                    Toast.makeText(MainActivity.this, e.getMessage(),
                        Toast.LENGTH_SHORT).show();
                } finally {
                    mProgressBar.setProgress(0);
                    if (output != null) {
                        try {
                            output.flush();
                        } catch (IOException e) {
                            Toast.makeText(MainActivity.this, e.getMessage(),
                                Toast.LENGTH_SHORT).show();
                        } finally {
                            try {
                                output.close();
                            } catch (IOException e) {
                                Toast.makeText(MainActivity.this, e.getMessage(),
                                    Toast.LENGTH_SHORT).show();
                            }
                        }
                    }
                }
            }
        }).start();
    }

    /**
     * Wraps the raw big-endian PCM capture in a little-endian WAV container
     * (mono, 16-bit, SAMPLE_RATE).
     */
    private void rawToWave(final File rawFile, final File waveFile)
        throws IOException {
        byte[] rawData = new byte[(int) rawFile.length()];
        DataInputStream input = null;
        try {
            input = new DataInputStream(new FileInputStream(rawFile));
            // NOTE(review): a single read() is not guaranteed to fill the
            // whole array — confirm this is acceptable for these file sizes.
            input.read(rawData);
        } finally {
            if (input != null) {
                input.close();
            }
        }
        DataOutputStream output = null;
        try {
            output = new DataOutputStream(new FileOutputStream(waveFile));
            // WAVE header
            // see http://ccrma.stanford.edu/courses/422/projects/WaveFormat/
            writeString(output, "RIFF"); // chunk id
            writeInt(output, 36 + rawData.length); // chunk size
            writeString(output, "WAVE"); // format
            writeString(output, "fmt "); // subchunk 1 id
            writeInt(output, 16); // subchunk 1 size
            writeShort(output, (short) 1); // audio format (1 = PCM)
            writeShort(output, (short) 1); // number of channels
            writeInt(output, SAMPLE_RATE); // sample rate
            writeInt(output, SAMPLE_RATE * 2); // byte rate
            writeShort(output, (short) 2); // block align
            writeShort(output, (short) 16); // bits per sample
            writeString(output, "data"); // subchunk 2 id
            writeInt(output, rawData.length); // subchunk 2 size
            // Audio data (conversion big endian -> little endian): reading the
            // big-endian raw bytes through a little-endian view and writing
            // them back big-endian swaps each sample's bytes exactly once.
            short[] shorts = new short[rawData.length / 2];
            ByteBuffer.wrap(rawData).order(ByteOrder.LITTLE_ENDIAN)
                .asShortBuffer().get(shorts);
            ByteBuffer bytes = ByteBuffer.allocate(shorts.length * 2);
            for (short s : shorts) {
                // Apply Gain
                /*
                 * s *= iGain; if(s>32767) { s=32767; } else if(s<-32768) {
                 * s=-32768; }
                 */
                bytes.putShort(s);
            }
            output.write(bytes.array());
        } finally {
            if (output != null) {
                output.close();
            }
        }
    }

    /** Timestamp-named file on external storage with the given extension. */
    private File getFile(final String suffix) {
        Time time = new Time();
        time.setToNow();
        return new File(Environment.getExternalStorageDirectory(),
            time.format("%Y%m%d%H%M%S") + "." + suffix);
    }

    /** Little-endian 32-bit write (OutputStream.write keeps the low 8 bits). */
    private void writeInt(final DataOutputStream output, final int value)
        throws IOException {
        output.write(value >> 0);
        output.write(value >> 8);
        output.write(value >> 16);
        output.write(value >> 24);
    }

    /** Little-endian 16-bit write. */
    private void writeShort(final DataOutputStream output, final short value)
        throws IOException {
        output.write(value >> 0);
        output.write(value >> 8);
    }

    /** Writes the string's chars as single bytes (ASCII chunk ids). */
    private void writeString(final DataOutputStream output, final String value)
        throws IOException {
        for (int i = 0; i < value.length(); i++) {
            output.write(value.charAt(i));
        }
    }
}

Simple Producer-Consumer example

I am developing simple producer-consumer example. One thread records audio samples using AudioRecord class and writes them into buffer. Second one just reads the buffer and does nothing. When user wants to stop recording first thread writes special characters into the buffer and its a indicator for the other that reading is over. Here is my code
public class SpellCollectorActivity extends Activity implements OnClickListener{
private ArrayBlockingQueue<byte[] > audioq;
boolean needToBeStopped = false;
Button generate, action;
private MyRecorder rec;
private MyReader mr;
#Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
action = (Button) findViewById(R.id.actionButton);
action.setOnClickListener(this);
needToBeStopped = false;
audioq = new ArrayBlockingQueue<byte[]>(CAPACITY);
}
public void onClick(View arg0){
switch(arg0.getId()){
case R.id.generateButton:
generateContentToSpell();
break;
case R.id.actionButton:
if(needToBeStopped){
rec.stopThread();
needToBeStopped = false;
action.setText(this.getString(R.string.start));
}else{
rec = new MyRecorder(audioq);
mr = new MyReader(audioq);
rec.start();
mr.start();
needToBeStopped = true;
action.setText(this.getString(R.string.stop));
}
break;
}
}
private class MyRecorder extends Thread{
private static final int freq = 22050;
private static final int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
private static final int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
private final BlockingQueue<byte[] > myRecAudioq;
private AudioRecord recorder;
private boolean recording = false;
int bufferSize;
/*konstruktor*/
public MyRecorder(BlockingQueue<byte[]> q ){
bufferSize = AudioRecord.getMinBufferSize(freq, channelConfiguration, audioEncoding);
myRecAudioq = q;
}
public void run(){
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
freq, channelConfiguration,
audioEncoding, 3*bufferSize);
recorder.startRecording();
recording = true;
byte[] buffer = new byte[bufferSize];
while(recording){
int readBufferSize = recorder.read(buffer, 0, bufferSize);
if(readBufferSize>0){
try {
myRecAudioq.put(buffer);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
}
public void stopThread(){
recorder.stop();
recording = false;
byte[] buffer = new byte[bufferSize];
for(int i=0;i<bufferSize;i++){
buffer[i] =(byte) 0xff;
}
try {
myRecAudioq.put(buffer);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
private class MyReader extends Thread{
private final BlockingQueue<byte[]> bq;
private static final int freq = 22050;
private static final int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
private static final int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
private int counter = 0;
public MyReader(BlockingQueue<byte[]> q){
bq = q;
}
public void run(){
int buffSize = AudioRecord.getMinBufferSize(freq, channelConfiguration, audioEncoding);
byte[] compareBuffer= new byte[buffSize];
for(int i=0;i<buffSize;i++){
compareBuffer[i] = (byte)0xff;
}
boolean reading = true;
byte[] buffer = null;
do{
try {
buffer = bq.take();
reading = buffer.equals(compareBuffer);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
if(reading){
int c=1;
}
}while(!reading);
int a=5;
}
}
}
I tried to put a breakpoint in the MyReader class at the line
int c=1;
but it's never reached. I wrote this code using this example. What could be the problem?
The value of 'reading' is a comparison between 2 byte[].
This can only be true if they both are the same object (pointer, if you will), that is, if you first call buffer = compareBuffer
What you actually want to do is compare all elements of the buffer, for example using the java.util.Arrays class :
reading = Arrays.equals(compareBuffer, buffer);

Categories

Resources