Simple Producer-Consumer example - android

I am developing a simple producer-consumer example. One thread records audio samples using the AudioRecord class and writes them into a buffer. A second one just reads the buffer and does nothing. When the user wants to stop recording, the first thread writes special characters into the buffer, and that is an indicator for the other thread that reading is over. Here is my code
public class SpellCollectorActivity extends Activity implements OnClickListener{
private ArrayBlockingQueue<byte[] > audioq;
boolean needToBeStopped = false;
Button generate, action;
private MyRecorder rec;
private MyReader mr;
#Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
action = (Button) findViewById(R.id.actionButton);
action.setOnClickListener(this);
needToBeStopped = false;
audioq = new ArrayBlockingQueue<byte[]>(CAPACITY);
}
public void onClick(View arg0){
switch(arg0.getId()){
case R.id.generateButton:
generateContentToSpell();
break;
case R.id.actionButton:
if(needToBeStopped){
rec.stopThread();
needToBeStopped = false;
action.setText(this.getString(R.string.start));
}else{
rec = new MyRecorder(audioq);
mr = new MyReader(audioq);
rec.start();
mr.start();
needToBeStopped = true;
action.setText(this.getString(R.string.stop));
}
break;
}
}
private class MyRecorder extends Thread{
private static final int freq = 22050;
private static final int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
private static final int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
private final BlockingQueue<byte[] > myRecAudioq;
private AudioRecord recorder;
private boolean recording = false;
int bufferSize;
/*konstruktor*/
public MyRecorder(BlockingQueue<byte[]> q ){
bufferSize = AudioRecord.getMinBufferSize(freq, channelConfiguration, audioEncoding);
myRecAudioq = q;
}
public void run(){
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
freq, channelConfiguration,
audioEncoding, 3*bufferSize);
recorder.startRecording();
recording = true;
byte[] buffer = new byte[bufferSize];
while(recording){
int readBufferSize = recorder.read(buffer, 0, bufferSize);
if(readBufferSize>0){
try {
myRecAudioq.put(buffer);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
}
public void stopThread(){
recorder.stop();
recording = false;
byte[] buffer = new byte[bufferSize];
for(int i=0;i<bufferSize;i++){
buffer[i] =(byte) 0xff;
}
try {
myRecAudioq.put(buffer);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
private class MyReader extends Thread{
private final BlockingQueue<byte[]> bq;
private static final int freq = 22050;
private static final int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
private static final int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
private int counter = 0;
public MyReader(BlockingQueue<byte[]> q){
bq = q;
}
public void run(){
int buffSize = AudioRecord.getMinBufferSize(freq, channelConfiguration, audioEncoding);
byte[] compareBuffer= new byte[buffSize];
for(int i=0;i<buffSize;i++){
compareBuffer[i] = (byte)0xff;
}
boolean reading = true;
byte[] buffer = null;
do{
try {
buffer = bq.take();
reading = buffer.equals(compareBuffer);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
if(reading){
int c=1;
}
}while(!reading);
int a=5;
}
}
}
I tried to put a breakpoints in MyReader class at line
int c=1;
but it's never reached. I wrote this code using this example. What could be the problem?

The value of 'reading' is a comparison between 2 byte[].
This can only be true if they both are the same object (pointer, if you will), that is, if you first call buffer = compareBuffer
What you actually want to do is compare all elements of the buffer, for example using the java.util.Arrays class :
reading = Arrays.equals(compareBuffer, buffer);

Related

Get realtime amplitude from audiojack (MIC) using AudioRecord(not MediaRecorder)

I am trying to get realtime amplitude. The data is coming from a temp sensor connected to the audio jack via MIC. I need to use AudioRecord as I need the data as it is (raw data). The code I used is as below:
public class MainActivity extends Activity {
private static final String LOG_TAG = "AudioRecordTest";
private static final float VISUALIZER_HEIGHT_DIP = 50f;
private static String mFileName = null;
private float temperature;
double sDataMax = 0;
String sDataString = "";
private boolean isRecording = false;
private Button mRecordButton = null;
private boolean mStartRecording = true;
private AudioRecord mRecorder = null;
private TextView Output;
private static final int samplingRate = 8000;
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_MONO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
// Dispatches to the matching action for the requested recording state.
private void onRecord(boolean start) {
    if (!start) {
        stopRecording();
    } else {
        startRecording();
    }
}
int FileSize = AudioRecord.getMinBufferSize(samplingRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
// Toggles the clickable state of the button with the given resource id.
private void enableButton(int id, boolean isEnable) {
    Button button = (Button) findViewById(id);
    button.setEnabled(isEnable);
}
// The record button is only enabled while we are NOT recording.
private void enableButtons(boolean isRecording) {
    boolean recordEnabled = !isRecording;
    enableButton(R.id.btnRecord, recordEnabled);
}
// Opens an AudioRecord over the mic with a 10x-min-size buffer and starts it.
public void startRecording() {
    int bufferBytes = FileSize * 10;
    mRecorder = new AudioRecord(
            MediaRecorder.AudioSource.MIC,
            samplingRate,
            RECORDER_CHANNELS,
            RECORDER_AUDIO_ENCODING,
            bufferBytes);
    mRecorder.startRecording();
    isRecording = true;
}
private void writeAudioDataToFile(File logFile) {
// Write the output audio in byte
short sData[] = new short[FileSize];
while (isRecording) {
// gets the voice output from microphone to byte format
mRecorder.read(sData, 0, FileSize);
for (int i = 0; i < sData.length; i++) {
if (Math.abs(sData[i]) >= sDataMax) {
sDataMax = Math.abs(sData[i]);
}
;
sDataString += "\n" + sDataMax;
}

AudioRecorder not recording right

I am trying to use AudioRecorder to record a wav file for a certain interval of time (corresponding to a certain number of samples). To stop the recording I used Timer().schedule(delay) to stop after a while, but no matter what the value of that delay is, the file is always the same size, and not readable.
public class Record extends Activity {
private static final int RECORDER_SAMPLERATE = 44100;
int compt=0;
int duration=1;
//int numSample= duration*RECORDER_SAMPLERATE;
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_MONO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private AudioRecord recorder = null;
private volatile Thread recordingThread=null;
private volatile boolean isRecording = false;
int bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE,RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
long mySubChunk1Size = 16;
short myBitsPerSample= 16;
int myFormat = 1;
int myChannels = 1;
long myByteRate =RECORDER_SAMPLERATE* myChannels * myBitsPerSample/8;
int myBlockAlign = myChannels * myBitsPerSample/8;
Handler handler;
String filePath = Environment.getExternalStorageDirectory().getAbsolutePath()+"/recordedSound"+compt+".wav";
DataOutputStream dd;
#Override
// Starts recording as soon as the Activity is created and schedules an
// unconditional stop 5 seconds later on a Timer background thread.
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_raw_rec);
try
{
try {
startRec();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
// TimerTask runs on the Timer's own thread, not the UI thread.
new Timer().schedule(new TimerTask()
{
#Override
public void run()
{
// NOTE(review): releasing `recorder` here races with writeToFile(),
// which may still be blocked inside recorder.read() on the recording
// thread. Setting isRecording=false BEFORE stop()/release() and
// joining recordingThread would be safer -- confirm.
recordingThread.interrupt();
recordingThread=null;
isRecording=false;
recorder.stop();
recorder.release();
recorder = null;
}
}
,5000); //execute after 5 seconds of recording
}//end try
catch(IllegalStateException e) {
e.printStackTrace();
}
}
// Initializes the recorder, starts capture, and spawns the writer thread.
private void startRec() throws IOException{
    recorder = new AudioRecord(
            MediaRecorder.AudioSource.MIC,
            RECORDER_SAMPLERATE,
            RECORDER_CHANNELS,
            RECORDER_AUDIO_ENCODING,
            bufferSize);
    recorder.startRecording();
    isRecording = true;
    Runnable writer = new Runnable() {
        public void run() {
            writeToFile();
        }
    };
    recordingThread = new Thread(writer, "AudioRecorder Thread");
    recordingThread.start();
}
/**
 * Streams PCM data from the recorder into a single playable WAV file.
 *
 * Bugs fixed: the original re-created the FileOutputStream and re-wrote the
 * RIFF header on EVERY loop iteration (truncating the file each time, so it
 * was always one header plus one buffer and unreadable), never wrote the
 * "data" sub-chunk size at all, and ignored the byte count returned by
 * read(). Now the stream is opened once, the header is written once with
 * placeholder sizes, and the two size fields are patched when the loop ends.
 */
private void writeToFile() {
    int totalPcmBytes = 0;
    byte[] data = new byte[bufferSize];
    try {
        dd = new DataOutputStream(new FileOutputStream(filePath));
        // 44-byte RIFF/WAVE header; sizes unknown until recording stops.
        dd.writeBytes("RIFF");
        dd.writeInt(0); // RIFF chunk size, patched below
        dd.writeBytes("WAVE");
        dd.writeBytes("fmt ");
        dd.writeInt(Integer.reverseBytes(16)); // Sub-chunk size, 16 for PCM
        dd.writeShort(Short.reverseBytes((short) 1)); // AudioFormat, 1 for PCM
        dd.writeShort(Short.reverseBytes((short) myChannels)); // 1 mono, 2 stereo
        dd.writeInt(Integer.reverseBytes(RECORDER_SAMPLERATE)); // Sample rate
        dd.writeInt(Integer.reverseBytes(RECORDER_SAMPLERATE * myBitsPerSample * myChannels / 8)); // Byte rate
        dd.writeShort(Short.reverseBytes((short) (myChannels * myBitsPerSample / 8))); // Block align
        dd.writeShort(Short.reverseBytes(myBitsPerSample)); // Bits per sample
        dd.writeBytes("data");
        dd.writeInt(0); // data sub-chunk size, patched below
        while (isRecording) {
            // read() can return 0 or a negative error code; only write real data.
            int read = recorder.read(data, 0, bufferSize);
            if (read > 0) {
                dd.write(data, 0, read);
                totalPcmBytes += read;
            }
        }
        dd.close();
        // Patch the two size fields now that the data length is known.
        java.io.RandomAccessFile raf = new java.io.RandomAccessFile(filePath, "rw");
        try {
            raf.seek(4);
            raf.writeInt(Integer.reverseBytes(36 + totalPcmBytes)); // RIFF chunk size
            raf.seek(40);
            raf.writeInt(Integer.reverseBytes(totalPcmBytes)); // data chunk size
        } finally {
            raf.close();
        }
    } catch (IOException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // recorder may be nulled by the stop Timer while we are still reading
        e.printStackTrace();
    }
}
What am I doing wrong?
You're creating a new WAV file during every iteration of the while loop in writeToFile.
A better approach would be to write just the PCM data buffers (without any header) to a temporary file. And make sure you only create the FileOutputStream once.Then, when the recording has ended, generate the RIFF header and write it to the destination WAV file followed by the contents of your temporary file.

Implement echo cancellation in audio recorder android

I am using AudioRecord class to read voice input.
public class AudioSession {
private boolean isRecording=false;
public AudioRecord recorder;
private int port =50005;
public String ipAddress="10.105.14.252";
private int sampleRate =AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_SYSTEM);;
private int channelConfig =AudioFormat.CHANNEL_IN_MONO;
private int encodingFormat=AudioFormat.ENCODING_PCM_16BIT;
int minBufSize=AudioRecord.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
int bufferSize=0;
DatagramSocket socket;
public void startStreaming(){
new Thread(new Runnable(){
#Override
public
void run(){
try{
socket=new DatagramSocket();
byte[] buffer=new byte[minBufSize];
DatagramPacket packet;
final InetAddress destination=InetAddress.getByName(ipAddress);
recorder=new AudioRecord(MIC,sampleRate,channelConfig,encodingFormat,minBufSize);
if(recorder.getState()==AudioRecord.STATE_INITIALIZED)
recorder.startRecording();
while(isRecording){
bufferSize=recorder.read(buffer,0,minBufSize);
packet=new DatagramPacket(buffer,minBufSize,destination,port);
socket.send(packet);
}
}
catch(Exception e){
e.printStackTrace();
}
finally{
socket.close();
}
}
}).start();
}
and at server side
I am receiving it and giving it as output to speakers
/**
 * UDP audio server: receives PCM packets from clients, plays the current
 * speaker's audio through the local speakers, and tracks clients waiting to
 * talk via the "Raise Hand" / "Withdraw" text commands.
 */
class Server {
    static AudioInputStream ais;
    static AudioFormat audioFormat;
    static int port = 50005;
    static int sampleRate = 44100;
    static SourceDataLine sourceDataLine;
    static boolean streaming = true;
    static int i = 0;
    static Queue<InetAddress> q = new LinkedList<InetAddress>();
    private static InetAddress currentSpeakerAddress;
    static Set<InetAddress> h = new HashSet<InetAddress>();
    public static final String PERMISSION_TEXT = "You may start talking";

    public static void main(String args[]) throws Exception {
        // BUG FIX: bind once. The original created a new DatagramSocket on the
        // same port on every outer iteration without closing the previous one,
        // which throws BindException as soon as the loop repeats.
        DatagramSocket serverSocket = new DatagramSocket(port);
        while (true) {
            byte[] receiveData = new byte[8192];
            audioFormat = new AudioFormat(sampleRate, 16, 1, true, false);
            sourceDataLine = (SourceDataLine) AudioSystem
                    .getLine(new DataLine.Info(SourceDataLine.class, audioFormat));
            sourceDataLine.open(audioFormat);
            sourceDataLine.start();
            DatagramPacket receivePacket = new DatagramPacket(receiveData,
                    receiveData.length);
            while (streaming == true) {
                serverSocket.receive(receivePacket);
                // BUG FIX: only getLength() bytes of the buffer are valid;
                // the original decoded the entire 8K array into the String.
                String requestText = new String(receivePacket.getData(), 0,
                        receivePacket.getLength());
                InetAddress requestAddress = receivePacket.getAddress();
                if (i == 0) {
                    currentSpeakerAddress = requestAddress;
                    notifyToTalk(currentSpeakerAddress);
                }
                if (requestText.contains("Raise Hand")) {
                    if (currentSpeakerAddress.equals(requestAddress)) {
                        System.out.println(requestAddress.getHostAddress()
                                + " is online");
                    } else {
                        storeID(requestAddress);
                    }
                } else if (requestText.contains("Withdraw")) {
                    if (currentSpeakerAddress.equals(requestAddress)) {
                        if (h.isEmpty()) {
                            // break;
                            i = 0;
                        }
                        // currentSpeakerAddress = getNext();
                    } else {
                        if (h.remove(requestAddress)) {
                            q.remove(requestAddress);
                        }
                    }
                } else if (currentSpeakerAddress.equals(requestAddress)) {
                    // BUG FIX: wrap the freshly received bytes; the original
                    // built `baiss` once before any receive and never reset
                    // its read position between packets.
                    ByteArrayInputStream baiss = new ByteArrayInputStream(
                            receivePacket.getData(), 0, receivePacket.getLength());
                    ais = new AudioInputStream(baiss, audioFormat,
                            receivePacket.getLength());
                    // BUG FIX: play only the received bytes, not the stale
                    // tail of the 8K buffer.
                    toSpeaker(java.util.Arrays.copyOf(receivePacket.getData(),
                            receivePacket.getLength()));
                    System.out.println(i++ + " " + receivePacket.getLength());
                }
            }
            sourceDataLine.drain();
            sourceDataLine.close();
        }
    }

    /** Writes the given PCM bytes to the speaker line; failures are logged. */
    public static void toSpeaker(byte soundbytes[]) {
        try {
            sourceDataLine.write(soundbytes, 0, soundbytes.length);
        } catch (Exception e) {
            System.out.println("Not working in speakers...");
        }
    }
}
but using this,
there is a lot of echo
I read somewhere to use short[], but sourceDataLine and Datagram uses byte[] to write.
What should I do so that it could work even for API less than 11...
If you want to do it on the DSP level, you can check the hexagon SDK. I think it requires some extra licensing.

How to record audio using AudioRecorder in Android

I want to capture audio from an Android device. My code below seems to successfully make a wav file on the SD card but it cannot be played. I tried to play it using different media players but none work. There is an issue in my code that is causing this problem.
code
// Records raw PCM from the microphone on a worker thread into /sdcard/test.wav.
// NOTE(review): only raw PCM bytes are written -- no RIFF/WAVE header -- so
// despite the ".wav" name no player can open the file; this is the reported bug.
public class MainActivity extends ActionBarActivity {
private static final String LOG_TAG = "AudioRecordTest";
private static final float VISUALIZER_HEIGHT_DIP = 50f;
static final int AUDIO_PORT = 2048;
static final int SAMPLE_RATE = 8000;
static final int SAMPLE_INTERVAL = 20; // milliseconds
static final int SAMPLE_SIZE = 2; // bytes per sample
// NOTE(review): SAMPLE_INTERVAL appears twice in this product -- possibly a
// typo for SAMPLE_RATE * SAMPLE_INTERVAL; confirm the intended buffer size.
static final int BUF_SIZE = SAMPLE_INTERVAL * SAMPLE_INTERVAL * SAMPLE_SIZE * 2;
// Candidate rates tried in order by findAudioRecord().
private static int[] mSampleRates = new int[]{44100, 44056, 47250, 48000, 22050, 16000, 11025, 8000};
private Thread recordingThread = null;
private boolean isRecording = false;
int BufferElements2Rec = 1024; // want to play 2048 (2K) since 2 bytes we
// use only 1024
int BytesPerElement = 2; // 2 bytes in 16bit format
private int bufferSize;
private AudioRecord recorder;
#Override
// Recording starts as soon as the Activity is created; it only stops when
// the user presses Back (see onBackPressed).
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
startRecording();
}
#Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
// Picks a working AudioRecord configuration and spawns the writer thread.
// NOTE(review): findAudioRecord() can return null when no configuration
// works, in which case recorder.startRecording() throws an NPE -- confirm.
private void startRecording() {
recorder = findAudioRecord();
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
// convert short to byte
// Little-endian 16-bit packing: low byte first, then high byte.
// Side effect: zeroes the input array as it goes.
private byte[] short2byte(short[] sData) {
int shortArrsize = sData.length;
byte[] bytes = new byte[shortArrsize * 2];
for (int i = 0; i < shortArrsize; i++) {
bytes[i * 2] = (byte) (sData[i] & 0x00FF);
bytes[(i * 2) + 1] = (byte) (sData[i] >> 8);
sData[i] = 0;
}
return bytes;
}
// Probes rate/encoding/channel combinations until one initializes.
// NOTE(review): this may select PCM_8BIT or stereo, while
// writeAudioDataToFile() assumes 16-bit samples (reads shorts, divides
// bufferSize by 2) -- the chosen format should be recorded and honored.
// Each failed STATE check also leaks an un-released AudioRecord instance.
public AudioRecord findAudioRecord() {
for (int rate : mSampleRates) {
for (short audioFormat : new short[]{AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT}) {
for (short channelConfig : new short[]{AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_IN_STEREO}) {
try {
Log.d(LOG_TAG, "Attempting rate " + rate + "Hz, bits: " + audioFormat + ", channel: "
+ channelConfig);
bufferSize = AudioRecord.getMinBufferSize(rate, channelConfig, audioFormat);
if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
// check if we can instantiate and have a success
recorder = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, rate, channelConfig, audioFormat, bufferSize);
if (recorder.getState() == AudioRecord.STATE_INITIALIZED)
return recorder;
}
} catch (Exception e) {
Log.e(LOG_TAG, rate + "Exception, keep trying.", e);
}
}
}
}
return null;
}
// Pulls shorts from the recorder and appends them to the output file until
// isRecording goes false.
// NOTE(review): no WAV header is ever written, the return value of read()
// is ignored (partial reads write stale data), and `os` is used unchecked
// after a possible FileNotFoundException left it null -- confirm intent.
private void writeAudioDataToFile() {
/*// Write the output audio in byte
short sData[] = new short[BufferElements2Rec];
while (isRecording) {
// gets the voice output from microphone to byte format
recorder.read(sData, 0, BufferElements2Rec);
System.out.println("Short wirting to file" + sData.toString());
// // stores the voice buffer
byte bData[] = short2byte(sData);
sendLiveAudio(bData);
}*/
String filePath = "/sdcard/test.wav";
short sData[] = new short[bufferSize / 2];
FileOutputStream os = null;
try {
os = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
while (isRecording) {
// gets the voice output from microphone to byte format
recorder.read(sData, 0, bufferSize / 2);
Log.d("eray", "Short wirting to file" + sData.toString());
try {
// // writes the data to file from buffer
// // stores the voice buffer
byte bData[] = short2byte(sData);
os.write(bData, 0, bufferSize);
} catch (IOException e) {
e.printStackTrace();
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
// Stops capture and releases the native AudioRecord resources.
private void stopRecording() {
// stops the recording activity
if (null != recorder) {
isRecording = false;
recorder.stop();
recorder.release();
recorder = null;
recordingThread = null;
}
}
#Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
#Override
// Back button doubles as the "stop recording" control.
public void onBackPressed() {
super.onBackPressed();
stopRecording();
}
}
Try This.....
// Reference answer: records 8 kHz 16-bit mono PCM from the mic into
// /sdcard/voice8K16bitmono.pcm (raw PCM, honestly named -- no WAV header).
public class Audio_Record extends Activity {
private static final int RECORDER_SAMPLERATE = 8000;
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_MONO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private AudioRecord recorder = null;
private Thread recordingThread = null;
private boolean isRecording = false;
#Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
setButtonHandlers();
enableButtons(false);
// NOTE(review): this computed minimum is discarded; startRecording()
// sizes its buffer from BufferElements2Rec * BytesPerElement instead.
int bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE,
RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
}
// Wires the start/stop buttons to the shared click listener below.
private void setButtonHandlers() {
((Button) findViewById(R.id.btnStart)).setOnClickListener(btnClick);
((Button) findViewById(R.id.btnStop)).setOnClickListener(btnClick);
}
private void enableButton(int id, boolean isEnable) {
((Button) findViewById(id)).setEnabled(isEnable);
}
// Exactly one of start/stop is enabled at any time.
private void enableButtons(boolean isRecording) {
enableButton(R.id.btnStart, !isRecording);
enableButton(R.id.btnStop, isRecording);
}
int BufferElements2Rec = 1024; // want to play 2048 (2K) since 2 bytes we use only 1024
int BytesPerElement = 2; // 2 bytes in 16bit format
// Creates the recorder, starts capture, and spawns the file-writer thread.
private void startRecording() {
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
RECORDER_SAMPLERATE, RECORDER_CHANNELS,
RECORDER_AUDIO_ENCODING, BufferElements2Rec * BytesPerElement);
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
//convert short to byte
// Little-endian packing: low byte first. Side effect: zeroes the input.
private byte[] short2byte(short[] sData) {
int shortArrsize = sData.length;
byte[] bytes = new byte[shortArrsize * 2];
for (int i = 0; i < shortArrsize; i++) {
bytes[i * 2] = (byte) (sData[i] & 0x00FF);
bytes[(i * 2) + 1] = (byte) (sData[i] >> 8);
sData[i] = 0;
}
return bytes;
}
// Loops on the recording thread, draining the recorder into the file.
// NOTE(review): the return value of read() is ignored (a partial read
// writes stale samples) and `os` would be null after a
// FileNotFoundException -- confirm acceptable for this sample code.
private void writeAudioDataToFile() {
// Write the output audio in byte
String filePath = "/sdcard/voice8K16bitmono.pcm";
short sData[] = new short[BufferElements2Rec];
FileOutputStream os = null;
try {
os = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
while (isRecording) {
// gets the voice output from microphone to byte format
recorder.read(sData, 0, BufferElements2Rec);
System.out.println("Short wirting to file" + sData.toString());
try {
// // writes the data to file from buffer
// // stores the voice buffer
byte bData[] = short2byte(sData);
os.write(bData, 0, BufferElements2Rec * BytesPerElement);
} catch (IOException e) {
e.printStackTrace();
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
// Stops capture and releases the native AudioRecord resources.
private void stopRecording() {
// stops the recording activity
if (null != recorder) {
isRecording = false;
recorder.stop();
recorder.release();
recorder = null;
recordingThread = null;
}
}
// Shared listener: toggles button state and starts/stops recording.
private View.OnClickListener btnClick = new View.OnClickListener() {
public void onClick(View v) {
switch (v.getId()) {
case R.id.btnStart: {
enableButtons(true);
startRecording();
break;
}
case R.id.btnStop: {
enableButtons(false);
stopRecording();
break;
}
}
}
};
#Override
// Back key finishes the Activity.
// NOTE(review): stopRecording() is NOT called here, so backing out while
// recording leaks the AudioRecord -- confirm.
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
finish();
}
return super.onKeyDown(keyCode, event);
}
}

Android: AudioRecord.read() returns garbage

I've been working on an app that could playback audio from the microphone in real time.
It sets up an AudioRecorder, which initializes without any errors. However, it just returns a bunch of zeros, or a lot of numbers close to the max value of short, when performing read operations. I'm really stuck; it would be very kind if anyone could help me. This is my code:
public class AudioIn extends Thread {
public static final int ERROR_RECORD_INIT = -1;
public static final int ERROR_RECORD_NOTIFICATION = -2;
public static final int ERROR_RECORD_READ = -3;
public static final int SUCCESS = 0;
public static final int audioFrequency = 44100;
public static final int channelConfig = AudioFormat.CHANNEL_IN_MONO;
public static final int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
final int ShortsReadPerCycle = 1024;
private boolean capture = true;
private AudioRecord recorder;
private int effectiveCaptureBufferSize;
private short[] buffer;
private AudioInHandler handler;
public AudioIn()
{
int minDeviceBuffer = AudioRecord.getMinBufferSize(audioFrequency, channelConfig, audioFormat);
Log.d("AudioIn", "Minimum device capture buffer is: " + Integer.toString(minDeviceBuffer) + " bytes");
effectiveCaptureBufferSize = minDeviceBuffer;
Log.d("AudioIn", "Setting capture buffer size to " + effectiveCaptureBufferSize + " bytes");
}
public void close()
{
capture = false;
}
public int samplesPerBuffer()
{
return effectiveCaptureBufferSize / 2;
}
#Override
public void run() {
// TODO Auto-generated method stub
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_AUDIO);
try
{
recorder = new AudioRecord(AudioSource.MIC, audioFrequency, channelConfig, audioFormat, effectiveCaptureBufferSize);
}
catch (Throwable t)
{
handler.onError(ERROR_RECORD_INIT);
return;
}
recorder.startRecording();
while(capture)
{
buffer = new short[ShortsReadPerCycle];
int shortsRead = recorder.read(buffer, 0, buffer.length);
if (shortsRead < 0)
{
new Runnable() {
#Override
public void run() {
// TODO Auto-generated method stub
handler.onError(ERROR_RECORD_READ);
}
}.run();
this.close();
}
else
{
new Runnable() {
#Override
public void run() {
// TODO Auto-generated method stub
handler.onNewData(buffer);
}
}.run();
}
}
Log.d("AudioIn.run()", "Stopping AudioRecord...");
recorder.stop();
Log.d("AudioIn.run()", "Stopped AudioRecord, now releasing...");
recorder.release();
Log.d("AudioIn.run()", "AudioRecord released");
recorder = null;
}
public void setHandler(AudioInHandler handler) {
this.handler = handler;
}
}
Calling read() on AudioRecord once does not ensure the whole short[] to be filled. You should check the returned "shortsRead" for actual short read and keep calling read() until the short[] is filled. Sample code given below.
/**
 * Blocks until exactly {@code length} shorts have been read into
 * {@code data} starting at {@code off}.
 *
 * Bug fixed: AudioRecord.read() can return a negative error code
 * (ERROR_INVALID_OPERATION / ERROR_BAD_VALUE) or 0; the original subtracted
 * that value from the remaining length, so an error made the loop spin
 * forever (or corrupt the offset). We now bail out on any non-positive
 * return instead of looping.
 */
private void readFully(short[] data, int off, int length) {
    while (length > 0) {
        int read = mRec.read(data, off, length);
        if (read <= 0) {
            // Error or no progress -- stop instead of spinning forever.
            break;
        }
        length -= read;
        off += read;
    }
}
In your recording loop
while (!released) {
// fill the pktBuf
short[] pktBuf = new short[pktSize];
readFully(pktBuf, 0, pktSize);
// Do something
}
In this way, everytime a call to read() is made, we increment the offset by "read" and decrement the length remaining by "read", keep reading until the length remaining reaches 0. You will then get a filled short[] with recorded audio data.
Similarly, when you are writing data into an AudioTrack, you have to do the same thing "writeFully()", to ensure the whole short[] is written into the AudioTrack.
Hope it helps.

Categories

Resources