Sound synchronization? - Android

I'm a newbie in programming and I'm trying to build an audio app that plays audio samples, but I want to keep them all in sync. Think of a timing loop that counts 0, 1, 2, 3: when the user taps "play/stop", the audio should only start or stop when the loop is at "0".
This is one of my classes, where I define the play and stop methods.
public class Sample {
private static final int SAMPLE_RATE = 44100;
private String name;
private AudioTrack audioTrack;
private int loopPoint;
int soundId;
private Uri uri;
private Context context;
private MediaPlayer currentPlayer;
private boolean isImported;
private boolean isLooping = false;
public Sample(String name, byte[] soundBytes) {
this.name = name;
loopPoint = soundBytes.length / 2;
isImported = false;
audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, SAMPLE_RATE,
AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
soundBytes.length, AudioTrack.MODE_STATIC);
audioTrack.write(soundBytes, 0, soundBytes.length);
}
public Sample(String name, File file, Context context) {
this.name = name;
this.context = context;
isImported = true;
uri = Uri.parse(file.getAbsolutePath());
}
public String getName() {
return name;
}
public void updateSample(byte[] soundBytes) {
if (!isImported) {
audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, SAMPLE_RATE,
AudioFormat.CHANNEL_OUT_MONO,
AudioFormat.ENCODING_PCM_16BIT, soundBytes.length,
AudioTrack.MODE_STATIC);
audioTrack.write(soundBytes, 0, soundBytes.length);
}
}
public void play(boolean isLooped) {
isLooping = isLooped;
if (isImported) {
if (currentPlayer != null) {
currentPlayer.seekTo(0);
} else {
currentPlayer = MediaPlayer.create(context, uri);
}
currentPlayer.setLooping(isLooped);
currentPlayer.start();
} else {
// setPlaybackRate() only applies to the AudioTrack branch; calling it
// before the isImported check would throw a NullPointerException for
// imported samples, where audioTrack is null
audioTrack.setPlaybackRate(88200);
audioTrack.stop();
audioTrack.reloadStaticData();
if (isLooped) {
audioTrack.setLoopPoints(0, loopPoint, -1);
} else {
audioTrack.setLoopPoints(0, 0, 0);
}
audioTrack.play();
}
}
public void stop() {
try {
if (isImported && currentPlayer != null) {
currentPlayer.stop();
currentPlayer.release();
currentPlayer = null;
} else if (!isImported && audioTrack != null) {
audioTrack.stop();
}
} catch (Exception e) {
e.printStackTrace();
}
isLooping = false;
}
public boolean isImported() {
return isImported;
}
public boolean isLooping() {
return isLooping;
}
}

You can use currentPlayer.getCurrentPosition() to get the current playback position of the sample in milliseconds.
Say your tempo is 120 bpm. That's half a second per beat.
Now say we are in 4/4 time: we have 4 beats in a bar, so each bar is 2 seconds long.
So to stop playback only at the start of a bar, you would need to stop when getCurrentPosition() is a multiple of 2000 milliseconds.
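For example, barLengthMillis (used in the snippet below) falls straight out of the tempo and the time signature; a quick sketch with illustrative names:

int bpm = 120; // tempo
int beatsPerBar = 4; // 4/4 time
int beatLengthMillis = 60000 / bpm; // 500 ms per beat at 120 bpm
int barLengthMillis = beatLengthMillis * beatsPerBar; // 2000 ms per bar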
You could make the MediaPlayer stop at the right time with something like the logic below:
onUserPressedStop() {
// Check if the remainder is 0 when you divide by bar length
if ((currentPlayer.getCurrentPosition() % barLengthMillis) == 0) {
// We are at the start of a bar, so stop and release the MediaPlayer
stopMyMediaPlayer();
}
// Else we're not at the start of a bar, so find out how much longer we need to wait
else {
// The wait is the time remaining until the next bar boundary,
// not the time already elapsed within the current bar
final int waitTime = barLengthMillis - (currentPlayer.getCurrentPosition() % barLengthMillis);
// Stop and release the MediaPlayer once the necessary time has elapsed
final Handler handler = new Handler();
handler.postDelayed(new Runnable() {
@Override
public void run() {
stopMyMediaPlayer();
}
}, waitTime);
}
}
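Bear in mind that getCurrentPosition() only has millisecond resolution and Handler.postDelayed() does not guarantee exact timing, so the stop will land near the bar boundary rather than exactly on it; for a play/stop UI that is usually close enough.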

Related

Detect the presence of environmental noise

I am very new to Android development and don't have much experience, so this question might be very simple. I want to detect whether there is noise in the environment using the microphone. If there is no mic on the phone, I will show a toast with a relevant message.
I found the code here: android: Detect sound level
On the main activity I have a button. Pressing the button should toast the result, but I only get 0.0 even though there is noise in the room. Could someone give me a hint on this, please?
public class MainActivity extends AppCompatActivity implements SensorEventListener {
double soundLevel;
Button noiseButton;
Context mContext = this;
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main); // layout name assumed
noiseButton = findViewById(R.id.noiseCheck);
PackageManager PM= this.getPackageManager();
final boolean microphone = PM.hasSystemFeature(PackageManager.FEATURE_MICROPHONE);
noiseButton.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
if (microphone){
double soundLevel = detectEnvironmentalNoise();
Toast.makeText(mContext, "Environmental noise level is " + soundLevel , Toast.LENGTH_LONG).show();
}
else{
Toast.makeText(mContext, "This device is not equipped to microphone to detect environmental noise", Toast.LENGTH_LONG).show();
}
}
});
}
public double detectEnvironmentalNoise() {
AudioRecord audio = null;
int sampleRate = 8000;
double lastLevel = 0;
try {
int bufferSize = AudioRecord.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT);
audio = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT, bufferSize);
} catch (Exception e) {
android.util.Log.e("TrackingFlow", "Exception", e);
}
audio.startRecording();
short[] buffer = new short[100];
int bufferReadResult = 1;
if (audio != null) {
// Sense the voice...
bufferReadResult = audio.read(buffer, 0, 10000);
double sumLevel = 0;
for (int i = 0; i < bufferReadResult; i++) {
sumLevel += buffer[i];
}
lastLevel = Math.abs((sumLevel / bufferReadResult));
}
return lastLevel;
}
}
You need to implement getMaxAmplitude() yourself for AudioRecord, as described in the post below:
Implement getMaxAmplitude for audioRecord
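As a rough illustration of that idea (a sketch, not the linked post's exact code; readPeakAmplitude is a hypothetical helper), you read a block of 16-bit samples from a recording AudioRecord and take the peak absolute value:

// Hypothetical helper: assumes 'record' is already initialized and recording.
// Returns the peak absolute sample value of one block (0..32767).
private int readPeakAmplitude(AudioRecord record, short[] buffer) {
    int read = record.read(buffer, 0, buffer.length);
    int peak = 0;
    for (int i = 0; i < read; i++) {
        int abs = Math.abs(buffer[i]);
        if (abs > peak) {
            peak = abs;
        }
    }
    return peak;
}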
Or you can use MediaRecorder as below:
public class SoundMeter {
private MediaRecorder mRecorder = null;
public void start() {
if (mRecorder == null) {
mRecorder = new MediaRecorder();
mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
mRecorder.setOutputFile("/dev/null");
try {
mRecorder.prepare();
} catch (IOException e) {
e.printStackTrace();
}
mRecorder.start();
}
}
public void stop() {
if (mRecorder != null) {
mRecorder.stop();
mRecorder.release();
mRecorder = null;
}
}
public double getAmplitude() {
if (mRecorder != null)
return mRecorder.getMaxAmplitude();
else
return 0;
}
}
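Typical usage is to start the meter and then poll it periodically; note that getMaxAmplitude() returns the maximum amplitude since the previous call, so the first reading is usually 0 (a sketch; the one-second interval is arbitrary):

final SoundMeter meter = new SoundMeter();
meter.start();
final Handler handler = new Handler();
handler.postDelayed(new Runnable() {
    @Override
    public void run() {
        double amplitude = meter.getAmplitude(); // max since the last call
        Log.d("SoundMeter", "amplitude = " + amplitude);
        handler.postDelayed(this, 1000);
    }
}, 1000);
// later, when done: meter.stop();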

musicg library for clap detection not working on Android

I am using the musicg library for clap detection, but whenever the activity starts it reports continuous clap detections without any clap occurring.
I suspected a value issue with the bit rate or frame size.
This is my code:
RecorderThread.java
public class RecorderThread extends Thread {
private AudioRecord audioRecord;
private boolean isRecording;
private int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
private int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
private int sampleRate = 44100;
private int frameByteSize = 2048; // for 1024 fft size (16bit sample size)
byte[] buffer;
public RecorderThread() {
int recBufSize = AudioRecord.getMinBufferSize(sampleRate, channelConfiguration, audioEncoding); // need to be larger than size of a frame
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate, channelConfiguration, audioEncoding, recBufSize);
buffer = new byte[frameByteSize];
}
public AudioRecord getAudioRecord() {
return audioRecord;
}
public boolean isRecording() {
return this.isAlive() && isRecording;
}
public void startRecording() {
try {
audioRecord.startRecording();
isRecording = true;
} catch (Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
try {
audioRecord.stop();
isRecording = false;
} catch (Exception e) {
e.printStackTrace();
}
}
public byte[] getFrameBytes() {
audioRecord.read(buffer, 0, frameByteSize);
// analyze sound
int totalAbsValue = 0;
short sample = 0;
float averageAbsValue = 0.0f;
for (int i = 0; i < frameByteSize; i += 2) {
// combine two little-endian bytes into one 16-bit sample; mask the low
// byte so its sign bit is not extended into the high byte
sample = (short) ((buffer[i] & 0xFF) | (buffer[i + 1] << 8));
totalAbsValue += Math.abs(sample);
}
averageAbsValue = totalAbsValue / (frameByteSize / 2f); // average per sample, not per byte
//System.out.println(averageAbsValue);
// no input
if (averageAbsValue < 30) {
return null;
}
return buffer;
}
public void run() {
startRecording();
}
}
DetectorThread.java
public class DetectorThread extends Thread {
private RecorderThread recorder;
private WaveHeader waveHeader;
private ClapApi clapApi;
private volatile Thread _thread;
private LinkedList<Boolean> clapResultList = new LinkedList<Boolean>();
private int numClaps;
private int totalClapsDetected = 0;
private int clapCheckLength = 3;
private int clapPassScore = 3;
public DetectorThread(RecorderThread recorder) {
this.recorder = recorder;
AudioRecord audioRecord = recorder.getAudioRecord();
int bitsPerSample = 0;
if (audioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT) {
bitsPerSample = 16;
} else if (audioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_8BIT) {
bitsPerSample = 8;
}
int channel = 0;
// whistle detection only supports mono channel
//if (audioRecord.getChannelConfiguration() == AudioFormat.CHANNEL_CONFIGURATION_MONO) {
channel = 1;
//}
waveHeader = new WaveHeader();
waveHeader.setChannels(channel);
waveHeader.setBitsPerSample(bitsPerSample);
waveHeader.setSampleRate(audioRecord.getSampleRate());
clapApi = new ClapApi(waveHeader);
}
private void initBuffer() {
numClaps = 0;
clapResultList.clear();
// init the first frames
for (int i = 0; i < clapCheckLength; i++) {
clapResultList.add(false);
}
// end init the first frames
}
public void start() {
_thread = new Thread(this);
_thread.start();
}
public void stopDetection() {
_thread = null;
}
public void run() {
try {
byte[] buffer;
initBuffer();
Thread thisThread = Thread.currentThread();
while (_thread == thisThread) {
// detect sound
buffer = recorder.getFrameBytes();
// audio analyst
if (buffer != null) {
// sound detected
MainActivity.clapsValue = numClaps;
// whistle detection
//System.out.println("*Whistle:");
boolean isClap = clapApi.isClap(buffer);
if (clapResultList.getFirst()) {
numClaps--;
}
clapResultList.removeFirst();
clapResultList.add(isClap);
if (isClap) {
numClaps++;
}
//System.out.println("num:" + numWhistles);
if (numClaps >= clapPassScore) {
// clear buffer
initBuffer();
totalClapsDetected++;
}
// end whistle detection
} else {
// no sound detected
if (clapResultList.getFirst()) {
numClaps--;
}
clapResultList.removeFirst();
clapResultList.add(false);
MainActivity.clapsValue = numClaps;
}
// end audio analyst
}
} catch (Exception e) {
e.printStackTrace();
}
}
public int getTotalClapsDetected() {
return totalClapsDetected;
}
}
MainActivity.java
public class MainActivity extends Activity {
public static final int DETECT_NONE = 0;
public static final int DETECT_CLAP = 1;
public static int selectedDetection = DETECT_NONE;
private DetectorThread detectorThread;
private RecorderThread recorderThread;
private Thread detectedTextThread;
public static int clapsValue = 0;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
startVoiceDetection();
}
@Override
protected void onPause() {
super.onPause();
stopVoiceDetection();
}
@Override
protected void onDestroy() {
super.onDestroy();
android.os.Process.killProcess(android.os.Process.myPid());
}
private void startVoiceDetection() {
selectedDetection = DETECT_CLAP;
recorderThread = new RecorderThread();
recorderThread.start();
detectorThread = new DetectorThread(recorderThread);
detectorThread.start();
goListeningView();
}
private void stopVoiceDetection() {
if (recorderThread != null) {
recorderThread.stopRecording();
recorderThread = null;
}
if (detectorThread != null) {
detectorThread.stopDetection();
detectorThread = null;
}
selectedDetection = DETECT_NONE;
}
private void goListeningView() {
if (detectedTextThread == null) {
detectedTextThread = new Thread() {
public void run() {
try {
while (recorderThread != null && detectorThread != null) {
runOnUiThread(new Runnable() {
public void run() {
if (detectorThread != null) {
Log.e("Clap", "Detected");
}
}
});
sleep(100);
}
} catch (Exception e) {
e.printStackTrace();
} finally {
detectedTextThread = null;
}
}
};
detectedTextThread.start();
}
}
}
Change
AudioFormat.CHANNEL_CONFIGURATION_MONO
to
AudioFormat.CHANNEL_IN_MONO
That fixed it for me.
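Applied to the RecorderThread above, the field becomes:

private int channelConfiguration = AudioFormat.CHANNEL_IN_MONO; // was the deprecated CHANNEL_CONFIGURATION_MONO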

Media Player using AudioTrack class does not resume after pause()

I'm building, inside my existing app, a player that uses the AudioTrack class in MODE_STATIC, because I want to implement the time-stretch and loop-points features.
The code works for start() and stop(), but when paused, if I try to resume by calling play() again, the seek bar stays fixed and no audio is played.
Now, from the docs:
public void pause(): Pauses the playback of the audio data. Data that has not been played back will not be discarded. Subsequent calls to play() will play this data back. See flush() to discard this data.
It seems easy enough to understand, but there is something that escapes me.
Can someone help me?
Is it necessary to create boolean variables like start, play, pause, stopAudio, etc.?
If so, what is the point of the methods inherited from the AudioTrack class?
I have already built this in MODE_STREAM using such boolean variables, but I need MODE_STATIC.
This is the code, thanks:
Button playpause, stop;
SeekBar posBar;
int sliderval=0;
int headerOffset = 0x2C;
File file =new File(Environment.getExternalStorageDirectory(), "raw.pcm");
int fileSize = (int) file.length();
int dataSize = fileSize-headerOffset ;
byte[] dataArray = new byte[dataSize];
int posValue;
int dataBytesRead = initializeTrack();
AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 44100,
AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT, dataBytesRead , AudioTrack.MODE_STATIC);
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
playpause= (Button)(findViewById(R.id.playpause));
stop= (Button)(findViewById(R.id.stop));
posBar=(SeekBar)findViewById(R.id.posBar);
// create a listener for the slider bar;
OnSeekBarChangeListener listener = new OnSeekBarChangeListener() {
public void onStopTrackingTouch(SeekBar seekBar) { }
public void onStartTrackingTouch(SeekBar seekBar) { }
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
if (fromUser) { sliderval = progress;}
}
};
// set the listener on the slider
posBar.setOnSeekBarChangeListener(listener); }
public void toggleButtonSound(View button)
{
switch (button.getId())
{
case R.id.playpause:
play();
break;
case R.id.stop:
stop();
break;
}
}
private void stop() {
// getPlayState(), not getState(), reports the PLAYSTATE_* values
if (audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING ||
audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PAUSED ||
audioTrack.getPlayState() == AudioTrack.PLAYSTATE_STOPPED) {
audioTrack.stop();
resetPlayer();
}
}
Context context;
private double actualPos=0;
public void pause() {}
public void play()
{
if (audioTrack.getPlayState()==AudioTrack.PLAYSTATE_PLAYING)
{ //Log.i("", "Play pressed in state "+audioTrack.getPlayState());
audioTrack.pause();
}
else if (audioTrack.getPlayState()==AudioTrack.PLAYSTATE_PAUSED)
{ //Log.i("", "Play pressed in state "+audioTrack.getPlayState());
audioTrack.play();
}
else if (audioTrack.getPlayState()==AudioTrack.PLAYSTATE_STOPPED)
{ //Log.i("", "Play pressed in state "+audioTrack.getPlayState());
audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 44100, AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT, dataSize, AudioTrack.MODE_STATIC);
audioTrack.write(dataArray, 0, dataBytesRead);
audioTrack.play();
}
posBar.setMax((int) (dataBytesRead/2)); // set the seek bar's maximum (number of 16-bit samples)
audioTrack.setNotificationMarkerPosition((int) (dataSize/2));
audioTrack.setPositionNotificationPeriod(1000);
audioTrack.setPlaybackPositionUpdateListener(new OnPlaybackPositionUpdateListener() {
@Override
public void onPeriodicNotification(AudioTrack track) {
posBar.setProgress(audioTrack.getPlaybackHeadPosition());
Log.i("", " " + audioTrack.getPlaybackHeadPosition() + " " + dataBytesRead/2);
}
@Override
public void onMarkerReached(AudioTrack track) {
Log.i("", " End reached ");
audioTrack.pause();
audioTrack.flush();
audioTrack.release();
posBar.setProgress(0);
resetPlayer();}
});
}
private int initializeTrack() {
InputStream is;
BufferedInputStream bis;
DataInputStream dis;
int temp = 0;
try {
is = new FileInputStream(file);
bis = new BufferedInputStream(is);
dis = new DataInputStream(bis);
temp = dis.read(dataArray, 0, dataSize);
dis.close();
bis.close();
is.close();
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return temp;
}
public void resetPlayer() {
audioTrack.flush();
audioTrack.release();
posBar.setProgress(0);
sliderval=0;
}
You implemented AudioTrack in such a way that, even while it is paused, the contents of the file keep being written to the AudioTrack.
In my case I also pause the data upload to the AudioTrack, like this:
while (byteOffset < fileLength) {
if (isPaused) {
// don't feed the track while paused; sleep briefly instead of spinning
try {
Thread.sleep(50);
} catch (InterruptedException e) {
break;
}
continue;
}
ret = in.read(byteData, 0, byteCount);
if (ret != -1) { // Write the byte array to the track
audioTrack.write(byteData, 0, ret);
byteOffset += ret;
} else
break;
}
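The pause toggle that drives this loop might look like the following (a sketch; isPaused is the same flag the loop checks, declared volatile because it is written from another thread):

private volatile boolean isPaused = false;

public void togglePause() {
    if (isPaused) {
        audioTrack.play();   // resume consuming the buffered data
        isPaused = false;    // the writer loop starts feeding again
    } else {
        audioTrack.pause();  // stop playback; already-written data is kept
        isPaused = true;
    }
}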
Then, when I unpause the AudioTrack, the file-upload loop resumes as well. I guess that's it. I should also mention that, even when the AudioTrack is playing, the following checks:
if (audioTrack.getPlayState()==AudioTrack.PLAYSTATE_PLAYING)
and
if (audioTrack.getPlayState()==AudioTrack.PLAYSTATE_PAUSED)
do not work for me: getPlayState() always returns 1 (AudioTrack.PLAYSTATE_STOPPED), no matter whether it is playing or has been paused.

How can I play a PCM file?

The following code records audio and stores it on the SD card in PCM format.
The code works for me, but the PCM file doesn't play!
I got this code from this link: Android : recording audio using audiorecord class play as fast forwarded
I need to play the PCM file. How can I do that?
public class Audio_Record extends Activity {
private static final int RECORDER_SAMPLERATE = 8000;
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_MONO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private AudioRecord recorder = null;
private Thread recordingThread = null;
private boolean isRecording = false;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
setButtonHandlers();
enableButtons(false);
int bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE,
RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
System.out.println("BUFFER SIZE VALUE IS " + bufferSize);
}
private void setButtonHandlers() {
((Button) findViewById(R.id.btnStart)).setOnClickListener(btnClick);
((Button) findViewById(R.id.btnStop)).setOnClickListener(btnClick);
}
private void enableButton(int id, boolean isEnable) {
((Button) findViewById(id)).setEnabled(isEnable);
}
private void enableButtons(boolean isRecording) {
enableButton(R.id.btnStart, !isRecording);
enableButton(R.id.btnStop, isRecording);
}
int BufferElements2Rec = 1024; // 1024 short elements = 2048 (2K) bytes per read
int BytesPerElement = 2; // 2 bytes per element in 16-bit format
private void startRecording() {
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
RECORDER_SAMPLERATE, RECORDER_CHANNELS,
RECORDER_AUDIO_ENCODING, BufferElements2Rec * BytesPerElement);
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
private byte[] short2byte(short[] sData) {
int shortArrsize = sData.length;
byte[] bytes = new byte[shortArrsize * 2];
for (int i = 0; i < shortArrsize; i++) {
bytes[i * 2] = (byte) (sData[i] & 0x00FF);
bytes[(i * 2) + 1] = (byte) (sData[i] >> 8);
sData[i] = 0;
}
return bytes;
}
private void writeAudioDataToFile() {
// Write the output audio in byte
String filePath = "/sdcard/voice8K16bitmono.pcm";
short sData[] = new short[BufferElements2Rec];
FileOutputStream os = null;
try {
os = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
while (isRecording) {
// gets the voice output from microphone to byte format
recorder.read(sData, 0, BufferElements2Rec);
System.out.println("Short wirting to file" + sData.toString());
try {
// // writes the data to file from buffer
// // stores the voice buffer
byte bData[] = short2byte(sData);
os.write(bData, 0, BufferElements2Rec * BytesPerElement);
} catch (IOException e) {
e.printStackTrace();
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
private void stopRecording() {
// stops the recording activity
if (null != recorder) {
isRecording = false;
recorder.stop();
recorder.release();
recorder = null;
recordingThread = null;
}
}
private View.OnClickListener btnClick = new View.OnClickListener() {
public void onClick(View v) {
switch (v.getId()) {
case R.id.btnStart: {
enableButtons(true);
startRecording();
break;
}
case R.id.btnStop: {
enableButtons(false);
stopRecording();
break;
}
}
}
};
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
finish();
}
return super.onKeyDown(keyCode, event);
}
}
Android's media player by default doesn't play PCM files. Either
Copy it from your SD card to your computer and play it there
Write your own player using AudioTrack
Install an app that plays PCM
Here's a tutorial on how to play PCM using the AudioTrack class: (http://jongladwin.blogspot.co.uk/2010/03/android-play-pcmwav-audio-buffer-using.html)
Windows Media Player should be able to play PCM, some alternatives are mentioned here: (http://www.makeuseof.com/answers/play-pcm-file-pc/)
I guess most of the big music player apps on Android will support PCM.
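As a minimal sketch of option 2, assuming the 8 kHz, 16-bit mono file recorded by the code above (error handling trimmed; wrap the I/O in try/catch as needed):

// Load the raw PCM file into memory and play it with a static AudioTrack.
File f = new File("/sdcard/voice8K16bitmono.pcm");
byte[] pcm = new byte[(int) f.length()];
FileInputStream in = new FileInputStream(f);
in.read(pcm);
in.close();

AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, 8000,
        AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
        pcm.length, AudioTrack.MODE_STATIC);
track.write(pcm, 0, pcm.length); // MODE_STATIC: write all data before play()
track.play();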
I also used your code, but my voice recording sounded like a "zzzzz" buzz. So I changed the code a little, and now I can listen to the recording without problems or distortion, both on the smartphone and on the PC (in that case with Audacity).
This is my code:
public class VoiceActivity extends Activity {
private static final String TAG = "VoiceRecord";
private static final int RECORDER_SAMPLERATE = 8000;
private static final int RECORDER_CHANNELS_IN = AudioFormat.CHANNEL_IN_MONO;
private static final int RECORDER_CHANNELS_OUT = AudioFormat.CHANNEL_OUT_MONO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private static final int AUDIO_SOURCE = MediaRecorder.AudioSource.MIC;
// Initialize minimum buffer size in bytes.
private int bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE, RECORDER_CHANNELS_IN, RECORDER_AUDIO_ENCODING);
private AudioRecord recorder = null;
private Thread recordingThread = null;
private boolean isRecording = false;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_voice);
((Button) findViewById(R.id.start_button)).setOnClickListener(btnClick);
((Button) findViewById(R.id.stop_button)).setOnClickListener(btnClick);
enableButtons(false);
}
private void enableButton(int id, boolean isEnable) {
((Button) findViewById(id)).setEnabled(isEnable);
}
private void enableButtons(boolean isRecording) {
enableButton(R.id.start_button, !isRecording);
enableButton(R.id.stop_button, isRecording);
}
private void startRecording() {
if( bufferSize == AudioRecord.ERROR_BAD_VALUE)
Log.e( TAG, "Bad Value for \"bufferSize\", recording parameters are not supported by the hardware");
if( bufferSize == AudioRecord.ERROR )
Log.e( TAG, "Bad Value for \"bufferSize\", implementation was unable to query the hardware for its output properties");
Log.e( TAG, "\"bufferSize\"="+bufferSize);
// Initialize Audio Recorder.
recorder = new AudioRecord(AUDIO_SOURCE, RECORDER_SAMPLERATE, RECORDER_CHANNELS_IN, RECORDER_AUDIO_ENCODING, bufferSize);
// Starts recording from the AudioRecord instance.
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
private void writeAudioDataToFile() {
//Write the output audio in byte
String filePath = "/sdcard/8k16bitMono.pcm";
byte saudioBuffer[] = new byte[bufferSize];
FileOutputStream os = null;
try {
os = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
while (isRecording) {
// gets the voice output from microphone to byte format
recorder.read(saudioBuffer, 0, bufferSize);
try {
// writes the data to file from buffer stores the voice buffer
os.write(saudioBuffer, 0, bufferSize);
} catch (IOException e) {
e.printStackTrace();
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
private void stopRecording() throws IOException {
// stops the recording activity
if (null != recorder) {
isRecording = false;
recorder.stop();
recorder.release();
recorder = null;
recordingThread = null;
PlayShortAudioFileViaAudioTrack("/sdcard/8k16bitMono.pcm");
}
}
private void PlayShortAudioFileViaAudioTrack(String filePath) throws IOException{
// We keep temporarily filePath globally as we have only two sample sounds now..
if (filePath==null)
return;
//Reading the file..
File file = new File(filePath); // for ex. path= "/sdcard/samplesound.pcm" or "/sdcard/samplesound.wav"
byte[] byteData = new byte[(int) file.length()];
Log.d(TAG, (int) file.length()+"");
FileInputStream in = null;
try {
in = new FileInputStream( file );
in.read( byteData );
in.close();
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
// Set and push to audio track..
int intSize = android.media.AudioTrack.getMinBufferSize(RECORDER_SAMPLERATE, RECORDER_CHANNELS_OUT, RECORDER_AUDIO_ENCODING);
Log.d(TAG, intSize+"");
AudioTrack at = new AudioTrack(AudioManager.STREAM_MUSIC, RECORDER_SAMPLERATE, RECORDER_CHANNELS_OUT, RECORDER_AUDIO_ENCODING, intSize, AudioTrack.MODE_STREAM);
if (at!=null) {
at.play();
// Write the byte array to the track
at.write(byteData, 0, byteData.length);
at.stop();
at.release();
}
else
Log.d(TAG, "audio track is not initialised ");
}
private View.OnClickListener btnClick = new View.OnClickListener() {
public void onClick(View v) {
switch (v.getId()) {
case R.id.start_button: {
enableButtons(true);
startRecording();
break;
}
case R.id.stop_button: {
enableButtons(false);
try {
stopRecording();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
break;
}
}
}
};
// onClick of backbutton finishes the activity.
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
finish();
}
return super.onKeyDown(keyCode, event);
}
}
This is my solution:
public class AudioTrackPlayer {
private String pathAudio;
private AudioTrack audioPlayer;
private Thread mThread;
private int bytesread = 0, ret = 0;
private int size;
private FileInputStream in = null;
private byte[] byteData = null;
private int count = 512 * 1024; // 512 kb
private boolean isPlay = true;
private boolean isLooping = false;
private static Handler mHandler;
public AudioTrackPlayer() {
}
public void prepare(String pathAudio){
this.pathAudio = pathAudio;
mHandler = new Handler();
}
public void play(){
stop();
isPlay = true;
bytesread = 0;
ret = 0;
if (pathAudio == null)
return;
audioPlayer = createAudioPlayer();
if (audioPlayer == null) return;
audioPlayer.play();
mThread = new Thread(new PlayerProcess());
mThread.start();
}
private final Runnable mLoopingRunnable = new Runnable() {
@Override
public void run() {
play();
}
};
private AudioTrack createAudioPlayer(){
int intSize = android.media.AudioTrack.getMinBufferSize(16000, AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT);
AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 16000, AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT, intSize, AudioTrack.MODE_STREAM);
if (audioTrack == null) {
Log.d("TCAudio", "audio track is not initialised ");
return null;
}
File file = null;
file = new File(pathAudio);
byteData = new byte[(int) count];
try {
in = new FileInputStream(file);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
size = (int) file.length();
return audioTrack;
}
private class PlayerProcess implements Runnable{
@Override
public void run() {
while (bytesread < size && isPlay) {
if (Thread.currentThread().isInterrupted()) {
break;
}
try {
ret = in.read(byteData, 0, count);
} catch (IOException e) {
e.printStackTrace();
}
if (ret != -1) { // Write the byte array to the track
audioPlayer.write(byteData,0, ret);
bytesread += ret;
} else break;
}
try {
in.close();
} catch (IOException e) {
e.printStackTrace();
}
if (audioPlayer != null) {
// getPlayState(), not getState(), returns the PLAYSTATE_* values
if (audioPlayer.getPlayState() != AudioTrack.PLAYSTATE_STOPPED) {
audioPlayer.stop();
audioPlayer.release();
mThread = null;
}
}
if (isLooping && isPlay) mHandler.postDelayed(mLoopingRunnable, 100);
}
}
public void setLooping(){
isLooping = !isLooping;
}
public void pause(){
}
public void stop(){
isPlay = false;
if (mThread != null) {
mThread.interrupt();
mThread = null;
}
if (audioPlayer != null) {
audioPlayer.stop();
audioPlayer.release();
audioPlayer = null;
}
}
public void reset(){
}
}
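Typical usage of this class (the path is just an example; note that createAudioPlayer() is hard-coded to 16 kHz mono PCM):

AudioTrackPlayer player = new AudioTrackPlayer();
player.prepare("/sdcard/sample16kMono.pcm"); // hypothetical file path
player.play();
// ... later
player.stop();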
And replace the recording code with this version:
private void startRecording() {
if( bufferSize == AudioRecord.ERROR_BAD_VALUE)
Log.e( TAG, "Bad Value for \"bufferSize\", recording parameters are not supported by the hardware");
if( bufferSize == AudioRecord.ERROR )
Log.e( TAG, "Bad Value for \"bufferSize\", implementation was unable to query the hardware for its output properties");
Log.e( TAG, "\"bufferSize\"="+bufferSize);
// Initialize Audio Recorder.
recorder = new AudioRecord(AUDIO_SOURCE, RECORDER_SAMPLERATE, AudioFormat.CHANNEL_CONFIGURATION_MONO, RECORDER_AUDIO_ENCODING, bufferSize);
// Starts recording from the AudioRecord instance.
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}

Detect 'whistle' sound in Android

I want to detect a 'whistle' sound. For that I have implemented http://code.google.com/p/musicg/
The source code itself has an issue: when you start the app it is ready to listen, but when you go back and then restart the detector thread, it no longer triggers whistle detection.
DetectorThread.java
package weetech.wallpaper.services;
import java.util.LinkedList;
import weetech.wallpaper.utils.Debug;
import android.media.AudioFormat;
import android.media.AudioRecord;
import com.musicg.api.WhistleApi;
import com.musicg.wave.WaveHeader;
public class DetectorThread extends Thread {
private RecorderThread recorder;
private WaveHeader waveHeader;
private WhistleApi whistleApi;
private Thread _thread;
private LinkedList<Boolean> whistleResultList = new LinkedList<Boolean>();
private int numWhistles;
private int totalWhistlesDetected = 0;
private int whistleCheckLength = 3;
private int whistlePassScore = 3;
public DetectorThread(RecorderThread recorder) {
this.recorder = recorder;
AudioRecord audioRecord = recorder.getAudioRecord();
int bitsPerSample = 0;
if (audioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT) {
bitsPerSample = 16;
} else if (audioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_8BIT) {
bitsPerSample = 8;
}
int channel = 0;
// whistle detection only supports mono channel
if (audioRecord.getChannelConfiguration() == AudioFormat.CHANNEL_IN_MONO) {
channel = 1;
}
waveHeader = new WaveHeader();
waveHeader.setChannels(channel);
waveHeader.setBitsPerSample(bitsPerSample);
waveHeader.setSampleRate(audioRecord.getSampleRate());
whistleApi = new WhistleApi(waveHeader);
}
private void initBuffer() {
numWhistles = 0;
whistleResultList.clear();
// init the first frames
for (int i = 0; i < whistleCheckLength; i++) {
whistleResultList.add(false);
}
// end init the first frames
}
public void start() {
_thread = new Thread(this);
_thread.start();
}
public void stopDetection() {
_thread = null;
}
@Override
public void run() {
Debug.e("", "DetectorThread started...");
try {
byte[] buffer;
initBuffer();
Thread thisThread = Thread.currentThread();
while (_thread == thisThread) {
// detect sound
buffer = recorder.getFrameBytes();
// audio analyst
if (buffer != null) {
// sound detected
// MainActivity.whistleValue = numWhistles;
// whistle detection
// System.out.println("*Whistle:");
try {
boolean isWhistle = whistleApi.isWhistle(buffer);
Debug.e("", "isWhistle : " + isWhistle + " "
+ buffer.length);
if (whistleResultList.getFirst()) {
numWhistles--;
}
whistleResultList.removeFirst();
whistleResultList.add(isWhistle);
if (isWhistle) {
numWhistles++;
}
// Debug.e("", "numWhistles : " + numWhistles);
if (numWhistles >= whistlePassScore) {
// clear buffer
initBuffer();
totalWhistlesDetected++;
Debug.e("", "totalWhistlesDetected : "
+ totalWhistlesDetected);
if (onWhistleListener != null) {
onWhistleListener.onWhistle();
}
}
} catch (Exception e) {
Debug.w("", "" + e.getCause());
}
// end whistle detection
} else {
// Debug.e("", "no sound detected");
// no sound detected
if (whistleResultList.getFirst()) {
numWhistles--;
}
whistleResultList.removeFirst();
whistleResultList.add(false);
// MainActivity.whistleValue = numWhistles;
}
// end audio analyst
}
Debug.e("", "Terminating detector thread...");
} catch (Exception e) {
e.printStackTrace();
}
}
private OnWhistleListener onWhistleListener;
public void setOnWhistleListener(OnWhistleListener onWhistleListener) {
this.onWhistleListener = onWhistleListener;
}
public interface OnWhistleListener {
void onWhistle();
}
public int getTotalWhistlesDetected() {
return totalWhistlesDetected;
}
}
RecorderThread.java
public class RecorderThread {
private AudioRecord audioRecord;
private int channelConfiguration;
private int audioEncoding;
private int sampleRate;
private int frameByteSize; // for 1024 fft size (16bit sample size)
byte[] buffer;
public RecorderThread() {
sampleRate = 44100;
frameByteSize = 1024 * 2;
channelConfiguration = AudioFormat.CHANNEL_IN_MONO;
audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
int recBufSize = AudioRecord.getMinBufferSize(sampleRate,
channelConfiguration, audioEncoding); // need to be larger than
// size of a frame
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
sampleRate, channelConfiguration, audioEncoding, recBufSize);
buffer = new byte[frameByteSize];
}
public AudioRecord getAudioRecord() {
return audioRecord;
}
public boolean isRecording() {
if (audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
return true;
}
return false;
}
public void startRecording() {
try {
audioRecord.startRecording();
} catch (Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
try {
audioRecord.stop();
} catch (Exception e) {
e.printStackTrace();
}
}
public byte[] getFrameBytes() {
audioRecord.read(buffer, 0, frameByteSize);
// analyze sound
int totalAbsValue = 0;
short sample = 0;
float averageAbsValue = 0.0f;
for (int i = 0; i < frameByteSize; i += 2) {
// combine two little-endian bytes into one 16-bit sample; mask the low
// byte so its sign bit is not extended into the high byte
sample = (short) ((buffer[i] & 0xFF) | (buffer[i + 1] << 8));
totalAbsValue += Math.abs(sample);
}
averageAbsValue = totalAbsValue / (frameByteSize / 2f); // average per sample, not per byte
Debug.e("", "averageAbsValue : " + averageAbsValue);
// no input
if (averageAbsValue < 30) {
return null;
}
return buffer;
}
}
Usage
public class DetectionService extends Service implements
OnWhistleListener {
Handler handler;
private DetectorThread detectorThread;
private RecorderThread recorderThread;
@Override
public void onCreate() {
super.onCreate();
handler = new Handler();
}
@Override
public IBinder onBind(Intent intent) {
return null;
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
try {
if (intent != null && intent.getExtras() != null) {
if (intent.getExtras().containsKey("action")) {
Debug.e("", "action : " + intent.getStringExtra("action"));
if (intent.getStringExtra("action").equals("start")) {
startWhistleDetection();
}
if (intent.getStringExtra("action").equals("stop")) {
stopWhistleDetection();
stopSelf();
}
}
} else {
startWhistleDetection();
Debug.e("", "intent is null OR intent.getExtras() is null");
}
} catch (Exception e) {
e.printStackTrace();
}
return super.onStartCommand(intent, flags, startId);
}
private void startWhistleDetection() {
try {
stopWhistleDetection();
} catch (Exception e) {
e.printStackTrace();
}
recorderThread = new RecorderThread();
recorderThread.startRecording();
detectorThread = new DetectorThread(recorderThread);
detectorThread.setOnWhistleListener(this);
detectorThread.start();
}
private void stopWhistleDetection() {
if (detectorThread != null) {
detectorThread.stopDetection();
detectorThread.setOnWhistleListener(null);
detectorThread = null;
}
if (recorderThread != null) {
recorderThread.stopRecording();
recorderThread = null;
}
}
@Override
public void onDestroy() {
super.onDestroy();
}
@Override
public void onWhistle() {
Debug.e("", "onWhistle()");
}
}
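The service can then be started and stopped with intents like these (assuming it is declared in the manifest):

// start whistle detection
Intent start = new Intent(context, DetectionService.class);
start.putExtra("action", "start");
context.startService(start);

// stop detection and the service itself
Intent stop = new Intent(context, DetectionService.class);
stop.putExtra("action", "stop");
context.startService(stop);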
It detects the whistle the first time, as long as you don't stop the service. But after stopping and starting again, it does not detect (the listener is never called). I failed to trace what the issue could be.
Is there any issue with the recording?
I invested 6 hours in this. :D Unbelievable: the audio recorder is not released when it is stopped. I simply release the recorder after stopping it.
The source code has a minor, silly mistake: it never releases the recorder.
public void stopRecording() {
try {
audioRecord.stop();
audioRecord.release();
} catch (Exception e) {
e.printStackTrace();
}
}
This code works for me:
if (detectorThread != null) {
detectorThread.stopDetection();
recorderThread.stopRecording();
}
