I am trying to make an audio app which projects sound picked up by the microphone through the speaker. I made a toggle button for this app, but after clicking it, it stays highlighted and I can't adjust the system volume.
So how do I break this loop?
/**
 * Wires the toggle button so that toggling it on starts a mic-to-speaker
 * passthrough loop and toggling it off stops it.
 *
 * FIX: the original ran the record/playback loop directly inside onClick,
 * i.e. on the UI thread. That loop never returns, so the button stayed
 * highlighted and volume-key events were never processed. The loop now runs
 * on a worker thread and is controlled by an atomic on/off flag.
 */
private void setUpButton() {
    final ToggleButton tb = (ToggleButton) findViewById(R.id.tb);
    // Flipped on the UI thread, polled by the audio thread. AtomicBoolean
    // guarantees cross-thread visibility without extra locking.
    final java.util.concurrent.atomic.AtomicBoolean running =
            new java.util.concurrent.atomic.AtomicBoolean(false);
    tb.setOnClickListener(new View.OnClickListener() {
        public void onClick(View view) {
            boolean on = ((ToggleButton) view).isChecked();
            if (on) {
                // Only spawn a new loop if one is not already running.
                if (!running.compareAndSet(false, true)) {
                    return;
                }
                new Thread(new Runnable() {
                    public void run() {
                        android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
                        int buffersize = AudioRecord.getMinBufferSize(20000, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
                        AudioRecord arec = new AudioRecord(MediaRecorder.AudioSource.MIC, 20000, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, buffersize);
                        AudioTrack atrack = new AudioTrack(AudioManager.STREAM_MUSIC, 20000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, buffersize, AudioTrack.MODE_STREAM);
                        try {
                            atrack.setPlaybackRate(20000);
                            byte[] buffer = new byte[buffersize];
                            arec.startRecording();
                            atrack.play();
                            // Loop until the toggle is switched off.
                            while (running.get()) {
                                int read = arec.read(buffer, 0, buffersize);
                                if (read > 0) {
                                    // Only forward the bytes actually read.
                                    atrack.write(buffer, 0, read);
                                }
                            }
                        } finally {
                            // Always release the audio hardware, even if the
                            // loop dies with an exception.
                            arec.stop();
                            arec.release();
                            atrack.stop();
                            atrack.release();
                        }
                    }
                }).start();
            } else {
                // Toggled off: signal the worker loop to exit cleanly.
                running.set(false);
            }
        }
    });
}
The toggle button is staying highlighted because you have your infinite while(is){...} loop running in the main UI thread (as per 323go's comment). The main UI thread becomes busy constantly running the infinite loop and therefore does not have the opportunity to process any more UI actions such as further button state updates etc.
Move your infinite loop into its own thread with a message handler that receives messages from your main UI thread telling it when to start or stop. This frees the main UI thread to do what it is meant to be doing: handling the user interface.
For example:
MainActivity
public class MainActivity {
private AudioScheduler mAudioThread;
...
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
mAudioThread = new AudioThread();
mAudioThread.start();
final ToggleButton tb = (ToggleButton) findViewById(R.id.tb);
tb.setOnClickListener(new View.OnClickListener (){
public void onClick(View view) {
Message messageToAudioThread = Message.obtain();
boolean on = ((ToggleButton) view).isChecked();
if (on){
messageToAudioThread.what = AudioThread.PLAY;
} else {
messageToAudioThread.what = AudioThread.STOP;
}
mAudioThread.getHandler().sendMessage(messageToAudioThread);
}
}
#Override
protected void onDestroy() {
super.onDestroy();
mAudioThread.close();
}
}
AudioThread
public class AudioThread extends Thread {
// Handler message constants
public static final int PLAY = 0;
public static final int STOP = 1;
// Class variables
private boolean mRunning = false;
private boolean mPlayAudio = false;
private static class AudioThreadHandler extends Handler {
private final WeakReference<AudioThread> mThread;
public AudioThreadHandler(AudioThread thread) {
mThread = new WeakReference<AudioThread>(thread);
}
#Override
public void handleMessage(Message msg) {
AudioThread thread = mThread.get();
if (thread != null) {
switch(msg.what) {
case PLAY:
thread.mPlayAudio = true;
break;
case STOP:
thread.mPlayAudio = false;
break;
}
}
}
};
private final AudioThreadHandler mAudioThreadHandler = new AudioThreadHandler(this);
#Override
public void run() {
try {
mRunning = true;
while(mRunning) {
if (mPlayAudio) {
// Your code for handling audio recording/playback
// or whatever goes here
} else {
// If you have code you want to keep executing while
// audio is not active, stick it here
}
}
} catch (Exception e) {
Log.e("AudioThread", "Thread Loop Exception: " + e);
}
}
public void close() {
mRunning = false;
}
public Handler getHandler() {
return mAudioThreadHandler;
}
}
From your description you are wanting to record, manipulate (adjust volume etc), and output audio all in real time. For that level of audio processing you will need to understand and use threads. There are plenty of good examples and tutorials out there if you don't already have a good grasp of threads.
Related
I'm trying to access three sensors simultaneously. I need the accelerometer data to detect if the person is running, walking or standing still and MIC and CAMERAMIC data as input to an adaptive filter.
I tried to use threads, one thread for each sensor, but they don't really work in parallel.
//declaring 3 threads
// Worker runnable: marks recording as active and starts capture from the
// first microphone source. NOTE(review): 'isRecording', 'startRecording'
// and 'audiosource' are members of the enclosing activity; the shared flag
// is written from this worker thread — confirm it is volatile/synchronized.
public class firstMicThread implements Runnable {
public void run() {
isRecording = true;
Log.i("Thread 1 ", "first thread started" + String.valueOf(isRecording));
// Presumably blocks for the duration of the recording — TODO confirm.
startRecording(audiosource);
}
}
// Worker runnable: mirrors firstMicThread but drives the second microphone
// source ('audiosource2'). Shares the same 'isRecording' flag as the other
// workers — cross-thread visibility should be verified.
public class secondMicThread implements Runnable {
public void run() {
isRecording = true;
Log.i("Thread 2 ", "second thread started" + String.valueOf(isRecording));
startRecording(audiosource2);
}
}
// Worker runnable: registers the accelerometer listener. NOTE(review):
// registerListener only schedules sensor callbacks (delivered on the main
// thread by default); this runnable returns immediately afterwards, so
// running it on its own thread buys no parallelism.
public class accelerometerThread implements Runnable {
public void run() {
isRecording = true;
Log.i("Thread 3 ", "acceletometer started " + String.valueOf(isRecording));
// onResume();
sensorManager.registerListener( RecordandStore.this, accelerometer, SensorManager.SENSOR_DELAY_NORMAL );
Log.i("Thread 3 ", "acceletometer listener registered" + String.valueOf(isRecording));
}
}
and
// Click handler for the start-recording button: on success it spawns three
// worker threads (two mic captures plus the accelerometer registration).
// NOTE(review): this snippet is truncated in the original post — the switch
// statement, onClick and the listener itself are never closed here.
private View.OnClickListener btnClick = new View.OnClickListener() {
public void onClick(View v) {
switch (v.getId()) {
case R.id.btn_startrec: {
if (checkpermissionfromdevice()) {
Log.i("Start click:", "permissions true");
// first mic
Runnable firstmicR = new firstMicThread();
Thread firstmicthread = new Thread( firstmicR);
// second mic
Runnable secondmicR = new secondMicThread();
Thread secondmicthread = new Thread( secondmicR);
// accelerometer
Runnable acceletometerR = new accelerometerThread();
Thread accelerometerthread = new Thread(acceletometerR);
// NOTE(review): start() does run these concurrently; apparent
// serialization usually comes from blocking calls inside
// startRecording() or contention on shared state, not from here.
firstmicthread.start();
secondmicthread.start();
accelerometerthread.start();
startrec.setEnabled(false);
stoprec.setEnabled(true);}
else { // if check permissionfordevice == false
requestPermission();
}
break;}
Before moving on with my project I thought what I should use to run tasks in background.
The Activity has to do following on button hold - play audio, record video, record user voice input, move graph(set new coordinates with each iteration).
At the moment I am using MediaPlayer built in ASync for playing audio. RunOnUiThread with Runnable for audio recording and Handler for moving graph and I haven't gotten to recording video yet. The UI should be updated after every 3 or 5 ms.
My question is what would be the best option to use in this situation.
This is my audio recording and pitch detection:
// Capture from the default microphone (44.1 kHz, 2048-sample buffer, no
// overlap) and run FFT_YIN pitch detection on a dedicated audio thread.
// FIX: "#Override" was a markdown-mangled "@Override".
AudioDispatcher dispatcher =
        AudioDispatcherFactory.fromDefaultMicrophone(44100, 2048, 0);
PitchDetectionHandler pdh = new PitchDetectionHandler() {
    @Override
    public void handlePitch(PitchDetectionResult res, AudioEvent e) {
        final float pitchInHz = res.getPitch();
        // Views may only be touched from the main thread, so hop over.
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                noteText.setText(micInputResult.processPitch(pitchInHz));
                pitchText.setText("" + pitchInHz);
                gameView.setSingerArrowY(micInputResult.processPlayerCoordinate(pitchInHz));
                gameView.invalidate();
            }
        });
    }
};
AudioProcessor pitchProcessor = new PitchProcessor(PitchProcessor.PitchEstimationAlgorithm.FFT_YIN, 44100, 2048, pdh);
dispatcher.addAudioProcessor(pitchProcessor);
// The dispatcher is a Runnable that loops reading the mic; keep it off the
// UI thread.
Thread audioThread = new Thread(dispatcher, "Audio Thread");
audioThread.start();
And moving graph:
// Hold-to-play: ACTION_DOWN starts audio playback plus a 100 ms graph-scroll
// loop on the handler; ACTION_UP cancels the loop, resets the view window
// and stops the audio. FIX: "#Override" was a markdown-mangled "@Override".
playRecordButton.setOnTouchListener(new View.OnTouchListener() {
    @Override
    public boolean onTouch(View view, MotionEvent motionevent) {
        final int action = motionevent.getAction();
        if (action == MotionEvent.ACTION_DOWN) {
            Log.i("repeatBtn", "MotionEvent.ACTION_DOWN");
            playMedia(songIndex);
            runnable = new Runnable() {
                public void run() {
                    // Slide a 5-unit window along the x axis each tick.
                    assa.setMinX(orderNr);
                    assa.setMaxX(orderNr + 5);
                    graph.invalidate();
                    if (pitchesList.size() != orderNr) {
                        orderNr++;
                    }
                    // Re-arm: 'this' is this same runnable.
                    handler.postDelayed(this, 100);
                }
            };
            handler.postDelayed(runnable, 100);
        } else if (action == MotionEvent.ACTION_UP) {
            Log.i("repeatBtn", "MotionEvent.ACTION_UP");
            handler.removeCallbacks(runnable);
            // Restore the resting window and rewind the cursor.
            assa.setMinX(-2);
            assa.setMaxX(2);
            graph.invalidate();
            orderNr = 0;
            stopAudio();
        } //end else
        return false;
    } //end onTouch
}); //end my button
I am aware that they should ultimately be ran at once but that is the reason for asking because I have tried many different ways but I am not sure which I should use.
I am using this code to play a sound
// One-shot playback: create a player for the bundled sound and release it
// as soon as playback completes, so the MediaPlayer is not leaked.
// FIX: "#Override" was a markdown-mangled "@Override".
final MediaPlayer mp = MediaPlayer.create(this, R.raw.sound);
mp.setOnCompletionListener(new OnCompletionListener() {
    @Override
    public void onCompletion(MediaPlayer mp) {
        // The callback parameter shadows the outer 'mp'; both reference
        // the same player instance.
        mp.release();
    }
});
It works fine on its own, however there was a problem after I added an animation that extends ImageView, which refreshes(by calling handler.postDelayed) the image resource at an interval about 30ms to create animation. The problem is that when the animation starts, it terminates the the playing of the sound. Here is the code for the Runnable that refreshes the ImageView.
/** Frame animation tick: shows drawable "frame_&lt;n&gt;" and re-arms itself
 * every {@code interval} ms until all frames have been displayed. */
private Runnable runnable = new Runnable () {
    public void run() {
        // Resolve the next frame's drawable by its generated name.
        final String frameName = "frame_" + frameCount;
        frameCount++;
        final int frameResId = mContext.getResources()
                .getIdentifier(frameName, "drawable", mContext.getPackageName());
        imageView.setImageResource(frameResId);
        // Schedule the next tick only while frames remain.
        if (frameCount < totalFrameCount) {
            mHandler.postDelayed(runnable, interval);
        }
    }
};
I also tried to use a thread that calls the anmiationView.postInvalidate to do the animation, however it has the same problem. Please help. Thanks
Edit:
It looks like the problem is due to WHEN the animation is called. Previously I called it in the onActivityResult of the activity. Looks like this is not the right place to call. Now I put the animation view in a popupWindow and play it there, it works properly. Not sure exactly why.
As the Handler class documentation says:
"A Handler allows you to send and process {#link Message} and Runnable
objects associated with a thread's {#link MessageQueue}. Each Handler
instance is associated with a single thread and that thread's message
queue. When you create a new Handler, it is bound to the thread /
message queue of the thread that is creating it -- from that point on,
it will deliver messages and runnables to that message queue and execute
them as they come out of the message queue."
So the problem may be that both the animation and the media-playing operations sit in
the same message queue, owned by whichever thread created the handler (say, the main thread).
If the animation loops forever, the media player will hardly get any chance to run.
you could take it a try with HandlerThread, the thread will contain a new looper for the
handler created from it, all the runnables added to that handler will be running in another
individual thread.
the animation thread and the media play thread should be running in the different threads not
scheduling in the same one.
hope, it helps.
the HandlerThread usage and some discuss looks like this :
How to create a Looper thread, then send it a message immediately?
Maybe it is caused by mis-arranged code. I tried it on my Nexus 4 running Android 4.4.2, and even with no caching at all, the animation and music work like a charm...
here is the major codes :
public class MainActivity extends Activity implements View.OnClickListener {
protected static final String TAG = "test002" ;
protected static final int UPDATE_ANI = 0x0701;
protected static final int UPDATE_END = 0x0702;
protected static final int[] ANI_IMG_IDS = {R.raw.img1, R.raw.img2, R.raw.img3, R.raw.img4,
R.raw.img5, R.raw.img6, R.raw.img7};
protected static final int[] BTN_IDS = {R.id.btnStart, R.id.btnStop};
protected android.os.Handler aniHandler = null; // async update
protected boolean isAniRunning = false ;
protected int aniImgIndex = 0 ;
protected ImageView aniImgView = null ;
protected MediaPlayer mediaPly = null ;
// animation timer
class AniUpdateRunnable implements Runnable {
public void run() {
Message msg = null ;
while (!Thread.currentThread().isInterrupted() && isAniRunning) {
msg = new Message();
msg.what = UPDATE_ANI;
aniHandler.sendMessage(msg);
try {
Thread.sleep(500);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
break ;
}
}
msg = new Message() ;
msg.what = UPDATE_END ;
aniHandler.sendMessage(msg) ;
}
}
protected void prepareMediaPlayer(MediaPlayer mp, int resource_id) {
AssetFileDescriptor afd = getResources().openRawResourceFd(resource_id);
try {
mp.reset();
mp.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getDeclaredLength());
afd.close();
mp.prepare();
} catch (IllegalArgumentException e) {
Log.d(TAG, "IlleagalArgumentException happened - " + e.toString()) ;
} catch(IllegalStateException e) {
Log.d(TAG, "IllegalStateException happened - " + e.toString()) ;
} catch(IOException e) {
Log.d(TAG, "IOException happened - " + e.toString()) ;
}
}
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
// init : buttons onclick callback
{
Button btn;
int i;
for (i = 0; i < BTN_IDS.length; i++) {
btn = (Button) findViewById(BTN_IDS[i]);
btn.setOnClickListener(this);
}
}
// init : update animation handler callback
{
aniHandler = new Handler() {
public void handleMessage(Message msg) {
switch (msg.what) {
case UPDATE_ANI:
updateAniImages();
break ;
case UPDATE_END:
updateAniEnd();
break ;
default:
break;
}
}
};
}
// init : prepare image view
{
aniImgView = (ImageView)findViewById(R.id.imgAni) ;
mediaPly = MediaPlayer.create(this, R.raw.happyny) ;
mediaPly.setLooping(true);
}
}
protected void updateAniImages() {
if(aniImgIndex >= ANI_IMG_IDS.length) {
aniImgIndex = 0 ;
}
InputStream is = getResources().openRawResource(ANI_IMG_IDS[aniImgIndex]) ;
Bitmap bmp = (Bitmap) BitmapFactory.decodeStream(is) ;
aniImgView.setImageBitmap(bmp);
aniImgIndex++ ;
}
protected void updateAniEnd() {
aniImgIndex = 0 ;
aniImgView.setImageBitmap(null);
}
#Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
#Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
#Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.btnStart:
isAniRunning = true ;
// no re-enter protectiion, should not be used in real project
new Thread(new AniUpdateRunnable()).start();
mediaPly.start();
break;
case R.id.btnStop:
isAniRunning = false ;
mediaPly.stop();
prepareMediaPlayer(mediaPly, R.raw.happyny);
break;
default:
break;
}
}
}
the major project codes and test apk should be find here :
apk installer
source code
I am writing a small app that capture the audio from the android MIC, performs a FFT on the input and then graphs the chart to the user. I am trying to do the recording and graphing concurrently (obviously with a small delay from being recorded to being graphed). I am attempting to launch two threads, one to read and one to process. However, I am having synchronization issues when I process it seems to only be receiving (or not) zeros. Any advise would be greatly appreciated. :)
public class Plotter extends Activity {
/* plotting objects */
private static GraphicalView mView;
private LineGraph line = new LineGraph();
private boolean recordAudio = true; // record?
private AudioRecord mRecorder = null; // audio object
private Menu mMenu; // app menu
private static final String LOG_TAG = "Frequency Plotter"; // debug tag
private Mfft mfft = null; // FFT class
private static final int BUF_SIZE = 8192; // amount to read in
private Thread listener = null;
private Thread processor = null;
Stack<Float> items = new Stack<Float>();
/* colors for line */
private int[] colors = {Color.BLUE,Color.CYAN,Color.DKGRAY,Color.GRAY,
Color.GREEN,Color.LTGRAY,Color.MAGENTA,Color.RED,Color.WHITE,Color.YELLOW};
private void processAudio(){
ArrayList<Double> real = new ArrayList<Double>();
try{
Random randomGenerator = new Random();
float[] in = new float[2048];
Arrays.fill(in,1);
while(true){
synchronized(items){
while(items.size() < 2048)
items.wait();
items.notifyAll();
for(int i=0; i < 2048; i++){
in[i] = items.pop();
}
}
double[] ret = mfft.fft(2048,44100,in); // get FFT of data
TimeSeries dataset = new TimeSeries( (real.size()+1)/2048 + "" );
XYSeriesRenderer renderer = new XYSeriesRenderer(); // customized renderer
// Customization time
renderer.setColor(colors[randomGenerator.nextInt(10)]);
renderer.setPointStyle(PointStyle.SQUARE);
renderer.setFillPoints(true);
line.addRenderer(renderer); // add custom renderer
for(int i = 0; i < 2048; i++){
real.add(ret[i]);
dataset.add(real.size()-1,ret[i]); // Add it to our graph
}
line.addDataset(dataset); // add data to line
mView.repaint(); // render lines
}
}catch(Exception e){
Log.e(LOG_TAG, e + " ");
}
}
private void writeToBuffer(short[] in) {
synchronized(items){
for(int i = 0; i < BUF_SIZE; i++){ // copy to create float
items.push((float)in[i]);
}
items.notifyAll();
}
}
private void listen(){
final short[] in = new short[BUF_SIZE];
mRecorder = new AudioRecord(
MediaRecorder.AudioSource.MIC, // source
44100, // frequency (HERTZ)
AudioFormat.CHANNEL_IN_MONO, // channel
AudioFormat.ENCODING_PCM_16BIT, // format
BUF_SIZE // size data packet
);
mRecorder.startRecording();
while(recordAudio){
try{
/* read next part */
mRecorder.read(in,0,BUF_SIZE); // read from device
writeToBuffer(in);
}catch(Exception t){
/* something went horribly wrong!!!*/
recordAudio = false;
Log.e(LOG_TAG, "Failure reading" + t.getMessage());
}
}
}
private void startRecording(){
/* create a new thread that will run the recording in the background */
listener = new Thread(
new Runnable(){
public void run(){
listen();
}
});
listener.start();
/* small delay to produce */
try {
Thread.sleep(100);
} catch (InterruptedException e1) {
e1.printStackTrace();
}
/* create a thread to process the audio */
processor = new Thread(
new Runnable(){
public void run(){
processAudio();
}
});
processor.start();
}
private void stopRecording(){
recordAudio = false;
mRecorder.stop();
mRecorder.release();
mRecorder = null;
}
/** clear the current chart */
private void clearChart(){
line = new LineGraph();
this.onStart();
}
/** Called when the activity is first created. */
#Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
}
#Override
protected void onStart() {
super.onStart();
/* instantiate */
mfft = new Mfft(); // instance of the FFT class
mView = line.getView(this); // get the chart view
/* new horizontal layout */
LinearLayout ll = new LinearLayout(this);
ll.setOrientation(LinearLayout.HORIZONTAL);
ll.addView(mView); // add chart to layout
setContentView(ll); // set layout
}
#Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle item selection
switch (item.getItemId()) {
case R.id.record:
startRecording();
item.setEnabled(false); // disable start
mMenu.findItem(R.id.stop).setEnabled(true); // enable stop
return true;
case R.id.stop:
stopRecording();
item.setEnabled(false); // disable stop
mMenu.findItem(R.id.clear).setEnabled(true); // enable stop
return true;
case R.id.clear:
clearChart(); // clear chart
item.setEnabled(false); // disable clear
mMenu.findItem(R.id.record).setEnabled(true); // enable stop
return true;
default:
return super.onOptionsItemSelected(item);
}
}
#Override
public boolean onCreateOptionsMenu(Menu menu) {
mMenu = menu;
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.my_menu, menu);
return true;
}
}
Edit: Added full definitions.
several thoughts...
similar example code, Audalyzer
Unfortunately the author has stopped development on this project, but the source tarball is still available online. In particular note: org.hermit.android.io.AudioReader.java . You read the audio and pass it via a Stack object, this author uses short [] arrays. (still that does not seem like it should be your problem source...)
http://code.google.com/p/moonblink/downloads/detail?name=SourceTarball.zip
BUF_SIZE thoughts
Your audio buffer (BUF_SIZE = 8192) feels a bit small. How does that relate to AudioRecord.getMinBufferSize()? I used 2x minBufferSize, and that's without doing any calculations on it (only read/write).
Handler thoughts
I'm still reviewing your code, unclear how your threads communicate. But, your problem sounds like it needs a way for the threads to communicate a Handler.
Below are the links I've been reviewing to grasp how to use Handlers and communicate between threads effectively:
threads - nice overview of handlers (withOUT looper). w/ code example: com.indy.testing.TestMain.java.MyThread.java
http://indyvision.net/2010/02/android-threads-tutorial-part-3/
threads - ok overview of handlers and loopers
http://techtej.blogspot.com/2011/02/android-passing-data-between-main.html
threads w/ 2way comm. w/ code example: sample.thread.messaging.ThreadMessaging.java
http://codinghard.wordpress.com/2009/05/16/android-thread-messaging/
As the question mentions, I am re-using the same Intent object, reloading it with different extras data, before using it in startService. Is this OK? or do I need to do a "new Intent(context, ServiceClass)" before every startService?
I am using the Service for continuous stream of processing so I am a bit concerned if I have to create a new Intent everytime I need to send the data to the Service.
The problem is that currently, the Service seems to get executed only once. All subsequent startService calls don't seem to fire the Service. What can be the reason?
EDIT: Adding code
This is the main activity:
public class Measurement extends Activity{
AudioRecord recorder;
int iAudioBufferSize;
boolean bRecording;
int iBytesRead;
// UI stuff
Button bt_Measure;
Button bt_Stop;
RadioButton rd_Live;
RadioButton rd_DataCollection;
RadioButton rd_SampleCollection;
Thread recordThread;
Intent sampleDataIntent;
#Override
public void onDestroy() {
super.onDestroy();
if (recorder != null)
recorder.release();
}
#Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
bt_Measure = (Button) findViewById(R.id.bt_measure);
bt_Measure.setEnabled(true);
bt_Stop = (Button) findViewById(R.id.bt_stop);
rd_Live = (RadioButton) findViewById(R.id.rd_live);
rd_DataCollection = (RadioButton) findViewById(R.id.rd_datacollection);
rd_SampleCollection = (RadioButton) findViewById(R.id.rd_samplecollection);
rd_Live.setChecked(true);
bRecording = false;
int iSampleRate = AudioTrack
.getNativeOutputSampleRate(AudioManager.STREAM_SYSTEM);
iAudioBufferSize = AudioRecord.getMinBufferSize(iSampleRate,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
recorder = new AudioRecord(AudioSource.MIC, iSampleRate,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT,
iAudioBufferSize);
sampleDataIntent = new Intent(this, DetectionService.class);
bt_Measure.setOnClickListener(new OnClickListener() {
#Override
public void onClick(View v) {
if (rd_Live.isChecked()){ // radio button for Live processing
if (!bRecording) {
bt_Measure.setEnabled(false);
bt_Stop.setEnabled(true);
try {
recorder.startRecording();
bRecording = true;
recordThread = new Thread() {
#Override
public void run() {
byte[] buffer = new byte[iAudioBufferSize];
int iBufferReadResult;
int iBytesToRead = iAudioBufferSize;
iBytesRead = 0;
while (!interrupted()) {
iBufferReadResult = recorder.read(buffer,
0, iBytesToRead);
sampleDataIntent.removeExtra("THE_DATA");
sampleDataIntent.putExtra("THE_DATA", buffer);
sampleDataIntent.putExtra("FRAME_SIZE", iBufferReadResult);
startService(sampleDataIntent);
iBytesRead = iBytesRead + iBufferReadResult;
}
}
};
recordThread.start();
} catch (Exception e) {
tv_Speed.setText(e.getLocalizedMessage());
}
}
}
}
});
}
}
This is the Detection Serice:
// IntentService that accumulates raw PCM chunks sent from Measurement and,
// once more than 5 seconds' worth of bytes has arrived, converts them to
// 16-bit samples and runs them through a peak-filter chain.
// NOTE(review): this snippet is truncated in the original post — the class's
// closing brace is missing. Also note IntentService queues intents and
// handles them one at a time on a single worker thread; if onHandleIntent
// takes longer than the audio chunk interval, the queue backs up, which may
// explain the "runs only once" symptom.
public class DetectionService extends IntentService {
ByteArrayOutputStream baos;
byte[] sampleBuffer;
byte[] lastSampleBuffer;
PeakFilter peakFilter1;
PeakFilter peakFilter2;
PeakFilter peakFilter3;
PeakFilter peakFilter4;
float fSum = 0;
short sAvgAmpl = 0;
short sMaxAvg = 0;
short sPeakAmpl = 0;
public DetectionService() {
super("DetectionService");
// Filters built once per service instance; parameters appear to be
// (frequency, bandwidth, sample rate) — TODO confirm against PeakFilter.
baos = new ByteArrayOutputStream();
peakFilter1 = new PeakFilter(260, 20, 44100);
peakFilter2 = new PeakFilter(260, 20, 44100);
peakFilter3 = new PeakFilter(260, 20, 44100);
peakFilter4 = new PeakFilter(260, 20, 44100);
}
#Override
protected void onHandleIntent(Intent sampleDataIntent) {
try {
// Append this chunk to the running capture buffer.
baos.write(sampleDataIntent.getByteArrayExtra("THE_DATA"));
} catch (IOException e1) {
e1.printStackTrace();
}
// NOTE(review): the comparison treats baos.size() (bytes) as samples;
// at 16-bit PCM, 5 seconds is 44100 * 5 * 2 bytes — verify intent.
if (baos.size() > 44100 * 5) // 5 second frame
sampleBuffer = baos.toByteArray();
else
return;
try {
// NON THREAD IMPEMENTATION
// Threaded implementation has the same Try..Catch executed in
// separate thread. Neither seems to be running more than once.
// Reassemble little-endian byte pairs into signed 16-bit samples.
short[] preFilterBuffer = new short[sampleBuffer.length/2];
int iSample = 0;
for (int i = 0, j = 0; i < preFilterBuffer.length; i++, j+=2){
preFilterBuffer[iSample] = (short) (sampleBuffer[j+1] << 8 | sampleBuffer[j]);
iSample++;
}
short[] FilteredSamples = new short[preFilterBuffer.length];
// Filters are not spawning threads. Purely Math processing.
peakFilter1.filter(FilteredSamples, preFilterBuffer, preFilterBuffer.length);
peakFilter1.amplify(FilteredSamples, FilteredSamples, 3);
peakFilter2.filter(FilteredSamples, FilteredSamples, FilteredSamples.length);
peakFilter2.amplify(FilteredSamples, FilteredSamples, 3);
peakFilter3.filter(FilteredSamples, FilteredSamples, FilteredSamples.length);
peakFilter3.amplify(FilteredSamples, FilteredSamples, 2);
peakFilter4.filter(FilteredSamples, FilteredSamples, FilteredSamples.length);
// LOT OF MATH on filtered samples
// ......
//
} catch (Exception e) {
e.printStackTrace(); }
}
You can add methods to your service, for example putData(...). After starting the service, as long as it is alive, you can call those methods on it like on any other object.