Sound locator in Android

I am creating an app that records voice and determines the frequency of the sound:
package com.example.recordsound;
import edu.emory.mathcs.jtransforms.fft.DoubleFFT_1D;
import ca.uol.aig.fftpack.RealDoubleFFT;
public class MainActivity extends Activity implements OnClickListener{
int audioSource = MediaRecorder.AudioSource.MIC; // Audio source is the device MIC
int channelConfig = AudioFormat.CHANNEL_IN_MONO; // Recording in mono
int audioEncoding = AudioFormat.ENCODING_PCM_16BIT; // Records in 16bit
private DoubleFFT_1D fft; // The fft double array
private RealDoubleFFT transformer;
int blockSize = 256; // deal with this many samples at a time
int sampleRate = 8000; // Sample rate in Hz
public double frequency = 0.0; // the frequency given
RecordAudio recordTask; // Creates a Record Audio command
TextView tv; // Creates a text view for the frequency
boolean started = false;
Button startStopButton;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
tv = (TextView) findViewById(R.id.textView1);
startStopButton = (Button) findViewById(R.id.button1);
startStopButton.setOnClickListener(this); // register the click handler, or onClick never fires
transformer = new RealDoubleFFT(blockSize); // initialize the FFT before the task uses it
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
private class RecordAudio extends AsyncTask<Void, Double, Void>{
@Override
protected Void doInBackground(Void... params){
/*Calculates the fft and frequency of the input*/
//try{
int bufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioEncoding); // Gets the minimum buffer needed
AudioRecord audioRecord = new AudioRecord(audioSource, sampleRate, channelConfig, audioEncoding, bufferSize); // The RAW PCM sample recording
short[] buffer = new short[blockSize]; // the raw PCM samples as 16-bit shorts
// double[] audioDataDoubles = new double[(blockSize*2)]; // Same values as above, as doubles
// -----------------------------------------------
double[] re = new double[blockSize];
double[] im = new double[blockSize];
double[] magnitude = new double[blockSize];
// ----------------------------------------------------
double[] toTransform = new double[blockSize];
// (views must not be touched from a background thread; use publishProgress instead)
// fft = new DoubleFFT_1D(blockSize);
try{
audioRecord.startRecording(); //Start
}catch(Throwable t){
Log.e("AudioRecord", "Recording Failed");
}
while(started){
/* Reads the data from the microphone. It takes in data
 * up to the size of the window "blockSize". The data is
 * placed into "buffer". The int returned is the number
 * of shorts that were read. */
int bufferReadResult = audioRecord.read(buffer, 0, blockSize);
// Read in the data from the mic to the array
for(int i = 0; i < blockSize && i < bufferReadResult; i++) {
/* Dividing the short by 32768.0 gives us the
 * result in the range -1.0 to 1.0.
 * Data for complexForward is given back
 * as two numbers in sequence, so audioDataDoubles
 * needs to be twice as large. */
// audioDataDoubles[2*i] = (double) buffer[i]/32768.0; // signed 16 bit
//audioDataDoubles[(2*i)+1] = 0.0;
toTransform[i] = (double) buffer[i] / 32768.0; // signed 16 bit
}
//audiodataDoubles now holds data to work with
// fft.complexForward(audioDataDoubles);
transformer.ft(toTransform);
//------------------------------------------------------------------------------------------
// Calculate the Real and imaginary and Magnitude.
// A real-to-real FFT packs its output, so only blockSize/2 bins are
// meaningful; iterating all the way to blockSize would index past the array.
for (int i = 0; i < blockSize / 2; i++) {
    // real part, then imaginary part, stored in sequence
    re[i] = toTransform[i * 2];
    im[i] = toTransform[(i * 2) + 1];
    // magnitude = sqrt(real^2 + imaginary^2)
    magnitude[i] = Math.sqrt((re[i] * re[i]) + (im[i] * im[i]));
}
double peak = -1.0;
int peakIndex = 0;
// Find the bin with the largest magnitude
for (int i = 0; i < blockSize / 2; i++) {
    if (peak < magnitude[i]) {
        peak = magnitude[i];
        peakIndex = i;
    }
}
// Convert the peak bin index (not the magnitude value) to Hz:
// frequency = binIndex * sampleRate / fftSize
frequency = (double) peakIndex * sampleRate / blockSize;
//----------------------------------------------------------------------------------------------
/* calls onProgressUpdate
* publishes the frequency
*/
publishProgress(frequency);
}
// Stop and release once the loop exits; stopping inside the
// loop would end capture after the first block
try {
    audioRecord.stop();
    audioRecord.release();
} catch (IllegalStateException e) {
    Log.e("Stop failed", e.toString());
}
// }
return null;
}
@Override
protected void onProgressUpdate(Double... frequencies) {
//print the frequency
String info = Double.toString(frequencies[0]);
tv.setText(info);
}
}
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
if(started){
started = false;
startStopButton.setText("Start");
recordTask.cancel(true);
} else {
started = true;
startStopButton.setText("Stop");
recordTask = new RecordAudio();
recordTask.execute();
}
}
}
How can I detect the direction a sound comes from with a phone?
I want to display an arrow that rotates toward the sound's direction.
Thank you.
Is a sound locator possible with an Android phone alone,
or do I need to build a robot or an external sensor?
Thanks.
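Direction finding needs at least two microphones with a known spacing, which is why an external sensor array (or a robot carrying one) is the more realistic route; a phone normally exposes only one usable AudioRecord stream, and even devices with a second noise-cancelling mic rarely let you read both. For illustration only, here is a minimal sketch of the classic time-difference-of-arrival (TDOA) approach, assuming you can somehow obtain two synchronized channels (for example CHANNEL_IN_STEREO on hardware that truly records two mics); estimateAngle, micDistance, and the mic spacing are hypothetical names, not an Android API:
// Hedged sketch: estimate the bearing of a sound from two mic channels.
// All names below are illustrative; nothing here comes from the question.
static double estimateAngle(short[] left, short[] right,
                            int sampleRate, double micDistance) {
    final double SPEED_OF_SOUND = 343.0; // m/s
    int maxLag = (int) Math.ceil(micDistance / SPEED_OF_SOUND * sampleRate);
    int bestLag = 0;
    double bestCorr = Double.NEGATIVE_INFINITY;
    // Brute-force cross-correlation over the physically possible lags
    for (int lag = -maxLag; lag <= maxLag; lag++) {
        double corr = 0;
        for (int i = 0; i < left.length; i++) {
            int j = i + lag;
            if (j >= 0 && j < right.length) {
                corr += (double) left[i] * right[j];
            }
        }
        if (corr > bestCorr) {
            bestCorr = corr;
            bestLag = lag;
        }
    }
    double delay = (double) bestLag / sampleRate;      // inter-mic delay, seconds
    double s = (SPEED_OF_SOUND * delay) / micDistance; // sin of the bearing
    s = Math.max(-1.0, Math.min(1.0, s));              // clamp rounding error
    return Math.toDegrees(Math.asin(s));               // bearing in degrees
}
The arrow view could then be turned with View.setRotation((float) angle). With a single mic, only loudness, not direction, can be estimated.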

Related

Adding Fragments and Simple UI Elements significantly slows down Audio Processing Algo

I am new to Android and I am trying to build an app that records audio and runs an FFT to get the frequency spectrum.
The complete audio buffer is 155 * 2048 samples,
i.e. 155 * AudioRecord.getMinBufferSize(44100, mono_channel, PCM_16bit).
Each chunk from the recorder is 2048 shorts; I convert the shorts to doubles and pass them to the FFT library, which returns the real and imaginary parts that I use to construct the frequency spectrum. I then append each chunk to an array.
Now here is the problem:
In app 1 there are no UI elements or fragments, just a basic button attached to a listener that executes an AsyncTask; the task reads chunks from AudioRecord and runs the FFT on them chunk by chunk (each chunk = 2048 shorts). Recording plus FFT for 155 chunks at a 44100 Hz sample rate should take about 7 seconds (2048 * 155 / 44100), but the task takes around 9 seconds, a lag of about 2 seconds (which is acceptable).
In app 2 there are 7 fragments with login and signup screens, where each fragment is separate and linked to the main activity. The same code there takes 40-45 seconds for the same 155 * 2048 chunks, a lag of 33-38 seconds. That is far too much for my purpose. What could cause so much lag in app 2, and how can I reduce it?
The FFT library code and the Complex type code are in FFT.java and Complex.java.
My application code:
private boolean is_recording = false;
private AudioRecord recorder = null;
int minimum_buffer_size = AudioRecord.getMinBufferSize(SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT);
int bufferSize = 155 * AudioRecord.getMinBufferSize(SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT);
private static final int SAMPLE_RATE = 44100;
private Thread recordingThread = null;
short[] audioBuffer = new short[bufferSize];
MainTask recordTask;
double finalData[];
Complex[] fftArray;
boolean recieved = false;
int data_trigger_point = 10;
int trigger_count = 0;
double previous_level_1 ;
double previous_level_2 ;
double previous_level_3 ;
int no_of_chunks_to_be_send = 30;
int count = 0;
short[] sendingBuffer = new short[minimum_buffer_size * no_of_chunks_to_be_send];
public static final int RequestPermissionCode = 1;
mButton = (ImageButton) view.findViewById(R.id.submit);
mButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
if (is_recording) {
mButton.setBackgroundResource(R.drawable.play);
stopRecodringWithoutTone();
}
else {
mButton.setBackgroundResource(R.drawable.wait);
is_recording = true;
recordTask = new MainTask();
recordTask.execute();
}
}
});
public class MainTask extends AsyncTask<Void, int[], Void> {
@Override
protected Void doInBackground(Void... arg0) {
try {
recorder = new AudioRecord(
MediaRecorder.AudioSource.DEFAULT,
SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT,
minimum_buffer_size);
recorder.startRecording();
short[] buffer_recording = new short[minimum_buffer_size];
int recieve_counter = 0;
while (is_recording) {
if (count < bufferSize) {
int bufferReadResult = recorder.read(buffer_recording, 0, minimum_buffer_size);
System.arraycopy(buffer_recording, 0, audioBuffer, count, buffer_recording.length);
count += bufferReadResult;
System.out.println(count);
finalData = convert_to_double(buffer_recording);
int [] magnitudes = processFFT(finalData);
}
else {
stopRecording();
}
}
}
catch (Throwable t) {
t.printStackTrace();
Log.e("V1", "Recording Failed");
}
return null;
}
@Override
protected void onProgressUpdate(int[]... magnitudes) {
}
}
private int[] processFFT(double[] data) {
    // Use the argument rather than the finalData field, so the method
    // works on whatever chunk it is given
    Complex[] fftTempArray = new Complex[data.length];
    for (int i = 0; i < data.length; i++) {
        fftTempArray[i] = new Complex(data[i], 0);
    }
fftArray = FFT.fft(fftTempArray);
int [] magnitude = new int[fftArray.length/2];
for (int i=0; i< fftArray.length/2; i++) {
magnitude[i] = (int) fftArray[i].abs();
}
return magnitude;
}
private double[] convert_to_double(short data[]) {
double[] transformed = new double[data.length];
for (int j=0;j<data.length;j++) {
transformed[j] = (double)data[j];
}
return transformed;
}
private void stopRecording() {
if (null != recorder) {
recorder.stop();
postAudio(audioBuffer);
recorder.release();
is_recording = false;
recorder = null;
recordingThread = null;
count = 0;
recieved = false;
}
}
I am not sure why there is a lag, but you can work around the problem: run two tasks, where task 1 records the data and stores it in an array, and the second task takes chunks from this array and does the FFT.
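A minimal sketch of that producer/consumer split, assuming plain threads and a java.util.concurrent.LinkedBlockingQueue instead of two AsyncTasks (recorder, is_recording, minimum_buffer_size, processFFT and convert_to_double are the names from the question's code; also needs java.util.Arrays):
final BlockingQueue<short[]> chunks = new LinkedBlockingQueue<short[]>();
// Producer: only reads from the mic, so it never falls behind
Thread recordThread = new Thread(new Runnable() {
    public void run() {
        short[] buf = new short[minimum_buffer_size];
        while (is_recording) {
            int n = recorder.read(buf, 0, buf.length);
            if (n > 0) {
                chunks.offer(Arrays.copyOf(buf, n)); // hand off a copy
            }
        }
    }
});
// Consumer: drains the queue and does the FFT work at its own pace
Thread fftThread = new Thread(new Runnable() {
    public void run() {
        try {
            while (is_recording || !chunks.isEmpty()) {
                short[] chunk = chunks.poll(100, TimeUnit.MILLISECONDS);
                if (chunk != null) {
                    int[] magnitudes = processFFT(convert_to_double(chunk));
                }
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }
});
recordThread.start();
fftThread.start();
This keeps recording real-time even when the FFT falls behind; the queue absorbs the difference.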
AsyncTask runs at a lower priority to make sure the UI thread remains responsive; thus, the more UI work there is, the more the AsyncTask is delayed.
You're facing the delay because Android schedules BACKGROUND-priority threads into a Linux cgroup that has to live with about 10% of CPU time altogether.
If you go with THREAD_PRIORITY_BACKGROUND + THREAD_PRIORITY_MORE_FAVORABLE,
your thread is lifted out of the 10% limitation.
So your code will look like this:
protected final Void doInBackground(Void... arg0) {
Process.setThreadPriority(THREAD_PRIORITY_BACKGROUND + THREAD_PRIORITY_MORE_FAVORABLE);
...//your code here
}
If that doesn't work on the next call of doInBackground, it is because Android resets the priority by default. In that case, try Process.THREAD_PRIORITY_FOREGROUND.
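Spelled out with the android.os.Process constants (a sketch; the elided body is the question's recording loop):
@Override
protected Void doInBackground(Void... arg0) {
    // Re-apply the priority on every call, since Android resets it
    android.os.Process.setThreadPriority(
            android.os.Process.THREAD_PRIORITY_BACKGROUND
            + android.os.Process.THREAD_PRIORITY_MORE_FAVORABLE);
    // ... recording and FFT loop as in the question ...
    return null;
}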

How to save recorded sound to file by using AudioRecord in android?

I'm developing an Android app (compileSdkVersion 23) that records audio using AudioRecord; the reason for using it is to get the frequency after an FFT in real time.
Besides that, I need to save the recorded sound so I can check it (for that part, tracking the frequency is unnecessary).
How do I save the recorded sound to a file when using AudioRecord on Android?
Also, am I using AudioRecord correctly?
Here is code:
public class MainActivity extends Activity {
int frequency = 8000;
int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
AudioRecord audioRecord;
RecordAudio recordTask;
int blockSize;// = 256;
boolean started = false;
boolean CANCELLED_FLAG = false;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
blockSize = 256;
final Button btRec = (Button) findViewById(R.id.btRec);
btRec.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (started == true) {
//started = false;
CANCELLED_FLAG = true;
//recordTask.cancel(true);
try{
audioRecord.stop();
}
catch(IllegalStateException e){
Log.e("Stop failed", e.toString());
}
btRec.setText("Start");
// canvasDisplaySpectrum.drawColor(Color.BLACK);
}
else {
started = true;
CANCELLED_FLAG = false;
btRec.setText("Stop");
recordTask = new RecordAudio();
recordTask.execute();
}
}
});
}
private class RecordAudio extends AsyncTask<Void, double[], Boolean> {
@Override
protected Boolean doInBackground(Void... params) {
int bufferSize = AudioRecord.getMinBufferSize(frequency,
channelConfiguration, audioEncoding);
audioRecord = new AudioRecord(
MediaRecorder.AudioSource.DEFAULT, frequency,
channelConfiguration, audioEncoding, bufferSize);
int bufferReadResult;
short[] buffer = new short[blockSize];
double[] toTransform = new double[blockSize];
try {
audioRecord.startRecording();
} catch (IllegalStateException e) {
Log.e("Recording failed", e.toString());
}
while (started) {
if (isCancelled() || (CANCELLED_FLAG == true)) {
started = false;
//publishProgress(cancelledResult);
Log.d("doInBackground", "Cancelling the RecordTask");
break;
} else {
bufferReadResult = audioRecord.read(buffer, 0, blockSize);
for (int i = 0; i < blockSize && i < bufferReadResult; i++) {
toTransform[i] = (double) buffer[i] / 32768.0; // signed 16 bit
}
//transformer.ft(toTransform);
//publishProgress(toTransform);
}
}
return true;
}
}
}
You have to download your file and save it in the cache; then, for any request, check whether the cache file is available: if it is, use it, otherwise request a new file.
For complete help, look into one of my answers: Download and cache media files
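That answer covers caching downloads rather than the recording itself. For the question as asked, here is a minimal sketch of writing recorded 16-bit mono PCM to a .wav file; writeWav and its parameters are illustrative names, not from the question (imports from java.io):
// Hedged sketch: wrap raw PCM shorts in a minimal WAV header (16-bit mono).
static void writeWav(File out, short[] pcm, int sampleRate) throws IOException {
    int dataLen = pcm.length * 2;
    DataOutputStream dos = new DataOutputStream(
            new BufferedOutputStream(new FileOutputStream(out)));
    try {
        dos.writeBytes("RIFF");
        dos.writeInt(Integer.reverseBytes(36 + dataLen)); // chunk size, little-endian
        dos.writeBytes("WAVE");
        dos.writeBytes("fmt ");
        dos.writeInt(Integer.reverseBytes(16));             // fmt chunk size
        dos.writeShort(Short.reverseBytes((short) 1));      // audio format: PCM
        dos.writeShort(Short.reverseBytes((short) 1));      // channels: mono
        dos.writeInt(Integer.reverseBytes(sampleRate));
        dos.writeInt(Integer.reverseBytes(sampleRate * 2)); // byte rate
        dos.writeShort(Short.reverseBytes((short) 2));      // block align
        dos.writeShort(Short.reverseBytes((short) 16));     // bits per sample
        dos.writeBytes("data");
        dos.writeInt(Integer.reverseBytes(dataLen));
        for (short s : pcm) {
            dos.writeShort(Short.reverseBytes(s));          // samples, little-endian
        }
    } finally {
        dos.close();
    }
}
You would accumulate the short[] blocks read in doInBackground into one array and call this once after audioRecord.stop().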

Generating frequency with AudioTrack

I found the following code that generates 'noise'. I want to generate a tone instead. As far as I understand, there is some formula involving sin() to generate the tone.
This line generates the noise: rnd.nextBytes(noiseData);
I tried assigning specific values manually to all array elements, but then there is no sound. I found code that generates a tone, but it doesn't stream it. When I tried to pass its data to my code, the tone played for a few seconds and then the app crashed.
Any suggestions on how I can generate a tone from this? Thanks
public class Internal extends Activity
{
@Override
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
}
public void onPlayClicked(View v)
{
start();
}
public void onStopClicked(View v)
{
stop();
}
boolean m_stop = false;
AudioTrack m_audioTrack;
Thread m_noiseThread;
Runnable m_noiseGenerator = new Runnable()
{
public void run()
{
Thread.currentThread().setPriority(Thread.MIN_PRIORITY);
/* 8000 bytes per second, 1000 bytes = 125 ms */
byte [] noiseData = new byte[1000];
Random rnd = new Random();
while(!m_stop)
{
rnd.nextBytes(noiseData);
m_audioTrack.write(noiseData, 0, noiseData.length);
}
}
};
void start()
{
m_stop = false;
/* 8000 bytes per second*/
m_audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 8000, AudioFormat.CHANNEL_OUT_MONO,
AudioFormat.ENCODING_PCM_8BIT, 8000 /* 1 second buffer */,
AudioTrack.MODE_STREAM);
m_audioTrack.play();
m_noiseThread = new Thread(m_noiseGenerator);
m_noiseThread.start();
}
void stop()
{
m_stop = true;
m_audioTrack.stop();
}
}
This is the code that generates a tone, but when I feed its output to my write buffer, it plays for a second and then the app crashes, even though I changed 'AudioFormat.ENCODING_PCM_8BIT' to 'AudioFormat.ENCODING_PCM_16BIT':
private final int duration = 1; // seconds
private final int sampleRate = 8000;
private final int numSamples = duration * sampleRate;
private final double sample[] = new double[numSamples];
private final double freqOfTone = 440; // hz
private final byte generatedSnd[] = new byte[2 * numSamples];
void genTone(){
// fill out the array
for (int i = 0; i < numSamples; ++i) {
sample[i] = Math.sin(2 * Math.PI * i / (sampleRate/freqOfTone));
}
// convert to 16 bit pcm sound array
// assumes the sample buffer is normalised.
int idx = 0;
for (final double dVal : sample) {
// scale to maximum amplitude
final short val = (short) ((dVal * 32767));
// in 16 bit wav PCM, first byte is the low order byte
generatedSnd[idx++] = (byte) (val & 0x00ff);
generatedSnd[idx++] = (byte) ((val & 0xff00) >>> 8);
}
}
Make sure that your noiseData buffer size is greater than or equal to the minimum buffer size AudioTrack reports via getMinBufferSize:
http://developer.android.com/reference/android/media/AudioTrack.html#getMinBufferSize(int, int, int)
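Combining the two snippets, a minimal sketch of streaming a continuous tone: keep the phase in an accumulator that survives across chunks, so the sine wave stays continuous at chunk boundaries (the 440 Hz tone, chunk size, and variable names are illustrative; m_stop is the flag from the question's code):
int sampleRate = 8000;
int minBuf = AudioTrack.getMinBufferSize(sampleRate,
        AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
        AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
        Math.max(minBuf, 8000), AudioTrack.MODE_STREAM);
track.play();
double phase = 0.0;
double phaseInc = 2 * Math.PI * 440.0 / sampleRate; // 440 Hz tone
short[] chunk = new short[1024];
while (!m_stop) {
    for (int i = 0; i < chunk.length; i++) {
        chunk[i] = (short) (Math.sin(phase) * 32767 * 0.8); // some headroom
        phase += phaseInc;
        if (phase > 2 * Math.PI) phase -= 2 * Math.PI; // keep phase bounded
    }
    track.write(chunk, 0, chunk.length); // blocks until there is room
}
track.stop();
track.release();
Crashes like the one described often come from writing 16-bit data into a track created with ENCODING_PCM_8BIT, or with a buffer smaller than getMinBufferSize, which is why the check above matters.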

Converting a byte array (audio buffer from AudioRecord) to float array and vice versa

edit: I've edited the code to show my fruitless (and maybe completely stupid) attempt to solve the problem myself. With this code I only get an awful rattle-like sound.
I'm rather new to Android app development, and my uncle asked me to develop an app for him that records audio and simultaneously plays it back. As if this weren't enough, he also wants me to add a frequency filter. That's actually beyond my skills, but I told him I would try anyway.
I am able to record audio and play it with the AudioRecord and AudioTrack classes, respectively, but I have big problems with the frequency filter. I've used Google and searched this forum, of course, and found some promising code snippets, but nothing really worked.
This is the (working) code I have so far:
public class MainActivity extends ActionBarActivity {
float freq_min;
float freq_max;
boolean isRecording = false;
int SAMPLERATE = 8000;
int AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
Thread recordingThread = null;
AudioRecord recorder;
Button cmdPlay;
EditText txtMinFrequency, txtMaxFrequency;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
cmdPlay = (Button)findViewById(R.id.bPlay);
cmdPlay.setOnClickListener(onClickListener);
txtMinFrequency = (EditText)findViewById(R.id.frequency_min);
txtMaxFrequency = (EditText)findViewById(R.id.frequency_max);
}
private OnClickListener onClickListener = new OnClickListener() {
@Override
public void onClick(View v) {
if (!isRecording) {
freq_min = Float.parseFloat(txtMinFrequency.getText().toString());
freq_max = Float.parseFloat(txtMaxFrequency.getText().toString());
isRecording = true;
cmdPlay.setText("stop");
startRecording();
}
else {
isRecording = false;
cmdPlay.setText("play");
stopRecording();
}
}
};
public void startRecording() {
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLERATE,
AudioFormat.CHANNEL_IN_MONO, AUDIO_FORMAT, 1024);
recorder.startRecording();
recordingThread = new Thread(new Runnable(){
public void run() {
recordAndWriteAudioData();
}
});
recordingThread.start();
}
public void stopRecording() {
isRecording = false;
recorder.stop();
recorder.release();
recorder = null;
recordingThread = null;
}
private void recordAndWriteAudioData() {
byte audioData[] = new byte[1024];
AudioTrack at = new AudioTrack(AudioManager.STREAM_MUSIC, SAMPLERATE, AudioFormat.CHANNEL_OUT_MONO,
AudioFormat.ENCODING_PCM_16BIT, 1024, AudioTrack.MODE_STREAM);
at.play();
while (isRecording) {
recorder.read(audioData, 0, 1024);
// Converting from byte array to float array and dividing the floats by 32768 to get values between -1 and 1
float[] audioDataF = shortToFloat(byteToShort(audioData));
for (int i = 0; i < audioDataF.length; i++) {
audioDataF[i] /= 32768.0;
}
// Fast Fourier Transform
FloatFFT_1D fft = new FloatFFT_1D(512);
fft.realForward(audioDataF);
// fiter frequencies
for(int fftBin = 0; fftBin < 512; fftBin++){
float frequency = (float)fftBin * (float)SAMPLERATE / (float)512;
if(frequency < freq_min || frequency > freq_max){
int real = 2 * fftBin;
int imaginary = 2 * fftBin + 1;
audioDataF[real] = 0;
audioDataF[imaginary] = 0;
}
}
//inverse FFT
fft.realInverse(audioDataF, false);
// multiplying the floats by 32768
for (int i = 0; i < audioDataF.length; i++) {
audioDataF[i] *= 32768.0;
}
// converting float array back to short array
audioData = shortToByte(floatToShort(audioDataF));
at.write(audioData, 0, 1024);
}
at.stop();
at.release();
}
public static short[] byteToShort (byte[] byteArray){
short[] shortOut = new short[byteArray.length / 2];
ByteBuffer byteBuffer = ByteBuffer.wrap(byteArray);
for (int i = 0; i < shortOut.length; i++) {
shortOut[i] = byteBuffer.getShort();
}
return shortOut;
}
public static float[] shortToFloat (short[] shortArray){
float[] floatOut = new float[shortArray.length];
for (int i = 0; i < shortArray.length; i++) {
floatOut[i] = shortArray[i];
}
return floatOut;
}
public static short[] floatToShort (float[] floatArray){
short[] shortOut = new short[floatArray.length];
for (int i = 0; i < floatArray.length; i++) {
shortOut[i] = (short) floatArray[i];
}
return shortOut;
}
public static byte[] shortToByte (short[] shortArray){
byte[] byteOut = new byte[shortArray.length * 2];
ByteBuffer.wrap(byteOut).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().put(shortArray);
return byteOut;
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
}
On the site Filter AudioRecord Frequencies, I found code that uses an FFT to filter frequencies.
I hope this code is correct because, to be honest, I wouldn't know how to alter it if it weren't. The actual problem is that the audio buffer is a byte array, but the FFT needs a float array with values between -1 and 1 (and after the inverse FFT I have to convert the float array back to a byte array).
I simply can't find code anywhere to do this, so any help would be highly appreciated!
The byteToShort conversion is incorrect. While the data and most Android devices are little-endian, ByteBuffer uses big-endian order by default. So we need to force it to little-endian before the conversion to short:
public static short[] byteToShort (byte[] byteArray){
short[] shortOut = new short[byteArray.length / 2];
ByteBuffer byteBuffer = ByteBuffer.wrap(byteArray);
byteBuffer.order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(shortOut);
return shortOut;
}
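A quick round-trip check (illustrative; it uses the question's shortToByte, which already forces little-endian, together with the fixed byteToShort above, plus java.util.Arrays):
short[] samples = { -32768, -1, 0, 1, 32767 };
byte[] bytes = shortToByte(samples);
short[] back = byteToShort(bytes);
System.out.println(Arrays.equals(samples, back)); // should print true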

A simple Real-Time Mic Meter in Android

I had help from the book Pro Android Media...
Here is the code:
public class MicMeter extends Activity implements OnClickListener {
RecordAudio recordTask;
int blocksize = 256;
int frequency = 8000;
int channelConfig = AudioFormat.CHANNEL_CONFIGURATION_MONO;
int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
TextView txt;
Button start;
boolean started = false;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_mic_meter);
start = (Button)findViewById(R.id.button1);
txt = (TextView)findViewById(R.id.textView1);
start.setOnClickListener(this);
}
private class RecordAudio extends AsyncTask <Void,double[],Void>{
@Override
protected Void doInBackground(Void... params) {
try{
int bufferSize = AudioRecord.getMinBufferSize(frequency,channelConfig,audioEncoding);
AudioRecord audioRecord = new AudioRecord( MediaRecorder.AudioSource.MIC, frequency, channelConfig, audioEncoding, bufferSize);
short[] buffer = new short[blocksize];
double[] meter = new double[blocksize];
audioRecord.startRecording();
while(started){
int bufferReadResult = audioRecord.read(buffer, 0, blocksize);
for (int i = 0; i < blocksize && i < bufferReadResult; i++) {
meter[i] = (double) buffer[i] / 32768.0; // signed 16 bit
}
publishProgress(meter);
}
audioRecord.stop();
}catch (Throwable t) {
Log.e("AudioRecord","RecordingFail");
}
return null;
}
@Override
protected void onProgressUpdate(double[]... meter) {
for(int i = 0 ; i < meter[0].length ; i++){
double helper = i;
txt.setText(Double.toString(helper));
}
}
}
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
if(started){
recordTask.cancel(true);
}else{
started = true;
recordTask = new RecordAudio();
recordTask.execute();
}
}
}
When I press the button, it shows 255.0 and then it doesn't respond...
Is there any way to fix it?
Is there a better version of this?
Thanks
for(int i = 0 ; i < meter[0].length ; i++){
double helper = i;
txt.setText(Double.toString(helper));
setText overwrites the old value, so only the last call will show. The last call sets it to helper, which will always be meter[0].length - 1 (255 here). Since that's a fixed number, it won't change.
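What a mic meter usually wants is one level per buffer rather than one setText per sample. A minimal sketch, replacing the per-sample loop inside the question's doInBackground (buffer, bufferReadResult, and publishProgress as in the question):
// Compute a single RMS level for the whole buffer
double sumSquares = 0;
for (int i = 0; i < bufferReadResult; i++) {
    double s = buffer[i] / 32768.0; // scale to -1.0..1.0
    sumSquares += s * s;
}
if (bufferReadResult > 0) {
    double rms = Math.sqrt(sumSquares / bufferReadResult);
    publishProgress(new double[] { rms });
}
onProgressUpdate then only needs txt.setText(Double.toString(meter[0][0]));. Also note that onClick never sets started = false when cancelling, so the background loop never actually stops.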
