Audio streaming in Android

I'm trying to stream audio from an Android phone to my PC, but I keep getting an exception.
The code behind the send button originally recorded audio and played it back on the same phone; I altered it to send the recorded data over a socket instead.
I think it's a memory exception, but what should I do?
byte[] buffer = null;
byte[] byteBuf = null;
byte[][] buffers;
switch (v.getId()) {
case R.id.btnStart: {
    Toast.makeText(AudioRecordingActivity.this, "Start Recording",
            Toast.LENGTH_SHORT).show();
    enableButtons(true);
    // here is where I start my code
    AudioRecord recorder = null;
    boolean stopped = false;
    // AudioTrack track = null;
    buffers = new byte[65535][160000];
    int ix = 0;
    /*
     * Initialize buffer to hold continuously recorded audio data, start recording, and start
     * playback.
     */
    try {
        try {
            socket = new Socket("172.16.231.125", 4444);
            socketOutputStream = socket.getOutputStream();
            Toast.makeText(AudioRecordingActivity.this, "Socket was made",
                    Toast.LENGTH_SHORT).show();
        } catch (UnknownHostException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (IOException e) {
            // TODO Auto-generated catch block
            Toast.makeText(AudioRecordingActivity.this, "Problem",
                    Toast.LENGTH_SHORT).show();
            e.printStackTrace();
        }
        int N = AudioRecord.getMinBufferSize(8000, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        recorder = new AudioRecord(AudioSource.MIC, 8000, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, N * 10);
        // track = new AudioTrack(AudioManager.STREAM_MUSIC, 8000,
        //         AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, N*10, AudioTrack.MODE_STREAM);
        recorder.startRecording();
        // track.play();
        /*
         * Loops until something outside of this thread stops it.
         * Reads the data from the recorder and writes it to the audio track for playback.
         */
        while (true) {
            buffer = buffers[ix++ % buffers.length];
            N = recorder.read(buffer, 0, buffer.length);
            // ByteBuffer byteBuf = ByteBuffer.allocate(2*N);
            // byteBuf = new byte[2*N];
            /* int i = 0;
            while (N >= i) {
                byte b = (byte) (buffer[i] / 256);
                byteBuf[i] = b;
                i++;
            } */
            // byte[] bb = byteBuf.array();
            try {
                socketOutputStream.write(buffer);
                socketOutputStream.flush();
                Toast.makeText(AudioRecordingActivity.this, "Data was sent",
                        Toast.LENGTH_SHORT).show();
            } catch (UnknownHostException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
                Toast.makeText(AudioRecordingActivity.this, "can't send",
                        Toast.LENGTH_SHORT).show();
            } catch (IOException e) {
                // TODO Auto-generated catch block
                Toast.makeText(AudioRecordingActivity.this, "can't send",
                        Toast.LENGTH_SHORT).show();
                e.printStackTrace();
            }
            // track.write(buffer, 0, buffer.length);
        }
    } catch (Throwable x) {
        Toast.makeText(AudioRecordingActivity.this, "Error reading voice audio",
                Toast.LENGTH_SHORT).show();
    }
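Two likely culprits stand out in this snippet. buffers = new byte[65535][160000] asks for roughly 10 GB (65535 × 160000 bytes), so an OutOfMemoryError is almost guaranteed, and all the socket work runs in the click handler on the UI thread, which on API 11+ raises NetworkOnMainThreadException. Below is a minimal sketch of the same send loop with a single reusable buffer on a background thread; the host and port come from the question, and recording is an assumed volatile boolean flag:
// Sketch only: one small reusable buffer, all socket and recorder work off the UI thread.
new Thread(new Runnable() {
    public void run() {
        AudioRecord recorder = null;
        try {
            Socket socket = new Socket("172.16.231.125", 4444); // host/port from the question
            OutputStream out = socket.getOutputStream();
            int minSize = AudioRecord.getMinBufferSize(8000,
                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
            recorder = new AudioRecord(MediaRecorder.AudioSource.MIC, 8000,
                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, minSize * 10);
            recorder.startRecording();
            byte[] buffer = new byte[minSize]; // one buffer, reused on every pass
            int n;
            // 'recording' is a volatile boolean flag you flip from the stop button
            while (recording && (n = recorder.read(buffer, 0, buffer.length)) > 0) {
                out.write(buffer, 0, n); // send only the bytes actually read
            }
            out.close();
            socket.close();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            if (recorder != null) {
                recorder.stop();
                recorder.release();
            }
        }
    }
}).start();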

Related

How to Set Sample Rate on Text to Speech - Android

In the output of my Text to Speech, I need to set the sampling rate to about 32000 Hz, with pitch 1 and speech rate 0.2 (which I already did). But I can't set the sample rate.
tts = new TextToSpeech(getApplicationContext(), new TextToSpeech.OnInitListener() {
    @Override
    public void onInit(int status) {
        if (status != TextToSpeech.ERROR) {
            tts.setLanguage(Locale.US);
            tts.setSpeechRate((float) 0.2);
            tts.setPitch((float) 1);
        }
    }
}, TextToSpeech.Engine.KEY_FEATURE_NETWORK_SYNTHESIS);
I used AudioTrack to set the sample rate, but it takes a lot of time because I first have to synthesizeToFile with TTS and then play the file in AudioTrack.
HashMap<String, String> myHasRead = new HashMap<String, String>();
myHasRead.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, outPutS);
String StorePath = Environment.getExternalStorageDirectory().getAbsolutePath();
File myF = new File(StorePath + "/tempAudio.wav");
try {
    myF.createNewFile();
} catch (IOException e) {
    e.printStackTrace();
}
tts.setOnUtteranceProgressListener(new TtsUtteranceListener());
tts.synthesizeToFile("Bla Bla bla", myHasRead, StorePath + "/tempAudio.wav");
....
private class TtsUtteranceListener extends UtteranceProgressListener {
    @Override
    public void onStart(String utteranceId) {
    }

    @Override
    public void onDone(String utteranceId) {
        playWav();
    }

    @Override
    public void onError(String utteranceId) {
    }
}
public void playWav() {
    int minBufferSize = AudioTrack.getMinBufferSize(32000, AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT);
    int bufferSize = 512;
    AudioTrack at = new AudioTrack(AudioManager.STREAM_MUSIC, 32000, AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT, minBufferSize, AudioTrack.MODE_STREAM);
    String filepath = Environment.getExternalStorageDirectory().getAbsolutePath();
    int i = 0;
    byte[] s = new byte[bufferSize];
    try {
        FileInputStream fin = new FileInputStream(filepath + "/tempAudio.wav");
        DataInputStream dis = new DataInputStream(fin);
        at.play();
        while ((i = dis.read(s, 0, bufferSize)) > -1) {
            at.write(s, 0, i);
        }
        at.stop();
        at.release();
        dis.close();
        fin.close();
    } catch (FileNotFoundException e) {
        // TODO
        e.printStackTrace();
    } catch (IOException e) {
        // TODO
        e.printStackTrace();
    }
}
Is there any way to set the sample rate directly on TTS, like tts.setSampleRate(32000);, or to get a stream from TTS into AudioTrack, like DataInputStream dis = new DataInputStream(tts.speak("bla bla bla").getDataInputStream);? In short, I need a chipmunk-style Text to Speech for Android, either without synthesizeToFile or by streaming the TTS voice data directly into AudioTrack without saving the TTS output.
You can't set the TTS sampling rate directly.
I did something like this in a project (didn't use TTS); this might help you.
To play a recording with different voice types (feeding PCM recorded at one rate into an AudioTrack configured at a higher rate plays it faster and higher-pitched, and a lower rate does the opposite):
waveSampling = 90000; // Chipmunk
waveSampling = 24200; // "Slow motion"
waveSampling = 30000; // "Bane" (Batman character)
waveSampling = 18000; // Ghost
waveSampling = 70000; // Bee
waveSampling = 60000; // Woman
waveSampling = 37000; // Normal
void playRecord() throws IOException {
    int minBufferSize = AudioTrack.getMinBufferSize(8000, AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT);
    int bufferSize = 512;
    at = new AudioTrack(AudioManager.STREAM_MUSIC, waveSampling, AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT, minBufferSize, AudioTrack.MODE_STREAM);
    String filepath = Environment.getExternalStorageDirectory().getAbsolutePath();
    int i = 0;
    byte[] s = new byte[bufferSize];
    try {
        FileInputStream fin = new FileInputStream(Environment.getExternalStorageDirectory().getAbsolutePath() + "/Voice Changer/temp/" + filename + ".wav");
        DataInputStream dis = new DataInputStream(fin);
        at.play();
        while ((i = dis.read(s, 0, bufferSize)) > -1) {
            at.write(s, 0, i);
        }
        at.stop();
        at.release();
        dis.close();
        fin.close();
        openmenu();
    } catch (FileNotFoundException e) {
        // TODO
        e.printStackTrace();
    } catch (IOException e) {
        // TODO
        e.printStackTrace();
    }
}
To save the audio:
public void save() throws IOException {
    Random r = new Random();
    final int i1 = r.nextInt(80 - 65) + 65;
    File tempfile2 = new File(Environment.getExternalStorageDirectory().getAbsolutePath() + "/Voice Changer/temp/" + i1 + filename + ".wav");
    savedfile = Environment.getExternalStorageDirectory().getAbsolutePath() + "/Voice Changer/" + "VOICE CHANGER" + i1 + filename + ".mp3";
    Toast.makeText(this, "File Saved", Toast.LENGTH_SHORT).show();
    rawToWave(tempfile, tempfile2);
    File wavFile = new File(Environment.getExternalStorageDirectory().getAbsolutePath() + "/Voice Changer/temp/" + i1 + filename + ".wav");
    IConvertCallback callback = new IConvertCallback() {
        @Override
        public void onSuccess(File convertedFile) {
            File newfile = new File(Environment.getExternalStorageDirectory().getAbsolutePath() + "/Voice Changer/" + "VOICE CHANGER" + i1 + filename + ".mp3");
            File savedmp3 = new File(Environment.getExternalStorageDirectory().getAbsolutePath() + "/Voice Changer/temp/" + i1 + filename + ".mp3");
            Toast.makeText(MainActivity.this, "SUCCESS: " + newfile.getPath(), Toast.LENGTH_LONG).show();
            try {
                copyit(savedmp3, newfile);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        @Override
        public void onFailure(Exception error) {
            Toast.makeText(MainActivity.this, "ERROR: " + error.getMessage(), Toast.LENGTH_LONG).show();
        }
    };
    Toast.makeText(this, "Converting audio file...", Toast.LENGTH_SHORT).show();
    AndroidAudioConverter.with(this)
            .setFile(wavFile)
            .setFormat(cafe.adriel.androidaudioconverter.model.AudioFormat.MP3)
            .setCallback(callback)
            .convert();
}
The output will be an .mp3 file. If you want the output faster, you can use the .wav format.
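Applied to the original question, the same trick should work: let TTS synthesize at its native rate, then play the resulting PCM through an AudioTrack configured at a higher sample rate (as in playWav() above) to get the chipmunk effect. It still requires the intermediate file, though, which is what the asker hoped to avoid.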

Android Wear Audio Recorder using the ChannelAPI

I'm trying to build an audio recorder app for Android Wear. Right now, I'm able to capture audio on the watch, stream it to the phone, and save it to a file. However, the audio file has gaps or cropped parts.
I found these answered questions related to my problem (link1, link2), but they couldn't help me.
Here is my code:
First, on the watch side, I create the channel using the ChannelApi and successfully send the audio being captured on the watch to the smartphone.
// here are the variable values that I used
// 44100 Hz is currently the only rate that is guaranteed to work on all devices,
// but other rates such as 22050, 16000, and 11025 may work on some devices.
private static final int RECORDER_SAMPLE_RATE = 44100;
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_MONO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
int BufferElements2Rec = 1024;
int BytesPerElement = 2;

// start the process of recording audio
private void startRecording() {
    recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
            RECORDER_SAMPLE_RATE, RECORDER_CHANNELS,
            RECORDER_AUDIO_ENCODING, BufferElements2Rec * BytesPerElement);
    recorder.startRecording();
    isRecording = true;
    recordingThread = new Thread(new Runnable() {
        public void run() {
            writeAudioDataToPhone();
        }
    }, "AudioRecorder Thread");
    recordingThread.start();
}

private void writeAudioDataToPhone() {
    short sData[] = new short[BufferElements2Rec];
    ChannelApi.OpenChannelResult result = Wearable.ChannelApi.openChannel(googleClient, nodeId, "/mypath").await();
    channel = result.getChannel();
    Channel.GetOutputStreamResult getOutputStreamResult = channel.getOutputStream(googleClient).await();
    OutputStream outputStream = getOutputStreamResult.getOutputStream();
    while (isRecording) {
        // gets the voice output from microphone to byte format
        recorder.read(sData, 0, BufferElements2Rec);
        try {
            byte bData[] = short2byte(sData);
            outputStream.write(bData);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    try {
        outputStream.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
Then, on the smartphone side, I receive the audio data from the channel and write it to a PCM file.
public void onChannelOpened(Channel channel) {
    if (channel.getPath().equals("/mypath")) {
        Channel.GetInputStreamResult getInputStreamResult = channel.getInputStream(mGoogleApiClient).await();
        inputStream = getInputStreamResult.getInputStream();
        writePCMToFile(inputStream);
        MainActivity.this.runOnUiThread(new Runnable() {
            public void run() {
                Toast.makeText(MainActivity.this, "Audio file received!", Toast.LENGTH_SHORT).show();
            }
        });
    }
}

public void writePCMToFile(InputStream inputStream) {
    OutputStream outputStream = null;
    try {
        // write the inputStream to a FileOutputStream
        outputStream = new FileOutputStream(new File("/sdcard/wearRecord.pcm"));
        int read = 0;
        byte[] bytes = new byte[1024];
        while ((read = inputStream.read(bytes)) != -1) {
            outputStream.write(bytes, 0, read);
        }
        System.out.println("Done writing PCM to file!");
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        if (inputStream != null) {
            try {
                inputStream.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        if (outputStream != null) {
            try {
                // outputStream.flush();
                outputStream.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
}
What am I doing wrong, or what would you suggest to achieve a perfect, gapless audio file on the smartphone? Thanks in advance.
I noticed in your code that you are reading everything into a short[] array, and then converting it to a byte[] array for the Channel API to send. Your code also creates a new byte[] array through each iteration of the loop, which will create a lot of work for the garbage collector. In general, you want to avoid allocations inside loops.
I would allocate one byte[] array at the top, and let the AudioRecord class store it directly into the byte[] array (just make sure you allocate twice as many bytes as you did shorts), with code like this:
mAudioTemp = new byte[bufferSize];

int result;
while ((result = mAudioRecord.read(mAudioTemp, 0, mAudioTemp.length)) > 0) {
    try {
        mAudioStream.write(mAudioTemp, 0, result);
    } catch (IOException e) {
        Log.e(Const.TAG, "Write to audio channel failed: " + e);
    }
}
I also tested this with a 1 second audio buffer, using code like this, and it worked nicely. I'm not sure what the minimum buffer size is before it starts to have problems:
int bufferSize = Math.max(
        AudioTrack.getMinBufferSize(44100, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT),
        44100 * 2);
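Putting the answer's advice together, the watch-side capture path might look like the following sketch. This is illustrative only: channelOutputStream and isRecording stand in for the question's Channel API output stream and recording flag, and getMinBufferSize comes from AudioRecord here because this is the capture side.
// Sketch of the byte[]-based capture the answer describes; names are placeholders.
int bufferSize = Math.max(
        AudioRecord.getMinBufferSize(44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT),
        44100 * 2); // one second of 16-bit mono at 44100 Hz

AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
        44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);

byte[] audioTemp = new byte[bufferSize]; // allocated once, outside the loop
audioRecord.startRecording();

int result;
while (isRecording && (result = audioRecord.read(audioTemp, 0, audioTemp.length)) > 0) {
    channelOutputStream.write(audioTemp, 0, result); // bytes go straight to the channel
}
audioRecord.stop();
audioRecord.release();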

Using decodeByteArray to convert a bitmap: showing a black image, and LogCat shows SkImageDecoder::Factory returned null

I am trying to receive an image from a remote server which sends images every 5 seconds. On the Android side, I am using decodeByteArray() to convert it to a bitmap. When it is running, sometimes it shows an image on the screen, and sometimes it shows a black screen and LogCat shows SkImageDecoder::Factory returning null. I don't know what the problem is.
Here is the code I have:
public class connection extends AsyncTask {
    @Override
    protected Object doInBackground(Object... arg0) {
        int i = 0;
        try {
            clientSocket = new Socket("134.129.125.126", 8080);
            input = clientSocket.getInputStream();
        } catch (UnknownHostException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        } catch (IOException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        }
        while (true) {
            data = new byte[2048 * 2048];
            try {
                read = input.read(data, 0, data.length);
                System.out.println("getInputStream()");
                bitmap = BitmapFactory.decodeByteArray(data, 0, read);
                System.out.println("deco");
            } catch (Exception e) {
                // TODO: handle exception
                e.printStackTrace();
                System.out.println(e);
            }
            runOnUiThread(new Runnable() {
                public void run() {
                    image.setImageBitmap(bitmap);
                    System.out.println("setImage at less than 500");
                }
            });
        }
    }
}
Update

Right now I try to receive the size of the image from the server first, so that I can allocate a byte array of the right size. Then decodeByteArray() still gives "Factory returned null". Here is my revised code:
public class connection extends AsyncTask {
    @Override
    protected Object doInBackground(Object... arg0) {
        try {
            clientSocket = new Socket("134.129.125.126", 8080);
            System.out.println("client connect to server");
            input = clientSocket.getInputStream();
            System.out.println("getinputstream");
        } catch (UnknownHostException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        } catch (IOException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        }
        // while (true) {
        int totalBytesRead = 0;
        int chunkSize = 0;
        int tempRead = 0;
        String msg = null;
        // byte[] data = null;
        byte[] tempByte = new byte[1024 * 1024 * 4];
        try {
            tempRead = input.read(tempByte);
            System.out.println("read:" + tempRead);
        } catch (IOException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        }
        if (tempRead < 2000) {
            String message = new String(tempByte, 0, tempRead);
            msg = message.substring(0, 6);
            System.out.println("message head:" + msg);
            byteSize = Integer.parseInt(msg);
            System.out.println("ByteSize:" + byteSize);
            data = new byte[byteSize];
        }
        try {
            while (chunkSize > -1) {
                System.out.println("data length:" + data.length);
                chunkSize = input.read(data, totalBytesRead, data.length - totalBytesRead);
                System.out.println("chunkSize is " + chunkSize);
                totalBytesRead += chunkSize;
                System.out.println("Total byte read " + totalBytesRead);
                if (totalBytesRead == data.length) {
                    if (input.read() != -1) {
                        // error, the file is larger than our buffer
                        throw new RuntimeException("Buffer overflow error!");
                    }
                }
            }
        } catch (Exception e) {
        }
        bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
        System.out.println("deco");
        runOnUiThread(new Runnable() {
            public void run() {
                image.setImageBitmap(bitmap);
                System.out.println("setImage at less than 500");
            }
        });
        return null;
    }
}
For one thing, there is no guarantee that your read command reads the whole image. InputStreams tend to return however many bytes are currently available, which may or may not be everything up to the end of the file; that is implementation-dependent.
You should continue reading into your byte array until the value returned from input.read() is -1. This will obviously require reworking some of the logic to loop until you get a -1 back, with a variable that tracks the total bytes read as the sum of all the read calls.
For example, to read everything out of a stream:
int totalBytesRead = 0;
int chunkSize = 0;
byte[] fileBuffer = new byte[4 * 1024 * 1024]; // 4MB buffer
while (chunkSize > -1) {
    chunkSize = inputStream.read(fileBuffer, totalBytesRead, fileBuffer.length - totalBytesRead);
    if (chunkSize > 0) {
        totalBytesRead += chunkSize; // only count bytes actually read; -1 means end of stream
    }
    if (totalBytesRead == fileBuffer.length) {
        if (inputStream.read() != -1) {
            // error, the file is larger than our buffer
            throw new RuntimeException("Buffer overflow error!");
        }
    }
}
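The update already has the server send the image size as a text header, but mixing a variable-length ASCII header and binary data in a single read() is fragile: the first read can swallow part of the image. If you can change the server, a fixed 4-byte length prefix plus DataInputStream.readFully() makes the framing exact. A sketch, assuming the server is changed to write a big-endian int length before each image payload:
// Sketch of a length-prefixed receive loop; assumes the server writes each image
// as a 4-byte big-endian length followed by exactly that many bytes.
DataInputStream in = new DataInputStream(clientSocket.getInputStream());
while (true) {
    int imageSize = in.readInt();          // blocks until the 4-byte length arrives
    byte[] imageData = new byte[imageSize];
    in.readFully(imageData);               // blocks until the whole image is read
    final Bitmap bmp = BitmapFactory.decodeByteArray(imageData, 0, imageSize);
    runOnUiThread(new Runnable() {
        public void run() {
            image.setImageBitmap(bmp);     // 'image' is the ImageView from the question
        }
    });
}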

Android how to record music from media player

I'm working on a project where I'm playing music from a URL using Android's built-in MediaPlayer. What I want to achieve is to save the streamed data to a file on the SD card. I've tried to do this using Android's built-in MediaRecorder, but it records everything around the phone, not only the sound coming from the MediaPlayer.
So my question is: what is the best way to achieve this?
Here is an example which I've already tested, but I can't play the mp3 file afterwards to check whether everything went OK:
Log.e("URL AGAIN", "url : " + url);
try {
    if (!isrecording) {
        URL urlStream = new URL(url);
        InputStream inputStream = urlStream.openStream();
        Log.d("", "urlStream.openStream()");
        String filename = Environment.getExternalStorageDirectory().getAbsolutePath();
        filename += "/deliciousradio.mp3";
        File outputSource = new File(filename);
        fileOutputStream = new FileOutputStream(outputSource);
        Log.d("", "FileOutputStream: " + outputSource);
        int bytesRead = -1;
        isrecording = true;
        byte[] buffer = new byte[30 * 1024];
        while ((bytesRead = inputStream.read(buffer)) > 0) {
            byte[] buffer2 = new byte[bytesRead];
            fileOutputStream.write(buffer2);
            Log.d("", "bytes size :" + buffer2.length);
            Log.d("", "bytesRead : " + bytesRead);
        }
    } else if (isrecording) {
        fileOutputStream.close();
    }
} catch (Exception e) {}
The problem here is that I'm receiving 30 as the length of buffer2, and I cannot understand why.
Thanks for any kind of help!
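One bug is visible in the posted loop: it writes buffer2, a freshly allocated and therefore all-zero array, instead of the bytes just read into buffer, so the file on disk contains only zeros and will not play. A minimal sketch of the corrected copy loop (this fixes the unplayable file; it does not address the separate question of capturing MediaPlayer output):
// Sketch: write the bytes actually read, not a new empty array.
byte[] buffer = new byte[30 * 1024];
int bytesRead;
while ((bytesRead = inputStream.read(buffer)) > 0) {
    fileOutputStream.write(buffer, 0, bytesRead); // copy only the bytes just read
}
fileOutputStream.close();
inputStream.close();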
Button recstart, recstop, replay;

// onCreate
File f;
File externalStorage;
String path = "";
MediaRecorder recorder;
Timer time;
String filename1 = "";
boolean s = false;

externalStorage = Environment.getExternalStorageDirectory();
String sdCardPath = externalStorage.getAbsolutePath();
recorder = new MediaRecorder();
path = sdCardPath + "/";
recstart.setOnClickListener(new OnClickListener() {
    public void onClick(View v) {
        // TODO Auto-generated method stub
        String state = android.os.Environment.getExternalStorageState();
        if (!state.equals(android.os.Environment.MEDIA_MOUNTED)) {
            try {
                throw new IOException("SD Card is not mounted. It is " + state + ".");
            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
        Date dt = new Date();
        int hours = dt.getHours();
        int minutes = dt.getMinutes();
        int seconds = dt.getSeconds();
        String curTime = hours + "_" + minutes + "_" + seconds;
        filename1 = "phonecall_at" + curTime + ".mp4";
        if (recorder == null) {
            recorder = new MediaRecorder();
        }
        f = new File(path, filename1);
        try {
            s = f.createNewFile();
        } catch (IOException e1) {
            // TODO Auto-generated catch block
            // Toast.makeText(AudioRecording.this,"hiiiii...."+e1.getMessage(),2000).show();
            e1.printStackTrace();
        }
        // MediaRecorder.AudioSource.VOICE_CALL +
        // MediaRecorder.AudioSource.MIC
        // recorder.setAudioSource(MediaRecorder.AudioSource.VOICE_UPLINK
        //         + MediaRecorder.AudioSource.VOICE_DOWNLINK);
        if (s == true) {
            recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
            recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
            recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
            recorder.setOutputFile(f.getAbsolutePath());
            // record.setText("stop");
            // record.setBackgroundColor(Color.BLUE);
            // Toast.makeText(AudioRecording.this, String.valueOf(s), 3000).show();
            try {
                recorder.prepare();
                Toast.makeText(AudioRecording.this, "Recording starts", 5000).show();
            } catch (IllegalStateException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
                Log.e(".................................", "" + e.toString());
            }
            recorder.start();
        } else {
            Toast.makeText(getApplicationContext(), "No space Left on device", 2000).show();
        }
        /*
         * try { recorder.prepare(); Toast.makeText(Recordingvoice
         * .this,"Recording starts",5000).show(); recorder.start();
         *
         * } catch (IllegalStateException e) { // TODO Auto-generated
         * catch block e.printStackTrace(); } catch (IOException e) { //
         * TODO Auto-generated catch block // e.printStackTrace(); }
         */
    }
});

recstop.setOnClickListener(new OnClickListener() {
    public void onClick(View v) {
        // TODO Auto-generated method stub
        if (s == true) {
            Toast.makeText(AudioRecording.this, "Recording stopped", 2000).show();
            if (recorder == null) {
                recorder = new MediaRecorder();
            }
            if (recorder != null) {
                recorder.stop();
                recorder.release();
            }
            recorder = null;
            // time.cancel();
            // uploadCode();
        }
    }
});

replay.setOnClickListener(new OnClickListener() {
    public void onClick(View v) {
        // TODO Auto-generated method stub
        if (f.exists()) {
            Toast showMsg = Toast.makeText(getApplicationContext(), "Playing", Toast.LENGTH_SHORT);
            showMsg.show();
            String path = f.getAbsolutePath();
            Uri myUri = Uri.parse(path);
            MediaPlayer mp = new MediaPlayer();
            mp.setLooping(false);
            mp = MediaPlayer.create(AudioRecording.this, myUri);
            mp.start();
        }
    }
});

Android AudioTrack clicks at start and end of sound

I get clicks at the start and end of playing a sound (a WAV from the SD card). It must be something to do with the track buffering, but I don't know the solution. Also, I create a new one of these every time the sound plays; is this OK, or is there a better way? There are many sounds playing over and over. Here is the code:
public void PlayAudioTrack(final String filePath, final Float f) throws IOException {
    new Thread(new Runnable() {
        public void run() {
            // play sound here
            int minSize = AudioTrack.getMinBufferSize(44100, AudioFormat.CHANNEL_CONFIGURATION_STEREO, AudioFormat.ENCODING_PCM_16BIT);
            AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, 44100,
                    AudioFormat.CHANNEL_CONFIGURATION_STEREO, AudioFormat.ENCODING_PCM_16BIT,
                    minSize, AudioTrack.MODE_STREAM);
            track.setPlaybackRate((int) (44100 * f));
            if (filePath == null)
                return;
            int count = 512 * 1024;
            // Read the file..
            byte[] byteData = null;
            File file = null;
            file = new File(filePath);
            byteData = new byte[(int) count];
            FileInputStream in = null;
            try {
                in = new FileInputStream(file);
            } catch (FileNotFoundException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
            int bytesread = 0, ret = 0;
            int size = (int) file.length();
            while (bytesread < size) {
                try {
                    ret = in.read(byteData, 0, count);
                } catch (IOException e) {
                    // TODO Auto-generated catch block
                    e.printStackTrace();
                }
                track.play();
                if (ret != -1) {
                    // Write the byte array to the track
                    track.write(byteData, 0, ret);
                    bytesread += ret;
                } else {
                    break;
                }
            }
            try {
                in.close();
            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
            track.stop();
            track.release();
        }
    }).start();
}
Many thanks for any help.
Isn't it possible that you are playing the PCM WAV file header too?
Each PCM WAV file has a small header at the beginning of the file; if you play that, you play the header bytes, which could result in a click at the beginning.
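If that's the cause, skipping the header before writing to the track should remove the click at the start. A minimal sketch, assuming the canonical 44-byte PCM WAV header (files with extra chunks have longer headers, so parsing the header properly is safer):
// Sketch: skip the standard 44-byte WAV header so only raw PCM reaches the track.
FileInputStream in = new FileInputStream(file);
long skipped = 0;
while (skipped < 44) {                 // skip() may skip fewer bytes than requested
    long n = in.skip(44 - skipped);
    if (n <= 0) break;
    skipped += n;
}
// ...then stream the remaining bytes into track.write() as before.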
I have had these same clicks at the beginning of each track using AudioTrack. I solved it by turning the track volume off, waiting half a second, and then restoring normal volume. I no longer have any clicks. Here's the relevant bit of the code.
at.play();
at.setStereoVolume(0.0f, 0.0f);
new Thread(new Runnable() {
    public void run() {
        try {
            Thread.sleep(500);
        } catch (InterruptedException ie) { ; }
        at.setStereoVolume(1.0f, 1.0f);
    }
}).start();
new Thread(new Runnable() {
    public void run() {
        int i = 0;
        try {
            buffer = new byte[512];
            while (((i = is.read(buffer)) != -1) && !paused) {
                at.write(buffer, 0, i);
                position += i;
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        if (!paused) {
            parent.trackEnded();
        }
    }
}).start();
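As a side note, setStereoVolume(left, right) was deprecated in API 21; on newer targets the equivalent is a single-gain at.setVolume(0.0f) / at.setVolume(1.0f) call.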
