How to merge two audio .wav files - Android

I know this question has been discussed already, but after a lot of googling I still can't find out why the merged file of two .wav files is not playable. I am merging two .wav files into one, but the merged file plays for just a second after I click the play button, and nothing else. Please give me a suggestion, or check my code to identify the problem. Thanks in advance. The code I am using to merge the two files is below:
private static final int RECORDER_SAMPLERATE = 44100;
private static final int RECORDER_BPP = 16;
private void CombineWaveFile(String file1, String file2, String path) {
FileInputStream in1 = null, in2 = null;
FileOutputStream out = null;
long totalAudioLen = 0;
long totalDataLen = totalAudioLen + 36;
long longSampleRate = RECORDER_SAMPLERATE;
int channels = 2;
long byteRate = RECORDER_BPP * RECORDER_SAMPLERATE * channels / 8;
byte[] data = new byte[8192];
try {
in1 = new FileInputStream(file1);
in2 = new FileInputStream(file2);
File file_path = new File(path);
file_path.setReadable(true);
out = new FileOutputStream(file_path);
totalAudioLen = in1.getChannel().size() + in2.getChannel().size();
totalDataLen = totalAudioLen + 36;
WriteWaveFileHeader(out, totalAudioLen, totalDataLen,
longSampleRate, channels, byteRate);
while (in1.read(data) != -1) {
out.write(data);
}
while (in2.read(data) != -1) {
out.write(data);
}
out.close();
in1.close();
in2.close();
Log.e("mPAthTemp Combine", file_path.toString());
Log.e("mFileSecond Combine", file2);
Toast.makeText(this, "Done!!", Toast.LENGTH_LONG).show();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
private void WriteWaveFileHeader(FileOutputStream out, long totalAudioLen,
long totalDataLen, long longSampleRate, int channels, long byteRate)
throws IOException {
byte[] header = new byte[44];
header[0] = 'R';
header[1] = 'I';
header[2] = 'F';
header[3] = 'F';
header[4] = (byte) (totalDataLen & 0xff);
header[5] = (byte) ((totalDataLen >> 8) & 0xff);
header[6] = (byte) ((totalDataLen >> 16) & 0xff);
header[7] = (byte) ((totalDataLen >> 24) & 0xff);
header[8] = 'W';
header[9] = 'A';
header[10] = 'V';
header[11] = 'E';
header[12] = 'f';
header[13] = 'm';
header[14] = 't';
header[15] = ' ';
header[16] = 16;
header[17] = 0;
header[18] = 0;
header[19] = 0;
header[20] = 1;
header[21] = 0;
header[22] = (byte) channels;
header[23] = 0;
header[24] = (byte) (longSampleRate & 0xff);
header[25] = (byte) ((longSampleRate >> 8) & 0xff);
header[26] = (byte) ((longSampleRate >> 16) & 0xff);
header[27] = (byte) ((longSampleRate >> 24) & 0xff);
header[28] = (byte) (byteRate & 0xff);
header[29] = (byte) ((byteRate >> 8) & 0xff);
header[30] = (byte) ((byteRate >> 16) & 0xff);
header[31] = (byte) ((byteRate >> 24) & 0xff);
header[32] = (byte) (2 * 16 / 8);
header[33] = 0;
header[34] = RECORDER_BPP;
header[35] = 0;
header[36] = 'd';
header[37] = 'a';
header[38] = 't';
header[39] = 'a';
header[40] = (byte) (totalAudioLen & 0xff);
header[41] = (byte) ((totalAudioLen >> 8) & 0xff);
header[42] = (byte) ((totalAudioLen >> 16) & 0xff);
header[43] = (byte) ((totalAudioLen >> 24) & 0xff);
out.write(header, 0, 44);
}
}
The code I am using to play the audio file is below:
private void startPlaying(String file_path) {
mPlayer = new MediaPlayer();
try {
Log.e("playing", file_path);
mPlayer.setDataSource(file_path);
mPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
mPlayer.prepare();
mPlayer.start();
mPlayer.setOnCompletionListener(new OnCompletionListener() {
@Override
public void onCompletion(MediaPlayer mp) {
// TODO Auto-generated method stub
showToast("Complete");
setProgressBar(false);
mButtonStart.setEnabled(true);
mButtonStop.setEnabled(false);
stopPlaying();
}
});
} catch (IllegalArgumentException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (SecurityException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IllegalStateException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
Can anyone give me a solution for why this happens? Thanks.

I don't know what specifically causes your problem, but this code is littered with bugs, some combination of which might explain it. Here's what I spotted by inspection:
It assumes that the incoming files are the same format (16 bit stereo, 44100, etc) without checking.
It doesn't parse the incoming files at all. Thus it treats the headers of the incoming files as audio data and appends that to the outgoing file (yikes!). This is especially strange considering it carefully writes a header for the output file.
It doesn't check the return value of read(), thus writing larger blocks than it reads in some cases (especially the last block of each incoming file).
Perhaps you need some new sample code. JSResources might be a good place to start.
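For reference, here is a minimal sketch of a corrected merge, assuming both inputs are canonical PCM WAVs with a plain 44-byte header and identical format (16-bit stereo, 44100 Hz, matching the constants above); it skips the input headers and honors the return value of read():
private void combineWaveFiles(String file1, String file2, String outPath) throws IOException {
    try (FileInputStream in1 = new FileInputStream(file1);
         FileInputStream in2 = new FileInputStream(file2);
         FileOutputStream out = new FileOutputStream(outPath)) {
        // The audio payload is everything after each 44-byte header.
        long totalAudioLen = (in1.getChannel().size() - 44) + (in2.getChannel().size() - 44);
        WriteWaveFileHeader(out, totalAudioLen, totalAudioLen + 36, RECORDER_SAMPLERATE, 2,
                RECORDER_BPP * RECORDER_SAMPLERATE * 2 / 8);
        byte[] buffer = new byte[8192];
        for (FileInputStream in : new FileInputStream[] { in1, in2 }) {
            in.skip(44); // do not copy the input header as audio data
            int read;
            // Write only as many bytes as were actually read, so the last
            // block is not padded with stale buffer contents.
            while ((read = in.read(buffer)) != -1) {
                out.write(buffer, 0, read);
            }
        }
    }
}
This still assumes both files share one format; a robust version would parse each input's fmt chunk and verify it first.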

Related

Written .wav file is empty after receiving data from wear

I'm working on an Android Wear app. The app records audio from the smartwatch microphone and sends it to the handheld device. The handheld device receives the data and writes a .wav file. The file is created, but when I listen to it there is only silence.
This is the wear code:
public void replyAudioByByte(final byte data[]) {
final String path = "/start_activity";
final Byte[] text= new Byte[1024];
GoogleApiClient client = new GoogleApiClient.Builder(getApplicationContext())
.addApi(Wearable.API)
.build();
new Thread(new Runnable() {
@Override
public void run() {
NodeApi.GetConnectedNodesResult nodes = Wearable.NodeApi.getConnectedNodes(mApiClient).await();
for (Node node : nodes.getNodes()) {
MessageApi.SendMessageResult result = Wearable.MessageApi.sendMessage(
mApiClient, node.getId(),AUDIO_RECORDER, data).await();
if (result.getStatus().isSuccess()) {
Log.d("sendMessage","Message send!!");
for (int j=0; j<data.length; j++ ){
Log.v("Mobile", "Message: {" + data[j] + "} sent to: " + node.getDisplayName());
}
} else {
// Log an error
Log.v("Mobile", "ERROR: failed to send Message");
}
}
}
}).start();
client.disconnect();
Log.d("MOBILE", "send message end");
}
public void startRecordingAudio() {
recorder = findAudioRecord();
Log.d("recorder:","recorder="+recorder.toString());
CountDownTimer countDowntimer = new CountDownTimer(8000, 1000) {
public void onTick(long millisUntilFinished) {
}
public void onFinish() {
try {
//Toast.makeText(getBaseContext(), "Stop recording Automatically ", Toast.LENGTH_LONG).show();
Log.d("wear", "stopRecorder=" + System.currentTimeMillis());
recorder.stop();
Log.d("formato registrazione","recorderformat="+recorder.getAudioFormat()+"-----rate=");
Log.d("formato registrazione","recordersamplerate=" +recorder.getSampleRate());
isRecording=false;
replyAudioByByte(data);
for (int j=0; j< data.length;j++){
Log.d("watch audio registrato", "data[]="+data[j]);
}
Log.d("wear", "recorder.stop ok!");
} catch (Exception e) {
// TODO Auto-generated catch block
Log.e("wear", "recorder.stop catch");
e.printStackTrace();
}
}
};
recorder.startRecording();
countDowntimer.start();
Log.d("wear", "startRecorder=" + System.currentTimeMillis());
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
while (isRecording ) {
recorder.read(data, 0, bufferSize);
Log.d("WEAR","recorder.read="+recorder.read(data, 0, bufferSize));
}
recorder.stop();
recorder.release();
for (int i = 0; i < bufferSize; i++) {
Log.d("startrecording", "data=" + data[i]);
}
}
}, "AudioRecorder Thread");
recordingThread.start();
int a= recorder.getSampleRate();
Log.d("formato registrazione","recorderformat="+recorder.getAudioFormat()+"-----rate="+a);
Log.d("formato registrazione","recordersamplerate=" +recorder.getSampleRate());
}
public AudioRecord findAudioRecord() {
/** The settings I must use are not the same on every device, so I try combinations until one works */
for (int rate : mSampleRates) {
for (short audioFormat : audioF) {
for (short channelConfig : channelC) {
try {
//Log.d("Check", "Attempting rate " + rate + "Hz, bits: " + audioFormat + ", channel: " + channelConfig);
int bufferSize = AudioRecord.getMinBufferSize(rate, channelConfig, audioFormat);
if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
//It checks if it can instantiate the audiorecorder without problems
AudioRecord recorder = new AudioRecord(AudioSource.MIC, rate, channelConfig, audioFormat, bufferSize + 2000);
Log.d("AudioRecorder data","AudioSource.Default="+ AudioSource.MIC);
Log.d("AudioRecorder data","Rate="+ rate);
Log.d("AudioRecorder data","Channel.config="+ channelConfig);
Log.d("AudioRecorder data","AudioFormat= "+audioFormat);
bufferSize=bufferSize+2000;
Log.d("AudioRecorder data","buffersize="+ bufferSize );
if (recorder.getState() == AudioRecord.STATE_INITIALIZED)
Log.d("audiorec","rate="+rate);
return recorder;
}
} catch (Exception e) {
Log.e("Check", rate + "Exception, keep trying.", e);
}
}
}
}
return null;
}
This is the handheld code:
public Void doInBackground(byte [] dataToWrite) {
Log.d("doInBackground","entrato");
byte data[] = new byte[bufferSize];
String tempfilename = "";
FileOutputStream os = null;
//if(allowRecorder){
tempfilename = getTempFilename();
Log.d("doInBackground","getTempFilename=" +tempfilename.toString());
try {
os = new FileOutputStream(tempfilename);
Log.d("doInBackground","os new ok" );
} catch (FileNotFoundException e) {
e.printStackTrace();
}
dbData = new ArrayList<Double>();
Log.d("doInBackGround", "dateToWrite.length=" + dataToWrite.length);
for (int j = 0; j < dataToWrite.length; j++) {
try {
os.write(dataToWrite);
Log.d("os,write", "dataToWrite");
} catch (IOException e) {
e.printStackTrace();
}
}
if(data[data.length-1]!=0){
double Db = 20 * Math.log10(Math.abs((data[data.length-1]/51805.5336) / 0.00002));
dbData.add(Db);
}
try {
os.close();
Log.d("os.close", "dataToWrite");
copyWaveFile(tempfilename,getFilename());
deleteTempFile();
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
private void copyWaveFile(String inFilename,String outFilename){
FileInputStream in = null;
FileOutputStream out = null;
long totalAudioLen = 0;
long totalDataLen = 0;
long longSampleRate = 8000;
System.out.println("SAMPLE RATE = "+longSampleRate);
int channels = 12;
audioFormat = 16;
long byteRate = audioFormat * longSampleRate * channels/8;
byte[] data = new byte[bufferSize];
try {
in = new FileInputStream(inFilename);
out = new FileOutputStream(outFilename);
totalAudioLen = in.getChannel().size();
totalDataLen = totalAudioLen + 36;
Log.d("RecorderRead","totalAudioLen=" +totalAudioLen);
Log.d("RecorderRead","totalDatalen=" +totalDataLen);
System.out.println("Temp File size: " + totalDataLen);
Log.d("AudioRecorder data","AudioSource.Default="+ AudioSource.DEFAULT);
Log.d("AudioRecorder data","Rate="+ longSampleRate);
Log.d("AudioRecorder data","Channel.config="+ channels);
Log.d("AudioRecorder data","AudioFormat= "+audioFormat);
//bufferSize=bufferSize+2000;
Log.d("AudioRecorder data","buffersize="+ bufferSize );
if(totalDataLen != 36){
writeWaveFileHeader(out, totalAudioLen, totalDataLen,
longSampleRate, channels, byteRate);
Log.d("writeWAVEFILE", "chiamato");
while(in.read(data) != -1){
out.write(data);
}
System.out.println("Wav File size: " + out.getChannel().size());
}
else{
System.out.println("Non creo il file .wav");
}
in.close();
out.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
private void writeWaveFileHeader(
FileOutputStream out, long totalAudioLen,
long totalDataLen, long longSampleRate, int channels,
long byteRate) throws IOException {
byte[] header = new byte[44];
header[0] = 'R'; // RIFF/WAVE header
header[1] = 'I';
header[2] = 'F';
header[3] = 'F';
header[4] = (byte) (totalDataLen & 0xff);
header[5] = (byte) ((totalDataLen >> 8) & 0xff);
header[6] = (byte) ((totalDataLen >> 16) & 0xff);
header[7] = (byte) ((totalDataLen >> 24) & 0xff);
header[8] = 'W';
header[9] = 'A';
header[10] = 'V';
header[11] = 'E';
header[12] = 'f'; // 'fmt ' chunk
header[13] = 'm';
header[14] = 't';
header[15] = ' ';
header[16] = 16; // 4 bytes: size of 'fmt ' chunk
header[17] = 0;
header[18] = 0;
header[19] = 0;
header[20] = 1; // format = 1
header[21] = 0;
header[22] = (byte) channels;
header[23] = 0;
header[24] = (byte) (longSampleRate & 0xff);
header[25] = (byte) ((longSampleRate >> 8) & 0xff);
header[26] = (byte) ((longSampleRate >> 16) & 0xff);
header[27] = (byte) ((longSampleRate >> 24) & 0xff);
header[28] = (byte) (byteRate & 0xff);
header[29] = (byte) ((byteRate >> 8) & 0xff);
header[30] = (byte) ((byteRate >> 16) & 0xff);
header[31] = (byte) ((byteRate >> 24) & 0xff);
header[32] = (byte) (2 * 16 / 8); // block align
header[33] = 0;
header[34] = (byte) audioFormat; // bits per sample
header[35] = 0;
header[36] = 'd';
header[37] = 'a';
header[38] = 't';
header[39] = 'a';
header[40] = (byte) (totalAudioLen & 0xff);
header[41] = (byte) ((totalAudioLen >> 8) & 0xff);
header[42] = (byte) ((totalAudioLen >> 16) & 0xff);
header[43] = (byte) ((totalAudioLen >> 24) & 0xff);
out.write(header, 0, 44);
}
In the wear manifest I have:
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
In the handheld manifest I have:
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
When I run the app, the wear log shows this error:
AudioRecord-JNI: Error -4 during AudioRecord native read
What should I do to solve it? Can someone help me? What's wrong? Any help is appreciated, code or tutorial.
Thanks in advance.
You need to set the audio encoder to be used for recording.
void setAudioEncoder (int audio_encoder)
If this method is not called, the output file will not contain an audio track. Call this after setOutputFormat() but before prepare().
Try to follow the steps listed below:
Create a new instance of android.media.MediaRecorder.
Set the audio source using MediaRecorder.setAudioSource(). You will probably want to use MediaRecorder.AudioSource.MIC.
Set output file format using MediaRecorder.setOutputFormat().
Set output file name using MediaRecorder.setOutputFile().
Set the audio encoder using MediaRecorder.setAudioEncoder().
Call MediaRecorder.prepare() on the MediaRecorder instance.
To start audio capture, call MediaRecorder.start().
To stop audio capture, call MediaRecorder.stop().
When you are done with the MediaRecorder instance, call MediaRecorder.release() on it. Calling MediaRecorder.release() is always recommended to free the resource immediately.
Here's sample code showing how to record audio and play the recording back: https://developer.android.com/guide/topics/media/audio-capture.html#example
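A minimal sketch of those steps (the output path and the 3GP/AMR format choices here are placeholders, not taken from the question):
MediaRecorder recorder = new MediaRecorder();
recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
recorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
recorder.setOutputFile("/sdcard/recording.3gp"); // placeholder path
// Must be called after setOutputFormat() but before prepare().
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
try {
    recorder.prepare();
} catch (IOException e) {
    e.printStackTrace();
}
recorder.start();
// ... capture audio ...
recorder.stop();
recorder.release(); // free the resource as soon as you are done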

Audio Recording not smooth and continuous

Please help me figure out a problem with sound recording.
I want to record via a service, but recording that way produces jerks in the sound. I have buttons in an activity from which I start and stop recording.
The same code works absolutely fine if I do it all in the activity.
The service I am using is below:
public class ServiceMusic extends Service {
private static String LOG_TAG = "BoundService";
private static final int RECORDER_BPP = 16;
private static final String AUDIO_RECORDER_FILE_EXT_WAV = ".wav";
private static final String AUDIO_RECORDER_FOLDER = "AudioRecorder";
private static final String AUDIO_RECORDER_TEMP_FILE = "record_temp.raw";
private static final int RECORDER_SAMPLERATE = 8000;
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_STEREO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private AudioRecord recorder = null;
private int bufferSize = 0;
private Thread recordingThread = null;
private boolean isRecording = false;
private static ServiceMusic self = null;
@Override
public void onCreate() {
super.onCreate();
Log.v(LOG_TAG, "in onCreate");
}
public int onStartCommand(Intent intent, int flags, int startId)
{
Log.i("onStartComman?d", "onStartCommand");
self = ServiceMusic.this;
self.bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE,RECORDER_CHANNELS,RECORDER_AUDIO_ENCODING);
self.recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,RECORDER_SAMPLERATE, RECORDER_CHANNELS,RECORDER_AUDIO_ENCODING, bufferSize);
TelephonyManager mgr=(TelephonyManager) getSystemService(TELEPHONY_SERVICE);
if (mgr!=null)
{
mgr.listen(phoneStateListener, PhoneStateListener.LISTEN_CALL_STATE);
}
return START_REDELIVER_INTENT;
}
@Override
public void onRebind(Intent intent) {
Log.v(LOG_TAG, "in onRebind");
super.onRebind(intent);
}
@Override
public boolean onUnbind(Intent intent) {
Log.v(LOG_TAG, "in onUnbind");
return true;
}
@Override
public void onDestroy() {
super.onDestroy();
Log.v(LOG_TAG, "in onDestroy");
// mChronometer.stop();
}
private String getFilename(){
String filepath = Environment.getExternalStorageDirectory().getPath();
File file = new File(filepath,AUDIO_RECORDER_FOLDER);
if(!file.exists()){
file.mkdirs();
}
return (file.getAbsolutePath() + "/" + "Ravindra" + AUDIO_RECORDER_FILE_EXT_WAV);
}
private String getTempFilename(){
String filepath = Environment.getExternalStorageDirectory().getPath();
File file = new File(filepath,AUDIO_RECORDER_FOLDER);
if(!file.exists()){
file.mkdirs();
}
File tempFile = new File(filepath,AUDIO_RECORDER_TEMP_FILE);
if(tempFile.exists())
tempFile.delete();
return (file.getAbsolutePath() + "/" + AUDIO_RECORDER_TEMP_FILE);
}
void startRecording(final boolean b){
if (self.recorder==null) {
//Log.i("startRecording", "iffffffffffffffffffff");
self.recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,RECORDER_SAMPLERATE, RECORDER_CHANNELS,RECORDER_AUDIO_ENCODING, bufferSize);
}
self.recorder.startRecording();
isRecording = true;
self.recordingThread = new Thread(new Runnable() {
@Override
public void run() {
writeAudioDataToFile(b);
}
},"AudioRecorder Thread");
self.recordingThread.start();
}
void stopRecording(boolean b){
Log.i("stopRecording", "stopRecording");
if(self.recorder != null){
isRecording = false;
self.recorder.stop();
self.recorder.release();
self.recorder = null;
self.recordingThread = null;
Log.i("stopRecording", "nulll");
}
Log.i("stopRecording", "outer");
if(b == true){
Log.i("stopRecording", "true");
copyWaveFile(getTempFilename(),getFilename());
deleteTempFile();
}
}
private void deleteTempFile() {
File file = new File(getTempFilename());
file.delete();
}
private void writeAudioDataToFile(boolean b){
byte data[] = new byte[bufferSize];
String filename = getTempFilename();
FileOutputStream os = null;
try {
os = new FileOutputStream(filename,b);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
int read = 0;
if(os != null){
while(isRecording){
read = self.recorder.read(data, 0, bufferSize);
if(AudioRecord.ERROR_INVALID_OPERATION != read){
try {
os.write(data);
} catch (IOException e) {
e.printStackTrace();
}
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
private void copyWaveFile(String inFilename,String outFilename){
FileInputStream in = null;
FileOutputStream out = null;
long totalAudioLen = 0;
long totalDataLen = totalAudioLen + 44;
long longSampleRate = RECORDER_SAMPLERATE;
int channels = 2;
long byteRate = RECORDER_BPP * RECORDER_SAMPLERATE * channels/8;
byte[] data = new byte[bufferSize];
try {
in = new FileInputStream(inFilename);
out = new FileOutputStream(outFilename);
totalAudioLen = in.getChannel().size();
totalDataLen = totalAudioLen + 44;
//AppLog.logString("File size: " + totalDataLen);
WriteWaveFileHeader(out, totalAudioLen, totalDataLen,
longSampleRate, channels, byteRate);
while(in.read(data) != -1){
out.write(data);
}
in.close();
out.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
private void WriteWaveFileHeader(
FileOutputStream out, long totalAudioLen,
long totalDataLen, long longSampleRate, int channels,
long byteRate) throws IOException {
byte[] header = new byte[44];
header[0] = 'R'; // RIFF/WAVE header
header[1] = 'I';
header[2] = 'F';
header[3] = 'F';
header[4] = (byte) (totalDataLen & 0xff);
header[5] = (byte) ((totalDataLen >> 8) & 0xff);
header[6] = (byte) ((totalDataLen >> 16) & 0xff);
header[7] = (byte) ((totalDataLen >> 24) & 0xff);
header[8] = 'W';
header[9] = 'A';
header[10] = 'V';
header[11] = 'E';
header[12] = 'f'; // 'fmt ' chunk
header[13] = 'm';
header[14] = 't';
header[15] = ' ';
header[16] = 16; // 4 bytes: size of 'fmt ' chunk
header[17] = 0;
header[18] = 0;
header[19] = 0;
header[20] = 1; // format = 1
header[21] = 0;
header[22] = (byte) channels;
header[23] = 0;
header[24] = (byte) (longSampleRate & 0xff);
header[25] = (byte) ((longSampleRate >> 8) & 0xff);
header[26] = (byte) ((longSampleRate >> 16) & 0xff);
header[27] = (byte) ((longSampleRate >> 24) & 0xff);
header[28] = (byte) (byteRate & 0xff);
header[29] = (byte) ((byteRate >> 8) & 0xff);
header[30] = (byte) ((byteRate >> 16) & 0xff);
header[31] = (byte) ((byteRate >> 24) & 0xff);
header[32] = (byte) (2 * 16 / 8); // block align
header[33] = 0;
header[34] = RECORDER_BPP; // bits per sample
header[35] = 0;
header[36] = 'd';
header[37] = 'a';
header[38] = 't';
header[39] = 'a';
header[40] = (byte) (totalAudioLen & 0xff);
header[41] = (byte) ((totalAudioLen >> 8) & 0xff);
header[42] = (byte) ((totalAudioLen >> 16) & 0xff);
header[43] = (byte) ((totalAudioLen >> 24) & 0xff);
out.write(header, 0, 44);
}
Acquire a wake lock when you start recording and release it when you stop; otherwise the device may go to sleep. For an example, see line 820 onwards of the Android SoundRecorder app: https://android.googlesource.com/platform/packages/apps/SoundRecorder/+/master/src/com/android/soundrecorder/SoundRecorder.java
(You may consider using a partial wake lock instead of a screen-dim wake lock in the code above if you are running in a background service.)
See http://developer.android.com/reference/android/os/PowerManager.html
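A hedged sketch of that advice applied to this service (the tag string and the exact placement are my assumptions, not from the answer):
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
PowerManager.WakeLock wakeLock = pm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "ServiceMusic:record");
wakeLock.acquire(); // keep the CPU running while recording
try {
    startRecording(true);
    // ... recording in progress ...
} finally {
    stopRecording(true);
    wakeLock.release(); // always release, or the battery will drain
}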

Android Audio Record to wav

I recorded audio using the audio recorder on Android and it produced a raw PCM file. I'm trying to convert it to a format I can listen to (wav or mp3, for example).
I've started with this example but don't know where to go from here: Android AudioRecord example
I tried following these:
http://computermusicblog.com/blog/2008/08/29/reading-and-writing-wav-files-in-java
Recording .Wav with Android AudioRecorder
Here is my code to record (note I am using a CountDownTimer to tell it when to start and stop recording):
public class AudioRecordService extends Service {
Toast toast;
private static final int RECORDER_SAMPLERATE = 44100;
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_MONO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private AudioRecord record = null;
int BufferElements2Rec = 1024; // want to play 2048 (2K) since 2 bytes we use only 1024
int BytesPerElement = 2; // 2 bytes in 16bit format
private Thread recordingThread = null;
private boolean isRecording = false;
int buffsize = 0;
public AudioRecordService() {
}
@Override
public IBinder onBind(Intent intent) {
// TODO: Return the communication channel to the service.
throw new UnsupportedOperationException("Not yet implemented");
}
public int onStartCommand(Intent intent, int flags, int startId)
{
try {
buffsize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
record = new AudioRecord(MediaRecorder.AudioSource.MIC,
RECORDER_SAMPLERATE, RECORDER_CHANNELS,
RECORDER_AUDIO_ENCODING, buffsize);
record.startRecording();
CountDownTimer countDowntimer = new CountDownTimer(15000, 1000) {
public void onTick(long millisUntilFinished) {
toast = Toast.makeText(AudioRecordService.this, "Recording", Toast.LENGTH_SHORT);
toast.show();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
public void onFinish() {
try {
toast.cancel();
Toast.makeText(AudioRecordService.this, "Done Recording ", Toast.LENGTH_SHORT).show();
isRecording = false;
record.stop();
record.release();
record = null;
recordingThread = null;
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}};
countDowntimer.start();
}
catch (Exception ex)
{
ex.printStackTrace();
}
return Service.START_STICKY;
}
private byte[] short2byte(short[] sData) {
int shortArrsize = sData.length;
byte[] bytes = new byte[shortArrsize * 2];
for (int i = 0; i < shortArrsize; i++) {
bytes[i * 2] = (byte) (sData[i] & 0x00FF);
bytes[(i * 2) + 1] = (byte) (sData[i] >> 8);
sData[i] = 0;
}
return bytes;
}
private void writeAudioDataToFile() {
try {
//String filePath = "/sdcard/voice8K16bitmono.pcm";
String extState = Environment.getExternalStorageState();
// Path to write files to
String path = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MUSIC + "/test").getAbsolutePath();
String fileName = "audio.pcm";
String externalStorage = Environment.getExternalStorageDirectory().getAbsolutePath();
File file = new File(externalStorage + File.separator + fileName);
// if the file doesn't exist, create it
if (!file.exists()) {
file.createNewFile();
}
short sData[] = new short[BufferElements2Rec];
FileOutputStream os = null;
try {
os = new FileOutputStream(file);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
while (isRecording) {
// gets the voice output from microphone to byte format
record.read(sData, 0, BufferElements2Rec);
System.out.println("Short wirting to file" + sData.toString());
try {
// // writes the data to file from buffer
// // stores the voice buffer
byte bData[] = short2byte(sData);
os.write(bData, 0, BufferElements2Rec * BytesPerElement);
} catch (IOException e) {
e.printStackTrace();
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
catch (Exception ex) {
ex.printStackTrace();
}
}
}
My audio.pcm is created, but I don't know how to play it. I'm assuming bData[] is the byte array being written. The links above say they used the files, but show no examples of how it was done.
If it matters, I've used GoldWave to open the file. It opens, but the audio is messed up.
I also noticed my files were only about 2 seconds long, and I think it's because of BytesPerElement and BufferElements2Rec. If you can help me out so it records the full 15 seconds, that would be great.
Thanks in advance!
The only difference between a PCM file and a WAV file is that the PCM file has no header and the WAV file does. The WAV header carries key information for playback such as the sample rate, bits per sample, and number of channels. When you load a PCM file, either the app has to have prior knowledge of this information or you have to supply it. If you load a PCM file into Audacity, for example, it will prompt you to fill in all of that.
In order to make the existing saved file a .WAV you need to prepend an appropriate header. I'm not going to go into details because there are already many answers on SO detailing it, and it is readily available on the web (https://en.wikipedia.org/wiki/WAV).
The second issue you raise, about the file length, may come from the fact that AudioRecord.read returns an int: the number of samples actually read, which may be less than you asked for. That is really a second question, though.
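On that second point, here is a small sketch of the fix inside the recording loop (it reuses the fields from your code, but the logic is mine and not verified against your app):
int samplesRead = record.read(sData, 0, BufferElements2Rec);
if (samplesRead > 0) {
    byte[] bData = short2byte(sData);
    // Write only the samples actually captured on this pass.
    os.write(bData, 0, samplesRead * BytesPerElement);
}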
This is a sample .WAV header format, extracted from OMRECORDER:
private byte[] wavFileHeader(long totalAudioLen, long totalDataLen, long longSampleRate,
int channels, long byteRate, byte bitsPerSample) {
byte[] header = new byte[44];
header[0] = 'R'; // RIFF/WAVE header
header[1] = 'I';
header[2] = 'F';
header[3] = 'F';
header[4] = (byte) (totalDataLen & 0xff);
header[5] = (byte) ((totalDataLen >> 8) & 0xff);
header[6] = (byte) ((totalDataLen >> 16) & 0xff);
header[7] = (byte) ((totalDataLen >> 24) & 0xff);
header[8] = 'W';
header[9] = 'A';
header[10] = 'V';
header[11] = 'E';
header[12] = 'f'; // 'fmt ' chunk
header[13] = 'm';
header[14] = 't';
header[15] = ' ';
header[16] = 16; // 4 bytes: size of 'fmt ' chunk
header[17] = 0;
header[18] = 0;
header[19] = 0;
header[20] = 1; // format = 1
header[21] = 0;
header[22] = (byte) channels;
header[23] = 0;
header[24] = (byte) (longSampleRate & 0xff);
header[25] = (byte) ((longSampleRate >> 8) & 0xff);
header[26] = (byte) ((longSampleRate >> 16) & 0xff);
header[27] = (byte) ((longSampleRate >> 24) & 0xff);
header[28] = (byte) (byteRate & 0xff);
header[29] = (byte) ((byteRate >> 8) & 0xff);
header[30] = (byte) ((byteRate >> 16) & 0xff);
header[31] = (byte) ((byteRate >> 24) & 0xff);
header[32] = (byte) (channels * (bitsPerSample / 8)); //
// block align
header[33] = 0;
header[34] = bitsPerSample; // bits per sample
header[35] = 0;
header[36] = 'd';
header[37] = 'a';
header[38] = 't';
header[39] = 'a';
header[40] = (byte) (totalAudioLen & 0xff);
header[41] = (byte) ((totalAudioLen >> 8) & 0xff);
header[42] = (byte) ((totalAudioLen >> 16) & 0xff);
header[43] = (byte) ((totalAudioLen >> 24) & 0xff);
return header;
}
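A hypothetical usage sketch: prepend the returned header to the recorded PCM payload to produce a playable .wav. The paths reuse the question's externalStorage variable, and the format values assume its 44100 Hz, mono, 16-bit setup:
File pcm = new File(externalStorage, "audio.pcm");
File wav = new File(externalStorage, "audio.wav");
long audioLen = pcm.length();
long byteRate = 44100L * 1 * 16 / 8; // bytes per second for mono 16-bit
try (FileInputStream in = new FileInputStream(pcm);
     FileOutputStream out = new FileOutputStream(wav)) {
    out.write(wavFileHeader(audioLen, audioLen + 36, 44100L, 1, byteRate, (byte) 16));
    byte[] buf = new byte[4096];
    int n;
    while ((n = in.read(buf)) != -1) {
        out.write(buf, 0, n); // copy the raw samples unchanged
    }
}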
Below is the ADTS header format for .aac, extracted from WhatsappAudioRecorder:
private byte[] createAdtsHeader(int length) {
int frameLength = length + 7;
byte[] adtsHeader = new byte[7];
adtsHeader[0] = (byte) 0xFF; // Sync Word
adtsHeader[1] = (byte) 0xF1; // MPEG-4, Layer (0), No CRC
adtsHeader[2] = (byte) ((MediaCodecInfo.CodecProfileLevel.AACObjectLC - 1) << 6);
adtsHeader[2] |= (((byte) SAMPLE_RATE_INDEX) << 2);
adtsHeader[2] |= (((byte) CHANNELS) >> 2);
adtsHeader[3] = (byte) (((CHANNELS & 3) << 6) | ((frameLength >> 11) & 0x03));
adtsHeader[4] = (byte) ((frameLength >> 3) & 0xFF);
adtsHeader[5] = (byte) (((frameLength & 0x07) << 5) | 0x1f);
adtsHeader[6] = (byte) 0xFC;
return adtsHeader;
}
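A hypothetical usage, assuming encodedBytes holds one raw AAC frame of encodedLength bytes drained from a MediaCodec output buffer and os is an open output stream:
// Each raw AAC frame gets its own 7-byte ADTS header before it is written.
os.write(createAdtsHeader(encodedLength));
os.write(encodedBytes, 0, encodedLength);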

Android: AudioRecord a mono WAV file

I want to record sound for an Android app in WAV format, 16-bit mono (1 channel). I used this code:
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.view.View;
import android.widget.Button;
public class MainActivity extends ActionBarActivity {
private static final int RECORDER_BPP = 16;
private static final String AUDIO_RECORDER_FILE_EXT_WAV = ".wav";
private static final String AUDIO_RECORDER_FOLDER = "AudioRecorder";
private static final String AUDIO_RECORDER_TEMP_FILE = "record_temp.raw";
private static final int RECORDER_SAMPLERATE = 44100;
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_MONO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private AudioRecord recorder = null;
private int bufferSize = 0;
private Thread recordingThread = null;
private boolean isRecording = false;
@SuppressWarnings("deprecation")
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.fragment_main);
setButtonHandlers();
enableButtons(false);
bufferSize = AudioRecord.getMinBufferSize(8000,AudioFormat.CHANNEL_IN_MONO,AudioFormat.ENCODING_PCM_16BIT);
}
private void setButtonHandlers() {
((Button)findViewById(R.id.btnStart)).setOnClickListener(btnClick);
((Button)findViewById(R.id.btnStop)).setOnClickListener(btnClick);
}
private void enableButton(int id,boolean isEnable){
((Button)findViewById(id)).setEnabled(isEnable);
}
private void enableButtons(boolean isRecording) {
enableButton(R.id.btnStart,!isRecording);
enableButton(R.id.btnStop,isRecording);
}
private String getFilename(){
String filepath = Environment.getExternalStorageDirectory().getPath();
File file = new File(filepath,AUDIO_RECORDER_FOLDER);
if(!file.exists()){
file.mkdirs();
}
return (file.getAbsolutePath() + "/" + System.currentTimeMillis() + AUDIO_RECORDER_FILE_EXT_WAV);
}
private String getTempFilename(){
String filepath = Environment.getExternalStorageDirectory().getPath();
File file = new File(filepath,AUDIO_RECORDER_FOLDER);
if(!file.exists()){
file.mkdirs();
}
File tempFile = new File(filepath,AUDIO_RECORDER_TEMP_FILE);
if(tempFile.exists())
tempFile.delete();
return (file.getAbsolutePath() + "/" + AUDIO_RECORDER_TEMP_FILE);
}
private void startRecording(){
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
RECORDER_SAMPLERATE, RECORDER_CHANNELS,RECORDER_AUDIO_ENCODING, bufferSize);
int i = recorder.getState();
if(i==1)
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
@Override
public void run() {
writeAudioDataToFile();
}
},"AudioRecorder Thread");
recordingThread.start();
}
private void writeAudioDataToFile(){
byte data[] = new byte[bufferSize];
String filename = getTempFilename();
FileOutputStream os = null;
try {
os = new FileOutputStream(filename);
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
int read = 0;
if(null != os){
while(isRecording){
read = recorder.read(data, 0, bufferSize);
if(AudioRecord.ERROR_INVALID_OPERATION != read){
try {
os.write(data);
} catch (IOException e) {
e.printStackTrace();
}
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
private void stopRecording(){
if(null != recorder){
isRecording = false;
int i = recorder.getState();
if(i==1)
recorder.stop();
recorder.release();
recorder = null;
recordingThread = null;
}
copyWaveFile(getTempFilename(),getFilename());
deleteTempFile();
}
private void deleteTempFile() {
File file = new File(getTempFilename());
file.delete();
}
private void copyWaveFile(String inFilename,String outFilename){
FileInputStream in = null;
FileOutputStream out = null;
long totalAudioLen = 0;
long totalDataLen = totalAudioLen + 36;
long longSampleRate = RECORDER_SAMPLERATE;
int channels = 1;
long byteRate = RECORDER_BPP * RECORDER_SAMPLERATE * channels/8;
byte[] data = new byte[bufferSize];
try {
in = new FileInputStream(inFilename);
out = new FileOutputStream(outFilename);
totalAudioLen = in.getChannel().size();
totalDataLen = totalAudioLen + 36;
// AppLog.logString("File size: " + totalDataLen);
WriteWaveFileHeader(out, totalAudioLen, totalDataLen,
longSampleRate, channels, byteRate);
while(in.read(data) != -1){
out.write(data);
}
in.close();
out.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
private void WriteWaveFileHeader(
FileOutputStream out, long totalAudioLen,
long totalDataLen, long longSampleRate, int channels,
long byteRate) throws IOException {
byte[] header = new byte[44];
header[0] = 'R'; // RIFF/WAVE header
header[1] = 'I';
header[2] = 'F';
header[3] = 'F';
header[4] = (byte) (totalDataLen & 0xff);
header[5] = (byte) ((totalDataLen >> 8) & 0xff);
header[6] = (byte) ((totalDataLen >> 16) & 0xff);
header[7] = (byte) ((totalDataLen >> 24) & 0xff);
header[8] = 'W';
header[9] = 'A';
header[10] = 'V';
header[11] = 'E';
header[12] = 'f'; // 'fmt ' chunk
header[13] = 'm';
header[14] = 't';
header[15] = ' ';
header[16] = 16; // 4 bytes: size of 'fmt ' chunk
header[17] = 0;
header[18] = 0;
header[19] = 0;
header[20] = 1; // format = 1
header[21] = 0;
header[22] = (byte) channels;
header[23] = 0;
header[24] = (byte) (longSampleRate & 0xff);
header[25] = (byte) ((longSampleRate >> 8) & 0xff);
header[26] = (byte) ((longSampleRate >> 16) & 0xff);
header[27] = (byte) ((longSampleRate >> 24) & 0xff);
header[28] = (byte) (byteRate & 0xff);
header[29] = (byte) ((byteRate >> 8) & 0xff);
header[30] = (byte) ((byteRate >> 16) & 0xff);
header[31] = (byte) ((byteRate >> 24) & 0xff);
header[32] = (byte) (2 * 16 / 8); // block align
header[33] = 0;
header[34] = RECORDER_BPP; // bits per sample
header[35] = 0;
header[36] = 'd';
header[37] = 'a';
header[38] = 't';
header[39] = 'a';
header[40] = (byte) (totalAudioLen & 0xff);
header[41] = (byte) ((totalAudioLen >> 8) & 0xff);
header[42] = (byte) ((totalAudioLen >> 16) & 0xff);
header[43] = (byte) ((totalAudioLen >> 24) & 0xff);
out.write(header, 0, 44);
}
private View.OnClickListener btnClick = new View.OnClickListener() {
@Override
public void onClick(View v) {
switch(v.getId()){
case R.id.btnStart:{
// AppLog.logString("Start Recording");
enableButtons(true);
startRecording();
break;
}
case R.id.btnStop:{
// AppLog.logString("Start Recording");
enableButtons(false);
stopRecording();
break;
}
}
}
};
but the recording is always stereo, 32-bit, even though I used AudioFormat.CHANNEL_IN_MONO and AudioFormat.ENCODING_PCM_16BIT.
Can someone help me?
According to what was discussed in the comments, if you're getting a 528 kB file from ~3 seconds of audio, then one of two things is happening: either you're recording in stereo (the more likely of the two, since you say you see two channels in Audacity and 32-bit PCM encoding isn't supported on Android), or you're recording 32-bit samples on one channel. It is strange that Audacity would report the sample size as 32 bits/sample.
My reasoning is that
(44100 Hz * 3 seconds * 32 bits/sample) = 528 kB
and
2 channels * (44100 Hz * 3 seconds * 16 bits/sample) = 528 kB
Since you set up your AudioRecord object properly, I'm not sure where the second channel is coming from. I'd recommend manipulating the values of the byteRate long in the copyWaveFile method and observing the effect on your audio files.
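For reference, the byte rates implied by the two hypotheses (simple arithmetic, assuming 44100 Hz and 16 bits/sample as in the question's constants):
// mono:   44100 * 16 / 8 * 1 = 88200 bytes/s
// stereo: 44100 * 16 / 8 * 2 = 176400 bytes/s
long byteRate = RECORDER_SAMPLERATE * RECORDER_BPP / 8 * channels;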
It seems like your header contains an error:
header[32] = (byte) (2 * 16 / 8); // block align
It should be:
header[32] = (byte) (1 * 16 / 8); // block align
since block align is channels * RECORDER_BPP / 8. I've tried it and it seems to work.
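The same fix, written so the field is derived rather than hard-coded (a small generalization on my part, not from the original answer):
// Block align = channels * bytes per sample; correct for mono and stereo alike.
header[32] = (byte) (channels * RECORDER_BPP / 8);
header[33] = 0;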

Android concat 2 audio (wav or mp3) files

I need to concatenate 2 or more audio (wav or mp3) files into one audio file.
How can I do that in Android?
Thanks in advance.
The code section below works for however many files have been selected.
Put your two wav files in /sdcard0/SSoftAudioFiles/ as 1.wav and 2.wav, then just execute this method:
public void SSoftAudCombine()
{
try {
String[] selection=new String[2];
selection[0]="1.wav";
selection[1]="2.wav";
DataOutputStream amplifyOutputStream = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(Environment.getExternalStorageDirectory() + "/SSoftAudioFiles/" + year +"-"+ month +"-"+ date +"-"+ hour+"-" + min +"-"+ sec+"ME.wav")));
DataInputStream[] mergeFilesStream = new DataInputStream[selection.length];
long[] sizes=new long[selection.length];
for(int i=0; i<selection.length; i++) {
File file = new File(Environment.getExternalStorageDirectory() + "/SSoftAudioFiles/" +selection[i]);
sizes[i] = (file.length()-44)/2;
}
for(int i =0; i<selection.length; i++) {
mergeFilesStream[i] =new DataInputStream(new BufferedInputStream(new FileInputStream(Environment.getExternalStorageDirectory() + "/SSoftAudioFiles/" +selection[i])));
if(i == selection.length-1) {
mergeFilesStream[i].skip(24);
byte[] sampleRt = new byte[4];
mergeFilesStream[i].read(sampleRt);
ByteBuffer bbInt = ByteBuffer.wrap(sampleRt).order(ByteOrder.LITTLE_ENDIAN);
RECORDER_SAMPLERATE = bbInt.getInt();
mergeFilesStream[i].skip(16);
}
else {
mergeFilesStream[i].skip(44);
}
}
for(int b=0; b<selection.length; b++) {
for(int i=0; i<(int)sizes[b]; i++) {
byte[] dataBytes = new byte[2];
try {
dataBytes[0] = mergeFilesStream[b].readByte();
dataBytes[1] = mergeFilesStream[b].readByte();
}
catch (EOFException e) {
amplifyOutputStream.close();
}
short dataInShort = ByteBuffer.wrap(dataBytes).order(ByteOrder.LITTLE_ENDIAN).getShort();
float dataInFloat= (float) dataInShort/37268.0f;
short outputSample = (short)(dataInFloat * 37268.0f);
byte[] dataFin = new byte[2];
dataFin[0] = (byte) (outputSample & 0xff);
dataFin[1] = (byte)((outputSample >> 8) & 0xff);
amplifyOutputStream.write(dataFin, 0 , 2);
}
}
amplifyOutputStream.close();
for(int i=0; i<selection.length; i++) {
mergeFilesStream[i].close();
}
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
long size =0;
try {
FileInputStream fileSize = new FileInputStream(Environment.getExternalStorageDirectory() + "/SSoftAudioFiles/"+year +"-"+ month +"-"+ date +"-"+ hour+"-" + min +"-"+ sec+"ME.wav");
size = fileSize.getChannel().size();
fileSize.close();
} catch (FileNotFoundException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
final int RECORDER_BPP = 16;
long datasize=size+36;
long byteRate = (RECORDER_BPP * RECORDER_SAMPLERATE)/8;
long longSampleRate = RECORDER_SAMPLERATE;
byte[] header = new byte[44];
header[0] = 'R'; // RIFF/WAVE header
header[1] = 'I';
header[2] = 'F';
header[3] = 'F';
header[4] = (byte) (datasize & 0xff);
header[5] = (byte) ((datasize >> 8) & 0xff);
header[6] = (byte) ((datasize >> 16) & 0xff);
header[7] = (byte) ((datasize >> 24) & 0xff);
header[8] = 'W';
header[9] = 'A';
header[10] = 'V';
header[11] = 'E';
header[12] = 'f'; // 'fmt ' chunk
header[13] = 'm';
header[14] = 't';
header[15] = ' ';
header[16] = 16; // 4 bytes: size of 'fmt ' chunk
header[17] = 0;
header[18] = 0;
header[19] = 0;
header[20] = 1; // format = 1
header[21] = 0;
header[22] = (byte) 1;
header[23] = 0;
header[24] = (byte) (longSampleRate & 0xff);
header[25] = (byte) ((longSampleRate >> 8) & 0xff);
header[26] = (byte) ((longSampleRate >> 16) & 0xff);
header[27] = (byte) ((longSampleRate >> 24) & 0xff);
header[28] = (byte) (byteRate & 0xff);
header[29] = (byte) ((byteRate >> 8) & 0xff);
header[30] = (byte) ((byteRate >> 16) & 0xff);
header[31] = (byte) ((byteRate >> 24) & 0xff);
header[32] = (byte) ((RECORDER_BPP) / 8); // block align
header[33] = 0;
header[34] = RECORDER_BPP; // bits per sample
header[35] = 0;
header[36] = 'd';
header[37] = 'a';
header[38] = 't';
header[39] = 'a';
header[40] = (byte) (size & 0xff);
header[41] = (byte) ((size >> 8) & 0xff);
header[42] = (byte) ((size >> 16) & 0xff);
header[43] = (byte) ((size >> 24) & 0xff);
// out.write(header, 0, 44);
try {
RandomAccessFile rFile = new RandomAccessFile(Environment.getExternalStorageDirectory() + "/SSoftAudioFiles/" +year +"-"+ month +"-"+ date +"-"+ hour+"-" + min +"-"+ sec+ "ME.wav", "rw");
rFile.seek(0);
rFile.write(header);
rFile.close();
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
It will concatenate your audio files and save the combined file to /sdcard0/SSoftAudioFiles/...ME.wav.
The code below will also help you concatenate two audio files:
try {
FileInputStream fis1 = new FileInputStream("/sdcard/MJdangerous.wav");
FileInputStream fis2 = new FileInputStream("/sdcard/MJBad.wav");
SequenceInputStream sis = new SequenceInputStream(fis1,fis2);
FileOutputStream fos = new FileOutputStream(new File("/sdcard/MJdangerousMJBad.wav"));
int temp;
try {
while ((temp = sis.read())!= -1){
fos.write(temp);
}
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
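Note that this copies the second file's 44-byte header into the middle of the output and leaves the first header's size fields stale, so many players will stop early or click at the join. A hedged sketch that fixes both, assuming the two files share the same PCM format:
static void concatWavs(String first, String second, String outPath) throws IOException {
    try (FileInputStream fis1 = new FileInputStream(first);
         FileInputStream fis2 = new FileInputStream(second);
         FileOutputStream fos = new FileOutputStream(outPath)) {
        fis2.skip(44); // drop the second header so it is not played as audio
        SequenceInputStream sis = new SequenceInputStream(fis1, fis2);
        byte[] buf = new byte[8192];
        int n;
        while ((n = sis.read(buf)) != -1) {
            fos.write(buf, 0, n); // buffered copy instead of byte-at-a-time
        }
    }
    // The output begins with the first file's header, whose sizes are stale;
    // patch the RIFF chunk size (offset 4) and the data chunk size (offset 40).
    try (RandomAccessFile raf = new RandomAccessFile(outPath, "rw")) {
        long audioLen = raf.length() - 44;
        raf.seek(4);
        raf.write(le32(audioLen + 36));
        raf.seek(40);
        raf.write(le32(audioLen));
    }
}

static byte[] le32(long v) { // 32-bit little-endian encoding
    return new byte[] { (byte) v, (byte) (v >> 8), (byte) (v >> 16), (byte) (v >> 24) };
}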
