I am developing an Android app that records the user's speech. For this I have used the Android AudioRecord API.
Currently the PCM file (the recorded audio file, recordedAudio.pcm) is generated successfully on the SD card, but I am not able to play that file. I also tried on a PC with Windows Media Player and some other players, but nothing helps.
Following is my code snippet.
private int minBufSize;
private AudioRecord recorder;
private int sampleRate = 44100;
private int channelConfig = AudioFormat.CHANNEL_IN_MONO;
private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
private boolean status;
minBufSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig,
audioFormat);
status = true;
startStreaming();
public void startStreaming() {
Thread streamThread = new Thread(new Runnable() {
@Override
public void run() {
try {
String filePath = Environment.getExternalStorageDirectory()
.getPath() + "/audioRecord.pcm";
FileOutputStream fileOutputStreamObj = null;
try {
fileOutputStreamObj = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
Log.e(TAG, "Exception" + e.getMessage());
}
// short[] sData = new short[minBufSize];
byte[] buffer = new byte[minBufSize];
// recorder = findAudioRecord();
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
sampleRate, channelConfig, audioFormat, minBufSize);
Log.d(TAG, "Recorder initialized");
recorder.startRecording();
while (status) {
// reading data from MIC into buffer
minBufSize = recorder.read(buffer, 0, buffer.length);
try {
// writes the data to file from buffer
// stores the voice buffer
// byte bData[] = short2byte(sData);
fileOutputStreamObj.write(buffer, 0, buffer.length);
} catch (IOException e) {
e.printStackTrace();
Log.e(TAG, "Exception" + e.getMessage());
}
// mConnection.sendBinaryMessage(buffer);
System.out.println("MinBufferSize: " + minBufSize);
}
} catch (Exception e) {
e.printStackTrace();
Log.e(TAG, "Exception" + e.getMessage());
}
}
});
streamThread.start();
}
Please help me on this. Thanks in advance.
You don't have to convert it to WAV to play it.
AudioTrack can play the recorded raw audio directly.
Following is a code snippet to record audio into a file using AudioRecord and play it back using the AudioTrack API.
The operation is controlled by the user with buttons.
Code
private int BufferSize;
byte[] buffer = new byte[BufferSize];
/* AudioRecord and AudioTrack Object */
private AudioRecord record = null;
private AudioTrack track = null;
/* Audio Configuration */
private int sampleRate = 44100;
private int channelConfig = AudioFormat.CHANNEL_IN_MONO;
private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
private boolean isRecording = true;
private Thread recordingThread = null;
The audio configuration can vary from device to device.
Refer to this question.
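If a device rejects a given combination, one common approach (a sketch, not part of the original answer; the findAudioRecord name is just illustrative) is to probe candidate configurations until an AudioRecord initializes:
/* Hypothetical helper: probe common sample rates/channel configs until one initializes on this device */
private AudioRecord findAudioRecord() {
    int[] rates = { 44100, 22050, 16000, 11025, 8000 };
    int[] channels = { AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_IN_STEREO };
    for (int rate : rates) {
        for (int channel : channels) {
            int size = AudioRecord.getMinBufferSize(rate, channel, AudioFormat.ENCODING_PCM_16BIT);
            if (size == AudioRecord.ERROR_BAD_VALUE)
                continue; // combination not supported at all
            AudioRecord rec = new AudioRecord(MediaRecorder.AudioSource.MIC,
                    rate, channel, AudioFormat.ENCODING_PCM_16BIT, size);
            if (rec.getState() == AudioRecord.STATE_INITIALIZED)
                return rec; // found a working configuration
            rec.release(); // clean up and keep looking
        }
    }
    return null;
}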
The GUI has three buttons: Record, Stop, and Play.
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
setButtonHandlers();
/* Set Button Visibility */
enableButton(R.id.btnStartRec,true);
enableButton(R.id.btnStopRec,false);
enableButton(R.id.btnStartPlay,false);
BufferSize = AudioRecord.getMinBufferSize(sampleRate,
channelConfig, audioFormat);
}
/* Function to Enable/Disable Buttons */
private void enableButton(int id,boolean isEnable){
((Button)findViewById(id)).setEnabled(isEnable);
}
/* Assign OnClickListener to Buttons */
private void setButtonHandlers() {
((Button)findViewById(R.id.btnStartRec)).setOnClickListener(btnClick);
((Button)findViewById(R.id.btnStopRec)).setOnClickListener(btnClick);
((Button)findViewById(R.id.btnStartPlay)).setOnClickListener(btnClick);
}
Handling Button click:
private View.OnClickListener btnClick = new View.OnClickListener() {
@Override
public void onClick(View v) {
switch(v.getId()){
case R.id.btnStartRec:{
Log.d(TAG, "Start Recording");
enableButton(R.id.btnStartRec,false);
enableButton(R.id.btnStopRec,true);
startRecording();
break;
}
case R.id.btnStopRec:{
Log.d(TAG, "Stop Recording");
enableButton(R.id.btnStartRec,true);
enableButton(R.id.btnStopRec,false);
stopRecording();
enableButton(R.id.btnStartPlay,true);
break;
}
case R.id.btnStartPlay:{
Log.d(TAG, "Play Recording");
enableButton(R.id.btnStartRec,false);
enableButton(R.id.btnStopRec,false);
startPlaying();
break;
}
}
}
};
Code for Start Recording
private void startRecording()
{
record = new AudioRecord(AudioSource.DEFAULT, sampleRate,
channelConfig, audioFormat, BufferSize);
if (AudioRecord.STATE_INITIALIZED == record.getState())
record.startRecording();
isRecording = true;
/* Run a thread for Recording */
recordingThread = new Thread(new Runnable() {
@Override
public void run() {
writeAudioDataToFile();
}
},"AudioRecorder Thread");
recordingThread.start();
}
private void writeAudioDataToFile()
{
byte data[] = new byte[BufferSize];
/* Record audio to following file */
String filename = "/sdcard/audiofile.pcm";
FileOutputStream os = null;
try {
os = new FileOutputStream(filename);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
int read_bytes = 0;
if(null != os){
while(isRecording)
{
read_bytes = record.read(data, 0, BufferSize);
if(AudioRecord.ERROR_INVALID_OPERATION != read_bytes){
try {
os.write(data, 0, read_bytes); // write only the bytes actually read
} catch (IOException e) {
e.printStackTrace();
}
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
Code for Stop Recording
private void stopRecording()
{
if(null != record)
{
isRecording = false;
if (AudioRecord.STATE_INITIALIZED == record.getState())
{
record.stop();
record.release();
Log.d(TAG, "===== Recording Audio Completed ===== ");
}
record = null;
recordingThread = null;
}
}
Code for Playing the Audio file:
public void startPlaying()
{
enableButton(R.id.btnStartPlay,false);
int minBufferSize = AudioTrack.getMinBufferSize(sampleRate,
AudioFormat.CHANNEL_OUT_MONO,
AudioFormat.ENCODING_PCM_16BIT);
track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
AudioFormat.CHANNEL_OUT_MONO,
AudioFormat.ENCODING_PCM_16BIT, minBufferSize,
AudioTrack.MODE_STREAM);
int i = 0;
byte[] temp = new byte[minBufferSize];
try {
FileInputStream fin = new FileInputStream("/sdcard/audiofile.pcm");
Log.d(TAG, "===== Opening File for Playing : /sdcard/audiofile.pcm ===== ");
DataInputStream dis = new DataInputStream(fin);
track.play();
while((i = dis.read(temp, 0, minBufferSize)) > -1)
{
track.write(temp, 0, i);
}
Log.d(TAG, "===== Playing Audio Completed ===== ");
track.stop();
track.release();
dis.close();
fin.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
enableButton(R.id.btnStartRec,true);
}
Please include the following in AndroidManifest.xml
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" > </uses-permission>
<uses-permission android:name="android.permission.RECORD_AUDIO" > </uses-permission>
The activity_main.xml simply defines the three buttons (Record, Stop, and Play), and strings.xml their labels.
The above code is working and tested.
You can also do the same without a file, using an intermediate buffer.
See: Audio Recording and Streaming in Android
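A minimal sketch of that idea (assuming the same record, isRecording, BufferSize and sampleRate fields as above), holding the captured bytes in memory instead of a file:
/* Capture into an in-memory buffer, then play it back with AudioTrack - no file involved */
ByteArrayOutputStream pcmBuffer = new ByteArrayOutputStream();
byte[] chunk = new byte[BufferSize];
record.startRecording();
while (isRecording) {
    int read = record.read(chunk, 0, chunk.length);
    if (read > 0)
        pcmBuffer.write(chunk, 0, read);
}
record.stop();

byte[] pcm = pcmBuffer.toByteArray();
int outBufSize = AudioTrack.getMinBufferSize(sampleRate,
        AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
        AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
        outBufSize, AudioTrack.MODE_STREAM);
track.play();
track.write(pcm, 0, pcm.length); // blocking write of the whole buffer
track.stop();
track.release();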
Yes, finally I found the answer, with the clue from Michael's comment above.
I am posting the working code here.
The client-side code is as follows.
From the client side I am streaming the audio data to the WebSocket server.
private int minBufSize;
private AudioRecord recorder;
private int sampleRate = 44100;
private int channelConfig = AudioFormat.CHANNEL_IN_MONO;
private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
minBufSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig,
audioFormat);
startStreaming();
public void startStreaming() {
Thread streamThread = new Thread(new Runnable() {
@Override
public void run() {
try {
byte[] buffer = new byte[minBufSize];
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
sampleRate, channelConfig, audioFormat, minBufSize);
Log.d(TAG, "Recorder initialized");
recorder.startRecording();
while (status) {
// reading data from MIC into buffer
minBufSize = recorder.read(buffer, 0, buffer.length);
mConnection.sendBinaryMessage(buffer);
System.out.println("MinBufferSize: " + minBufSize);
}
} catch (Exception e) {
e.printStackTrace();
Log.e(TAG, "Exception" + e.getMessage());
}
}
});
streamThread.start();
}
The server-side code implementation is as follows.
First the server creates the .pcm file from the streamed data; then, from that PCM file, it creates the WAV file by adding a header.
@OnMessage
public void onMessage(byte[] data, boolean arg1)
{
if ((!this.currentCommand.equals("stop")) &&
(this.currentCommand.equals("start")))
try {
System.out.println("Starting new recording.");
FileOutputStream fOut = new FileOutputStream(this.f2, true);
fOut.write(data);
fOut.close();
properWAV(this.f2, 111133.0F);
}
catch (Exception e) {
e.printStackTrace();
}
}
private void properWAV(File fileToConvert, float newRecordingID)
{
try {
long mySubChunk1Size = 16L;
int myBitsPerSample = 16;
int myFormat = 1;
long myChannels = 1L;
long mySampleRate = 44100L;
long myByteRate = mySampleRate * myChannels * myBitsPerSample / 8L;
int myBlockAlign = (int)(myChannels * myBitsPerSample / 8L);
byte[] clipData = getBytesFromFile(fileToConvert);
long myDataSize = clipData.length;
long myChunk2Size = myDataSize; // clipData already holds raw PCM bytes, so the data chunk size is just its length
long myChunkSize = 36L + myChunk2Size;
OutputStream os = new FileOutputStream(new File("D:/audio/" + newRecordingID + ".wav"));
BufferedOutputStream bos = new BufferedOutputStream(os);
DataOutputStream outFile = new DataOutputStream(bos);
outFile.writeBytes("RIFF");
outFile.write(intToByteArray((int)myChunkSize), 0, 4);
outFile.writeBytes("WAVE");
outFile.writeBytes("fmt ");
outFile.write(intToByteArray((int)mySubChunk1Size), 0, 4);
outFile.write(shortToByteArray((short)myFormat), 0, 2);
outFile.write(shortToByteArray((short)(int)myChannels), 0, 2);
outFile.write(intToByteArray((int)mySampleRate), 0, 4);
outFile.write(intToByteArray((int)myByteRate), 0, 4);
outFile.write(shortToByteArray((short)myBlockAlign), 0, 2);
outFile.write(shortToByteArray((short)myBitsPerSample), 0, 2);
outFile.writeBytes("data");
outFile.write(intToByteArray((int)myDataSize), 0, 4);
outFile.write(clipData);
outFile.flush();
outFile.close();
}
catch (IOException e) {
e.printStackTrace();
}
}
private static byte[] intToByteArray(int i)
{
byte[] b = new byte[4];
b[0] = (byte)(i & 0xFF);
b[1] = (byte)(i >> 8 & 0xFF);
b[2] = (byte)(i >> 16 & 0xFF);
b[3] = (byte)(i >> 24 & 0xFF);
return b;
}
public static byte[] shortToByteArray(short data)
{
return new byte[] { (byte)(data & 0xFF), (byte)(data >>> 8 & 0xFF) };
}
public byte[] getBytesFromFile(File file)
throws IOException
{
byte[] buffer = new byte[(int)file.length()];
InputStream ios = null;
try {
ios = new FileInputStream(file);
if (ios.read(buffer) == -1)
throw new IOException("EOF reached while trying to read the whole file");
}
finally {
try {
if (ios != null)
ios.close();
}
catch (IOException localIOException)
{
}
}
return buffer;
}
Hope this saves many developers' time.
I am using Android AudioRecord for my application, but the file created with it is not playable. I tried every possible format and sampling rate, but none of them is playable.
Following is the code:
I tried all possible output streams (DataOutputStream, BufferedOutputStream), all possible formats (.3gp, .amr, .wav), and sampling rates (8000, 44100, etc.).
private static final int RECORDER_CHANNEL = AudioFormat.CHANNEL_IN_MONO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private static final int SAMPLE_RATE = 44100;
private boolean isRecordingP = false;
int minBufferSizeInBytes;
private void startRecordingP(String fileName) {
Debug.d(TAG,"||| startRecordingP |||");
minBufferSizeInBytes = AudioRecord.getMinBufferSize( SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT );
Debug.d(TAG,"getMinBufferSize size result = "+minBufferSizeInBytes);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.VOICE_COMMUNICATION,
SAMPLE_RATE, RECORDER_CHANNEL ,
RECORDER_AUDIO_ENCODING, minBufferSizeInBytes * 2);
audioRecord.startRecording();
isRecordingP = true;
mServiceHandler.postDelayed(new Runnable() {
@Override
public void run() {
writeAudioDataToFile(fileName);
}
},0);
}
private void stopRecordingP() {
// stops the recording activity
Debug.d(TAG,"||| stopRecordingP |||");
isRecordingP = false;
if (null != audioRecord) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
}
}
private void writeAudioDataToFile(String filePath) {
String recordingPathFolder = RecordingUtil.getRecordingPath();
File dir = new File(recordingPathFolder);
if (!dir.exists()) {
dir.mkdirs();
}
File file = null;
try {
file = File.createTempFile("call_" + filePath + "_", ".wav", dir);
} catch (IOException e) {
e.printStackTrace();
}
int recBufferByteSize = minBufferSizeInBytes * 2;
byte[] recBuffer = new byte[recBufferByteSize];
FileOutputStream os = null;
try {
os = new FileOutputStream(file.getAbsolutePath());
} catch (FileNotFoundException e) {
e.printStackTrace();
}
DataOutputStream dos = new DataOutputStream(os);
while (isRecordingP) {
Debug.d(TAG, "recording state "+audioRecord.getRecordingState()+" format "+audioRecord.getAudioFormat()
+" channel count "+audioRecord.getChannelCount()+ " sample rate = "+audioRecord.getSampleRate());
int bytesRecorded = audioRecord.read(recBuffer, 0, minBufferSizeInBytes);
if (bytesRecorded == AudioRecord.ERROR_INVALID_OPERATION || bytesRecorded == AudioRecord.ERROR_BAD_VALUE) {
Debug.d(TAG, "error "+bytesRecorded);
continue;
}
Debug.d(TAG,"writing to file "+bytesRecorded);
try {
if (bytesRecorded != 0 && bytesRecorded != -1) {
dos.write(recBuffer, 0, bytesRecorded);
} else {
break;
}
} catch (IOException e) {
e.printStackTrace();
}
}
Debug.d(TAG, "saved at : "+file.getAbsolutePath());
try {
os.flush();
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
The file is not playable.
I am getting PCM streams through an ethernet port. So far, I am able to capture the packets and extract the pcm_payload data from them.
How do I play this raw PCM data on Android? The PCM data is a 16-bit, 2-channel, 44.1 kHz stream.
I am new to both Android application programming and audio programming. Sorry if this is a trivial question.
You can use AudioTrack to play PCM data!
Maybe like this:
int bufsize = AudioTrack.getMinBufferSize(44100,
AudioFormat.CHANNEL_OUT_STEREO,
AudioFormat.ENCODING_PCM_16BIT);
AudioTrack audio = new AudioTrack(AudioManager.STREAM_MUSIC,
44100, //sample rate
AudioFormat.CHANNEL_OUT_STEREO, //2 channel
AudioFormat.ENCODING_PCM_16BIT, // 16-bit
bufsize,
AudioTrack.MODE_STREAM );
audio.play();
Then invoke audio.write() to write your PCM data.
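For example, if the PCM payload arrives as byte[] packets (as described in the question), each packet can be written straight to the track; onPcmPacket here is just a placeholder for whatever your ethernet capture hands you:
/* Feed each received PCM packet to the track; write() blocks until the data is queued */
void onPcmPacket(byte[] packet, int length) {
    audio.write(packet, 0, length);
}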
Here is my solution: write the stream to a file and play it.
public class AudioTrackPlayer {
private String pathAudio;
private AudioTrack audioPlayer;
private Thread mThread;
private int bytesread = 0, ret = 0;
private int size;
private FileInputStream in = null;
private byte[] byteData = null;
private int count = 512 * 1024; // 512 kb
private boolean isPlay = true;
private boolean isLooping = false;
private static Handler mHandler;
public AudioTrackPlayer() {
}
public void prepare(String pathAudio){
this.pathAudio = pathAudio;
mHandler = new Handler();
}
public void play(){
stop();
isPlay = true;
bytesread = 0;
ret = 0;
if (pathAudio == null)
return;
audioPlayer = createAudioPlayer();
if (audioPlayer == null) return;
audioPlayer.play();
mThread = new Thread(new PlayerProcess());
mThread.start();
}
private final Runnable mLopingRunnable = new Runnable() {
@Override
public void run() {
play();
}
};
private AudioTrack createAudioPlayer(){
int intSize = android.media.AudioTrack.getMinBufferSize(44100, AudioFormat.CHANNEL_OUT_STEREO,
AudioFormat.ENCODING_PCM_16BIT);
AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 44100, AudioFormat.CHANNEL_OUT_STEREO,
AudioFormat.ENCODING_PCM_16BIT, intSize, AudioTrack.MODE_STREAM);
if (audioTrack == null) {
Log.d("TCAudio", "audio track is not initialised ");
return null;
}
File file = null;
file = new File(pathAudio);
byteData = new byte[(int) count];
try {
in = new FileInputStream(file);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
size = (int) file.length();
return audioTrack;
}
private class PlayerProcess implements Runnable{
@Override
public void run() {
while (bytesread < size && isPlay) {
if (Thread.currentThread().isInterrupted()) {
break;
}
try {
ret = in.read(byteData, 0, count);
} catch (IOException e) {
e.printStackTrace();
}
if (ret != -1) { // Write the byte array to the track
audioPlayer.write(byteData,0, ret);
bytesread += ret;
} else break;
}
try {
in.close();
} catch (IOException e) {
e.printStackTrace();
}
if (audioPlayer!=null){
if (audioPlayer.getPlayState() != AudioTrack.PLAYSTATE_STOPPED) {
audioPlayer.stop();
audioPlayer.release();
mThread = null;
}
}
if (isLooping && isPlay ) mHandler.postDelayed(mLopingRunnable,100);
}
}
public void setLooping(){
isLooping = !isLooping;
}
public void pause(){
}
public void stop(){
isPlay = false;
if (mThread != null) {
mThread.interrupt();
mThread = null;
}
if (audioPlayer != null) {
audioPlayer.stop();
audioPlayer.release();
audioPlayer = null;
}
}
public void reset(){
}
}
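A minimal usage sketch of the class above (the file path is only an example):
AudioTrackPlayer player = new AudioTrackPlayer();
player.prepare("/sdcard/audiofile.pcm"); // path is just an example
player.play();  // starts a background thread that streams the file to AudioTrack
// ... later ...
player.stop();  // interrupts the thread and releases the AudioTrack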
I am developing an Android app. In my app I need to record audio using AudioRecord and play it back, but an error is thrown when I play the recorded PCM file. The main problem is that I cannot even log the error, because I have to test on a real device and cannot run on the emulator; when I run on the emulator and record, my application crashes.
This is my activity
public class MainActivity extends AppCompatActivity {
Boolean recording;
private Thread recordingThread = null;
DataOutputStream dataOutputStream;
short[] audioData;
private Button btnPlay,btnStop,btnRecord;
private String outputFile = null;
//outputFile = Environment.getExternalStorageDirectory().getAbsolutePath()+"/recording.3gp";
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
initialize();
initViews();
setUpViews();
}
private void initialize()
{
}
private void initViews()
{
btnPlay = (Button)findViewById(R.id.btn_play);
btnRecord = (Button)findViewById(R.id.btn_record);
btnStop = (Button)findViewById(R.id.btn_stop);
}
private void setUpViews()
{
btnRecord.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
startRecord();
}
});
btnStop.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
recording = false;
Toast.makeText(getBaseContext(),"Stopped",Toast.LENGTH_SHORT).show();
}
});
btnPlay.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
playRecord();
}
});
}
private void startRecord(){
File file = new File(Environment.getExternalStorageDirectory(), "test.pcm");
try {
file.createNewFile();
OutputStream outputStream = new FileOutputStream(file);
BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(outputStream);
dataOutputStream = new DataOutputStream(bufferedOutputStream);
final int minBufferSize = AudioRecord.getMinBufferSize(11025,
AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT);
audioData = new short[minBufferSize];
final AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
11025,
AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT,
minBufferSize);
audioRecord.startRecording();
Toast.makeText(getBaseContext(),"Recording",Toast.LENGTH_SHORT).show();
recordingThread = new Thread(new Runnable() {
public void run() {
while(recording){
int numberOfShort = audioRecord.read(audioData, 0, minBufferSize);
for(int i = 0; i < numberOfShort; i++){
try{
dataOutputStream.writeShort(audioData[i]);
}
catch (IOException e)
{
Toast.makeText(getBaseContext(),e.getMessage(),Toast.LENGTH_SHORT).show();
}
}
}
}
}, "AudioRecorder Thread");
audioRecord.stop();
dataOutputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
private void playRecord(){
File file = new File(Environment.getExternalStorageDirectory(), "test.pcm");
int shortSizeInBytes = Short.SIZE/Byte.SIZE;
int bufferSizeInBytes = (int)(file.length()/shortSizeInBytes);
short[] audioData = new short[bufferSizeInBytes];
try {
InputStream inputStream = new FileInputStream(file);
BufferedInputStream bufferedInputStream = new BufferedInputStream(inputStream);
DataInputStream dataInputStream = new DataInputStream(bufferedInputStream);
int i = 0;
while(dataInputStream.available() > 0){
audioData[i] = dataInputStream.readShort();
i++;
}
dataInputStream.close();
AudioTrack audioTrack = new AudioTrack(
AudioManager.STREAM_MUSIC,
11025,
AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT,
bufferSizeInBytes,
AudioTrack.MODE_STREAM);
audioTrack.play();
audioTrack.write(audioData, 0, bufferSizeInBytes);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
}
The problem is with playing the recorded PCM. When I click the play button, the application throws a fatal error and stops. I cannot log the error either. The PCM file really does exist at the defined path. What is wrong with my code? Why can it not play?
As you can see below, the PCM file is created successfully after recording.
Without seeing the Logcat, it is hard to figure out the issue in your code. Take a look at this working code for playing PCM files using AudioTrack:
private void PlayAudioFileViaAudioTrack(String filePath) throws IOException
{
// We keep temporarily filePath globally as we have only two sample sounds now..
if (filePath==null)
return;
int intSize = android.media.AudioTrack.getMinBufferSize(44100, AudioFormat.CHANNEL_CONFIGURATION_STEREO,
AudioFormat.ENCODING_PCM_16BIT);
AudioTrack at = new AudioTrack(AudioManager.STREAM_MUSIC, 44100, AudioFormat.CHANNEL_CONFIGURATION_STEREO,
AudioFormat.ENCODING_PCM_16BIT, intSize, AudioTrack.MODE_STREAM);
if (at==null){
Log.d("TCAudio", "audio track is not initialised ");
return;
}
int count = 512 * 1024; // 512 kb
//Reading the file..
byte[] byteData = null;
File file = null;
file = new File(filePath);
byteData = new byte[(int)count];
FileInputStream in = null;
try {
in = new FileInputStream( file );
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
int bytesread = 0, ret = 0;
int size = (int) file.length();
at.play();
while (bytesread < size) {
ret = in.read( byteData,0, count);
if (ret != -1) { // Write the byte array to the track
at.write(byteData,0, ret);
bytesread += ret;
}
else break;
}
in.close();
at.stop();
at.release();
}
code from http://jongladwin.blogspot.co.uk/2010/03/android-play-pcmwav-audio-buffer-using.html
I have recorded the user's voice successfully using mediaplayer, and the file is stored on the SD card.
Now I want to play that voice using AudioTrack, but when I do, it makes noise.
Why is that?
Here is the code to play the sound:
private void PlayAudioFileViaAudioTrack(String filePath) throws IOException
{
// We keep temporarily filePath globally as we have only two sample sounds now..
if (filePath==null)
return;
int intSize = android.media.AudioTrack.getMinBufferSize(44100, AudioFormat.CHANNEL_CONFIGURATION_STEREO,
AudioFormat.ENCODING_PCM_16BIT);
AudioTrack at = new AudioTrack(AudioManager.STREAM_MUSIC, 44100, AudioFormat.CHANNEL_CONFIGURATION_STEREO,
AudioFormat.ENCODING_PCM_16BIT, intSize, AudioTrack.MODE_STREAM);
if (at==null){
Log.d("TCAudio", "audio track is not initialised ");
return;
}
int count = 512 * 1024; // 512 kb
//Reading the file..
byte[] byteData = null;
File file = null;
file = new File(filePath);
byteData = new byte[(int)count];
FileInputStream in = null;
try {
in = new FileInputStream( file );
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
int bytesread = 0, ret = 0;
int size = (int) file.length();
at.play();
while (bytesread < size) {
    ret = in.read(byteData, 0, count);
    if (ret != -1) { // Write the byte array to the track
        at.write(byteData, 0, ret);
        bytesread += ret;
    } else {
        break;
    }
}
in.close();
at.stop();
at.release();
}
Noise could be due to a small buffer.
Try:
int intSize = android.media.AudioTrack.getMinBufferSize(44100,
AudioFormat.CHANNEL_CONFIGURATION_STEREO,
AudioFormat.ENCODING_PCM_16BIT);
intSize *= 2;
If you work with exactly the minimum buffer size, it might result in noisy sound. Having twice the minimum size is a good practice (in my experience).
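With that correction, the track construction would look roughly like this (a sketch using the current CHANNEL_OUT_STEREO constant):
int intSize = AudioTrack.getMinBufferSize(44100,
        AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
intSize *= 2; // twice the minimum gives some headroom against underruns
AudioTrack at = new AudioTrack(AudioManager.STREAM_MUSIC, 44100,
        AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT,
        intSize, AudioTrack.MODE_STREAM);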
I have just resolved this by adjusting the buffer size; check this code, it will resolve your problem.
void playRecord(int position) {
if (position==0){
m=8000;
String folder_main = "MyVoiceChanger";
File filee = new File(Environment.getExternalStorageDirectory().getAbsolutePath() + "/" + folder_main + "/Recording.mp3");
int shortSizeInBytes = Short.SIZE / Byte.SIZE;
int bufferSizeInBytes = (int) (filee.length() / shortSizeInBytes);
short[] audioData = new short[bufferSizeInBytes];
try {
InputStream inputStream = new FileInputStream(filee);
BufferedInputStream bufferedInputStream = new BufferedInputStream(inputStream);
DataInputStream dataInputStream = new DataInputStream(bufferedInputStream);
int i = 0;
while (dataInputStream.available() > 0) {
try {
audioData[i] = dataInputStream.readShort();
i++;
}catch (EOFException e){
e.printStackTrace();
}
}
dataInputStream.close();
try {
bufferSizeInBytes = AudioTrack.getMinBufferSize(
m,
RECORDER_CHANNELS,
RECORDER_AUDIO_ENCODING
);
audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,44100, AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes, AudioTrack.MODE_STREAM);
Log.i("Buffer", "playRecord: "+bufferSizeInBytes);
final int finalBufferSizeInBytes = bufferSizeInBytes;
mediaPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
@Override
public void onPrepared(MediaPlayer mp) {
audioTrack = new AudioTrack(3, m, 2, 2, finalBufferSizeInBytes, 1);
try {
audioTrack.play();
Log.i("Usman", "Audio "+audioTrack);
}catch (Exception e){
e.printStackTrace();
}
}
});
}catch (Exception e){
e.printStackTrace();
}
mediaPlayer.prepareAsync();
audioTrack.write(audioData, 0, bufferSizeInBytes);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
}
I am trying to record and play audio at the same time on Android, but the recorded audio has a lot of noise, like zzzzz....
I want to filter the noise from the audio. My code is:
private void record() {
// Get the minimum buffer size required for the successful creation
// of an AudioRecord object.
int N = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 8000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, N * 10,
AudioTrack.MODE_STREAM);
AudioRecord audioRecorder = null;
int bufferSizeInShorts;
int shortsRead;
short audioBuffer[];
try {
bufferSizeInShorts = (N / 2);
// Initialize Audio Recorder.
audioRecorder = new AudioRecord(MediaRecorder.AudioSource.VOICE_RECOGNITION, RECORDER_SAMPLERATE, RECORDER_CHANNELS,
RECORDER_AUDIO_ENCODING, N * 10);
NoiseSuppressor.create(audioRecorder.getAudioSessionId());
// Start Recording.
audioBuffer = new short[bufferSizeInShorts];
audioRecorder.startRecording();
isRecording = true;
audioTrack.play();
while (isRecording) {
shortsRead = audioRecorder.read(audioBuffer, 0, bufferSizeInShorts);
if (shortsRead == AudioRecord.ERROR_BAD_VALUE || shortsRead == AudioRecord.ERROR_INVALID_OPERATION) {
Log.e("record()", "Error reading from microphone.");
isRecording = false;
break;
}
audioTrack.write(audioBuffer, 0, audioBuffer.length);
}
} finally {
if (audioRecorder != null) {
audioRecorder.stop();
audioRecorder.release();
}
if (audioTrack != null) {
audioTrack.stop();
audioTrack.release();
}
}
}
How can I filter the background noise so that I can hear only voices?
For clear, good-quality voice,
try using a 44100 or 16000 sample rate.
Note: a 44100 sample rate may not work on the emulator.
Also make sure you have the correct header format.
Ignore variables which are not related to this.
Using the AudioRecord class to record:
public class Mediarec extends Activity {
public static final int SAMPLE_RATE = 44100;
public static int count=0;
private AudioRecord mRecorder;
private File mRecording;
private byte[] mBuffer;
private final String startRecordingLabel = "Start recording";
private final String stopRecordingLabel = "Stop recording";
private boolean mIsRecording = false;
OnGainSelected gs;
SharedPreferences sp;
String Shared = "Shared";
String stored_gain;
AudioManager am;
protected int bitsPerSamples = 16;
private Button show_gain;
Switch bluetooth;
Button button;
@Override
public void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.fragment_mediarec);
sp = getSharedPreferences(Shared, Context.MODE_PRIVATE);
button = (Button) findViewById(R.id.start);
bluetooth = (Switch) findViewById(R.id.switch1);
initRecorder();
button.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(final View v) {
Log.d("Normal()","Recordng frm Normal MIC");
Normal();
}
});
}
protected void Normal() {
// TODO Auto-generated method stub
if (!mIsRecording ) {
button.setText(stopRecordingLabel);
mIsRecording = true;
Log.d("Normal","Rec Started");
mRecorder.startRecording();
mRecording = getFile("raw");
startBufferedWrite(mRecording);
} else {
button.setText(startRecordingLabel);
mIsRecording = false;
mRecorder.stop();
File waveFile = getFile("wav");
try {
rawToWave(mRecording, waveFile);
} catch (IOException e) {
Toast.makeText(Mediarec.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
}
Toast.makeText(Mediarec.this, "Recorded to " + waveFile.getName(),
Toast.LENGTH_SHORT).show();
}
}
#Override
public void onDestroy() {
mRecorder.release();
super.onDestroy();
}
private void initRecorder() {
int bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
mBuffer = new byte[bufferSize];
mRecorder = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT,
bufferSize);
}
private void startBufferedWrite(final File file) {
new Thread(new Runnable() {
@Override
public void run() {
DataOutputStream output = null;
try {
output = new DataOutputStream(new BufferedOutputStream(
new FileOutputStream(file)));
while (mIsRecording) {
int readSize = mRecorder.read(mBuffer, 0,
mBuffer.length);
for (int i = 0; i < readSize; i++) {
output.write(mBuffer[i]);
}
}
} catch (IOException e) {
Toast.makeText(Mediarec.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
} finally {
if (output != null) {
try {
output.flush();
} catch (IOException e) {
Toast.makeText(Mediarec.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
} finally {
try {
output.close();
} catch (IOException e) {
Toast.makeText(Mediarec.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
}
}
}
}
}
}).start();
}
private void rawToWave(final File rawFile, final File waveFile)
throws IOException {
byte[] rawData = new byte[(int) rawFile.length()];
DataInputStream input = null;
try {
input = new DataInputStream(new FileInputStream(rawFile));
input.read(rawData);
} finally {
if (input != null) {
input.close();
}
}
DataOutputStream output = null;
try {
output = new DataOutputStream(new FileOutputStream(waveFile));
// WAVE header
// see http://ccrma.stanford.edu/courses/422/projects/WaveFormat/
writeString(output, "RIFF"); // chunk id
writeInt(output, 36 + rawData.length); // chunk size
writeString(output, "WAVE"); // format
writeString(output, "fmt "); // subchunk 1 id
writeInt(output, 16); // subchunk 1 size
writeShort(output, (byte) 1); // audio format (1 = PCM)
writeShort(output, (byte) 1); // number of channels
writeInt(output, SAMPLE_RATE); // sample rate
writeInt(output, SAMPLE_RATE * 2); // byte rate
writeShort(output, (byte) 2); // block align
writeShort(output, (byte) 16); // bits per sample
writeString(output, "data"); // subchunk 2 id
writeInt(output, rawData.length); // subchunk 2 size
output.write(rawData);
} catch (Exception e) {
Toast.makeText(getApplicationContext(), "" + e, Toast.LENGTH_LONG)
.show();
} finally {
if (output != null) {
output.close();
}
}
}
private File getFile(final String suffix) {
Time time = new Time();
time.setToNow();
return new File(Environment.getExternalStorageDirectory(),
time.format("%Y%m%d%H%M%S") + "." + suffix);
}
private void writeInt(final DataOutputStream output, final int value)
throws IOException {
output.write(value >> 0);
output.write(value >> 8);
output.write(value >> 16);
output.write(value >> 24);
}
private void writeShort(final DataOutputStream output, final short value)
throws IOException {
output.write(value >> 0);
output.write(value >> 8);
}
private void writeString(final DataOutputStream output, final String value)
throws IOException {
for (int i = 0; i < value.length(); i++) {
output.write(value.charAt(i));
}
}
}
This kind of noise comes from you or your phone acting like an antenna for the electromagnetic waves coming from the wiring in your house. They have a frequency of 50-60 Hz, and you can filter them out using a high-pass digital filter (it is just an equation that you can apply to your signal before playing it back).
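A sketch of such a filter on 16-bit PCM samples, here a simple first-order high-pass; the cutoff and sample rate values are only illustrative:
/* First-order high-pass filter over 16-bit PCM samples to attenuate 50-60 Hz hum */
short[] highPass(short[] samples, int sampleRate, float cutoffHz) {
    float rc = 1.0f / (2.0f * (float) Math.PI * cutoffHz);
    float dt = 1.0f / sampleRate;
    float alpha = rc / (rc + dt);
    short[] out = new short[samples.length];
    float prevIn = 0f, prevOut = 0f;
    for (int i = 0; i < samples.length; i++) {
        float in = samples[i];
        float y = alpha * (prevOut + in - prevIn); // y[n] = a * (y[n-1] + x[n] - x[n-1])
        prevIn = in;
        prevOut = y;
        int clamped = Math.round(y);
        if (clamped > Short.MAX_VALUE) clamped = Short.MAX_VALUE;
        if (clamped < Short.MIN_VALUE) clamped = Short.MIN_VALUE;
        out[i] = (short) clamped;
    }
    return out;
}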