Sorry for my bad english :(
I am starting an Android application project. The app records from the microphone: when I click the start-record button, the app captures microphone input and writes it to a file, and when I click stop, the file is saved to the SD card.
project code :
The output file
// Bug fix: getExternalStorageState() returns a status string such as "mounted",
// not a path; the output file must be built from getExternalStorageDirectory().
OUTPUT_FILE = Environment.getExternalStorageDirectory().getAbsolutePath() + "/myaudio.3gp";
Start recording
/**
 * Starts a new microphone recording to OUTPUT_FILE (3GP container, AMR-NB codec).
 * Any previous recorder instance is released and a stale output file is deleted first.
 *
 * @throws IOException if the recorder cannot be prepared
 */
public void startRecord() throws IOException{
    if (recorder != null)
    {
        recorder.release();
    }
    File outFile = new File(OUTPUT_FILE);
    if (outFile.exists())
    {
        outFile.delete();
    }
    recorder = new MediaRecorder();
    recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
    recorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
    // Bug fix: the original called setOutputFormat() a second time with an
    // AudioEncoder constant; the encoder must be set via setAudioEncoder().
    recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
    recorder.setOutputFile(OUTPUT_FILE);
    recorder.prepare();
    recorder.start();
}
Stop recording
/**
 * Stops the active recording, if any.
 * Robustness fix: guard against a null recorder so calling stop before
 * start (or twice) does not throw a NullPointerException.
 */
public void stopRec(){
    if (recorder != null) {
        recorder.stop();
    }
}
PlaySound recorded file
// Plays back the file recorded at OUTPUT_FILE.
// NOTE(review): MediaPlayer must go through setDataSource -> prepare -> start
// in this exact order; prepare() failures surface as IOException.
public void playRecFile() throws IOException{
mediaPlayer = new MediaPlayer();
mediaPlayer.setDataSource(OUTPUT_FILE);
mediaPlayer.prepare();
mediaPlayer.start();
}
I want to get the recorded voice, put it into a ByteArray variable, and play it without saving the audio file to the SD card.
I have a project like what I want but it is written in actionscript 3
import flash.media.*;
import flash.events.*;
import flash.utils.ByteArray;
// --- Microphone capture setup (ActionScript 3) ---
// The mic's SAMPLE_DATA event delivers raw float samples, which are buffered
// in soundBytes; a drop in mic activity triggers playback via timerHandler().
var ch:SoundChannel
var mic:Microphone = Microphone.getMicrophone();
mic.addEventListener(SampleDataEvent.SAMPLE_DATA, micSampleDataHandler);
mic.addEventListener(ActivityEvent.ACTIVITY,onAct);
// Fired when the mic activity level changes; when activity stops and samples
// have been buffered, playback is started.
function onAct(evt:ActivityEvent):void
{
trace(evt.activating,mic.activityLevel);
if (!evt.activating)
{
if (soundBytes.length)
{
timerHandler();
}
}
}
// soundBytes accumulates live mic samples; soundO holds the snapshot being played.
var soundBytes:ByteArray = new ByteArray();
var soundO:ByteArray = new ByteArray();
// Copies every available float sample from the event into soundBytes.
function micSampleDataHandler(event:SampleDataEvent):void
{
trace(event.data.length,event.data.bytesAvailable, soundBytes.length);
while (event.data.bytesAvailable)
{
var sample:Number = event.data.readFloat();
soundBytes.writeFloat(sample);
}
}
// Moves the recorded bytes into soundO, clears the record buffer, and starts a
// dynamically generated Sound that pulls samples via playbackSampleHandler.
function timerHandler():void
{
mic.removeEventListener(SampleDataEvent.SAMPLE_DATA, micSampleDataHandler);
soundBytes.position = 0;
soundO.writeBytes(soundBytes);
soundO.position = 0;
soundBytes.position = 0;
soundBytes.length=0;
var sound:Sound= new Sound();
sound.addEventListener(SampleDataEvent.SAMPLE_DATA, playbackSampleHandler);
ch=sound.play();
ch.addEventListener(Event.SOUND_COMPLETE,onSC)
trace("OUTPUT",soundO.bytesAvailable);
}
// Diagnostic: logs when the generated Sound finishes.
function onSC(evt:Event):void
{
trace("SOUND_COMPLETE");
}
// Feeds up to 8192 mono samples per callback to the Sound object, duplicating
// each sample for the left and right channels. When the playback buffer is
// exhausted, the mic listener is re-armed and the buffer is reset.
function playbackSampleHandler(event:SampleDataEvent):void
{
    trace("SAMPLE_DATA: ", soundO.bytesAvailable);
    for (var i:int = 0; i < 8192; i++)
    {
        if (soundO.bytesAvailable < 4)
        {
            break;
        }
        var sample:Number = soundO.readFloat();
        // Write the sample twice: once per stereo channel.
        event.data.writeFloat(sample);
        event.data.writeFloat(sample);
    }
    if (soundO.bytesAvailable < 4 && soundO.position !== 0)
    {
        mic.addEventListener(SampleDataEvent.SAMPLE_DATA, micSampleDataHandler);
        soundO.position = 0;
        soundO.length = 0;
        // Bug fix: the original line was `trace("END` — an unterminated
        // string literal that does not compile.
        trace("END");
    }
}
Check out this answer! It works with MediaRecorder specifically, not AudioRecord.
Try to use the following solution to record audio to byte array using MediaRecorder:
// Records compressed AMR-NB audio from the mic directly into memory:
// MediaRecorder writes into one end of a pipe and we drain the other end into
// a ByteArrayOutputStream, so nothing ever touches the SD card.
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
ParcelFileDescriptor[] descriptors = ParcelFileDescriptor.createPipe();
// descriptors[0] is the read side, descriptors[1] the write side.
// (The deprecated ParcelFileDescriptor(ParcelFileDescriptor) copy constructor
// used in the original has been dropped; the pipe ends are used directly.)
ParcelFileDescriptor parcelRead = descriptors[0];
ParcelFileDescriptor parcelWrite = descriptors[1];
InputStream inputStream = new ParcelFileDescriptor.AutoCloseInputStream(parcelRead);
MediaRecorder recorder = new MediaRecorder();
recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
recorder.setOutputFormat(MediaRecorder.OutputFormat.AMR_NB);
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
recorder.setOutputFile(parcelWrite.getFileDescriptor());
recorder.prepare();
recorder.start();
// Drain loop: blocks until the recorder is stopped and the write side of the
// pipe closes, which delivers end-of-stream to the reader.
int read;
byte[] data = new byte[16384];
while ((read = inputStream.read(data, 0, data.length)) != -1) {
    byteArrayOutputStream.write(data, 0, read);
}
byteArrayOutputStream.flush();
I wrap this code in AsyncTask for start execution.
Also, don't forget to run the following code to stop recording:
// Stop and fully tear down the recorder; this also closes the pipe's write
// side, which unblocks the read loop above with end-of-stream.
recorder.stop();
recorder.reset();
recorder.release();
To convert byteArrayOutputStream to byte[] use byteArrayOutputStream.toByteArray()
Use the following class to get the recorded mic data as a byte array. You will receive the data as a buffer; try to use that. Hope this helps.
class AudioRecordThread implements Runnable {
#Override
public void run() {
int bufferLength = 0;
int bufferSize;
short[] audioData;
int bufferReadResult;
try {
bufferSize = AudioRecord.getMinBufferSize(sampleAudioBitRate,
AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT);
if (bufferSize <= 2048) {
bufferLength = 2048;
} else if (bufferSize <= 4096) {
bufferLength = 4096;
}
/* set audio recorder parameters, and start recording */
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioBitRate,
AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferLength);
audioData = new short[bufferLength];
audioRecord.startRecording();
Log.d(LOG_TAG, "audioRecord.startRecording()");
isAudioRecording = true;
/* ffmpeg_audio encoding loop */
while (isAudioRecording) {
bufferReadResult = audioRecord.read(audioData, 0, audioData.length);
if (bufferReadResult == 1024 && isRecorderStart) {
Buffer realAudioData1024 = ShortBuffer.wrap(audioData,0,1024);
***********************************
recorder.record(realAudioData1024);
***********************************
} else if (bufferReadResult == 2048 && isRecorderStart) {
Buffer realAudioData2048_1=ShortBuffer.wrap(audioData, 0, 1024);
Buffer realAudioData2048_2=ShortBuffer.wrap(audioData, 1024, 1024);
for (int i = 0; i < 2; i++) {
if (i == 0) {
***********************************
recorder.record(realAudioData2048_1);
***********************************
} else if (i == 1) {
***********************************
recorder.record(realAudioData2048_2);
***********************************
}
}
}
}
/* encoding finish, release recorder */
if (audioRecord != null) {
try {
audioRecord.stop();
audioRecord.release();
} catch (Exception e) {
e.printStackTrace();
}
audioRecord = null;
}
if (recorder != null && isRecorderStart) {
try {
recorder.stop();
recorder.release();
} catch (Exception e) {
e.printStackTrace();
}
recorder = null;
}
} catch (Exception e) {
Log.e(LOG_TAG, "get audio data failed:"+e.getMessage()+e.getCause()+e.toString());
}
}
}
I want to get the recorded voice, put it into a ByteArray variable, and play it without saving the audio file to the SD card.
Use the AudioRecord class to grab audio from the mic into an array and then feed it into an AudioTrack.
Related
I'm making a very simple music app, and I try to figure out how to do this:
download an audio file and record the file locally
at any moment, extract the audio frames from the file (during or after the download)
1) For the downloading part, I use Retrofit following this example. To make it short, it allows me to download a file, and recording it locally while it's downloading (so I don't have to wait for the end of the download to access the data of the file).
2) For the frame extracting part, I use MediaExtractor and MediaCodec like this:
MediaCodec codec;
MediaExtractor extractor;
MediaFormat format;
ByteBuffer[] codecInputBuffers;
ByteBuffer[] codecOutputBuffers;
Boolean sawInputEOS = false;
Boolean sawOutputEOS = false;
AudioTrack mAudioTrack;
MediaCodec.BufferInfo info;
File outputFile = null;
FileDescriptor fileDescriptor = null;
#Override
protected void onCreate(Bundle savedInstanceState) {
...
// the file being downloaded:
outputFile = new File(directory, "test.mp3");
try {
FileInputStream fileInputStream = new FileInputStream(outputFile);
fileDescriptor = fileInputStream.getFD();
}
catch (Exception e) {}
}
// Called once when enough data to extract.
private void onAudioFileReady() {
Thread thread = new Thread(new Runnable() {
#Override
public void run() {
// thread :
Process.setThreadPriority(Process.THREAD_PRIORITY_AUDIO);
// audio :
extractor = new MediaExtractor();
// the extractor only extracts the already downloaded part of the file:
try {
// extractor.setDataSource(url);
// extractor.setDataSource(outputFile.getAbsolutePath());
// extractor.setDataSource(MainActivity.this, Uri.parse(outputFile.getAbsolutePath()), null);
extractor.setDataSource(fileDescriptor);
}
catch (IOException e) {}
format = extractor.getTrackFormat(0);
String mime = format.getString(MediaFormat.KEY_MIME);
int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
try {
codec = MediaCodec.createDecoderByType(mime);
}
catch (IOException e) {}
codec.configure(format, null, null, 0);
codec.start();
codecInputBuffers = codec.getInputBuffers();
codecOutputBuffers = codec.getOutputBuffers();
extractor.selectTrack(0);
int minBufferSize = AudioTrack.getMinBufferSize(
sampleRate,
AudioFormat.CHANNEL_OUT_STEREO,
AudioFormat.ENCODING_PCM_16BIT);
mAudioTrack = new AudioTrack(
AudioManager.STREAM_MUSIC,
sampleRate,
AudioFormat.CHANNEL_OUT_STEREO,
AudioFormat.ENCODING_PCM_16BIT,
minBufferSize,
AudioTrack.MODE_STREAM
);
info = new MediaCodec.BufferInfo();
mAudioTrack.play();
do {
input();
output();
}
while (!sawInputEOS);
}
});
thread.start();
}
/**
 * Pulls one encoded sample from the extractor into a free codec input buffer.
 * Bug fix: when readSampleData() returns a negative size (end of the
 * available data), sawInputEOS was never set, so the BUFFER_FLAG_END_OF_STREAM
 * flag below was never queued and the do/while loop in onAudioFileReady never
 * terminated. It is now set, and advance() is skipped once EOS is reached.
 */
private void input() {
    int inputBufferIndex = codec.dequeueInputBuffer(-1);
    if (inputBufferIndex >= 0) {
        ByteBuffer byteBuffer = codecInputBuffers[inputBufferIndex];
        int sampleSize = extractor.readSampleData(byteBuffer, 0);
        long presentationTimeUs = 0;
        if (sampleSize < 0) {
            Log.w(LOG_TAG, "Saw input end of stream!");
            sawInputEOS = true;
            sampleSize = 0;
        }
        else {
            presentationTimeUs = extractor.getSampleTime();
        }
        codec.queueInputBuffer(inputBufferIndex,
                0,
                sampleSize,
                presentationTimeUs,
                sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
        if (!sawInputEOS) {
            extractor.advance();
        }
    }
}
// Drains one decoded output buffer from the codec and writes it to the AudioTrack.
private void output() {
// Block until a decoded buffer (or a status code) is available.
final int res = codec.dequeueOutputBuffer(info, -1);
if (res >= 0) {
ByteBuffer buf = codecOutputBuffers[res];
final byte[] chunk = new byte[info.size];
buf.get(chunk);
buf.clear();
if (chunk.length > 0) {
mAudioTrack.write(chunk, 0, chunk.length);
}
codec.releaseOutputBuffer(res, false);
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
sawOutputEOS = true;
}
}
else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// The codec re-allocated its buffers; refresh our cached references.
codecOutputBuffers = codec.getOutputBuffers();
}
else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// Track the decoder's actual output sample rate.
final MediaFormat oformat = codec.getOutputFormat();
mAudioTrack.setPlaybackRate(oformat.getInteger(MediaFormat.KEY_SAMPLE_RATE));
}
}
What it does:
When onAudioFileReady() is called, this code extracts and plays the audio samples of the file, but only the ones that have already been downloaded. When it reaches the end of the already downloaded part, the MediaExtractor stops (it looks like extractor.advance() doesn't want to continue the extraction...), even if there is more data available...
What I want to achieve:
I want to be able to continue the extraction of the audio samples of the file, as long as there is enough data for it of course.
IMPORTANT:
At that point, you may ask why I don't just use extractor.setDataSource(url). Here are the reasons why:
I want to save the audio file locally, so I can play it later
I want to be able to play the song, even long after the beginning of the download
Does anyone know how to achieve that? Thanks in advance for your help.
I am developing an Android app. In my app I need to record audio using AudioRecord and play it back, but an error is thrown when playing the recorded PCM file. The main problem is that I cannot log the error, because I have to test on a real device and cannot use the emulator — when I run on the emulator and record, my application crashes.
This is my activity
public class MainActivity extends AppCompatActivity {
Boolean recording;
private Thread recordingThread = null;
DataOutputStream dataOutputStream;
short[] audioData;
private Button btnPlay,btnStop,btnRecord;
private String outputFile = null;
//outputFile = Environment.getExternalStorageDirectory().getAbsolutePath()+"/recording.3gp";
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
initialize();
initViews();
setUpViews();
}
private void initialize()
{
}
private void initViews()
{
btnPlay = (Button)findViewById(R.id.btn_play);
btnRecord = (Button)findViewById(R.id.btn_record);
btnStop = (Button)findViewById(R.id.btn_stop);
}
private void setUpViews()
{
btnRecord.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View v) {
startRecord();
}
});
btnStop.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View v) {
recording = false;
Toast.makeText(getBaseContext(),"Stopped",Toast.LENGTH_SHORT).show();
}
});
btnPlay.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View v) {
playRecord();
}
});
}
private void startRecord(){
File file = new File(Environment.getExternalStorageDirectory(), "test.pcm");
try {
file.createNewFile();
OutputStream outputStream = new FileOutputStream(file);
BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(outputStream);
dataOutputStream = new DataOutputStream(bufferedOutputStream);
final int minBufferSize = AudioRecord.getMinBufferSize(11025,
AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT);
audioData = new short[minBufferSize];
final AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
11025,
AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT,
minBufferSize);
audioRecord.startRecording();
Toast.makeText(getBaseContext(),"Recording",Toast.LENGTH_SHORT).show();
recordingThread = new Thread(new Runnable() {
public void run() {
while(recording){
int numberOfShort = audioRecord.read(audioData, 0, minBufferSize);
for(int i = 0; i < numberOfShort; i++){
try{
dataOutputStream.writeShort(audioData[i]);
}
catch (IOException e)
{
Toast.makeText(getBaseContext(),e.getMessage(),Toast.LENGTH_SHORT).show();
}
}
}
}
}, "AudioRecorder Thread");
audioRecord.stop();
dataOutputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
private void playRecord(){
File file = new File(Environment.getExternalStorageDirectory(), "test.pcm");
int shortSizeInBytes = Short.SIZE/Byte.SIZE;
int bufferSizeInBytes = (int)(file.length()/shortSizeInBytes);
short[] audioData = new short[bufferSizeInBytes];
try {
InputStream inputStream = new FileInputStream(file);
BufferedInputStream bufferedInputStream = new BufferedInputStream(inputStream);
DataInputStream dataInputStream = new DataInputStream(bufferedInputStream);
int i = 0;
while(dataInputStream.available() > 0){
audioData[i] = dataInputStream.readShort();
i++;
}
dataInputStream.close();
AudioTrack audioTrack = new AudioTrack(
AudioManager.STREAM_MUSIC,
11025,
AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT,
bufferSizeInBytes,
AudioTrack.MODE_STREAM);
audioTrack.play();
audioTrack.write(audioData, 0, bufferSizeInBytes);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
}
The problem is with playing the recorded pcm. When I click play button, application throws fatal error and stopped. I cannot log the error as well. The pcm file really exists at the defined path as well. What is wrong with my code? Why can it not play?
As you can see below, pcm file created successfully after recording
Without seeing the Logcat, it is hard to figure the issue in your code. Take a look at this working code on 'How to play pcm files using AudioTrack':
/**
 * Streams a raw 16-bit stereo 44.1 kHz PCM file through an AudioTrack.
 *
 * Fixes relative to the original:
 *  - `at == null` could never be true (constructors do not return null);
 *    the check now uses AudioTrack.getState();
 *  - if the file was missing, `in` stayed null and in.read() threw an NPE;
 *    the method now returns after a failed open;
 *  - the stream and the track are released in a finally block.
 *
 * @param filePath absolute path of the PCM file to play; null is a no-op
 * @throws IOException if reading the file fails
 */
private void PlayAudioFileViaAudioTrack(String filePath) throws IOException
{
    if (filePath == null)
        return;
    int intSize = android.media.AudioTrack.getMinBufferSize(44100, AudioFormat.CHANNEL_CONFIGURATION_STEREO,
            AudioFormat.ENCODING_PCM_16BIT);
    AudioTrack at = new AudioTrack(AudioManager.STREAM_MUSIC, 44100, AudioFormat.CHANNEL_CONFIGURATION_STEREO,
            AudioFormat.ENCODING_PCM_16BIT, intSize, AudioTrack.MODE_STREAM);
    if (at.getState() != AudioTrack.STATE_INITIALIZED) {
        Log.d("TCAudio", "audio track is not initialised ");
        at.release();
        return;
    }
    int count = 512 * 1024; // read in 512 KiB chunks
    File file = new File(filePath);
    byte[] byteData = new byte[count];
    FileInputStream in;
    try {
        in = new FileInputStream(file);
    } catch (FileNotFoundException e) {
        e.printStackTrace();
        at.release();
        return;
    }
    try {
        int bytesread = 0, ret;
        int size = (int) file.length();
        at.play();
        while (bytesread < size) {
            ret = in.read(byteData, 0, count);
            if (ret == -1) { // end of stream
                break;
            }
            at.write(byteData, 0, ret); // write only the bytes actually read
            bytesread += ret;
        }
        at.stop();
    } finally {
        in.close();
        at.release();
    }
}
code from http://jongladwin.blogspot.co.uk/2010/03/android-play-pcmwav-audio-buffer-using.html
I have an audio recorder class which has been using AudioRecord to record audio and store it in a byte array. It has been working fine until recently when my AudioRecord has been failing to initialise and I can't see where I'm going wrong.
My Recorder class:
/**
 * Records raw 16-bit mono PCM at 8 kHz from the microphone into an in-memory
 * byte stream; stopRecording() returns the captured audio as a byte array.
 *
 * Fixes relative to the original:
 *  - the AudioRecord was constructed with a fixed 2048-byte buffer even when
 *    getMinBufferSize() reported a larger minimum, which leaves the recorder
 *    in STATE_UNINITIALIZED on many devices — the larger value is now used;
 *  - stopRecording() NPE'd if no data had been captured (bOutputStream null);
 *  - stopRecording() now joins the capture thread before reading the stream,
 *    avoiding a read-while-write race and a read() on a released recorder.
 */
public class SoundRecorder {
    private static final int RECORDER_SAMPLERATE = 8000;
    private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_MONO;
    private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
    private AudioRecord recorder = null;
    private Thread recordingThread = null;
    private boolean isRecording = false;
    private int bufferSize = 0;
    private ByteArrayOutputStream bOutputStream;
    int BufferElements2Rec = 1024; // want to play 2048 (2K) since 2 bytes we use only 1024
    int BytesPerElement = 2; // 2 bytes in 16bit format

    SoundRecorder() {
        bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE,
                RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
        bOutputStream = null;
    }

    /** Creates the AudioRecord and starts capturing on a worker thread. */
    public void startRecording() {
        // Use at least the device minimum, otherwise the recorder stays
        // in STATE_UNINITIALIZED.
        int recordBufferSize = Math.max(bufferSize, BufferElements2Rec * BytesPerElement);
        recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
                RECORDER_SAMPLERATE, RECORDER_CHANNELS,
                RECORDER_AUDIO_ENCODING, recordBufferSize);
        if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
            recorder.startRecording();
            isRecording = true;
            recordingThread = new Thread(new Runnable() {
                public void run() {
                    storeAudioData();
                }
            }, "AudioRecorder Thread");
            recordingThread.start();
        } else {
            // Initialisation failure — also check the RECORD_AUDIO permission.
            Log.d("RECORDER", "AudioRecord failed to initialise");
        }
    }

    /**
     * Stops recording and returns the captured PCM bytes, or null if no
     * recording was active or no data was captured.
     */
    public byte[] stopRecording() {
        if (recorder == null) {
            return null;
        }
        // Signal the capture loop to exit, then wait for it so the recorder
        // is not released while read() is still in flight.
        isRecording = false;
        if (recordingThread != null) {
            try {
                recordingThread.join();
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
            recordingThread = null;
        }
        recorder.stop();
        recorder.release();
        recorder = null;
        if (bOutputStream == null) {
            return null;
        }
        byte[] audioBytes = bOutputStream.toByteArray();
        try {
            bOutputStream.reset();
            bOutputStream.close();
            Log.d("RECORDER", "bOutputstream closed");
        } catch (IOException e) {
            Log.d("RECORDER", "Output stream could not be closed or reset");
            e.printStackTrace();
        }
        return audioBytes;
    }

    /** Capture loop: reads shorts from the mic and appends them as bytes. */
    private void storeAudioData() {
        short sData[] = new short[BufferElements2Rec];
        bOutputStream = new ByteArrayOutputStream();
        while (isRecording) {
            recorder.read(sData, 0, BufferElements2Rec);
            try {
                // Convert short samples to little-endian bytes and buffer them.
                byte bData[] = short2byte(sData);
                bOutputStream.write(bData);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /** Converts 16-bit samples to little-endian bytes, zeroing the source. */
    private byte[] short2byte(short[] sData) {
        int shortArrsize = sData.length;
        byte[] bytes = new byte[shortArrsize * 2];
        for (int i = 0; i < shortArrsize; i++) {
            bytes[i * 2] = (byte) (sData[i] & 0x00FF);
            bytes[(i * 2) + 1] = (byte) (sData[i] >> 8);
            sData[i] = 0;
        }
        return bytes;
    }

    public boolean isRecording() {
        return isRecording;
    }
}
Can anyone help me out with this? Thanks.
I finally solved this problem after pulling my hair out. On Android wear (moto360), in the settings screen there is menu item called "permissions". In here I set the permission of my app to "microphone>enabled". I have no idea when this feature was added, but it solved my problem.
I want to get the data from mic and store it in a byte array for some processing.
I have tried the code given below but it gives errors on recorder.record function.
I have tried a lot of stuff but it was not worth it. Please do help me.
public class AudioRecordThread implements Runnable {
#Override
public void run() {
int bufferLength = 0;
int bufferSize;
short[] audioData;
int bufferReadResult;
MediaRecorder recorder=new MediaRecorder();
recorder.start();
try {
int sampleAudioBitRate=44100;
bufferSize = AudioRecord.getMinBufferSize(sampleAudioBitRate,
AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT);
if (bufferSize <= 2048) {
bufferLength = 2048;
} else if (bufferSize <= 4096) {
bufferLength = 4096;
}
AudioRecord audioRecord;
/* set audio recorder parameters, and start recording */
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioBitRate,
AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferLength);
audioData = new short[bufferLength];
audioRecord.startRecording();
Log.d("My Activity", "audioRecord.startRecording()");
boolean isAudioRecording = true;
boolean isRecorderStart=true;
/* ffmpeg_audio encoding loop */
while (isAudioRecording) {
bufferReadResult = audioRecord.read(audioData, 0, audioData.length);
if (bufferReadResult == 1024 && isRecorderStart) {
Buffer realAudioData1024 = ShortBuffer.wrap(audioData, 0, 1024);
***********************************
recorder.record(realAudioData1024);
***********************************
} else if (bufferReadResult == 2048 && isRecorderStart) {
Buffer realAudioData2048_1=ShortBuffer.wrap(audioData, 0, 1024);
Buffer realAudioData2048_2=ShortBuffer.wrap(audioData, 1024, 1024);
for (int i = 0; i < 2; i++) {
if (i == 0) {
***********************************
recorder.record(realAudioData2048_1);
***********************************
} else if (i == 1) {
***********************************
recorder.record(realAudioData2048_2);
***********************************
}
}
}
}
/* encoding finish, release recorder */
if (audioRecord != null) {
try {
audioRecord.stop();
audioRecord.release();
} catch (Exception e) {
e.printStackTrace();
}
audioRecord = null;
}
if (recorder != null && isRecorderStart) {
try {
recorder.stop();
recorder.release();
} catch (Exception e) {
e.printStackTrace();
}
recorder = null;
}
} catch (Exception e) {
Log.e("My Activity", "get audio data failed:"+e.getMessage()+e.getCause()+e.toString());
}
}
}
I am developing an Android app that records the user's speech. For this I have used the AudioRecord API.
Currently the PCM file (recordedAudio.pcm) is generated successfully on the SD card, but I am not able to play it. I also tried on a PC with Windows Media Player and some other players, but nothing helps.
Following are my code snippet.
/* Recording configuration: raw 16-bit mono PCM at 44.1 kHz. */
private int minBufSize;
private AudioRecord recorder;
private int sampleRate = 44100;
private int channelConfig = AudioFormat.CHANNEL_IN_MONO;
private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
// Loop flag read by the capture thread in startStreaming().
private boolean status;
// Compute the device minimum buffer, raise the flag, then start capture.
minBufSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig,
audioFormat);
status = true;
startStreaming();
public void startStreaming() {
Thread streamThread = new Thread(new Runnable() {
#Override
public void run() {
try {
String filePath = Environment.getExternalStorageDirectory()
.getPath() + "/audioRecord.pcm";
FileOutputStream fileOutputStreamObj = null;
try {
fileOutputStreamObj = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
Log.e(TAG, "Exception" + e.getMessage());
}
// short[] sData = new short[minBufSize];
byte[] buffer = new byte[minBufSize];
// recorder = findAudioRecord();
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
sampleRate, channelConfig, audioFormat, minBufSize);
Log.d(TAG, "Recorder initialized");
recorder.startRecording();
while (status) {
// reading data from MIC into buffer
minBufSize = recorder.read(buffer, 0, buffer.length);
try {
// writes the data to file from buffer
// stores the voice buffer
// byte bData[] = short2byte(sData);
fileOutputStreamObj.write(buffer, 0, buffer.length);
} catch (IOException e) {
e.printStackTrace();
Log.e(TAG, "Exception" + e.getMessage());
}
// mConnection.sendBinaryMessage(buffer);
System.out.println("MinBufferSize: " + minBufSize);
}
} catch (Exception e) {
e.printStackTrace();
Log.e(TAG, "Exception" + e.getMessage());
}
}
});
streamThread.start();
}
Please help me on this. Thanks in advance.
You don't have to convert it into WAV and Play.
AudioTrack can directly play the recorded Audio.
Following is a Code snippet to Record audio into a file using AudioRecord and playback the same using AudioTrack API.
The operation is controlled from User using Buttons.
Code
private int BufferSize;
// NOTE(review): BufferSize is still 0 when this field initializer runs, so
// `buffer` is zero-length here; the usable size is computed in onCreate().
byte[] buffer = new byte[BufferSize];
/* AudioRecord and AudioTrack Object */
private AudioRecord record = null;
private AudioTrack track = null;
/* Audio Configuration: raw 16-bit mono PCM at 44.1 kHz */
private int sampleRate = 44100;
private int channelConfig = AudioFormat.CHANNEL_IN_MONO;
private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
private boolean isRecording = true;
private Thread recordingThread = null;
The Audio Configuration can change as per device.
Refer to this question.
GUI has three buttons, Record, Stop and Play
/* Activity entry point: binds the layout, wires the buttons, sets the initial
   enabled states, and computes the minimum AudioRecord buffer size. */
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
setButtonHandlers();
/* Set Button Visibility */
enableButton(R.id.btnStartRec,true);
enableButton(R.id.btnStopRec,false);
enableButton(R.id.btnStartPlay,false);
BufferSize = AudioRecord.getMinBufferSize(sampleRate,
channelConfig, audioFormat);
}
/** Enables or disables the button with the given resource id. */
private void enableButton(int id, boolean isEnable) {
    Button button = (Button) findViewById(id);
    button.setEnabled(isEnable);
}
/** Attaches the shared click listener to the three control buttons. */
private void setButtonHandlers() {
    int[] buttonIds = { R.id.btnStartRec, R.id.btnStopRec, R.id.btnStartPlay };
    for (int buttonId : buttonIds) {
        ((Button) findViewById(buttonId)).setOnClickListener(btnClick);
    }
}
Handling Button click:
private View.OnClickListener btnClick = new View.OnClickListener() {
#Override
public void onClick(View v) {
switch(v.getId()){
case R.id.btnStartRec:{
Log.d(TAG, "Start Recording");
enableButton(R.id.btnStartRec,false);
enableButton(R.id.btnStopRec,true);
startRecording();
break;
}
case R.id.btnStopRec:{
Log.d(TAG, "Stop Recording");
enableButton(R.id.btnStartRec,true);
enableButton(R.id.btnStopRec,false);
stopRecording();
enableButton(R.id.btnStartPlay,true);
break;
}
case R.id.btnStartPlay:{
Log.d(TAG, "Play Recording");
enableButton(R.id.btnStartRec,false);
enableButton(R.id.btnStopRec,false);
StartPlaying();
break;
}
}
}
};
Code for Start Recording
private void startRecording()
{
record = new AudioRecord(AudioSource.DEFAULT, sampleRate,
channelConfig, audioFormat, BufferSize);
if (AudioRecord.STATE_INITIALIZED == record.getState())
record.startRecording();
isRecording = true;
/* Run a thread for Recording */
recordingThread = new Thread(new Runnable() {
#Override
public void run() {
writeAudioDataToFile();
}
},"AudioRecorder Thread");
recordingThread.start();
}
/**
 * Capture loop: drains the AudioRecord into /sdcard/audiofile.pcm until
 * isRecording goes false, then closes the file.
 * Bug fix: only the bytes actually returned by read() are written — the
 * original wrote the entire buffer, appending stale bytes on short reads.
 */
private void writeAudioDataToFile()
{
    byte data[] = new byte[BufferSize];
    /* Record audio to following file */
    String filename = "/sdcard/audiofile.pcm";
    FileOutputStream os = null;
    try {
        os = new FileOutputStream(filename);
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    }
    int read_bytes = 0;
    if (null != os) {
        while (isRecording)
        {
            read_bytes = record.read(data, 0, BufferSize);
            if (read_bytes > 0 && AudioRecord.ERROR_INVALID_OPERATION != read_bytes) {
                try {
                    os.write(data, 0, read_bytes);
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        try {
            os.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
Code for Stop Recording
/** Stops the capture loop and releases the AudioRecord. */
private void stopRecording()
{
    if (record == null) {
        return;
    }
    // Signal the capture thread to exit its write loop.
    isRecording = false;
    if (record.getState() == AudioRecord.STATE_INITIALIZED)
    {
        record.stop();
        record.release();
        Log.d(TAG, "===== Recording Audio Completed ===== ");
    }
    record = null;
    recordingThread = null;
}
Code for Playing the Audio file:
/**
 * Streams /sdcard/audiofile.pcm through an AudioTrack.
 * Fixes relative to the original: the streams and the track are released in a
 * finally block, and the record button is re-enabled even if playback fails.
 */
public void startPlaying()
{
    enableButton(R.id.btnStartPlay, false);
    int minBufferSize = AudioTrack.getMinBufferSize(sampleRate,
            AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT);
    track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
            AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT, minBufferSize,
            AudioTrack.MODE_STREAM);
    int i = 0;
    byte[] temp = new byte[minBufferSize];
    DataInputStream dis = null;
    try {
        FileInputStream fin = new FileInputStream("/sdcard/audiofile.pcm");
        Log.d(TAG, "===== Opening File for Playing : /sdcard/audiofile.pcm ===== ");
        dis = new DataInputStream(fin);
        track.play();
        while ((i = dis.read(temp, 0, minBufferSize)) > -1)
        {
            track.write(temp, 0, i);
        }
        Log.d(TAG, "===== Playing Audio Completed ===== ");
        track.stop();
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        if (dis != null) {
            try {
                dis.close(); // also closes the underlying FileInputStream
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        track.release();
        enableButton(R.id.btnStartRec, true);
    }
}
Please include the following in AndroidManifest.xml
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" > </uses-permission>
<uses-permission android:name="android.permission.RECORD_AUDIO" > </uses-permission>
The activity_main.xml looks like this.
The string.xml looks like this.
The above code is working and tested.
You can also do the same, without a file and using a intermediate buffer.
See: Audio Recording and Streaming in Android
Yes, I finally found the answer, with the clue from Michael's comment above.
I am posting the working code here.
The client-side code is as follows.
From the client side am streaming the audio data to the web socket server.
/* Client-side audio configuration: raw 16-bit mono PCM at 44.1 kHz. */
private int minBufSize;
private AudioRecord recorder;
private int sampleRate = 44100;
private int channelConfig = AudioFormat.CHANNEL_IN_MONO;
private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
// Compute the device minimum record buffer before opening the recorder.
minBufSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig,
audioFormat);
startStreaming();
public void startStreaming() {
Thread streamThread = new Thread(new Runnable() {
#Override
public void run() {
try {
byte[] buffer = new byte[minBufSize];
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
sampleRate, channelConfig, audioFormat, minBufSize);
Log.d(TAG, "Recorder initialized");
recorder.startRecording();
while (status) {
// reading data from MIC into buffer
minBufSize = recorder.read(buffer, 0, buffer.length);
mConnection.sendBinaryMessage(buffer);
System.out.println("MinBufferSize: " + minBufSize);
}
} catch (Exception e) {
e.printStackTrace();
Log.e(TAG, "Exception" + e.getMessage());
}
}
});
streamThread.start();
}
The Server Side Code added implementation as follows,
First the server will create the .pcm from the streamed data. Then from that pcm file it will create the wave file by adding header.
#OnMessage
public void onMessage(byte[] data, boolean arg1)
{
if ((!this.currentCommand.equals("stop")) &&
(this.currentCommand.equals("start")))
try {
System.out.println("Starting new recording.");
FileOutputStream fOut = new FileOutputStream(this.f2, true);
fOut.write(data);
fOut.close();
properWAV(this.f2, 111133.0F);
}
catch (Exception e) {
e.printStackTrace();
}
}
/**
 * Wraps the raw PCM in `fileToConvert` in a canonical 44-byte WAV header
 * (PCM, mono, 16-bit, 44.1 kHz) and writes it to D:/audio/&lt;id&gt;.wav.
 *
 * Bug fix: Subchunk2Size must be the PCM payload size in bytes. The payload
 * read from the file is already a byte count, but the original multiplied it
 * by channels * bitsPerSample / 8 AGAIN, writing an inflated data-chunk size
 * (and RIFF chunk size) into the header. The output stream is now also closed
 * in a finally block.
 */
private void properWAV(File fileToConvert, float newRecordingID)
{
    try {
        long mySubChunk1Size = 16L;
        int myBitsPerSample = 16;
        int myFormat = 1; // 1 = PCM
        long myChannels = 1L;
        long mySampleRate = 44100L;
        long myByteRate = mySampleRate * myChannels * myBitsPerSample / 8L;
        int myBlockAlign = (int) (myChannels * myBitsPerSample / 8L);
        byte[] clipData = getBytesFromFile(fileToConvert);
        long myDataSize = clipData.length;
        long myChunk2Size = myDataSize; // already a byte count
        long myChunkSize = 36L + myChunk2Size;
        OutputStream os = new FileOutputStream(new File("D:/audio/" + newRecordingID + ".wav"));
        BufferedOutputStream bos = new BufferedOutputStream(os);
        DataOutputStream outFile = new DataOutputStream(bos);
        try {
            outFile.writeBytes("RIFF");
            outFile.write(intToByteArray((int) myChunkSize), 0, 4);
            outFile.writeBytes("WAVE");
            outFile.writeBytes("fmt ");
            outFile.write(intToByteArray((int) mySubChunk1Size), 0, 4);
            outFile.write(shortToByteArray((short) myFormat), 0, 2);
            outFile.write(shortToByteArray((short) (int) myChannels), 0, 2);
            outFile.write(intToByteArray((int) mySampleRate), 0, 4);
            outFile.write(intToByteArray((int) myByteRate), 0, 4);
            outFile.write(shortToByteArray((short) myBlockAlign), 0, 2);
            outFile.write(shortToByteArray((short) myBitsPerSample), 0, 2);
            outFile.writeBytes("data");
            outFile.write(intToByteArray((int) myDataSize), 0, 4);
            outFile.write(clipData);
            outFile.flush();
        } finally {
            outFile.close();
        }
    }
    catch (IOException e) {
        e.printStackTrace();
    }
}
/** Serialises an int to 4 little-endian bytes (least-significant first). */
private static byte[] intToByteArray(int i)
{
    byte[] b = new byte[4];
    for (int k = 0; k < 4; k++) {
        b[k] = (byte) ((i >> (8 * k)) & 0xFF);
    }
    return b;
}
/** Serialises a short to 2 little-endian bytes (least-significant first). */
public static byte[] shortToByteArray(short data)
{
    byte low = (byte) (data & 0xFF);
    byte high = (byte) ((data >>> 8) & 0xFF);
    return new byte[] { low, high };
}
/**
 * Reads the entire file into a byte array.
 *
 * Fixes relative to the original:
 *  - read() was called once, yet may legally return fewer bytes than the file
 *    length; the buffer is now filled in a loop;
 *  - the duplicated close block after the finally (dead code) was removed.
 *
 * @param file the file to read
 * @return the file's full contents
 * @throws IOException if the file cannot be opened or read fully
 */
public byte[] getBytesFromFile(File file)
        throws IOException
{
    byte[] buffer = new byte[(int) file.length()];
    InputStream ios = null;
    try {
        ios = new FileInputStream(file);
        int offset = 0;
        while (offset < buffer.length) {
            int read = ios.read(buffer, offset, buffer.length - offset);
            if (read == -1) {
                throw new IOException("EOF reached while trying to read the whole file");
            }
            offset += read;
        }
    }
    finally {
        try {
            if (ios != null)
                ios.close();
        }
        catch (IOException ignored) {
            // best-effort close; the read outcome is already determined
        }
    }
    return buffer;
}
Hopefully this saves other developers some time.