The following code records audio and stores it on the SD card in PCM format. The code works for me, but the PCM file doesn't play!
I got this code from this link: Android : recording audio using audiorecord class play as fast forwarded
How can I play the PCM file?
public class Audio_Record extends Activity {
private static final int RECORDER_SAMPLERATE = 8000;
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_MONO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private AudioRecord recorder = null;
private Thread recordingThread = null;
private boolean isRecording = false;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
setButtonHandlers();
enableButtons(false);
int bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE,
RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
System.out.println("BUFFER SIZE VALUE IS " + bufferSize);
}
private void setButtonHandlers() {
((Button) findViewById(R.id.btnStart)).setOnClickListener(btnClick);
((Button) findViewById(R.id.btnStop)).setOnClickListener(btnClick);
}
private void enableButton(int id, boolean isEnable) {
((Button) findViewById(id)).setEnabled(isEnable);
}
private void enableButtons(boolean isRecording) {
enableButton(R.id.btnStart, !isRecording);
enableButton(R.id.btnStop, isRecording);
}
int BufferElements2Rec = 1024; // number of short elements per read (2048 bytes, since each element is 2 bytes)
int BytesPerElement = 2; // 2 bytes per sample in 16-bit PCM
private void startRecording() {
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
RECORDER_SAMPLERATE, RECORDER_CHANNELS,
RECORDER_AUDIO_ENCODING, BufferElements2Rec * BytesPerElement);
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
private byte[] short2byte(short[] sData) {
int shortArrsize = sData.length;
byte[] bytes = new byte[shortArrsize * 2];
for (int i = 0; i < shortArrsize; i++) {
bytes[i * 2] = (byte) (sData[i] & 0x00FF);
bytes[(i * 2) + 1] = (byte) (sData[i] >> 8);
sData[i] = 0;
}
return bytes;
}
private void writeAudioDataToFile() {
// Write the output audio in byte
String filePath = "/sdcard/voice8K16bitmono.pcm";
short sData[] = new short[BufferElements2Rec];
FileOutputStream os = null;
try {
os = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
while (isRecording) {
// gets the voice output from microphone to byte format
recorder.read(sData, 0, BufferElements2Rec);
System.out.println("Short wirting to file" + sData.toString());
try {
// writes the data to the file from the buffer (stores the voice buffer)
byte bData[] = short2byte(sData);
os.write(bData, 0, BufferElements2Rec * BytesPerElement);
} catch (IOException e) {
e.printStackTrace();
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
private void stopRecording() {
// stops the recording activity
if (null != recorder) {
isRecording = false;
recorder.stop();
recorder.release();
recorder = null;
recordingThread = null;
}
}
private View.OnClickListener btnClick = new View.OnClickListener() {
public void onClick(View v) {
switch (v.getId()) {
case R.id.btnStart: {
enableButtons(true);
startRecording();
break;
}
case R.id.btnStop: {
enableButtons(false);
stopRecording();
break;
}
}
}
};
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
finish();
}
return super.onKeyDown(keyCode, event);
}
}
Android's media player doesn't play raw PCM files by default. Either:
Copy the file from your SD card to your computer and play it there,
Write your own player using AudioTrack, or
Install an app that plays PCM.
Here's a tutorial on how to play PCM using the AudioTrack class: http://jongladwin.blogspot.co.uk/2010/03/android-play-pcmwav-audio-buffer-using.html
Windows Media Player should be able to play PCM; some alternatives are mentioned here: http://www.makeuseof.com/answers/play-pcm-file-pc/
I guess most of the big music player apps on Android will support PCM.
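If you go the AudioTrack route, here is a minimal sketch of streaming a raw PCM file from disk; it assumes the 8 kHz / 16-bit / mono format used in the question, and playPcm is just an illustrative helper name:
// Stream a raw 8 kHz / 16-bit / mono PCM file through AudioTrack.
void playPcm(String filePath) throws IOException {
    int sampleRate = 8000;
    int minBufSize = AudioTrack.getMinBufferSize(sampleRate,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
            minBufSize, AudioTrack.MODE_STREAM);
    track.play();
    FileInputStream in = new FileInputStream(filePath);
    try {
        byte[] buffer = new byte[minBufSize];
        int read;
        // write() blocks until each chunk has been queued for playback.
        while ((read = in.read(buffer)) > 0) {
            track.write(buffer, 0, read);
        }
    } finally {
        in.close();
        track.stop();
        track.release();
    }
}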
I also used your code, but my voice recording sounded like a "zzzzz" buzz. So I changed the code a little, and now I can listen to the recording without problems or distortion, both on the smartphone and on the PC (in this case with Audacity).
This is my code:
public class VoiceActivity extends Activity {
private static final String TAG = "VoiceRecord";
private static final int RECORDER_SAMPLERATE = 8000;
private static final int RECORDER_CHANNELS_IN = AudioFormat.CHANNEL_IN_MONO;
private static final int RECORDER_CHANNELS_OUT = AudioFormat.CHANNEL_OUT_MONO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private static final int AUDIO_SOURCE = MediaRecorder.AudioSource.MIC;
// Initialize minimum buffer size in bytes.
private int bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE, RECORDER_CHANNELS_IN, RECORDER_AUDIO_ENCODING);
private AudioRecord recorder = null;
private Thread recordingThread = null;
private boolean isRecording = false;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_voice);
((Button) findViewById(R.id.start_button)).setOnClickListener(btnClick);
((Button) findViewById(R.id.stop_button)).setOnClickListener(btnClick);
enableButtons(false);
}
private void enableButton(int id, boolean isEnable) {
((Button) findViewById(id)).setEnabled(isEnable);
}
private void enableButtons(boolean isRecording) {
enableButton(R.id.start_button, !isRecording);
enableButton(R.id.stop_button, isRecording);
}
private void startRecording() {
if( bufferSize == AudioRecord.ERROR_BAD_VALUE)
Log.e( TAG, "Bad Value for \"bufferSize\", recording parameters are not supported by the hardware");
if( bufferSize == AudioRecord.ERROR )
Log.e( TAG, "Bad Value for \"bufferSize\", implementation was unable to query the hardware for its output properties");
Log.e( TAG, "\"bufferSize\"="+bufferSize);
// Initialize Audio Recorder.
recorder = new AudioRecord(AUDIO_SOURCE, RECORDER_SAMPLERATE, RECORDER_CHANNELS_IN, RECORDER_AUDIO_ENCODING, bufferSize);
// Starts recording from the AudioRecord instance.
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
private void writeAudioDataToFile() {
//Write the output audio in byte
String filePath = "/sdcard/8k16bitMono.pcm";
byte saudioBuffer[] = new byte[bufferSize];
FileOutputStream os = null;
try {
os = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
while (isRecording) {
// gets the voice output from microphone to byte format
recorder.read(saudioBuffer, 0, bufferSize);
try {
// writes the data to file from buffer stores the voice buffer
os.write(saudioBuffer, 0, bufferSize);
} catch (IOException e) {
e.printStackTrace();
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
private void stopRecording() throws IOException {
// stops the recording activity
if (null != recorder) {
isRecording = false;
recorder.stop();
recorder.release();
recorder = null;
recordingThread = null;
PlayShortAudioFileViaAudioTrack("/sdcard/8k16bitMono.pcm");
}
}
private void PlayShortAudioFileViaAudioTrack(String filePath) throws IOException{
// We keep temporarily filePath globally as we have only two sample sounds now..
if (filePath==null)
return;
//Reading the file..
File file = new File(filePath); // for ex. path= "/sdcard/samplesound.pcm" or "/sdcard/samplesound.wav"
byte[] byteData = new byte[(int) file.length()];
Log.d(TAG, (int) file.length()+"");
FileInputStream in = null;
try {
in = new FileInputStream( file );
in.read( byteData );
in.close();
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
// Set and push to audio track..
int intSize = android.media.AudioTrack.getMinBufferSize(RECORDER_SAMPLERATE, RECORDER_CHANNELS_OUT, RECORDER_AUDIO_ENCODING);
Log.d(TAG, intSize+"");
AudioTrack at = new AudioTrack(AudioManager.STREAM_MUSIC, RECORDER_SAMPLERATE, RECORDER_CHANNELS_OUT, RECORDER_AUDIO_ENCODING, intSize, AudioTrack.MODE_STREAM);
if (at!=null) {
at.play();
// Write the byte array to the track
at.write(byteData, 0, byteData.length);
at.stop();
at.release();
}
else
Log.d(TAG, "audio track is not initialised ");
}
private View.OnClickListener btnClick = new View.OnClickListener() {
public void onClick(View v) {
switch (v.getId()) {
case R.id.start_button: {
enableButtons(true);
startRecording();
break;
}
case R.id.stop_button: {
enableButtons(false);
try {
stopRecording();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
break;
}
}
}
};
// onClick of backbutton finishes the activity.
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
finish();
}
return super.onKeyDown(keyCode, event);
}
}
Here is my solution:
public class AudioTrackPlayer {
private String pathAudio;
private AudioTrack audioPlayer;
private Thread mThread;
private int bytesread = 0, ret = 0;
private int size;
private FileInputStream in = null;
private byte[] byteData = null;
private int count = 512 * 1024; // 512 kb
private boolean isPlay = true;
private boolean isLooping = false;
private static Handler mHandler;
public AudioTrackPlayer() {
}
public void prepare(String pathAudio){
this.pathAudio = pathAudio;
mHandler = new Handler();
}
public void play(){
stop();
isPlay = true;
bytesread = 0;
ret = 0;
if (pathAudio == null)
return;
audioPlayer = createAudioPlayer();
if (audioPlayer == null) return;
audioPlayer.play();
mThread = new Thread(new PlayerProcess());
mThread.start();
}
private final Runnable mLoopingRunnable = new Runnable() {
@Override
public void run() {
play();
}
};
private AudioTrack createAudioPlayer(){
int intSize = android.media.AudioTrack.getMinBufferSize(16000, AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT);
AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 16000, AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT, intSize, AudioTrack.MODE_STREAM);
if (audioTrack == null) {
Log.d("TCAudio", "audio track is not initialised ");
return null;
}
File file = null;
file = new File(pathAudio);
byteData = new byte[(int) count];
try {
in = new FileInputStream(file);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
size = (int) file.length();
return audioTrack;
}
private class PlayerProcess implements Runnable{
@Override
public void run() {
while (bytesread < size && isPlay) {
if (Thread.currentThread().isInterrupted()) {
break;
}
try {
ret = in.read(byteData, 0, count);
} catch (IOException e) {
e.printStackTrace();
}
if (ret != -1) { // Write the byte array to the track
audioPlayer.write(byteData,0, ret);
bytesread += ret;
} else break;
}
try {
in.close();
} catch (IOException e) {
e.printStackTrace();
}
if (audioPlayer!=null){
if (audioPlayer.getPlayState() != AudioTrack.PLAYSTATE_STOPPED) { // compare play state, not init state
audioPlayer.stop();
audioPlayer.release();
mThread = null;
}
}
if (isLooping && isPlay ) mHandler.postDelayed(mLopingRunnable,100);
}
}
public void setLooping(){
isLooping = !isLooping;
}
public void pause(){
}
public void stop(){
isPlay = false;
if (mThread != null) {
mThread.interrupt();
mThread = null;
}
if (audioPlayer != null) {
audioPlayer.stop();
audioPlayer.release();
audioPlayer = null;
}
}
public void reset(){
}
}
private void startRecording() {
if( bufferSize == AudioRecord.ERROR_BAD_VALUE)
Log.e( TAG, "Bad Value for \"bufferSize\", recording parameters are not supported by the hardware");
if( bufferSize == AudioRecord.ERROR )
Log.e( TAG, "Bad Value for \"bufferSize\", implementation was unable to query the hardware for its output properties");
Log.e( TAG, "\"bufferSize\"="+bufferSize);
// Initialize Audio Recorder.
recorder = new AudioRecord(AUDIO_SOURCE, RECORDER_SAMPLERATE, AudioFormat.CHANNEL_CONFIGURATION_MONO, RECORDER_AUDIO_ENCODING, bufferSize);
// Starts recording from the AudioRecord instance.
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
Replace your recording code with the startRecording() above.
I am using the api.ai (Google Dialogflow) service to detect intents in my chat application. I also want to record an audio file and send it to the backend server.
I am able to do this with the following code snippet, but the problem is that my application works properly on a Vivo V9 (SDK 27) and a Poco F1 (SDK 28), but crashes when I run it on the emulator.
I debugged the code and found that if I comment out the recorder.startListening() method my application works fine on all devices, but then I am not able to record the audio file.
Here is my code snippet:
public class ChatActivity extends AppCompatActivity implements View.OnClickListener, AIListener{
private AIService aiService;
private static final int REQUEST_INTERNET = 200;
AudioRecord recorder;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_chat);
//GCP STRAMING INITIALIZATION
final AIConfiguration config = new AIConfiguration("******************",
AIConfiguration.SupportedLanguages.English,
AIConfiguration.RecognitionEngine.System);
aiService = AIService.getService(this, config);
aiService.setListener(this);
}
//GCP STREAMING CODE
public void createFile() {
//Creating file
File dir = Environment.getExternalStorageDirectory();
try {
audiofile = File.createTempFile("sound", ".wav", dir);
} catch (IOException e) {
Log.e("I", "external storage access error");
}
}
@Override
public void onResult(AIResponse response) {
Result result = response.getResult();
Log.d("I", "Query: " + result.getResolvedQuery() +
"\nAction: " + result.getAction() +
"\nParameters: " + parameterString);
}
@Override
public void onError(AIError error) {
Log.d("Error", error + "");
aiService.cancel();
}
@Override
public void onAudioLevel(float level) {
}
@Override
public void onListeningStarted() {
try {
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
RECORDER_SAMPLERATE, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING, bufferSize);
int i = recorder.getState();
if (i == 1)
Thread.sleep(1000);
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
@Override
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
} catch (Exception e) {
e.printStackTrace();
}
}
@Override
public void onListeningCanceled() {
try {
aiService.cancel();
} catch (Exception e) {
e.printStackTrace();
}
}
@Override
public void onListeningFinished() {
if (null != recorder) {
isRecording = false;
int i = recorder.getState();
if (i == 1) {
recorder.stop();
}
recorder.release();
recorder = null;
recordingThread = null;
}
copyWaveFile(getTempFilename(), getFilename());
deleteTempFile();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
try {
File initialFile = new File(audioPath);
// File initialFile = new File("/storage/sdcard1/download/01 - Baarish (128 Kbps) - DownloadMing.SE.mp3");
InputStream targetStream = new FileInputStream(initialFile);
byte[] buffer = new byte[targetStream.available()];
int bytesRead;
while ((bytesRead = targetStream.read(buffer)) > 0) {
baos.write(buffer, 0, bytesRead);
}
} catch (Exception e) {
e.printStackTrace();
}
//upload to server
uploadRecording(baos.toByteArray());
}
}
I want to use the api.ai service and also record the audio file (.wav format) at the same time.
I have a problem with the Android Wear microphone.
A Bluetooth headset works with Android Wear. My app is a VoIP application.
When I play voice (from the network) in my app with a paired Bluetooth headset, the headset plays the voice. But when I try to record voice from the microphone, the Android Wear microphone turns on, not the headset's.
How can I implement voice capture from the Bluetooth headset microphone?
Here is my Player class:
public class Player {
private static final String TAG = Player.class.getName();
private AudioTrack audioTrack;
private boolean isWorking;
public Player() {
try {
audioTrack = new AudioTrack(
new AudioAttributes.Builder()
.setUsage(AudioAttributes.USAGE_VOICE_COMMUNICATION)
.setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
.setLegacyStreamType(AudioManager.STREAM_MUSIC)
.build(),
new AudioFormat.Builder()
.setChannelMask(AudioFormat.CHANNEL_OUT_MONO)
.setEncoding(AudioFormat.ENCODING_PCM_16BIT)
.setSampleRate(AudioConsts.SAMPLERATE)
.build(),
AudioConsts.GetPlayerBufferSize(),
AudioTrack.MODE_STREAM,
AudioManager.AUDIO_SESSION_ID_GENERATE);
} catch (Exception e) {
Log.e(TAG, e.toString());
}
}
public void play() {
audioTrack.play();
}
public void stopReading() {
if (!isWorking)
return;
audioTrack.release();
isWorking = false;
}
public void appendForPlayback(byte[] audioMessage, int size) {
new Executor().doInBackground(audioMessage);
}
private class Executor extends AsyncTask<byte[], Void, Void> {
@Override
protected Void doInBackground(byte[]... bytes) {
if (bytes != null) {
if (bytes.length > 0) {
byte[] audioMessage = bytes[0];
if (audioMessage.length != 0) {
int written = audioTrack.write(audioMessage, 0, audioMessage.length);
if (written != audioMessage.length) {
Log.d(TAG, "WTF");
}
}
}
}
return null;
}
}}
And here is my Recorder class:
public class Recorder {
private static final String TAG = Recorder.class.getName();
private boolean isAlive;
private Thread recordThread;
private IRecorderBytesListener listener;
private AudioRecord audioRecord;
public Recorder() {
isAlive = true;
audioRecord = new AudioRecord.Builder()
.setAudioSource(MediaRecorder.AudioSource.MIC)
.setAudioFormat(new AudioFormat.Builder()
.setSampleRate(AudioConsts.SAMPLERATE)
.setEncoding(AudioConsts.ENCODING_PCM_16BIT)
.build())
.setBufferSizeInBytes(AudioConsts.GetRecorderBufferSize())
.build();
//audioRecord.setPreferredDevice(audioDeviceInfo);
recordThread = new Thread(() -> {
ByteBuffer buffer = ByteBuffer.allocateDirect(AudioConsts.GetRecorderBufferSize());
byte[] audioMsg = new byte[AudioConsts.FRAME_SIZE * AudioConsts.ENCODING_PCM_16BIT];
while (isAlive) {
if (audioRecord.getRecordingState() == 1) {
try {
Thread.sleep(50);
} catch (Exception e) {
Log.d(TAG, "hz");
}
continue;
}
buffer = (ByteBuffer) buffer.rewind();
int len = audioRecord.read(buffer, AudioConsts.GetRecorderBufferSize());
if (len != AudioConsts.GetRecorderBufferSize())
Log.d(TAG, "WTF LEN");
len -= AudioConsts.OFFSET_AUDIO_RECORDER;
if (len > 0) {
try {
System.arraycopy(buffer.array(), AudioConsts.OFFSET_AUDIO_RECORDER,
audioMsg, 0, len);
if (listener != null)
listener.bytesReceived(audioMsg, len);
} catch (Exception e) {
Log.e(TAG, e.toString());
}
} else {
Log.d(TAG, "WTF");
}
}
audioRecord.stop();
});
recordThread.start();
}
public void startRecording() {
audioRecord.startRecording();
}
public void stopRecording() {
audioRecord.stop();
}
public void setListener(IRecorderBytesListener listener) {
this.listener = listener;
}
public void dispose() {
isAlive = false;
}}
Sorry for my English.
I've decided to record audio with AudioRecord rather than MediaRecorder, in order to achieve maximum quality. The problem is that the app won't work for some reason. There are two buttons: record and play; record starts and stops the recording (using a new thread), and play is supposed to play the file using MediaPlayer.
Code:
public class MyActivity extends Activity {
AudioRecord recorder = null;
int SAMPLE_RATE = 44100;
int ENCODING = AudioFormat.ENCODING_PCM_16BIT;
int SOURCE = MediaRecorder.AudioSource.MIC;
int CONFIG = AudioFormat.CHANNEL_IN_MONO;
int BUFFER_SIZE;
boolean isRecording = false;
boolean isPlaying = false;
String currentFileDir;
byte[] b;
File file;
OutputStream FOS;
int count =0;
MediaPlayer mediaPlayer;
Thread recordThread;
private Button recordButton;
private Button playButton;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_my);
currentFileDir = getFilesDir().getAbsolutePath() + "Record.pcm";
recordButton = (Button)findViewById(R.id.RecordButton);
recordButton.setOnClickListener(new MyOCL());
playButton = (Button)findViewById(R.id.PlayButton);
playButton.setOnClickListener(new MyOCL());
}
protected void record(){
file = new File(currentFileDir);
BUFFER_SIZE = AudioRecord.getMinBufferSize(SAMPLE_RATE, CONFIG, ENCODING);
recorder = new AudioRecord(SOURCE, SAMPLE_RATE, CONFIG, ENCODING, BUFFER_SIZE);
isRecording = true;
b = new byte[BUFFER_SIZE];
try{
FOS = new FileOutputStream(file);
}
catch (Exception e){Log.e("Open FOS", "new failed");}
while (isRecording){
recorder.read(b, 0, BUFFER_SIZE);
try{
FOS.write(b, count * BUFFER_SIZE, BUFFER_SIZE);
count++;
}
catch (Exception e){Log.e("write FOS", "write failed");}
}
try {
FOS.close();
}
catch (Exception e){Log.e("close FOS", "close failed");}
}
private class MyOCL implements View.OnClickListener{
@Override
public void onClick(View view){
switch(view.getId()){
case R.id.PlayButton:
if(isPlaying == false){
playButton.setText("Stop Playing");
setPlaying();
mediaPlayer.start();
}
else {
playButton.setText("Start Playing");
mediaPlayer.stop();
mediaPlayer.release();
mediaPlayer.reset();
}
break;
case R.id.RecordButton:
if(isRecording == false) {
recordThread = new Thread(new Runnable() {
#Override
public void run() {
record();
}
});
recordThread.start();
recordButton.setText("Stop Recording");
}
else{
recordButton.setText("Start recording");
isRecording = false;
}
break;
}
}
}
protected void setPlaying(){
try{
mediaPlayer = new MediaPlayer();
mediaPlayer.reset();
mediaPlayer.setDataSource(currentFileDir);
mediaPlayer.prepare();
mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
//mediaPlayer.start();
}
catch (Exception e){
Log.e("Play initialize", "Can't call prepare function" + e.getMessage());
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.my, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
}
Here is my code, which works for me:
public class MainActivity extends Activity
{
AudioRecord record = null;
AudioTrack track = null;
boolean isRecording;
int sampleRate = 44100;
Button startRecord, stopRecord, playRecord = null;
@Override
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
setVolumeControlStream(AudioManager.MODE_IN_COMMUNICATION);
startRecord = (Button) findViewById(R.id.start_recording);
stopRecord = (Button) findViewById(R.id.stop_recording);
playRecord = (Button) findViewById(R.id.play_recording);
startRecord.setOnClickListener(new StartRecordListener());
stopRecord.setOnClickListener(new StopRecordListener());
playRecord.setOnClickListener(new PlayRecordListener());
stopRecord.setEnabled(false);
}
private void startRecord()
{
File recordFile = new File(Environment.getExternalStorageDirectory(), "Record.pcm");
try
{
recordFile.createNewFile();
OutputStream outputStream = new FileOutputStream(recordFile);
BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(outputStream);
DataOutputStream dataOutputStream = new DataOutputStream(bufferedOutputStream);
int minBufferSize = AudioRecord.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
short[] audioData = new short[minBufferSize];
record = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT,
minBufferSize);
record.startRecording();
while (isRecording)
{
int numberOfShort = record.read(audioData, 0, minBufferSize);
for (int i = 0; i < numberOfShort; i++)
{
dataOutputStream.writeShort(audioData[i]);
}
}
record.stop();
dataOutputStream.close();
}
catch (IOException e)
{
e.printStackTrace();
}
}
public void playRecord()
{
File recordFile = new File(Environment.getExternalStorageDirectory(), "Record.pcm");
int shortSizeInBytes = Short.SIZE / Byte.SIZE;
int bufferSizeInBytes = (int) (recordFile.length() / shortSizeInBytes);
short[] audioData = new short[bufferSizeInBytes];
try
{
InputStream inputStream = new FileInputStream(recordFile);
BufferedInputStream bufferedInputStream = new BufferedInputStream(inputStream);
DataInputStream dataInputStream = new DataInputStream(bufferedInputStream);
int i = 0;
while (dataInputStream.available() > 0)
{
audioData[i] = dataInputStream.readShort();
i++;
}
dataInputStream.close();
track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
bufferSizeInBytes, AudioTrack.MODE_STREAM);
track.play();
track.write(audioData, 0, bufferSizeInBytes);
}
catch (FileNotFoundException e)
{
e.printStackTrace();
}
catch (IOException e)
{
e.printStackTrace();
}
}
public class StartRecordListener implements View.OnClickListener
{
@Override
public void onClick(View v)
{
Thread recordThread = new Thread(new Runnable()
{
@Override
public void run()
{
isRecording = true;
MainActivity.this.startRecord();
}
});
recordThread.start();
startRecord.setEnabled(false);
stopRecord.setEnabled(true);
}
}
public class StopRecordListener implements View.OnClickListener
{
@Override
public void onClick(View v)
{
isRecording = false;
startRecord.setEnabled(true);
stopRecord.setEnabled(false);
}
}
public class PlayRecordListener implements View.OnClickListener
{
@Override
public void onClick(View v)
{
MainActivity.this.playRecord();
}
}
}
The XML layout contains 3 buttons with the following IDs: start_recording, stop_recording, play_recording.
And add the following permissions:
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
Good luck, and I hope it's okay that I'm using 3 buttons in the code above.
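Note that on Android 6.0 (API 23) and newer, RECORD_AUDIO and WRITE_EXTERNAL_STORAGE are dangerous permissions that must also be granted at runtime, otherwise recording will not work even with the manifest entries above. A minimal sketch, assuming it is called from the Activity before recording starts (the method name and request code value are just illustrative):
// Runtime permission check for API 23+ (call inside the Activity before recording).
private static final int REQUEST_AUDIO_PERMISSIONS = 1; // arbitrary request code

private void requestAudioPermissionsIfNeeded() {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M
            && checkSelfPermission(Manifest.permission.RECORD_AUDIO)
                    != PackageManager.PERMISSION_GRANTED) {
        // The result is delivered to onRequestPermissionsResult().
        requestPermissions(new String[] {
                Manifest.permission.RECORD_AUDIO,
                Manifest.permission.WRITE_EXTERNAL_STORAGE
        }, REQUEST_AUDIO_PERMISSIONS);
    }
}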
I want to detect a whistle sound. For that I have implemented http://code.google.com/p/musicg/
The source code itself has an issue: when you start the app it is ready to listen, but when you go back and restart the detector thread, it does not trigger whistle detection.
DetectorThread.java
package weetech.wallpaper.services;
import java.util.LinkedList;
import weetech.wallpaper.utils.Debug;
import android.media.AudioFormat;
import android.media.AudioRecord;
import com.musicg.api.WhistleApi;
import com.musicg.wave.WaveHeader;
public class DetectorThread extends Thread {
private RecorderThread recorder;
private WaveHeader waveHeader;
private WhistleApi whistleApi;
private Thread _thread;
private LinkedList<Boolean> whistleResultList = new LinkedList<Boolean>();
private int numWhistles;
private int totalWhistlesDetected = 0;
private int whistleCheckLength = 3;
private int whistlePassScore = 3;
public DetectorThread(RecorderThread recorder) {
this.recorder = recorder;
AudioRecord audioRecord = recorder.getAudioRecord();
int bitsPerSample = 0;
if (audioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT) {
bitsPerSample = 16;
} else if (audioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_8BIT) {
bitsPerSample = 8;
}
int channel = 0;
// whistle detection only supports mono channel
if (audioRecord.getChannelConfiguration() == AudioFormat.CHANNEL_IN_MONO) {
channel = 1;
}
waveHeader = new WaveHeader();
waveHeader.setChannels(channel);
waveHeader.setBitsPerSample(bitsPerSample);
waveHeader.setSampleRate(audioRecord.getSampleRate());
whistleApi = new WhistleApi(waveHeader);
}
private void initBuffer() {
numWhistles = 0;
whistleResultList.clear();
// init the first frames
for (int i = 0; i < whistleCheckLength; i++) {
whistleResultList.add(false);
}
// end init the first frames
}
public void start() {
_thread = new Thread(this);
_thread.start();
}
public void stopDetection() {
_thread = null;
}
@Override
public void run() {
Debug.e("", "DetectorThread started...");
try {
byte[] buffer;
initBuffer();
Thread thisThread = Thread.currentThread();
while (_thread == thisThread) {
// detect sound
buffer = recorder.getFrameBytes();
// audio analyst
if (buffer != null) {
// sound detected
// MainActivity.whistleValue = numWhistles;
// whistle detection
// System.out.println("*Whistle:");
try {
boolean isWhistle = whistleApi.isWhistle(buffer);
Debug.e("", "isWhistle : " + isWhistle + " "
+ buffer.length);
if (whistleResultList.getFirst()) {
numWhistles--;
}
whistleResultList.removeFirst();
whistleResultList.add(isWhistle);
if (isWhistle) {
numWhistles++;
}
// Debug.e("", "numWhistles : " + numWhistles);
if (numWhistles >= whistlePassScore) {
// clear buffer
initBuffer();
totalWhistlesDetected++;
Debug.e("", "totalWhistlesDetected : "
+ totalWhistlesDetected);
if (onWhistleListener != null) {
onWhistleListener.onWhistle();
}
}
} catch (Exception e) {
Debug.w("", "" + e.getCause());
}
// end whistle detection
} else {
// Debug.e("", "no sound detected");
// no sound detected
if (whistleResultList.getFirst()) {
numWhistles--;
}
whistleResultList.removeFirst();
whistleResultList.add(false);
// MainActivity.whistleValue = numWhistles;
}
// end audio analyst
}
Debug.e("", "Terminating detector thread...");
} catch (Exception e) {
e.printStackTrace();
}
}
private OnWhistleListener onWhistleListener;
public void setOnWhistleListener(OnWhistleListener onWhistleListener) {
this.onWhistleListener = onWhistleListener;
}
public interface OnWhistleListener {
void onWhistle();
}
public int getTotalWhistlesDetected() {
return totalWhistlesDetected;
}
}
RecorderThread.java
public class RecorderThread {
private AudioRecord audioRecord;
private int channelConfiguration;
private int audioEncoding;
private int sampleRate;
private int frameByteSize; // for 1024 fft size (16bit sample size)
byte[] buffer;
public RecorderThread() {
sampleRate = 44100;
frameByteSize = 1024 * 2;
channelConfiguration = AudioFormat.CHANNEL_IN_MONO;
audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
int recBufSize = AudioRecord.getMinBufferSize(sampleRate,
channelConfiguration, audioEncoding); // need to be larger than
// size of a frame
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
sampleRate, channelConfiguration, audioEncoding, recBufSize);
buffer = new byte[frameByteSize];
}
public AudioRecord getAudioRecord() {
return audioRecord;
}
public boolean isRecording() {
if (audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
return true;
}
return false;
}
public void startRecording() {
try {
audioRecord.startRecording();
} catch (Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
try {
audioRecord.stop();
} catch (Exception e) {
e.printStackTrace();
}
}
public byte[] getFrameBytes() {
audioRecord.read(buffer, 0, frameByteSize);
// analyze sound
int totalAbsValue = 0;
short sample = 0;
float averageAbsValue = 0.0f;
for (int i = 0; i < frameByteSize; i += 2) {
sample = (short) ((buffer[i]) | buffer[i + 1] << 8);
totalAbsValue += Math.abs(sample);
}
averageAbsValue = totalAbsValue / frameByteSize / 2;
Debug.e("", "averageAbsValue : " + averageAbsValue);
// no input
if (averageAbsValue < 30) {
return null;
}
return buffer;
}
}
Usage
public class DetectionService extends Service implements
OnWhistleListener {
Handler handler;
private DetectorThread detectorThread;
private RecorderThread recorderThread;
@Override
public void onCreate() {
super.onCreate();
handler = new Handler();
}
@Override
public IBinder onBind(Intent intent) {
return null;
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
try {
if (intent != null && intent.getExtras() != null) {
if (intent.getExtras().containsKey("action")) {
Debug.e("", "action : " + intent.getStringExtra("action"));
if (intent.getStringExtra("action").equals("start")) {
startWhistleDetection();
}
if (intent.getStringExtra("action").equals("stop")) {
stopWhistleDetection();
stopSelf();
}
}
} else {
startWhistleDetection();
Debug.e("", "intent is null OR intent.getExtras() is null");
}
} catch (Exception e) {
e.printStackTrace();
}
return super.onStartCommand(intent, flags, startId);
}
private void startWhistleDetection() {
try {
stopWhistleDetection();
} catch (Exception e) {
e.printStackTrace();
}
recorderThread = new RecorderThread();
recorderThread.startRecording();
detectorThread = new DetectorThread(recorderThread);
detectorThread.setOnWhistleListener(this);
detectorThread.start();
}
private void stopWhistleDetection() {
if (detectorThread != null) {
detectorThread.stopDetection();
detectorThread.setOnWhistleListener(null);
detectorThread = null;
}
if (recorderThread != null) {
recorderThread.stopRecording();
recorderThread = null;
}
}
@Override
public void onDestroy() {
super.onDestroy();
}
@Override
public void onWhistle() {
Debug.e("", "onWhistle()");
}
}
It detects the whistle the first time, as long as you don't stop the service. But after stopping and starting again, it does not detect (does not call the listener). I just failed to trace what the issue could be.
Is there any issue with the recording?
I invested 6 hours. :D Unbelievably, the audio recorder is not released when it is stopped. I just released the recorder after stopping.
The source code has a minor silly mistake: it does not release the recorder.
public void stopRecording() {
try {
audioRecord.stop();
audioRecord.release();
} catch (Exception e) {
e.printStackTrace();
}
}
This code is OK for me:
if (detectorThread != null) {
detectorThread.stopDetection();
recorderThread.stopRecording();
}
I need to record and play audio simultaneously, and I use two threads for it. Here is the code:
recorder = new AudioRecord(AudioSource.MIC, 8000,
AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT, bufferSize);
if (recorder.getState() == android.media.AudioRecord.STATE_INITIALIZED)
recorder.startRecording();
isRecording = true;
audioPlayer = new AudioTrack(AudioManager.STREAM_MUSIC, 8000,
AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT, bufferSize,
AudioTrack.MODE_STREAM);
audioPlayer.play();
new Thread(new Runnable() {
@Override
public void run() {
// TODO Auto-generated method stub
while(true)
{
readBytes = recorder.read(data, 0, bufferSize);
if (readBytes == AudioRecord.ERROR_INVALID_OPERATION)
System.out.println("ERROR_INVALID_OPERATION");
else if (readBytes == AudioRecord.ERROR_BAD_VALUE)
System.out.println("ERROR_BAD_VALUE");
}
}
}).start();
new Thread(new Runnable() {
@Override
public void run() {
// TODO Auto-generated method stub
if (readBytes > 0)
audioPlayer.write(data, 0, readBytes);
}
}).start();
I get the error below:
02-23 14:19:59.625: E/AudioTrack(1786): Invalid buffer size: minFrameCount 2400, frameCount 2048
Thanks in advance
Try it like this:
int MIN_BUF_SIZE_REC = AudioRecord.getMinBufferSize(8000,
AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT);
AudioRecord recorder = new AudioRecord(
MediaRecorder.AudioSource.MIC, 8000,
AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT, MIN_BUF_SIZE_REC);
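Note that the error in the question ("minFrameCount 2400, frameCount 2048") is raised by the AudioTrack, so the playback buffer also needs to be at least AudioTrack's own minimum rather than a fixed value. A minimal sketch of sizing it the same way (CHANNEL_OUT_MONO is the current name for the deprecated output channel constant; the variable names are illustrative):
// Size the playback buffer from AudioTrack's own minimum, not from the recorder's.
int MIN_BUF_SIZE_PLAY = AudioTrack.getMinBufferSize(8000,
        AudioFormat.CHANNEL_OUT_MONO,
        AudioFormat.ENCODING_PCM_16BIT);
AudioTrack audioPlayer = new AudioTrack(AudioManager.STREAM_MUSIC, 8000,
        AudioFormat.CHANNEL_OUT_MONO,
        AudioFormat.ENCODING_PCM_16BIT, MIN_BUF_SIZE_PLAY,
        AudioTrack.MODE_STREAM);
audioPlayer.play();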
This is the code I used to solve the issue, hope it helps:
public class Chat extends Activity {
Devc devc;
byte[] bytesent, bytesgot;
private BluetoothSocket mmSocket = null;
private InputStream mmInStream;
private OutputStream mmOutStream;
Handler handler;
Button send;
TextView getmessage;
EditText sentmessage;
TextView nowsentmessage;
TextView you;
int bytes = 0;
Thread thread;
private int audioSource = MediaRecorder.AudioSource.MIC;
private int samplingRate = 44100; /* in Hz */
private int channelConfig = AudioFormat.CHANNEL_CONFIGURATION_MONO;
private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
private int bufferSize = AudioRecord.getMinBufferSize(samplingRate,
channelConfig, audioFormat);
private int sampleNumBits = 16;
private int numChannels = 1;
AudioTrack audioPlayer;
AudioRecord recorder;
Boolean isRecording;
byte[] bytes2;
protected void onCreate(Bundle savedInstanceState) {
// TODO Auto-generated method stub
super.onCreate(savedInstanceState);
setContentView(R.layout.chat);
getmessage = (TextView) findViewById(R.id.gottext);
sentmessage = (EditText) findViewById(R.id.entertext);
nowsentmessage = (TextView) findViewById(R.id.metext);
you = (TextView) findViewById(R.id.you);
send = (Button) findViewById(R.id.sent);
bufferSize += 2048;
recorder = new AudioRecord(audioSource, samplingRate, channelConfig,
audioFormat, 44100);
audioPlayer = new AudioTrack(AudioManager.STREAM_MUSIC, 44100,
AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT, bufferSize,
AudioTrack.MODE_STREAM);
handler = new Handler() {
@Override
public void handleMessage(Message msg) {
// display each item in a single line
}
};
}
@Override
protected void onResume() {
// TODO Auto-generated method stub
super.onResume();
mmSocket = Devc.bluetoothSocket;
InputStream tmpIn = null;
OutputStream tmpOut = null;
// Get the input and output streams, using temp objects because
// member streams are final
try {
tmpIn = mmSocket.getInputStream();
tmpOut = mmSocket.getOutputStream();
} catch (IOException e) {
}
mmInStream = tmpIn;
mmOutStream = tmpOut;
new Thread(new Runnable() {
@Override
public void run() {
// TODO Auto-generated method stub
recorder.startRecording();
isRecording = true;
while (true) {
int readBytes = 0;
readBytes = recorder.read(bytesent, 0, bufferSize);
if (readBytes > 0) {
write(bytesent);
bufferSize += 2048;
}
}
}
}).start();
new Thread(new Runnable() {
@Override
public void run() {
// TODO Auto-generated method stub\
while (true) {
int readBytes = 0;
try {
bytes = mmInStream.read(bytesgot);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
audioPlayer.write(bytesgot, 0, bytes);
audioPlayer.play();
}
}
}).start();
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
// TODO Auto-generated method stub
super.onActivityResult(requestCode, resultCode, data);
}
@Override
protected void onPause() {
// TODO Auto-generated method stub
super.onPause();
try {
mmInStream.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
mmOutStream.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
cancel();
}
public void write(byte[] bytes) {
try {
mmOutStream.write(bytes);
mmOutStream.flush();
} catch (IOException e) {
}
}
public void cancel() {
try {
mmSocket.close();
} catch (IOException e) {
}
}
@Override
protected void onDestroy() {
// TODO Auto-generated method stub
super.onDestroy();
cancel();
thread.stop();
Chat.this.finish();
// thread.stop();
}
}
Check this out. I have solved the same problem using this code.
I also created two inner classes for recording and playback (a RecordAudio class and a PlayAudio class).
If you want a clearer audio clip, you can use Android's NoiseSuppressor; a short sketch of attaching it to the recording session follows, and the full activity comes after that.
Cheers! :D
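A minimal sketch of attaching the platform NoiseSuppressor to an existing AudioRecord session (API 16+); the method and field names are illustrative, and the effect is only available on devices where NoiseSuppressor.isAvailable() returns true:
// Attach the platform noise suppressor to the recording session, if the device supports it.
private NoiseSuppressor noiseSuppressor; // release() this when the AudioRecord is released

private void enableNoiseSuppressor(AudioRecord audioRecord) {
    if (NoiseSuppressor.isAvailable()) {
        noiseSuppressor = NoiseSuppressor.create(audioRecord.getAudioSessionId());
        if (noiseSuppressor != null) {
            noiseSuppressor.setEnabled(true);
        }
    }
}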
public class AltAudioRecorder extends Activity implements OnClickListener {
RecordAudio recordTask;
PlayAudio playTask;
Button startRecordingButton, stopRecordingButton, startPlaybackButton,
stopPlaybackButton;
TextView statusText;
File recordingFile;
boolean isRecording = false;
boolean isPlaying = false;
int frequency = 8000;
int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
statusText = (TextView) this.findViewById(R.id.StatusTextView);
startRecordingButton = (Button) this
.findViewById(R.id.StartRecordingButton);
stopRecordingButton = (Button) this
.findViewById(R.id.StopRecordingButton);
startPlaybackButton = (Button) this
.findViewById(R.id.StartPlaybackButton);
stopPlaybackButton = (Button) this.findViewById(R.id.StopPlaybakButton);
startRecordingButton.setOnClickListener(this);
stopRecordingButton.setOnClickListener(this);
startPlaybackButton.setOnClickListener(this);
stopPlaybackButton.setOnClickListener(this);
stopRecordingButton.setEnabled(false);
startPlaybackButton.setEnabled(false);
stopPlaybackButton.setEnabled(false);
File path = new File(Environment.getExternalStorageDirectory()
.getAbsolutePath()
+ "/FinalOne/src/com/example/finalone/files/");
path.mkdirs();
try {
recordingFile = File.createTempFile("recording", ".pcm", path);
} catch (IOException e) {
throw new RuntimeException("Couldn't create file on SD card", e);
}
}
@Override
public void onClick(View v) {
if (v == startRecordingButton) {
record();
} else if (v == stopRecordingButton) {
stopRecording();
} else if (v == startPlaybackButton) {
play();
} else if (v == stopPlaybackButton) {
stopPlaying();
}
}
public void play() {
startPlaybackButton.setEnabled(true);
playTask = new PlayAudio();
playTask.execute();
stopPlaybackButton.setEnabled(true);
}
public void stopPlaying() {
isPlaying = false;
stopPlaybackButton.setEnabled(false);
startPlaybackButton.setEnabled(true);
}
public void record() {
startRecordingButton.setEnabled(false);
stopRecordingButton.setEnabled(true);
// For Fun
startPlaybackButton.setEnabled(true);
recordTask = new RecordAudio();
recordTask.execute();
}
public void stopRecording() {
isRecording = false;
}