Detect 'Whistle' sound in Android

I want to detect a 'whistle' sound. For that I have implemented the detector from http://code.google.com/p/musicg/
The sample source code itself has an issue: when you start the app it is ready to listen, but when you go back and then restart the detector thread, it no longer triggers whistle detection.
DetectorThread.java
package weetech.wallpaper.services;
import java.util.LinkedList;
import weetech.wallpaper.utils.Debug;
import android.media.AudioFormat;
import android.media.AudioRecord;
import com.musicg.api.WhistleApi;
import com.musicg.wave.WaveHeader;
public class DetectorThread extends Thread {
private RecorderThread recorder;
private WaveHeader waveHeader;
private WhistleApi whistleApi;
private Thread _thread;
private LinkedList<Boolean> whistleResultList = new LinkedList<Boolean>();
private int numWhistles;
private int totalWhistlesDetected = 0;
private int whistleCheckLength = 3;
private int whistlePassScore = 3;
public DetectorThread(RecorderThread recorder) {
this.recorder = recorder;
AudioRecord audioRecord = recorder.getAudioRecord();
int bitsPerSample = 0;
if (audioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT) {
bitsPerSample = 16;
} else if (audioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_8BIT) {
bitsPerSample = 8;
}
int channel = 0;
// whistle detection only supports mono channel
if (audioRecord.getChannelConfiguration() == AudioFormat.CHANNEL_IN_MONO) {
channel = 1;
}
waveHeader = new WaveHeader();
waveHeader.setChannels(channel);
waveHeader.setBitsPerSample(bitsPerSample);
waveHeader.setSampleRate(audioRecord.getSampleRate());
whistleApi = new WhistleApi(waveHeader);
}
private void initBuffer() {
numWhistles = 0;
whistleResultList.clear();
// init the first frames
for (int i = 0; i < whistleCheckLength; i++) {
whistleResultList.add(false);
}
// end init the first frames
}
public void start() {
_thread = new Thread(this);
_thread.start();
}
public void stopDetection() {
_thread = null;
}
@Override
public void run() {
Debug.e("", "DetectorThread started...");
try {
byte[] buffer;
initBuffer();
Thread thisThread = Thread.currentThread();
while (_thread == thisThread) {
// detect sound
buffer = recorder.getFrameBytes();
// audio analyst
if (buffer != null) {
// sound detected
// MainActivity.whistleValue = numWhistles;
// whistle detection
// System.out.println("*Whistle:");
try {
boolean isWhistle = whistleApi.isWhistle(buffer);
Debug.e("", "isWhistle : " + isWhistle + " "
+ buffer.length);
if (whistleResultList.getFirst()) {
numWhistles--;
}
whistleResultList.removeFirst();
whistleResultList.add(isWhistle);
if (isWhistle) {
numWhistles++;
}
// Debug.e("", "numWhistles : " + numWhistles);
if (numWhistles >= whistlePassScore) {
// clear buffer
initBuffer();
totalWhistlesDetected++;
Debug.e("", "totalWhistlesDetected : "
+ totalWhistlesDetected);
if (onWhistleListener != null) {
onWhistleListener.onWhistle();
}
}
} catch (Exception e) {
Debug.w("", "" + e.getCause());
}
// end whistle detection
} else {
// Debug.e("", "no sound detected");
// no sound detected
if (whistleResultList.getFirst()) {
numWhistles--;
}
whistleResultList.removeFirst();
whistleResultList.add(false);
// MainActivity.whistleValue = numWhistles;
}
// end audio analyst
}
Debug.e("", "Terminating detector thread...");
} catch (Exception e) {
e.printStackTrace();
}
}
private OnWhistleListener onWhistleListener;
public void setOnWhistleListener(OnWhistleListener onWhistleListener) {
this.onWhistleListener = onWhistleListener;
}
public interface OnWhistleListener {
void onWhistle();
}
public int getTotalWhistlesDetected() {
return totalWhistlesDetected;
}
}
RecorderThread.java
public class RecorderThread {
private AudioRecord audioRecord;
private int channelConfiguration;
private int audioEncoding;
private int sampleRate;
private int frameByteSize; // for 1024 fft size (16bit sample size)
byte[] buffer;
public RecorderThread() {
sampleRate = 44100;
frameByteSize = 1024 * 2;
channelConfiguration = AudioFormat.CHANNEL_IN_MONO;
audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
int recBufSize = AudioRecord.getMinBufferSize(sampleRate,
channelConfiguration, audioEncoding); // need to be larger than
// size of a frame
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
sampleRate, channelConfiguration, audioEncoding, recBufSize);
buffer = new byte[frameByteSize];
}
public AudioRecord getAudioRecord() {
return audioRecord;
}
public boolean isRecording() {
if (audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
return true;
}
return false;
}
public void startRecording() {
try {
audioRecord.startRecording();
} catch (Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
try {
audioRecord.stop();
} catch (Exception e) {
e.printStackTrace();
}
}
public byte[] getFrameBytes() {
audioRecord.read(buffer, 0, frameByteSize);
// analyze sound
int totalAbsValue = 0;
short sample = 0;
float averageAbsValue = 0.0f;
for (int i = 0; i < frameByteSize; i += 2) {
sample = (short) ((buffer[i] & 0xFF) | (buffer[i + 1] << 8)); // mask the low byte to avoid sign extension
totalAbsValue += Math.abs(sample);
}
averageAbsValue = totalAbsValue / frameByteSize / 2;
Debug.e("", "averageAbsValue : " + averageAbsValue);
// no input
if (averageAbsValue < 30) {
return null;
}
return buffer;
}
}
Usage
public class DetectionService extends Service implements
OnWhistleListener {
Handler handler;
private DetectorThread detectorThread;
private RecorderThread recorderThread;
@Override
public void onCreate() {
super.onCreate();
handler = new Handler();
}
@Override
public IBinder onBind(Intent intent) {
return null;
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
try {
if (intent != null && intent.getExtras() != null) {
if (intent.getExtras().containsKey("action")) {
Debug.e("", "action : " + intent.getStringExtra("action"));
if (intent.getStringExtra("action").equals("start")) {
startWhistleDetection();
}
if (intent.getStringExtra("action").equals("stop")) {
stopWhistleDetection();
stopSelf();
}
}
} else {
startWhistleDetection();
Debug.e("", "intent is null OR intent.getExtras() is null");
}
} catch (Exception e) {
e.printStackTrace();
}
return super.onStartCommand(intent, flags, startId);
}
private void startWhistleDetection() {
try {
stopWhistleDetection();
} catch (Exception e) {
e.printStackTrace();
}
recorderThread = new RecorderThread();
recorderThread.startRecording();
detectorThread = new DetectorThread(recorderThread);
detectorThread.setOnWhistleListener(this);
detectorThread.start();
}
private void stopWhistleDetection() {
if (detectorThread != null) {
detectorThread.stopDetection();
detectorThread.setOnWhistleListener(null);
detectorThread = null;
}
if (recorderThread != null) {
recorderThread.stopRecording();
recorderThread = null;
}
}
@Override
public void onDestroy() {
super.onDestroy();
}
@Override
public void onWhistle() {
Debug.e("", "onWhistle()");
}
}
It detects the whistle the first time, until you stop the service. But after stopping and starting again, it does not detect (the listener is never called). I have failed to trace what the issue could be.
Is there any issue with the recording?

I invested 6 hours, :D Unbelievable: the audio recorder is not released when it is stopped. I simply released the recorder after stopping it.
The source code has a minor, silly mistake: it never releases the recorder.
public void stopRecording() {
try {
audioRecord.stop();
audioRecord.release(); // the missing call: frees the microphone so a new AudioRecord can be created
} catch (Exception e) {
e.printStackTrace();
}
}

This code works for me:
if (detectorThread != null) {
detectorThread.stopDetection();
recorderThread.stopRecording();
}
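For completeness, the whole teardown is then restart-safe: stop the detector, stop and release the recorder, and drop both references so the next start creates fresh instances. A recap using the same names as the DetectionService above:

private void stopWhistleDetection() {
    if (detectorThread != null) {
        detectorThread.stopDetection();
        detectorThread.setOnWhistleListener(null);
        detectorThread = null;
    }
    if (recorderThread != null) {
        recorderThread.stopRecording(); // stop() + release(), as fixed above
        recorderThread = null;
    }
}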

Related

Bluetooth headset's microphone instead of Android Wear's microphone

I have a problem with Android Wear's microphone.
A Bluetooth headset works with Android Wear. My app is a VoIP application.
When I play voice (received from the network) in my app with a paired Bluetooth headset, the headset plays the voice. But when I try to record voice from the microphone, the Android Wear's microphone turns on, not the headset's.
How can I implement voice capture from the Bluetooth headset's microphone?
Attach Player.class
public class Player {
private static final String TAG = Player.class.getName();
private AudioTrack audioTrack;
private boolean isWorking;
public Player() {
try {
audioTrack = new AudioTrack(
new AudioAttributes.Builder()
.setUsage(AudioAttributes.USAGE_VOICE_COMMUNICATION)
.setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
.setLegacyStreamType(AudioManager.STREAM_MUSIC)
.build(),
new AudioFormat.Builder()
.setChannelMask(AudioFormat.CHANNEL_OUT_MONO)
.setEncoding(AudioFormat.ENCODING_PCM_16BIT)
.setSampleRate(AudioConsts.SAMPLERATE)
.build(),
AudioConsts.GetPlayerBufferSize(),
AudioTrack.MODE_STREAM,
AudioManager.AUDIO_SESSION_ID_GENERATE);
} catch (Exception e) {
Log.e(TAG, e.toString());
}
}
public void play() {
audioTrack.play();
}
public void stopReading() {
if (!isWorking)
return;
audioTrack.release();
isWorking = false;
}
public void appendForPlayback(byte[] audioMessage, int size) {
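// Note: calling doInBackground() directly runs the write synchronously on the
// caller's thread; new Executor().execute(audioMessage) would run it in the background.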
new Executor().doInBackground(audioMessage);
}
private class Executor extends AsyncTask<byte[], Void, Void> {
@Override
protected Void doInBackground(byte[]... bytes) {
if (bytes != null) {
if (bytes.length > 0) {
byte[] audioMessage = bytes[0];
if (audioMessage.length != 0) {
int written = audioTrack.write(audioMessage, 0, audioMessage.length);
if (written != audioMessage.length) {
Log.d(TAG, "WTF");
}
}
}
}
return null;
}
}
}
Attach Recorder.class
public class Recorder {
private static final String TAG = Recorder.class.getName();
private boolean isAlive;
private Thread recordThread;
private IRecorderBytesListener listener;
private AudioRecord audioRecord;
public Recorder() {
isAlive = true;
audioRecord = new AudioRecord.Builder()
.setAudioSource(MediaRecorder.AudioSource.MIC)
.setAudioFormat(new AudioFormat.Builder()
.setSampleRate(AudioConsts.SAMPLERATE)
.setEncoding(AudioConsts.ENCODING_PCM_16BIT)
.build())
.setBufferSizeInBytes(AudioConsts.GetRecorderBufferSize())
.build();
//audioRecord.setPreferredDevice(audioDeviceInfo);
recordThread = new Thread(() -> {
ByteBuffer buffer = ByteBuffer.allocateDirect(AudioConsts.GetRecorderBufferSize());
byte[] audioMsg = new byte[AudioConsts.FRAME_SIZE * AudioConsts.ENCODING_PCM_16BIT];
while (isAlive) {
if (audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_STOPPED) {
try {
Thread.sleep(50);
} catch (Exception e) {
Log.d(TAG, "hz");
}
continue;
}
buffer = (ByteBuffer) buffer.rewind();
int len = audioRecord.read(buffer, AudioConsts.GetRecorderBufferSize());
if (len != AudioConsts.GetRecorderBufferSize())
Log.d(TAG, "WTF LEN");
len -= AudioConsts.OFFSET_AUDIO_RECORDER;
if (len > 0) {
try {
System.arraycopy(buffer.array(), AudioConsts.OFFSET_AUDIO_RECORDER,
audioMsg, 0, len);
if (listener != null)
listener.bytesReceived(audioMsg, len);
} catch (Exception e) {
Log.e(TAG, e.toString());
}
} else {
Log.d(TAG, "WTF");
}
}
audioRecord.stop();
});
recordThread.start();
}
public void startRecording() {
audioRecord.startRecording();
}
public void stopRecording() {
audioRecord.stop();
}
public void setListener(IRecorderBytesListener listener) {
this.listener = listener;
}
public void dispose() {
isAlive = false;
}
}
Sorry for my English.
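For reference, on Android, capture is normally routed to a Bluetooth headset by bringing up an SCO link before creating the recorder. A minimal sketch, assuming a Context is available in scope (whether a Wear device routes SCO to the headset depends on the hardware):

AudioManager am = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
am.setMode(AudioManager.MODE_IN_COMMUNICATION);
am.startBluetoothSco();     // bring up the SCO link (asynchronous)
am.setBluetoothScoOn(true); // route audio over SCO once the link is up
// Wait for AudioManager.ACTION_SCO_AUDIO_STATE_UPDATED to report
// SCO_AUDIO_STATE_CONNECTED, then record from MediaRecorder.AudioSource.VOICE_COMMUNICATION.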

Incorrect decoding of H264 video stream on Nexus devices

I'm trying to use the Android MediaCodec class to decode the H.264 video stream of a remote camera. My code is:
public class RemoteCamera {
public interface OnCameraListener {
void onConnected();
void onFailureConnection();
void onDisconnected();
void onReady();
}
private static final int MAX_NAL_LEN = 1024 * 1024;
private static final String TAG = "RemoteCamera";
private OutputThread mOutputThread;
private WebSocketManager mWebSocketManager;
private OnCameraListener mOnCameraListener;
private int mSearchState = 0;
private byte[] mNalData;
private int mNalDataPos;
private MediaCodec mDecoder;
private MediaFormat mFormat;
private SurfaceView mSurfaceView;
private MediaCodec.BufferInfo mInfo = new MediaCodec.BufferInfo();
private boolean mIsWaitingForSPS = true;
public RemoteCamera(final SurfaceView surfaceView, final String wss) {
mSurfaceView = surfaceView;
mWebSocketManager = new WebSocketManager(wss);
mWebSocketManager.setWSListener(new WebSocketManager.OnWSListener() {
@Override
public void onOpen() {
if (mOnCameraListener != null) {
mOnCameraListener.onConnected();
}
}
@Override
public void onClosed() {
if (mOnCameraListener != null) {
mOnCameraListener.onDisconnected();
}
}
@Override
public void onFailure() {
if (mOnCameraListener != null) {
mOnCameraListener.onFailureConnection();
}
}
@Override
public synchronized void onMessage(final ByteString bytes) {
final ByteBuffer bb = ByteBuffer.wrap(bytes.toByteArray());
if (mIsWaitingForSPS) {
if (isSPSUnit(bb)) {
mIsWaitingForSPS = false;
if (mOnCameraListener != null) {
mOnCameraListener.onReady();
}
} else {
return;
}
}
parseDatagram(bb.array(), bytes.size());
}
});
mNalData = new byte[MAX_NAL_LEN];
mNalDataPos = 0;
try {
mDecoder = MediaCodec.createDecoderByType("video/avc");
} catch (Exception e) {
Log.d(TAG, e.toString());
return;
}
mFormat = MediaFormat.createVideoFormat("video/avc", 320, 240);
}
public void setOnCameraListener(final OnCameraListener cameraListener) {
mOnCameraListener = cameraListener;
}
public void startStreaming() {
mSurfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
try {
mDecoder.configure(mFormat, mSurfaceView.getHolder().getSurface(), null, 0);
} catch (Exception e) {
Log.d(TAG, e.toString());
return;
}
mWebSocketManager.wsRegister();
mDecoder.start();
mOutputThread = new OutputThread();
mOutputThread.start();
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i1, int i2) {
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
}
});
}
private void feedDecoder(byte[] n, int len) {
for (; ; ) {
try {
int inputBufferIndex = mDecoder.dequeueInputBuffer(0);
if (inputBufferIndex >= 0) {
final ByteBuffer inputBuffer = mDecoder.getInputBuffer(inputBufferIndex);
inputBuffer.put(n, 0, len);
mDecoder.queueInputBuffer(inputBufferIndex, 0, len, System.currentTimeMillis(), 0);
break;
}
} catch (Exception e) {
Log.d(TAG, e.toString());
}
}
}
private void parseDatagram(byte[] p, int plen) {
try {
for (int i = 0; i < plen; ++i) {
mNalData[mNalDataPos++] = p[i];
if (mNalDataPos == MAX_NAL_LEN - 1) {
mNalDataPos = 0;
}
switch (mSearchState) {
case 0:
case 1:
case 2:
if (p[i] == 0)
mSearchState++;
else
mSearchState = 0;
break;
case 3:
if (p[i] == 1) {
mNalData[0] = 0;
mNalData[1] = 0;
mNalData[2] = 0;
mNalData[3] = 1;
feedDecoder(mNalData, mNalDataPos - 4);
mNalDataPos = 4;
}
mSearchState = 0;
break;
default:
break;
}
}
} catch (Exception e) {
Log.d(TAG, e.toString());
}
}
private boolean isSPSUnit(final ByteBuffer unit) {
return unit.get(4) == 0x67;
}
private class OutputThread extends Thread {
@Override
public void run() {
while (true) {
try {
int outputBufferIndex = mDecoder.dequeueOutputBuffer(mInfo, 10);
if (outputBufferIndex >= 0) {
mDecoder.releaseOutputBuffer(outputBufferIndex, true);
}
} catch (Exception e) {
Log.d(TAG, e.toString());
}
}
}
}
}
I tested the code on a Sony Xperia Z5 Compact and a Yota Phone 2, and it works fine on those devices. The picture I get from the Sony is really good.
Then I tried the video streamer on Nexus 9 and Nexus 7 devices, but there the picture looks like a single row smearing from top to bottom; there is no correct output on the Nexus devices (see the "nexus results" screenshots).
I know that it depends on the native Android media codec, but what should I do to resolve the problem and be able to show video on all devices?
Do not pass the 0x00 0x00 0x00 0x01 NALU start code to the decoder.
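A minimal sketch of that change inside feedDecoder() above, assuming len still includes the 4-byte Annex-B prefix that parseDatagram() keeps at the start of the buffer:

int offset = 4; // skip the 0x00 0x00 0x00 0x01 prefix
final ByteBuffer inputBuffer = mDecoder.getInputBuffer(inputBufferIndex);
inputBuffer.put(n, offset, len - offset);
mDecoder.queueInputBuffer(inputBufferIndex, 0, len - offset, System.currentTimeMillis(), 0);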

Musicg library for clap detection not working on Android

I am using the musicg library for clap detection, but whenever the activity starts it reports continuous clap detection without any clap.
I thought there might be a value issue with the bit rate or frame size.
This is my code:
RecorderThread.java
public class RecorderThread extends Thread {
private AudioRecord audioRecord;
private boolean isRecording;
private int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
private int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
private int sampleRate = 44100;
private int frameByteSize = 2048; // for 1024 fft size (16bit sample size)
byte[] buffer;
public RecorderThread() {
int recBufSize = AudioRecord.getMinBufferSize(sampleRate, channelConfiguration, audioEncoding); // need to be larger than size of a frame
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate, channelConfiguration, audioEncoding, recBufSize);
buffer = new byte[frameByteSize];
}
public AudioRecord getAudioRecord() {
return audioRecord;
}
public boolean isRecording() {
return this.isAlive() && isRecording;
}
public void startRecording() {
try {
audioRecord.startRecording();
isRecording = true;
} catch (Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
try {
audioRecord.stop();
isRecording = false;
} catch (Exception e) {
e.printStackTrace();
}
}
public byte[] getFrameBytes() {
audioRecord.read(buffer, 0, frameByteSize);
// analyze sound
int totalAbsValue = 0;
short sample = 0;
float averageAbsValue = 0.0f;
for (int i = 0; i < frameByteSize; i += 2) {
sample = (short) ((buffer[i] & 0xFF) | (buffer[i + 1] << 8)); // mask the low byte to avoid sign extension
totalAbsValue += Math.abs(sample);
}
averageAbsValue = totalAbsValue / frameByteSize / 2;
//System.out.println(averageAbsValue);
// no input
if (averageAbsValue < 30) {
return null;
}
return buffer;
}
public void run() {
startRecording();
}
}
DetectorThread.java
public class DetectorThread extends Thread {
private RecorderThread recorder;
private WaveHeader waveHeader;
private ClapApi clapApi;
private volatile Thread _thread;
private LinkedList<Boolean> clapResultList = new LinkedList<Boolean>();
private int numClaps;
private int totalClapsDetected = 0;
private int clapCheckLength = 3;
private int clapPassScore = 3;
public DetectorThread(RecorderThread recorder) {
this.recorder = recorder;
AudioRecord audioRecord = recorder.getAudioRecord();
int bitsPerSample = 0;
if (audioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT) {
bitsPerSample = 16;
} else if (audioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_8BIT) {
bitsPerSample = 8;
}
int channel = 0;
// whistle detection only supports mono channel
//if (audioRecord.getChannelConfiguration() == AudioFormat.CHANNEL_CONFIGURATION_MONO) {
channel = 1;
//}
waveHeader = new WaveHeader();
waveHeader.setChannels(channel);
waveHeader.setBitsPerSample(bitsPerSample);
waveHeader.setSampleRate(audioRecord.getSampleRate());
clapApi = new ClapApi(waveHeader);
}
private void initBuffer() {
numClaps = 0;
clapResultList.clear();
// init the first frames
for (int i = 0; i < clapCheckLength; i++) {
clapResultList.add(false);
}
// end init the first frames
}
public void start() {
_thread = new Thread(this);
_thread.start();
}
public void stopDetection() {
_thread = null;
}
public void run() {
try {
byte[] buffer;
initBuffer();
Thread thisThread = Thread.currentThread();
while (_thread == thisThread) {
// detect sound
buffer = recorder.getFrameBytes();
// audio analyst
if (buffer != null) {
// sound detected
MainActivity.clapsValue = numClaps;
// whistle detection
//System.out.println("*Whistle:");
boolean isClap = clapApi.isClap(buffer);
if (clapResultList.getFirst()) {
numClaps--;
}
clapResultList.removeFirst();
clapResultList.add(isClap);
if (isClap) {
numClaps++;
}
//System.out.println("num:" + numWhistles);
if (numClaps >= clapPassScore) {
// clear buffer
initBuffer();
totalClapsDetected++;
}
// end whistle detection
} else {
// no sound detected
if (clapResultList.getFirst()) {
numClaps--;
}
clapResultList.removeFirst();
clapResultList.add(false);
MainActivity.clapsValue = numClaps;
}
// end audio analyst
}
} catch (Exception e) {
e.printStackTrace();
}
}
public int getTotalClapsDetected() {
return totalClapsDetected;
}
}
MainActivity.java
public class MainActivity extends Activity {
public static final int DETECT_NONE = 0;
public static final int DETECT_CLAP = 1;
public static int selectedDetection = DETECT_NONE;
private DetectorThread detectorThread;
private RecorderThread recorderThread;
private Thread detectedTextThread;
public static int clapsValue = 0;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
startVoiceDetection();
}
@Override
protected void onPause() {
super.onPause();
stopVoiceDetection();
}
@Override
protected void onDestroy() {
super.onDestroy();
android.os.Process.killProcess(android.os.Process.myPid());
}
private void startVoiceDetection() {
selectedDetection = DETECT_CLAP;
recorderThread = new RecorderThread();
recorderThread.start();
detectorThread = new DetectorThread(recorderThread);
detectorThread.start();
goListeningView();
}
private void stopVoiceDetection() {
if (recorderThread != null) {
recorderThread.stopRecording();
recorderThread = null;
}
if (detectorThread != null) {
detectorThread.stopDetection();
detectorThread = null;
}
selectedDetection = DETECT_NONE;
}
private void goListeningView() {
if (detectedTextThread == null) {
detectedTextThread = new Thread() {
public void run() {
try {
while (recorderThread != null && detectorThread != null) {
runOnUiThread(new Runnable() {
public void run() {
if (detectorThread != null) {
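// Note: this logs "Detected" every 100 ms for as long as the detector thread
// exists, not when a clap actually occurs; poll clapsValue / totalClapsDetected instead.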
Log.e("Clap", "Detected");
}
}
});
sleep(100);
}
} catch (Exception e) {
e.printStackTrace();
} finally {
detectedTextThread = null;
}
}
};
detectedTextThread.start();
}
}
}
Change
AudioFormat.CHANNEL_CONFIGURATION_MONO
to
AudioFormat.CHANNEL_IN_MONO
That worked for me.
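Applied to the RecorderThread above, the field declaration becomes:

private int channelConfiguration = AudioFormat.CHANNEL_IN_MONO; // was the deprecated CHANNEL_CONFIGURATION_MONO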

How can I play a PCM file

The following code should record audio and store it on the SD card in PCM format.
The code works for me, but the PCM file doesn't play!
I got this code from this link: Android : recording audio using audiorecord class play as fast forwarded
I need to play the PCM file. How can I do that?
public class Audio_Record extends Activity {
private static final int RECORDER_SAMPLERATE = 8000;
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_MONO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private AudioRecord recorder = null;
private Thread recordingThread = null;
private boolean isRecording = false;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
setButtonHandlers();
enableButtons(false);
int bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE,
RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
System.out.println("BUFFER SIZE VALUE IS " + bufferSize);
}
private void setButtonHandlers() {
((Button) findViewById(R.id.btnStart)).setOnClickListener(btnClick);
((Button) findViewById(R.id.btnStop)).setOnClickListener(btnClick);
}
private void enableButton(int id, boolean isEnable) {
((Button) findViewById(id)).setEnabled(isEnable);
}
private void enableButtons(boolean isRecording) {
enableButton(R.id.btnStart, !isRecording);
enableButton(R.id.btnStop, isRecording);
}
int BufferElements2Rec = 1024; // want 2048 (2K) bytes per read; since each sample is 2 bytes, use 1024 elements
int BytesPerElement = 2; // 2 bytes in 16bit format
private void startRecording() {
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
RECORDER_SAMPLERATE, RECORDER_CHANNELS,
RECORDER_AUDIO_ENCODING, BufferElements2Rec * BytesPerElement);
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
private byte[] short2byte(short[] sData) {
int shortArrsize = sData.length;
byte[] bytes = new byte[shortArrsize * 2];
for (int i = 0; i < shortArrsize; i++) {
bytes[i * 2] = (byte) (sData[i] & 0x00FF);
bytes[(i * 2) + 1] = (byte) (sData[i] >> 8);
sData[i] = 0;
}
return bytes;
}
private void writeAudioDataToFile() {
// Write the output audio in byte
String filePath = "/sdcard/voice8K16bitmono.pcm";
short sData[] = new short[BufferElements2Rec];
FileOutputStream os = null;
try {
os = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
while (isRecording) {
// gets the voice output from microphone to byte format
recorder.read(sData, 0, BufferElements2Rec);
System.out.println("Short wirting to file" + sData.toString());
try {
// // writes the data to file from buffer
// // stores the voice buffer
byte bData[] = short2byte(sData);
os.write(bData, 0, BufferElements2Rec * BytesPerElement);
} catch (IOException e) {
e.printStackTrace();
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
private void stopRecording() {
// stops the recording activity
if (null != recorder) {
isRecording = false;
recorder.stop();
recorder.release();
recorder = null;
recordingThread = null;
}
}
private View.OnClickListener btnClick = new View.OnClickListener() {
public void onClick(View v) {
switch (v.getId()) {
case R.id.btnStart: {
enableButtons(true);
startRecording();
break;
}
case R.id.btnStop: {
enableButtons(false);
stopRecording();
break;
}
}
}
};
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
finish();
}
return super.onKeyDown(keyCode, event);
}
}
Android's media player doesn't play PCM files by default. Either:
- Copy it from your SD card to your computer and play it there
- Write your own player using AudioTrack
- Install an app that plays PCM
Here's a tutorial on how to play PCM using the AudioTrack class: (http://jongladwin.blogspot.co.uk/2010/03/android-play-pcmwav-audio-buffer-using.html)
Windows Media Player should be able to play PCM, some alternatives are mentioned here: (http://www.makeuseof.com/answers/play-pcm-file-pc/)
I guess most of the big music player apps on Android will support PCM.
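Another option is to prepend a standard 44-byte WAV header to the raw PCM so that ordinary players accept the file. A sketch with a hypothetical helper, assuming 8 kHz, 16-bit mono as recorded above (all multi-byte fields little-endian):

void writeWavHeader(OutputStream os, int pcmLen) throws IOException {
    int sampleRate = 8000, channels = 1, bitsPerSample = 16;
    int byteRate = sampleRate * channels * bitsPerSample / 8;
    ByteBuffer b = ByteBuffer.allocate(44).order(ByteOrder.LITTLE_ENDIAN);
    b.put("RIFF".getBytes()).putInt(36 + pcmLen).put("WAVE".getBytes());
    b.put("fmt ".getBytes()).putInt(16).putShort((short) 1)   // 16-byte fmt chunk, PCM format tag
     .putShort((short) channels).putInt(sampleRate).putInt(byteRate)
     .putShort((short) (channels * bitsPerSample / 8))        // block align
     .putShort((short) bitsPerSample);
    b.put("data".getBytes()).putInt(pcmLen);
    os.write(b.array());
}

Write the header first, then append the PCM bytes, and the result opens as a normal .wav file.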
I also used your code, but my voice recording was just a "zzzzz" buzzing. So I changed the code a little, and now I can listen to the recording without problems or distortion, both on the smartphone and on a PC (in that case with Audacity).
This is my code:
public class VoiceActivity extends Activity {
private static final String TAG = "VoiceRecord";
private static final int RECORDER_SAMPLERATE = 8000;
private static final int RECORDER_CHANNELS_IN = AudioFormat.CHANNEL_IN_MONO;
private static final int RECORDER_CHANNELS_OUT = AudioFormat.CHANNEL_OUT_MONO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private static final int AUDIO_SOURCE = MediaRecorder.AudioSource.MIC;
// Initialize minimum buffer size in bytes.
private int bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE, RECORDER_CHANNELS_IN, RECORDER_AUDIO_ENCODING);
private AudioRecord recorder = null;
private Thread recordingThread = null;
private boolean isRecording = false;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_voice);
((Button) findViewById(R.id.start_button)).setOnClickListener(btnClick);
((Button) findViewById(R.id.stop_button)).setOnClickListener(btnClick);
enableButtons(false);
}
private void enableButton(int id, boolean isEnable) {
((Button) findViewById(id)).setEnabled(isEnable);
}
private void enableButtons(boolean isRecording) {
enableButton(R.id.start_button, !isRecording);
enableButton(R.id.stop_button, isRecording);
}
private void startRecording() {
if( bufferSize == AudioRecord.ERROR_BAD_VALUE)
Log.e( TAG, "Bad Value for \"bufferSize\", recording parameters are not supported by the hardware");
if( bufferSize == AudioRecord.ERROR )
Log.e( TAG, "Bad Value for \"bufferSize\", implementation was unable to query the hardware for its output properties");
Log.e( TAG, "\"bufferSize\"="+bufferSize);
// Initialize Audio Recorder.
recorder = new AudioRecord(AUDIO_SOURCE, RECORDER_SAMPLERATE, RECORDER_CHANNELS_IN, RECORDER_AUDIO_ENCODING, bufferSize);
// Starts recording from the AudioRecord instance.
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
private void writeAudioDataToFile() {
//Write the output audio in byte
String filePath = "/sdcard/8k16bitMono.pcm";
byte saudioBuffer[] = new byte[bufferSize];
FileOutputStream os = null;
try {
os = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
while (isRecording) {
// gets the voice output from microphone to byte format
recorder.read(saudioBuffer, 0, bufferSize);
try {
// writes the data to file from buffer stores the voice buffer
os.write(saudioBuffer, 0, bufferSize);
} catch (IOException e) {
e.printStackTrace();
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
private void stopRecording() throws IOException {
// stops the recording activity
if (null != recorder) {
isRecording = false;
recorder.stop();
recorder.release();
recorder = null;
recordingThread = null;
PlayShortAudioFileViaAudioTrack("/sdcard/8k16bitMono.pcm");
}
}
private void PlayShortAudioFileViaAudioTrack(String filePath) throws IOException{
// We keep temporarily filePath globally as we have only two sample sounds now..
if (filePath==null)
return;
//Reading the file..
File file = new File(filePath); // for ex. path= "/sdcard/samplesound.pcm" or "/sdcard/samplesound.wav"
byte[] byteData = new byte[(int) file.length()];
Log.d(TAG, (int) file.length()+"");
FileInputStream in = null;
try {
in = new FileInputStream( file );
in.read( byteData );
in.close();
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
// Set and push to audio track..
int intSize = android.media.AudioTrack.getMinBufferSize(RECORDER_SAMPLERATE, RECORDER_CHANNELS_OUT, RECORDER_AUDIO_ENCODING);
Log.d(TAG, intSize+"");
AudioTrack at = new AudioTrack(AudioManager.STREAM_MUSIC, RECORDER_SAMPLERATE, RECORDER_CHANNELS_OUT, RECORDER_AUDIO_ENCODING, intSize, AudioTrack.MODE_STREAM);
if (at!=null) {
at.play();
// Write the byte array to the track
at.write(byteData, 0, byteData.length);
at.stop();
at.release();
}
else
Log.d(TAG, "audio track is not initialised ");
}
private View.OnClickListener btnClick = new View.OnClickListener() {
public void onClick(View v) {
switch (v.getId()) {
case R.id.start_button: {
enableButtons(true);
startRecording();
break;
}
case R.id.stop_button: {
enableButtons(false);
try {
stopRecording();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
break;
}
}
}
};
// onClick of backbutton finishes the activity.
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
finish();
}
return super.onKeyDown(keyCode, event);
}
}
Here is my solution:
public class AudioTrackPlayer {
private String pathAudio;
private AudioTrack audioPlayer;
private Thread mThread;
private int bytesread = 0, ret = 0;
private int size;
private FileInputStream in = null;
private byte[] byteData = null;
private int count = 512 * 1024; // 512 kb
private boolean isPlay = true;
private boolean isLooping = false;
private static Handler mHandler;
public AudioTrackPlayer() {
}
public void prepare(String pathAudio){
this.pathAudio = pathAudio;
mHandler = new Handler();
}
public void play(){
stop();
isPlay = true;
bytesread = 0;
ret = 0;
if (pathAudio == null)
return;
audioPlayer = createAudioPlayer();
if (audioPlayer == null) return;
audioPlayer.play();
mThread = new Thread(new PlayerProcess());
mThread.start();
}
private final Runnable mLopingRunnable = new Runnable() {
@Override
public void run() {
play();
}
};
private AudioTrack createAudioPlayer(){
int intSize = android.media.AudioTrack.getMinBufferSize(16000, AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT);
AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 16000, AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT, intSize, AudioTrack.MODE_STREAM);
if (audioTrack == null) {
Log.d("TCAudio", "audio track is not initialised ");
return null;
}
File file = null;
file = new File(pathAudio);
byteData = new byte[(int) count];
try {
in = new FileInputStream(file);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
size = (int) file.length();
return audioTrack;
}
private class PlayerProcess implements Runnable{
@Override
public void run() {
while (bytesread < size && isPlay) {
if (Thread.currentThread().isInterrupted()) {
break;
}
try {
ret = in.read(byteData, 0, count);
} catch (IOException e) {
e.printStackTrace();
}
if (ret != -1) { // Write the byte array to the track
audioPlayer.write(byteData,0, ret);
bytesread += ret;
} else break;
}
try {
in.close();
} catch (IOException e) {
e.printStackTrace();
}
if (audioPlayer!=null){
if (audioPlayer.getPlayState() != AudioTrack.PLAYSTATE_STOPPED) { // PLAYSTATE_* comes from getPlayState(), not getState()
audioPlayer.stop();
audioPlayer.release();
mThread = null;
}
}
if (isLooping && isPlay ) mHandler.postDelayed(mLopingRunnable,100);
}
}
public void setLooping(){
isLooping = !isLooping;
}
public void pause(){
}
public void stop(){
isPlay = false;
if (mThread != null) {
mThread.interrupt();
mThread = null;
}
if (audioPlayer != null) {
audioPlayer.stop();
audioPlayer.release();
audioPlayer = null;
}
}
public void reset(){
}
}
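Usage might look like this (a sketch; the path is hypothetical and must point at raw 16 kHz, 16-bit mono PCM, matching the format hard-coded above):

AudioTrackPlayer player = new AudioTrackPlayer();
player.prepare("/sdcard/sample16k.pcm"); // hypothetical file
player.play();
// ... later
player.stop();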
private void startRecording() {
if( bufferSize == AudioRecord.ERROR_BAD_VALUE)
Log.e( TAG, "Bad Value for \"bufferSize\", recording parameters are not supported by the hardware");
if( bufferSize == AudioRecord.ERROR )
Log.e( TAG, "Bad Value for \"bufferSize\", implementation was unable to query the hardware for its output properties");
Log.e( TAG, "\"bufferSize\"="+bufferSize);
// Initialize Audio Recorder.
recorder = new AudioRecord(AUDIO_SOURCE, RECORDER_SAMPLERATE, AudioFormat.CHANNEL_CONFIGURATION_MONO, RECORDER_AUDIO_ENCODING, bufferSize);
// Starts recording from the AudioRecord instance.
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
Replace your recording code with the version above.

Android AudioRecord.read returns an array of 0 items

What I'm trying to do:
I'm trying to write a program that reads audio from the Android microphone (without recording it) and captures some measure of how loud it is, using a service.
For now I'm sending a pulse from my activity to my service to get a quick sound reading, and checking the Logcat printout of the amplitude as my volume meter.
My problem:
The read method of AudioRecord returns 0.
What I've tried:
Recording full audio instead of using the NullOutputStream does not make a difference.
Some earlier versions randomly started working after trivial changes (such as adding a logcat call), and then stopped working again later.
My thoughts:
I thought originally that maybe the microphone was being used by another application, but it still returns 0 even when this is the only notable service running.
my service:
import org.apache.commons.io.output.NullOutputStream;
public class RecordingService extends Service {
public static final int SAMPLE_RATE = 16000;
private AudioRecord mRecorder;
private File mRecording;
private short[] mBuffer;
public static final NullOutputStream NULL_OUTPUT_STREAM = new NullOutputStream();
public double amplitude = 0.0;
public String TAG = "TAG";
public void onCreate() {
super.onCreate();
}
public int onStartCommand(Intent intent, int flags, int startId){
initRecorder();
mRecorder.startRecording();
mRecording = getFile("raw");
startBufferedWrite(mRecording, intent);
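// NOTE: startBufferedWrite() only spawns the reader thread and returns immediately,
// so the stop()/release() calls below can run before that thread ever calls read().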
mRecorder.stop();
mRecorder.release();
stopSelf();
return START_STICKY;
}
private void initRecorder() {
int bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT);
mBuffer = new short[bufferSize];
mRecorder = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT, bufferSize);
}
private void startBufferedWrite(final File file, Intent intent) {
Log.i(TAG, "WRITING");
new Thread(new Runnable() {
@Override
public void run() {
DataOutputStream output = null;
Log.i(TAG, "running");
try {
output = new DataOutputStream(NULL_OUTPUT_STREAM);
Log.i(TAG, "outputset");
double sum = 0;
//problems!
Log.i(TAG, "mBufferlength= " + mBuffer.length);
int readSize = mRecorder.read(mBuffer, 0, mBuffer.length);
Log.i(TAG, "readSize1= " + readSize);
Log.i(TAG, mBuffer.toString());
//problems!
Log.i(TAG, "read");
for (int i = 0; i < readSize; i++) {
output.writeShort(mBuffer[i]);
sum += mBuffer[i] * mBuffer[i];
}
Log.i(TAG, "summed up");
if (readSize > 0) {
Log.i(TAG, "readSize2= "+readSize);
Log.i(TAG, "setting progress");
amplitude = sum / readSize;
Log.i(TAG, "amplitude= " + amplitude);
Log.i(TAG, "sqrt= " + Math.sqrt(amplitude));
}
else {
Log.i(TAG, "readsize <= 0");
}
} catch (IOException e) {
Log.e(TAG, e.getMessage());
} finally {
if (output != null) {
try {
output.flush();
} catch (IOException e) {
Log.e(TAG, e.getMessage());
} finally {
try {
output.close();
} catch (IOException e) {
Log.e(TAG, e.getMessage());
}
}
}
}
}
}).start();
}
private File getFile(final String suffix) {
Time time = new Time();
time.setToNow();
return new File(Environment.getExternalStorageDirectory(), time.format("%Y%m%d%H%M%S") + "." + suffix);
}
public void onDestroy() {
super.onDestroy();
}
@Override
public IBinder onBind(Intent arg0) {
// TODO Auto-generated method stub
return null;
}
}
my activity:
public class RecordingActivity extends Activity {
private final String startRecordingLabel = "Start recording";
public String TAG = "TAG";
@Override
public void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
final Button button = (Button) findViewById(R.id.button);
button.setText(startRecordingLabel);
button.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(final View v) {
Intent intent = new Intent(RecordingActivity.this, RecordingService.class);
Toast.makeText(RecordingActivity.this, "started", Toast.LENGTH_SHORT).show();
RecordingActivity.this.startService(intent);
}
});
}
@Override
public void onDestroy() {
super.onDestroy();
}
}
It's my first post, but I'm hoping that it's easy to follow. Thank you!
