Recording .wav with Android AudioRecorder

I have read a lot of pages about Android's AudioRecorder. You can see a list of them below the question.
I'm trying to record audio with AudioRecorder, but it's not working well.
public class MainActivity extends Activity {
AudioRecord ar = null;
int buffsize = 0;
int blockSize = 256;
boolean isRecording = false;
private Thread recordingThread = null;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
}
public void baslat(View v)
{
// called when the START button is clicked ("baslat" is Turkish for "start")
buffsize = AudioRecord.getMinBufferSize(44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
ar = new AudioRecord(MediaRecorder.AudioSource.MIC, 44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, buffsize);
ar.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
public void durdur(View v)
{
// called when the STOP button is clicked ("durdur" is Turkish for "stop")
isRecording = false; // let the writer loop finish before stopping the recorder
ar.stop();
}
private void writeAudioDataToFile() {
// Write the output audio in byte
String filePath = "/sdcard/voice8K16bitmono.wav";
short sData[] = new short[buffsize/2];
FileOutputStream os = null;
try {
os = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
while (isRecording) {
// gets the voice output from microphone to byte format
ar.read(sData, 0, buffsize/2);
Log.d("eray","Short wirting to file" + sData.toString());
try {
// // writes the data to file from buffer
// // stores the voice buffer
byte bData[] = short2byte(sData);
os.write(bData, 0, buffsize);
} catch (IOException e) {
e.printStackTrace();
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
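// Converts 16-bit samples to little-endian byte pairs; note it also zeroes the source array as a side effect.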
private byte[] short2byte(short[] sData) {
int shortArrsize = sData.length;
byte[] bytes = new byte[shortArrsize * 2];
for (int i = 0; i < shortArrsize; i++) {
bytes[i * 2] = (byte) (sData[i] & 0x00FF);
bytes[(i * 2) + 1] = (byte) (sData[i] >> 8);
sData[i] = 0;
}
return bytes;
}
It creates a .wav file, but when I try to listen to it, it won't open; I get a "file not supported" error. I've tried playing the file with quite a few media player applications.
NOTE: I have to use AudioRecord instead of MediaRecorder because my app will be doing other work while recording (displaying an equalizer).
Here is the list of pages that I've read about this subject:
http://developer.android.com/reference/android/media/AudioRecord.html#read(short[],%20int,%20int)
Android AudioRecord example
http://audiorecordandroid.blogspot.in
AudioRecord object not initializing
Recording a wav file from the mic in Android - problems
http://i-liger.com/article/android-wav-audio-recording
Creating a WAV file from raw PCM data using the Android SDK
Capturing Sound for Analysis and Visualizing Frequencies in Android
There are a lot of different ways to go about this. I've tried lots of them but nothing works for me. I've been working on this problem for about 6 hours now so I would appreciate a definitive answer, ideally some sample code.

I wrote a simple (by which you should read, not to professional standards) class to do this yesterday, and it works.
private class Wave {
private final int LONGINT = 4;
private final int SMALLINT = 2;
private final int INTEGER = 4;
private final int ID_STRING_SIZE = 4;
private final int WAV_RIFF_SIZE = LONGINT + ID_STRING_SIZE;
private final int WAV_FMT_SIZE = (4 * SMALLINT) + (INTEGER * 2) + LONGINT + ID_STRING_SIZE;
private final int WAV_DATA_SIZE = ID_STRING_SIZE + LONGINT;
private final int WAV_HDR_SIZE = WAV_RIFF_SIZE + ID_STRING_SIZE + WAV_FMT_SIZE + WAV_DATA_SIZE;
private final short PCM = 1;
private final int SAMPLE_SIZE = 2;
int cursor, nSamples;
byte[] output;
public Wave(int sampleRate, short nChannels, short[] data, int start, int end) {
nSamples = end - start + 1;
cursor = 0;
output = new byte[nSamples * SMALLINT + WAV_HDR_SIZE];
buildHeader(sampleRate, nChannels);
writeData(data, start, end);
}
// ------------------------------------------------------------
private void buildHeader(int sampleRate, short nChannels) {
write("RIFF");
write(output.length - WAV_RIFF_SIZE); // RIFF chunk size = file size minus the 8 bytes for the "RIFF" id and this size field
write("WAVE");
writeFormat(sampleRate, nChannels);
}
// ------------------------------------------------------------
public void writeFormat(int sampleRate, short nChannels) {
write("fmt ");
write(WAV_FMT_SIZE - WAV_DATA_SIZE);
write(PCM);
write(nChannels);
write(sampleRate);
write(nChannels * sampleRate * SAMPLE_SIZE);
write((short) (nChannels * SAMPLE_SIZE));
write((short) 16);
}
// ------------------------------------------------------------
public void writeData(short[] data, int start, int end) {
write("data");
write(nSamples * SMALLINT);
for (int i = start; i <= end; i++) write(data[i]);
}
// ------------------------------------------------------------
private void write(byte b) {
output[cursor++] = b;
}
// ------------------------------------------------------------
private void write(String id) {
if (id.length() != ID_STRING_SIZE)
Utils.logError("String " + id + " must have four characters.");
else {
for (int i = 0; i < ID_STRING_SIZE; ++i) write((byte) id.charAt(i));
}
}
// ------------------------------------------------------------
private void write(int i) {
write((byte) (i & 0xFF));
i >>= 8;
write((byte) (i & 0xFF));
i >>= 8;
write((byte) (i & 0xFF));
i >>= 8;
write((byte) (i & 0xFF));
}
// ------------------------------------------------------------
private void write(short i) {
write((byte) (i & 0xFF));
i >>= 8;
write((byte) (i & 0xFF));
}
// ------------------------------------------------------------
public boolean wroteToFile(String filename) {
boolean ok = false;
try {
File path = new File(getFilesDir(), filename);
FileOutputStream outFile = new FileOutputStream(path);
outFile.write(output);
outFile.close();
ok = true;
} catch (FileNotFoundException e) {
e.printStackTrace();
ok = false;
} catch (IOException e) {
ok = false;
e.printStackTrace();
}
return ok;
}
}
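A minimal usage sketch, with assumed names (recordedSamples would be the shorts you collected from your AudioRecord read loop):
// Hypothetical: wrap recorded 16-bit mono samples in a WAV container
short[] recordedSamples = new short[44100]; // one second of silence as a placeholder
Wave wave = new Wave(44100, (short) 1, recordedSamples, 0, recordedSamples.length - 1);
boolean ok = wave.wroteToFile("recording.wav"); // lands under getFilesDir()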
Hope this helps

PCMAudioHelper solved my problem. I'll modify this answer and explain it, but first I have to run some tests on this class.

You might find this OMRECORDER helpful for recording in .WAV format.
If .aac works for you, then check out this WhatsappAudioRecorder:
On startRecording button click:
Initialise a new thread.
Create a file with the .aac extension.
Create an output stream for the file.
Set the output.
Set the listener and execute the thread.
On stop click:
Interrupt the thread and the audio will be saved to the file.
Here is the full gist for reference (a usage sketch follows it):
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.os.Build;
import android.util.Log;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
public class AudioRecordThread implements Runnable {
private static final String TAG = AudioRecordThread.class.getSimpleName();
private static final int SAMPLE_RATE = 44100;
private static final int SAMPLE_RATE_INDEX = 4;
private static final int CHANNELS = 1;
private static final int BIT_RATE = 32000;
private final int bufferSize;
private final MediaCodec mediaCodec;
private final AudioRecord audioRecord;
private final OutputStream outputStream;
private OnRecorderFailedListener onRecorderFailedListener;
AudioRecordThread(OutputStream outputStream, OnRecorderFailedListener onRecorderFailedListener) throws IOException {
this.bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
this.audioRecord = createAudioRecord(this.bufferSize);
this.mediaCodec = createMediaCodec(this.bufferSize);
this.outputStream = outputStream;
this.onRecorderFailedListener = onRecorderFailedListener;
this.mediaCodec.start();
try {
audioRecord.startRecording();
} catch (Exception e) {
Log.w(TAG, e);
mediaCodec.release();
throw new IOException(e);
}
}
@Override
public void run() {
if (onRecorderFailedListener != null) {
Log.d(TAG, "onRecorderStarted");
onRecorderFailedListener.onRecorderStarted();
}
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
ByteBuffer[] codecInputBuffers = mediaCodec.getInputBuffers();
ByteBuffer[] codecOutputBuffers = mediaCodec.getOutputBuffers();
try {
while (!Thread.interrupted()) {
boolean success = handleCodecInput(audioRecord, mediaCodec, codecInputBuffers, Thread.currentThread().isAlive());
if (success)
handleCodecOutput(mediaCodec, codecOutputBuffers, bufferInfo, outputStream);
}
} catch (IOException e) {
Log.w(TAG, e);
} finally {
mediaCodec.stop();
audioRecord.stop();
mediaCodec.release();
audioRecord.release();
try {
outputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
private boolean handleCodecInput(AudioRecord audioRecord,
MediaCodec mediaCodec, ByteBuffer[] codecInputBuffers,
boolean running) throws IOException {
byte[] audioRecordData = new byte[bufferSize];
int length = audioRecord.read(audioRecordData, 0, audioRecordData.length);
if (length == AudioRecord.ERROR_BAD_VALUE ||
length == AudioRecord.ERROR_INVALID_OPERATION ||
length != bufferSize) {
if (length != bufferSize) {
if (onRecorderFailedListener != null) {
Log.d(TAG, "length != BufferSize calling onRecordFailed");
onRecorderFailedListener.onRecorderFailed();
}
return false;
}
}
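// Wait up to 10 ms for a free input buffer (MediaCodec timeouts are in microseconds).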
int codecInputBufferIndex = mediaCodec.dequeueInputBuffer(10 * 1000);
if (codecInputBufferIndex >= 0) {
ByteBuffer codecBuffer = codecInputBuffers[codecInputBufferIndex];
codecBuffer.clear();
codecBuffer.put(audioRecordData);
mediaCodec.queueInputBuffer(codecInputBufferIndex, 0, length, 0, running ? 0 : MediaCodec.BUFFER_FLAG_END_OF_STREAM);
}
return true;
}
private void handleCodecOutput(MediaCodec mediaCodec,
ByteBuffer[] codecOutputBuffers,
MediaCodec.BufferInfo bufferInfo,
OutputStream outputStream)
throws IOException {
int codecOutputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
while (codecOutputBufferIndex != MediaCodec.INFO_TRY_AGAIN_LATER) {
if (codecOutputBufferIndex >= 0) {
ByteBuffer encoderOutputBuffer = codecOutputBuffers[codecOutputBufferIndex];
encoderOutputBuffer.position(bufferInfo.offset);
encoderOutputBuffer.limit(bufferInfo.offset + bufferInfo.size);
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != MediaCodec.BUFFER_FLAG_CODEC_CONFIG) {
byte[] header = createAdtsHeader(bufferInfo.size - bufferInfo.offset);
outputStream.write(header);
byte[] data = new byte[encoderOutputBuffer.remaining()];
encoderOutputBuffer.get(data);
outputStream.write(data);
}
encoderOutputBuffer.clear();
mediaCodec.releaseOutputBuffer(codecOutputBufferIndex, false);
} else if (codecOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
codecOutputBuffers = mediaCodec.getOutputBuffers();
}
codecOutputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
}
}
private byte[] createAdtsHeader(int length) {
int frameLength = length + 7;
byte[] adtsHeader = new byte[7];
adtsHeader[0] = (byte) 0xFF; // Sync Word
adtsHeader[1] = (byte) 0xF1; // MPEG-4, Layer (0), No CRC
adtsHeader[2] = (byte) ((MediaCodecInfo.CodecProfileLevel.AACObjectLC - 1) << 6);
adtsHeader[2] |= (((byte) SAMPLE_RATE_INDEX) << 2);
adtsHeader[2] |= (((byte) CHANNELS) >> 2);
adtsHeader[3] = (byte) (((CHANNELS & 3) << 6) | ((frameLength >> 11) & 0x03));
adtsHeader[4] = (byte) ((frameLength >> 3) & 0xFF);
adtsHeader[5] = (byte) (((frameLength & 0x07) << 5) | 0x1f);
adtsHeader[6] = (byte) 0xFC;
return adtsHeader;
}
private AudioRecord createAudioRecord(int bufferSize) {
AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT, bufferSize * 10);
if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
Log.d(TAG, "Unable to initialize AudioRecord");
throw new RuntimeException("Unable to initialize AudioRecord");
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
if (android.media.audiofx.NoiseSuppressor.isAvailable()) {
android.media.audiofx.NoiseSuppressor noiseSuppressor = android.media.audiofx.NoiseSuppressor
.create(audioRecord.getAudioSessionId());
if (noiseSuppressor != null) {
noiseSuppressor.setEnabled(true);
}
}
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
if (android.media.audiofx.AutomaticGainControl.isAvailable()) {
android.media.audiofx.AutomaticGainControl automaticGainControl = android.media.audiofx.AutomaticGainControl
.create(audioRecord.getAudioSessionId());
if (automaticGainControl != null) {
automaticGainControl.setEnabled(true);
}
}
}
return audioRecord;
}
private MediaCodec createMediaCodec(int bufferSize) throws IOException {
MediaCodec mediaCodec = MediaCodec.createEncoderByType("audio/mp4a-latm");
MediaFormat mediaFormat = new MediaFormat();
mediaFormat.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
mediaFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, SAMPLE_RATE);
mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, CHANNELS);
mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, bufferSize);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
mediaFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
try {
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
} catch (Exception e) {
Log.w(TAG, e);
mediaCodec.release();
throw new IOException(e);
}
return mediaCodec;
}
interface OnRecorderFailedListener {
void onRecorderFailed();
void onRecorderStarted();
}
}
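A usage sketch for the class above (file name and listener body are assumptions; exception handling omitted):
File out = new File(getFilesDir(), "recording.aac");
OutputStream os = new FileOutputStream(out);
Thread recorder = new Thread(new AudioRecordThread(os, new AudioRecordThread.OnRecorderFailedListener() {
@Override public void onRecorderStarted() { /* update UI */ }
@Override public void onRecorderFailed() { /* show an error */ }
}));
recorder.start();
// ...later, on the stop button:
recorder.interrupt(); // run() drains the codec and closes the stream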

I would add this as a comment but I don't yet have enough Stackoverflow rep points...
Opiatefuchs's link takes you to sample code that shows you the exact header formatting necessary to create a .wav file. I've been all over that code myself. Very helpful.
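For quick reference, the canonical 44-byte PCM WAV header that such code builds is:
// All multi-byte fields are little-endian:
// offset 0   "RIFF"
// offset 4   file size - 8 (uint32)
// offset 8   "WAVE"
// offset 12  "fmt "
// offset 16  16 (fmt chunk size for plain PCM)
// offset 20  1 (audio format: PCM)
// offset 22  channel count (uint16)
// offset 24  sample rate (uint32)
// offset 28  byte rate = sampleRate * channels * bitsPerSample / 8
// offset 32  block align = channels * bitsPerSample / 8 (uint16)
// offset 34  bits per sample (uint16)
// offset 36  "data"
// offset 40  PCM data size in bytes (uint32)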

First, you need to know that a WAV file has a specific format: a header. So you can't just write the raw data to a .wav file.
Second, the WAV header includes the length of the file, so you need to write the header after recording.
My solution is to use AudioRecord to record a raw PCM file first:
byte[] audiodata = new byte[bufferSizeInBytes];
FileOutputStream fos = null;
int readsize = 0;
try {
fos = new FileOutputStream(pcmFileName, true);
} catch (FileNotFoundException e) {
Log.e("AudioRecorder", e.getMessage());
}
status = Status.STATUS_START;
while (status == Status.STATUS_START && audioRecord != null) {
readsize = audioRecord.read(audiodata, 0, bufferSizeInBytes);
if (AudioRecord.ERROR_INVALID_OPERATION != readsize && fos != null) {
try {
if (readsize > 0 && readsize <= audiodata.length)
fos.write(audiodata, 0, readsize);
} catch (IOException e) {
Log.e("AudioRecorder", e.getMessage());
}
}
}
try {
if (fos != null) {
fos.close();
}
} catch (IOException e) {
Log.e("AudioRecorder", e.getMessage());
}
Then convert it to a WAV file:
byte buffer[] = null;
int TOTAL_SIZE = 0;
File file = new File(pcmPath);
if (!file.exists()) {
return false;
}
TOTAL_SIZE = (int) file.length();
WaveHeader header = new WaveHeader();
header.fileLength = TOTAL_SIZE + (44 - 8);
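// RIFF length field = 44-byte header + data - 8 (the "RIFF" id and this size field are excluded)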
header.FmtHdrLeth = 16;
header.BitsPerSample = 16;
header.Channels = 1;
header.FormatTag = 0x0001;
header.SamplesPerSec = 8000;
header.BlockAlign = (short) (header.Channels * header.BitsPerSample / 8);
header.AvgBytesPerSec = header.BlockAlign * header.SamplesPerSec;
header.DataHdrLeth = TOTAL_SIZE;
byte[] h = null;
try {
h = header.getHeader();
} catch (IOException e1) {
Log.e("PcmToWav", e1.getMessage());
return false;
}
if (h.length != 44)
return false;
File destfile = new File(destinationPath);
if (destfile.exists())
destfile.delete();
try {
buffer = new byte[1024 * 4]; // 4 KB copy buffer
InputStream inStream = null;
OutputStream ouStream = null;
ouStream = new BufferedOutputStream(new FileOutputStream(
destinationPath));
ouStream.write(h, 0, h.length);
inStream = new BufferedInputStream(new FileInputStream(file));
int size = inStream.read(buffer);
while (size != -1) {
ouStream.write(buffer, 0, size); // write only the bytes actually read
size = inStream.read(buffer);
}
inStream.close();
ouStream.close();
} catch (FileNotFoundException e) {
Log.e("PcmToWav", e.getMessage());
return false;
} catch (IOException ioe) {
Log.e("PcmToWav", ioe.getMessage());
return false;
}
if (deletePcmFile) {
file.delete();
}
Log.i("PcmToWav", "makePCMFileToWAVFile success!" + new SimpleDateFormat("yyyy-MM-dd hh:mm").format(new Date()));
return true;

Related

Android AudioRecord error starting status -38

I'm facing this error when trying to start a recording session with AudioRecord. It seems the audio source is busy, but I couldn't find any documentation about this error code. What I'm trying to do is:
private class AudioRecordRunnable implements Runnable {
private final AudioRecord mAudioRecord;
private final AudioDataCallback mAudioDataCallback;
private byte[] mByteBuffer;
private final short[] mShortBuffer;
private final int mByteBufferSize;
private final int mShortBufferSize;
private final int mAudioFormat;
AudioRecordRunnable(int sampleRate, int channelConfig, int audioFormat, int byteBufferSize,
@NonNull AudioDataCallback audioDataCallback) {
mAudioFormat = audioFormat;
int minBufferSize =
AudioRecord.getMinBufferSize(sampleRate, channelConfig, mAudioFormat);
mByteBufferSize = byteBufferSize;
mShortBufferSize = mByteBufferSize / 2;
mShortBuffer = new short[mShortBufferSize];
int bufferSize = Math.max(minBufferSize, byteBufferSize);
mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate, channelConfig,
audioFormat, bufferSize);
mAudioDataCallback = audioDataCallback;
}
@Override
public void run() {
if (mAudioRecord.getState() == AudioRecord.STATE_INITIALIZED) {
try {
try {
mAudioRecord.startRecording();
} catch (IllegalStateException e) {
Log.w(TAG, "startRecording fail: " + e.getMessage());
mAudioDataCallback.onError();
return;
}
while (mIsRecording.get()) {
int ret;
if (mAudioFormat == AudioFormat.ENCODING_PCM_16BIT) {
ret = mAudioRecord.read(mShortBuffer, 0, mShortBufferSize);
if (ret > 0) {
mByteBuffer = new byte[mByteBufferSize];
mAudioDataCallback.onAudioData(
short2byte(mShortBuffer, ret, mByteBuffer), ret * 2);
} else {
onError(ret);
break;
}
} else {
ret = mAudioRecord.read(mByteBuffer, 0, mByteBufferSize);
if (ret > 0) {
mAudioDataCallback.onAudioData(mByteBuffer, ret);
} else {
onError(ret);
break;
}
}
}
} finally {
mAudioRecord.stop();
mAudioRecord.release();
}
}
}
private byte[] short2byte(short[] sData, int size, byte[] bData) {
if (size > sData.length || size * 2 > bData.length) {
Log.w(TAG, "short2byte: too long short data array");
}
for (int i = 0; i < size; i++) {
bData[i * 2] = (byte) (sData[i] & 0x00FF);
bData[(i * 2) + 1] = (byte) (sData[i] >> 8);
}
return bData;
}
private void onError(int errorCode) {
mAudioDataCallback.onError();
Log.w(TAG, "record fail: " + String.valueOf(errorCode));
if (errorCode == AudioRecord.ERROR_INVALID_OPERATION) {
Log.w(TAG, "record fail: ERROR_INVALID_OPERATION");
} else if (errorCode == AudioRecord.ERROR_BAD_VALUE) {
Log.w(TAG, "record fail: ERROR_BAD_VALUE");
}
}
}
I'm getting this message in the logcat
E/AudioRecord: start() status -38
But no exception is thrown and result of
ret = mAudioRecord.read(mShortBuffer, 0, mShortBufferSize);
is always 0
How can I avoid this error?
Perhaps another process is using your microphone.
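A quick runtime check (a sketch; for status -38, startRecording() typically does not throw, the recorder just stays stopped):
audioRecord.startRecording();
if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
// start failed; the input is probably held by another app (assistant, call, etc.)
audioRecord.release();
// back off and retry, or report the error to the user
}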

I have a 600ms delay decoding a RTP/UDP h.264 stream in Android using MediaCodec with SurfaceView

I have tried everything I can think of to improve the latency in decoding an h.264 stream from an ethernet camera. The camera's manufacturer states that it has a minimum latency of 50ms using their hardware to display the stream, so I know it's possible. I was also able to play the stream on my computer without any delay.
I am developing in Android, where I am receiving the UDP packets via DatagramSocket, parsing the RTP packet, assembling the NAL units, passing them to MediaCodec where its decoding the h.264 stream via hardware decoder, and finally displaying the stream on a SurfaceView.
The stream plays perfect without any problems, except that there is a delay of approximately 610ms between what is being recorded and what is displayed. This camera will be used on a vehicle, so a 610ms delay is unacceptable.
Any advice on how to improve on this latency will be greatly appreciated.
Here is my code I have adapted from various public sources:
// configuration constants
private static final int SURFACE_WIDTH = 640;
private static final int SURFACE_HEIGHT = 480;
public static final String CSD_0 = "csd-0";
public static final String CSD_1 = "csd-1";
public static final String DURATION_US = "durationUs";
public static boolean DEBUGGING = false;
private final SurfaceView surfaceView;
private PlayerThread playerThread;
private RTPClientThread rtpSessionThread;
private ByteBuffer inputBuffer;
private MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
private MediaCodec decoder;
private Log log = LogFactory.getLog(RtpMediaDecoder.class);
private final byte[] byteStreamStartCodePrefix = {(byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x01};
private boolean useByteStreamFormat = true;
private int lastSequenceNumber = 0;
private boolean lastSequenceNumberIsValid = false;
private boolean sequenceError = false;
private boolean currentFrameHasError = false;
private BufferedSample currentFrame;
private ExecutorService executorService;
private enum NalType {
FULL,
STAPA,
STAPB,
MTAP16,
MTAP24,
FUA,
FUB,
UNKNOWN
}
public RtpMediaDecoder(SurfaceView surfaceView) {
this.surfaceView = surfaceView;
surfaceView.getHolder().addCallback(this);
}
public void start() {
rtpStartClient();
}
public void restart() {
rtpStopClient();
try {
sleep(500);
} catch (InterruptedException e) {
}
rtpStartClient();
}
public void release() {
rtpStopClient();
if (decoder != null) {
try {
decoder.stop();
} catch (Exception e) {
log.error("Encountered error while trying to stop decoder", e);
}
decoder.release();
decoder = null;
}
}
private void rtpStartClient() {
rtpSessionThread = new RTPClientThread();
executorService = Executors.newFixedThreadPool(1);
rtpSessionThread.start();
}
private void rtpStopClient() {
rtpSessionThread.interrupt();
executorService.shutdown();
}
public BufferedSample getSampleBuffer() throws Exception {
int inIndex = decoder.dequeueInputBuffer(-1);
if (inIndex < 0) {
throw new Exception("Didn't get a buffer from the MediaCodec");
}
inputBuffer = decoder.getInputBuffer(inIndex);
return new BufferedSample(inputBuffer, inIndex);
}
public void decodeFrame(BufferedSample decodeBuffer) throws Exception {
if (DEBUGGING) {
log.info(decodeBuffer.toString());
}
decoder.queueInputBuffer(decodeBuffer.getIndex(), 0,
decodeBuffer.getSampleSize(), 0, 0);
int outIndex = decoder.dequeueOutputBuffer(info, 0);
if (outIndex >= 0) {
// outputBuffer = decoder.getOutputBuffer(outIndex);
decoder.releaseOutputBuffer(outIndex,true);
}
// log.error("Completed frame decode: " + decodeBuffer.getRtpTimestamp() + " System Time: " + System.currentTimeMillis());
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
android.view.ViewGroup.LayoutParams layoutParams = surfaceView.getLayoutParams();
layoutParams.width = SURFACE_WIDTH; // required width
layoutParams.height = SURFACE_HEIGHT; // required height
surfaceView.setLayoutParams(layoutParams);
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
log.debug("Starting player thread.");
if (playerThread == null) {
playerThread = new PlayerThread(holder.getSurface());
playerThread.start();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
}
public MediaFormat getMediaFormat() {
String mimeType = "video/avc";
int width = 640;
int height = 480;
MediaFormat format = MediaFormat.createVideoFormat(mimeType, width, height);
// from avconv, when streaming sample.h264.mp4 from disk
// byte[] header_sps = {0, 0, 0, 1, // header
// 0x67, 0x64, (byte) 0x00, 0x1e, (byte) 0xac, (byte) 0xd9, 0x40, (byte) 0xa0, 0x3d,
// (byte) 0xa1, 0x00, 0x00, (byte) 0x03, 0x00, 0x01, 0x00, 0x00, 0x03, 0x00, 0x3C, 0x0F, 0x16, 0x2D, (byte) 0x96}; // sps
// byte[] header_pps = {0, 0, 0, 1, // header
// 0x68, (byte) 0xeb, (byte) 0xec, (byte) 0xb2, 0x2C}; // pps
byte[] header_sps = {0x00, 0x00, 0x00, 0x01, 0x67, 0x42, 0x00, 0x0a, (byte) 0xf8, 0x41, (byte) 0xa2};
byte[] header_pps = {0x00, 0x00, 0x00, 0x01, 0x68, (byte) 0xce, 0x38, (byte) 0x80};
format.setByteBuffer(CSD_0, ByteBuffer.wrap(header_sps));
format.setByteBuffer(CSD_1, ByteBuffer.wrap(header_pps));
//format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, width * height);
format.setInteger(DURATION_US, 12600000);
return format;
}
private class PlayerThread extends Thread {
private Surface surface;
public PlayerThread(Surface surface) {
this.surface = surface;
}
@Override
public void run() {
// MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", SURFACE_WIDTH, SURFACE_HEIGHT);
MediaFormat mediaFormat = getMediaFormat();
try {
String mime = mediaFormat.getString(MediaFormat.KEY_MIME);
if (mime.startsWith("video/")) {
decoder = MediaCodec.createDecoderByType(mime);
}
// decoder = MediaCodec.createByCodecName("OMX.Intel.hw_vd.h264");
// decoder = MediaCodec.createDecoderByType("video/avc");
} catch (IOException e) {
e.printStackTrace();
}
if (decoder == null) {
log.info("Can't find video info!");
return;
}
decoder.configure(mediaFormat, surface, null, 0);
// log.error("Decoder Started, System Time: " + System.currentTimeMillis());
decoder.start();
}
}
private class RTPClientThread extends Thread {
private DatagramSocket mDataGramSocket;
@Override
public void run() {
try {
sleep(200);
} catch (InterruptedException e) {
}
try {
mDataGramSocket = new DatagramSocket(50004);
mDataGramSocket.setReuseAddress(true);
mDataGramSocket.setSoTimeout(1000);
} catch (Exception e) {
e.printStackTrace();
}
byte[] recvPacket = {0};
int seqNum = 0, prevSeqNum = 0, length = 0;
byte[] message = new byte[1450];
DatagramPacket p = new DatagramPacket(message, message.length);
try {
while (!Thread.interrupted()) {
try {
mDataGramSocket.receive(p);
length = p.getLength();
recvPacket = new byte[length];
System.arraycopy(message,0,recvPacket,0,length);
seqNum = ((message[2] & 0xff) << 8) | (message[3] & 0xff);
if(seqNum != prevSeqNum) {
prevSeqNum = seqNum;
if (!executorService.isTerminated() && !executorService.isShutdown()) {
executorService.execute(new PacketRunnable(recvPacket, seqNum));
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
log.error("We Stopped");
mDataGramSocket.close();
} catch (Exception e) {
e.printStackTrace();
}
}
public class PacketRunnable implements Runnable {
private byte[] data;
private int localSeqNum;
private PacketRunnable(byte[] _data, int _seqNum) {
this.data = _data;
this.localSeqNum = _seqNum;
}
public void run() {
DataPacket packet = DataPacket.decode(data);
String debugging = "RTP data. ";
debugging += packet.getDataSize() + "b ";
debugging += "#" + packet.getSequenceNumber();
debugging += " " + packet.getTimestamp();
if (lastSequenceNumberIsValid && ((lastSequenceNumber + 1) != localSeqNum) && (localSeqNum != 0)) {
sequenceError = true;
log.error("Seq#: "+ localSeqNum + " PrevSeq#: " + lastSequenceNumber + " SKIPPED (" + (localSeqNum - lastSequenceNumber - 1) + ")");
debugging += " SKIPPED (" + (localSeqNum - lastSequenceNumber - 1) + ")";
} else {
sequenceError = false;
}
if (RtpMediaDecoder.DEBUGGING) {
log.error(debugging);
}
H264Packet h264Packet = new H264Packet(packet);
if (h264Packet.getNRIBits() > 0) {
switch (h264Packet.h264NalType) {
case FULL:
if (RtpMediaDecoder.DEBUGGING) {
log.info("NAL: full packet");
}
startFrame(packet.getTimestamp());
if (currentFrame != null) {
if (useByteStreamFormat) {
currentFrame.getBuffer().put(byteStreamStartCodePrefix);
}
currentFrame.getBuffer().put(packet.getData().toByteBuffer());
sendFrame();
}
break;
case FUA:
if (h264Packet.isStart()) {
if (RtpMediaDecoder.DEBUGGING) {
log.info("FU-A start found. Starting new frame");
}
startFrame(packet.getTimestamp());
if (currentFrame != null) {
// Add stream header
if (useByteStreamFormat) {
currentFrame.getBuffer().put(byteStreamStartCodePrefix);
}
byte reconstructedNalTypeOctet = h264Packet.getNalTypeOctet();
currentFrame.getBuffer().put(reconstructedNalTypeOctet);
}
}
if (currentFrame != null) {
if (packet.getTimestamp() != currentFrame.getRtpTimestamp()) {
if (RtpMediaDecoder.DEBUGGING) {
log.warn("Non-consecutive timestamp found");
}
currentFrameHasError = true;
}
if (sequenceError) {
currentFrameHasError = true;
}
// If we survived possible errors, collect data to the current frame buffer
if (!currentFrameHasError) {
currentFrame.getBuffer().put(packet.getData().toByteBuffer(2, packet.getDataSize() - 2));
} else {
if (RtpMediaDecoder.DEBUGGING) {
log.info("Dropping frame");
}
}
if (h264Packet.isEnd()) {
if (RtpMediaDecoder.DEBUGGING) {
log.info("FU-A end found. Sending frame!");
}
try {
sendFrame();
} catch (Throwable t) {
log.error("Error sending frame.", t);
}
}
}
break;
case STAPA:
if (RtpMediaDecoder.DEBUGGING) {
log.info("NAL: STAP-A");
}
ChannelBuffer buffer = packet.getData();
buffer.readByte();
while (buffer.readable()) {
short nalUnitSize = buffer.readShort();
byte[] nalUnitData = new byte[nalUnitSize];
buffer.readBytes(nalUnitData);
startFrame(packet.getTimestamp());
if (currentFrame != null) {
if (useByteStreamFormat) {
currentFrame.getBuffer().put(byteStreamStartCodePrefix);
}
currentFrame.getBuffer().put(nalUnitData);
sendFrame();
}
}
break;
case STAPB:
case MTAP16:
case MTAP24:
case FUB:
case UNKNOWN:
log.warn("NAL: Unimplemented unit type: " + h264Packet.getNalType());
break;
}
} else {
log.warn("Useless packet received.");
}
lastSequenceNumber = localSeqNum;
lastSequenceNumberIsValid = true;
}
}
}
private void startFrame(long rtpTimestamp) {
// Reset error bit
currentFrameHasError = false;
// Deal with potentially non-returned buffer due to error
if (currentFrame != null) {
currentFrame.getBuffer().clear();
// Otherwise, get a fresh buffer from the codec
} else {
try {
// Get buffer from decoder
currentFrame = getSampleBuffer();
currentFrame.getBuffer().clear();
} catch (Exception e) {
currentFrameHasError = true;
e.printStackTrace();
}
}
if (!currentFrameHasError) {
// Set the sample timestamp
currentFrame.setRtpTimestamp(rtpTimestamp);
}
}
private void sendFrame() {
currentFrame.setSampleSize(currentFrame.getBuffer().position());
currentFrame.getBuffer().flip();
// log.error("Sending Frame: " + currentFrame.getRtpTimestamp() + " System Time: " + System.currentTimeMillis());
try {
decodeFrame(currentFrame);
} catch (Exception e) {
log.error("Exception sending frame to decoder", e);
}
// Always make currentFrame null to indicate we have returned the buffer to the codec
currentFrame = null;
}
private class H264Packet {
private final byte nalFBits;
private final byte nalNriBits;
private final byte nalType;
private boolean fuStart = false;
private boolean fuEnd = false;
private byte fuNalType;
private NalType h264NalType = NalType.UNKNOWN;
public H264Packet(DataPacket packet) {
// Parsing the RTP Packet - http://www.ietf.org/rfc/rfc3984.txt section 5.3
byte nalUnitOctet = packet.getData().getByte(0);
nalFBits = (byte) (nalUnitOctet & 0x80);
nalNriBits = (byte) (nalUnitOctet & 0x60);
nalType = (byte) (nalUnitOctet & 0x1F);
// If it's a single NAL packet then the entire payload is here
if (nalType > 0 && nalType < 24) {
h264NalType = NalType.FULL;
} else if (nalType == 24) {
h264NalType = NalType.STAPA;
} else if (nalType == 25) {
h264NalType = NalType.STAPB;
} else if (nalType == 26) {
h264NalType = NalType.MTAP16;
} else if (nalType == 27) {
h264NalType = NalType.MTAP24;
} else if (nalType == 28) {
h264NalType = NalType.FUA;
} else if (nalType == 29) {
h264NalType = NalType.FUB;
}
byte fuHeader = packet.getData().getByte(1);
fuStart = ((fuHeader & 0x80) != 0);
fuEnd = ((fuHeader & 0x40) != 0);
fuNalType = (byte) (fuHeader & 0x1F);
}
public byte getNalTypeOctet() {
// Excerpt from the spec:
/* "The NAL unit type octet of the fragmented
NAL unit is not included as such in the fragmentation unit payload,
but rather the information of the NAL unit type octet of the
fragmented NAL unit is conveyed in F and NRI fields of the FU
indicator octet of the fragmentation unit and in the type field of
the FU header" */
return (byte) (fuNalType | nalFBits | nalNriBits);
}
public boolean isStart() {
return fuStart;
}
public boolean isEnd() {
return fuEnd;
}
public byte getNalType() {
return nalType;
}
public byte getNRIBits() {
return nalNriBits;
}
}
}
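Not from the original code, but one pattern that often trims MediaCodec display latency is draining decoder output on a dedicated thread with a short timeout, instead of polling once per queued input as decodeFrame() above does. A sketch (decoder assumed configured with the Surface and started):
MediaCodec.BufferInfo outInfo = new MediaCodec.BufferInfo();
while (!Thread.interrupted()) {
int outIndex = decoder.dequeueOutputBuffer(outInfo, 10000); // 10 ms, in microseconds
if (outIndex >= 0) {
decoder.releaseOutputBuffer(outIndex, true); // true = render to the Surface immediately
}
}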

How to record audio using AudioRecorder in Android

I want to capture audio from an Android device. My code below seems to successfully make a wav file on the SD card but it cannot be played. I tried to play it using different media players but none work. There is an issue in my code that is causing this problem.
code
public class MainActivity extends ActionBarActivity {
private static final String LOG_TAG = "AudioRecordTest";
static final int AUDIO_PORT = 2048;
static final int SAMPLE_RATE = 8000;
static final int SAMPLE_INTERVAL = 20; // milliseconds
static final int SAMPLE_SIZE = 2; // bytes per sample
static final int BUF_SIZE = SAMPLE_INTERVAL * SAMPLE_INTERVAL * SAMPLE_SIZE * 2;
private static int[] mSampleRates = new int[]{44100, 44056, 47250, 48000, 22050, 16000, 11025, 8000};
private Thread recordingThread = null;
private boolean isRecording = false;
int BufferElements2Rec = 1024; // want to play 2048 (2K) since 2 bytes we
// use only 1024
int BytesPerElement = 2; // 2 bytes in 16bit format
private int bufferSize;
private AudioRecord recorder;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
startRecording();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
private void startRecording() {
recorder = findAudioRecord();
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
// convert short to byte
private byte[] short2byte(short[] sData) {
int shortArrsize = sData.length;
byte[] bytes = new byte[shortArrsize * 2];
for (int i = 0; i < shortArrsize; i++) {
bytes[i * 2] = (byte) (sData[i] & 0x00FF);
bytes[(i * 2) + 1] = (byte) (sData[i] >> 8);
sData[i] = 0;
}
return bytes;
}
public AudioRecord findAudioRecord() {
for (int rate : mSampleRates) {
for (short audioFormat : new short[]{AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT}) {
for (short channelConfig : new short[]{AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_IN_STEREO}) {
try {
Log.d(LOG_TAG, "Attempting rate " + rate + "Hz, bits: " + audioFormat + ", channel: "
+ channelConfig);
bufferSize = AudioRecord.getMinBufferSize(rate, channelConfig, audioFormat);
if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
// check if we can instantiate and have a success
recorder = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, rate, channelConfig, audioFormat, bufferSize);
if (recorder.getState() == AudioRecord.STATE_INITIALIZED)
return recorder;
}
} catch (Exception e) {
Log.e(LOG_TAG, rate + "Exception, keep trying.", e);
}
}
}
}
return null;
}
private void writeAudioDataToFile() {
/*// Write the output audio in byte
short sData[] = new short[BufferElements2Rec];
while (isRecording) {
// gets the voice output from microphone to byte format
recorder.read(sData, 0, BufferElements2Rec);
System.out.println("Short wirting to file" + sData.toString());
// // stores the voice buffer
byte bData[] = short2byte(sData);
sendLiveAudio(bData);
}*/
String filePath = "/sdcard/test.wav";
short sData[] = new short[bufferSize / 2];
FileOutputStream os = null;
try {
os = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
while (isRecording) {
// gets the voice output from microphone to byte format
recorder.read(sData, 0, bufferSize / 2);
Log.d("eray", "Short wirting to file" + sData.toString());
try {
// // writes the data to file from buffer
// // stores the voice buffer
byte bData[] = short2byte(sData);
os.write(bData, 0, bufferSize);
} catch (IOException e) {
e.printStackTrace();
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
private void stopRecording() {
// stops the recording activity
if (null != recorder) {
isRecording = false;
recorder.stop();
recorder.release();
recorder = null;
recordingThread = null;
}
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
@Override
public void onBackPressed() {
super.onBackPressed();
stopRecording();
}
}
Try this:
public class Audio_Record extends Activity {
private static final int RECORDER_SAMPLERATE = 8000;
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_MONO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private AudioRecord recorder = null;
private Thread recordingThread = null;
private boolean isRecording = false;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
setButtonHandlers();
enableButtons(false);
int bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE,
RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
}
private void setButtonHandlers() {
((Button) findViewById(R.id.btnStart)).setOnClickListener(btnClick);
((Button) findViewById(R.id.btnStop)).setOnClickListener(btnClick);
}
private void enableButton(int id, boolean isEnable) {
((Button) findViewById(id)).setEnabled(isEnable);
}
private void enableButtons(boolean isRecording) {
enableButton(R.id.btnStart, !isRecording);
enableButton(R.id.btnStop, isRecording);
}
int BufferElements2Rec = 1024; // want to play 2048 (2K) since 2 bytes we use only 1024
int BytesPerElement = 2; // 2 bytes in 16bit format
private void startRecording() {
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
RECORDER_SAMPLERATE, RECORDER_CHANNELS,
RECORDER_AUDIO_ENCODING, BufferElements2Rec * BytesPerElement);
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
//convert short to byte
private byte[] short2byte(short[] sData) {
int shortArrsize = sData.length;
byte[] bytes = new byte[shortArrsize * 2];
for (int i = 0; i < shortArrsize; i++) {
bytes[i * 2] = (byte) (sData[i] & 0x00FF);
bytes[(i * 2) + 1] = (byte) (sData[i] >> 8);
sData[i] = 0;
}
return bytes;
}
private void writeAudioDataToFile() {
// Write the output audio in byte
String filePath = "/sdcard/voice8K16bitmono.pcm";
short sData[] = new short[BufferElements2Rec];
FileOutputStream os = null;
try {
os = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
while (isRecording) {
// gets the voice output from microphone to byte format
recorder.read(sData, 0, BufferElements2Rec);
System.out.println("Short wirting to file" + sData.toString());
try {
// // writes the data to file from buffer
// // stores the voice buffer
byte bData[] = short2byte(sData);
os.write(bData, 0, BufferElements2Rec * BytesPerElement);
} catch (IOException e) {
e.printStackTrace();
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
private void stopRecording() {
// stops the recording activity
if (null != recorder) {
isRecording = false;
recorder.stop();
recorder.release();
recorder = null;
recordingThread = null;
}
}
private View.OnClickListener btnClick = new View.OnClickListener() {
public void onClick(View v) {
switch (v.getId()) {
case R.id.btnStart: {
enableButtons(true);
startRecording();
break;
}
case R.id.btnStop: {
enableButtons(false);
stopRecording();
break;
}
}
}
};
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
finish();
}
return super.onKeyDown(keyCode, event);
}
}
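Note that this writes headerless PCM (hence the .pcm extension), so media players will still refuse it; to get a playable .wav, prepend a 44-byte header afterwards, for example with a converter along the lines of the copyWaveFile() shown in the FLAC question below (the call here is hypothetical):
// Hypothetical post-processing step, after stopRecording():
copyWaveFile("/sdcard/voice8K16bitmono.pcm", "/sdcard/voice8K16bitmono.wav");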

How to convert raw audio to FLAC in Android

I'm recording audio with the AudioRecord class. Now I want to convert the raw audio file to *.flac format. I convert the *.raw file to WAV this way:
private void copyWaveFile(String inFilename,String outFilename){
FileInputStream in = null;
FileOutputStream out = null;
long totalAudioLen = 0;
long totalDataLen = totalAudioLen + 36;
long longSampleRate = sampleRate;
int channels = 2;
long byteRate = RECORDER_BPP * sampleRate * channels/8;
byte[] data_pcm = new byte[mAudioBufferSize];
try {
in = new FileInputStream(inFilename);
out = new FileOutputStream(outFilename);
totalAudioLen = in.getChannel().size();
totalDataLen = totalAudioLen + 36;
Log.i(TAG,"File size: " + totalDataLen);
WriteWaveFileHeader(out, totalAudioLen, totalDataLen,
longSampleRate, channels, byteRate);
while(in.read(data_pcm) != -1){
out.write(data_pcm);
}
in.close();
out.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
This piece of code is responsible for the file header
private void WriteWaveFileHeader(
FileOutputStream out, long totalAudioLen,
long totalDataLen, long longSampleRate, int channels,
long byteRate) throws IOException {
byte[] header = new byte[44];
header[0] = 'R'; // RIFF/WAVE header
header[1] = 'I';
header[2] = 'F';
header[3] = 'F';
header[4] = (byte) (totalDataLen & 0xff);
header[5] = (byte) ((totalDataLen >> 8) & 0xff);
header[6] = (byte) ((totalDataLen >> 16) & 0xff);
header[7] = (byte) ((totalDataLen >> 24) & 0xff);
header[8] = 'W';
header[9] = 'A';
header[10] = 'V';
header[11] = 'E';
header[12] = 'f'; // 'fmt ' chunk
header[13] = 'm';
header[14] = 't';
header[15] = ' ';
header[16] = 16; // 4 bytes: size of 'fmt ' chunk
header[17] = 0;
header[18] = 0;
header[19] = 0;
header[20] = 1; // format = 1
header[21] = 0;
header[22] = (byte) channels;
header[23] = 0;
header[24] = (byte) (longSampleRate & 0xff);
header[25] = (byte) ((longSampleRate >> 8) & 0xff);
header[26] = (byte) ((longSampleRate >> 16) & 0xff);
header[27] = (byte) ((longSampleRate >> 24) & 0xff);
header[28] = (byte) (byteRate & 0xff);
header[29] = (byte) ((byteRate >> 8) & 0xff);
header[30] = (byte) ((byteRate >> 16) & 0xff);
header[31] = (byte) ((byteRate >> 24) & 0xff);
header[32] = (byte) (2 * 16 / 8); // block align
header[33] = 0;
header[34] = RECORDER_BPP; // bits per sample
header[35] = 0;
header[36] = 'd';
header[37] = 'a';
header[38] = 't';
header[39] = 'a';
header[40] = (byte) (totalAudioLen & 0xff);
header[41] = (byte) ((totalAudioLen >> 8) & 0xff);
header[42] = (byte) ((totalAudioLen >> 16) & 0xff);
header[43] = (byte) ((totalAudioLen >> 24) & 0xff);
out.write(header, 0, 44);
}
I do not understand what the parameters of the *.flac file should be.
You need an encoder to convert PCM data to FLAC format. You cannot just change the header and expect the content to work as FLAC.
Android (at least till 4.1) does not include a FLAC encoder, although there is a decoder supported from 3.1 onwards (Source: http://developer.android.com/guide/appendix/media-formats.html).
I do not have direct experience, but have seen people use ffmpeg as a flac encoder. This project audioboo-android, which contains the native libFLAC/libFLAC++ encoder, looks interesting.
So, from Android 4.1 you can do it like this:
Initializing:
MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
MediaFormat format = new MediaFormat();
format.setString(MediaFormat.KEY_MIME, "audio/flac");
format.setInteger(MediaFormat.KEY_BIT_RATE, 64000);
format.setInteger(MediaFormat.KEY_SAMPLE_RATE, SAMPLE_RATE);
format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, NUM_CHANNELS);
String codecname = mcl.findEncoderForFormat(format);
Log.w(TAG, "Codec: "+codecname);
MediaCodec codec = null;
try
{
codec = MediaCodec.createByCodecName(codecname);
} catch (IOException e)
{
e.printStackTrace();
}
codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
MediaFormat outputFormat = codec.getOutputFormat(); // option B
long usec = 1000000L * FRAME_SIZE / SAMPLE_RATE; // frame duration in microseconds (MediaCodec timestamps are in microseconds)
MediaCodec.BufferInfo bufinfo = new MediaCodec.BufferInfo();
bufinfo.set(0, FRAME_SIZE * NUM_CHANNELS * 2, usec, 0);
codec.start();
byte[] inBuf = new byte[FRAME_SIZE * NUM_CHANNELS * 2];
byte[] encBuf = new byte[10240];
In the recorder loop:
int encoded = 0;
int inputBufferId = codec.dequeueInputBuffer(1000);
if (inputBufferId >= 0) {
ByteBuffer inputBuffer = codec.getInputBuffer(inputBufferId);
// fill inputBuffer with valid data
inputBuffer.put(inBuf, 0, inBuf.length);
codec.queueInputBuffer(inputBufferId, 0, inBuf.length, usec, 0);
}
int outputBufferId = codec.dequeueOutputBuffer(bufinfo, 1000);
if (outputBufferId >= 0) {
ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufferId);
MediaFormat bufferFormat = codec.getOutputFormat(outputBufferId); // option A
// bufferFormat is identical to outputFormat
// outputBuffer is ready to be processed or rendered.
outputBuffer.rewind();
encoded = outputBuffer.remaining();
outputBuffer.get(encBuf, 0, encoded);
codec.releaseOutputBuffer(outputBufferId, false);
} else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// Subsequent data will conform to new format.
// Can ignore if using getOutputFormat(outputBufferId)
outputFormat = codec.getOutputFormat(); // option B
}
if (encoded > 0)
{
// Process data in encBuf
}
Here's a pure java FLAC encoder: http://javaflacencoder.sourceforge.net
Some of the classes use the javax apis, but they can be safely deleted without affecting the main encoder classes.
Here's some sample code. The record object is of type AudioRecord:
try {
// Path to write files to
String path = Environment.getExternalStoragePublicDirectory("/test").getAbsolutePath();
String fileName = name+".flac";
String externalStorage = path;
File file = new File(externalStorage + File.separator + fileName);
// if the file doesn't exist, create it
if (!file.exists()) {
file.createNewFile();
}
short sData[] = new short[BufferElements2Rec];
FileOutputStream os = null;
try {
os = new FileOutputStream(file);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
FLACEncoder flacEncoder = new FLACEncoder();
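// StreamConfiguration args appear to be (channels, minBlockSize, maxBlockSize, sampleRate, bitsPerSample):
// 16-bit mono at 44100 Hz here; they must match the AudioRecord settings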
StreamConfiguration streamConfiguration = new StreamConfiguration(1,StreamConfiguration.MIN_BLOCK_SIZE,StreamConfiguration.MAX_BLOCK_SIZE,44100,16);
FLACFileOutputStream flacOut = new FLACFileOutputStream(os);
flacEncoder.setStreamConfiguration(streamConfiguration);
flacEncoder.setOutputStream(flacOut);
flacEncoder.openFLACStream();
record.startRecording();
int totalSamples = 0;
while (isRecording) {
record.read(sData, 0, BufferElements2Rec);
totalSamples+=BufferElements2Rec;
flacEncoder.addSamples(short2int(sData),BufferElements2Rec);
flacEncoder.encodeSamples(BufferElements2Rec, false);
}
int available = flacEncoder.samplesAvailableToEncode();
while(flacEncoder.encodeSamples(available,true) < available) {
available = flacEncoder.samplesAvailableToEncode();
}
try {
flacOut.close();
} catch (IOException e) {
e.printStackTrace();
}
record.stop();
} catch(IOException ex) {
ex.printStackTrace();
}
record.release();
record = null;
For converting the short data into int data:
private int[] short2int(short[] sData) {
int length = sData.length;
int[] iData = new int[length];
for(int i=0;i<length;i++) {
iData[i] = sData[i];
}
return iData;
}
Based on https://github.com/nieldeokar/WhatsappAudioRecorder/blob/master/app/src/main/java/com/nieldeokar/whatsappaudiorecorder/recorder/AudioRecordThread.java
My solution for saving the recording to an .m4a file while speech recognition is running:
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.os.Handler;
import android.os.Looper;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.Collection;
import java.util.HashSet;
import timber.log.Timber;
public class SpeechRecognizer {
private static final int CHANNELS = 1;
private static final int BIT_RATE = 32000;
private static final int SAMPLE_RATE = 44100;
private static final int SAMPLE_RATE_INDEX = 4;
protected static final String TAG = SpeechRecognizer.class.getSimpleName();
public int bufferSize;
public final Collection<RecognitionListener> listeners = new HashSet();
public final Handler mainHandler = new Handler(Looper.getMainLooper());
public final Recognizer recognizer;
private Thread recognizerThread;
public final AudioRecord recorder;
private SoundAmplitudeCallback soundAmplitudeCallback;
private File recordFile = null;
private boolean isRecordingToFileEnabled = false;
private boolean isRecordingToFilePrepared = false;
private boolean isContinueRecordingToFile = false;
public interface SoundAmplitudeCallback {
void onAmplitude(int amplitude);
}
public void setSoundAmplitudeCallback(SoundAmplitudeCallback callback) {
soundAmplitudeCallback = callback;
}
public SpeechRecognizer(Mabcd model) throws IOException {
this.recognizer = new Recognizer(model, SAMPLE_RATE);
this.bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
this.recorder = createAudioRecorder(this.bufferSize);
if (this.recorder.getState() == AudioRecord.STATE_UNINITIALIZED) {
this.recorder.release();
throw new IOException("Failed to initialize recorder. Microphone might be already in use.");
}
}
public void addListener(RecognitionListener listener) {
synchronized (this.listeners) {
this.listeners.add(listener);
}
}
public void removeListener(RecognitionListener listener) {
synchronized (this.listeners) {
this.listeners.remove(listener);
}
}
public boolean startListening() {
if (this.recognizerThread != null) {
return false;
}
this.recognizerThread = new RecognizerThread(this);
this.recognizerThread.start();
return true;
}
public boolean startListening(int timeout) {
if (this.recognizerThread != null) {
return false;
}
this.recognizerThread = new RecognizerThread(timeout);
this.recognizerThread.start();
return true;
}
private boolean stopRecognizerThread() {
if (this.recognizerThread == null) {
return false;
}
try {
this.recognizerThread.interrupt();
this.recognizerThread.join();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
this.recognizerThread = null;
return true;
}
public void startRecordToFile(File fileRecord) {
this.recordFile = fileRecord;
isRecordingToFileEnabled = true;
}
public void resumeRecordToFile(File fileRecord) {
this.recordFile = fileRecord;
isContinueRecordingToFile = true;
isRecordingToFileEnabled = true;
isRecordingToFilePrepared = false;
}
public void stopRecordToFile() {
isRecordingToFileEnabled = false;
isRecordingToFilePrepared = false;
isContinueRecordingToFile = false;
}
public boolean stop() {
boolean result = stopRecognizerThread();
if (result) {
this.mainHandler.post(new ResultEvent(this.recognizer.Rabcd(), true));
}
return result;
}
public boolean cancel() {
boolean result = stopRecognizerThread();
this.recognizer.Rabcd();
return result;
}
public void shutdown() {
this.recorder.release();
}
private final class RecognizerThread extends Thread {
private static final int NO_TIMEOUT = -1;
private int remainingSamples;
private int timeoutSamples;
VoiceRecorder voiceRecorder = null;
public RecognizerThread(int timeout) {
if (timeout != NO_TIMEOUT) {
this.timeoutSamples = (SpeechRecognizer.SAMPLE_RATE * timeout) / 1000;
} else {
this.timeoutSamples = NO_TIMEOUT;
}
this.remainingSamples = this.timeoutSamples;
}
public RecognizerThread(SpeechRecognizer speechRecognizer) {
this(NO_TIMEOUT);
}
public void run() {
voiceRecorder = new VoiceRecorder();
SpeechRecognizer.this.recorder.startRecording();
if (SpeechRecognizer.this.recorder.getRecordingState() == AudioRecord.RECORDSTATE_STOPPED) {
SpeechRecognizer.this.recorder.stop();
SpeechRecognizer.this.mainHandler.post(new OnErrorEvent(new IOException("Failed to start recording. Microphone might be already in use.")));
return;
}
byte[] buffer = new byte[SpeechRecognizer.this.bufferSize];
while (!interrupted() && (this.timeoutSamples == NO_TIMEOUT || this.remainingSamples > 0)) {
int nread = SpeechRecognizer.this.recorder.read(buffer, 0, buffer.length);
if (soundAmplitudeCallback != null) {
int max = 0;
// combine little-endian byte pairs into 16-bit samples before measuring amplitude
for (int i = 0; i + 1 < nread; i += 2) {
short s = (short) ((buffer[i] & 0xFF) | (buffer[i + 1] << 8));
if (Math.abs(s) > max) {
max = Math.abs(s);
}
}
soundAmplitudeCallback.onAmplitude(max);
}
if (nread < 0) {
throw new RuntimeException("error reading audio buffer");
}
voiceRecorder.recording(nread, buffer);
if (SpeechRecognizer.this.recognizer.Aabcd(buffer, nread)) {
SpeechRecognizer.this.mainHandler.post(new ResultEvent(SpeechRecognizer.this.recognizer.Rabcd(), true));
} else {
SpeechRecognizer.this.mainHandler.post(new ResultEvent(SpeechRecognizer.this.recognizer.Pabcd(), false));
}
if (this.timeoutSamples != NO_TIMEOUT) {
this.remainingSamples -= nread;
}
}
voiceRecorder.shutdown();
SpeechRecognizer.this.recorder.stop();
SpeechRecognizer.this.mainHandler.removeCallbacksAndMessages((Object) null);
if (this.timeoutSamples != NO_TIMEOUT && this.remainingSamples <= 0) {
SpeechRecognizer.this.mainHandler.post(new TimeoutEvent());
}
}
}
/*
* Voice Recorder to file
* */
private class VoiceRecorder{
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
FileOutputStream fileOutputStream = null;
MediaCodec mediaCodec = null;
void recording(int nread, byte[] buffer){
/*step 1 prepare file*/
if (isRecordingToFileEnabled && !isRecordingToFilePrepared) {
//if we continue recording not create new file
if (recordFile == null) {
throw new IllegalArgumentException("Record file is null");
}
try {
fileOutputStream = new FileOutputStream(recordFile, isContinueRecordingToFile);
} catch (FileNotFoundException e) {
throw new RuntimeException(e);
}
if (mediaCodec == null){
try {
mediaCodec = createMediaCodec(bufferSize);
mediaCodec.start();
Timber.d("mediaCodec.start()");
} catch (IOException e) {
e.printStackTrace();
}
}
isRecordingToFilePrepared = true;
}
/*prepare file*/
/*step 2 recording*/
if (isRecordingToFileEnabled && isRecordingToFilePrepared) {
try {
if (fileOutputStream != null){
boolean success = handleCodecInput(nread, buffer, mediaCodec, Thread.currentThread().isAlive());
if (success)
handleCodecOutput(mediaCodec, bufferInfo, fileOutputStream);
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/*recording*/
/*step 3 finish recording and save to file*/
if (!isRecordingToFileEnabled && fileOutputStream != null) {
try {
VoiceRecorder.this.shutdown();
fileOutputStream.flush();
fileOutputStream.close();
fileOutputStream = null;
Timber.d("Finishing file");
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/*finish recording and save to file*/
}
void shutdown(){
if (mediaCodec != null){
mediaCodec.stop();
mediaCodec.release();
mediaCodec = null;
}
}
}
private abstract class RecognitionEvent implements Runnable {
public abstract void execute(RecognitionListener recognitionListener);
private RecognitionEvent() {
}
public void run() {
for (RecognitionListener listener : (RecognitionListener[]) SpeechRecognizer.this.listeners.toArray(new RecognitionListener[0])) {
execute(listener);
}
}
}
private class ResultEvent extends RecognitionEvent {
private final boolean finalResult;
protected final String hypothesis;
ResultEvent(String hypothesis2, boolean finalResult2) {
super();
this.hypothesis = hypothesis2;
this.finalResult = finalResult2;
}
public void execute(RecognitionListener listener) {
if (this.finalResult) {
listener.onResult(this.hypothesis);
} else {
listener.onPartialResult(this.hypothesis);
}
}
}
private class OnErrorEvent extends RecognitionEvent {
private final Exception exception;
OnErrorEvent(Exception exception2) {
super();
this.exception = exception2;
}
public void execute(RecognitionListener listener) {
listener.onError(this.exception);
}
}
private class TimeoutEvent extends RecognitionEvent {
private TimeoutEvent() {
super();
}
public void execute(RecognitionListener listener) {
listener.onTimeout();
}
}
private AudioRecord createAudioRecorder(int bufferSize) {
AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.VOICE_RECOGNITION, SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize * 10);
if (android.media.audiofx.NoiseSuppressor.isAvailable()) {
android.media.audiofx.NoiseSuppressor noiseSuppressor = android.media.audiofx.NoiseSuppressor
.create(recorder.getAudioSessionId());
if (noiseSuppressor != null) {
noiseSuppressor.setEnabled(true);
}
}
if (android.media.audiofx.AutomaticGainControl.isAvailable()) {
android.media.audiofx.AutomaticGainControl automaticGainControl = android.media.audiofx.AutomaticGainControl
.create(recorder.getAudioSessionId());
if (automaticGainControl != null) {
automaticGainControl.setEnabled(true);
}
}
return recorder;
}
private MediaCodec createMediaCodec(int bufferSize) throws IOException {
MediaCodec mediaCodec = MediaCodec.createEncoderByType("audio/mp4a-latm");
MediaFormat mediaFormat = new MediaFormat();
mediaFormat.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
mediaFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, SAMPLE_RATE);
mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, CHANNELS);
mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, bufferSize);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
mediaFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
try {
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
} catch (Exception e) {
Timber.tag(TAG).w(e);
mediaCodec.release();
throw new IOException(e);
}
return mediaCodec;
}
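/*
 * Copies one captured PCM buffer into the encoder's next free input
 * buffer; the end-of-stream flag is queued once "running" is false.
 */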
private boolean handleCodecInput(int length,
byte[] buffer,
MediaCodec mediaCodec,
boolean running) {
// the AudioRecord error codes (ERROR_BAD_VALUE, ERROR_INVALID_OPERATION)
// are negative, so any read that is not exactly a full buffer is
// treated as a failed capture
if (length != bufferSize) {
Timber.tag(TAG).d("length != bufferSize, calling onRecordFailed");
// if (onRecorderFailedListener != null) {
// onRecorderFailedListener.onRecorderFailed();
// }
return false;
}
// wait up to 10 ms (the timeout is in microseconds) for a free input buffer
int codecInputBufferIndex = mediaCodec.dequeueInputBuffer(10 * 1000);
if (codecInputBufferIndex >= 0) {
ByteBuffer codecBuffer = mediaCodec.getInputBuffer(codecInputBufferIndex);
codecBuffer.clear();
codecBuffer.put(buffer);
mediaCodec.queueInputBuffer(codecInputBufferIndex, 0, length, 0, running ? 0 : MediaCodec.BUFFER_FLAG_END_OF_STREAM);
}
return true;
}
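/*
 * Drains every pending output buffer from the encoder and writes each
 * encoded frame to the stream, prefixed with an ADTS header. Buffers
 * flagged BUFFER_FLAG_CODEC_CONFIG are skipped: ADTS carries the codec
 * configuration in every frame header, so the CSD buffer is not needed.
 */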
private void handleCodecOutput(MediaCodec mediaCodec,
MediaCodec.BufferInfo bufferInfo,
OutputStream outputStream) throws IOException {
int codecOutputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
while (codecOutputBufferIndex != MediaCodec.INFO_TRY_AGAIN_LATER) {
if (codecOutputBufferIndex >= 0) {
ByteBuffer encoderOutputBuffer = mediaCodec.getOutputBuffer(codecOutputBufferIndex);
encoderOutputBuffer.position(bufferInfo.offset);
encoderOutputBuffer.limit(bufferInfo.offset + bufferInfo.size);
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != MediaCodec.BUFFER_FLAG_CODEC_CONFIG) {
byte[] header = createAdtsHeader(bufferInfo.size - bufferInfo.offset);
outputStream.write(header);
byte[] data = new byte[encoderOutputBuffer.remaining()];
encoderOutputBuffer.get(data);
outputStream.write(data);
}
encoderOutputBuffer.clear();
mediaCodec.releaseOutputBuffer(codecOutputBufferIndex, false);
}
codecOutputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
}
}
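/*
 * Builds the 7-byte ADTS header (no CRC) that precedes each AAC frame;
 * frameLength counts the header itself plus the payload. Without this
 * framing, raw AAC output is not recognized by most players.
 */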
private byte[] createAdtsHeader(int length) {
int frameLength = length + 7;
byte[] adtsHeader = new byte[7];
adtsHeader[0] = (byte) 0xFF; // Sync Word
adtsHeader[1] = (byte) 0xF1; // MPEG-4, Layer (0), No CRC
adtsHeader[2] = (byte) ((MediaCodecInfo.CodecProfileLevel.AACObjectLC - 1) << 6);
adtsHeader[2] |= (((byte) SAMPLE_RATE_INDEX) << 2);
adtsHeader[2] |= (((byte) CHANNELS) >> 2);
adtsHeader[3] = (byte) (((CHANNELS & 3) << 6) | ((frameLength >> 11) & 0x03));
adtsHeader[4] = (byte) ((frameLength >> 3) & 0xFF);
adtsHeader[5] = (byte) (((frameLength & 0x07) << 5) | 0x1f);
adtsHeader[6] = (byte) 0xFC;
return adtsHeader;
}
}
java android speech recognition recording file pcm m4a

AudioRecord with Gain Adjustment not working on Samsung Device

I have written code for recording an audio file using AudioRecord, and while writing the file to the SD card I make two versions.
Version 1
The recorded file is saved to the SD card as-is.
Version 2
I apply a gain to the recorded file and save that version to the SD card.
This works great on Sony Ericsson phones, and the audio volume is boosted considerably.
But I am struggling to make it work on Samsung devices.
When I play the recorded file it sounds like Talking Tom :P
Initially I thought the Samsung devices did not like the combination of parameters I used to create the AudioRecord.
So I used the following approach, in which I loop over the available configurations and use the first one that successfully initializes an AudioRecord.
public AudioRecord findAudioRecord() {
for (int rate: mSampleRates) {
for (short audioFormat: new short[] {
AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT
}) {
for (short channelConfig: new short[] {
AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_IN_STEREO
}) {
try {
Log.i("vipul", "Attempting rate " + rate + "Hz, bits: " + audioFormat + ", channel: " + channelConfig);
int bufferSize = AudioRecord.getMinBufferSize(rate, channelConfig, audioFormat);
if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
// check if we can instantiate and have a success
AudioRecord recorder = new AudioRecord(
AudioSource.DEFAULT, rate, channelConfig, audioFormat, bufferSize);
if (recorder.getState() == AudioRecord.STATE_INITIALIZED) return recorder;
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
return null;
}
Below is the code, which works well on Sony phones but struggles on Samsung devices.
public class EnvironmentRecorder extends Activity implements OnClickListener {
private static final int RECORDER_BPP = 16;
private static final String AUDIO_RECORDER_FILE_EXT_WAV = ".wav";
private static final String AUDIO_RECORDER_FOLDER = "MyRecorder";
private static final String AUDIO_RECORDER_TEMP_FILE = "record_temp.raw";
private static final int RECORDER_SAMPLERATE = 44100;
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_STEREO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private Button start, stop;
private AudioRecord recorder = null;
private int bufferSize = 0;
private Thread recordingThread = null;
private boolean isRecording = false;
private static int[] mSampleRates = new int[] {
8000, 11025, 22050, 44100
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
start = (Button) findViewById(R.id.start);
stop = (Button) findViewById(R.id.stop);
start.setOnClickListener(this);
stop.setOnClickListener(this);
}
@Override
public void onClick(View view) {
switch (view.getId()) {
case R.id.start:
startRecord();
break;
case R.id.stop:
stopRecording();
break;
}
}
public EnvironmentRecorder() {
try {
bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
} catch (Exception e) {
e.printStackTrace();
}
}
private String getFilename1() {
String filepath = Environment.getExternalStorageDirectory().getPath();
File file = new File(filepath, AUDIO_RECORDER_FOLDER);
if (!file.exists()) {
file.mkdirs();
}
return (file.getAbsolutePath() + "/" + "NotGained" + AUDIO_RECORDER_FILE_EXT_WAV);
}
private String getFilename2() {
String filepath = Environment.getExternalStorageDirectory().getPath();
File file = new File(filepath, AUDIO_RECORDER_FOLDER);
if (!file.exists()) {
file.mkdirs();
}
return (file.getAbsolutePath() + "/" + "Gained" + AUDIO_RECORDER_FILE_EXT_WAV);
}
private String getTempFilename() {
String filepath = Environment.getExternalStorageDirectory().getPath();
File file = new File(filepath, AUDIO_RECORDER_FOLDER);
if (!file.exists()) {
file.mkdirs();
}
File tempFile = new File(filepath, AUDIO_RECORDER_TEMP_FILE);
if (tempFile.exists()) tempFile.delete();
return (file.getAbsolutePath() + "/" + AUDIO_RECORDER_TEMP_FILE);
}
public AudioRecord findAudioRecord() {
for (int rate: mSampleRates) {
for (short audioFormat: new short[] {
AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT
}) {
for (short channelConfig: new short[] {
AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_IN_STEREO
}) {
try {
Log.v("vipul", "Attempting rate " + rate + "Hz, bits: " + audioFormat + ", channel: " + channelConfig);
int bufferSize = AudioRecord.getMinBufferSize(rate, channelConfig, audioFormat);
if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
// check if we can instantiate and have a success
AudioRecord recorder = new AudioRecord(
AudioSource.DEFAULT, rate, channelConfig, audioFormat, bufferSize);
if (recorder.getState() == AudioRecord.STATE_INITIALIZED) return recorder;
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
return null;
}
public void startRecord() {
/*
* recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
* RECORDER_SAMPLERATE, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING,
* bufferSize);
*/
recorder = findAudioRecord();
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
@Override
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
private void writeAudioDataToFile() {
byte data[] = new byte[bufferSize];
String filename = getTempFilename();
FileOutputStream os = null;
try {
os = new FileOutputStream(filename);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
int read = 0;
if (null != os) {
while (isRecording) {
read = recorder.read(data, 0, bufferSize);
if (AudioRecord.ERROR_INVALID_OPERATION != read) {
try {
os.write(data);
} catch (IOException e) {
e.printStackTrace();
}
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
public void stopRecording() {
if (null != recorder) {
isRecording = false;
recorder.stop();
recorder.release();
recorder = null;
recordingThread = null;
copyWaveFile(getTempFilename(), getFilename1(), getFilename2());
deleteTempFile();
}
}
private void deleteTempFile() {
File file = new File(getTempFilename());
file.delete();
}
private void copyWaveFile(String inFilename, String outFileName1, String outFileName2) {
FileInputStream in = null;
FileOutputStream out1 = null, out2 = null;
long totalAudioLen = 0;
long totalDataLen = totalAudioLen + 36;
long longSampleRate = RECORDER_SAMPLERATE;
int channels = 2;
long byteRate = RECORDER_BPP * RECORDER_SAMPLERATE * channels / 8;
byte[] data = new byte[bufferSize];
try {
in = new FileInputStream(inFilename);
out1 = new FileOutputStream(outFileName1);
out2 = new FileOutputStream(outFileName2);
totalAudioLen = in.getChannel().size();
totalDataLen = totalAudioLen + 36;
WriteWaveFileHeader(out1, totalAudioLen, totalDataLen, longSampleRate, channels, byteRate);
WriteWaveFileHeader(out2, totalAudioLen, totalDataLen, longSampleRate, channels, byteRate);
int read;
float rGain = 2.5f;
while ((read = in.read(data)) != -1) {
out1.write(data, 0, read); // writing non-gained data
for (int i = 0; i < read / 2; i++) {
short curSample = getShort(data[i * 2], data[i * 2 + 1]);
if (rGain != 1) {
// apply gain, clamping to the 16-bit range so loud samples
// do not wrap around and turn into noise
int amplified = (int) (curSample * rGain);
if (amplified > Short.MAX_VALUE) amplified = Short.MAX_VALUE;
else if (amplified < Short.MIN_VALUE) amplified = Short.MIN_VALUE;
// convert the gained sample back to byte data
byte[] a = getByteFromShort((short) amplified);
// modify the buffer to contain the gained sample
data[i * 2] = a[0];
data[i * 2 + 1] = a[1];
}
}
out2.write(data, 0, read); // writing gained data
}
out1.close();
out2.close();
in.close();
Toast.makeText(this, "Done!!", Toast.LENGTH_LONG).show();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
private short getShort(byte argB1, byte argB2) {
return (short)((argB1 & 0xff) | (argB2 << 8));
}
private byte[] getByteFromShort(short x) {
// variant 1 - noise
byte[] a = new byte[2];
a[0] = (byte)(x & 0xff);
a[1] = (byte)((x >> 8) & 0xff);
// variant 2 - noise and almost broke my ears - very loud
// ByteBuffer buffer = ByteBuffer.allocate(2);
// buffer.putShort(x);
// buffer.flip();
return a;
}
private void WriteWaveFileHeader(FileOutputStream out, long totalAudioLen, long totalDataLen, long longSampleRate, int channels, long byteRate)
throws IOException {
byte[] header = new byte[44];
header[0] = 'R';
header[1] = 'I';
header[2] = 'F';
header[3] = 'F';
header[4] = (byte)(totalDataLen & 0xff);
header[5] = (byte)((totalDataLen >> 8) & 0xff);
header[6] = (byte)((totalDataLen >> 16) & 0xff);
header[7] = (byte)((totalDataLen >> 24) & 0xff);
header[8] = 'W';
header[9] = 'A';
header[10] = 'V';
header[11] = 'E';
header[12] = 'f';
header[13] = 'm';
header[14] = 't';
header[15] = ' ';
header[16] = 16; // fmt chunk size (16 for PCM)
header[17] = 0;
header[18] = 0;
header[19] = 0;
header[20] = 1; // audio format: 1 = PCM
header[21] = 0;
header[22] = (byte) channels;
header[23] = 0;
header[24] = (byte)(longSampleRate & 0xff);
header[25] = (byte)((longSampleRate >> 8) & 0xff);
header[26] = (byte)((longSampleRate >> 16) & 0xff);
header[27] = (byte)((longSampleRate >> 24) & 0xff);
header[28] = (byte)(byteRate & 0xff);
header[29] = (byte)((byteRate >> 8) & 0xff);
header[30] = (byte)((byteRate >> 16) & 0xff);
header[31] = (byte)((byteRate >> 24) & 0xff);
header[32] = (byte)(channels * RECORDER_BPP / 8); // block align
header[33] = 0;
header[34] = RECORDER_BPP; // bits per sample
header[35] = 0;
header[36] = 'd';
header[37] = 'a';
header[38] = 't';
header[39] = 'a';
header[40] = (byte)(totalAudioLen & 0xff);
header[41] = (byte)((totalAudioLen >> 8) & 0xff);
header[42] = (byte)((totalAudioLen >> 16) & 0xff);
header[43] = (byte)((totalAudioLen >> 24) & 0xff);
out.write(header, 0, 44);
}
}
I would like to know whether I need to add any extra lines of code to make my AudioRecord work reliably on Samsung devices.
We're also struggling with audio recording on some Samsung Android devices. Unfortunately it seems to be very broken, as even different revisions of the same phone model behave differently with the same codebase.
Here are my current findings, hoping you find something useful:
1. Broken initialization:
Unfortunately, the strategy you are using to query for valid recording configurations will fail at least on Samsung Galaxy Young and Ace models running Android 2.3.
The problem is that some invalid AudioRecord configurations, instead of simply failing, will completely brick the audio capture subsystem if tried. You'll need to reset the phone to recover from this state.
2. Inconsistent sampling-rate support across revisions of the same phone model:
On an older Galaxy Ace phone, recording at 11025 Hz, 16-bit mono will succeed. On newer Ace revisions, this AudioRecord configuration will be accepted as valid, but the resulting recording is distorted, with a "chipmunk" effect. A very popular guitar tuner app that has hardcoded this sampling rate fails to give proper tuning readings on these phones precisely because of this problem!
3. Extremely low-volume audio capture with some configurations:
On the Galaxy Young and Galaxy Ace, recording from the mic or the default audio source at 44100 Hz (the supposedly canonical rate where everything should work fine) produces an undistorted but extremely low-volume recording. I haven't yet found a way to fix this other than software amplification (which is the equivalent of magnifying a very low-resolution image, with the consequent jaggedness of the result).
4. Failure to support the canonical 44100 Hz sampling rate on every audio capture source:
On the Galaxy Young and Galaxy Ace, recording from the Camcorder source at 44100 Hz fails (again, the configuration is accepted as valid) and produces complete garbage. However, recording at 8000 Hz, 16000 Hz and 48000 Hz works fine and produces a recording with very acceptable volume levels. What is frustrating is that according to the Android documentation, 44100 Hz is a sampling rate all devices SHOULD support.
5. OpenSL does not fix any of the problems reported:
Working with the NDK and OpenSL produces the same results as described. It seems that the AudioRecord class simply wraps calls to OpenSL, and the problem is either hardware-based or buried in lower-level kernel code.
This situation is very unfortunate indeed, as these models are becoming very popular - at least in Mexico.
Good luck - and please report back if you have better luck working with these phones.
=)
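Based on these findings, if you must probe at all, it seems safer to restrict the search to 16-bit mono PCM and a short whitelist of rates, trying the rates that behaved well above before 44100 Hz, and to release every recorder that fails to initialize. A rough sketch of that idea (the method name and the rate ordering are illustrative, not something verified on these devices):
private AudioRecord openRecorderConservatively() {
    // rates ordered so the ones that behaved well in the findings
    // above are tried before the troublesome 44100 Hz
    final int[] safeRates = {8000, 16000, 48000, 44100};
    for (int rate : safeRates) {
        int minSize = AudioRecord.getMinBufferSize(rate,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        if (minSize <= 0) {
            continue; // configuration rejected outright
        }
        try {
            AudioRecord recorder = new AudioRecord(
                    MediaRecorder.AudioSource.DEFAULT, rate,
                    AudioFormat.CHANNEL_IN_MONO,
                    AudioFormat.ENCODING_PCM_16BIT, minSize * 4);
            if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
                return recorder;
            }
            recorder.release(); // don't leak half-initialized instances
        } catch (IllegalArgumentException ignored) {
            // treat as unsupported and move on
        }
    }
    return null;
}
On devices where even probing is dangerous (finding 1), hardcoding 8000 or 16000 Hz and resampling afterwards may be the only safe option.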
Audio gain control: to increase the amplitude of the audio you need to calculate a gain factor and multiply every captured sample by it. In essence that is a per-sample multiply followed by clamping to the 16-bit range; a minimal sketch of the core loop is shown below (the helper name applyGain is illustrative), followed by the full recording activity. P.S. Ignore the unrelated code.
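// Minimal sketch: amplify 16-bit PCM samples in place, clamping so
// loud samples cannot wrap around the short range and turn into noise.
private static void applyGain(short[] samples, int length, float gain) {
    for (int i = 0; i < length; i++) {
        int amplified = (int) (samples[i] * gain);
        if (amplified > Short.MAX_VALUE) {
            amplified = Short.MAX_VALUE;
        } else if (amplified < Short.MIN_VALUE) {
            amplified = Short.MIN_VALUE;
        }
        samples[i] = (short) amplified;
    }
}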
public class MainActivity extends Activity {
public static final int SAMPLE_RATE = 16000;
private AudioRecord mRecorder;
private File mRecording;
private short[] mBuffer;
private final String startRecordingLabel = "Start recording";
private final String stopRecordingLabel = "Stop recording";
private boolean mIsRecording = false;
private ProgressBar mProgressBar;
float iGain = 1.0f;
CheckBox gain;
protected int bitsPerSamples = 16;
@Override
public void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.fragment_main);
initRecorder();
Button bluetooth = (Button)findViewById(R.id.blue);
gain = (CheckBox) findViewById(R.id.checkBox1);
mProgressBar = (ProgressBar) findViewById(R.id.progressBar);
final Button button = (Button) findViewById(R.id.start);
button.setText(startRecordingLabel);
bluetooth.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
Intent i = new Intent("");
}
});
gain.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView,
boolean isChecked) {
if (gain.isChecked()) {
iGain = 5.0f;
} else {
iGain = 2.0f;
}
}
});
button.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(final View v) {
if (!mIsRecording) {
button.setText(stopRecordingLabel);
mIsRecording = true;
mRecorder.startRecording();
mRecording = getFile("raw");
startBufferedWrite(mRecording);
} else {
button.setText(startRecordingLabel);
mIsRecording = false;
mRecorder.stop();
File waveFile = getFile("wav");
try {
rawToWave(mRecording, waveFile);
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
}
Toast.makeText(MainActivity.this,
"Recorded to " + waveFile.getName(),
Toast.LENGTH_SHORT).show();
}
}
});
}
@Override
public void onDestroy() {
mRecorder.release();
super.onDestroy();
}
private void initRecorder() {
int bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
mBuffer = new short[bufferSize];
mRecorder = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT,
bufferSize);
}
private void startBufferedWrite(final File file) {
new Thread(new Runnable() {
@Override
public void run() {
DataOutputStream output = null;
try {
output = new DataOutputStream(new BufferedOutputStream(
new FileOutputStream(file)));
while (mIsRecording) {
double sum = 0;
int readSize = mRecorder.read(mBuffer, 0,
mBuffer.length);
for (int frameIndex = 0; frameIndex < readSize; frameIndex++) {
// apply the gain factor to each 16-bit sample, clamping the
// result so it cannot wrap around the short range
int amplified = (int) (mBuffer[frameIndex] * iGain);
if (amplified > Short.MAX_VALUE) {
amplified = Short.MAX_VALUE;
} else if (amplified < Short.MIN_VALUE) {
amplified = Short.MIN_VALUE;
}
mBuffer[frameIndex] = (short) amplified;
output.writeShort(mBuffer[frameIndex]);
sum += mBuffer[frameIndex] * mBuffer[frameIndex];
}
if (readSize > 0) {
// show the RMS level of this buffer as a rough volume meter
final double amplitude = sum / readSize;
mProgressBar.setProgress((int) Math.sqrt(amplitude));
}
}
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
} finally {
mProgressBar.setProgress(0);
if (output != null) {
try {
output.flush();
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
} finally {
try {
output.close();
} catch (IOException e) {
Toast.makeText(MainActivity.this, e.getMessage(),
Toast.LENGTH_SHORT).show();
}
}
}
}
}
}).start();
}
private void rawToWave(final File rawFile, final File waveFile)
throws IOException {
byte[] rawData = new byte[(int) rawFile.length()];
DataInputStream input = null;
try {
input = new DataInputStream(new FileInputStream(rawFile));
input.readFully(rawData); // readFully guarantees the whole file is read
} finally {
if (input != null) {
input.close();
}
}
DataOutputStream output = null;
try {
output = new DataOutputStream(new FileOutputStream(waveFile));
// WAVE header
// see http://ccrma.stanford.edu/courses/422/projects/WaveFormat/
writeString(output, "RIFF"); // chunk id
writeInt(output, 36 + rawData.length); // chunk size
writeString(output, "WAVE"); // format
writeString(output, "fmt "); // subchunk 1 id
writeInt(output, 16); // subchunk 1 size
writeShort(output, (short) 1); // audio format (1 = PCM)
writeShort(output, (short) 1); // number of channels
writeInt(output, SAMPLE_RATE); // sample rate
writeInt(output, SAMPLE_RATE * 2); // byte rate
writeShort(output, (short) 2); // block align
writeShort(output, (short) 16); // bits per sample
writeString(output, "data"); // subchunk 2 id
writeInt(output, rawData.length); // subchunk 2 size
// Audio data (conversion big endian -> little endian):
// DataOutputStream.writeShort stored the samples big-endian, so
// reading them little-endian and re-writing big-endian swaps each
// pair into the little-endian order WAV requires
short[] shorts = new short[rawData.length / 2];
ByteBuffer.wrap(rawData).order(ByteOrder.LITTLE_ENDIAN)
.asShortBuffer().get(shorts);
ByteBuffer bytes = ByteBuffer.allocate(shorts.length * 2);
for (short s : shorts) {
// Apply Gain
/*
* s *= iGain; if(s>32767) { s=32767; } else if(s<-32768) {
* s=-32768; }
*/
bytes.putShort(s);
}
output.write(bytes.array());
} finally {
if (output != null) {
output.close();
}
}
}
private File getFile(final String suffix) {
Time time = new Time();
time.setToNow();
return new File(Environment.getExternalStorageDirectory(),
time.format("%Y%m%d%H%M%S") + "." + suffix);
}
private void writeInt(final DataOutputStream output, final int value)
throws IOException {
output.write(value >> 0);
output.write(value >> 8);
output.write(value >> 16);
output.write(value >> 24);
}
private void writeShort(final DataOutputStream output, final short value)
throws IOException {
output.write(value >> 0);
output.write(value >> 8);
}
private void writeString(final DataOutputStream output, final String value)
throws IOException {
for (int i = 0; i < value.length(); i++) {
output.write(value.charAt(i));
}
}
}
