Subtle 'ticks' noise in WAVE files with AudioRecord - android

My app saves mono WAVE files (44100 Hz, 16-bit PCM) captured with AudioRecord using a RODE VideoMicro. Everything works just fine, except that I can see some very subtle "ticks" in the WAVE files, uniformly spaced 0.135 seconds apart. You can hear the ticks if your volume is very loud, or you can see them clearly in Audacity. Here is a sample WAVE file for you to analyze, and here is a screenshot of Audacity with 4 ticks in the sound file:
Any idea what is happening?
I am not sure where those ticks come from, but I suspect there is something strange in my code, since the ticks are so uniformly spaced. But of course it could be the phone, the external microphone, or the interaction between the two. The BufferSize is 3528 and is retrieved with
static int BufferSize = AudioRecord.getMinBufferSize(SampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
This is the relevant part of my code:
...
recorder = new AudioRecord((denoise.isChecked()) ? MediaRecorder.AudioSource.VOICE_RECOGNITION : MediaRecorder.AudioSource.MIC,
SampleRate, AudioFormat.CHANNEL_IN_MONO, AudioEncoding, 2 * BufferSize); // 2 bytes in 16bit format
try {
recordingThread = new Thread(this::writeAudioDataToPCMFile, "AudioRecorder Thread");
recordingThread.start();
} catch (Exception e) {
e.printStackTrace();
}
}
}
private void writeAudioDataToPCMFile() {
String auxfile = AveActivity.REC_DIR + "/aux" + new Random().nextInt() + ".pcm";
short[] sData = new short[BufferSize];
int product;
int i, readSize;
int tooloud;
int tooloudlim = BufferSize / 50; // 2% clip
long mean;
FileOutputStream os;
try {
os = new FileOutputStream(auxfile);
} catch (FileNotFoundException e) {
runOnUiThread(() -> textsound.setText(r.getString(R.string.errstart)));
e.printStackTrace();
return;
}
waveformLastList = new ArrayList<>();
//int j=0;
while (isRecording) {
readSize = recorder.read(sData, 0, BufferSize);
tooloud = 0;
mean = 0;
for (i = 0; i < readSize; ++i) {
product = (int) (sData[i] * gain);
if (Math.abs(product) <= Short.MAX_VALUE) {
sData[i] = (short) product;
} else {
// Audio clipping!
sData[i] = (short) (Integer.signum(product) * Short.MAX_VALUE);
++tooloud;
}
mean += sData[i] * sData[i];
}
if (tooloud > tooloudlim)
runOnUiThread(this::tooLoudWarning);
try {
os.write(short2byte(sData), 0, 2 * BufferSize); // 2 bytes in 16bit format
} catch (IOException e) {
e.printStackTrace();
}
}
try {
os.close();
CharSequence time = DateFormat.format("yyMMdd-kkmmss", new Date(System.currentTimeMillis()));
song = birdname + "_" + time + ((denoise.isChecked()) ? "_NR" : "") + ((db == 0) ? "" : "_db" + db) + "." + songFormat;
runOnUiThread(() -> textsound.setText(r.getString(R.string.saving, songFormat)));
PCMtoWAV(new File(auxfile), new File(AveActivity.REC_DIR, song));
} catch (IOException e) {
runOnUiThread(() -> textsound.setText(r.getString(R.string.errsaving, songFormat)));
e.printStackTrace();
}
runOnUiThread(() -> {
activatebtns();
micColor(Color.WHITE);
});
}
private void PCMtoWAV(File input, File output) throws IOException {
// Very fast: about 1/10 of a second per minute of recording
byte[] data = new byte[2 * BufferSize];
try (FileOutputStream outStream = new FileOutputStream(output)) {
FileInputStream inStream = new FileInputStream(input);
// Write WAVE header
outStream.write(buildWAVHeader(input.length()));
// Write audio data
while (inStream.read(data) != -1)
outStream.write(data);
// Write Metadata to LIST....INFO chunk
outStream.write(buildWAVMeta());
inStream.close();
outStream.close();
input.delete();
}
}
EDIT:
Following Uriel's advice, I implemented a BlockingQueue like this:
public void RecordSong() {
if (isRecording) {
isRecording = false;
recorder.stop();
recorder.release();
recordingThread.interrupt();
writingThread.interrupt();
...
} else {
recorder = new AudioRecord(...);
try {
LinkedBlockingDeque<Byte> queue = new LinkedBlockingDeque<>();
recorder.startRecording();
recordingThread = new Thread(() -> captureAudioData(queue), "AudioRecorder Capture Thread");
writingThread = new Thread(() -> writeAudioDataToPCMFile(queue), "AudioRecorder Writing Thread");
recordingThread.setPriority(Thread.MAX_PRIORITY);
writingThread.setPriority(Thread.MAX_PRIORITY);
recordingThread.start();
writingThread.start();
...
}
}
private void captureAudioData(LinkedBlockingDeque<Byte> q) {
...
while (isRecording) {
capture,gain,etc...
try {
for (byte b : short2byte(sData)) q.put(b);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private void writeAudioDataToPCMFile(LinkedBlockingDeque<Byte> q) {
String auxfile = AveActivity.REC_DIR + "/aux" + new Random().nextInt() + ".pcm";
FileOutputStream os;
try {
os = new FileOutputStream(auxfile);
} catch (FileNotFoundException e) {
e.printStackTrace();
return;
}
try {
while (true) os.write(q.take());
} catch (IOException e) {
e.printStackTrace();
} catch (InterruptedException e) {
try {
os.close();
} catch (IOException e1) {
e.printStackTrace();
}
}
}
EDIT 2:
I tried the new approach (locked in the bathroom at 2 am!). Unfortunately the problem persists, with no change. It seems it is either a phone issue or a mic issue. In fact, removing the external mic seems to get rid of the ticks, so it is probably a mic issue.

What you are experiencing is probably a pop sound that is called "popcorn". It usually happens when there are gaps in the wave file. I suggest you do two things:
1. Set the thread priority to the highest.
2. Don't read and write on the same thread: use one thread to read and put the data into a queue, and a second thread to write the queued data into a file.
This is what I do in my app and there are no pops. Let me know if it helped.
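A minimal sketch of that split against the question's fields (BufferSize, recorder, isRecording, short2byte), assuming the output path is built the same way as before; chunk-sized byte arrays are queued instead of single bytes so the reader loop stays fast:
final BlockingQueue<byte[]> chunks = new LinkedBlockingQueue<>();
final String auxfile = AveActivity.REC_DIR + "/aux" + new Random().nextInt() + ".pcm";
// Reader thread: only talks to AudioRecord and hands finished chunks to the queue.
Thread captureThread = new Thread(() -> {
    short[] sData = new short[BufferSize];
    while (isRecording) {
        int readSize = recorder.read(sData, 0, BufferSize);
        if (readSize > 0) {
            byte[] chunk = new byte[2 * readSize];
            System.arraycopy(short2byte(sData), 0, chunk, 0, 2 * readSize);
            chunks.offer(chunk);
        }
    }
}, "AudioRecorder Capture Thread");
// Writer thread: drains the queue to disk, so slow I/O never stalls the reader.
Thread writerThread = new Thread(() -> {
    try (FileOutputStream os = new FileOutputStream(auxfile)) {
        while (isRecording || !chunks.isEmpty()) {
            byte[] chunk = chunks.poll(100, TimeUnit.MILLISECONDS);
            if (chunk != null) os.write(chunk);
        }
    } catch (IOException | InterruptedException e) {
        e.printStackTrace();
    }
}, "AudioRecorder Writing Thread");
captureThread.setPriority(Thread.MAX_PRIORITY);
captureThread.start();
writerThread.start();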

Related

Android AudioRecorder read method issue on two mobile devices: 1) Redmi Note 5 Pro, 2) Samsung M10s

The app starts recording using Android AudioRecord. Below is the code that initializes the AudioRecord object and reads data from it with its read method in a while loop. On the Redmi device the loop count and file size are correct every time, but when the same code runs on the Samsung device the loop count and file size are different.
Log.d(TAG, "writeAudioDataToFile: "+recordFile.length());
This is the logcat for an approximately 5-second recording on the Samsung device:
2020-08-14 13:27:47.459 31260-31805/com.example.voicerecord D/WavRecorder: writeAudioDataToFile: 71138592
This is the logcat for an approximately 5-second recording on the Redmi device:
2020-08-14 13:28:36.384 23780-23906/com.example.voicerecord D/WavRecorder: writeAudioDataToFile: 491008
The same code generates different file sizes.
file size = sampling rate * sample size * time in sec * channels / 8
in this code
sampling rate = 44100 Hz
sample size = 16 bit
time in sec = 5 sec
channel = 1 for mono
So the Redmi device generates the right file size but the Samsung device does not. What is the solution for the Samsung device?
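Plugging the numbers above into the formula gives 44100 * 16 * 5 * 1 / 8 = 441,000 bytes for 5 seconds of audio. The Redmi file (491,008 bytes, roughly 5.6 seconds including buffer granularity) is in the right ballpark, while the Samsung file (71,138,592 bytes) is more than 160 times larger than expected.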
@Override
public void prepare(String outputFile, int channelCount, int sampleRate, int bitrate) {
this.sampleRate = sampleRate;
this.framesPerVisInterval = (int)((VISUALIZATION_INTERVAL/1000f)/(1f/sampleRate));
Log.d(TAG, "prepare: channelcount"+channelCount);
this.channelCount = channelCount;
recordFile = new File(outputFile);
if (recordFile.exists() && recordFile.isFile()) {
int channel = channelCount == 1 ? AudioFormat.CHANNEL_IN_MONO :
AudioFormat.CHANNEL_IN_STEREO;
try {
bufferSize = AudioRecord.getMinBufferSize(sampleRate,
channel,
AudioFormat.ENCODING_PCM_16BIT);
if (bufferSize == AudioRecord.ERROR || bufferSize == AudioRecord.ERROR_BAD_VALUE) {
bufferSize = AudioRecord.getMinBufferSize(sampleRate,
channel,
AudioFormat.ENCODING_PCM_16BIT);
}
Log.d(TAG, "prepare: "+bufferSize);
recorder = new AudioRecord(
MediaRecorder.AudioSource.MIC,
sampleRate,
channel,
AudioFormat.ENCODING_PCM_16BIT,
bufferSize
);
} catch (IllegalArgumentException e) {
Timber.e(e, "sampleRate = " + sampleRate + " channel = " + channel + " bufferSize = " + bufferSize);
if (recorder != null) {
recorder.release();
}
}
if (recorder != null && recorder.getState() == AudioRecord.STATE_INITIALIZED) {
if (recorderCallback != null) {
recorderCallback.onPrepareRecord();
}
} else {
Timber.e("prepare() failed");
if (recorderCallback != null) {
recorderCallback.onError(new RecorderInitException());
}
}
} else {
if (recorderCallback != null) {
recorderCallback.onError(new InvalidOutputFile());
}
}
}
@Override
public void startRecording() {
if (recorder != null && recorder.getState() == AudioRecord.STATE_INITIALIZED) {
if (isPaused) {
startRecordingTimer();
recorder.startRecording();
if (recorderCallback != null) {
recorderCallback.onStartRecord(recordFile);
}
isPaused = false;
} else {
try {
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
@Override
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
startRecordingTimer();
if (recorderCallback != null) {
recorderCallback.onStartRecord(recordFile);
}
} catch (IllegalStateException e) {
Timber.e(e, "startRecording() failed");
if (recorderCallback != null) {
recorderCallback.onError(new RecorderInitException());
}
}
}
}
}
private void writeAudioDataToFile() {
byte[] data = new byte[bufferSize];
FileOutputStream fos;
try {
if(isAppend){
fos = new FileOutputStream(recordFile,true);
}else{
fos = new FileOutputStream(recordFile);
}
} catch (FileNotFoundException e) {
Timber.e(e);
fos = null;
}
if (null != fos) {
int chunksCount = 0;
ByteBuffer shortBuffer = ByteBuffer.allocate(2);
shortBuffer.order(ByteOrder.LITTLE_ENDIAN);
//TODO: Disable loop while pause.
while (isRecording) {
if (!isPaused) {
if (endProgress == totalProgress) {
chunksCount += recorder.read(data, 0, bufferSize);
if (AudioRecord.ERROR_INVALID_OPERATION != chunksCount) {
long sum = 0;
for (int i = 0; i < bufferSize; i += 2) {
//TODO: find a better way to convert bytes into shorts.
shortBuffer.put(data[i]);
shortBuffer.put(data[i + 1]);
sum += Math.abs(shortBuffer.getShort(0));
shortBuffer.clear();
}
lastVal = (int) (sum / (bufferSize / 16));
try {
fos.write(data);
} catch (IOException e) {
Timber.e(e);
stopRecording();
}
}
}
}
}
try {
fos.close();
} catch (IOException e) {
Timber.e(e);
}
Log.d(TAG, "writeAudioDataToFile: "+recordFile.length());
setWaveFileHeader(recordFile, channelCount);
}
}

Android How to convert UDP recieving audio into wav file

I am trying to receive streaming audio in my app.
Below is my code for receiving the audio stream:
public class ClientListen implements Runnable {
private Context context;
public ClientListen(Context context) {
this.context = context;
}
@Override
public void run() {
boolean run = true;
try {
DatagramSocket udpSocket = new DatagramSocket(8765);
InetAddress serverAddr = null;
try {
serverAddr = InetAddress.getByName("127.0.0.1");
} catch (UnknownHostException e) {
e.printStackTrace();
}
while (run) {
try {
byte[] message = new byte[8000];
DatagramPacket packet = new DatagramPacket(message,message.length);
Log.i("UDP client: ", "about to wait to receive");
udpSocket.setSoTimeout(10000);
udpSocket.receive(packet);
String text = new String(packet.getData(), 0, packet.getLength());
Log.d("Received text", text);
} catch (IOException e) {
Log.e(" UDP clien", "error: ", e);
run = false;
udpSocket.close();
}
}
} catch (SocketException e) {
Log.e("Socket Open:", "Error:", e);
} catch (IOException e) {
e.printStackTrace();
}
}
}
In the "Received text" log I can see the data coming in as:
D/Received text: �������n�����������q�9�$�0�/�G�{�������s�����JiH&������d�����Z���������d�����E������C�+
��l��y�����������v���9����������u��f�j�������$�����K���������F��~R�2�����T��������������L�����!��G��8������s�;�"�,�R�����(��{�����*_��Z�������5������������\������x���j~������������/��=�����%�������
How can I store this data in a wav file?
What you see is the string representation of a single UDP packet, printed right after it was received and the receive block was released.
It is a very small fraction of the sound you want to convert to a wave file.
Soon the while loop will continue and you will receive another packet, and many more.
You need to collect all the packets in a buffer and then, when you decide the time is right, convert them to a wave file.
Remember that WAV is not just the sound bytes you get from UDP: the file also needs a 44-byte header prefix in order to be recognized by players.
Also, if the UDP payload is in another encoding format such as G.711, you must decode these bytes to PCM; if not, you will hear heavy noise in the wave file or the stream you play.
The buffer must be accurate. If it is too big (many empty bytes at the end of the array) you will hear a helicopter-like sound. If you know exactly what the size of each packet is, you can just write it to AudioTrack in order to play the stream, or accumulate it and convert it to a wave file when you see fit. But if you are not sure about the size, you can use this answer to get a buffer and then write the buffer to AudioTrack:
Android AudioRecord to Server over UDP Playback Issues.
That answer uses javax because it is very old, but you just need to use AudioTrack instead in order to stream. It is not in this scope, so I will just present the AudioTrack streaming replacement for the javax SourceDataLine:
final int SAMPLE_RATE = 8000; // Hertz
final int STREAM_TYPE = AudioManager.STREAM_NOTIFICATION;
int channelConfig = AudioFormat.CHANNEL_OUT_MONO;
int encodingFormat = AudioFormat.ENCODING_PCM_16BIT;
AudioTrack track = new AudioTrack(STREAM_TYPE, SAMPLE_RATE, channelConfig,
encodingFormat, BUF_SIZE, AudioTrack.MODE_STREAM);
track.play();
//.. then after receive UDP packets and the buffer is full:
if(track != null && packet != null){
track.write(audioStreamBuffer, 0, audioStreamBuffer.length);
}
You must not do this in the UI thread (I assume you know that).
In the code I will show you, I am receiving UDP audio logs from a PTT radio. The audio is encoded in G.711 u-law and each packet is exactly 172 bytes. The first 12 bytes are RTP and I need to strip them in order to eliminate small noises; the remaining 160 bytes are 20 ms of sound.
I must decode the G.711 u-law bytes into a PCM shorts array, then take the short array and make a wave file out of it. I do this after I see that no packet has been received for more than one second (so I know the speech ended, and the next block release belongs to a new speech, so I can take the old speech and make a wave file out of it). You can decide on a different buffering strategy depending on what you are doing.
It works fine. After decoding, the sound of the wave file is very good. If your UDP already carries PCM, you don't need to decode G.711 - just skip this part.
Finally, I want to mention that I saw many old answers with code using javax.sound.sampled, which seems great because it can easily convert an audio file or stream to wave format with AudioFileFormat, and also convert G.711 to PCM with AudioFormat manipulations. Unfortunately it is not part of current Java for Android. We must rely on Android's AudioTrack instead (and AudioRecord if we want to get sound from the mic), but AudioTrack only plays PCM and does not support the G.711 format - so when streaming G.711 with AudioTrack the noise is terrible. We must decode it in our code before writing it to the track. We also cannot convert to a wave file using AudioInputStream - I tried to do this with a javax.sound.sampled jar added to my app, but Android kept giving me errors such as "format not supported" for wave, and mixer errors when trying to stream. So I understood that current Android cannot work with javax.sound.sampled, and I went looking for low-level decoding of G711 and low-level creation of a wave file out of the byte-array buffer received from the UDP packets.
A. In the manifest, add:
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.INTERNET"/>
B. In the worker thread:
@Override
public void run(){
Log.i(TAG, "ClientListen thread started. Thread id: " + Thread.currentThread().getId());
try{
udpSocket = new DatagramSocket(port);
}catch(SocketException e){
e.printStackTrace();
}
byte[] messageBuf = new byte[BUF_SIZE];
Log.i(TAG, "waiting to receive packet in port: " + port);
if(udpSocket != null){
// here you can create new AudioTrack and play.track
byte pttSession[] = null;
while (running){
packet = new DatagramPacket(messageBuf, 0, messageBuf.length);
Log.d(TAG, "inside while running loop");
try{
Log.d(TAG, "receive block: waiting for user to press on
speaker(listening now inside udpSocket for DatagramPacket..)");
//get inside receive block until packet will arrive through this socket
long timeBeforeBlock = System.currentTimeMillis();
udpSocket.receive(packet);
Log.d(TAG, "client received a packet, receive block stopped)");
//this is for sending msg handler to the UI tread (you may skip this)
sendState("getting UDP packets...");
/* if previous block release happened more than one second ago - so this
packet release is for a new speech. so let’s copy the previous speech
to a wave file and empty the speech */
if(System.currentTimeMillis() - timeBeforeBlock > 1000 && pttSession != null){
convertBytesToFile(pttSession);
pttSession = null;
}
/* let’s take the packet that was released and start new speech or add it to the ongoing speech. */
byte[] slice = Arrays.copyOfRange(packet.getData(), 12, packet.getLength());
if(null == pttSession){
pttSession = slice;
}else{
pttSession = concat(pttSession, slice);
Log.d(TAG, "pttSession:" + Arrays.toString(pttSession));
}
}catch(IOException e){
Log.e(TAG, "UDP client IOException - error: ", e);
running = false;
}
}
// let’s take the latest speech and make a last wave file out of it.
if(pttSession != null){
convertBytesToFile(pttSession);
pttSession = null;
}
// if running == false then stop listen.
udpSocket.close();
handler.sendEmptyMessage(MainActivity.UdpClientHandler.UPDATE_END);
}else{
sendState("cannot bind datagram socket to the specified port:" + port);
}
}
private void convertBytesToFile(byte[] byteArray){
//decode the bytes from G711U to PCM (outcome is a short array)
G711UCodec decoder = new G711UCodec();
int size = byteArray.length;
short[] shortArray = new short[size];
decoder.decode(shortArray, byteArray, size, 0);
String newFileName = "speech_" + System.currentTimeMillis() + ".wav";
//convert the short array to wav (add the 44-byte header) and save it as a .wav file
Wave wave = new Wave(SAMPLE_RATE, (short) 1, shortArray, 0, shortArray.length - 1);
if(wave.writeToFile(Environment.getExternalStoragePublicDirectory
(Environment.DIRECTORY_DOWNLOADS),newFileName)){
Log.d(TAG, "wave.writeToFile successful!");
sendState("create file: "+ newFileName);
}else{
Log.w(TAG, "wave.writeToFile failed");
}
}
C. encoding/decoding G711 U-Law class:
taken from: https://github.com/thinktube-kobe/airtube/blob/master/JavaLibrary/src/com/thinktube/audio/G711UCodec.java
/**
* G.711 codec. This class provides u-law conversion.
*/
public class G711UCodec {
// s00000001wxyz...s000wxyz
// s0000001wxyza...s001wxyz
// s000001wxyzab...s010wxyz
// s00001wxyzabc...s011wxyz
// s0001wxyzabcd...s100wxyz
// s001wxyzabcde...s101wxyz
// s01wxyzabcdef...s110wxyz
// s1wxyzabcdefg...s111wxyz
private static byte[] table13to8 = new byte[8192];
private static short[] table8to16 = new short[256];
static {
// b13 --> b8
for (int p = 1, q = 0; p <= 0x80; p <<= 1, q += 0x10) {
for (int i = 0, j = (p << 4) - 0x10; i < 16; i++, j += p) {
int v = (i + q) ^ 0x7F;
byte value1 = (byte) v;
byte value2 = (byte) (v + 128);
for (int m = j, e = j + p; m < e; m++) {
table13to8[m] = value1;
table13to8[8191 - m] = value2;
}
}
}
// b8 --> b16
for (int q = 0; q <= 7; q++) {
for (int i = 0, m = (q << 4); i < 16; i++, m++) {
int v = (((i + 0x10) << q) - 0x10) << 3;
table8to16[m ^ 0x7F] = (short) v;
table8to16[(m ^ 0x7F) + 128] = (short) (65536 - v);
}
}
}
public int decode(short[] b16, byte[] b8, int count, int offset) {
for (int i = 0, j = offset; i < count; i++, j++) {
b16[i] = table8to16[b8[j] & 0xFF];
}
return count;
}
public int encode(short[] b16, int count, byte[] b8, int offset) {
for (int i = 0, j = offset; i < count; i++, j++) {
b8[j] = table13to8[(b16[i] >> 4) & 0x1FFF];
}
return count;
}
public int getSampleCount(int frameSize) {
return frameSize;
}
}
D. Converting to wave file:
Taken from here:
https://github.com/google/oboe/issues/320
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
public class Wave
{
private final int LONGINT = 4;
private final int SMALLINT = 2;
private final int INTEGER = 4;
private final int ID_STRING_SIZE = 4;
private final int WAV_RIFF_SIZE = LONGINT+ID_STRING_SIZE;
private final int WAV_FMT_SIZE = (4*SMALLINT)+(INTEGER*2)+LONGINT+ID_STRING_SIZE;
private final int WAV_DATA_SIZE = ID_STRING_SIZE+LONGINT;
private final int WAV_HDR_SIZE = WAV_RIFF_SIZE+ID_STRING_SIZE+WAV_FMT_SIZE+WAV_DATA_SIZE;
private final short PCM = 1;
private final int SAMPLE_SIZE = 2;
int cursor, nSamples;
byte[] output;
public Wave(int sampleRate, short nChannels, short[] data, int start, int end)
{
nSamples=end-start+1;
cursor=0;
output=new byte[nSamples*SMALLINT+WAV_HDR_SIZE];
buildHeader(sampleRate,nChannels);
writeData(data,start,end);
}
/*
by Udi for using byteArray directly
*/
public Wave(int sampleRate, short nChannels, byte[] data, int start, int end)
{
int size = data.length;
short[] shortArray = new short[size];
for (int index = 0; index < size; index++){
shortArray[index] = (short) data[index];
}
nSamples=end-start+1;
cursor=0;
output=new byte[nSamples*SMALLINT+WAV_HDR_SIZE];
buildHeader(sampleRate,nChannels);
writeData(shortArray,start,end);
}
// ------------------------------------------------------------
private void buildHeader(int sampleRate, short nChannels)
{
write("RIFF");
write(output.length);
write("WAVE");
writeFormat(sampleRate, nChannels);
}
// ------------------------------------------------------------
public void writeFormat(int sampleRate, short nChannels)
{
write("fmt ");
write(WAV_FMT_SIZE-WAV_DATA_SIZE);
write(PCM);
write(nChannels);
write(sampleRate);
write(nChannels * sampleRate * SAMPLE_SIZE);
write((short)(nChannels * SAMPLE_SIZE));
write((short)16);
}
// ------------------------------------------------------------
public void writeData(short[] data, int start, int end)
{
write("data");
write(nSamples*SMALLINT);
for(int i=start; i<=end; write(data[i++]));
}
// ------------------------------------------------------------
private void write(byte b)
{
output[cursor++]=b;
}
// ------------------------------------------------------------
private void write(String id)
{
if(id.length()!=ID_STRING_SIZE){
}
else {
for(int i=0; i<ID_STRING_SIZE; ++i) write((byte)id.charAt(i));
}
}
// ------------------------------------------------------------
private void write(int i)
{
write((byte) (i&0xFF)); i>>=8;
write((byte) (i&0xFF)); i>>=8;
write((byte) (i&0xFF)); i>>=8;
write((byte) (i&0xFF));
}
// ------------------------------------------------------------
private void write(short i)
{
write((byte) (i&0xFF)); i>>=8;
write((byte) (i&0xFF));
}
// ------------------------------------------------------------
public boolean writeToFile(File fileParent , String filename)
{
boolean ok=false;
try {
File path=new File(fileParent, filename);
FileOutputStream outFile = new FileOutputStream(path);
outFile.write(output);
outFile.close();
ok=true;
} catch (FileNotFoundException e) {
e.printStackTrace();
ok=false;
} catch (IOException e) {
ok=false;
e.printStackTrace();
}
return ok;
}
/**
* by Udi for test: write file with temp name so if you write many packets each packet will be written to a new file instead of deleting
* the previous file. (this is mainly for debug)
* @param fileParent
* @param filename
* @return
*/
public boolean writeToTmpFile(File fileParent , String filename)
{
boolean ok=false;
try {
File outputFile = File.createTempFile(filename, ".wav",fileParent);
FileOutputStream fileoutputstream = new FileOutputStream(outputFile);
fileoutputstream.write(output);
fileoutputstream.close();
ok=true;
} catch (FileNotFoundException e) {
e.printStackTrace();
ok=false;
} catch (IOException e) {
ok=false;
e.printStackTrace();
}
return ok;
}
}

I am trying to get code to work that decodes and displays a raw H264 tcp/ip video stream. Can someone diagnose what is wrong?

My goal is to play a raw H264 stream fed through a TCP/IP port on an Android device (a Samsung S10) using the MediaCodec class. It isn't working; I just see a blank screen.
I have a few guesses about the issue: 1) Do I need to group NAL units together? Right now I feed each unit individually to the MediaCodec. 2) Do I need to make changes to the server? Are there variants of H264 that the MediaCodec class cannot handle?
I was able to port FFmpeg to Android Studio and got this solution to work. However, it is slow because it uses a software codec, so I decided to use MediaCodec to try the hardware codec. The code below shows my effort. The codec is initialized in asynchronous mode. I have a separate thread that reads and queues the NAL frames from the TCP socket. Frames are stored in a buffer, and if the buffer overflows, some frames are discarded. The onInputBufferAvailable callback feeds one NAL unit at a time to the MediaCodec class.
public void initializePlaybackCodec()
{
mWidth = 1536;
mHeight = 864;
MediaFormat decoderFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
try {
codec = MediaCodec.createDecoderByType(MIME_TYPE);
} catch (IOException e) {
Log.e(TAG, "CODEC INIT: Failed to initialize media codec", e);
Toast.makeText(this, "Failed to initialize media codec",
Toast.LENGTH_LONG).show();
finish();
return;
}
Log.i(TAG,"HERE CODEC INITIALIZED");
final int videoQueueSize = 10;
final Semaphore mutex = new Semaphore(1);
final Semaphore queueData = new Semaphore(0);
final ArrayBlockingQueue<ByteBuffer> queue = new ArrayBlockingQueue<ByteBuffer>(videoQueueSize);
codec.setCallback(new MediaCodec.Callback() {
long reference_epoch = System.currentTimeMillis();
long current_epoch = reference_epoch;
byte[] buffer = new byte[blockSize];
int nextStart = 0;
@Override
public void onInputBufferAvailable(MediaCodec mc, int inputBufferId) {
current_epoch = System.currentTimeMillis();
ByteBuffer inputBuffer = codec.getInputBuffer(inputBufferId);
android.media.MediaCodecInfo info = codec.getCodecInfo();
//Log.i(TAG,"CODEC CALLBACK: info "+info.getName()+" Encoder: "+info.isEncoder()+" ");
//String[] types = info.getSupportedTypes();
//for (int j = 0; j < types.length; j++) {
// Log.i(TAG,"CODEC CALLBACK: supportedType "+types[j]);
//}
// Read data from the Queue
ByteBuffer b = null;
Log.i(TAG,"CODEC CALLBACK: input");
try {
queueData.acquire();
} catch (InterruptedException e) {
Log.e(TAG, "CODEC CALLBACK: queueData acquire interrupted");
codec.stop();
finish();
return;
}
try {
mutex.acquire();
} catch (InterruptedException e) {
Log.e(TAG, "CODEC CALLBACK: mutex acquire interrupted");
codec.stop();
finish();
return;
}
try {
b = queue.take();
} catch (InterruptedException e) {
Log.e(TAG, "CODEC CALLBACK: take interrupted");
codec.stop();
finish();
return;
}
byte[] bb = b.array();
//Log.i(TAG,"CODEC CALLBACK: Contents being sent "+bb[4]/32+" "+bb[4]%32+" "+bb.length);
Log.i(TAG,"CODEC CALLBACK: Contents being sent "+Integer.toHexString(bb[0])+" "+Integer.toHexString(bb[1])+" "+Integer.toHexString(bb[2])+" "+Integer.toHexString(bb[3])+" "+Integer.toHexString(bb[4])+" ");
int ref_idc = bb[4]/32;
int unit_type = bb[4]%32;
//for (int i = 0; i < bb.length && i < 5; ++i) {
// Log.i(TAG, "CODEC CALLBACK: bb["+i+"]="+bb[i]);
//}
mutex.release();
// fill inputBuffer with valid data
//Log.i(TAG,"CODEC CALLBACK: put "+b.remaining()+" "+b.capacity());
inputBuffer.clear();
//Log.i(TAG,"CODEC CALLBACK: before put "+inputBuffer.remaining()+" "+b.position());
b.position(0);
inputBuffer.put(b);
//Log.i(TAG,"CODEC CALLBACK: after put "+inputBuffer.remaining());
//Log.i(TAG,"CODEC CALLBACK: queue "+(current_epoch-reference_epoch)*1000+" "+inputBuffer.capacity()+" "+inputBuffer.remaining());
codec.queueInputBuffer(inputBufferId,0, b.remaining(), (current_epoch-reference_epoch)*1000, 0);
}
@Override
public void onOutputBufferAvailable(MediaCodec mc, int outputBufferId,
MediaCodec.BufferInfo info) {
ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufferId);
MediaFormat bufferFormat = codec.getOutputFormat(outputBufferId); // option A
// bufferFormat is equivalent to mOutputFormat
// outputBuffer is ready to be processed or rendered.
Log.i(TAG,"CODEC CALLBACK: output");
codec.releaseOutputBuffer(outputBufferId, true);
Log.i(TAG,"CODEC CALLBACK: output done");
}
@Override
public void onOutputFormatChanged(MediaCodec mc, MediaFormat format) {
// Subsequent data will conform to new format.
// Can ignore if using getOutputFormat(outputBufferId)
//mOutputFormat = format; // option B
Log.i(TAG,"CODEC CALLBACK: output format changed");
}
@Override
public void onError(MediaCodec codec, MediaCodec.CodecException e) {
Log.e(TAG,"CODEC CALLBACK: Media Codec Error");
}
});
codec.configure(decoderFormat, m_surface.getHolder().getSurface(), null, 0);
Thread thread = new Thread(){
public void run(){
Socket socket;
InputStream input;
try {
socket = new Socket(mServerAddr, Integer.parseInt(mServerPort));
input = socket.getInputStream();
} catch (IOException e) {
Log.e(TAG, "RLOOP: Failed to open video socket", e);
Toast.makeText(ARActivity.this, "Failed to open video socket",
Toast.LENGTH_LONG).show();
finish();
return;
}
Log.i(TAG,"RLOOP: HERE SOCKET OPENED");
System.out.println("Socket opened");
byte[] buffer = new byte[blockSize];
java.nio.ByteBuffer byteBuffer = java.nio.ByteBuffer.allocate(blockSize);
int nextStart = 0;
while (true) {
int size = 1;
try {
size = input.read(buffer,nextStart,blockSize-nextStart);
Log.i(TAG,"RLOOP: Read from video stream "+size+" bytes start="+nextStart);
Log.i(TAG, "RLOOP: First bytes "+buffer[nextStart]+" "+buffer[nextStart+1]+" "+
buffer[nextStart+2]+" "+buffer[nextStart+3]+" "+buffer[nextStart+4]);
if (size==0) {
Log.e(TAG, "RLOOP: Video stream finished");
Toast.makeText(ARActivity.this, "Video stream finished",
Toast.LENGTH_LONG).show();
codec.stop();
finish();
return;
}
int endPos = 2;
while (endPos > 0) {
endPos = -1;
int zeroCount = 0;
for (int i = nextStart; (i < size+nextStart && endPos < 1); ++i) {
//Log.i(TAG,"Zero count pos "+i+" "+zeroCount);
if (buffer[i]==0) {
++zeroCount;
} else if (buffer[i]==1 && zeroCount > 1) {
if (zeroCount > 3) {
zeroCount = 3;
}
endPos = i-zeroCount;
Log.i(TAG,"RLOOP: Found marker at pos "+(i-zeroCount));
zeroCount = 0;
} else {
zeroCount = 0;
}
}
Log.i(TAG,"RLOOP: State nextStart="+nextStart+" endPos="+endPos+" size="+size);
if (endPos < 0) {
if (size + nextStart == blockSize) {
Log.e(TAG, "RLOOP: Error reading video stream2");
//Toast.makeText(ARActivity.this, "Error reading video stream2",
// Toast.LENGTH_LONG).show();
//finish();
endPos = blockSize;
nextStart = 0;
Log.i(TAG, "RLOOP: BLOCK OVERFLOW " + endPos);
} else {
nextStart = size + nextStart;
}
} else if (endPos==0) {
Log.i(TAG, "RLOOP: BLOCK NOT COMPLETE " + endPos);
//nextStart = size+nextStart;
} else {
Log.i(TAG, "RLOOP: PROCESSING BLOCK " + endPos);
//Log.i(TAG,"BUFFER REMAINING "+byteBuffer.remaining());
//Log.i(TAG,"BUFFER POSITION "+byteBuffer.position());
//System.arraycopy(buffer, 4, buffer, 0, size + nextStart - 4);
//nextStart = nextStart - 4;
//if (nextStart < 0) {
// size = size + nextStart;
// nextStart = 0;
//}
//endPos = endPos-4;
byteBuffer = java.nio.ByteBuffer.allocate(endPos+3);
byteBuffer.put(buffer, 0, endPos);
//byteBuffer = java.nio.ByteBuffer.wrap(buffer, 0, endPos);
//byteBuffer.put(buffer,0, endPos);
Log.i(TAG, "RLOOP: BUFFER REMAINING2 " + byteBuffer.remaining());
Log.i(TAG, "RLOOP: BUFFER POSITION2 " + byteBuffer.position());
Log.i(TAG, "RLOOP: First send bytes " + buffer[0] + " " + buffer[1] + " " +
buffer[2] + " " + buffer[3] + " " + buffer[4]);
//byte[] bb = byteBuffer.array();
Log.i(TAG,"RLOOP: Contents being sent");
//for (int i = 0; i < bb.length && i < 10; ++i) {
// Log.i(TAG, "RLOOP: bb["+i+"]="+bb[i]);
//}
try {
mutex.acquire();
} catch (InterruptedException e) {
Log.e(TAG, "RLOOP: Mutex interrupted");
codec.stop();
finish();
return;
}
Log.i(TAG,"RLOOP: HERE1");
if (queue.size() == videoQueueSize) {
try {
queue.take();
} catch (InterruptedException e) {
Log.e(TAG, "RLOOP: queue.take interrupted 2");
codec.stop();
finish();
return;
}
Log.i(TAG,"RLOOP: HERE2");
try {
queueData.acquire();
} catch (InterruptedException e) {
Log.e(TAG, "RLOOP: queueData.acquire() interrupted 2");
codec.stop();
finish();
return;
}
}
Log.i(TAG,"RLOOP: HERE3");
try {
queue.put(byteBuffer);
} catch (InterruptedException e) {
Log.e(TAG, "RLOOP: queue put interrupted");
codec.stop();
finish();
return;
}
queueData.release();
mutex.release();
if (endPos < size+nextStart) {
System.arraycopy(buffer, endPos, buffer, 0, size + nextStart - endPos);
nextStart = nextStart - endPos;
if (nextStart < 0) {
size = size + nextStart;
nextStart = 0;
}
}
}
}
nextStart = nextStart + size;
} catch (IOException e) {
Log.e(TAG, "RLOOP: Error reading from video stream");
Toast.makeText(ARActivity.this, "Error reading from video stream",
Toast.LENGTH_LONG).show();
codec.stop();
finish();
return;
}
}
}
};
thread.start();
codec.start();
return;
}
My expected result is to see a video on the android device. My actual result is that the onOutputBufferAvailable function is never called.
I am including a sample debugging output to show some of the NAL units being sent to the MediaCodec class.
2019-06-19 12:22:38.229 3325-3325/com.example.unrealar I/ARActivity: CODEC CALLBACK: input
2019-06-19 12:22:38.249 3325-3325/com.example.unrealar I/ARActivity: CODEC CALLBACK: Contents being sent 0 0 0 1 61
2019-06-19 12:22:38.251 3325-3325/com.example.unrealar I/ARActivity: CODEC CALLBACK: input
2019-06-19 12:22:38.266 3325-3325/com.example.unrealar I/ARActivity: CODEC CALLBACK: Contents being sent 0 0 0 1 61
2019-06-19 12:22:38.268 3325-3325/com.example.unrealar I/ARActivity: CODEC CALLBACK: input
2019-06-19 12:22:38.281 3325-3325/com.example.unrealar I/ARActivity: CODEC CALLBACK: Contents being sent 0 0 0 1 61
2019-06-19 12:22:38.282 3325-3539/com.example.unrealar I/MediaCodec: setCodecState state : 0
2019-06-19 12:22:38.282 3325-3325/com.example.unrealar I/ARActivity: CODEC CALLBACK: input
I don't see you configuring the codec. By that I mean sending SPS and PPS with the flag BUFFER_FLAG_CODEC_CONFIG.
Such data [CSD] must be marked using the flag BUFFER_FLAG_CODEC_CONFIG in a call to queueInputBuffer
It's documented here.
There are many ways of transferring the SPS/PPS in an H264 stream. The most common ones (at least to me) are:
At the beginning of the stream and every time the encoding parameters change.
With every NALU. Every NALU carries its own set of CSD. You only need to reconfigure if the values change.
SPS and PPS before each key frame and PPS before other slices. This is called Annex B.
As FFmpeg was able to decode the stream, I'd guess that these values are part of the stream.
So I guess you need to parse your H264 stream to find the SPS and PPS and send a buffer with these values and the BUFFER_FLAG_CODEC_CONFIG flag to the decoder.
Or, if you decide to buffer some frames at the beginning before you start decoding, you could also put these values inside your MediaFormat as "csd-0" (SPS) and "csd-1" (PPS).
An SPS starts with the NALU sequence 0x00 0x00 0x00 0x01 0x67.
A PPS starts with the NALU sequence 0x00 0x00 0x00 0x01 0x68.
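A minimal sketch of the csd-0/csd-1 route (the sps and pps byte arrays are assumed to have been extracted from the Annex B stream by your own parsing code; this is not the asker's code):
// Sketch: hand the codec-specific data to the decoder up front via MediaFormat,
// so regular NAL units can be queued afterwards without a separate config buffer.
private void configureDecoderWithCsd(MediaCodec codec, Surface surface,
                                     int width, int height, byte[] sps, byte[] pps) {
    MediaFormat format = MediaFormat.createVideoFormat("video/avc", width, height);
    format.setByteBuffer("csd-0", ByteBuffer.wrap(sps)); // SPS, including its 00 00 00 01 start code
    format.setByteBuffer("csd-1", ByteBuffer.wrap(pps)); // PPS, including its 00 00 00 01 start code
    codec.configure(format, surface, null, 0);
}
Alternatively, the same SPS/PPS bytes can be queued through queueInputBuffer with the BUFFER_FLAG_CODEC_CONFIG flag before any frame data is submitted.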

Using AudioRecord.read and always getting buffer with maximum amplitudes

I'm trying to record audio and detect silence to stop recording and write file. Below is the code snippet already available here: Android audio capture silence detection
public class RecordAudio extends AsyncTask<Void, Double, Void> {
@Override
protected Void doInBackground(Void... arg0) {
Log.w(TAG, "doInBackground");
try {
String filename = getTempFilename();
try {
os = new FileOutputStream(filename);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
bufferSize = AudioRecord.getMinBufferSize(frequency,
channelConfiguration, audioEncoding);
AudioRecord audioRecord = new AudioRecord( MediaRecorder.AudioSource.MIC, frequency,
channelConfiguration, audioEncoding, bufferSize);
short[] buffer = new short[bufferSize];
audioRecord.startRecording();
while (started) {
int bufferReadResult = audioRecord.read(buffer, 0,bufferSize);
if(AudioRecord.ERROR_INVALID_OPERATION != bufferReadResult){
//check signal
//put a threshold
int foundPeak = searchThreshold(buffer,threshold);
if (foundPeak >- 1) { //found signal
//record signal
byte[] byteBuffer =ShortToByte(buffer,bufferReadResult);
try {
os.write(byteBuffer);
} catch (IOException e) {
e.printStackTrace();
}
} else {//count the time
//don't save signal
Log.d(TAG, "Silence...");
}
} else {
Toast.makeText(getBaseContext(), "Error!!!!", Toast.LENGTH_SHORT).show();
}
}
audioRecord.stop();
//close file
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
copyWaveFile(getTempFilename(),getFilename());
deleteTempFile();
} catch (Throwable t) {
t.printStackTrace();
Log.e("AudioRecord", "Recording Failed");
}
return null;
}
byte [] ShortToByte(short [] input, int elements) {
int short_index, byte_index;
int iterations = elements; //input.length;
byte [] buffer = new byte[iterations * 2];
short_index = byte_index = 0;
for(/*NOP*/; short_index != iterations; /*NOP*/)
{
buffer[byte_index] = (byte) (input[short_index] & 0x00FF);
buffer[byte_index + 1] = (byte) ((input[short_index] & 0xFF00) >> 8);
++short_index; byte_index += 2;
}
return buffer;
}
int searchThreshold(short[]arr,short thr){
int peakIndex;
int arrLen=arr.length;
for (peakIndex=0;peakIndex<arrLen;peakIndex++){
if ((arr[peakIndex]>=thr) || (arr[peakIndex]<=-thr)){
Log.d(TAG, String.valueOf(arr[peakIndex]) + " - " + String.valueOf(peakIndex));
return peakIndex;
}
}
return -1; //not found
}
The problem is I'm not able to detect silence in the signal. Every time I read the buffer I get values greater than 32000 or less than -32000 (which I interpreted as maximum possible amplitudes for the signal, since the buffer is made up of short integer values), even when there is silence, so the threshold that determines what is "noise" and what is silence is always reached, and the searchThreshold function never returns -1.
Did anyone experience the same? I'm using a Samsung S5 device for tests.
Thanks in advance
In fact, values near 32000 or -32000 in the buffer represent silence, and the values tend towards zero when a signal is detected. So I inverted the conditions inside the searchThreshold function as below and achieved what I was looking for.
int searchThreshold(short[]arr,short thr_upper, short thr_lower){
int peakIndex;
int arrLen=arr.length;
for (peakIndex=0;peakIndex<arrLen;peakIndex++){
if ((arr[peakIndex] <= thr_upper) && (arr[peakIndex] >= thr_lower )){
return peakIndex;
}
}
return -1; //not found
}
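A short usage sketch with the modified signature (the threshold values here are illustrative only, not from the answer):
// Inside the recording loop, after audioRecord.read(buffer, 0, bufferSize):
// with the inverted logic, a sample inside [thr_lower, thr_upper] counts as signal.
int foundPeak = searchThreshold(buffer, (short) 30000, (short) -30000);
if (foundPeak > -1) {
    // signal found: keep writing this buffer to the output stream
} else {
    // every sample sits near the rails: treat the buffer as silence
}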

Recording .Wav with Android AudioRecorder

I have read a lot of pages about Android's AudioRecorder. You can see a list of them below the question.
I'm trying to record audio with AudioRecorder, but it's not working well.
public class MainActivity extends Activity {
AudioRecord ar = null;
int buffsize = 0;
int blockSize = 256;
boolean isRecording = false;
private Thread recordingThread = null;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
}
public void baslat(View v)
{
// when click to START
buffsize = AudioRecord.getMinBufferSize(44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
ar = new AudioRecord(MediaRecorder.AudioSource.MIC, 44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, buffsize);
ar.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
public void durdur(View v)
{
// When click to STOP
ar.stop();
isRecording = false;
}
private void writeAudioDataToFile() {
// Write the output audio in byte
String filePath = "/sdcard/voice8K16bitmono.wav";
short sData[] = new short[buffsize/2];
FileOutputStream os = null;
try {
os = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
while (isRecording) {
// gets the voice output from microphone to byte format
ar.read(sData, 0, buffsize/2);
Log.d("eray","Short wirting to file" + sData.toString());
try {
// // writes the data to file from buffer
// // stores the voice buffer
byte bData[] = short2byte(sData);
os.write(bData, 0, buffsize);
} catch (IOException e) {
e.printStackTrace();
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
private byte[] short2byte(short[] sData) {
int shortArrsize = sData.length;
byte[] bytes = new byte[shortArrsize * 2];
for (int i = 0; i < shortArrsize; i++) {
bytes[i * 2] = (byte) (sData[i] & 0x00FF);
bytes[(i * 2) + 1] = (byte) (sData[i] >> 8);
sData[i] = 0;
}
return bytes;
}
It's creating a .wav file, but when I try to listen to it, it won't open; I get a "file not supported" error. I've tried to play the file with quite a few media player applications.
NOTE: I have to use AudioRecorder instead of MediaRecorder because my app does other processing while recording (displaying an equalizer).
Here is the list of pages that I've read about this subject:
http://developer.android.com/reference/android/media/AudioRecord.html#read(short[],%20int,%20int)
Android AudioRecord example
http://audiorecordandroid.blogspot.in
AudioRecord object not initializing
Recording a wav file from the mic in Android - problems
http://i-liger.com/article/android-wav-audio-recording
Creating a WAV file from raw PCM data using the Android SDK
Capturing Sound for Analysis and Visualizing Frequencies in Android
There are a lot of different ways to go about this. I've tried lots of them but nothing works for me. I've been working on this problem for about 6 hours now so I would appreciate a definitive answer, ideally some sample code.
I wrote a simple (by which you should read, not to professional standards) class to do this yesterday, and it works.
private class Wave {
private final int LONGINT = 4;
private final int SMALLINT = 2;
private final int INTEGER = 4;
private final int ID_STRING_SIZE = 4;
private final int WAV_RIFF_SIZE = LONGINT + ID_STRING_SIZE;
private final int WAV_FMT_SIZE = (4 * SMALLINT) + (INTEGER * 2) + LONGINT + ID_STRING_SIZE;
private final int WAV_DATA_SIZE = ID_STRING_SIZE + LONGINT;
private final int WAV_HDR_SIZE = WAV_RIFF_SIZE + ID_STRING_SIZE + WAV_FMT_SIZE + WAV_DATA_SIZE;
private final short PCM = 1;
private final int SAMPLE_SIZE = 2;
int cursor, nSamples;
byte[] output;
public Wave(int sampleRate, short nChannels, short[] data, int start, int end) {
nSamples = end - start + 1;
cursor = 0;
output = new byte[nSamples * SMALLINT + WAV_HDR_SIZE];
buildHeader(sampleRate, nChannels);
writeData(data, start, end);
}
// ------------------------------------------------------------
private void buildHeader(int sampleRate, short nChannels) {
write("RIFF");
write(output.length);
write("WAVE");
writeFormat(sampleRate, nChannels);
}
// ------------------------------------------------------------
public void writeFormat(int sampleRate, short nChannels) {
write("fmt ");
write(WAV_FMT_SIZE - WAV_DATA_SIZE);
write(PCM);
write(nChannels);
write(sampleRate);
write(nChannels * sampleRate * SAMPLE_SIZE);
write((short) (nChannels * SAMPLE_SIZE));
write((short) 16);
}
// ------------------------------------------------------------
public void writeData(short[] data, int start, int end) {
write("data");
write(nSamples * SMALLINT);
for (int i = start; i <= end; write(data[i++])) ;
}
// ------------------------------------------------------------
private void write(byte b) {
output[cursor++] = b;
}
// ------------------------------------------------------------
private void write(String id) {
if (id.length() != ID_STRING_SIZE)
Utils.logError("String " + id + " must have four characters.");
else {
for (int i = 0; i < ID_STRING_SIZE; ++i) write((byte) id.charAt(i));
}
}
// ------------------------------------------------------------
private void write(int i) {
write((byte) (i & 0xFF));
i >>= 8;
write((byte) (i & 0xFF));
i >>= 8;
write((byte) (i & 0xFF));
i >>= 8;
write((byte) (i & 0xFF));
}
// ------------------------------------------------------------
private void write(short i) {
write((byte) (i & 0xFF));
i >>= 8;
write((byte) (i & 0xFF));
}
// ------------------------------------------------------------
public boolean wroteToFile(String filename) {
boolean ok = false;
try {
File path = new File(getFilesDir(), filename);
FileOutputStream outFile = new FileOutputStream(path);
outFile.write(output);
outFile.close();
ok = true;
} catch (FileNotFoundException e) {
e.printStackTrace();
ok = false;
} catch (IOException e) {
ok = false;
e.printStackTrace();
}
return ok;
}
}
Hope this helps
PCMAudioHelper solved my problem. I'll modify this answer and explain it, but first I have to do some tests with this class.
You might find OMRECORDER helpful for recording in .WAV format.
In case .aac works for you, check out this WhatsappAudioRecorder:
On startRecording button click:
Initialise a new thread.
Create a file with the .aac extension.
Create an output stream for the file.
Set the output.
Set the listener and execute the thread.
On stop click:
Interrupt the thread and the audio will be saved in the file.
Here is the full gist for reference:
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.os.Build;
import android.util.Log;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
public class AudioRecordThread implements Runnable {
private static final String TAG = AudioRecordThread.class.getSimpleName();
private static final int SAMPLE_RATE = 44100;
private static final int SAMPLE_RATE_INDEX = 4;
private static final int CHANNELS = 1;
private static final int BIT_RATE = 32000;
private final int bufferSize;
private final MediaCodec mediaCodec;
private final AudioRecord audioRecord;
private final OutputStream outputStream;
private OnRecorderFailedListener onRecorderFailedListener;
AudioRecordThread(OutputStream outputStream, OnRecorderFailedListener onRecorderFailedListener) throws IOException {
this.bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
this.audioRecord = createAudioRecord(this.bufferSize);
this.mediaCodec = createMediaCodec(this.bufferSize);
this.outputStream = outputStream;
this.onRecorderFailedListener = onRecorderFailedListener;
this.mediaCodec.start();
try {
audioRecord.startRecording();
} catch (Exception e) {
Log.w(TAG, e);
mediaCodec.release();
throw new IOException(e);
}
}
@Override
public void run() {
if (onRecorderFailedListener != null) {
Log.d(TAG, "onRecorderStarted");
onRecorderFailedListener.onRecorderStarted();
}
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
ByteBuffer[] codecInputBuffers = mediaCodec.getInputBuffers();
ByteBuffer[] codecOutputBuffers = mediaCodec.getOutputBuffers();
try {
while (!Thread.interrupted()) {
boolean success = handleCodecInput(audioRecord, mediaCodec, codecInputBuffers, Thread.currentThread().isAlive());
if (success)
handleCodecOutput(mediaCodec, codecOutputBuffers, bufferInfo, outputStream);
}
} catch (IOException e) {
Log.w(TAG, e);
} finally {
mediaCodec.stop();
audioRecord.stop();
mediaCodec.release();
audioRecord.release();
try {
outputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
private boolean handleCodecInput(AudioRecord audioRecord,
MediaCodec mediaCodec, ByteBuffer[] codecInputBuffers,
boolean running) throws IOException {
byte[] audioRecordData = new byte[bufferSize];
int length = audioRecord.read(audioRecordData, 0, audioRecordData.length);
if (length == AudioRecord.ERROR_BAD_VALUE ||
length == AudioRecord.ERROR_INVALID_OPERATION ||
length != bufferSize) {
if (length != bufferSize) {
if (onRecorderFailedListener != null) {
Log.d(TAG, "length != BufferSize calling onRecordFailed");
onRecorderFailedListener.onRecorderFailed();
}
return false;
}
}
int codecInputBufferIndex = mediaCodec.dequeueInputBuffer(10 * 1000);
if (codecInputBufferIndex >= 0) {
ByteBuffer codecBuffer = codecInputBuffers[codecInputBufferIndex];
codecBuffer.clear();
codecBuffer.put(audioRecordData);
mediaCodec.queueInputBuffer(codecInputBufferIndex, 0, length, 0, running ? 0 : MediaCodec.BUFFER_FLAG_END_OF_STREAM);
}
return true;
}
private void handleCodecOutput(MediaCodec mediaCodec,
ByteBuffer[] codecOutputBuffers,
MediaCodec.BufferInfo bufferInfo,
OutputStream outputStream)
throws IOException {
int codecOutputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
while (codecOutputBufferIndex != MediaCodec.INFO_TRY_AGAIN_LATER) {
if (codecOutputBufferIndex >= 0) {
ByteBuffer encoderOutputBuffer = codecOutputBuffers[codecOutputBufferIndex];
encoderOutputBuffer.position(bufferInfo.offset);
encoderOutputBuffer.limit(bufferInfo.offset + bufferInfo.size);
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != MediaCodec.BUFFER_FLAG_CODEC_CONFIG) {
byte[] header = createAdtsHeader(bufferInfo.size - bufferInfo.offset);
outputStream.write(header);
byte[] data = new byte[encoderOutputBuffer.remaining()];
encoderOutputBuffer.get(data);
outputStream.write(data);
}
encoderOutputBuffer.clear();
mediaCodec.releaseOutputBuffer(codecOutputBufferIndex, false);
} else if (codecOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
codecOutputBuffers = mediaCodec.getOutputBuffers();
}
codecOutputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
}
}
private byte[] createAdtsHeader(int length) {
int frameLength = length + 7;
byte[] adtsHeader = new byte[7];
adtsHeader[0] = (byte) 0xFF; // Sync Word
adtsHeader[1] = (byte) 0xF1; // MPEG-4, Layer (0), No CRC
adtsHeader[2] = (byte) ((MediaCodecInfo.CodecProfileLevel.AACObjectLC - 1) << 6);
adtsHeader[2] |= (((byte) SAMPLE_RATE_INDEX) << 2);
adtsHeader[2] |= (((byte) CHANNELS) >> 2);
adtsHeader[3] = (byte) (((CHANNELS & 3) << 6) | ((frameLength >> 11) & 0x03));
adtsHeader[4] = (byte) ((frameLength >> 3) & 0xFF);
adtsHeader[5] = (byte) (((frameLength & 0x07) << 5) | 0x1f);
adtsHeader[6] = (byte) 0xFC;
return adtsHeader;
}
private AudioRecord createAudioRecord(int bufferSize) {
AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT, bufferSize * 10);
if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
Log.d(TAG, "Unable to initialize AudioRecord");
throw new RuntimeException("Unable to initialize AudioRecord");
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
if (android.media.audiofx.NoiseSuppressor.isAvailable()) {
android.media.audiofx.NoiseSuppressor noiseSuppressor = android.media.audiofx.NoiseSuppressor
.create(audioRecord.getAudioSessionId());
if (noiseSuppressor != null) {
noiseSuppressor.setEnabled(true);
}
}
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
if (android.media.audiofx.AutomaticGainControl.isAvailable()) {
android.media.audiofx.AutomaticGainControl automaticGainControl = android.media.audiofx.AutomaticGainControl
.create(audioRecord.getAudioSessionId());
if (automaticGainControl != null) {
automaticGainControl.setEnabled(true);
}
}
}
return audioRecord;
}
private MediaCodec createMediaCodec(int bufferSize) throws IOException {
MediaCodec mediaCodec = MediaCodec.createEncoderByType("audio/mp4a-latm");
MediaFormat mediaFormat = new MediaFormat();
mediaFormat.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
mediaFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, SAMPLE_RATE);
mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, CHANNELS);
mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, bufferSize);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
mediaFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
try {
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
} catch (Exception e) {
Log.w(TAG, e);
mediaCodec.release();
throw new IOException(e);
}
return mediaCodec;
}
interface OnRecorderFailedListener {
void onRecorderFailed();
void onRecorderStarted();
}
}
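A minimal wiring sketch for the class above (the file name, location and error handling are assumptions, not part of the gist):
// Start (e.g. in the start-button handler): open the output stream and run the thread.
// recordingThread is assumed to be a field so the stop handler can reach it.
File outFile = new File(getFilesDir(), "recording.aac"); // assumed output location
try {
    FileOutputStream out = new FileOutputStream(outFile);
    recordingThread = new Thread(new AudioRecordThread(out,
            new AudioRecordThread.OnRecorderFailedListener() {
                @Override public void onRecorderStarted() { /* e.g. update the UI */ }
                @Override public void onRecorderFailed() { /* e.g. show an error */ }
            }), "AudioRecordThread");
    recordingThread.start();
} catch (IOException e) {
    e.printStackTrace();
}
// Stop (e.g. in the stop-button handler): interrupting the thread makes run() exit its loop,
// stop and release the codec and AudioRecord, and close the output stream.
recordingThread.interrupt();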
I would add this as a comment but I don't yet have enough Stackoverflow rep points...
Opiatefuchs's link takes you to sample code that shows you the exact header formatting necessary to create a .wav file. I've been all over that code myself. Very helpful.
First, you need to know that a wav file has its own format -- a header -- so you can't just write the raw data to the .wav file.
Second, the wav file header includes the length of the file, so you need to write the header after recording.
My solution is to use AudioRecorder to record a pcm file:
byte[] audiodata = new byte[bufferSizeInBytes];
FileOutputStream fos = null;
int readsize = 0;
try {
fos = new FileOutputStream(pcmFileName, true);
} catch (FileNotFoundException e) {
Log.e("AudioRecorder", e.getMessage());
}
status = Status.STATUS_START;
while (status == Status.STATUS_START && audioRecord != null) {
readsize = audioRecord.read(audiodata, 0, bufferSizeInBytes);
if (AudioRecord.ERROR_INVALID_OPERATION != readsize && fos != null) {
try {
if (readsize > 0 && readsize <= audiodata.length)
fos.write(audiodata, 0, readsize);
} catch (IOException e) {
Log.e("AudioRecorder", e.getMessage());
}
}
}
try {
if (fos != null) {
fos.close();
}
} catch (IOException e) {
Log.e("AudioRecorder", e.getMessage());
}
Then convert it to a wav file:
byte buffer[] = null;
int TOTAL_SIZE = 0;
File file = new File(pcmPath);
if (!file.exists()) {
return false;
}
TOTAL_SIZE = (int) file.length();
WaveHeader header = new WaveHeader();
header.fileLength = TOTAL_SIZE + (44 - 8);
header.FmtHdrLeth = 16;
header.BitsPerSample = 16;
header.Channels = 1;
header.FormatTag = 0x0001;
header.SamplesPerSec = 8000;
header.BlockAlign = (short) (header.Channels * header.BitsPerSample / 8);
header.AvgBytesPerSec = header.BlockAlign * header.SamplesPerSec;
header.DataHdrLeth = TOTAL_SIZE;
byte[] h = null;
try {
h = header.getHeader();
} catch (IOException e1) {
Log.e("PcmToWav", e1.getMessage());
return false;
}
if (h.length != 44)
return false;
File destfile = new File(destinationPath);
if (destfile.exists())
destfile.delete();
try {
buffer = new byte[1024 * 4]; // 4 KB copy buffer
InputStream inStream = null;
OutputStream ouStream = null;
ouStream = new BufferedOutputStream(new FileOutputStream(
destinationPath));
ouStream.write(h, 0, h.length);
inStream = new BufferedInputStream(new FileInputStream(file));
int size = inStream.read(buffer);
while (size != -1) {
ouStream.write(buffer, 0, size); // write only the bytes actually read
size = inStream.read(buffer);
}
inStream.close();
ouStream.close();
} catch (FileNotFoundException e) {
Log.e("PcmToWav", e.getMessage());
return false;
} catch (IOException ioe) {
Log.e("PcmToWav", ioe.getMessage());
return false;
}
if (deletePcmFile) {
file.delete();
}
Log.i("PcmToWav", "makePCMFileToWAVFile success!" + new SimpleDateFormat("yyyy-MM-dd hh:mm").format(new Date()));
return true;
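The WaveHeader class used above is not shown in the answer. As a reference point, a minimal sketch of the standard 44-byte little-endian PCM WAV header it has to produce (method and field names here are assumptions) could look like this:
// Builds a standard 44-byte PCM WAV header for the given format and raw data length.
private static byte[] buildWavHeader(int sampleRate, short channels, short bitsPerSample, int dataLength) {
    short blockAlign = (short) (channels * bitsPerSample / 8);
    int byteRate = sampleRate * blockAlign;
    ByteBuffer b = ByteBuffer.allocate(44).order(ByteOrder.LITTLE_ENDIAN);
    b.put("RIFF".getBytes(StandardCharsets.US_ASCII));
    b.putInt(36 + dataLength);                        // overall file length minus the first 8 bytes
    b.put("WAVE".getBytes(StandardCharsets.US_ASCII));
    b.put("fmt ".getBytes(StandardCharsets.US_ASCII));
    b.putInt(16);                                     // fmt chunk size for PCM
    b.putShort((short) 1);                            // audio format tag: 1 = PCM
    b.putShort(channels);
    b.putInt(sampleRate);
    b.putInt(byteRate);
    b.putShort(blockAlign);
    b.putShort(bitsPerSample);
    b.put("data".getBytes(StandardCharsets.US_ASCII));
    b.putInt(dataLength);
    return b.array();
}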
