Stream G711 ulaw on Android with AudioTrack

I am trying to stream live audio from an Axis network security camera over a multipart HTTP stream, encoded as G.711 µ-law, 8 kHz, 8-bit samples, on an Android phone. It seems like this should be fairly straightforward, and the code below is the basis of my attempt. I reused some streaming code I had that grabbed JPEG frames from an MJPEG stream; it now grabs 512-byte blocks of audio data and hands them to the AudioTrack. The audio sounds all garbled and distorted, though. Am I missing something obvious?
@Override
public void onResume() {
    super.onResume();
    int bufferSize = AudioTrack.getMinBufferSize(8000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_8BIT);
    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 8000, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_8BIT, bufferSize, AudioTrack.MODE_STREAM);
    mAudioTrack.play();
    thread.start();
}

class StreamThread extends Thread {
    public boolean running = true;

    public void run() {
        try {
            MjpegStreamer streamer = MjpegStreamer.read("/axis-cgi/audio/receive.cgi?httptype=multipart");
            while (running) {
                byte[] buf = streamer.readMjpegFrame();
                if (buf != null && mAudioTrack != null) {
                    mAudioTrack.write(buf, 0, buf.length);
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
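The garbling is most likely a format mismatch: G.711 µ-law is a companded codec, while ENCODING_PCM_8BIT expects unsigned 8-bit linear PCM, so the track plays the compressed bytes as if they were raw samples. One fix is to decode each µ-law byte to a 16-bit linear sample and create the AudioTrack with ENCODING_PCM_16BIT instead. A minimal sketch of the standard G.711 µ-law decoder (the constants are the classic ones from the reference g711.c, nothing Android-specific):

// Decode one G.711 mu-law byte to a 16-bit linear PCM sample.
static short ulawToLinear(byte ulaw) {
    int u = ~ulaw & 0xFF;                 // mu-law bytes are stored bit-inverted
    int sign = u & 0x80;
    int exponent = (u >> 4) & 0x07;
    int mantissa = u & 0x0F;
    int magnitude = (((mantissa << 3) + 0x84) << exponent) - 0x84; // 0x84 is the mu-law bias
    return (short) (sign != 0 ? -magnitude : magnitude);
}

// In the stream loop, decode each block before writing; the AudioTrack
// must be created with AudioFormat.ENCODING_PCM_16BIT for this to work.
short[] pcm = new short[buf.length];
for (int i = 0; i < buf.length; i++) {
    pcm[i] = ulawToLinear(buf[i]);
}
mAudioTrack.write(pcm, 0, pcm.length);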

Related

How to write Short[] to wav output file in Android?

I am trying to write a short[] to a WAV audio file using a file output stream, but the file contains only a scratchy sound.
The reason I am using short[] rather than byte[] is that I am trying to use an external library which provides Voice Activity Detection. I did add the WAV header from "Android Audio Record to wav", and I tried to convert the short[] to a byte[] using "Converting Short array from Audio Record to Byte array without degrading audio quality?", but neither of those links was able to help me.
Here is my code:
private class ProcessVoice implements Runnable {

    @Override
    public void run() {
        File fl = new File(filePath, AUDIO_RECORDING_FILE_NAME);
        try {
            os = new BufferedOutputStream(new FileOutputStream(fl));
        } catch (FileNotFoundException e) {
            Log.w(TAG, "File not found for recording ");
        }
        android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_AUDIO);

        while (!Thread.interrupted() && isListening && audioRecord != null) {
            short[] buffer = new short[vad.getConfig().getFrameSize().getValue() * getNumberOfChannels() * 2];
            audioRecord.read(buffer, 0, buffer.length);
            isSpeechDetected(buffer);
        }
    }

    private void isSpeechDetected(final short[] buffer) {
        vad.isContinuousSpeech(buffer, new VadListener() {
            @Override
            public void onSpeechDetected() {
                callback.onSpeechDetected();
                bytes2 = new byte[buffer.length * 2];
                ByteBuffer.wrap(bytes2).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().put(buffer);
                //Log.w(TAG, String.valueOf(buffer));
                try {
                    // writes the data to file from buffer
                    // stores the voice buffer
                    os.write(header, 0, 44);
                    working = true;
                    os.write(bytes2, 0, bytes2.length);
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }

            @Override
            public void onNoiseDetected() {
                callback.onNoiseDetected();
                if (working == true) {
                    working = false;
                    try {
                        doneRec();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
                //Log.w(TAG, String.valueOf(bytes2));
            }
        });
    }
}
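One thing stands out: the 44-byte WAV header is written inside onSpeechDetected, so every detected speech buffer is preceded by another header, and headers embedded in the sample data play back as scratchy noise. A WAV file needs exactly one header at the start, with its two size fields patched once the data length is known. A minimal sketch of that pattern, assuming 16-bit PCM; writeWavHeader and patchWavSizes are illustrative helpers, not library calls:

import java.io.IOException;
import java.io.RandomAccessFile;

// Write a 44-byte PCM WAV header with placeholder sizes. RandomAccessFile
// writes big-endian, so reverseBytes produces the little-endian fields
// that the WAV format requires.
static void writeWavHeader(RandomAccessFile out, int sampleRate, int channels) throws IOException {
    int byteRate = sampleRate * channels * 2;            // 16-bit = 2 bytes per sample
    out.writeBytes("RIFF");
    out.writeInt(0);                                     // placeholder: RIFF chunk size
    out.writeBytes("WAVE");
    out.writeBytes("fmt ");
    out.writeInt(Integer.reverseBytes(16));              // fmt chunk size
    out.writeShort(Short.reverseBytes((short) 1));       // audio format: PCM
    out.writeShort(Short.reverseBytes((short) channels));
    out.writeInt(Integer.reverseBytes(sampleRate));
    out.writeInt(Integer.reverseBytes(byteRate));
    out.writeShort(Short.reverseBytes((short) (channels * 2))); // block align
    out.writeShort(Short.reverseBytes((short) 16));      // bits per sample
    out.writeBytes("data");
    out.writeInt(0);                                     // placeholder: data chunk size
}

// Call once after the last sample has been written (e.g. from doneRec()).
static void patchWavSizes(RandomAccessFile out) throws IOException {
    int dataSize = (int) (out.length() - 44);
    out.seek(4);
    out.writeInt(Integer.reverseBytes(36 + dataSize));
    out.seek(40);
    out.writeInt(Integer.reverseBytes(dataSize));
}

With this layout you would open the file as new RandomAccessFile(fl, "rw"), write the header once before the loop starts, append each converted bytes2 buffer with a plain write, and patch the sizes when recording finishes.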

Cracking noise when streaming audio in Android

I am trying to create an app to stream audio from a local file on one device to a different device, using the Nearby Connections API.
The problem is that I manage to stream the audio, but the only thing I can hear on the remote device is some sort of nonsense crackling noise.
What I've read so far is that I need to adjust the minBufferSize value I'm using and the sampleRate value, but I've been trying this and haven't achieved much.
This is my code to send the byte chunks:
AudioTrack speaker;

//Audio configuration.
private int sampleRate = 16000; //How much will be ideal?
private int channelConfig = AudioFormat.CHANNEL_OUT_MONO;
private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;

protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    minBufSize = 2048;
    speaker = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig, audioFormat,
            10 * minBufSize, AudioTrack.MODE_STREAM);
}

final InputStream file;
final byte[] arrayStream = new byte[minBufSize];
try {
    file = new FileInputStream(filepath);
    bytesRead = file.read(arrayStream);
    while (bytesRead != -1) {
        new Thread(new Runnable() {
            public void run() {
                sendMessage(arrayStream);
            }
        }).start();
        bytesRead = file.read(arrayStream);
    }
    Toast.makeText(this, "Mensaje totalmente completado", Toast.LENGTH_SHORT).show();
} catch (FileNotFoundException e) {
    e.printStackTrace();
} catch (IOException e) {
    e.printStackTrace();
}

private void sendMessage(byte[] payload) {
    Nearby.Connections.sendReliableMessage(mGoogleApiClient, mOtherEndpointId, payload);
    //mMessageText.setText(null);
}
And this is the code to receive and playback the message on the remote device:
@Override
public void onMessageReceived(String endpointId, byte[] payload, boolean isReliable) {
    // A message has been received from a remote endpoint.
    Toast.makeText(this, "Mensaje recibido", Toast.LENGTH_SHORT).show();
    debugLog("onMessageReceived:" + endpointId + ":" + new String(payload));
    playMp3(payload);
}

private void playMp3(final byte[] mp3SoundByteArray) {
    if (isPlaying == false) {
        speaker.play();
        isPlaying = true;
    } else {
        //sending data to the AudioTrack obj i.e. speaker
        speaker.write(mp3SoundByteArray, 0, minBufSize);
        Log.d("VR", "Writing buffer content to speaker");
    }
}
Can anyone help me with this??
Thanks!
You need to buffer some audio on the client before trying to play it. Most likely you play each chunk as soon as it arrives, and the next chunk has not arrived by the time the first one finishes.
See this post and this post about buffering data and streaming audio.
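A minimal sketch of that buffering, assuming speaker is the AudioTrack from the question; received chunks are queued, and a dedicated playback thread waits for a small backlog before it starts draining:

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

private final BlockingQueue<byte[]> chunks = new LinkedBlockingQueue<byte[]>();
private static final int PREBUFFER_CHUNKS = 8; // tune for your network's jitter

@Override
public void onMessageReceived(String endpointId, byte[] payload, boolean isReliable) {
    chunks.offer(payload); // never block the callback; just enqueue
}

private void startPlayback() {
    new Thread(new Runnable() {
        public void run() {
            try {
                // Wait until enough audio is queued to ride out arrival gaps.
                while (chunks.size() < PREBUFFER_CHUNKS) {
                    Thread.sleep(20);
                }
                speaker.play();
                while (!Thread.interrupted()) {
                    byte[] chunk = chunks.take();          // blocks until data arrives
                    speaker.write(chunk, 0, chunk.length); // write the chunk's real length
                }
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }
    }).start();
}

Two smaller problems in the posted code would still bite: playMp3 drops the very first chunk (the first call only starts playback), and writing minBufSize bytes regardless of payload length plays garbage whenever a chunk is shorter. The sender also reuses the same arrayStream buffer across threads, so a chunk can be overwritten before its thread sends it; copying the buffer before handing it off avoids that.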

Getting only white noise when playing sound by Audio Track Class

I want to play an mp3 with mono and stereo effects. Currently I am working on the stereo effect. I have read all the documents, but I am getting only white noise.
My code is:
public class AudioTest extends Activity {
    byte[] b;

    public void onCreate(Bundle savedInstanceState) {
        AndroidAudioDevice device = new AndroidAudioDevice();
        super.onCreate(savedInstanceState);
        File f = new File("/sdcard/lepord.mp3");
        try {
            FileInputStream in = new FileInputStream(f);
            int size = in.available();
            b = new byte[size];
            in.read(b);
            in.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
        while (true) {
            device.writeSamples(b);
            try {
                Thread.sleep(2000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }
}
and my AndroidAudioDevice class is:
public class AndroidAudioDevice {
    AudioTrack track;
    byte[] buffer = new byte[158616];

    @SuppressWarnings("deprecation")
    public AndroidAudioDevice() {
        int minSize = AudioTrack.getMinBufferSize(44100, AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
        track = new AudioTrack(AudioManager.STREAM_MUSIC, 44100,
                AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT,
                158616, AudioTrack.MODE_STREAM);
        Log.e("", "size we are using for track buffer is 158616");
        track.setStereoVolume(.6f, .6f);
        track.play();
    }

    public void writeSamples(byte[] b) {
        Log.e("", "bytes to be written to track is " + b.length);
        fillBuffer(b);
        track.write(buffer, 0, b.length);
    }

    private void fillBuffer(byte[] samples) {
        Log.e("", "track buffer length=" + buffer.length + " sample length=" + samples.length);
        if (buffer.length < samples.length)
            buffer = new byte[samples.length];
        for (int i = 0; i < samples.length; i++)
            buffer[i] = (byte) (samples[i] * Byte.MAX_VALUE);
    }
}
First, I am not getting any sound other than white noise. I just want to play a sound through AudioTrack, and then I will work on the mono and stereo effects.
Please help me.
Thanks in advance.
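Two things in the posted code would each produce white noise on their own. First, AudioTrack consumes raw PCM only, and the bytes of an MP3 file are compressed data, so writing them unmodified yields noise; the MP3 has to be decoded first (for example with MediaCodec, or simply played via MediaPlayer). Second, fillBuffer multiplies every byte by Byte.MAX_VALUE, which overflows and would garble even valid PCM; samples should be copied through unchanged. A minimal sketch, not the poster's code, that verifies the AudioTrack path itself with known-good PCM (a generated 440 Hz tone):

// Generate two seconds of a 440 Hz sine tone as 16-bit mono PCM.
int sampleRate = 44100;
short[] tone = new short[sampleRate * 2];
for (int i = 0; i < tone.length; i++) {
    tone[i] = (short) (Math.sin(2 * Math.PI * 440 * i / sampleRate) * Short.MAX_VALUE * 0.5);
}

int minSize = AudioTrack.getMinBufferSize(sampleRate,
        AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
        AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
        Math.max(minSize, tone.length * 2), AudioTrack.MODE_STREAM);
track.play();
track.write(tone, 0, tone.length); // if this is clean, the playback path is fine

If the tone plays cleanly, the remaining work is decoding the MP3 to PCM before writing it to the track.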

Android record and playback

I am currently trying to build an amplifier for Android. The goal is to record and play back what is being recorded simultaneously. I created a thread to take care of this; however, the sound comes out choppy. Here is what I tried.
private class RecordAndPlay extends Thread {
    int bufferSize;
    AudioRecord aRecord;
    short[] buffer;

    public RecordAndPlay() {
        bufferSize = AudioRecord.getMinBufferSize(22050, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        buffer = new short[bufferSize];
    }

    @Override
    public void run() {
        aRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, 22050, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
        try {
            aRecord.startRecording();
        } catch (Exception e) {
        }
        int bufferedResult = aRecord.read(buffer, 0, bufferSize);
        final AudioTrack aTrack = new AudioTrack(AudioManager.STREAM_MUSIC, samplingRate, AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferedResult, AudioTrack.MODE_STREAM);
        aTrack.setNotificationMarkerPosition(bufferedResult);
        aTrack.setPlaybackPositionUpdateListener(new OnPlaybackPositionUpdateListener() {
            @Override
            public void onPeriodicNotification(AudioTrack track) {
            }

            @Override
            public void onMarkerReached(AudioTrack track) {
                Log.d("Marker reached", "...");
                aTrack.release();
                aRecord.release();
                run();
            }
        });
        aTrack.play();
        aTrack.write(buffer, 0, buffer.length);
    }

    public void cancel() {
        aRecord.stop();
        aRecord.release();
    }
}
Your playback is choppy because the AudioTrack is getting starved and not getting data smoothly. In your code you are recursively calling run and creating a new AudioTrack per marker. Instead, instantiate AudioRecord and AudioTrack only once and just handle their events. Also, to help smooth out playback you should probably start recording slightly before playback and maintain a queue of the recorded buffers. You can then manage passing these buffers to the AudioTrack and make sure there is always a new buffer to submit on each marker event.
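A minimal sketch of that structure, assuming 16-bit mono at 22050 Hz: one AudioRecord and one AudioTrack created once, with a queue of recorded buffers between a recording thread and a playback thread:

import java.util.Arrays;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

void startLoopback() {
    final int rate = 22050;
    final int bufSize = AudioRecord.getMinBufferSize(rate,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    final AudioRecord rec = new AudioRecord(MediaRecorder.AudioSource.MIC, rate,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufSize);
    final AudioTrack out = new AudioTrack(AudioManager.STREAM_MUSIC, rate,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
            bufSize * 4, AudioTrack.MODE_STREAM); // extra headroom smooths playback
    final BlockingQueue<short[]> queue = new LinkedBlockingQueue<short[]>();

    rec.startRecording();
    out.play();

    new Thread(new Runnable() { // recording thread: read and enqueue
        public void run() {
            short[] buf = new short[bufSize / 2]; // bufSize is in bytes, shorts are 2 bytes
            while (!Thread.interrupted()) {
                int n = rec.read(buf, 0, buf.length);
                if (n > 0) {
                    queue.offer(Arrays.copyOf(buf, n)); // copy so the buffer can be reused
                }
            }
        }
    }).start();

    new Thread(new Runnable() { // playback thread: dequeue and write
        public void run() {
            try {
                while (!Thread.interrupted()) {
                    short[] buf = queue.take(); // blocks until audio is available
                    out.write(buf, 0, buf.length);
                }
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }
    }).start();
}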

Recording with AudioRecord on Android speeds up the audio?

I am using AudioRecord to record raw audio for processing.
The audio records entirely without any noise, but when the raw PCM data generated is played back, it plays as if it has been sped up a lot (up to about twice the speed).
I am viewing and playing the PCM data in Audacity. I am using an actual phone (Samsung Galaxy S5670) for testing.
The recording is done at 44100 Hz, 16-bit. Any idea what might cause this?
Following is the recording code:
public class TestApp extends Activity {
    File file;
    OutputStream os;
    BufferedOutputStream bos;
    AudioRecord recorder;
    int iAudioBufferSize;
    boolean bRecording;
    int iBytesRead;

    Thread recordThread = new Thread() {
        @Override
        public void run() {
            byte[] buffer = new byte[iAudioBufferSize];
            int iBufferReadResult;
            iBytesRead = 0;
            while (!interrupted()) {
                iBufferReadResult = recorder.read(buffer, 0, iAudioBufferSize);
                // Android may read fewer bytes than requested.
                if (iAudioBufferSize > iBufferReadResult) {
                    iBufferReadResult = iBufferReadResult +
                            recorder.read(buffer, iBufferReadResult - 1, iAudioBufferSize - iBufferReadResult);
                }
                iBytesRead = iBytesRead + iBufferReadResult;
                for (int i = 0; i < iBufferReadResult; i++) {
                    try {
                        bos.write(buffer[i]);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
        }
    };

    @Override
    public void onCreate(Bundle savedInstanceState) {
        // File creation and UI init stuff etc.
        bRecording = false;
        bPlaying = false;
        int iSampleRate = AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_SYSTEM);
        iAudioBufferSize = AudioRecord.getMinBufferSize(iSampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        recorder = new AudioRecord(AudioSource.MIC, iSampleRate, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT, iAudioBufferSize);

        bt_Record.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                if (!bRecording) {
                    try {
                        recorder.startRecording();
                        bRecording = true;
                        recordThread.start();
                    } catch (Exception e) {
                        tv_Error.setText(e.getLocalizedMessage());
                    }
                } else {
                    recorder.stop();
                    bRecording = false;
                    recordThread.interrupt();
                    try {
                        bos.close();
                    } catch (IOException e) {
                    }
                    tv_Hello.setText("Recorded successfully. Total " + iBytesRead + " bytes.");
                }
            }
        });
    }
}
RESOLVED: I posted this after struggling with it for a day or two but, ironically, I found the solution soon after posting. The BufferedOutputStream write was taking too much time in the for loop, so the stream was skipping samples. I changed it to a block write, removing the for loop, and it works perfectly.
The audio skipping was caused by the delay of the per-byte writes to the stream.
The solution is to replace this for loop:
for (int i = 0; i < iBufferReadResult; i++) {
    try {
        bos.write(buffer[i]);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
with a single block write, like so:
bos.write(buffer, 0, iBufferReadResult);
I had used the code from a book which worked, I guess, for lower sample rates and buffer updates.
