Get frequency of wav audio using FFT and Complex class - Android

It's been asked a lot, but I'm still stuck on implementing an FFT class on Android.
I need to process my audio data using FFT...
I already read the almost identical question here: How can I get frequency data from PCM using FFT
and here: How to get frequency from fft result?
and more questions, but I still found no answer, even after trying the answers given...
FFT Class I'm using:
http://www.cs.princeton.edu/introcs/97data/FFT.java
The complex class to go with it: http://introcs.cs.princeton.edu/java/97data/Complex.java.html
Here's my code
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.view.View;
import android.widget.Button;
public class Latihan extends Activity{
private static final int RECORDER_BPP = 16;
private static final String AUDIO_RECORDER_FILE_EXT_WAV = ".wav";
private static final String AUDIO_RECORDER_FOLDER = "AudioRecorder";
private static final String AUDIO_RECORDER_TEMP_FILE = "record_temp.raw";
private static final int RECORDER_SAMPLERATE = 44100;
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_STEREO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
short[] audioData;
private AudioRecord recorder = null;
private int bufferSize = 0;
private Thread recordingThread = null;
private boolean isRecording = false;
Complex[] fftTempArray;
Complex[] fftArray;
int[] bufferData;
int bytesRecorded;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.p1);
setButtonHandlers();
enableButtons(false);
bufferSize = AudioRecord.getMinBufferSize
(RECORDER_SAMPLERATE,RECORDER_CHANNELS,RECORDER_AUDIO_ENCODING)*3;
audioData = new short [bufferSize]; //short array that pcm data is put into.
}
private void setButtonHandlers() {
((Button)findViewById(R.id.btStart)).setOnClickListener(btnClick);
((Button)findViewById(R.id.btStop)).setOnClickListener(btnClick);
}
private void enableButton(int id,boolean isEnable){
((Button)findViewById(id)).setEnabled(isEnable);
}
private void enableButtons(boolean isRecording) {
enableButton(R.id.btStart,!isRecording);
enableButton(R.id.btStop,isRecording);
}
private String getFilename(){
String filepath = Environment.getExternalStorageDirectory().getPath();
File file = new File(filepath,AUDIO_RECORDER_FOLDER);
if(!file.exists()){
file.mkdirs();
}
return (file.getAbsolutePath() + "/" + System.currentTimeMillis() + AUDIO_RECORDER_FILE_EXT_WAV);
}
public void convert(){
}
public void calculate(){
Complex[] fftTempArray = new Complex[bufferSize];
for (int i=0; i<bufferSize; i++)
{
fftTempArray[i] = new Complex(audioData[i], 0);
}
Complex[] fftArray = FFT.fft(fftTempArray);
double[] micBufferData = new double[bufferSize];
final int bytesPerSample = 2;
final double amplification = 100.0;
for (int index = 0, floatIndex = 0; index < bytesRecorded - bytesPerSample + 1; index += bytesPerSample, floatIndex++) {
double sample = 0;
for (int b = 0; b < bytesPerSample; b++) {
int v = bufferData[index + b];
if (b < bytesPerSample - 1 || bytesPerSample == 1) {
v &= 0xFF;
}
sample += v << (b * 8);
}
double sample32 = amplification * (sample / 32768.0);
micBufferData[floatIndex] = sample32;
}
}
private String getTempFilename(){
String filepath = Environment.getExternalStorageDirectory().getPath();
File file = new File(filepath,AUDIO_RECORDER_FOLDER);
if(!file.exists()){
file.mkdirs();
}
File tempFile = new File(filepath,AUDIO_RECORDER_TEMP_FILE);
if(tempFile.exists())
tempFile.delete();
return (file.getAbsolutePath() + "/" + AUDIO_RECORDER_TEMP_FILE);
}
private void startRecording(){
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
RECORDER_SAMPLERATE, RECORDER_CHANNELS,RECORDER_AUDIO_ENCODING, bufferSize);
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
},"AudioRecorder Thread");
recordingThread.start();
}
private void writeAudioDataToFile(){
byte data[] = new byte[bufferSize];
String filename = getTempFilename();
FileOutputStream os = null;
try {
os = new FileOutputStream(filename);
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
int read = 0;
if(null != os){
while(isRecording){
read = recorder.read(data, 0, bufferSize);
if(AudioRecord.ERROR_INVALID_OPERATION != read){
try {
os.write(data);
} catch (IOException e) {
e.printStackTrace();
}
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
private void stopRecording(){
if(null != recorder){
isRecording = false;
recorder.stop();
recorder.release();
recorder = null;
recordingThread = null;
}
copyWaveFile(getTempFilename(),getFilename());
// deleteTempFile();
}
private void deleteTempFile() {
File file = new File(getTempFilename());
file.delete();
}
private void copyWaveFile(String inFilename,String outFilename){
FileInputStream in = null;
FileOutputStream out = null;
long totalAudioLen = 0;
long totalDataLen = totalAudioLen + 36;
long longSampleRate = RECORDER_SAMPLERATE;
int channels = 2;
long byteRate = RECORDER_BPP * RECORDER_SAMPLERATE * channels/8;
byte[] data = new byte[bufferSize];
try {
in = new FileInputStream(inFilename);
out = new FileOutputStream(outFilename);
totalAudioLen = in.getChannel().size();
totalDataLen = totalAudioLen + 36;
AppLog.logString("File size: " + totalDataLen);
WriteWaveFileHeader(out, totalAudioLen, totalDataLen,
longSampleRate, channels, byteRate);
while(in.read(data) != -1){
out.write(data);
}
in.close();
out.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
private void WriteWaveFileHeader(
FileOutputStream out, long totalAudioLen,
long totalDataLen, long longSampleRate, int channels,
long byteRate) throws IOException {
// header-writing code omitted
}
private View.OnClickListener btnClick = new View.OnClickListener() {
public void onClick(View v) {
switch(v.getId()){
case R.id.btStart:{
AppLog.logString("Start Recording");
enableButtons(true);
startRecording();
break;
}
case R.id.btStop:{
AppLog.logString("Stop Recording");
enableButtons(false);
stopRecording();
calculate();
break;
}
}
}
};
}
I assume the audioData array contains the raw audio data, but my code catches an exception with the message "N is not a power of 2".
Is there anything wrong with my code?
How do I pass it to the FFT.java class and get the fftResult?
Or is there another, easier way to convert time-domain data to frequency-domain data?
I've been stuck on this for a few months... My project is to compare 2 *.wav audio files.
Any help would be appreciated... :)

I already found the answer... :)
I created a method to calculate the FFT values from the audio...
public double[] calculateFFT(byte[] signal)
{
final int mNumberOfFFTPoints =1024;
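// NOTE: signal must hold at least 2 * mNumberOfFFTPoints bytes of 16-bit PCM,
// or the indexing below (signal[2*i+1]) will run past the end of the array.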
double mMaxFFTSample;
double temp;
Complex[] y;
Complex[] complexSignal = new Complex[mNumberOfFFTPoints];
double[] absSignal = new double[mNumberOfFFTPoints/2];
for(int i = 0; i < mNumberOfFFTPoints; i++){
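// combine each little-endian byte pair into one signed 16-bit sample, scaled to [-1, 1)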
temp = (double)((signal[2*i] & 0xFF) | (signal[2*i+1] << 8)) / 32768.0F;
complexSignal[i] = new Complex(temp,0.0);
}
y = FFT.fft(complexSignal); // --> Here I use FFT class
mMaxFFTSample = 0.0;
mPeakPos = 0;
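// NOTE: mPeakPos is assumed to be a field of the enclosing class; it records the index of the strongest bin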
for(int i = 0; i < (mNumberOfFFTPoints/2); i++)
{
absSignal[i] = Math.sqrt(Math.pow(y[i].re(), 2) + Math.pow(y[i].im(), 2));
if(absSignal[i] > mMaxFFTSample)
{
mMaxFFTSample = absSignal[i];
mPeakPos = i;
}
}
return absSignal;
}
Then I called it in the writeAudioDataToFile() method:
private void writeAudioDataToFile(){
byte data[] = new byte[bufferSize];
String filename = getTempFilename();
FileOutputStream os = null;
try {
os = new FileOutputStream(filename);
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
int read = 0;
if(null != os){
while(isRecording){
read = recorder.read(data, 0, bufferSize);
if(read > 0){
absNormalizedSignal = calculateFFT(data); // --> HERE ^__^
}
if(AudioRecord.ERROR_INVALID_OPERATION != read){
try {
os.write(data);
} catch (IOException e) {
e.printStackTrace();
}
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
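If you want the dominant frequency in Hz rather than a bin index, the peak position maps linearly onto the sample rate. A minimal sketch (not from the original answer, assuming the 1024-point FFT above and the question's 44100 Hz sample rate):
// Convert the peak bin index (mPeakPos) into an approximate frequency in Hz.
// Bin i of an N-point FFT is centred at i * sampleRate / N.
private double binToFrequency(int binIndex, int sampleRate, int numberOfFFTPoints) {
    return (double) binIndex * sampleRate / numberOfFFTPoints;
}
Note that bins above N/2 mirror the ones below, which is why absSignal only keeps the first half.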

It sounds like your immediate problem is "N is not a power of 2." In this case, N is probably referring to the size of the data you are putting into your FFT. Most FFT algorithms only work on blocks of data that have a size that is a power of 2.
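One common fix (a sketch of mine, not from the original answer) is to zero-pad each block up to the next power of two before calling FFT.fft, since the Princeton implementation only accepts power-of-two lengths:
// Hypothetical helper: pad a Complex[] with zeros up to the next power of two
// so that FFT.fft(), which throws "N is not a power of 2" otherwise, accepts it.
public static Complex[] padToPowerOfTwo(Complex[] input) {
    int n = 1;
    while (n < input.length) {
        n <<= 1; // smallest power of two >= input.length
    }
    Complex[] padded = new Complex[n];
    for (int i = 0; i < n; i++) {
        padded[i] = (i < input.length) ? input[i] : new Complex(0, 0);
    }
    return padded;
}
Alternatively, truncate the buffer to the largest power of two below its length; for finding a spectral peak either approach is usually fine.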
Are you trying to put the entire file into an FFT at once? If so, you may need to read more background material to understand what you are doing. Maybe start here: http://blog.bjornroche.com/2012/07/frequency-detection-using-fft-aka-pitch.html

Related

how to save changed pitch audio file to sd card android

In my Android application I want to change the pitch of an audio file and save it to the SD card.
I am using the Sonic NDK (link) for changing the pitch of my audio file, but I am unable to save the changed audio file.
Is there any way to save this pitch-changed audio file? Thanks in advance.
Below is the code that I am using for my audio modification and saving.
public class SonicTest extends Activity implements View.OnClickListener {
private Button saveBtn;
private int bufferSize=0;
@Override
public void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
saveBtn = findViewById(R.id.button1);
saveBtn.setOnClickListener(this);
}
public void play(View view)
{
new Thread(new Runnable()
{
public void run()
{
final EditText speedEdit = findViewById(R.id.speed);
final EditText pitchEdit = findViewById(R.id.pitch);
final EditText rateEdit = findViewById(R.id.rate);
float speed = Float.parseFloat(speedEdit.getText().toString());
float pitch = Float.parseFloat(pitchEdit.getText().toString());
float rate = Float.parseFloat(rateEdit.getText().toString());
AndroidAudioDevice device = new AndroidAudioDevice(22050, 1);
Sonic sonic = new Sonic(22050, 1);
byte samples[] = new byte[4096];
byte modifiedSamples[] = new byte[2048];
InputStream soundFile = getResources().openRawResource(R.raw.talking);
// InputStream soundFile = getResources().openRawResource(R.raw.sample);
int bytesRead;
if(soundFile != null) {
sonic.setSpeed(speed);
sonic.setPitch(pitch);
sonic.setRate(rate);
do {
try {
bytesRead = soundFile.read(samples, 0, samples.length);
} catch (IOException e) {
e.printStackTrace();
return;
}
if(bytesRead > 0) {
sonic.putBytes(samples, bytesRead);
} else {
sonic.flush();
}
int available = sonic.availableBytes();
if(available > 0) {
if(modifiedSamples.length < available) {
modifiedSamples = new byte[available*2];
}
sonic.receiveBytes(modifiedSamples, available);
device.writeSamples(modifiedSamples, available);
}
} while(bytesRead > 0);
device.flush();
}
}
} ).start();
}
@Override
public void onClick(View v) {
LinkedList<byte[]>linkedList = AndroidAudioDevice.list;
for (int i=0; i<linkedList.size(); i++){
bufferSize = bufferSize+linkedList.get(i).length;
}
byte[]buffer = new byte[bufferSize];
int k=0;
for (int i=0 ; i< linkedList.size(); i++){
for(int j=0; j<linkedList.get(i).length; j++){
buffer[k]=linkedList.get(i)[j];
k++;
}
Log.v("Buffer array length",""+k);
}
File file = new File(Environment.getExternalStorageDirectory(),"demo.mp3");
FileOutputStream fos = null;
try {
fos = new FileOutputStream(file);
fos.write(buffer);
fos.flush();
fos.close();
} catch (Exception e) {
e.printStackTrace();
}
}
}
package org.vinuxproject.sonic;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.os.Environment;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.LinkedList;
public class AndroidAudioDevice
{
AudioTrack track;
static int bufferLength;
public static LinkedList<byte[]>list= new LinkedList<>();
private int findFormatFromChannels(int numChannels)
{
switch(numChannels) {
case 1: return AudioFormat.CHANNEL_OUT_MONO;
case 2: return AudioFormat.CHANNEL_OUT_STEREO;
default: return -1; // Error
}
}
public AndroidAudioDevice(int sampleRate, int numChannels)
{
int format = findFormatFromChannels(numChannels);
int minSize = AudioTrack.getMinBufferSize(sampleRate, format, AudioFormat.ENCODING_PCM_16BIT);
track = new AudioTrack( AudioManager.STREAM_MUSIC, sampleRate,
format, AudioFormat.ENCODING_PCM_16BIT,
minSize*4, AudioTrack.MODE_STREAM);
track.play();
}
public void flush()
{
track.flush();
}
public void writeSamples(byte[] samples, int length)
{
bufferLength = bufferLength+length;
track.write( samples, 0, length);
list.add(samples);
}
}

Android - How to convert raw data of audio to wav?

I have implemented an audio recording feature in my Android app. The app works fine when built on Linux, but when I build and run the same app on Mac OS X and record audio, it crashes.
private AudioRecordingThread recordingThread;
recordingThread = new AudioRecordingThread(fileName,
new AudioRecordingHandler() {
@Override
public void onFftDataCapture(final byte[] bytes) {
runOnUiThread(new Runnable() {
public void run() {
if (visualizerView != null) {
visualizerView.updateVisualizerFFT(bytes);
}
}
});
}
@Override
public void onRecordSuccess() {
}
@Override
public void onRecordingError() {
runOnUiThread(new Runnable() {
public void run() {
recordStop();
NotificationUtils.showInfoDialog(
ActivityCropImage.this,
"Error in saving");
}
});
}
@Override
public void onRecordSaveError() {
runOnUiThread(new Runnable() {
public void run() {
recordStop();
NotificationUtils.showInfoDialog(
ActivityCropImage.this,
"Error in recording");
}
});
}
});
PcmAudioHelper.convertRawToWav(WavAudioFormat.mono16Bit(SAMPLING_RATE), file_raw, file_wav);
This is the line of code where my app crashed. I have used https://github.com/steelkiwi/AndroidRecording library in my project.
package com.record.util;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Environment;
import android.text.format.Time;
import android.util.Log;
import java.io.BufferedOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
public class RecordWavMaster {
private static final int samplingRates[] = {16000, 11025, 11000, 8000, 6000};
public static int SAMPLE_RATE = 16000;
private AudioRecord mRecorder;
private File mRecording;
private short[] mBuffer;
private String audioFilePath;
private boolean mIsRecording = false;
private String RECORD_WAV_PATH = Environment.getExternalStorageDirectory() + File.separator + "AudioRecord";
/* Initializing AudioRecording MIC */
public RecordWavMaster() {
initRecorder();
}
/* Get Supported Sample Rate */
public static int getValidSampleRates() {
for (int rate : samplingRates) {
int bufferSize = AudioRecord.getMinBufferSize(rate, AudioFormat.CHANNEL_CONFIGURATION_DEFAULT, AudioFormat.ENCODING_PCM_16BIT);
if (bufferSize > 0) {
return rate;
}
}
return SAMPLE_RATE;
}
/* Start AudioRecording */
public void recordWavStart() {
mIsRecording = true;
mRecorder.startRecording();
mRecording = getFile("raw");
startBufferedWrite(mRecording);
}
/* Stop AudioRecording */
public String recordWavStop() {
try {
mIsRecording = false;
mRecorder.stop();
File waveFile = getFile("wav");
rawToWave(mRecording, waveFile);
Log.e("path_audioFilePath",audioFilePath);
return audioFilePath;
} catch (Exception e) {
Log.e("Error saving file : ", e.getMessage());
}
return null;
}
/* Release device MIC */
public void releaseRecord() {
mRecorder.release();
}
/* Initializing AudioRecording MIC */
private void initRecorder() {
SAMPLE_RATE = getValidSampleRates();
int bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT);
mBuffer = new short[bufferSize];
mRecorder = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT, bufferSize);
new File(RECORD_WAV_PATH).mkdir();
}
/* Writing RAW file */
private void startBufferedWrite(final File file) {
new Thread(new Runnable() {
@Override
public void run() {
DataOutputStream output = null;
try {
output = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(file)));
while (mIsRecording) {
double sum = 0;
int readSize = mRecorder.read(mBuffer, 0, mBuffer.length);
for (int i = 0; i < readSize; i++) {
output.writeShort(mBuffer[i]);
sum += mBuffer[i] * mBuffer[i];
}
if (readSize > 0) {
final double amplitude = sum / readSize;
}
}
} catch (IOException e) {
Log.e("Error writing file : ", e.getMessage());
} finally {
if (output != null) {
try {
output.flush();
} catch (IOException e) {
Log.e("Error writing file : ", e.getMessage());
} finally {
try {
output.close();
} catch (IOException e) {
Log.e("Error writing file : ", e.getMessage());
}
}
}
}
}
}).start();
}
/* Converting RAW format To WAV Format*/
private void rawToWave(final File rawFile, final File waveFile) throws IOException {
byte[] rawData = new byte[(int) rawFile.length()];
DataInputStream input = null;
try {
input = new DataInputStream(new FileInputStream(rawFile));
input.read(rawData);
} finally {
if (input != null) {
input.close();
}
}
DataOutputStream output = null;
try {
output = new DataOutputStream(new FileOutputStream(waveFile));
// WAVE header
writeString(output, "RIFF"); // chunk id
writeInt(output, 36 + rawData.length); // chunk size
writeString(output, "WAVE"); // format
writeString(output, "fmt "); // subchunk 1 id
writeInt(output, 16); // subchunk 1 size
writeShort(output, (short) 1); // audio format (1 = PCM)
writeShort(output, (short) 1); // number of channels
writeInt(output, SAMPLE_RATE); // sample rate
writeInt(output, SAMPLE_RATE * 2); // byte rate
writeShort(output, (short) 2); // block align
writeShort(output, (short) 16); // bits per sample
writeString(output, "data"); // subchunk 2 id
writeInt(output, rawData.length); // subchunk 2 size
// Audio data (conversion big endian -> little endian)
short[] shorts = new short[rawData.length / 2];
ByteBuffer.wrap(rawData).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(shorts);
ByteBuffer bytes = ByteBuffer.allocate(shorts.length * 2);
for (short s : shorts) {
bytes.putShort(s);
}
output.write(bytes.array());
} finally {
if (output != null) {
output.close();
rawFile.delete();
}
}
}
/* Get file name */
private File getFile(final String suffix) {
Time time = new Time();
time.setToNow();
audioFilePath = time.format("%Y%m%d%H%M%S");
return new File(RECORD_WAV_PATH, time.format("%Y%m%d%H%M%S") + "." + suffix);
}
private void writeInt(final DataOutputStream output, final int value) throws IOException {
output.write(value >> 0);
output.write(value >> 8);
output.write(value >> 16);
output.write(value >> 24);
}
private void writeShort(final DataOutputStream output, final short value) throws IOException {
output.write(value >> 0);
output.write(value >> 8);
}
private void writeString(final DataOutputStream output, final String value) throws IOException {
for (int i = 0; i < value.length(); i++) {
output.write(value.charAt(i));
}
}
public String getFileName (final String time_suffix) {
return (RECORD_WAV_PATH+time_suffix+ "." + "wav");
}
public Boolean getRecordingState () {
if( mRecorder.getRecordingState() == AudioRecord.RECORDSTATE_STOPPED) {
return false;
}
return true;
}
}
Use the following permissions in the manifest:
<!--Permission record-->
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />

Button.onTouch() not being called

I have a button btnSpeak which I find by ID; I then try to set an OnTouchListener on it.
package com.ctc.android.widget;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetAddress;
import java.net.Socket;
import java.util.ArrayList;
import java.util.List;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaPlayer;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnTouchListener;
import android.widget.Button;
import com.ctc.android.widget.ImageMap.Area;
public class ImageMapTestActivity extends Activity{
/********** Dan's Variables *********/
private static final String LOG_TAG = "AudioRecordTest";
private static String mFileName = null;
private MediaRecorder mRecorder = null;
private MediaPlayer mPlayer = null;
boolean isRecording = false;
boolean isStreaming = false;
private String hostIP = "192.168.0.14";
private static final int IDLE_EMPTY_BUFFER = 0;
private static final int IDLE_BUFFER_READY = 1;
private static final int RECORDING_STATE = 2;
private static final int PREVIEW_STATE = 3;
private static final int PLAYOUT_STATE = 4;
private int currentState = IDLE_EMPTY_BUFFER;
private static InetAddress multicastaddress = null;
private List<String> IPs = new ArrayList<String>();
private ArrayList<InetAddress> SelectedIPs = new ArrayList<InetAddress>();
private boolean listUpdated = false;
/********** ****** *********/
ImageMap mImageMap;
String strCoordinates;
int intSelectedAreaID;
Button btnPlaySiren;
// Button btnSpeak;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
// find the image map in the view
mImageMap = (ImageMap)findViewById(R.id.map);
btnPlaySiren = (Button)findViewById(R.id.btnPlaySiren);
Button btnSpeak = (Button)findViewById(R.id.btnSpeak);
btnSpeak.setOnTouchListener(new OnTouchListener(){
public boolean onTouch(View v, MotionEvent event){
switch(event.getAction())
{
case MotionEvent.ACTION_DOWN:
startStreamingRun();
break;
case MotionEvent.ACTION_UP:
stopStreamingRun();
break;
}
return true;
}
});
// add a click handler to react when areas are tapped
mImageMap.addOnImageMapClickedHandler(new ImageMap.OnImageMapClickedHandler() {
@Override
public void onImageMapClicked(int id) {
// when the area is tapped, show the name in a
// text bubble
intSelectedAreaID = id;
mImageMap.showBubble(id);
// TODO: Change colour of selected Area.
// TODO: Change state of selected Area.
}
@Override
public void onBubbleClicked(int id) {
// react to info bubble for area being tapped
}
});
// TODO: Set bitmap as Area decoration for each area added.
// TODO: Add an onClickListener for each area here.
ArrayList<Area> mAreas = mImageMap.GetAllAreas();
for(Area objArea : mAreas){
Bitmap objBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.remvox_green);
objArea.setBitmap(objBitmap);
Log.v("Area ID: :", Integer.toString(objArea._id)); // 07-30 14:40:05.409: V/Area ID: :(28449): 2131230738
} // End of for(Area...)
} // End of onCreate(Bundle ...)
public void btnPlaySirenClicked(View v){
if(v.getId() == R.id.btnPlaySiren){
Log.v("Siren Button Clicked", "Playing siren");
Thread thread = new Thread(new Runnable() {
public void run() {
playOverSocket(); // To resolve "Network on main thread" error!
}
});
thread.start();
}
}
private void playOverSocket() {
Log.e("AudioRecord", "Top of network play");
/*****
// Get the file we want to playback.
File file = new File("android.resource://com.ctc.android.widget.ImageMapTestActivity/res/raw/british"); // Do not add extension (file type)
File objSirenFile = new File(Environment.getExternalStorageDirectory().getAbsolutePath() + "/reverseme.pcm");
// Get the length of the audio stored in the file (16 bit so 2 bytes per
// short)
// and create a short array to store the recorded audio.
int musicLength = (int) (file.length() / 2);
// short[] music = new short[musicLength];
* *****/
try {
// Create a DataInputStream to read the audio data back from the
// saved file.
InputStream is = getResources().openRawResource(R.raw.british);
//InputStream objSiren = new FileInputStream(file);
BufferedInputStream bis = new BufferedInputStream(is);
DataInputStream dis = new DataInputStream(bis);
Socket s = null;
try {
s = new Socket("192.168.1.101", 6666); // hostIP - Hard coded until implemented.
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
BufferedOutputStream buff = new BufferedOutputStream(
s.getOutputStream()); // out1 is the socket's outputStream
DataOutputStream dataOutputStreamInstance = new DataOutputStream(
buff);
// Read the file into the music array.
short lastByte = dis.readShort();
while(lastByte != -1)
{
dataOutputStreamInstance.writeChar(lastByte);
lastByte = dis.readShort();
}
dataOutputStreamInstance.flush();
dataOutputStreamInstance.close();
buff.flush();
buff.close();
s.close();
// Close the input streams.
dis.close();
} catch (Throwable t) {
Log.e("AudioTrack", "Playback Failed" + t.getMessage());
}
}
public void btnRecordClicked(View v){
//Button btnStopRecordToggle = (Button) findViewById(R.id.btnRecord);
if(v.getId() == R.id.btnRecord){
Log.v("Record Button Clicked", "Recording");
Thread thread = new Thread(new Runnable() {
public void run() {
record();
}
});
isRecording = true;
thread.start();
}
}
private void record() {
Log.v("AudioRecord", "Top of Record");
int frequency = 11025;
int channelConfiguration = AudioFormat.CHANNEL_IN_MONO;
int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
File file = new File(Environment.getExternalStorageDirectory()
.getAbsolutePath() + "/reverseme.pcm");
// Delete any previous recording.
if (file.exists())
file.delete();
// Create the new file.
try {
file.createNewFile();
} catch (IOException e) {
throw new IllegalStateException("Failed to create "
+ file.toString());
}
try {
// Create a DataOuputStream to write the audio data into the saved
// file.
OutputStream os = new FileOutputStream(file);
BufferedOutputStream bos = new BufferedOutputStream(os);
DataOutputStream dos = new DataOutputStream(bos);
// Create a new AudioRecord object to record the audio.
int bufferSize = 8 * 1024;// AudioRecord.getMinBufferSize(frequency,
// channelConfiguration, audioEncoding);
AudioRecord audioRecord = new AudioRecord(
MediaRecorder.AudioSource.MIC, frequency,
channelConfiguration, audioEncoding, bufferSize);
short[] buffer = new short[2048];
audioRecord.startRecording();
while (isRecording) {
int bufferReadResult = audioRecord.read(buffer, 0, 2048);
for (int i = 0; i < bufferReadResult; i++)
dos.writeShort(buffer[i]);
}
audioRecord.stop();
dos.close();
Log.e("AudioRecord", "Recording Finished");
} catch (Throwable t) {
Log.e("AudioRecord", "Recording Failed");
Log.e("AudioRecord Error", t.getLocalizedMessage());
}
}
private void playRecordedOverSocket(InetAddress objAddress) {
Log.e("AudioRecord", "Top of network play recorded");
// Get the file we want to playback.
File file = new File(Environment.getExternalStorageDirectory()
.getAbsolutePath() + "/reverseme.pcm");
// Get the length of the audio stored in the file (16 bit so 2 bytes per
// short)
// and create a short array to store the recorded audio.
int musicLength = (int) (file.length() / 2);
// short[] music = new short[musicLength];
try {
// Create a DataInputStream to read the audio data back from the
// saved file.
InputStream is = new FileInputStream(file);
BufferedInputStream bis = new BufferedInputStream(is);
DataInputStream dis = new DataInputStream(bis);
Socket s = new Socket(objAddress, 6666);
BufferedOutputStream buff = new BufferedOutputStream(
s.getOutputStream()); // out1 is the socket's outputStream
DataOutputStream dataOutputStreamInstance = new DataOutputStream(
buff);
// Read the file into the music array.
for (int i = 0; i < musicLength; i++) {
// music[i] =
dataOutputStreamInstance.writeChar(dis.readShort());
}
dataOutputStreamInstance.flush();
dataOutputStreamInstance.close();
buff.flush();
buff.close();
s.close();
// Close the input streams.
dis.close();
} catch (Throwable t) {
Log.e("AudioTrack", "Playback Failed" + t.getMessage());
}
}
public void btnRecordedMessageClicked(View v)
{
for(final InetAddress oneSpeaker : mImageMap.arrSelectedAddresses)
{
Thread thread = new Thread(new Runnable() {
public void run() {
playRecordedOverSocket(oneSpeaker);
}
});
thread.start();
}
}
public void btnStopClicked(View v)
{
isRecording = false;
}
public void btnSelectAllClicked(View v)
{
for(Area objOneArea : mImageMap.mAreaList)
{
objOneArea.blnIsSelected = false;
objOneArea.touched(SelectedIPs);
}
mImageMap.mBubbleMap.clear();
mImageMap.invalidate();
}
public void btnCallClicked(View v)
{
}
/*
*
* Multi-Cast Streaming implementation
*
*/
private void startStreamingRun() {
Thread thread = new Thread(new Runnable() {
public void run() {
doTestStream();
//doMCastStream();
}
});
// isStreaming = true;
thread.start();
}
private void stopStreamingRun() {
isStreaming = false;
}
private void doTestStream() {
int frequency = 11025;
int channelConfiguration = AudioFormat.CHANNEL_IN_MONO;
int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
try {
// Create a DataOuputStream to write the audio data into the saved
// file.
Socket s = new Socket(hostIP, 6666);
BufferedOutputStream bos = new BufferedOutputStream(
s.getOutputStream()); // out1 is the socket's outputStream
DataOutputStream dos = new DataOutputStream(bos);
// Create a new AudioRecord object to record the audio.
int bufferSize = 8 * 1024;// AudioRecord.getMinBufferSize(frequency,
// channelConfiguration, audioEncoding);
AudioRecord audioRecord = new AudioRecord(
MediaRecorder.AudioSource.MIC, frequency,
channelConfiguration, audioEncoding, bufferSize);
//short[] buffer = new short[bufferSize];
byte[] buffer = new byte[bufferSize];
audioRecord.startRecording();
byte[] MiniBuff = new byte[2];
while (isStreaming) {
int bufferReadResult = audioRecord.read(buffer, 0, bufferSize);
for (int i = 0; i < bufferReadResult; i++) {
MiniBuff[1] = buffer[i];
i++;
MiniBuff[0] = buffer[i];
dos.write(MiniBuff);
}
/*
for (int i = 0; i < bufferReadResult; i++) {
short val=(short)( ((buffer[i+1]&0xFF)<<8) | (buffer[i]&0xFF) );
i++;
dos.writeShort(val);
}
*/
//dos.write(buffer[i]);
dos.flush();
}
dos.flush();
dos.close();
s.close();
// Close the input streams.
audioRecord.stop();
Log.e("AudioRecord", "Streaming Finished");
} catch (Throwable t) {
Log.e("AudioRecord", "Streaming Failed");
Log.e("AudioRecord Error", t.getLocalizedMessage());
}
}
} // End of class
This is done in the onCreate method of the activity, but when the activity runs and the button is touched, the event is never triggered, and I can't see where I am going wrong. Not sure if it's a case of not being able to see the wood for the trees.
I have copied your class, commenting out the parts of the code I don't have (your own classes, your own files...).
I made a test main.xml and everything works. Have you tried a "Project/Clean"? Maybe the ID for your btnSpeak is not up to date.
main.xml:
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical"
android:layout_width="fill_parent"
android:layout_height="fill_parent">
<Button android:id="#+id/btnPlaySiren" android:layout_width="wrap_content" android:layout_height="wrap_content" android:text="Siren" />
<Button android:id="#+id/btnSpeak" android:layout_width="wrap_content" android:layout_height="wrap_content" android:text="Speak" />
<Button android:id="#+id/btnRecord" android:layout_width="wrap_content" android:layout_height="wrap_content" android:text="Record" />
</LinearLayout>

Recording .Wav with Android AudioRecorder

I have read a lot of pages about Android's AudioRecorder. You can see a list of them below the question.
I'm trying to record audio with AudioRecorder, but it's not working well.
public class MainActivity extends Activity {
AudioRecord ar = null;
int buffsize = 0;
int blockSize = 256;
boolean isRecording = false;
private Thread recordingThread = null;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
}
public void baslat(View v)
{
// when click to START
buffsize = AudioRecord.getMinBufferSize(44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
ar = new AudioRecord(MediaRecorder.AudioSource.MIC, 44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, buffsize);
ar.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
public void durdur(View v)
{
// When click to STOP
ar.stop();
isRecording = false;
}
private void writeAudioDataToFile() {
// Write the output audio in byte
String filePath = "/sdcard/voice8K16bitmono.wav";
short sData[] = new short[buffsize/2];
FileOutputStream os = null;
try {
os = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
while (isRecording) {
// gets the voice output from microphone to byte format
ar.read(sData, 0, buffsize/2);
Log.d("eray","Short wirting to file" + sData.toString());
try {
// // writes the data to file from buffer
// // stores the voice buffer
byte bData[] = short2byte(sData);
os.write(bData, 0, buffsize);
} catch (IOException e) {
e.printStackTrace();
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
private byte[] short2byte(short[] sData) {
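// pack each 16-bit sample as two bytes, low byte first (little-endian, the order WAV expects)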
int shortArrsize = sData.length;
byte[] bytes = new byte[shortArrsize * 2];
for (int i = 0; i < shortArrsize; i++) {
bytes[i * 2] = (byte) (sData[i] & 0x00FF);
bytes[(i * 2) + 1] = (byte) (sData[i] >> 8);
sData[i] = 0;
}
return bytes;
}
It creates a .wav file, but when I try to listen to it, it won't open; I get a "file not supported" error. I've tried to play the file with quite a few media player applications.
NOTE: I have to use AudioRecorder instead of MediaRecorder because my app will be doing another process while recording (displaying an equalizer).
Here is the list of pages that I've read about this subject:
http://developer.android.com/reference/android/media/AudioRecord.html#read(short[],%20int,%20int)
Android AudioRecord example
http://audiorecordandroid.blogspot.in
AudioRecord object not initializing
Recording a wav file from the mic in Android - problems
http://i-liger.com/article/android-wav-audio-recording
Creating a WAV file from raw PCM data using the Android SDK
Capturing Sound for Analysis and Visualizing Frequencies in Android
There are a lot of different ways to go about this. I've tried lots of them but nothing works for me. I've been working on this problem for about 6 hours now so I would appreciate a definitive answer, ideally some sample code.
I wrote a simple (by which you should read, not to professional standards) class to do this yesterday, and it works.
private class Wave {
private final int LONGINT = 4;
private final int SMALLINT = 2;
private final int INTEGER = 4;
private final int ID_STRING_SIZE = 4;
private final int WAV_RIFF_SIZE = LONGINT + ID_STRING_SIZE;
private final int WAV_FMT_SIZE = (4 * SMALLINT) + (INTEGER * 2) + LONGINT + ID_STRING_SIZE;
private final int WAV_DATA_SIZE = ID_STRING_SIZE + LONGINT;
private final int WAV_HDR_SIZE = WAV_RIFF_SIZE + ID_STRING_SIZE + WAV_FMT_SIZE + WAV_DATA_SIZE;
private final short PCM = 1;
private final int SAMPLE_SIZE = 2;
int cursor, nSamples;
byte[] output;
public Wave(int sampleRate, short nChannels, short[] data, int start, int end) {
nSamples = end - start + 1;
cursor = 0;
output = new byte[nSamples * SMALLINT + WAV_HDR_SIZE];
buildHeader(sampleRate, nChannels);
writeData(data, start, end);
}
// ------------------------------------------------------------
private void buildHeader(int sampleRate, short nChannels) {
write("RIFF");
write(output.length);
write("WAVE");
writeFormat(sampleRate, nChannels);
}
// ------------------------------------------------------------
public void writeFormat(int sampleRate, short nChannels) {
write("fmt ");
write(WAV_FMT_SIZE - WAV_DATA_SIZE);
write(PCM);
write(nChannels);
write(sampleRate);
write(nChannels * sampleRate * SAMPLE_SIZE);
write((short) (nChannels * SAMPLE_SIZE));
write((short) 16);
}
// ------------------------------------------------------------
public void writeData(short[] data, int start, int end) {
write("data");
write(nSamples * SMALLINT);
for (int i = start; i <= end; write(data[i++])) ;
}
// ------------------------------------------------------------
private void write(byte b) {
output[cursor++] = b;
}
// ------------------------------------------------------------
private void write(String id) {
if (id.length() != ID_STRING_SIZE)
Utils.logError("String " + id + " must have four characters.");
else {
for (int i = 0; i < ID_STRING_SIZE; ++i) write((byte) id.charAt(i));
}
}
// ------------------------------------------------------------
private void write(int i) {
write((byte) (i & 0xFF));
i >>= 8;
write((byte) (i & 0xFF));
i >>= 8;
write((byte) (i & 0xFF));
i >>= 8;
write((byte) (i & 0xFF));
}
// ------------------------------------------------------------
private void write(short i) {
write((byte) (i & 0xFF));
i >>= 8;
write((byte) (i & 0xFF));
}
// ------------------------------------------------------------
public boolean wroteToFile(String filename) {
boolean ok = false;
try {
File path = new File(getFilesDir(), filename);
FileOutputStream outFile = new FileOutputStream(path);
outFile.write(output);
outFile.close();
ok = true;
} catch (FileNotFoundException e) {
e.printStackTrace();
ok = false;
} catch (IOException e) {
ok = false;
e.printStackTrace();
}
return ok;
}
}
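For reference, a hypothetical call site (my addition; the constructor and wroteToFile signatures match the class above, and the sample data here is just a stand-in):
// Wrap already-recorded 16-bit mono samples and save them as a .wav
// in the app's internal storage (wroteToFile uses getFilesDir()).
short[] samples = new short[44100]; // stand-in: one second of silence at 44.1 kHz
Wave wave = new Wave(44100, (short) 1, samples, 0, samples.length - 1);
boolean saved = wave.wroteToFile("recording.wav");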
Hope this helps
PcmAudioHelper solved my problem. I'll modify this answer and explain it, but first I have to run some tests on this class.
You might find this OMRECORDER helpful for recording in .WAV format.
In case .aac works for you, then check out this WhatsappAudioRecorder:
On startRecording button click:
Initialise a new thread.
Create a file with a .aac extension.
Create an output stream for the file.
Set output
Set the listener and execute the thread.
On stop click:
Interrupt the thread and the audio will be saved in the file.
Here is the full gist for reference:
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.os.Build;
import android.util.Log;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
public class AudioRecordThread implements Runnable {
private static final String TAG = AudioRecordThread.class.getSimpleName();
private static final int SAMPLE_RATE = 44100;
private static final int SAMPLE_RATE_INDEX = 4;
private static final int CHANNELS = 1;
private static final int BIT_RATE = 32000;
private final int bufferSize;
private final MediaCodec mediaCodec;
private final AudioRecord audioRecord;
private final OutputStream outputStream;
private OnRecorderFailedListener onRecorderFailedListener;
AudioRecordThread(OutputStream outputStream, OnRecorderFailedListener onRecorderFailedListener) throws IOException {
this.bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
this.audioRecord = createAudioRecord(this.bufferSize);
this.mediaCodec = createMediaCodec(this.bufferSize);
this.outputStream = outputStream;
this.onRecorderFailedListener = onRecorderFailedListener;
this.mediaCodec.start();
try {
audioRecord.startRecording();
} catch (Exception e) {
Log.w(TAG, e);
mediaCodec.release();
throw new IOException(e);
}
}
@Override
public void run() {
if (onRecorderFailedListener != null) {
Log.d(TAG, "onRecorderStarted");
onRecorderFailedListener.onRecorderStarted();
}
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
ByteBuffer[] codecInputBuffers = mediaCodec.getInputBuffers();
ByteBuffer[] codecOutputBuffers = mediaCodec.getOutputBuffers();
try {
while (!Thread.interrupted()) {
boolean success = handleCodecInput(audioRecord, mediaCodec, codecInputBuffers, Thread.currentThread().isAlive());
if (success)
handleCodecOutput(mediaCodec, codecOutputBuffers, bufferInfo, outputStream);
}
} catch (IOException e) {
Log.w(TAG, e);
} finally {
mediaCodec.stop();
audioRecord.stop();
mediaCodec.release();
audioRecord.release();
try {
outputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
private boolean handleCodecInput(AudioRecord audioRecord,
MediaCodec mediaCodec, ByteBuffer[] codecInputBuffers,
boolean running) throws IOException {
byte[] audioRecordData = new byte[bufferSize];
int length = audioRecord.read(audioRecordData, 0, audioRecordData.length);
if (length == AudioRecord.ERROR_BAD_VALUE ||
length == AudioRecord.ERROR_INVALID_OPERATION ||
length != bufferSize) {
if (length != bufferSize) {
if (onRecorderFailedListener != null) {
Log.d(TAG, "length != BufferSize calling onRecordFailed");
onRecorderFailedListener.onRecorderFailed();
}
return false;
}
}
int codecInputBufferIndex = mediaCodec.dequeueInputBuffer(10 * 1000);
if (codecInputBufferIndex >= 0) {
ByteBuffer codecBuffer = codecInputBuffers[codecInputBufferIndex];
codecBuffer.clear();
codecBuffer.put(audioRecordData);
mediaCodec.queueInputBuffer(codecInputBufferIndex, 0, length, 0, running ? 0 : MediaCodec.BUFFER_FLAG_END_OF_STREAM);
}
return true;
}
private void handleCodecOutput(MediaCodec mediaCodec,
ByteBuffer[] codecOutputBuffers,
MediaCodec.BufferInfo bufferInfo,
OutputStream outputStream)
throws IOException {
int codecOutputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
while (codecOutputBufferIndex != MediaCodec.INFO_TRY_AGAIN_LATER) {
if (codecOutputBufferIndex >= 0) {
ByteBuffer encoderOutputBuffer = codecOutputBuffers[codecOutputBufferIndex];
encoderOutputBuffer.position(bufferInfo.offset);
encoderOutputBuffer.limit(bufferInfo.offset + bufferInfo.size);
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != MediaCodec.BUFFER_FLAG_CODEC_CONFIG) {
byte[] header = createAdtsHeader(bufferInfo.size - bufferInfo.offset);
outputStream.write(header);
byte[] data = new byte[encoderOutputBuffer.remaining()];
encoderOutputBuffer.get(data);
outputStream.write(data);
}
encoderOutputBuffer.clear();
mediaCodec.releaseOutputBuffer(codecOutputBufferIndex, false);
} else if (codecOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
codecOutputBuffers = mediaCodec.getOutputBuffers();
}
codecOutputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
}
}
private byte[] createAdtsHeader(int length) {
int frameLength = length + 7;
byte[] adtsHeader = new byte[7];
adtsHeader[0] = (byte) 0xFF; // Sync Word
adtsHeader[1] = (byte) 0xF1; // MPEG-4, Layer (0), No CRC
adtsHeader[2] = (byte) ((MediaCodecInfo.CodecProfileLevel.AACObjectLC - 1) << 6);
adtsHeader[2] |= (((byte) SAMPLE_RATE_INDEX) << 2);
adtsHeader[2] |= (((byte) CHANNELS) >> 2);
adtsHeader[3] = (byte) (((CHANNELS & 3) << 6) | ((frameLength >> 11) & 0x03));
adtsHeader[4] = (byte) ((frameLength >> 3) & 0xFF);
adtsHeader[5] = (byte) (((frameLength & 0x07) << 5) | 0x1f);
adtsHeader[6] = (byte) 0xFC;
return adtsHeader;
}
private AudioRecord createAudioRecord(int bufferSize) {
AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT, bufferSize * 10);
if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
Log.d(TAG, "Unable to initialize AudioRecord");
throw new RuntimeException("Unable to initialize AudioRecord");
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
if (android.media.audiofx.NoiseSuppressor.isAvailable()) {
android.media.audiofx.NoiseSuppressor noiseSuppressor = android.media.audiofx.NoiseSuppressor
.create(audioRecord.getAudioSessionId());
if (noiseSuppressor != null) {
noiseSuppressor.setEnabled(true);
}
}
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
if (android.media.audiofx.AutomaticGainControl.isAvailable()) {
android.media.audiofx.AutomaticGainControl automaticGainControl = android.media.audiofx.AutomaticGainControl
.create(audioRecord.getAudioSessionId());
if (automaticGainControl != null) {
automaticGainControl.setEnabled(true);
}
}
}
return audioRecord;
}
private MediaCodec createMediaCodec(int bufferSize) throws IOException {
MediaCodec mediaCodec = MediaCodec.createEncoderByType("audio/mp4a-latm");
MediaFormat mediaFormat = new MediaFormat();
mediaFormat.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
mediaFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, SAMPLE_RATE);
mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, CHANNELS);
mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, bufferSize);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
mediaFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
try {
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
} catch (Exception e) {
Log.w(TAG, e);
mediaCodec.release();
throw new IOException(e);
}
return mediaCodec;
}
interface OnRecorderFailedListener {
void onRecorderFailed();
void onRecorderStarted();
}
}
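A hypothetical way to wire the thread up (my sketch; the file name and stop logic are assumptions, but the constructor and the interrupt-to-stop behaviour match the code above):
// Start: open the output stream and run AudioRecordThread on a new thread.
// Stop: interrupt the thread; its run() loop then stops and closes the stream.
private Thread recordingThread;

void startAacRecording(File outFile) throws IOException {
    OutputStream out = new FileOutputStream(outFile); // e.g. a file ending in .aac
    recordingThread = new Thread(new AudioRecordThread(out, null));
    recordingThread.start();
}

void stopAacRecording() {
    if (recordingThread != null) {
        recordingThread.interrupt();
    }
}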
I would add this as a comment but I don't yet have enough Stackoverflow rep points...
Opiatefuchs's link takes you to sample code that shows you the exact header formatting necessary to create a .wav file. I've been all over that code myself. Very helpful.
First, you need to know that a WAV file has a specific format -- a header -- so you can't just write the raw PCM data straight into a .wav file.
Second, the WAV header includes the length of the file, so you need to write the header after recording.
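One way to satisfy that second point (my sketch, not part of this answer) is to write a placeholder header first and then patch the two size fields in place once recording ends, assuming the standard 44-byte canonical PCM header:
// Hypothetical helper: fix up the RIFF chunk size (offset 4) and the data
// sub-chunk size (offset 40) of an already-written canonical WAV header.
static void patchWavSizes(File wav) throws IOException {
    RandomAccessFile raf = new RandomAccessFile(wav, "rw");
    try {
        int dataLen = (int) raf.length() - 44; // bytes of audio after the header
        raf.seek(4);
        writeIntLE(raf, 36 + dataLen); // RIFF chunk size
        raf.seek(40);
        writeIntLE(raf, dataLen); // data sub-chunk size
    } finally {
        raf.close();
    }
}

static void writeIntLE(RandomAccessFile raf, int v) throws IOException {
    raf.write(v & 0xFF);
    raf.write((v >> 8) & 0xFF);
    raf.write((v >> 16) & 0xFF);
    raf.write((v >> 24) & 0xFF);
}
The approach below reaches the same result by writing the header once the final length is known.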
My solution is: use AudioRecorder to record the PCM file.
byte[] audiodata = new byte[bufferSizeInBytes];
FileOutputStream fos = null;
int readsize = 0;
try {
fos = new FileOutputStream(pcmFileName, true);
} catch (FileNotFoundException e) {
Log.e("AudioRecorder", e.getMessage());
}
status = Status.STATUS_START;
while (status == Status.STATUS_START && audioRecord != null) {
readsize = audioRecord.read(audiodata, 0, bufferSizeInBytes);
if (AudioRecord.ERROR_INVALID_OPERATION != readsize && fos != null) {
try {
if (readsize > 0 && readsize <= audiodata.length)
fos.write(audiodata, 0, readsize);
} catch (IOException e) {
Log.e("AudioRecorder", e.getMessage());
}
}
}
try {
if (fos != null) {
fos.close();
}
} catch (IOException e) {
Log.e("AudioRecorder", e.getMessage());
}
Then convert it to a WAV file:
byte buffer[] = null;
int TOTAL_SIZE = 0;
File file = new File(pcmPath);
if (!file.exists()) {
return false;
}
TOTAL_SIZE = (int) file.length();
WaveHeader header = new WaveHeader();
header.fileLength = TOTAL_SIZE + (44 - 8);
header.FmtHdrLeth = 16;
header.BitsPerSample = 16;
header.Channels = 1;
header.FormatTag = 0x0001;
header.SamplesPerSec = 8000;
header.BlockAlign = (short) (header.Channels * header.BitsPerSample / 8);
header.AvgBytesPerSec = header.BlockAlign * header.SamplesPerSec;
header.DataHdrLeth = TOTAL_SIZE;
byte[] h = null;
try {
h = header.getHeader();
} catch (IOException e1) {
Log.e("PcmToWav", e1.getMessage());
return false;
}
if (h.length != 44)
return false;
File destfile = new File(destinationPath);
if (destfile.exists())
destfile.delete();
try {
buffer = new byte[1024 * 4]; // 4 KB copy buffer
InputStream inStream = null;
OutputStream ouStream = null;
ouStream = new BufferedOutputStream(new FileOutputStream(
destinationPath));
ouStream.write(h, 0, h.length);
inStream = new BufferedInputStream(new FileInputStream(file));
int size = inStream.read(buffer);
while (size != -1) {
ouStream.write(buffer, 0, size);
size = inStream.read(buffer);
}
inStream.close();
ouStream.close();
} catch (FileNotFoundException e) {
Log.e("PcmToWav", e.getMessage());
return false;
} catch (IOException ioe) {
Log.e("PcmToWav", ioe.getMessage());
return false;
}
if (deletePcmFile) {
file.delete();
}
Log.i("PcmToWav", "makePCMFileToWAVFile success!" + new SimpleDateFormat("yyyy-MM-dd hh:mm").format(new Date()));
return true;

Result from processing audio signal with Goertzel algorithm

I made a little signal-processing app. It processes an audio signal (Morse code) at a certain frequency with the Goertzel algorithm. The application saves a temporary file to the filesystem and, after recording is finished, starts to detect signals. Now I get a result as a bunch of magnitudes.
I don't really know what to read from those magnitudes. How can I decode the Morse code from them? How can I read them? I tried to find references, but nowhere is it explained what the result is and how to read it.
EDIT:
My Morse code application is made with Delphi and uses the Windows Beep function to send signals at a certain frequency. I'm using 1200 Hz for the signals. The pauses between signals and words, and the Morse beeps, are timed as Wikipedia describes. Everything is accurate.
Goertzel.java:
public class Goertzel {
private float samplingRate;
private float targetFrequency;
private int n;
private double coeff, Q1, Q2;
private double sine, cosine;
public Goertzel(float samplingRate, float targetFrequency, int inN) {
this.samplingRate = samplingRate;
this.targetFrequency = targetFrequency;
n = inN;
sine = Math.sin(2 * Math.PI * (targetFrequency / samplingRate));
cosine = Math.cos(2 * Math.PI * (targetFrequency / samplingRate));
coeff = 2 * cosine;
}
public void resetGoertzel() {
Q1 = 0;
Q2 = 0;
}
public void initGoertzel() {
int k;
float floatN;
double omega;
floatN = (float) n;
k = (int) (0.5 + ((floatN * targetFrequency) / samplingRate));
omega = (2.0 * Math.PI * k) / floatN;
sine = Math.sin(omega);
cosine = Math.cos(omega);
coeff = 2.0 * cosine;
resetGoertzel();
}
public void processSample(double sample) {
double Q0;
Q0 = coeff * Q1 - Q2 + sample;
Q2 = Q1;
Q1 = Q0;
}
public double[] getRealImag(double[] parts) {
parts[0] = (Q1 - Q2 * cosine);
parts[1] = (Q2 * sine);
return parts;
}
public double getMagnitudeSquared() {
return (Q1 * Q1 + Q2 * Q2 - Q1 * Q2 * coeff);
}
}
SoundCompareActivity.java
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
public class SoundCompareActivity extends Activity {
private static final int RECORDER_SAMPLE_RATE = 8000; // at least 2 times higher than the sound frequency
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_CONFIGURATION_MONO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private AudioRecord recorder = null;
private int bufferSize = 0;
private Thread recordingThread = null;
private boolean isRecording = false;
private Button startRecBtn;
private Button stopRecBtn;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
startRecBtn = (Button) findViewById(R.id.button1);
stopRecBtn = (Button) findViewById(R.id.button2);
startRecBtn.setEnabled(true);
stopRecBtn.setEnabled(false);
bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLE_RATE,
RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
startRecBtn.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
Log.d("SOUNDCOMPARE", "Start Recording");
startRecBtn.setEnabled(false);
stopRecBtn.setEnabled(true);
stopRecBtn.requestFocus();
startRecording();
}
});
stopRecBtn.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
Log.d("SOUNDCOMPARE", "Stop recording");
startRecBtn.setEnabled(true);
stopRecBtn.setEnabled(false);
startRecBtn.requestFocus();
stopRecording();
}
});
}
private void startRecording() {
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
RECORDER_SAMPLE_RATE, RECORDER_CHANNELS,
RECORDER_AUDIO_ENCODING, bufferSize);
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
@Override
public void run() {
writeAudioDataToTempFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
private String getTempFilename() {
File file = new File(getFilesDir(), "tempaudio");
if (!file.exists()) {
file.mkdirs();
}
File tempFile = new File(getFilesDir(), "signal.raw");
if (tempFile.exists())
tempFile.delete();
return (file.getAbsolutePath() + "/" + "signal.raw");
}
private void writeAudioDataToTempFile() {
byte data[] = new byte[bufferSize];
String filename = getTempFilename();
FileOutputStream os = null;
try {
os = new FileOutputStream(filename);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
int read = 0;
if (os != null) {
while (isRecording) {
read = recorder.read(data, 0, bufferSize);
if (read != AudioRecord.ERROR_INVALID_OPERATION) {
try {
os.write(data);
} catch (IOException e) {
e.printStackTrace();
}
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
private void deleteTempFile() {
File file = new File(getTempFilename());
file.delete();
}
private void stopRecording() {
if (recorder != null) {
isRecording = false;
recorder.stop();
recorder.release();
recorder = null;
recordingThread = null;
}
new MorseDecoder().execute(new File(getTempFilename()));
}
}
MorseDecoder.java:
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.os.AsyncTask;
import android.util.Log;
public class MorseDecoder extends AsyncTask<File, Void, Void> {
private FileInputStream is = null;
@Override
protected Void doInBackground(File... files) {
int index;
//double magnitudeSquared;
double magnitude;
int bufferSize = AudioRecord.getMinBufferSize(8000,
AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT);
Goertzel g = new Goertzel(8000, 1200, bufferSize);
g.initGoertzel();
for (int i = 0; i < files.length; i++) {
byte[] data = new byte[bufferSize];
try {
is = new FileInputStream(files[i]);
while(is.read(data) != -1) {
ShortBuffer sbuf = ByteBuffer.wrap(data).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer();
short[] audioShorts = new short[sbuf.capacity()];
sbuf.get(audioShorts);
float[] audioFloats = new float[audioShorts.length];
for (int j = 0; j < audioShorts.length; j++) {
audioFloats[j] = ((float)audioShorts[j]) / 0x8000;
}
for (index = 0; index < audioFloats.length; index++) {
g.processSample(data[index]);
}
magnitude = Math.sqrt(g.getMagnitudeSquared());
Log.d("SoundCompare", "Relative magnitude = " + magnitude);
g.resetGoertzel();
}
is.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
}
EDIT2:
I noticed some bugs in the sample processing and changed the code in the while loop:
while(is.read(data) != -1) {
ShortBuffer sbuf = ByteBuffer.wrap(data).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer();
short[] audioShorts = new short[sbuf.capacity()];
sbuf.get(audioShorts);
float[] audioFloats = new float[audioShorts.length];
for (int j = 0; j < audioShorts.length; j++) {
audioFloats[j] = ((float)audioShorts[j]) / 0x8000;
}
for (index = 0; index < audioFloats.length; index++) {
g.processSample(audioFloats[index]);
magnitude = Math.sqrt(g.getMagnitudeSquared());
Log.d("SoundCompare", "Relative magnitude = " + magnitude);
}
//magnitude = Math.sqrt(g.getMagnitudeSquared());
//Log.d("SoundCompare", "Relative magnitude = " + magnitude);
g.resetGoertzel();
}
Regards,
evilone
The output of your Goertzel filter will increase when a tone within its passband is present, and then decrease when the tone is removed. In order to detect pulses of a tone, e.g. Morse code, you need some kind of threshold detector on the output of the filter which will just give a boolean value for "tone present" / "tone not present" on a sample-by-sample basis. Try plotting the output values and it should be obvious once you see it in graphical form.
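A minimal sketch of such a detector (my illustration, not from the answer), with a little hysteresis so noise near the threshold does not make the output chatter; the threshold values are placeholders you would tune by plotting your own magnitudes:
// Turn a stream of Goertzel magnitudes into a boolean "tone present" signal.
public class ToneDetector {
    private static final double ON_THRESHOLD = 40.0;  // placeholder, tune by eye
    private static final double OFF_THRESHOLD = 20.0; // lower, to avoid chatter
    private boolean tonePresent = false;

    public boolean process(double magnitude) {
        if (!tonePresent && magnitude > ON_THRESHOLD) {
            tonePresent = true; // rising edge: tone starts
        } else if (tonePresent && magnitude < OFF_THRESHOLD) {
            tonePresent = false; // falling edge: tone ends
        }
        return tonePresent;
    }
}
Once you have the boolean stream, the durations of the "on" runs distinguish dots from dashes, and the "off" runs distinguish symbol, letter and word gaps.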
Plot the signal magnitudes on a graph versus time (some CW decoding apps for the PC do this in real-time). Now figure out what the graph for each Morse code symbol should look like. Then study some pattern matching algorithms. If there is enough noise present, you may want to try some statistical pattern matching methods.
Here's the Wikipedia link for proper Morse Code timing.
