Queueing the audio files in Android

I want to play my .mp3 files sequentially, but they are playing almost at the same time. How can I solve this?
I parsed the long list of station names into groups of three station names. I send them, get an InputStream, convert it into an .mp3, and then try to play the files in the media player one after another.
Here is the code as it flows; the first method to run is readStationNames():
public void readStationNames(String[] arrayOfStations) throws IOException
{
    int mod = arrayOfStations.length % 3;
    int length = arrayOfStations.length;
    int m = 0;
    for (int k = 1; k <= length / 3; k++)
    {
        String stationNamesAsString = "";
        for (int j = m; j < 3 * k; j++)
        {
            stationNamesAsString = stationNamesAsString + "\t" + "," + arrayOfStations[j];
        }
        speak(stationNamesAsString);
        m += 3;
    }
    if (mod != 0)
    {
        String stationNamesAsString = "";
        for (int g = (length - 1) - mod; g < length; g++)
        {
            stationNamesAsString = stationNamesAsString + "\t" + "," + arrayOfStations[g];
        }
        speak(stationNamesAsString);
    }
    ListenMicrophone();
}
Then the speak() method, which is called from readStationNames(), is below:
private void speak(String StationNamesAsString) throws IOException
{
    String encodedString = URLEncoder.encode(StationNamesAsString, "UTF-8");
    sound = audio.getAudio(encodedString, Language.ENGLISH);
    File convertedFile = File.createTempFile("convertedFile", ".mp3", null); // getDir("filez", 0)
    FileOutputStream out = new FileOutputStream(convertedFile);
    byte[] buffer = new byte[16384];
    int length = 0;
    while ((length = sound.read(buffer)) != -1)
    {
        out.write(buffer, 0, length);
    }
    out.close();
    playFile(convertedFile);
}
The method that plays the converted file is below:
public void playFile(File playThis) throws IllegalArgumentException, IllegalStateException, IOException
{
    MediaPlayer mp = new MediaPlayer();
    FileInputStream fis = new FileInputStream(playThis);
    mp.setDataSource(fis.getFD());
    fis.close();
    mp.prepare();
    mp.start();
    mp.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
        @Override
        public void onCompletion(MediaPlayer mp) {
            mp.stop();
        }
    });
}
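One way to get strictly sequential playback is to stop starting a player for every converted file immediately and instead keep the files in a queue, starting the next MediaPlayer only from inside onCompletion. The following is a minimal sketch of that idea, not the original code; the playQueue field and the enqueueFile()/playNextInQueue() helpers are hypothetical names:
// Assumes java.util.Queue/LinkedList and android.media.MediaPlayer are imported.
private final Queue<File> playQueue = new LinkedList<>();

// Called instead of playFile(); files are only queued here.
public void enqueueFile(File file) {
    playQueue.add(file);
    if (playQueue.size() == 1) {
        playNextInQueue(); // nothing is playing yet, so start right away
    }
}

private void playNextInQueue() {
    File next = playQueue.peek();
    if (next == null) return;
    try {
        MediaPlayer mp = new MediaPlayer();
        FileInputStream fis = new FileInputStream(next);
        mp.setDataSource(fis.getFD());
        fis.close();
        mp.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
            @Override
            public void onCompletion(MediaPlayer player) {
                player.release();
                playQueue.poll();   // drop the file that just finished
                playNextInQueue();  // start the next queued file, if any
            }
        });
        mp.prepare();
        mp.start();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
With this approach, speak() would call enqueueFile(convertedFile) instead of playFile(convertedFile), so each clip waits for the previous one to finish.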

Related

tflite.run() returning same output for different input values

I am trying to make an Android app for monument recognition. The input changes on every run, but the output returned is always the same.
Below are the code snippets.
To load the tflite model stored in the assets directory:
private ByteBuffer loadModelFile(String filename) throws IOException {
    AssetFileDescriptor fileDescriptor = this.getAssets().openFd(filename);
    FileInputStream inputStream = new FileInputStream(fileDescriptor.getFileDescriptor());
    FileChannel fileChannel = inputStream.getChannel();
    long startOffset = fileDescriptor.getStartOffset();
    long declaredLength = fileDescriptor.getDeclaredLength();
    return fileChannel.map(FileChannel.MapMode.READ_ONLY, startOffset, declaredLength);
}
To initialize the tflite interpreter:
predict.setOnClickListener(new View.OnClickListener() {
    @RequiresApi(api = Build.VERSION_CODES.O)
    @Override
    public void onClick(View v) {
        try {
            tflite = new Interpreter(loadModelFile("converted_model.tflite"));
            Log.println(7, "tflite", "tflite init");
            doInference(picFile);
        } catch (Exception e) {
            System.out.println(e);
        }
    }
});
To run the model:
@RequiresApi(api = Build.VERSION_CODES.O)
public void doInference(File photo) throws IOException {
    img = findViewById(R.id.imgToDisp);
    Bitmap bitmapImg = BitmapFactory.decodeFile(pathToFile);
    img.setImageBitmap(bitmapImg);
    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    bitmapImg.compress(Bitmap.CompressFormat.JPEG, 50, stream);
    byte[] arr = stream.toByteArray();
    changedim = new float[1][150][150][3];
    outputval = new float[1][28];
    int m = 0;
    for (int i = 0; i < 1; i++) {
        for (int j = 0; j < 150; j++) {
            for (int k = 0; k < 150; k++) {
                for (int l = 0; l < 3; l++) {
                    byte a = arr[m++];
                    changedim[i][j][k][l] = Byte.toUnsignedLong(a);
                }
            }
        }
    }
    tflite.run(changedim, outputval);
    for (int i = 0; i < 28; i++) {
        Log.println(7, "outputval", i + " " + outputval[0][i]);
    }
    path = findViewById(R.id.path);
    String out = "";
    float[] op = outputval[0];
    int ind = 0;
    float max = op[0];
    while (op[ind] != 1) {
        ind++;
        //Log.println(7,"op", " "+op[ind]+" "+ind);
    }
    for (float f : op) {
        out += Float.toString(f) + ",";
    }
    predict.setText("result: " + labels.get(ind));
    Log.println(7, "label", ind + " " + labels.get(ind));
    //path.setText(""+pathToFile);
}
The input to the model must be an image of size 150×150, converted to a 4D float32 array of shape 1×150×150×3.
The input to the model should be the color values of the individual pixels, which can be extracted using:
int p = bitmapImg.getPixel(j, k);
int R = (p >> 16) & 0xff;
int G = (p >> 8) & 0xff;
int B = p & 0xff;
Change that and your model should work correctly!
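As a minimal sketch of that change (it assumes the bitmap is first scaled to 150×150 and that the model was trained on raw 0–255 channel values; if it was trained on inputs normalized to [0, 1], divide each channel by 255f):
// Build the 1x150x150x3 input from pixel color values instead of compressed JPEG bytes.
Bitmap scaled = Bitmap.createScaledBitmap(bitmapImg, 150, 150, true);
float[][][][] input = new float[1][150][150][3];
for (int y = 0; y < 150; y++) {
    for (int x = 0; x < 150; x++) {
        int p = scaled.getPixel(x, y);
        input[0][y][x][0] = (p >> 16) & 0xff; // R
        input[0][y][x][1] = (p >> 8) & 0xff;  // G
        input[0][y][x][2] = p & 0xff;         // B
    }
}
float[][] output = new float[1][28];
tflite.run(input, output);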

How to capture audio data and save to wav file?

I'm trying to record (capture) audio data to a new .wav file.
I'm using Oboe (C++) for buffering the audio, following this example code.
This is how I capture audio data from the mixer into the recordBuffer array:
void Mixer::renderAudio(int16_t *audioData, int32_t numFrames) {
    int32_t count = numFrames * kChannelCount;
    // Zero out the incoming container array
    for (int j = 0; j < count; ++j) {
        audioData[j] = 0;
    }
    for (int i = 0; i < mNextFreeTrackIndex; ++i) {
        mTracks[i]->renderAudio(mixingBuffer, numFrames);
        for (int j = 0; j < count; ++j) {
            data = mixingBuffer[j];
            data *= volume;
            audioData[j] += data;
            if (recording && recordFrames < kMaxRecordSize) {
                if (data != 0)
                    recordBuffer[recordFrames++] = data;
            }
        }
    }
}
Stopping the recording and returning the short array to the Java code:
jshortArray Mixer::getRecordingData(JNIEnv *env) {
    recording = false;
    jshortArray result = env->NewShortArray(recordFrames);
    env->SetShortArrayRegion(result, 0, recordFrames, recordBuffer);
    return result;
}
Then, in the Java code, I create the .wav file:
public class Wave {
    private final int LONGINT = 4;
    private final int SMALLINT = 2;
    private final int INTEGER = 4;
    private final int ID_STRING_SIZE = 4;
    private final int WAV_RIFF_SIZE = LONGINT + ID_STRING_SIZE;
    private final int WAV_FMT_SIZE = (4 * SMALLINT) + (INTEGER * 2) + LONGINT + ID_STRING_SIZE;
    private final int WAV_DATA_SIZE = ID_STRING_SIZE + LONGINT;
    private final int WAV_HDR_SIZE = WAV_RIFF_SIZE + ID_STRING_SIZE + WAV_FMT_SIZE + WAV_DATA_SIZE;
    private final short PCM = 1;
    private final int SAMPLE_SIZE = 2;

    int cursor, nSamples;
    byte[] output;
    int sampleRate = 48000;
    short channels = 2;

    public Wave(short[] data, int start, int end) {
        nSamples = end - start + 1;
        cursor = 0;
        output = new byte[nSamples * SMALLINT + WAV_HDR_SIZE];
        buildHeader(sampleRate, channels);
        writeData(data, start, end);
    }

    private void buildHeader(int sampleRate, short nChannels) {
        write("RIFF");
        write(output.length);
        write("WAVE");
        writeFormat(sampleRate, nChannels);
    }

    public void writeFormat(int sampleRate, short nChannels) {
        write("fmt ");
        write(WAV_FMT_SIZE - WAV_DATA_SIZE);
        write(PCM);
        write(nChannels);
        write(sampleRate);
        write(nChannels * sampleRate * SAMPLE_SIZE);
        write((short) (nChannels * SAMPLE_SIZE));
        write((short) 16);
    }

    public void writeData(short[] data, int start, int end) {
        write("data");
        write(nSamples * SMALLINT);
        for (int i = start; i <= end; write(data[i++]));
    }

    private void write(byte b) {
        output[cursor++] = b;
    }

    private void write(String id) {
        if (id.length() != ID_STRING_SIZE) {}
        else {
            for (int i = 0; i < ID_STRING_SIZE; ++i) write((byte) id.charAt(i));
        }
    }

    private void write(int i) {
        write((byte) (i & 0xFF)); i >>= 8;
        write((byte) (i & 0xFF)); i >>= 8;
        write((byte) (i & 0xFF)); i >>= 8;
        write((byte) (i & 0xFF));
    }

    private void write(short i) {
        write((byte) (i & 0xFF)); i >>= 8;
        write((byte) (i & 0xFF));
    }

    public boolean wroteToFile() {
        boolean ok;
        try {
            File path = FileManager.generateNewFile(String.valueOf(sampleRate + " " + channels));
            FileOutputStream outFile = new FileOutputStream(path);
            outFile.write(output);
            outFile.close();
            ok = true;
        } catch (FileNotFoundException e) {
            e.printStackTrace();
            ok = false;
        } catch (IOException e) {
            ok = false;
            e.printStackTrace();
        }
        return ok;
    }
}
The results are pretty close, but the quality is very bad.
Does anyone know what I am doing wrong?
By the way, it works well only if I play one sample at a time.
Here is a way to record an Oboe input stream to a .wav file in C++ for Android:
https://github.com/reuniware/OboeAudioRecorder/blob/master/app/src/main/cpp/OboeAudioRecorder.cpp

How to get a random audio file from a raw array on Android?

These are my arrays of audio files:
int[] rawQuetion = {R.raw.alikhlas, R.raw.alkafirun}; // these are the questions
int[] rawAnswer = {R.raw.jwbaliklas, R.raw.alfalaq};  // these are the answers
And this method randomizes the questions:
//fisher-yates Shuffle
public void playSoal() {
    shuffleArray(rawQuetion);
    try {
        int idx = new Random().nextInt(rawQuetion.length);
        mp = MediaPlayer.create(this, rawQuetion[idx]);
        mp.start();
    } catch (Exception e) {
        Log.e("ERROR", "Media Player", e);
    }
}
static void shuffleArray(int[] arr)
{
    Random rnd = new Random();
    for (int i = arr.length - 1; i > 0; i--)
    {
        int index = rnd.nextInt(i + 1);
        // Swap
        int a = arr[index];
        arr[index] = arr[i];
        arr[i] = a;
    }
}
public void audioFile() throws IOException {
    InputStream is = getResources().openRawResource(R.raw.jwbaliklas); // I want to get the audio file from rawAnswer based on rawQuetion
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    BufferedInputStream in = new BufferedInputStream(is);
    int read;
    byte[] buff = new byte[1024];
    while ((read = in.read(buff)) > 0)
    {
        out.write(buff, 0, read);
    }
    out.flush();
    byte[] audioBytes = out.toByteArray();
    for (int i = 0; i < audioBytes.length; i++) {
        audioBytes[i] = (byte) ((audioBytes[i]) & 0xFF);
    }
    absNormalizedSignal = hitungFFT(audioBytes);
    AppLog.logString("===== INI DARI AUDIO FILE");
}
public void audioFile() throws IOException{
InputStream is = getResources().openRawResource(R.raw.jwbaliklas);// I want get audio file from rawAnswer based rawQuestion
You have your answer right here: use openRawResource() to open a raw resource. You don't need to hard-code specific argument values in your code. The method takes an int argument; how you determine the value to pass is entirely up to you. For example, you can declare your audioFile() method to take an integer argument and pass that on to openRawResource():
public void audioFile(final int resid) throws IOException{
InputStream is = getResources().openRawResource(resid);
Then, where you have your corresponding question/answer audio ids, you can pass the correct id.
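For example, if the two arrays are kept in the same order so that rawAnswer[i] is the answer to rawQuetion[i], the call site might look roughly like this (questionIndex is a hypothetical field name; a random index is used instead of shuffling so the question/answer pairing is not broken):
// Parallel arrays: rawAnswer[i] answers rawQuetion[i].
int[] rawQuetion = {R.raw.alikhlas, R.raw.alkafirun};
int[] rawAnswer  = {R.raw.jwbaliklas, R.raw.alfalaq};

private int questionIndex; // remembered when the question is chosen

public void playSoal() {
    questionIndex = new Random().nextInt(rawQuetion.length);
    mp = MediaPlayer.create(this, rawQuetion[questionIndex]);
    mp.start();
}

public void playAnswer() throws IOException {
    // Pass the matching answer's resource id to the parameterized audioFile().
    audioFile(rawAnswer[questionIndex]);
}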

How to get an answer from the raw audio folder in Android Studio

This is my array of questions and answers; for example, R.raw.ikhlas is the question whose answer is R.raw.jwbalikhlas:
int[] rawQuetion = {R.raw.alfalaq, R.raw.alikhlas, R.raw.alkafirun, R.raw.allahab};
int[] rawAnswer = {R.raw.jwbaliklas};
This method randomizes the questions:
//fisher-yates Shuffle
public void playSoal() {
    shuffleArray(rawQuetion);
    try {
        int idx = new Random().nextInt(rawQuetion.length);
        mp = MediaPlayer.create(this, rawQuetion[idx]);
        mp.start();
    } catch (Exception e) {
        Log.e("ERROR", "Media Player", e);
        mp = null;
        mp.release();
        mp.stop();
        e.printStackTrace();
    }
}
static void shuffleArray(int[] arr)
{
    Random rnd = new Random();
    for (int i = arr.length - 1; i > 0; i--)
    {
        int index = rnd.nextInt(i + 1);
        // Swap
        int a = arr[index];
        arr[index] = arr[i];
        arr[i] = a;
    }
}
When a quiz question is selected at random, I want to get its matching answer here:
public void audioFile() throws IOException {
    InputStream is = getResources().openRawResource(R.raw.jwbaliklas); // I want this to be obtained from the above array
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    BufferedInputStream in = new BufferedInputStream(is);
    int read;
    byte[] buff = new byte[1024];
    while ((read = in.read(buff)) > 0)
    {
        out.write(buff, 0, read);
    }
    out.flush();
    byte[] audioBytes = out.toByteArray();
    for (int i = 0; i < audioBytes.length; i++) {
        audioBytes[i] = (byte) ((audioBytes[i]) & 0xFF);
    }
    absNormalizedSignal = hitungFFT(audioBytes);
    AppLog.logString("===== From audio File");
}
If you pass the array index for the resource you want to open, you can use the index directly on the array of integers (as long as the array is visible from the audioFile scope):
public void audioFile(@RawRes int i) throws IOException {
    InputStream is = getResources().openRawResource(rawQuestion[i]);
    ...
}
Also, you have to include the @RawRes annotation:
@RawRes int[] rawQuestion = {R.raw.alfalaq, R.raw.alikhlas, R.raw.alkafirun, R.raw.allahab};
@RawRes int[] rawAnswer = {R.raw.jwbaliklas};
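A short usage sketch of that idea (currentQuestion is a hypothetical field name; it assumes rawAnswer is filled out so that rawAnswer[i] is the answer to rawQuestion[i]):
private int currentQuestion; // index of the question picked at random

public void playSoal() {
    currentQuestion = new Random().nextInt(rawQuestion.length);
    mp = MediaPlayer.create(this, rawQuestion[currentQuestion]);
    mp.start();
}

// Later, reuse the same index to open the matching answer resource.
public void playAnswer() throws IOException {
    InputStream is = getResources().openRawResource(rawAnswer[currentQuestion]);
    // ... process the stream as in audioFile()
}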

Android Bluetooth input stream not reading full array

I'm creating an app to read string values over a Bluetooth serial port. My data arrives, but in two parts: if I send $F00,A,B,0,M# via Bluetooth, it reads only $ in the first part and F00,A,B,0,M# in the next part. I have provided my code below. Please correct me if I'm wrong.
InputStream inputStream = null;
int avilableBytes = 0;

public ConnectedThread(BluetoothSocket socket) {
    InputStream temp = null;
    try {
        temp = socket.getInputStream();
    } catch (IOException e) {
        e.printStackTrace();
    }
    inputStream = temp;
}
public void run() {
    try {
        int bytes;
        while (true) {
            try {
                avilableBytes = inputStream.available();
                if (avilableBytes > 0) {
                    byte[] buffer = new byte[avilableBytes];
                    bytes = inputStream.read(buffer);
                    final String readMessage = new String(buffer, 0, bytes);
                    bt_handler.obtainMessage(handlerState, bytes, -1, readMessage).sendToTarget();
                    Log.d("PRAVEEN", readMessage);
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
The data arrive as a stream of bytes and cannot be processed immediately as they trickle in a few bytes at a time; they will not arrive all at once as a single packet. You have to use another byte[] buffer (a main buffer) into which you gradually copy the incoming bytes, advancing an index into that buffer. Then, from time to time (e.g. in a timer, once per second), take the data from the main buffer and process it. In practice you must define some data frame with a separator (e.g. data*data*data*; there are many ways to do this, good and bad). I dealt with this in .NET via Xamarin, but as an example it may be helpful (a Java sketch of the same idea is given after the C# example):
Update: an example of the format.
In ConnectedThread:
public override void Run()
{
    while (true)
    {
        try
        {
            int readBytes = 0;
            lock (InternaldataReadLock)
            {
                readBytes = clientSocketInStream.Read(InternaldataRead, 0, InternaldataRead.Length);
                Array.Copy(InternaldataRead, TempdataRead, readBytes);
            }
            if (readBytes > 0)
            {
                lock (dataReadLock)
                {
                    dataRead = new byte[readBytes];
                    for (int i = 0; i < readBytes; i++)
                    {
                        dataRead[i] = TempdataRead[i];
                    }
                }
                Bundle dataBundle = new Bundle();
                dataBundle.PutByteArray("Data", dataRead);
                Message message = btlManager.sourceHandler.ObtainMessage();
                message.What = 1;
                message.Data = dataBundle;
                btlManager.sourceHandler.SendMessage(message);
            }
        }
        catch (System.Exception e)
        {
            btlManager.btlState = BTLService.BTLState.Nothing;
        }
    }
}
In BTLHandler:
public override void HandleMessage(Message msg)
{
    switch (msg.What)
    {
        case 1:
        {
            byte[] data = msg.Data != null ? msg.Data.GetByteArray("Data") : new byte[0];
            btlService.BTLReceiveData(data);
        }
        break;
    }
}

public void BTLReceiveData(byte[] data)
{
    lock (dataReadLock)
    {
        for (int i = 0; i < data.Length; i++)
        {
            dataRead[dataReadWriteCursor] = data[i];
            dataReadWriteCursor++;
        }
    }
}
In the timer:
int tmpWriteCursor = dataReadWriteCursor;
int tmpReadCursor = dataReadReadCursor;
lock (dataReadLock)
{
    int newBytes = dataReadWriteCursor - dataReadReadCursor;
    for (int i = 0; i < newBytes; i++)
    {
        dataReadMain[dataReadReadCursor] = dataRead[dataReadReadCursor++];
    }
}
bool odradkovani = false;
string tmpRadek = "";
int lastLineIndex = 0;
List<string> list = new List<string>();
for (int i = LastWriteLineIndex; i < tmpWriteCursor; i++)
{
    if (dataReadMain[i] >= 32 && dataReadMain[i] <= 255)
    {
        tmpRadek += (char)dataReadMain[i];
    }
    else if (dataReadMain[i] == 13) odradkovani = true;
    else if (dataReadMain[i] == 10)
    {
        if (odradkovani)
        {
            odradkovani = false;
            list.Add(Utils.GetFormatedDateTime(DateTime.Now) + " " + tmpRadek);
            tmpRadek = "";
            lastLineIndex = i + 1;
        }
    }
    else
    {
        tmpRadek += "?" + dataReadMain[i].ToString() + "?";
    }
}
WriteDataToLog(list);
LastWriteLineIndex = lastLineIndex;
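Translated back to the Java side of the original question, a minimal sketch of the same buffering idea might look like this; it uses the $ and # characters from the question's frame format as delimiters, and frameBuffer and handleFrame() are hypothetical names:
private final StringBuilder frameBuffer = new StringBuilder();

public void run() {
    byte[] buffer = new byte[1024];
    int bytes;
    try {
        while ((bytes = inputStream.read(buffer)) != -1) {
            // Append whatever arrived; it may be a fragment of a frame or several frames.
            frameBuffer.append(new String(buffer, 0, bytes));

            // Extract every complete "$...#" frame currently in the buffer.
            int start, end;
            while ((start = frameBuffer.indexOf("$")) != -1
                    && (end = frameBuffer.indexOf("#", start)) != -1) {
                String frame = frameBuffer.substring(start, end + 1); // e.g. "$F00,A,B,0,M#"
                frameBuffer.delete(0, end + 1);                       // drop the consumed bytes
                handleFrame(frame);                                   // hand off one complete frame
            }
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}

private void handleFrame(String frame) {
    // Only complete frames reach the UI handler, regardless of how the bytes were split.
    bt_handler.obtainMessage(handlerState, frame.length(), -1, frame).sendToTarget();
}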
