I am using the mp4parser library for cutting multiple clips from a recorded video. It works fine if I cut one part from the video, but when I try to cut multiple clips from the video only the first clip is cut properly. The others are only 0 or 1 seconds long.
Following is the My Code:
import android.app.ProgressDialog;
import android.content.Context;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import android.widget.Toast;
import com.coremedia.iso.IsoFile;
import com.coremedia.iso.boxes.TimeToSampleBox;
import com.googlecode.mp4parser.authoring.Movie;
import com.googlecode.mp4parser.authoring.Track;
import com.googlecode.mp4parser.authoring.builder.DefaultMp4Builder;
import com.googlecode.mp4parser.authoring.container.mp4.MovieCreator;
import com.googlecode.mp4parser.authoring.tracks.CroppedTrack;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import uk.org.humanfocus.hfi.Beans.TrimPoint;
import uk.org.humanfocus.hfi.Utils.Constants;
import uk.org.humanfocus.hfi.Utils.SimpleThreadFactory;
import uk.org.humanfocus.hfi.Utils.Ut;
/**
* Shortens/Crops a track
*/
public class ShortenExample {
private static final String TAG = "ShortenExample";
private final Context mCxt;
private ExecutorService mThreadExecutor = null;
private SimpleInvalidationHandler mHandler;
private ProgressDialog mProgressDialog;
String filePath;
ArrayList<TrimPoint> mTrimPoints;
int videoLength;
ArrayList<String> trimVideos;
private class SimpleInvalidationHandler extends Handler {
#Override
public void handleMessage(final Message msg) {
switch (msg.what) {
case R.id.shorten:
mProgressDialog.dismiss();
if (msg.arg1 == 0)
Toast.makeText(mCxt,
mCxt.getString(R.string.message_error) + " " + (String) msg.obj,
Toast.LENGTH_LONG).show();
else
Toast.makeText(mCxt,
mCxt.getString(R.string.message_shortened) + " " + (String) msg.obj,
Toast.LENGTH_LONG).show();
break;
}
}
}
public ShortenExample(Context context) {
mCxt = context;
mHandler = new SimpleInvalidationHandler();
//mProgressDialog = new ProgressDialog(mCxt);
//mProgressDialog.setMessage("Wait Saving..");
//mProgressDialog.setCancelable(false);
}
public void shorten(String filePath,ArrayList<TrimPoint> trimPoints, int endTime) {
trimVideos = new ArrayList<String>();
this.filePath = filePath;
this.videoLength = endTime;
this.mTrimPoints = trimPoints;
Log.d(Constants.TAG,"End Time: "+endTime+" Trim Points: "+mTrimPoints.size());
for (int i=0;i<trimPoints.size();i++){
TrimPoint point = trimPoints.get(i);
int start=0;
int end = 0;
if(point.getTime()-5<0){
start = 0;
}else{
start = point.getTime()-5;
}
if(point.getTime()+5>videoLength){
end = videoLength-1;
}else {
end = point.getTime() + 5;
}
Log.d(Constants.TAG,"Clip: "+start+" : "+end);
doShorten(start,end);
}
Log.d(Constants.TAG,"Done: "+trimVideos.size());
}
private void doShorten(final int _startTime, final int _endTime) {
//mProgressDialog = Ut.ShowWaitDialog(mCxt, 0);
//mProgressDialog.show();
if(mThreadExecutor == null)
mThreadExecutor = Executors.newSingleThreadExecutor(new SimpleThreadFactory("doShorten"));
//this.mThreadExecutor.execute(new Runnable() {
// public void run() {
try {
File folder = Ut.getTestMp4ParserVideosDir(mCxt);
//File folder = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM),"HFVideos"+File.separator+"TEMP");
//Log.d(Constants.TAG, folder.toString());
if (!folder.exists()) {
Log.d(TAG, "failed to create directory");
}
//Movie movie = new MovieCreator().build(new RandomAccessFile("/home/sannies/suckerpunch-distantplanet_h1080p/suckerpunch-distantplanet_h1080p.mov", "r").getChannel());
// Movie movie = MovieCreator.build(new FileInputStream("/home/sannies/CSI.S13E02.HDTV.x264-LOL.mp4").getChannel());
Movie movie = MovieCreator.build(new FileInputStream(new File(filePath)).getChannel());
//Log.d(Constants.TAG,"Movie: "+movie.toString());
List<Track> tracks = movie.getTracks();
movie.setTracks(new LinkedList<Track>());
// remove all tracks we will create new tracks from the old
double startTime = _startTime;
double endTime = _endTime;//(double) getDuration(tracks.get(0)) / tracks.get(0).getTrackMetaData().getTimescale();
boolean timeCorrected = false;
// Here we try to find a track that has sync samples. Since we can only start decoding
// at such a sample we SHOULD make sure that the start of the new fragment is exactly
// such a frame
for (Track track : tracks) {
if (track.getSyncSamples() != null && track.getSyncSamples().length > 0) {
if (timeCorrected) {
// This exception here could be a false positive in case we have multiple tracks
// with sync samples at exactly the same positions. E.g. a single movie containing
// multiple qualities of the same video (Microsoft Smooth Streaming file)
throw new RuntimeException("The startTime has already been corrected by another track with SyncSample. Not Supported.");
}
startTime = correctTimeToSyncSample(track, startTime, false);
endTime = correctTimeToSyncSample(track, endTime, true);
timeCorrected = true;
}
}
for (Track track : tracks) {
long currentSample = 0;
double currentTime = 0;
long startSample = -1;
long endSample = -1;
for (int i = 0; i < track.getDecodingTimeEntries().size(); i++) {
TimeToSampleBox.Entry entry = track.getDecodingTimeEntries().get(i);
for (int j = 0; j < entry.getCount(); j++) {
// entry.getDelta() is the amount of time the current sample covers.
if (currentTime <= startTime) {
// current sample is still before the new starttime
startSample = currentSample;
}
if (currentTime <= endTime) {
// current sample is after the new start time and still before the new endtime
endSample = currentSample;
} else {
// current sample is after the end of the cropped video
break;
}
currentTime += (double) entry.getDelta() / (double) track.getTrackMetaData().getTimescale();
currentSample++;
}
}
movie.addTrack(new CroppedTrack(track, startSample, endSample));
}
long start1 = System.currentTimeMillis();
IsoFile out = new DefaultMp4Builder().build(movie);
long start2 = System.currentTimeMillis();
// FileOutputStream fos = new FileOutputStream(String.format("output-%f-%f.mp4", startTime, endTime));
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
String filename = folder.getPath() + File.separator + String.format("TMP4_APP_OUT-%f-%f", startTime, endTime) + "_" + timeStamp + ".mp4";
trimVideos.add(filename);
FileOutputStream fos = new FileOutputStream(filename);
FileChannel fc = fos.getChannel();
out.getBox(fc);
fc.close();
fos.close();
long start3 = System.currentTimeMillis();
System.err.println("Building IsoFile took : " + (start2 - start1) + "ms");
System.err.println("Writing IsoFile took : " + (start3 - start2) + "ms");
System.err.println("Writing IsoFile speed : " + (new File(String.format("TMP4_APP_OUT-%f-%f", startTime, endTime)).length() / (start3 - start2) / 1000) + "MB/s");
Message.obtain(mHandler, R.id.shorten, 1, 0, filename).sendToTarget();
} catch (FileNotFoundException e) {
Message.obtain(mHandler, R.id.shorten, 0, 0, e.getMessage()).sendToTarget();
e.printStackTrace();
} catch (IOException e) {
Message.obtain(mHandler, R.id.shorten, 0, 0, e.getMessage()).sendToTarget();
e.printStackTrace();
}
//mProgressDialog.dismiss();
// }
//});
}
protected static long getDuration(Track track) {
long duration = 0;
for (TimeToSampleBox.Entry entry : track.getDecodingTimeEntries()) {
duration += entry.getCount() * entry.getDelta();
}
return duration;
}
private static double correctTimeToSyncSample(Track track, double cutHere, boolean next) {
double[] timeOfSyncSamples = new double[track.getSyncSamples().length];
long currentSample = 0;
double currentTime = 0;
for (int i = 0; i < track.getDecodingTimeEntries().size(); i++) {
TimeToSampleBox.Entry entry = track.getDecodingTimeEntries().get(i);
for (int j = 0; j < entry.getCount(); j++) {
if (Arrays.binarySearch(track.getSyncSamples(), currentSample + 1) >= 0) {
// samples always start with 1 but we start with zero therefore +1
timeOfSyncSamples[Arrays.binarySearch(track.getSyncSamples(), currentSample + 1)] = currentTime;
}
currentTime += (double) entry.getDelta() / (double) track.getTrackMetaData().getTimescale();
currentSample++;
}
}
double previous = 0;
for (double timeOfSyncSample : timeOfSyncSamples) {
if (timeOfSyncSample > cutHere) {
if (next) {
return timeOfSyncSample;
} else {
return previous;
}
}
previous = timeOfSyncSample;
}
return timeOfSyncSamples[timeOfSyncSamples.length - 1];
}
}
The problem occurred only in the emulator. When we record video in the emulator it records fewer seconds than the actual time. The code works fine on an actual device.
BTW, for duration in seconds, you need to divide by the timescale. I find the entry (or sample ) counts vary between audio and video tracks of same duration. But I'm a total noob, don't listen to me.
/**
 * Returns the duration of a track in seconds: the sum over all
 * time-to-sample entries of count * delta, scaled by the track timescale.
 */
protected static double getDuration(Track track) {
    final double timescale = track.getTrackMetaData().getTimescale();
    double seconds = 0;
    for (TimeToSampleBox.Entry e : track.getDecodingTimeEntries()) {
        seconds += e.getCount() * e.getDelta() / timescale;
    }
    return seconds;
}
Related
im having an issue while video mixing with audio. everytime when I try to make it.
That always give me an error which is
Java.lang.RuntimeException: A cast to int has gone wrong. Please contact the mp4parser discussion group.
Im using googlecode mp4 parser : implementation com.googlecode.mp4parser:isoparser:1.1.22 and also tried that but no success implementation 'org.mp4parser:isoparser:1.9.41'
Also on forum there's no proper answer on that question.
here's my code
/**
 * Crops fullAudio so it is no longer than the video at videopath, by locating
 * the range of audio samples that covers [0, video duration].
 *
 * NOTE(review): the "A cast to int has gone wrong" RuntimeException reported
 * with this code is raised inside mp4parser — verify that the computed
 * sample range (startSample1/endSample1) is valid for the track (in
 * particular that neither stays at -1) before handing it to CroppedTrack.
 *
 * @param videopath path of the video whose duration bounds the audio
 * @param fullAudio the audio track to crop
 * @return the cropped track, or fullAudio unchanged if reading the video fails
 */
public Track CropAudio(String videopath, Track fullAudio) {
    try {
        IsoFile isoFile = new IsoFile(videopath);
        // Video duration in seconds = mvhd duration / mvhd timescale.
        double lengthInSeconds = (double)
                isoFile.getMovieBox().getMovieHeaderBox().getDuration() /
                isoFile.getMovieBox().getMovieHeaderBox().getTimescale();
        Track audioTrack = (Track) fullAudio;
        double startTime1 = 0;
        double endTime1 = lengthInSeconds;
        long currentSample = 0;
        double currentTime = 0;
        double lastTime = -1;
        long startSample1 = -1;
        long endSample1 = -1;
        // Walk every audio sample, accumulating the running timestamp, to
        // find the first and last samples inside [startTime1, endTime1].
        for (int i = 0; i < audioTrack.getSampleDurations().length; i++) {
            long delta = audioTrack.getSampleDurations()[i];
            if (currentTime > lastTime && currentTime <= startTime1) {
                // current sample is still before the new starttime
                startSample1 = currentSample;
            }
            if (currentTime > lastTime && currentTime <= endTime1) {
                // current sample is after the new start time and still before the new endtime
                endSample1 = currentSample;
            }
            lastTime = currentTime;
            // Convert the per-sample delta to seconds via the track timescale.
            currentTime += (double) delta / (double) audioTrack.getTrackMetaData().getTimescale();
            currentSample++;
        }
        CroppedTrack cropperAacTrack = new CroppedTrack(fullAudio, startSample1, endSample1);
        return cropperAacTrack;
    } catch (IOException e) {
        // Fall through and return the uncropped audio below.
        e.printStackTrace();
    }
    return fullAudio;
}
public Runnable runnable = new Runnable() {
#Override
public void run() {
try {
Movie m = MovieCreator.build(video);
List nuTracks = new ArrayList<>();
for (Track t : m.getTracks()) {
if (!"soun".equals(t.getHandler())) {
nuTracks.add(t);
}
}
Track nuAudio = new AACTrackImpl(new FileDataSourceImpl(audio));
Track crop_track = CropAudio(video, nuAudio);
nuTracks.add(crop_track);
m.setTracks(nuTracks);
Container mp4file = new DefaultMp4Builder().build(m);
FileChannel fc = new FileOutputStream(new File(output)).getChannel();
mp4file.writeContainer(fc);
fc.close();
try {
Variables.closeProgressDialog();
} catch (Exception e) {
Log.d(Variables.tag, e.toString());
} finally {
Go_To_preview_Activity();
}
} catch (Exception e) {
Variables.closeProgressDialog();
// Toast.makeText(context, "Something went wrong"+ e.toString(), Toast.LENGTH_SHORT).show();
//Go_To_preview_Activity();
e.printStackTrace();
Log.d(Variables.tag, e.toString());
}
}
};
// NOTE(review): this loop starts a new TrimmVideo AsyncTask for every
// missing split file without waiting for the previous one to finish;
// callers that need strict one-at-a-time ordering must chain the tasks
// (e.g. start the next one from onPostExecute) instead.
ArrayList<String> list1 = splitFileList;
for (int i = 0; i < list1.size(); i++) {
    tempFileName = splitFileList.get(i);
    // Skip files that were already produced by an earlier run.
    String splitFileCheckinDirectory = splitVideofilepath + Constant.SPLIT_VIDEO + "/" + list1.get(i) + Constant.FILE_EXTENSION;
    File myfile = new File(splitFileCheckinDirectory);
    if (!myfile.exists()) {
        new TrimmVideo(getExternalFilesDir(null) + "/" + getFileNameFromFilePath(mFilePath), mStartTImelist.get(i), mEndTimelist.get(i) - mStartTImelist.get(i)).execute();
    }
}
below is my Asynktask which i am trying execute inside for loop
private class TrimmVideo extends AsyncTask<Void, Void, Void> {
private final String mediaPath;
private final double endTime;
private final int length;
private double startTime;
private ProgressDialog progressDialog;
private TrimmVideo(String mediaPath, int startTime, int length) {
this.mediaPath = mediaPath;
this.startTime = startTime;
this.length = length;
this.endTime = this.startTime + this.length;
}
#Override
protected void onPreExecute() {
progressDialog = ProgressDialog.show(VideoPlayActvity.this,
"Trimming videos", "Please wait...", true);
super.onPreExecute();
}
#Override
protected Void doInBackground(Void... params) {
trimVideo();
return null;
}
#Override
protected void onPostExecute(Void result) {
progressDialog.dismiss();
dbHandler.updateFlag(fileModel == null ? tempFileName : fileModel.getfilename());
btn_save_video.setVisibility(View.INVISIBLE);
super.onPostExecute(result);
}
private void trimVideo() {
try {
File file = new File(mediaPath);
FileInputStream fis = new FileInputStream(file);
FileChannel in = fis.getChannel();
Movie movie = MovieCreator.build(in);
List<Track> tracks = movie.getTracks();
movie.setTracks(new LinkedList<Track>());
boolean timeCorrected = false;
// Here we try to find a track that has sync samples. Since we can only start decoding
// at such a sample we SHOULD make sure that the start of the new fragment is exactly
// such a frame
for (Track track : tracks) {
if (track.getSyncSamples() != null && track.getSyncSamples().length > 0) {
if (timeCorrected) {
// This exception here could be a false positive in case we have multiple tracks
// with sync samples at exactly the same positions. E.g. a single movie containing
// multiple qualities of the same video (Microsoft Smooth Streaming file)
//throw new RuntimeException("The startTime has already been corrected by another track with SyncSample. Not Supported.");
} else {
startTime = correctTimeToNextSyncSample(track, startTime);
timeCorrected = true;
}
}
}
for (Track track : tracks) {
long currentSample = 0;
double currentTime = 0;
long startSample = -1;
long endSample = -1;
for (int i = 0; i < track.getDecodingTimeEntries().size(); i++) {
TimeToSampleBox.Entry entry = track.getDecodingTimeEntries().get(i);
for (int j = 0; j < entry.getCount(); j++) {
// entry.getDelta() is the amount of time the current sample covers.
if (currentTime <= startTime) {
// current sample is still before the new starttime
startSample = currentSample;
} else if (currentTime <= endTime) {
// current sample is after the new start time and still before the new endtime
endSample = currentSample;
} else {
// current sample is after the end of the cropped video
break;
}
currentTime += (double) entry.getDelta() / (double) track.getTrackMetaData().getTimescale();
currentSample++;
}
}
movie.addTrack(new CroppedTrack(track, startSample, endSample));
}
IsoFile out = new DefaultMp4Builder().build(movie);
File storagePath = new File(getExternalFilesDir(null) + "/" + Constant.SPLIT_VIDEO + "/");
storagePath.mkdirs();
File myMovie = new File(storagePath, fileModel == null ? "/" + tempFileName + Constant.FILE_EXTENSION : fileModel.getfilename() + Constant.FILE_EXTENSION);
FileOutputStream fos = new FileOutputStream(myMovie);
FileChannel fc = fos.getChannel();
out.getBox(fc);
dbHandler.updateFlag(fileModel == null ? tempFileName : fileModel.getfilename());
fc.close();
fos.close();
fis.close();
in.close();
} catch (Exception e) {
e.printStackTrace();
}
}
private double correctTimeToNextSyncSample(Track track, double cutHere) {
double[] timeOfSyncSamples = new double[track.getSyncSamples().length];
long currentSample = 0;
double currentTime = 0;
for (int i = 0; i < track.getDecodingTimeEntries().size(); i++) {
TimeToSampleBox.Entry entry = track.getDecodingTimeEntries().get(i);
for (int j = 0; j < entry.getCount(); j++) {
if (Arrays.binarySearch(track.getSyncSamples(), currentSample + 1) >= 0) {
// samples always start with 1 but we start with zero therefore +1
timeOfSyncSamples[Arrays.binarySearch(track.getSyncSamples(), currentSample + 1)] = currentTime;
}
currentTime += (double) entry.getDelta() / (double) track.getTrackMetaData().getTimescale();
currentSample++;
}
}
for (double timeOfSyncSample : timeOfSyncSamples) {
if (timeOfSyncSample > cutHere) {
return timeOfSyncSample;
}
}
return timeOfSyncSamples[timeOfSyncSamples.length - 1];
}
}
The splitFileList list contains 2 entries (a, b) and I want to execute them synchronously, one by one: the loop should start at index 0, wait for the AsyncTask for index 0 to complete, and only then move on to index 1. Please suggest how to execute an AsyncTask one by one inside a for loop.
You can't run synchronously by AsyncTask You must use thread some thing like this:
// Run the whole trim loop on ONE background thread so the clips are produced
// strictly one after another, then block the caller until it finishes.
Thread t = new Thread(
new Runnable() {
public void run() {
try {
ArrayList<String> list1 = splitFileList;
for (int i = 0; i < list1.size(); i++) {
tempFileName = splitFileList.get(i);
// Skip split files that already exist on disk.
String splitFileCheckinDirectory = splitVideofilepath + Constant.SPLIT_VIDEO + "/" + list1.get(i) + Constant.FILE_EXTENSION;
File myfile = new File(splitFileCheckinDirectory);
if (!myfile.exists()) {
trimVideo(getExternalFilesDir(null) + "/" + getFileNameFromFilePath(mFilePath), mStartTImelist.get(i), mEndTimelist.get(i) - mStartTImelist.get(i)); //here you can run synchronously work
}
}
} catch (IOException e) {
e.printStackTrace();
}
}
});
t.start();
// NOTE(review): join() blocks the calling thread; doing this on the UI
// thread will freeze the app (ANR) for the whole duration of the trimming.
try {
t.join();
..... // (placeholder in the original answer — code elided)
} catch (Exception e) {
e.printStackTrace();
}
/**
 * Cuts the window [startTime, startTime + length] (seconds) out of the MP4
 * at mediaPath and writes the clip into the SPLIT_VIDEO directory.
 * Blocking — call it from a worker thread, never on the UI thread.
 *
 * @param mediaPath absolute path of the source MP4
 * @param startTime start of the cut in seconds
 * @param length    clip length in seconds
 */
private void trimVideo(String mediaPath, int startTime, int length) {
    try {
        // BUG fixed: the original body referenced an undefined 'endTime' and
        // assigned the double returned by correctTimeToNextSyncSample to the
        // int parameter 'startTime' — neither compiles.  Track both cut
        // points as local doubles instead.
        double start = startTime;
        double end = startTime + length;
        File file = new File(mediaPath);
        // try-with-resources: the original leaked the stream and channel
        // whenever an exception was thrown before the close() calls.
        try (FileInputStream fis = new FileInputStream(file);
             FileChannel in = fis.getChannel()) {
            Movie movie = MovieCreator.build(in);
            List<Track> tracks = movie.getTracks();
            movie.setTracks(new LinkedList<Track>());
            boolean timeCorrected = false;
            // Decoding can only begin at a sync sample, so snap the start of
            // the cut to the next one (first sync-sample track wins).
            for (Track track : tracks) {
                if (track.getSyncSamples() != null && track.getSyncSamples().length > 0) {
                    if (!timeCorrected) {
                        start = correctTimeToNextSyncSample(track, start);
                        timeCorrected = true;
                    }
                }
            }
            for (Track track : tracks) {
                long currentSample = 0;
                double currentTime = 0;
                long startSample = -1;
                long endSample = -1;
                for (int i = 0; i < track.getDecodingTimeEntries().size(); i++) {
                    TimeToSampleBox.Entry entry = track.getDecodingTimeEntries().get(i);
                    for (int j = 0; j < entry.getCount(); j++) {
                        // entry.getDelta() is the duration the sample covers.
                        if (currentTime <= start) {
                            startSample = currentSample;    // still before the cut
                        } else if (currentTime <= end) {
                            endSample = currentSample;      // inside the cut
                        } else {
                            break;                          // past the cut
                        }
                        currentTime += (double) entry.getDelta() / (double) track.getTrackMetaData().getTimescale();
                        currentSample++;
                    }
                }
                movie.addTrack(new CroppedTrack(track, startSample, endSample));
            }
            IsoFile out = new DefaultMp4Builder().build(movie);
            File storagePath = new File(getExternalFilesDir(null) + "/" + Constant.SPLIT_VIDEO + "/");
            storagePath.mkdirs();
            File myMovie = new File(storagePath, fileModel == null ? "/" + tempFileName + Constant.FILE_EXTENSION : fileModel.getfilename() + Constant.FILE_EXTENSION);
            try (FileOutputStream fos = new FileOutputStream(myMovie);
                 FileChannel fc = fos.getChannel()) {
                out.getBox(fc);
            }
            dbHandler.updateFlag(fileModel == null ? tempFileName : fileModel.getfilename());
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
/**
 * Returns the time (seconds) of the first sync sample strictly after
 * cutHere, or the last sync sample when cutHere lies beyond all of them.
 */
private double correctTimeToNextSyncSample(Track track, double cutHere) {
    double[] timeOfSyncSamples = new double[track.getSyncSamples().length];
    long currentSample = 0;
    double currentTime = 0;
    for (int i = 0; i < track.getDecodingTimeEntries().size(); i++) {
        TimeToSampleBox.Entry entry = track.getDecodingTimeEntries().get(i);
        for (int j = 0; j < entry.getCount(); j++) {
            // Sync-sample numbers are 1-based while currentSample is 0-based.
            // Cache the index — the original ran the binary search twice per
            // sample for no benefit.
            int idx = Arrays.binarySearch(track.getSyncSamples(), currentSample + 1);
            if (idx >= 0) {
                timeOfSyncSamples[idx] = currentTime;
            }
            currentTime += (double) entry.getDelta() / (double) track.getTrackMetaData().getTimescale();
            currentSample++;
        }
    }
    for (double timeOfSyncSample : timeOfSyncSamples) {
        if (timeOfSyncSample > cutHere) {
            return timeOfSyncSample;
        }
    }
    // cutHere is past the last sync sample — fall back to the last one.
    return timeOfSyncSamples[timeOfSyncSamples.length - 1];
}
If you can't implement @Hazem's answer, you can go with another approach.
For this you need to maintain counter for each of your data and forget about for loop.
First you need to call asynctask for first position of your list.Something like this:
// Kick off the FIRST trim only; each subsequent one is started from
// onPostExecute so the AsyncTasks run strictly one after another.
if (list1.size() > 0) {
    fileCounter = 0;
    tempFileName = splitFileList.get(fileCounter);
    String splitFileCheckinDirectory = splitVideofilepath + Constant.SPLIT_VIDEO + "/" + tempFileName + Constant.FILE_EXTENSION;
    File myfile = new File(splitFileCheckinDirectory);
    if (!myfile.exists()) {
        // BUG fixed: the original indexed the time lists with 'i', which
        // does not exist in this scope — use fileCounter.
        new TrimmVideo(getExternalFilesDir(null) + "/" + getFileNameFromFilePath(mFilePath), mStartTImelist.get(fileCounter), mEndTimelist.get(fileCounter) - mStartTImelist.get(fileCounter)).execute();
    }
}
Then in onPostExecute of your asyncTask
#Override
protected void onPostExecute(Void result) {
progressDialog.dismiss();
dbHandler.updateFlag(fileModel == null ? tempFileName : fileModel.getfilename());
btn_save_video.setVisibility(View.INVISIBLE);
super.onPostExecute(result);
// Update your counter here
fileCounter++;
// Check if your incremented counter doesn't exceed your list size
if(fileCounter < list1.size()) {
// Then call your asynctask again with updated counter data
empFileName = splitFileList.get(fileCounter);
String splitFileCheckinDirectory = splitVideofilepath + Constant.SPLIT_VIDEO + "/" + tempFileName + Constant.FILE_EXTENSION;
File myfile = new File(splitFileCheckinDirectory);
if (!myfile.exists()) {
new TrimmVideo(getExternalFilesDir(null) + "/" + getFileNameFromFilePath(mFilePath), mStartTImelist.get(i), mEndTimelist.get(i) - mStartTImelist.get(i)).execute();
}
}
}
Hope this will help you.
I have a function which is supposed to return an object of arraylists but for some reason it gets stuck in the return statement. Everything before the return statement is working. I had an error where the ArrayLists only contained a single value which makes the function work, but of course returns the wrong data.
package com.burninglobster.TP;
import java.util.ArrayList;
import java.util.List;
import android.app.Activity;
import android.content.Context;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.os.Bundle;
import android.util.Log;
import android.widget.LinearLayout;
public class Chart2Activity extends Activity {
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Object graphdata[] = graphsetup();
Log.d("This message is NOT shown", "");
List<Double> distances = (List<Double>) graphdata[0];
List<Double> accall = (List<Double>) graphdata[1];
List<Double> accdir = (List<Double>) graphdata[2];
List<Double> accdist = (List<Double>) graphdata[3];
List<Double> meandir = (List<Double>) graphdata[4];
List<Double> meandist = (List<Double>) graphdata[5];
linegraph lgraph = new linegraph();
setContentView(R.layout.splot);
LinearLayout layout1 = (LinearLayout) findViewById(R.id.splot2);
layout1.addView(lgraph.getView1(this, distances, accall));
LinearLayout layout2 = (LinearLayout) findViewById(R.id.splot3);
layout2.addView(lgraph.getView2(this, distances, accdir, meandir));
LinearLayout layout3 = (LinearLayout) findViewById(R.id.splot4);
layout3.addView(lgraph.getView2(this, distances, accdist, meandist));
}
public Object[] graphsetup() {
SharedPreferences rprefs;
rprefs = getSharedPreferences("com.burninglobster.TP.tprefs",
Context.MODE_WORLD_READABLE);
double setdist = rprefs.getFloat("setdist", 0);
String[] sourcesarray = rprefs.getString("sourcesarray", "Standarddef")
.split(",");
String setdisc = sourcesarray[rprefs.getInt("spindiscpos", 0)];
DBHelper dbHelper = new DBHelper(Chart2Activity.this);
SQLiteDatabase db;
db = dbHelper.getReadableDatabase();
String graphquery = "SELECT " + DBHelper.SHOOTER + "," + DBHelper.WDIR
+ "," + DBHelper.WSTR + "," + DBHelper.SMOD + ","
+ DBHelper.DIST + "," + DBHelper.R_DIST + ","
+ DBHelper.OS_DIST + " FROM " + DBHelper.TABLE + " WHERE "
+ DBHelper.SHOOTER + "='" + setdisc + "'" + " ORDER BY "
+ DBHelper.DIST;
Cursor graphcursor = db.rawQuery(graphquery, null);
int rows = graphcursor.getCount();
int ishooter = graphcursor.getColumnIndex(DBHelper.SHOOTER);
int iwdir = graphcursor.getColumnIndex(DBHelper.WDIR);
int iwstr = graphcursor.getColumnIndex(DBHelper.WSTR);
int ismod = graphcursor.getColumnIndex(DBHelper.SMOD);
int idist = graphcursor.getColumnIndex(DBHelper.DIST);
int irdist = graphcursor.getColumnIndex(DBHelper.R_DIST);
int iosdist = graphcursor.getColumnIndex(DBHelper.OS_DIST);
List<Double> accall = new ArrayList<Double>();
List<Double> accdir = new ArrayList<Double>();
List<Double> meandir = new ArrayList<Double>();
List<Double> accdist = new ArrayList<Double>();
List<Double> meandist = new ArrayList<Double>();
List<Double> accdirtemp = new ArrayList<Double>();
List<Double> accdisttemp = new ArrayList<Double>();
List<Double> distances = new ArrayList<Double>();
double dalla = 0;
double ddira = 0;
double ddista = 0;
double ddirm = 0;
double ddistm = 0;
double currentdist = 0;
graphcursor.moveToFirst();
if (rows > 0) {
currentdist = graphcursor.getDouble(idist);
}
for (int i = 0; i < rows; i++) {
// REMOVED ERROR CAUSING SINGLE VALUE IN ARRAYLISTS:
// currentdist=graphcursor.getDouble(idist);
if (graphcursor.getDouble(idist) < (currentdist + 5)) {
accdirtemp.add(graphcursor.getDouble(irdist));
accdisttemp.add(graphcursor.getDouble(iosdist));
} else {
for (int u = 0; u < accdirtemp.size(); u++) {
ddira += Math.pow(accdirtemp.get(u), 2);
ddista += Math.pow(accdisttemp.get(u), 2);
dalla += Math.pow(accdirtemp.get(u), 2)
+ Math.pow(accdisttemp.get(u), 2);
ddirm += accdirtemp.get(u);
ddistm += accdisttemp.get(u);
}
accall.add(Math.sqrt(dalla / accdirtemp.size()));
Double.toString(Math.sqrt(dalla / accdirtemp.size())));
accdir.add(Math.sqrt(ddira / accdirtemp.size()));
accdist.add(Math.sqrt(ddista / accdirtemp.size()));
meandir.add(ddirm / accdirtemp.size());
meandist.add(ddistm / accdirtemp.size());
distances.add(currentdist);
dalla = 0;
ddira = 0;
ddirm = 0;
ddista = 0;
ddistm = 0;
currentdist = graphcursor.getDouble(idist) + 5;
accdirtemp.clear();
accdisttemp.clear();
accdirtemp.add(graphcursor.getDouble(irdist));
accdisttemp.add(graphcursor.getDouble(iosdist));
}
graphcursor.moveToNext();
}
for (int u = 0; u < accdirtemp.size(); u++) {
ddira += Math.pow(accdirtemp.get(u), 2);
ddista += Math.pow(accdisttemp.get(u), 2);
dalla += Math.pow(accdirtemp.get(u), 2)
+ Math.pow(accdisttemp.get(u), 2);
ddirm += accdirtemp.get(u);
ddistm += accdisttemp.get(u);
}
accall.add(Math.sqrt(dalla / accdirtemp.size()));
accdir.add(Math.sqrt(ddira / accdirtemp.size()));
accdist.add(Math.sqrt(ddista / accdirtemp.size()));
meandir.add(ddirm / accdirtemp.size());
meandist.add(ddistm / accdirtemp.size());
distances.add(currentdist);
db.close();
dbHelper.close();
Log.d("This message", " is shown");
return new Object[] { distances, accall, accdir, accdist, meandir,
meandist };
}
}
I have a similar function that works just fine and I can't see the difference:
package com.burninglobster.TP;
import java.util.ArrayList;
import java.util.List;
import android.app.Activity;
import android.content.Context;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.os.Bundle;
import android.util.Log;
import android.widget.LinearLayout;
public class ChartActivity extends Activity {
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Object plotdata[] = plotsetup();
Log.d("This message", " is also shown");
List<Double> rd1 = (List<Double>) plotdata[0];
List<Double> osd1 = (List<Double>) plotdata[1];
List<Double> rd2 = (List<Double>) plotdata[2];
List<Double> osd2 = (List<Double>) plotdata[3];
List<Double> rd3 = (List<Double>) plotdata[4];
List<Double> osd3 = (List<Double>) plotdata[5];
scatterplot plot = new scatterplot();
setContentView(R.layout.tplot);
LinearLayout layout = (LinearLayout) findViewById(R.id.chart);
layout.addView(plot.getView(this, rd1, osd1, rd2, osd2, rd3, osd3));
}
public Object[] plotsetup() {
SharedPreferences rprefs;
rprefs = getSharedPreferences("com.burninglobster.TP.tprefs",
Context.MODE_WORLD_READABLE);
double setdist = rprefs.getFloat("setdist", 0);
String[] sourcesarray = rprefs.getString("sourcesarray", "Standarddef")
.split(",");
String setdisc = sourcesarray[rprefs.getInt("spindiscpos", 0)];
DBHelper dbHelper = new DBHelper(ChartActivity.this);
SQLiteDatabase db;
db = dbHelper.getReadableDatabase();
String plotquery = "SELECT " + DBHelper.SHOOTER + "," + DBHelper.WDIR
+ "," + DBHelper.WSTR + "," + DBHelper.SMOD + ","
+ DBHelper.DIST + "," + DBHelper.R_DIST + ","
+ DBHelper.OS_DIST + " FROM " + DBHelper.TABLE;
Cursor plotcursor = db.rawQuery(plotquery, null);
int ishooter = plotcursor.getColumnIndex(DBHelper.SHOOTER);
int iwdir = plotcursor.getColumnIndex(DBHelper.WDIR);
int iwstr = plotcursor.getColumnIndex(DBHelper.WSTR);
int ismod = plotcursor.getColumnIndex(DBHelper.SMOD);
int idist = plotcursor.getColumnIndex(DBHelper.DIST);
int irdist = plotcursor.getColumnIndex(DBHelper.R_DIST);
int iosdist = plotcursor.getColumnIndex(DBHelper.OS_DIST);
int rows = plotcursor.getCount();
List<Double> rd1 = new ArrayList<Double>();
List<Double> osd1 = new ArrayList<Double>();
List<Double> rd2 = new ArrayList<Double>();
List<Double> osd2 = new ArrayList<Double>();
List<Double> rd3 = new ArrayList<Double>();
List<Double> osd3 = new ArrayList<Double>();
plotcursor.moveToFirst();
int disccount = 0;
int array1 = 0;
int array2 = 0;
int array3 = 0;
double cursordist;
for (int i = 0; i < rows; i++) {
if (plotcursor.getString(0).equals(setdisc)) {
cursordist = plotcursor.getDouble(idist);
if (cursordist > (setdist - 5) && cursordist < (setdist + 5)) {
rd1.add(plotcursor.getDouble(irdist));
osd1.add(plotcursor.getDouble(iosdist));
array1++;
} else if (cursordist > (setdist)
&& cursordist < (setdist + 10)) {
rd2.add(plotcursor.getDouble(irdist));
osd2.add(plotcursor.getDouble(iosdist));
array2++;
} else if (cursordist > (setdist - 10)
&& cursordist < (setdist)) {
rd3.add(plotcursor.getDouble(irdist));
osd3.add(plotcursor.getDouble(iosdist));
array3++;
}
disccount++;
}
plotcursor.moveToNext();
}
db.close();
dbHelper.close();
Log.d("This message", " is shown");
return new Object[] { rd1, osd1, rd2, osd2, rd3, osd3 };
}
}
I added a 'Log.d' just before the return statement and then another just after the function call in the activity and only the first one is shown. It freezes for ½-1 minute and then shows the dialogue to wait or kill. I don't know how to get more info on the problem. Suggestions?
Don't know what the problem is, and you will have to explain "stuck in the return statement". Code does not simply get "stuck".
But you will help yourself a lot if you break the problem up.
For example, if you created a class and handled instances of the class rather than untyped arrays of objects, your code would be much easier to write, much easier to debug and much easier to maintain. Doing so will also enable the compiler to catch a lot of errors before you need to ask for help :)
Whenever you are tempted to use 'Object', you should stop and question yourself. Of course, there are times when Object is useful and times when you have no choice but they are exceptional and should only be done for good reason - not because you haven't taken the time to think of a correct solution.
You should also choose better names for your variables and use proper Java naming conventions. Here, I've named the object Thing because your code does not give proper clues as to what dalla, ddira, ddista etc are and how the relate to each other, either in comments or code (which is a bad thing - this code might only ever be seen by you but when you come back in a year, you will wish that you'd done these things).
For example:
class Thing{
    // Example data holder the answer proposes to replace the untyped Object[]
    // the asker was returning.  Field names intentionally mirror the asker's
    // original locals; their domain meaning is not explained in the question.
    double dalla = 0;        // accumulates squared direction + squared distance terms (see example loop below)
    double ddira = 0;        // accumulates squared direction terms
    double ddista = 0;       // accumulates squared distance terms
    double ddirm = 0;        // accumulates raw direction terms (for a mean, presumably — unverified)
    double ddistm = 0;       // accumulates raw distance terms
    double currentdist = 0;  // not used in the example loop; meaning unknown
}
Then, in your loops or methods or whatever,
ArrayList<Thing> things = new ArrayList<Thing>();
...
Thing thing = new Thing();
for (int u = 0; u < accdirtemp.size(); u++) {
thing.ddira += Math.pow(accdirtemp.get(u), 2);
thing.ddista += Math.pow(accdisttemp.get(u), 2);
thing.dalla += Math.pow(accdirtemp.get(u), 2)
+ Math.pow(accdisttemp.get(u), 2);
thing.ddirm += accdirtemp.get(u);
thing.ddistm += accdisttemp.get(u);
}
things.add(thing);
...
And so on. Take the time now to refactor your code before it becomes even more of a nightmare to read.
OK, so I'm feeling quite stupid for posting this and wasting coders' time!!
My problem was to be found elsewhere. The real problem was, that I was using Log.d wrong. If the text string is empty, it seems that it doesn't get sent to the log:
Log.d("This returns", "something"); appears in the log, but Log.d("This returns nothing", ""); does not — if the message string is empty, the entry is silently dropped.
I am creating a tuner for Android (similar to a guitar tuner) and I am wondering how to allow the tuner to run continuously (for a couple minutes or so). I don't want it to be a service that runs in the background, just while the user is in my app.
I have successfully used the AudioRecord class and am obtaining data that seems correct. I am in the process of filtering this data and finding the fundamental frequency of the input signal, but need help figuring out how to allow my tuner to run continuously.
This is what my code looks like so far:
import android.app.Activity;
import android.graphics.Color;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioTrack;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
import com.dustin.tuner2.Complex;
import com.dustin.tuner2.FFT;
public class Tuner2 extends Activity implements OnClickListener {
Button btnTune;              // toggles tuning on/off
TextView fft;                // shows FFT diagnostics text
TextView freq;               // assigned in onCreate; never written elsewhere in this class
TextView results;            // raw captured-sample dump
MediaRecorder recorder;      // NOTE(review): never used in this class — confirm and remove
AudioRecord tuner;           // microphone capture source, created in startTuning()
boolean startTuning = true;  // true = the next button press starts tuning
int audioSource = MediaRecorder.AudioSource.MIC;
// Sample rate taken from the device's native output rate for STREAM_SYSTEM.
int sampleRateInHz = AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_SYSTEM);
int channelConfig = AudioFormat.CHANNEL_CONFIGURATION_MONO;
int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
int bufferSizeInBytes;       // capture buffer length; fixed to 4096 in onCreate
int samples;                 // number of shorts actually read by the last acquire()
short[] audioBuffer;         // NOTE(review): unused — audioData is the live buffer; confirm and remove
short[] audioData;           // most recent capture, 16-bit PCM, one sample per element
double[] temp;               // NOTE(review): unused — confirm and remove
String fileName;             // NOTE(review): unused — confirm and remove
/** Called when the activity is first created. */
#Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
btnTune = (Button)findViewById(R.id.btnTune);
freq = (TextView)findViewById(R.id.freq);
btnTune.setOnClickListener(this);
bufferSizeInBytes = 4096;
//bufferSizeInBytes = AudioRecord.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);
results = (TextView)findViewById(R.id.results);
fft = (TextView)findViewById(R.id.fft);
}
#Override
public void onClick(View v) {
// TODO Auto-generated method stub
if (v == btnTune)
{
onTune(startTuning);
if (startTuning) {
((Button)v).setText("Stop Tuning");
}
else {
((Button)v).setText("Start Tuninig");
}
startTuning = !startTuning;
}
}
//------------------------------------------------------------>
/** Starts a capture when {@code start} is true, otherwise stops the current one. */
private void onTune(boolean start) {
    if (!start) {
        Toast.makeText(getApplicationContext(), "Tuning Stopped", Toast.LENGTH_SHORT).show();
        tuner.stop();
        return;
    }
    startTuning();
}
private void startTuning()
{
    // Creates a recorder over the configured source/rate/format and runs one
    // capture-analyse-display pass.
    // NOTE(review): a fresh AudioRecord is allocated on every start and the
    // previous instance is never release()d — confirm whether this leaks
    // native recording resources across start/stop cycles.
    tuner = new AudioRecord(audioSource, sampleRateInHz, channelConfig, audioFormat, bufferSizeInBytes);
    audioData = new short[bufferSizeInBytes];
    trigger();
}
public void trigger(){
    // One capture -> FFT -> display pass.  Runs synchronously on the caller's
    // (UI) thread, so the blocking read inside acquire() stalls the UI for the
    // duration of the capture.
    acquire();
    computeFFT();
    display();
}
/**
 * Blocks while reading one buffer of PCM data from the microphone into
 * {@link #audioData}; {@link #samples} receives the number of shorts read.
 */
public void acquire(){
    try {
        tuner.startRecording();
        samples = tuner.read(audioData, 0, bufferSizeInBytes);
    }
    catch (Throwable t){
        // Fix: the original empty catch silently swallowed every failure and
        // left 'samples' holding a stale count, so computeFFT()/display()
        // would happily reprocess old data.
        samples = 0;
        Log.e("Tuner2", "audio capture failed", t);
    }
}
/**
 * Converts the captured PCM buffer to doubles, runs an FFT over it and shows
 * per-bin magnitudes in the {@code fft} TextView.
 */
public void computeFFT(){
    // Fix: audioData is a short[] of 16-bit PCM samples, so every element is
    // already a complete sample and only needs scaling.  The previous loop
    // recombined pairs of shorts as if they were raw *bytes* (mask + shift),
    // which corrupted every sample and left the second half of micBufferData
    // permanently zero.
    double[] micBufferData = new double[bufferSizeInBytes];
    final double amplification = 100.0; // arbitrary gain, choose as you like
    for (int i = 0; i < bufferSizeInBytes; i++) {
        micBufferData[i] = amplification * (audioData[i] / 32768.0);
    }
    // Create Complex array for use in FFT.
    Complex[] fftTempArray = new Complex[bufferSizeInBytes];
    for (int i = 0; i < bufferSizeInBytes; i++)
    {
        fftTempArray[i] = new Complex(micBufferData[i], 0);
    }
    // Obtain array of FFT data.
    final Complex[] fftArray = FFT.fft(fftTempArray);
    // NOTE(review): this inverts the *time-domain* input, not the FFT output —
    // confirm whether FFT.ifft(fftArray) was intended.
    final Complex[] fftInverse = FFT.ifft(fftTempArray);
    // Magnitude of each FFT bin.
    double[] magnitude = new double[fftArray.length];
    for (int i = 0; i < fftArray.length; i++){
        magnitude[i] = fftArray[i].abs();
    }
    fft.setTextColor(Color.GREEN);
    fft.setText("fftArray is "+ fftArray[500] +" and fftTempArray is "+fftTempArray[500] + " and fftInverse is "+fftInverse[500]+" and audioData is "+audioData[500]+ " and magnitude is "+ magnitude[1] + ", "+magnitude[500]+", "+magnitude[1000]+" You rock dude!");
    for (int i = 2; i < samples; i++){
        fft.append(" " + magnitude[i] + " Hz");
    }
}
/** Dumps the raw captured samples (from index 1 up to {@code samples}) into the results view. */
public void display(){
    results.setTextColor(Color.BLUE);
    results.setText(audioData[1] + "");
    int index = 2;
    while (index < samples) {
        results.append(" " + audioData[index]);
        index++;
    }
    results.invalidate();
}
Do I need to change something concerning the button and what it does when pressed? Would it just involve the buffer size? How often I compute the FFT?
Unless I am misunderstanding, you could just use a while loop that checks a boolean variable. When the user clicks the stop button set that variable to false.
while (tuning) {
trigger();
}
you should also probably introduce a delay between these calls. It would also be wise to run this code on a thread other than the UI thread. See http://developer.android.com/resources/articles/painless-threading.html
A simple example of what I mean would be to do
new Thread(new Runnable() {
#Override
public void run() {
while (tuning) {
trigger();
try {
Thread.sleep(SLEEP_TIME_MS);
} catch (InterruptedException e) {
// handle exception
}
}
}
}).start();
but then you have to worry about updating the UI as you cannot do that from this Thread. The best option is to use AsyncTask http://developer.android.com/reference/android/os/AsyncTask.html
I made a little signal processing app. It processes audio signal (morse code) on certain frequency with Goerztel algorithm. Application saves temporary file to the filesystem and after recording is finished, starts to detect signals. Now I got the result with bunch of magnitudes.
I don't really know what to read from those magnitudes. How can I decode the morse code from those magnitudes? How can I read them? Tried to find references, but nowhere is explained what is the result and how to read it.
EDIT:
My Morse code application is made with Delphi and uses the Windows Beep function to send signals at a certain frequency. I'm using 1200 Hz for the signals. The pauses between signals and words, and the Morse beep durations, follow the timing described on Wikipedia. Everything is accurate.
Goertzel.java:
/**
 * Single-bin Goertzel filter: measures the energy of one target frequency in a
 * stream of samples without computing a full FFT.
 *
 * Usage: construct, call {@link #initGoertzel()}, feed samples through
 * {@link #processSample(double)}, then read {@link #getMagnitudeSquared()}.
 */
public class Goertzel {

    private float samplingRate;    // samples per second of the input stream
    private float targetFrequency; // frequency (Hz) this bin detects
    private int n;                 // block size used to quantise the bin

    // Filter coefficient and the two delayed state values of the recurrence.
    private double coeff, Q1, Q2;
    // Used to recover the real/imaginary parts of the detected component.
    private double sine, cosine;

    public Goertzel(float samplingRate, float targetFrequency, int inN) {
        this.samplingRate = samplingRate;
        this.targetFrequency = targetFrequency;
        n = inN;
        // Provisional coefficients from the exact frequency ratio;
        // initGoertzel() replaces them with bin-quantised values.
        final double w = 2 * Math.PI * (targetFrequency / samplingRate);
        sine = Math.sin(w);
        cosine = Math.cos(w);
        coeff = 2 * cosine;
    }

    /** Clears the filter state so a new block can be processed. */
    public void resetGoertzel() {
        Q1 = 0;
        Q2 = 0;
    }

    /** Recomputes the coefficients for the nearest DFT bin and resets state. */
    public void initGoertzel() {
        final float floatN = (float) n;
        final int k = (int) (0.5 + ((floatN * targetFrequency) / samplingRate));
        final double omega = (2.0 * Math.PI * k) / floatN;
        sine = Math.sin(omega);
        cosine = Math.cos(omega);
        coeff = 2.0 * cosine;
        resetGoertzel();
    }

    /** Feeds one sample through the second-order recurrence. */
    public void processSample(double sample) {
        final double Q0 = coeff * Q1 - Q2 + sample;
        Q2 = Q1;
        Q1 = Q0;
    }

    /** Writes the real (index 0) and imaginary (index 1) parts into {@code parts} and returns it. */
    public double[] getRealImag(double[] parts) {
        parts[0] = (Q1 - Q2 * cosine);
        parts[1] = (Q2 * sine);
        return parts;
    }

    /** Squared magnitude of the target-frequency component (no phase information). */
    public double getMagnitudeSquared() {
        return (Q1 * Q1 + Q2 * Q2 - Q1 * Q2 * coeff);
    }
}
SoundCompareActivity.java
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
public class SoundCompareActivity extends Activity {
private static final int RECORDER_SAMPLE_RATE = 8000; // at least 2 times
// higher than sound
// frequency,
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_CONFIGURATION_MONO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private AudioRecord recorder = null;
private int bufferSize = 0;
private Thread recordingThread = null;
private boolean isRecording = false;
private Button startRecBtn;
private Button stopRecBtn;
/** Called when the activity is first created. */
#Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
startRecBtn = (Button) findViewById(R.id.button1);
stopRecBtn = (Button) findViewById(R.id.button2);
startRecBtn.setEnabled(true);
stopRecBtn.setEnabled(false);
bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLE_RATE,
RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
startRecBtn.setOnClickListener(new OnClickListener() {
#Override
public void onClick(View v) {
Log.d("SOUNDCOMPARE", "Start Recording");
startRecBtn.setEnabled(false);
stopRecBtn.setEnabled(true);
stopRecBtn.requestFocus();
startRecording();
}
});
stopRecBtn.setOnClickListener(new OnClickListener() {
#Override
public void onClick(View v) {
Log.d("SOUNDCOMPARE", "Stop recording");
startRecBtn.setEnabled(true);
stopRecBtn.setEnabled(false);
startRecBtn.requestFocus();
stopRecording();
}
});
}
private void startRecording() {
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
RECORDER_SAMPLE_RATE, RECORDER_CHANNELS,
RECORDER_AUDIO_ENCODING, bufferSize);
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
#Override
public void run() {
writeAudioDataToTempFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
private String getTempFilename() {
File file = new File(getFilesDir(), "tempaudio");
if (!file.exists()) {
file.mkdirs();
}
File tempFile = new File(getFilesDir(), "signal.raw");
if (tempFile.exists())
tempFile.delete();
return (file.getAbsolutePath() + "/" + "signal.raw");
}
private void writeAudioDataToTempFile() {
byte data[] = new byte[bufferSize];
String filename = getTempFilename();
FileOutputStream os = null;
try {
os = new FileOutputStream(filename);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
int read = 0;
if (os != null) {
while (isRecording) {
read = recorder.read(data, 0, bufferSize);
if (read != AudioRecord.ERROR_INVALID_OPERATION) {
try {
os.write(data);
} catch (IOException e) {
e.printStackTrace();
}
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
private void deleteTempFile() {
File file = new File(getTempFilename());
file.delete();
}
private void stopRecording() {
if (recorder != null) {
isRecording = false;
recorder.stop();
recorder.release();
recorder = null;
recordingThread = null;
}
new MorseDecoder().execute(new File(getTempFilename()));
}
}
MorseDecoder.java:
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.os.AsyncTask;
import android.util.Log;
public class MorseDecoder extends AsyncTask<File, Void, Void> {
private FileInputStream is = null;
#Override
protected Void doInBackground(File... files) {
int index;
//double magnitudeSquared;
double magnitude;
int bufferSize = AudioRecord.getMinBufferSize(8000,
AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT);
Goertzel g = new Goertzel(8000, 1200, bufferSize);
g.initGoertzel();
for (int i = 0; i < files.length; i++) {
byte[] data = new byte[bufferSize];
try {
is = new FileInputStream(files[i]);
while(is.read(data) != -1) {
ShortBuffer sbuf = ByteBuffer.wrap(data).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer();
short[] audioShorts = new short[sbuf.capacity()];
sbuf.get(audioShorts);
float[] audioFloats = new float[audioShorts.length];
for (int j = 0; j < audioShorts.length; j++) {
audioFloats[j] = ((float)audioShorts[j]) / 0x8000;
}
for (index = 0; index < audioFloats.length; index++) {
g.processSample(data[index]);
}
magnitude = Math.sqrt(g.getMagnitudeSquared());
Log.d("SoundCompare", "Relative magnitude = " + magnitude);
g.resetGoertzel();
}
is.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
}
EDIT2:
Notices some bugs in processing samples. Changed code in while loop.
while(is.read(data) != -1) {
ShortBuffer sbuf = ByteBuffer.wrap(data).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer();
short[] audioShorts = new short[sbuf.capacity()];
sbuf.get(audioShorts);
float[] audioFloats = new float[audioShorts.length];
for (int j = 0; j < audioShorts.length; j++) {
audioFloats[j] = ((float)audioShorts[j]) / 0x8000;
}
for (index = 0; index < audioFloats.length; index++) {
g.processSample(audioFloats[index]);
magnitude = Math.sqrt(g.getMagnitudeSquared());
Log.d("SoundCompare", "Relative magnitude = " + magnitude);
}
//magnitude = Math.sqrt(g.getMagnitudeSquared());
//Log.d("SoundCompare", "Relative magnitude = " + magnitude);
g.resetGoertzel();
}
Regards,
evilone
The output of your Goertzel filter will increase when a tone within its passband is present, and then decrease when the tone is removed. In order to detect pulses of a tone, e.g. morse code, you need some kind of threshold detector on the output of the filter which will just give a boolean value for "tone present" / "tone not present" on a sample-by-sample basis. Try plotting the output values and it should be obvious once you see it in graphical form.
Plot the signal magnitudes on a graph versus time (some CW decoding apps for the PC do this in real-time). Now figure out what the graph for each Morse code symbol should look like. Then study some pattern matching algorithms. If there is enough noise present, you may want to try some statistical pattern matching methods.
Here's the Wikipedia link for proper Morse Code timing.