SiliCompressor: no sound after video is compressed - Android

I have been compressing images without any problems. Recently my app required video compression functionality to be added. Again, SiliCompressor does the job, but for some reason there is no audio in the output file.
I am using this code inside a Runnable:
String videoUriStr; // Passed as a variable
String folderPath = activity.getExternalFilesDir("/").getAbsolutePath() + "/My Folder";
String videoPath = SiliCompressor.with(context).compressVideo(videoUriStr, folderPath);
I am able to play the compressed video, but without sound (on uploading to Firebase Storage, the URL shows a black screen but the correct video length; I will figure that out separately). My main concern is the sound.
I have googled a lot and found two solutions, of which the second seems promising, but I am still looking for a way to implement it.
The first is to downgrade implementation 'com.googlecode.mp4parser:isoparser:1.1.22' to implementation 'com.googlecode.mp4parser:isoparser:1.0.6', which doesn't work.
The second, which I found here, says: "the issue seems to be with the audio encoder, I changed the audio encoder to HE_AAC and it works now". Can anyone please help me with this?
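To make that second suggestion concrete, here is a rough sketch of what switching the AAC profile to HE-AAC might look like with Android's MediaCodec. This is only an illustration, assuming the compressor's audio-encoding step were patched by hand (SiliCompressor does not expose this setting, so it would have to be changed in a fork); the sample rate, channel count and bit rate below are placeholders.
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;

import java.io.IOException;

// Hypothetical helper, not part of SiliCompressor's public API.
MediaCodec createHeAacEncoder() throws IOException {
    // Placeholder sample rate / channel count / bit rate.
    MediaFormat format = MediaFormat.createAudioFormat(MediaFormat.MIMETYPE_AUDIO_AAC, 44100, 2);
    // The key change: request the HE-AAC profile instead of the default AAC-LC.
    format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectHE);
    format.setInteger(MediaFormat.KEY_BIT_RATE, 64000);

    MediaCodec encoder = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_AUDIO_AAC);
    encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    return encoder;
}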

So, answering my own question. I didn't get much help on this, so I decided to switch to the FFmpeg library.
Gradle:
implementation 'com.writingminds:FFmpegAndroid:0.3.2'
Load Library:
FFmpeg fFmpeg;

private void LoadFfmpegLibrary() {
    SimpleDateFormat formatter = new SimpleDateFormat("dd-MM-yyyy_hh-mm", Locale.CANADA);
    Date now = new Date();
    String videoFileName = "Video-" + formatter.format(now) + ".mp4";
    if (fFmpeg == null) {
        fFmpeg = FFmpeg.getInstance(context);
        try {
            fFmpeg.loadBinary(new LoadBinaryResponseHandler() {
                @Override
                public void onStart() {
                }

                @Override
                public void onFailure() {
                }

                @Override
                public void onSuccess() {
                }

                @Override
                public void onFinish() {
                    Compress(videoFileName);
                }
            });
        } catch (FFmpegNotSupportedException e) {
            e.printStackTrace();
        }
    } else {
        Compress(videoFileName);
    }
}
Then compress the video:
private void Compress(String fileName) {
    String outputPath = getExternalFilesDir("/").getAbsolutePath() + "/My Folder/" + fileName;
    String[] commandArray = new String[]{"-y", "-i", originalFileUriStr, "-s", "720x480", "-r", "25",
            "-vcodec", "mpeg4", "-b:v", "300k", "-b:a", "48000", "-ac", "2", "-ar", "22050", outputPath};
    final ProgressDialog dialog = new ProgressDialog(activity);
    try {
        fFmpeg.execute(commandArray, new ExecuteBinaryResponseHandler() {
            @Override
            public void onStart() {
                dialog.setMessage("Compressing... please wait");
            }

            @Override
            public void onProgress(String message) {
                Log.e("FFmpeg onProgress? ", message);
            }

            @Override
            public void onFailure(String message) {
                Log.e("FFmpeg onFailure? ", message);
            }

            @Override
            public void onSuccess(String message) {
                Log.e("FFmpeg onSuccess? ", message);
            }

            @Override
            public void onFinish() {
                Uri compressedOutputUri = Uri.fromFile(new File(outputPath));
            }
        });
    } catch (FFmpegCommandAlreadyRunningException e) {
        e.printStackTrace();
    }
}
And now the sound works perfectly.
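For anyone wiring this up, a minimal usage sketch: originalFileUriStr is assumed to be a field of the same class holding the source video path, and onVideoPicked is a hypothetical callback name. The FFmpeg wrapper runs its work asynchronously, so no extra thread is needed.
// Hypothetical caller; originalFileUriStr is assumed to be a field of the enclosing class.
private void onVideoPicked(String pickedVideoPath) {
    originalFileUriStr = pickedVideoPath;
    LoadFfmpegLibrary(); // loads the FFmpeg binary if needed, then runs Compress(...)
}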

Related

How to solve Speech-To-Text error: "unable to transcode data stream audio/wav -> audio/x-float-array"

I am building an Android app that integrates the IBM Speech-To-Text service. First I record audio and save it to the device, then I pass it for conversion, but while passing the audio I get the error "unable to transcode data stream audio/wav -> audio/x-float-array". I tried different audio formats too, but I get the same error for every format, even though the audio saves properly and can be played with a music player. Please help me get rid of this error.
First I record the audio using MediaRecorder and save it to the device, then I send it for conversion, but I get the error. I have tried every possible audio format.
fileName = getExternalCacheDir().getAbsolutePath() + "/" + "examples.wav";
try {
    RecognizeOptions recognizeOptions = new RecognizeOptions.Builder()
            .audio(new FileInputStream(fileName))
            .contentType("audio/wav")
            .model("en-US_BroadbandModel")
            .build();

    BaseRecognizeCallback baseRecognizeCallback =
            new BaseRecognizeCallback() {
                @Override
                public void onTranscription(SpeechRecognitionResults speechRecognitionResults) {
                    System.out.println(speechRecognitionResults);
                }

                @Override
                public void onConnected() {
                }

                @Override
                public void onError(Exception e) {
                    Log.i("Error", e.getMessage());
                    enableMicButton();
                }

                @Override
                public void onDisconnected() {
                    enableMicButton();
                }

                @Override
                public void onInactivityTimeout(RuntimeException runtimeException) {
                }

                @Override
                public void onListening() {
                }

                @Override
                public void onTranscriptionComplete() {
                }
            };

    speechToText.recognizeUsingWebSocket(recognizeOptions, baseRecognizeCallback);
} catch (FileNotFoundException e) {
    e.printStackTrace();
}
If you are using the watson-developer-cloud Java SDK, here's an example that passes the right HttpMediaType:
FileInputStream audio = new FileInputStream("src/test/resources/speech_to_text/sample1.wav");

RecognizeOptions options =
        new RecognizeOptions.Builder()
                .audio(audio)
                .interimResults(true)
                .contentType(HttpMediaType.AUDIO_WAV)
                .build();

service.recognizeUsingWebSocket(
        options,
        new BaseRecognizeCallback() {
            @Override
            public void onTranscription(SpeechRecognitionResults speechResults) {
                System.out.println(speechResults);
            }

            @Override
            public void onDisconnected() {
                lock.countDown();
            }
        });
You can find the complete example here.

Reduce FFmpeg execution time and check if there is anything wrong in the command

I am using FFmpeg to create a video from images, and I have 2 images stored on my SD card. But when I run the code, it takes too long to execute, or sometimes it only shows that it is executing. My question is: how do I increase the speed, and is there anything wrong with my command?
String cmd[] = {"-r", "1/5", "-i", inputputimg, "-strict", "experimental",
        "-vcodec", "libx264", "-preset", "ultrafast", "-crf", "30", output};

String inputputimg = "/storage/emulated/0/FestiveApp/temp/img%02d.jpeg";
public void ImageCommand(String command[]) {
    ffmpeg = FFmpeg.getInstance(MainActivity2.this);
    try {
        // to execute "ffmpeg -version" command you just need to pass "-version"
        ffmpeg.execute(command, new ExecuteBinaryResponseHandler() {
            @Override
            public void onStart() {
                Log.e("ffmpeg", "Exaction Start");
            }

            @Override
            public void onProgress(String message) {
            }

            @Override
            public void onFailure(String message) {
                imageprogressDialog.dismiss();
                Log.e("ok", message);
            }

            @Override
            public void onSuccess(String message) {
                imageprogressDialog.dismiss();
                Toast.makeText(MainActivity2.this, "success", Toast.LENGTH_SHORT).show();
            }

            @Override
            public void onFinish() {
                imageprogressDialog.dismiss();
                Log.e("ffmpeg", "Exaction Start");
            }
        });
    } catch (FFmpegCommandAlreadyRunningException e) {
        imageprogressDialog.dismiss();
        Log.e("ffmpeg", e.toString());
    }
}
I want the video to take only a few seconds to create. I am also trying to merge two videos, and that has the same problem.

FFmpeg: add watermark/text to a video in React Native

Right now I'm stuck on the Android part. Here is my code.
I don't want to define the font styling or the box; I found this code somewhere and copy-pasted it, as I don't have any knowledge of FFmpeg. I just want simple right-aligned text in 2 lines at the top right of the video, like this.
I am getting the error in this part of the code: the video gets generated, but it does not play and it is always 262 B in size.
String[] cmd = new String[] {
        "-i", path,
        "-vf", String.format("drawtext=\"fontfile=/systems/fonts/DroidSans.ttf: text='%s': "
                + "box=1: boxcolor=black#0.5: boxborder=5: x=(w-text_w)/t: y=(h-text_h)/2\"", text),
        "-codec:a", "aac",
        out.getAbsolutePath()
};
This is the full code
@ReactMethod
public void embedTextOnVideo(String text, String path, int fontSize, String fontColor,
                             final Callback successCallback, final Callback errorCallback) {
    FFmpeg ffmpeg = FFmpeg.getInstance(_reactContext);
    try {
        ffmpeg.loadBinary(new LoadBinaryResponseHandler() {
            @Override
            public void onStart() {}

            @Override
            public void onFailure() {}

            @Override
            public void onSuccess() {}

            @Override
            public void onFinish() {}
        });
    } catch (FFmpegNotSupportedException e) {
        // Handle if FFmpeg is not supported by device
    }

    File out = getOutputFile(TYPE_VIDEO);
    String[] cmd = new String[] {
            "-i", path,
            "-vf", String.format("drawtext=\"fontfile=/systems/fonts/DroidSans.ttf: text='%s': "
                    + "box=1: boxcolor=black#0.5: boxborder=5: x=(w-text_w)/t: y=(h-text_h)/2\"", text),
            "-codec:a", "aac",
            out.getAbsolutePath()
    };

    try {
        ffmpeg.execute(cmd, new ExecuteBinaryResponseHandler() {
            @Override
            public void onStart() {}

            @Override
            public void onProgress(String message) {}

            @Override
            public void onFailure(String message) {
                errorCallback.invoke("Error ffmpeg executing with message:\n\t" + message);
            }

            @Override
            public void onSuccess(String message) {
                successCallback.invoke("Successfully output file with message:\n\t");
            }

            @Override
            public void onFinish() {}
        });
    } catch (FFmpegCommandAlreadyRunningException e) {
        // Handle if FFmpeg is already running
    }
}
@Nullable
private Throwable writeDataToFile(byte[] data, File file) {
    try {
        FileOutputStream fos = new FileOutputStream(file);
        fos.write(data);
        fos.close();
    } catch (FileNotFoundException e) {
        return e;
    } catch (IOException e) {
        return e;
    }
    return null;
}

@Nullable
private File getOutputFile(int type) {
    File storageDir = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM);
    // Create storage dir if it does not exist
    if (!storageDir.exists()) {
        if (!storageDir.mkdirs()) {
            Log.e(TAG, "Failed to create directory:" + storageDir.getAbsolutePath());
            return null;
        }
    }
    // media file name
    String fileName = String.format("%s", new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date()));
    if (type == TYPE_VIDEO) {
        fileName = String.format("VID_%s.mp4", fileName);
    } else {
        Log.e(TAG, "Unsupported media type:" + type);
        return null;
    }
    return new File(String.format("%s%s%s", storageDir.getPath(), File.separator, fileName));
}

FFmpeg in Android doesn't create output file

I am currently using FFmpeg in my Android project to convert a video file to an audio file.
When I execute the conversion with the FFmpeg library, no error occurs. However, the output file is not created in the folder I specified.
Here is my code for generating the audio file:
OnConvertButtonClickListener convertButtonClickListener = new OnConvertButtonClickListener() {
    @Override
    public void onClick(int position) {
        Converter.loadFFMpegBinary();
        String cmd = CMD_HEAD + videoItems.get(position).getTitle() + CMD_STRICT;
        String[] fileDir = videoItems.get(position).getTitle().split(File.separator);
        String fileName = fileDir[fileDir.length - 1];
        String out_audio_file = FileManager.getHomeDir() + File.separator + fileName.substring(0, fileName.length() - 3) + "aac";
        Log.d("tag1", out_audio_file);
        cmd = cmd + out_audio_file;
        Log.e("tag1", cmd);
        String[] command = cmd.split(" ");
        Converter.execFFmpegBinary(command);
    }
};
This is the execFFmpegBinary method; after executing it, "success" is displayed in my Log window.
public static void execFFmpegBinary(final String[] command) {
    try {
        ffmpeg.execute(command, new ExecuteBinaryResponseHandler() {
            @Override
            public void onFailure(String s) {
                Log.d("execFFmpegBinary", "fail");
            }

            @Override
            public void onSuccess(String s) {
                Log.d("execFFmpegBinary", "success");
            }

            @Override
            public void onProgress(String s) {
                Log.d("execFFmpegBinary", "progress");
            }

            @Override
            public void onStart() {
                Log.d("execFFmpegBinary", "start");
            }

            @Override
            public void onFinish() {
                Log.d("execFFmpegBinary", "finish");
            }
        });
    } catch (FFmpegCommandAlreadyRunningException e) {
        // do nothing for now
        Log.d("execFFmpegBinary", "Exception");
    }
}
Below is an example of my cmd.
-version -y -i /storage/emulated/0/DCIM/Camera/20180104_031417.mp4 -f aac -ab 192000 -vn /storage/emulated/0/Memento/20180104_031417.aac
Does anyone know why my output file isn't created?
I have added audio to a video using FFmpeg; maybe this will help you.
class AddAudio {

    private Context context;
    private FFmpeg ffmpeg;
    private ProgressDialog progressDialog;
    private String videoPath;
    private String videoWithoutAudioPath;
    private String output;

    AddAudio(Context context, String soundPath, String videoPath, String videoWithoutAudioPath,
             ProgressDialog progressDialog, FFmpeg ffmpeg) {
        this.context = context;
        this.videoPath = videoPath;
        this.ffmpeg = ffmpeg;
        this.progressDialog = progressDialog;
        this.videoWithoutAudioPath = videoWithoutAudioPath;

        String currentMilliSecond = String.valueOf(System.currentTimeMillis());
        output = Environment.getExternalStorageDirectory().getAbsolutePath() + "/"
                + context.getResources().getString(R.string.app_name) + "/test/" + currentMilliSecond + ".mp4";

        String cmd = "-y -i " + videoWithoutAudioPath + " -i " + soundPath
                + " -c:v copy -map 0:v:0 -map 1:a:0 -c:a aac -shortest " + output;
        encodeAudio(cmd);
    }

    private void encodeAudio(String cmd) {
        String[] command = cmd.split(" ");
        if (command.length != 0) {
            execFFmpegBinary(command);
        } else {
            Toast.makeText(context, context.getString(R.string.empty_command_toast), Toast.LENGTH_LONG).show();
        }
    }

    private void execFFmpegBinary(final String[] command) {
        try {
            ffmpeg.execute(command, new ExecuteBinaryResponseHandler() {
                @Override
                public void onFailure(String s) {
                    Log.e("Failure", s);
                }

                @Override
                public void onSuccess(String s) {
                    Log.e("Success", s);
                }

                @Override
                public void onProgress(String s) {
                    progressDialog.setMessage("Adding audio....");
                }

                @Override
                public void onStart() {
                    progressDialog.setCancelable(false);
                    progressDialog.setCanceledOnTouchOutside(false);
                    progressDialog.show();
                }

                @Override
                public void onFinish() {
                    /*
                     * delete original video since new video is made with sound
                     */
                    if (deleteOriginalVideo() && deleteWorkingFolder(videoWithoutAudioPath)) {
                        progressDialog.dismiss();
                        Intent notificationIntent = new Intent(Utils.LOCAL_BROADCAST_CODE);
                        notificationIntent.putExtra(Utils.FILE_PATH, output);
                        LocalBroadcastManager.getInstance(context).sendBroadcast(notificationIntent);
                    }
                }
            });
        } catch (FFmpegCommandAlreadyRunningException e) {
            e.printStackTrace();
        }
    }

    private boolean deleteOriginalVideo() {
        boolean success = false;
        File file = new File(videoPath);
        if (file.exists()) {
            success = file.delete();
        }
        return success;
    }

    private boolean deleteWorkingFolder(String deletePath) {
        File file = new File(deletePath);
        File folder = new File(file.getParent());
        if (folder.isDirectory()) {
            for (File child : folder.listFiles()) {
                //noinspection ResultOfMethodCallIgnored
                child.delete();
            }
        }
        return folder.delete();
    }
}
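A hedged usage sketch of the class above; every path is a placeholder, and the FFmpeg instance is assumed to have been loaded already:
// Hypothetical call site; every path below is a placeholder.
FFmpeg ffmpeg = FFmpeg.getInstance(context);
ProgressDialog progressDialog = new ProgressDialog(context);
new AddAudio(context,
        "/storage/emulated/0/MyApp/sound.mp3",     // soundPath: audio to add
        "/storage/emulated/0/MyApp/original.mp4",  // videoPath: deleted when finished
        "/storage/emulated/0/MyApp/muted.mp4",     // videoWithoutAudioPath: video input
        progressDialog,
        ffmpeg);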
Always check the log from ffmpeg (assuming your script is working and it is actually being executed). Yours should show an error:
Requested output format 'aac' is not a suitable output format
Replace -f aac with -f adts, or omit the -f option if the output is a normal file name with the .aac extension.
Alternatively, since your MP4 input most likely already contains AAC audio, consider stream copying it instead of re-encoding. To do so, remove -ab 192000 and add -c:a copy.
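For example (reusing the paths from the question as placeholders), either of these should write a playable .aac file; the first re-encodes into an ADTS container, the second just copies the existing AAC stream:
-y -i /storage/emulated/0/DCIM/Camera/20180104_031417.mp4 -vn -f adts -ab 192000 /storage/emulated/0/Memento/20180104_031417.aac
-y -i /storage/emulated/0/DCIM/Camera/20180104_031417.mp4 -vn -c:a copy /storage/emulated/0/Memento/20180104_031417.aac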
(Well, I don't have enough karma to post a comment, and I'm pretty new to the FFmpeg lib.)
What I can think of now: always consider adding a SPACE (" ") while concatenating strings in commands for this FFmpeg lib. Try the following.
Change:
cmd = cmd+out_audio_file;
To (note the space between cmd and out_audio_file):
cmd = cmd + " " + out_audio_file;

Mix 2 audio files with android-ffmpeg on Android

I am developing an Android application for mixing 2 audio files, and I use android-ffmpeg for that. I use the following library from GitHub:
https://github.com/guardianproject/android-ffmpeg-java
I use the following code to mix 2 audio files from an activity:
try {
    File fileAppRoot = new File(getApplicationInfo().dataDir);
    SoxController sxCon = new SoxController(fileAppRoot, new ShellUtils.ShellCallback() {
        @Override
        public void shellOut(String shellLine) {
            System.out.println(shellLine);
        }

        @Override
        public void processComplete(int exitValue) {
            System.out.println("hello");
        }
    });

    List<String> files = new ArrayList<String>();
    files.add(Environment.getExternalStorageDirectory().getAbsolutePath() + "/Testing/me.mp3");
    files.add(Environment.getExternalStorageDirectory().getAbsolutePath() + "/Testing/il.mp3");
    sxCon.combineMix(files, Environment.getExternalStorageDirectory().getAbsolutePath() + "/Testing/ial.mp3");
But this returns exit value 2 in processComplete, and no new mixed file is generated.
The logs show the following problem: no handler for file extension `mp3'
Thanks for any help on this.
You cannot mix mp3 files with this library.
It can mix only .wav files.
Convert your mp3 files to wav files, then use this lib to mix the wav files.
I hope this response is good for you.
Thanks,
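A hedged sketch of that conversion step. It uses the com.writingminds FFmpeg wrapper shown elsewhere on this page purely for illustration (any ffmpeg build would do); the paths match the question, and you would repeat it for each mp3 before calling combineMix with the resulting .wav paths:
// Hypothetical pre-step: decode me.mp3 to me.wav before mixing.
final String base = Environment.getExternalStorageDirectory().getAbsolutePath() + "/Testing/";
String[] toWav = {"-y", "-i", base + "me.mp3", base + "me.wav"};
try {
    FFmpeg.getInstance(context).execute(toWav, new ExecuteBinaryResponseHandler() {
        @Override
        public void onSuccess(String message) {
            // repeat for il.mp3, then pass the .wav paths to sxCon.combineMix(...)
        }
    });
} catch (FFmpegCommandAlreadyRunningException e) {
    e.printStackTrace();
}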
https://github.com/bravobit/FFmpeg-Android
implementation 'nl.bravobit:android-ffmpeg:1.1.7'
public boolean mergeAudio(final Context context, File[] voiceFile, String file_name) {
    final ProgressDialog asyncDialog = new ProgressDialog(context);
    asyncDialog.setMessage("Audio Merging Start..");
    asyncDialog.setCancelable(false);
    final boolean[] isSuccess = {false};

    if (file_name != null) {
        file_name = Environment.getExternalStorageDirectory() + "/podmod/" + file_name + "_.mp3";
    } else {
        file_name = getMusicFilename();
    }

    File ffmpegFile = new File(file_name);
    if (ffmpegFile.exists()) {
        ffmpegFile.delete();
    }

    for (File f : voiceFile) {
        if (!f.exists()) {
            Log.d("AudioMergingFailure", "File ot Exist");
            return isSuccess[0];
        }
    }

    String s = "";
    String s_index = "";
    String fileSize = "n=" + voiceFile.length;
    for (int i = 0; i < voiceFile.length; i++) {
        s = s + "-i#" + voiceFile[i].getPath() + "#";
        s_index = s_index + "[" + i + ":0]";
    }

    String str_cmd = s + "-filter_complex#" + s_index + "concat=" + fileSize + ":v=0:a=1[out]#-map#[out]#" + file_name;
    Log.d("str_cmd", str_cmd);
    String[] cmd = str_cmd.split("#");
    final String finalFile_name = file_name;

    try {
        if (FFmpeg.getInstance(context).isSupported()) {
            FFmpeg ffmpeg = FFmpeg.getInstance(context);
            // to execute "ffmpeg -version" command you just need to pass "-version"
            ffmpeg.execute(cmd, new ExecuteBinaryResponseHandler() {
                @Override
                public void onStart() {
                    asyncDialog.show();
                }

                @Override
                public void onProgress(String message) {
                }

                @Override
                public void onFailure(String message) {
                    Log.d("AudioMergingFailure", message);
                    asyncDialog.dismiss();
                    Toast.makeText(context, "Audio Merging Failed",
                            Toast.LENGTH_LONG).show();
                }

                @Override
                public void onSuccess(String message) {
                    asyncDialog.dismiss();
                    Log.v("onSuccess", message);
                    File ffmpegFile_ = new File(finalFile_name);
                    Toast.makeText(context, "Audio onSuccess",
                            Toast.LENGTH_LONG).show();
                    isSuccess[0] = true;
                }

                @Override
                public void onFinish() {
                    asyncDialog.dismiss();
                }
            });
        } else {
            asyncDialog.dismiss();
        }
    } catch (Exception e) {
        asyncDialog.dismiss();
        Log.d("NotException_", e.getMessage());
    }
    return isSuccess[0];
}

public static String getMusicFilename() {
    return Environment.getExternalStorageDirectory() + "/podmod/Merged_Audio_" + getRandomNumber(0, 100000) + ".mp3";
}
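A hedged usage sketch; the input paths are placeholders, and passing null for file_name falls back to getMusicFilename():
// Hypothetical call site; the clip paths are placeholders.
File[] parts = {
        new File("/storage/emulated/0/podmod/clip1.mp3"),
        new File("/storage/emulated/0/podmod/clip2.mp3")
};
mergeAudio(context, parts, null); // null file_name -> getMusicFilename()
Note that the boolean return value only reflects the synchronous checks; the actual merge result arrives in the onSuccess/onFailure callbacks.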
