I need to speed up video compression in my Android app. I'm using FFmpeg, and it takes 3 minutes to compress an 80 MB video. Does anyone know a better solution?
The command I'm using is:
/data/data/com.moymer/app_bin/ffmpeg -y -i /storage/emulated/0/DCIM/Camera/VID_20150803_164811363.mp4 -s 640x352 -r 25 -vcodec mpeg4 -ac 1 -preset ultrafast -strict -2 /storage/emulated/0/DCIM/Camera/compressed_video.mp4
I'm running this command using FFmpeg for Android from this GitHub repo: https://github.com/guardianproject/android-ffmpeg-java
The code that uses FFmpeg in my project is inside an AsyncTask and is copied below:
@Override
protected Object doInBackground(Object... params) {
ItemRoloDeCamera compressedVideo = new ItemRoloDeCamera();
File videoInputFile = new File(video.getSdcardPath());
File videoFolderFile = videoInputFile.getParentFile();
File videoOutputFile = new File(videoFolderFile, "video_comprimido_moymer.mp4");
if (videoFolderFile.exists())
android.util.Log.e("COMPRESS VIDEO UTILS", "video folder exists");
else
android.util.Log.e("COMPRESS VIDEO UTILS", "video folder does NOT exist");
if (videoInputFile.exists())
android.util.Log.e("COMPRESS VIDEO UTILS", "video input file exists");
else
android.util.Log.e("COMPRESS VIDEO UTILS", "video input file does NOT exist");
if (videoOutputFile.exists())
android.util.Log.e("COMPRESS VIDEO UTILS", "video output file exists");
else
android.util.Log.e("COMPRESS VIDEO UTILS", "video output file does NOT exist");
FfmpegController ffmpegController;
try {
ffmpegController = new FfmpegController(context, videoFolderFile);
Clip clipIn = new Clip(videoInputFile.getAbsolutePath());
ffmpegController.getInfo(clipIn, new ShellUtils.ShellCallback() {
@Override
public void shellOut(String shellLine) {
videoInfo.add(shellLine);
}
@Override
public void processComplete(int exitValue) {
videoInfo.add(String.valueOf(exitValue));
}
});
int rotate = getRotateMetadata();
Clip clipOut = new Clip(videoOutputFile.getAbsolutePath());
clipOut.videoFps = "24";
clipOut.videoBitrate = 512;
clipOut.audioChannels = 1;
clipOut.width = 640;
clipOut.height = 352;
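// ffmpeg's transpose=1 rotates 90 degrees clockwise; chaining it below handles 180/270.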
if (rotate == 90)
clipOut.videoFilter = "transpose=1";
else if (rotate == 180)
clipOut.videoFilter = "transpose=1,transpose=1";
else if (rotate == 270)
clipOut.videoFilter = "transpose=1,transpose=1,transpose=1";
millisDuration = getVideoDuration(videoInputFile.getAbsolutePath());
float secondsDuration = millisDuration / 1000f;
clipOut.duration = secondsDuration;
ffmpegController.processVideo(clipIn, clipOut, true, new ShellUtils.ShellCallback() {
@Override
public void shellOut(String shellLine) {
android.util.Log.e("COMPRESS VIDEO UTILS", "shellOut - " + shellLine);
float percentage = getTimeMetadata(shellLine);
if (percentage >= 0f)
publishProgress(percentage);
}
@Override
public void processComplete(int exitValue) {
android.util.Log.e("COMPRESS VIDEO UTILS", "proccess complete - " + exitValue);
}
});
} catch (IOException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
} finally {
if (videoOutputFile.exists()) {
android.util.Log.e("COMPRESS VIDEO UTILS", "finished ffmpeg ---> video output file exists");
compressedVideo.setSdcardPath(videoOutputFile.getAbsolutePath());
} else {
android.util.Log.e("COMPRESS VIDEO UTILS", "finished ffmpeg ---> video output file does NOT exist");
}
}
// Return outside the finally block so exceptions are not silently swallowed.
return compressedVideo;
}
private float getTimeMetadata(String shellLine) {
float percentage = -1;
if (shellLine.contains("time=")) {
String[] timeLine = shellLine.split("=");
String time = timeLine[5];
time = time.replace("bitrate", "");
time = time.trim();
// String source = "00:10:17";
String[] tokens = time.split(":");
int secondsToMs = (int) (Float.parseFloat(tokens[2]) * 1000);
int minutesToMs = Integer.parseInt(tokens[1]) * 60000;
int hoursToMs = Integer.parseInt(tokens[0]) * 3600000;
long timeInMillis = secondsToMs + minutesToMs + hoursToMs;
percentage = (timeInMillis * 100.0f) / millisDuration;
}
return percentage;
}
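// A sketch of a more robust variant, keyed directly to ffmpeg's "time=HH:MM:SS.ms"
// progress field instead of split("=") indexing (fully qualified names avoid extra imports):
private static final java.util.regex.Pattern TIME_PATTERN =
java.util.regex.Pattern.compile("time=(\\d+):(\\d+):(\\d+(?:\\.\\d+)?)");
private float getTimeMetadataRobust(String shellLine) {
java.util.regex.Matcher m = TIME_PATTERN.matcher(shellLine);
if (!m.find()) return -1f;
long timeInMillis = Integer.parseInt(m.group(1)) * 3600000L
+ Integer.parseInt(m.group(2)) * 60000L
+ (long) (Float.parseFloat(m.group(3)) * 1000);
return (timeInMillis * 100f) / millisDuration;
}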
private int getRotateMetadata() {
int rotate = 0;
for (String shellLine : videoInfo) {
if (shellLine.contains("rotate")) {
//rotate : 270
String[] rotateLine = shellLine.split(":");
rotate = Integer.parseInt(rotateLine[1].trim());
}
}
return rotate;
}
public static long getVideoDuration(String videoPath) {
MediaMetadataRetriever retriever = new MediaMetadataRetriever();
retriever.setDataSource(videoPath);
String time = retriever
.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION);
long timeInmillisec = Long.parseLong(time);
return timeInmillisec;
}
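Since MediaMetadataRetriever is already used for the duration, the rotation can be read the same way instead of parsing ffmpeg's shell output (METADATA_KEY_VIDEO_ROTATION is available from API 17; a minimal sketch):
public static int getVideoRotation(String videoPath) {
MediaMetadataRetriever retriever = new MediaMetadataRetriever();
retriever.setDataSource(videoPath);
String rotation = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION);
return rotation == null ? 0 : Integer.parseInt(rotation);
}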
The only change I made in the processVideo method was to add the following lines when building the command:
cmd.add("-preset");
cmd.add("ultrafast");
Related
I'm developing an app using the BASS audio library. With the voice-change function I can play audio successfully like this:
BASS.BASS_MusicFree(chan);
BASS.BASS_StreamFree(chan);
if ((chan = BASS.BASS_StreamCreateFile(new BASS.Asset(getAssets(), "test.mp3"), 0, 0, BASS.BASS_MUSIC_DECODE)) == 0
&& (chan = BASS.BASS_MusicLoad(new BASS.Asset(getAssets(), "test.mp3"), 0, 0, BASS.BASS_SAMPLE_LOOP | BASS.BASS_MUSIC_RAMP | floatable, 1)) == 0) {
// whatever it is, it ain't playable
((Button) findViewById(R.id.open)).setText("press here to open a file");
Error("Can't play the file");
return;
}
chan = BASS_FX.BASS_FX_TempoCreate(chan, BASS.BASS_SAMPLE_MONO);//enable pitch
chanFX = BASS_FX.BASS_FX_TempoGetSource(chan);
((Button) findViewById(R.id.open)).setText("test");
setupFX(mPitch, mRate, mIdistortionListener);
BASS.BASS_ChannelPlay(chan, false);
BASS.BASS_SetVolume(0.9f);
However, I cannot save the processed audio to a custom file. Here is the saving code:
mBtnSave.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
String path = testPath();
savePath(path);
Toast.makeText(TextActivity.this, "saved successfully: " + path, Toast.LENGTH_SHORT).show();
}
});
private String testPath() {
File localFile = new File(Environment.getExternalStorageDirectory().getPath(), "testVoid");
if (!localFile.exists()) {
localFile.mkdirs();
}
String path = localFile.getAbsolutePath() + "/record" + ".wav";
Log.d("mvn", path);
return path;
}
public void savePath(String filePath) {
long len = BASS.BASS_ChannelGetLength(chan, BASS.BASS_POS_BYTE);
double time = BASS.BASS_ChannelBytes2Seconds(chan, len);
File localFile = new File(filePath);
//flag=262208
if ((!isEmpty(filePath)) && (chan != 0) && (BASSenc.BASS_Encode_Start(chan, filePath, 262208, null, Integer.valueOf(0)) != 0)) {
int i1;
try {
ByteBuffer localByteBuffer = ByteBuffer.allocateDirect(20000);
do {
i1 = BASS.BASS_ChannelGetData(chan, localByteBuffer, localByteBuffer.capacity());
} while ((i1 != -1) && (i1 != 0));
return;
} catch (Exception localException) {
localException.printStackTrace();
}
}
}
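A common BASS pattern for saving is to render through a second, decode-only chain instead of pulling data from the already-playing channel (a sketch based on the calls above; BASS_STREAM_DECODE and BASS_Encode_Stop are standard BASS/BASSenc names, but verify them against your binding):
int decodeChan = BASS.BASS_StreamCreateFile(new BASS.Asset(getAssets(), "test.mp3"), 0, 0, BASS.BASS_STREAM_DECODE);
decodeChan = BASS_FX.BASS_FX_TempoCreate(decodeChan, BASS.BASS_STREAM_DECODE);
// ... apply the same pitch/tempo FX to decodeChan here ...
// flag 262208 = BASS_ENCODE_PCM | BASS_ENCODE_AUTOFREE, as in the question.
if (BASSenc.BASS_Encode_Start(decodeChan, filePath, 262208, null, Integer.valueOf(0)) != 0) {
ByteBuffer buf = ByteBuffer.allocateDirect(20000);
int n;
do {
// Pulling decoded data drives the encoder, which writes the file.
n = BASS.BASS_ChannelGetData(decodeChan, buf, buf.capacity());
} while (n != -1);
BASSenc.BASS_Encode_Stop(decodeChan); // flush and close the output
}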
I want to convert a video (MP4) to MP3 format. If anyone knows how, please explain it to me.
I have tried the code below, but it is not working.
I implemented it from this link, but it won't work for me:
How do I extractor audio to mp3 from mp4 using java in Android?
public class AudioFromVideo {
private String audio, video;
private MediaCodec amc;
private MediaExtractor ame;
private MediaFormat amf;
private String amime;
public AudioFromVideo(String srcVideo, String destAudio) {
this.audio = destAudio;
this.video = srcVideo;
ame = new MediaExtractor();
init();
}
public void init() {
try {
ame.setDataSource(video);
amf = ame.getTrackFormat(1);
ame.selectTrack(1);
amime = amf.getString(MediaFormat.KEY_MIME);
amc = MediaCodec.createDecoderByType(amime);
amc.configure(amf, null, null, 0);
amc.start();
} catch (IOException e) {
e.printStackTrace();
}
}
public void start() {
new AudioService(amc, ame, audio).start();
}
private class AudioService extends Thread {
private MediaCodec amc;
private MediaExtractor ame;
private ByteBuffer[] aInputBuffers, aOutputBuffers;
private String destFile;
@SuppressWarnings("deprecation")
AudioService(MediaCodec amc, MediaExtractor ame, String destFile) {
this.amc = amc;
this.ame = ame;
this.destFile = destFile;
aInputBuffers = amc.getInputBuffers();
aOutputBuffers = amc.getOutputBuffers();
}
@SuppressWarnings("deprecation")
public void run() {
try {
OutputStream os = new FileOutputStream(new File(destFile));
long count = 0;
while (true) {
int inputIndex = amc.dequeueInputBuffer(0);
if (inputIndex == -1) {
continue;
}
int sampleSize = ame.readSampleData(aInputBuffers[inputIndex], 0);
if (sampleSize == -1) break;
long presentationTime = ame.getSampleTime();
int flag = ame.getSampleFlags();
ame.advance();
amc.queueInputBuffer(inputIndex, 0, sampleSize, presentationTime, flag);
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int outputIndex = amc.dequeueOutputBuffer(info, 0);
if (outputIndex >= 0) {
byte[] data = new byte[info.size];
aOutputBuffers[outputIndex].get(data, 0, data.length);
aOutputBuffers[outputIndex].clear();
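// Note: this writes the decoder's raw PCM output, so the file is not a valid MP3.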
os.write(data);
count += data.length;
Log.e("write", "" + count);
amc.releaseOutputBuffer(outputIndex, false);
} else if (outputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
aOutputBuffers = amc.getOutputBuffers();
} else if (outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
}
}
os.flush();
os.close();
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
new AudioFromVideo(videopath, audioPath).start();
You will need to extract the audio from MP4 videos using an Android FFmpeg library:
ffmpeg -i my_video.mp4 -ab 128k -ac 2 -ar 44100 -vn my_audio.mp3
Here are links to libraries that do this:
http://writingminds.github.io/ffmpeg-android-java/
http://www.android-arsenal.com/details/1/931
It is possible to extract audio from a video file on Android programmatically, without using any external library, by using the MediaMuxer class.
new AudioExtractor().genVideoUsingMuxer("Video source Path", "Audio destination path", -1, -1, true, false);
See this post for a better understanding and the complete code:
https://androidprogrammatically425516919.wordpress.com/2020/04/21/how-to-convert-video-to-audio-in-android-programmatically/
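For reference, a minimal sketch of the MediaExtractor/MediaMuxer approach (API 18+; android.media and java.nio imports assumed). It copies the compressed audio track into an MPEG-4 container without re-encoding, so the result is AAC in an .m4a/.mp4 file rather than a real MP3; the method name is illustrative:
public static void extractAudio(String srcPath, String dstPath) throws IOException {
MediaExtractor extractor = new MediaExtractor();
extractor.setDataSource(srcPath);
// Find the first audio track instead of hardcoding track index 1.
int audioTrack = -1;
MediaFormat format = null;
for (int i = 0; i < extractor.getTrackCount(); i++) {
MediaFormat f = extractor.getTrackFormat(i);
if (f.getString(MediaFormat.KEY_MIME).startsWith("audio/")) {
audioTrack = i;
format = f;
break;
}
}
if (audioTrack < 0) throw new IOException("no audio track");
extractor.selectTrack(audioTrack);
MediaMuxer muxer = new MediaMuxer(dstPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
int dstIndex = muxer.addTrack(format);
muxer.start();
ByteBuffer buffer = ByteBuffer.allocate(256 * 1024);
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
while (true) {
info.size = extractor.readSampleData(buffer, 0);
if (info.size < 0) break; // end of stream
info.presentationTimeUs = extractor.getSampleTime();
info.offset = 0;
info.flags = extractor.getSampleFlags();
muxer.writeSampleData(dstIndex, buffer, info);
extractor.advance();
}
muxer.stop();
muxer.release();
extractor.release();
}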
I'm developing a Java RTP streaming app for a company project, which should be capable of joining the multicast server and receiving the RTP packets. Later I use the H264 depacketizer to recreate a complete frame from the NAL FUs (I keep appending the data until the end bit and marker bit are set).
I want to decode and display a raw H264 video byte stream in Android, so I'm currently using the MediaCodec classes with a hardware decoder configured.
The application is up and running on Jelly Bean (API 17). The resolutions I need to decode are:
480P at 30/60 FPS
720P/I at 30/60 FPS
1080P/I at 30/60 FPS
Recently, due to a system upgrade, we are porting the app to Android L version 5.0.2. My app is not capable of playing high-resolution videos like 720p@60fps and 1080p@60fps.
For debugging purposes I started feeding elementary H264 frames, with their sizes taken from a dump file, to MediaCodec, and found that the video lags.
There are timestamps burned into the sample video I used, and the rendered video takes more than one real second to advance by one second.
Below is my sample code and links to sample video
h264 video https://www.dropbox.com/s/cocjhhovihm8q25/dump60fps.h264?dl=0
h264 framesize https://www.dropbox.com/s/r146d5zederrne1/dump60fps.size?dl=0
Also, as this is my first question on Stack Overflow, please bear with me regarding the bad code formatting and direct references.
public class MainActivity extends Activity {
static final String TAG = "MainActivity";
private PlayerThread mPlayer = null;
private static final String MIME_TYPE = "video/avc";
private byte[] mSPSPPSFrame = new byte [3000];
private byte[] sps = new byte[37];
File videoFile = null;
File videoFile1 = null;
TextView tv ;
FileInputStream videoFileStream = null;
FileInputStream videoFileStream1 = null;
int[] tall = null ;
SpeedControlCallback mspeed = new SpeedControlCallback();
int mStreamLen = 0;
FrameLayout game;
RelativeLayout rl ;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//mVideoSurfaceView = (SurfaceView)findViewById(R.id.videoSurfaceView);
setContentView(R.layout.activity_main);
SurfaceView first = (SurfaceView) findViewById(R.id.firstSurface);
first.getHolder().addCallback(new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
Log.d(TAG, "First surface created!");
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i2, int i3) {
Log.d(TAG, "surfaceChanged()");
surfaceHolder.getSurface();
if (mPlayer == null) {
mPlayer = new PlayerThread(surfaceHolder.getSurface());
mPlayer.start();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
Log.d(TAG, "First surface destroyed!");
}
});
tv = (TextView) findViewById(R.id.textview);
videoFile = new File("/data/local/tmp/dump60fps.h264");
videoFile1 = new File("/data/local/tmp/dump60fps.size");
}
private class PlayerThread extends Thread {
private Surface surface;
public PlayerThread(Surface surface) {
this.surface = surface;
}
@Override
public void run() {
try {
decodeVideo(0, 1920,1080, 50, surface);
} catch (IOException e) {
e.printStackTrace();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (Throwable e) {
e.printStackTrace();
}
}
}
private void decodeVideo(int testinput, int width, int height,
int threshold, Surface surface) throws Throwable {
MediaCodec codec = null;
MediaFormat mFormat;
final long kTimeOutUs = 10000;
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
boolean sawInputEOS = false;
boolean sawOutputEOS = false;
MediaFormat oformat = null;
int errors = -1;
long presentationTimeUs = 0L;
boolean mVideoStart = false;
byte[] byteArray = new byte[65525*5*3];
int i;
int sizeInBytes = 0, index, sampleSize = 0;
try {
byte[] bytes = new byte[(int) videoFile1.length()];
FileInputStream fis = new FileInputStream(videoFile1);
fis.read(bytes);
fis.close();
String[] valueStr = new String(bytes).trim().split("\\s+");
tall = new int[valueStr.length];
mStreamLen = valueStr.length;
Log.e(TAG, "++++++ Total Frames ++++++"+mStreamLen);
for ( i = 0; i < valueStr.length; i++) {
tall[i] = Integer.parseInt(valueStr[i]);
}
} catch (IOException e1) {
e1.printStackTrace();
}
index =1;
try {
videoFileStream = new FileInputStream(videoFile);
} catch (FileNotFoundException e1) {
e1.printStackTrace();
}
System.currentTimeMillis();
if (mVideoStart == false) {
try {
sizeInBytes = videoFileStream.read(mSPSPPSFrame, 0,37);
Log.e(TAG, "VideoEngine configure ."+sizeInBytes);
//for (i = 0 ; i < sizeInBytes; i++){
// Log.e(TAG, "VideoEngine ."+mSPSPPSFrame[i]);}
} catch (IOException e1) {
e1.printStackTrace();
}
sampleSize = sizeInBytes;
index++;
index++;
mFormat = MediaFormat.createVideoFormat(MIME_TYPE, 1920,1080);
mFormat.setByteBuffer("csd-0", ByteBuffer.wrap( mSPSPPSFrame,0, sizeInBytes));
codec = MediaCodec.createDecoderByType(MIME_TYPE);
codec.configure(mFormat, surface /*surface*/ , null /* crypto */, 0 /* flags */);
codec.start();
codec.getInputBuffers();
codec.getOutputBuffers();
}
// index = 0;
while (!sawOutputEOS && errors < 0) {
if (!sawInputEOS) {
int inputBufIndex = codec.dequeueInputBuffer(kTimeOutUs);
//Log.d(TAG, String.format("Archana Dqing the input buffer with BufIndex #: %d",inputBufIndex));
if (inputBufIndex >= 0) {
ByteBuffer dstBuf = codec.getInputBuffers()[inputBufIndex];
/*
* Read data from file and copy to the input ByteBuffer
*/
try {
sizeInBytes = videoFileStream.read(byteArray, 0,
tall[index] /*+ 4*/);
sampleSize = tall[index]/*+ 4*/;
index++;
} catch (IOException e) {
e.printStackTrace();
}
if (sizeInBytes <= 0) {
codec.queueInputBuffer(
inputBufIndex,
0 /* offset */,
0,
presentationTimeUs,
MediaCodec.BUFFER_FLAG_END_OF_STREAM );
sawInputEOS = true;
}
else {
dstBuf.put(byteArray, 0, sizeInBytes);
if (mVideoStart == false) mVideoStart = true;
codec.queueInputBuffer(
inputBufIndex,
0 /* offset */,
sampleSize,
presentationTimeUs,
mVideoStart ? 0:MediaCodec.BUFFER_FLAG_CODEC_CONFIG );
//Log.d(TAG, String.format(" After queueing the buffer to decoder with inputbufindex and samplesize #: %d ,%d ind %d",inputBufIndex,sampleSize,index));
}
}
}
int res = codec.dequeueOutputBuffer(info, kTimeOutUs);
//Log.d(TAG, String.format(" Getting the information about decoded output buffer flags,offset,PT,size #: %d %d %d %d",info.flags,info.offset,info.presentationTimeUs,info.size));
//Log.d(TAG, String.format(" Getting the output of decoder in res #: %d",res));
if (res >= 0) {
int outputBufIndex = res;
//Log.d(TAG, "Output PTS "+info.presentationTimeUs);
//mspeed.preRender(info.presentationTimeUs);
//mspeed.setFixedPlaybackRate(25);
codec.releaseOutputBuffer(outputBufIndex, true /* render */);
//Log.d(TAG, String.format(" releaseoutputbuffer index= #: %d",outputBufIndex));
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.d(TAG, "saw output EOS.");
sawOutputEOS = true;
}
} else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
codec.getOutputBuffers();
Log.d(TAG, "output buffers have changed.");
} else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
oformat = codec.getOutputFormat();
Log.d(TAG, "output format has changed to " + oformat);
}
}
codec.stop();
codec.release();
this.finish();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.activity_main, menu);
return true;
}
}
There are a couple of workarounds to the problem in the above sample test.
Instead of feeding one full frame to the decoder input, I fed single NAL units at a time. The playback was still slow and could not match 60 FPS.
Google has changed the implementation of the Surface BufferQueue from asynchronous to synchronous. Hence, when we call MediaCodec.dequeueOutputBuffer to get decoded data, the server side (SurfaceTexture::dequeueBuffer) waits for a buffer to be queued, and the client side waits for that, so SurfaceTextureClient::dequeueBuffer will not return until a buffer has actually been queued on the server side. In asynchronous mode, by contrast, a new GraphicBuffer is allocated.
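On API 21+, one mitigation is to pace rendering with the timed releaseOutputBuffer(index, renderTimestampNs) overload instead of rendering immediately. This only helps if real presentation timestamps are queued with the input (the code above always queues presentationTimeUs = 0). A sketch, with renderStartNs as a hypothetical long field initialized to 0:
long ptsNs = info.presentationTimeUs * 1000L; // microseconds to nanoseconds
if (renderStartNs == 0) {
renderStartNs = System.nanoTime() - ptsNs; // anchor stream time to the wall clock
}
// Queue the frame to the surface for a specific time instead of "render now".
codec.releaseOutputBuffer(outputBufIndex, renderStartNs + ptsNs);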
I am trying to merge an MP4 and an MP3 on Android using JavaCV. Please check my code below first; I explain the strange error after it:
private void test2() throws Exception {
String path = Environment.getExternalStorageDirectory()
.getAbsolutePath();
int testId = 4;
String videoPath = path + "/" + "sample" + testId + ".mp4";
String audioPath = path + "/" + "love.mp3";
String outputPath = path + "/" + "out" + testId + ".mp4";
FrameGrabber grabber1 = new FFmpegFrameGrabber(videoPath);
FrameGrabber grabber2 = new FFmpegFrameGrabber(audioPath);
grabber1.start();
grabber2.start();
FrameRecorder recorder = new FFmpegFrameRecorder(outputPath,
grabber1.getImageWidth(), grabber1.getImageHeight(),
grabber2.getAudioChannels());
double frameRate = grabber1.getFrameRate();
recorder.setFrameRate(frameRate);
recorder.setSampleRate(grabber2.getSampleRate());
recorder.setVideoQuality(1);
recorder.start();
Frame frame1, frame2 = null;
// getLengthInTime is correct, but getLengthInFrames not accurate.
Log.d(TAG, " Video lengthInTime:" + grabber1.getLengthInTime()
+ " Video frames:" + grabber1.getLengthInFrames());
// Record video.
int count = 0;
while (true) {
frame1 = grabber1.grabFrame();
if (frame1 == null) {
break;
}
frame1.samples = null;
recorder.record(frame1);
count++;
Log.d(TAG, "Video frame timestamp:" + grabber1.getTimestamp());
}
Log.d(TAG, " Video frame count:" + count);
// Record audio.
long videoTimestamp = recorder.getTimestamp();
while (true) {
frame2 = grabber2.grabFrame();
if (frame2 != null && grabber2.getTimestamp() <= videoTimestamp) {
frame2.image = null;
recorder.record(frame2);
// Log.d(TAG, "Audio frame timestamp:" +
// grabber2.getTimestamp());
} else {
break;
}
}
// release
recorder.stop();
recorder.release();
grabber1.stop();
grabber2.stop();
}
The output's audio is OK, but the video is strange: it plays for one second, stops for one second, and repeats like this. The input video was recorded with the phone's camera.
I tried to count the real number of video frames, and I found that the real number is much bigger than the number returned by getLengthInFrames().
frameGrabber grabs both image frames and sound frames.
If you need to work on the video frames only, or on their count, do it like this:
while (true) {
Frame grabFrame = frameGrabber.grabFrame();
if (grabFrame == null) {
// all frames are processed
System.out.println("!!! Failed cvQueryFrame");
runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(RecordActivity.this, "Done !!!", Toast.LENGTH_SHORT).show();
}
});
break;
}
if (grabFrame.image != null) {
//This is a video Frame, Do your work here
}
}
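Applied to the merge code in the question, the video pass would skip the interleaved audio frames instead of recording them with nulled samples (a sketch using the same grabber1/recorder objects):
// Video pass: record only frames that actually carry an image.
while (true) {
Frame frame = grabber1.grabFrame();
if (frame == null) break; // end of file
if (frame.image == null) continue; // skip the interleaved audio frames
recorder.record(frame);
}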
My problem is that I capture audio by pressing the record button, but I can't find any audio file at the specified path.
I capture sound when I press the capture button:
private void startRecord()
{
ImageButton soundStop = (ImageButton)findViewById(R.id.soundstop);
soundStop.setVisibility(View.VISIBLE);
mediaRecorder = new MediaRecorder();
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
//mediaRecorder.setOutputFormat(sound);
//mediaRecorder.setOutputFile(soundFilePath);
//mediaRecorder.setAudioEncoder(audioEncoder);
//Log.d("hier ", "hier" + soundFilePath);
try {
mediaRecorder.prepare();
mediaRecorder.start();
}catch (IllegalStateException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
Toast.makeText(getApplicationContext(), "Recording started", Toast.LENGTH_LONG).show();
}
When I press the stop-recording button, the file should be saved at the specified path. I tried it with the Music and Downloads directories, but I can't find any file there.
final OnClickListener soundRecordStop = new OnClickListener(){
@Override
public void onClick(View v) {
soundStop();
}
};
public void soundStop(){
SimpleDateFormat sdf = new SimpleDateFormat("dd.MM.yyyy hh:mm:ss");
Timestamp time = new Timestamp(System.currentTimeMillis());
String actualTime = sdf.format(time);
SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this);
String name = sharedPreferences.getString("soundformat", "format");
int audioEncoder = sharedPreferences.getInt("audioEncoder", Property.getAudioEncoderInt());
String dateiendung = ".aac";
int sound = 6;
// Compare strings with equals(), not ==.
if("aac".equals(name)){
dateiendung = ".aac";
sound = 6;
} else if("amr_nb".equals(name)){
dateiendung = ".3gp";
sound = 3;
}
else if("amr_wb".equals(name)){
dateiendung = ".3gp";
sound = 4;
}
else if("default".equals(name)){
dateiendung = ".default";
sound = 0;
}
else if("mpeg".equals(name)){
dateiendung = ".mp4";
sound = 2;
}
else if("gpp".equals(name)){
Log.d("in gpp", "in gpp");
dateiendung = ".3gp";
sound = 1;
}
soundFile = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS);
soundFilePath = soundFile.getAbsolutePath() + actualTime + dateiendung;
Log.d("hier ", "hier1" + mediaRecorder);
if(mediaRecorder != null){
Log.d("hier ", "hier2" + mediaRecorder);
try {
//mediaRecorder.prepare();
mediaRecorder.stop();
mediaRecorder.release();
//mediaRecorder = null;
Log.d("hier ", "hier4" + mediaRecorder);
} catch (IllegalStateException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
mediaRecorder.setOutputFormat(sound);
mediaRecorder.setOutputFile(soundFilePath);
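// Note: setOutputFormat()/setOutputFile() must be called before prepare();
// calling them here, after release(), has no effect, so no file is ever written.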
Log.d("hier ", "hier2");
String pfad = soundFile.getAbsolutePath();
Toast.makeText(getApplicationContext(), "Recording saved in " + pfad, Toast.LENGTH_LONG).show();
ImageButton soundStop = (ImageButton)findViewById(R.id.soundstop);
soundStop.setVisibility(View.INVISIBLE);
}
The path seems to be correct: pfad /storage/sdcard0/Download17.07.2014 11:55:58.aac
Thanks for the comment; the missing file separator wasn't intentional. I inserted it in order to save the audio file in the Downloads directory, but there is still no file. 😑
Please help me to find my file ;)
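For reference, MediaRecorder's state machine expects the source, output format, file, and encoder to be set in order before prepare(). A minimal sketch (java.text/java.util imports assumed; the file name is illustrative, and note that ':' characters are unsafe in FAT filenames):
MediaRecorder recorder = new MediaRecorder();
recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
recorder.setOutputFormat(MediaRecorder.OutputFormat.AAC_ADTS); // must precede prepare()
File dir = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS);
String fileName = new SimpleDateFormat("dd.MM.yyyy_HH-mm-ss", Locale.US)
.format(new Date()) + ".aac"; // no ':' in the name
recorder.setOutputFile(new File(dir, fileName).getAbsolutePath()); // separator handled by File(dir, name)
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
recorder.prepare();
recorder.start();
// ... later, to stop and finalize the file:
recorder.stop();
recorder.release();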