This is the code I use:
public void stopRecording() throws Exception, com.googlecode.javacv.FrameRecorder.Exception {
    runAudioThread = false;
    if (recorder != null && recording) {
        recording = false;
        Log.v(LOG_TAG, "Finishing recording, calling stop and release on recorder");
        try {
            recorder.stop();
            recorder.release();
        } catch (FFmpegFrameRecorder.Exception e) {
            e.printStackTrace();
        }
    }
    Log.i(LOG_TAG, "filename = " + ffmpeg_link);
    if (i > 0) {
        Log.i(LOG_TAG, "!!!!!!!!!WILL CONCATENATE");
        FrameGrabber grabber1 = new FFmpegFrameGrabber(pathStart + "/JavaCV/stream0.mp4");
        grabber1.start();
        Log.i(LOG_TAG, "graber1.start");
        FrameGrabber grabber2 = new FFmpegFrameGrabber(pathStart + "/JavaCV/stream1.mp4");
        grabber2.start();
        Log.i(LOG_TAG, "graber2.start");
        FrameRecorder recorder2 = new FFmpegFrameRecorder(pathStart + "/JavaCV/output.mp4",
                grabber1.getImageWidth(), grabber1.getImageHeight(), grabber1.getAudioChannels());
        recorder2.setFrameRate(grabber1.getFrameRate());
        recorder2.setSampleFormat(grabber1.getSampleFormat());
        recorder2.setSampleRate(grabber1.getSampleRate());
        Log.i(LOG_TAG, "Recorder.start");
        recorder2.start();
        Frame frame;
        int j = 0;
        while ((frame = grabber1.grabFrame()) != null) {
            j++;
            recorder2.record(frame);
            Log.i(LOG_TAG, "while1 nr:" + j + " Frame number: " + grabber1.getFrameNumber());
        }
        Log.i(LOG_TAG, "after while1");
        while ((frame = grabber2.grabFrame()) != null) {
            recorder2.record(frame);
            Log.i(LOG_TAG, "while2");
        }
        Log.i(LOG_TAG, "Recorder.stop");
        recorder2.stop();
        grabber2.stop();
        grabber1.stop();
        Log.i(LOG_TAG, "end concatenate");
    }
    i++;
    ffmpeg_link = pathStart + "/JavaCV/stream" + i + ".mp4";
    Log.i(LOG_TAG, "next filename = " + ffmpeg_link);
}
It works well and merges the videos; the only issue is that the second video has no sound. How can I include the audio channel for the second video as well?
The line runAudioThread = false; in stopRecording() was setting the audio thread flag to false, and that is why the second video had no sound.
A Frame that includes sound is the one returned by "grabber1.grab()".
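As a minimal sketch of that distinction (assuming the JavaCV Frame fields image and samples, which the code above also relies on), audio and video frames can be told apart while concatenating:

    Frame f;
    while ((f = grabber1.grabFrame()) != null) {
        if (f.samples != null) {
            // this Frame carries audio samples
        }
        if (f.image != null) {
            // this Frame carries a video image
        }
        recorder2.record(f); // records whichever payload the Frame holds
    }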
I am developing an app where some tasks need to be performed for incoming as well as outgoing calls.
But when I make an outgoing call from the app, only the incoming-call code runs; ACTION_OUTGOING is never detected.
I traced it with Log and found that when I make an outgoing call, none of the code related to ACTION_OUTGOING executes.
Here is what I have tried so far:
public void onReceive(Context context, Intent intent) {
    if (intent.getAction().equals(ACTION_IN)) {
        Log.e(TAG, "ACTION_IN" + "");
        if ((bundle = intent.getExtras()) != null) {
            state = bundle.getString(TelephonyManager.EXTRA_STATE);
            if (state.equals(TelephonyManager.EXTRA_STATE_RINGING)) {
                inCall = bundle.getString(TelephonyManager.EXTRA_INCOMING_NUMBER);
                wasRinging = true;
                Toast.makeText(context, "IN : " + inCall, Toast.LENGTH_LONG).show();
            } else if (state.equals(TelephonyManager.EXTRA_STATE_OFFHOOK)) {
                if (wasRinging == true) {
                    Toast.makeText(context, "ANSWERED", Toast.LENGTH_LONG).show();
                    startRecord("incoming");
                }
            } else if (state.equals(TelephonyManager.EXTRA_STATE_IDLE)) {
                Log.e(TAG, "ACTION_IN EXTRA_STATE_IDLE");
                wasRinging = false;
                Toast.makeText(context, "Call End", Toast.LENGTH_LONG).show();
                end_time = System.currentTimeMillis();
                if (recordstarted) {
                    recorder.stop();
                    recordstarted = false;
                }
                String path = Environment.getExternalStorageDirectory().toString() + "/Ripples_Call_Recording";
                Log.e("Files", "Path: " + path);
                File directory = new File(path);
                File[] files = directory.listFiles();
                Log.e("Files", "Size: " + files.length);
                for (int i = 0; i < files.length; i++) {
                    Log.e("Files", "FileName: " + files[i].getName());
                    performIncomingTask(files[i], path, files[i].getName());
                }
            }
        }
    } else if (intent.getAction().equals(ACTION_OUT)) {
        Log.e(TAG, "ACTION_OUT ");
        if ((bundle = intent.getExtras()) != null) {
            outCall = intent.getStringExtra(Intent.EXTRA_PHONE_NUMBER);
            Toast.makeText(context, "OUT : " + outCall, Toast.LENGTH_LONG).show();
            startRecord("outgoing");
            if ((bundle = intent.getExtras()) != null) {
                state = bundle.getString(TelephonyManager.EXTRA_STATE);
                if (state != null) {
                    if (state.equals(TelephonyManager.EXTRA_STATE_IDLE)) {
                        Log.e(TAG, "ACTION_OUT EXTRA_STATE_IDLE ");
                        wasRinging = false;
                        Toast.makeText(context, "REJECT", Toast.LENGTH_LONG).show();
                        if (recordstarted) {
                            recorder.stop();
                            recordstarted = false;
                        }
                        String path = Environment.getExternalStorageDirectory().toString() + "/Ripples_Call_Recording";
                        Log.e("Files", "Path: " + path);
                        File directory = new File(path);
                        File[] files = directory.listFiles();
                        Log.e("Files", "Size: " + files.length);
                        for (int i = 0; i < files.length; i++) {
                            Log.e("Files", "FileName: " + files[i].getName());
                            performOutGoingTask(files[i], path, files[i].getName());
                        }
                    }
                }
            }
        }
    }
}
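For reference, a receiver only sees outgoing calls when the app holds the PROCESS_OUTGOING_CALLS permission and the receiver is registered for the NEW_OUTGOING_CALL action; whether both are in place here is an assumption, since the registration isn't shown. A minimal dynamic registration might look like:

    // Assumes the manifest declares android.permission.PROCESS_OUTGOING_CALLS
    // and android.permission.READ_PHONE_STATE; callReceiver is this receiver.
    IntentFilter filter = new IntentFilter();
    filter.addAction(TelephonyManager.ACTION_PHONE_STATE_CHANGED); // incoming call state
    filter.addAction(Intent.ACTION_NEW_OUTGOING_CALL);             // outgoing calls
    context.registerReceiver(callReceiver, filter);

Note also that a NEW_OUTGOING_CALL broadcast does not carry TelephonyManager.EXTRA_STATE, so the nested state check inside the ACTION_OUT branch above can never match.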
In our player we use the MediaPlayer to render into a SurfaceTexture that is used in a JNI OpenGL layer to render dynamic textures.
Everything runs fine on Android 4 and 5, but not on Android 6 (with the same code).
The workflow is very simple.
The first step is loading the movie from a URL:
public void loadMovie(String moviePath) {
    try {
        if (mediaPlayer == null) {
            mediaPlayer = new MediaPlayer();
            mediaPlayer.reset();
            mediaPlayer.setLooping(bIsLooping);
            mediaPlayer.setOnBufferingUpdateListener(this);
            mediaPlayer.setOnCompletionListener(this);
            mediaPlayer.setOnErrorListener(this);
            mediaPlayer.setOnInfoListener(this);
            mediaPlayer.setOnPreparedListener(this);
            mediaPlayer.setOnSeekCompleteListener(this);
            mediaPlayer.setOnTimedTextListener(this);
            mediaPlayer.setOnVideoSizeChangedListener(this);
            surfaceTexture = new SurfaceTexture(texNumber);
            surfaceTexture.setOnFrameAvailableListener(this);
            surface = new Surface(surfaceTexture);
            mediaPlayer.setSurface(surface);
        }
        bIsLoaded = false;
        duration = 0.0f;
        width = height = 0;
        mediaPlayer.reset();
        if (moviePath.startsWith("android.resource://")) {
            mediaPlayer.setDataSource(activity, Uri.parse(moviePath));
        } else if (moviePath.contains("obb://")) {
            String obbFileEntry = moviePath.replace("obb://", "");
            int idx0 = obbFileEntry.indexOf("/");
            int idx1 = obbFileEntry.indexOf("/", idx0 + 1);
            int mainVersion = Integer.parseInt(obbFileEntry.substring(0, idx0));
            int patchVersion = Integer.parseInt(obbFileEntry.substring(1 + idx0, idx1));
            obbFileEntry = obbFileEntry.substring(idx1 + 1);
            ZipResourceFile obbFile = APKExpansionSupport.getAPKExpansionZipFile(activity, mainVersion, patchVersion);
            AssetFileDescriptor assetFileDescriptor = obbFile.getAssetFileDescriptor(obbFileEntry);
            if (null != assetFileDescriptor) {
                Log.d("[OFAndroidVideoPlayer]", "Playing media resource '" + obbFileEntry + " packaged in obb expansion file( " + mainVersion + ", " + patchVersion + ")");
                mediaPlayer.setDataSource(assetFileDescriptor.getFileDescriptor(), assetFileDescriptor.getStartOffset(), assetFileDescriptor.getLength());
            }
        } else if (moviePath.contains(".zip/")) {
            String zipFilePath = moviePath.substring(0, moviePath.lastIndexOf(".zip") + ".zip".length());
            String zipFileEntry = moviePath.substring(zipFilePath.length() + 1);
            ZipResourceFile zipFile = new ZipResourceFile(zipFilePath);
            AssetFileDescriptor assetFileDescriptor = zipFile.getAssetFileDescriptor(zipFileEntry);
            if (null != assetFileDescriptor) {
                Log.d("[OFAndroidVideoPlayer]", "Playing media resource '" + zipFileEntry + " packaged in zip file: " + zipFilePath);
                mediaPlayer.setDataSource(assetFileDescriptor.getFileDescriptor(), assetFileDescriptor.getStartOffset(), assetFileDescriptor.getLength());
            }
        } else {
            mediaPlayer.setDataSource(moviePath);
        }
        mediaPlayer.prepare();
        width = mediaPlayer.getVideoWidth();
        height = mediaPlayer.getVideoHeight();
        duration = mediaPlayer.getDuration();
        //setVolume(volume);
        this.mediaPath = moviePath;
    } catch (IllegalStateException e) {
        Log.e("[OFAndroidVideoPlayer]", "Illegal state with " + moviePath + " (" + e.getMessage() + ")", e);
    } catch (IOException e) {
        Log.e("[OFAndroidVideoPlayer]", "I/O exception with " + moviePath + " (" + e.getMessage() + ")", e);
        this.fixMediaPlayerIOExceptionCounter += 1;
        if (this.fixMediaPlayerIOExceptionCounter < 2) {
            mediaPlayer.reset();
            mediaPlayer.release();
            mediaPlayer = null;
            loadMovie(moviePath);
        }
    } catch (Exception e) {
        Log.e("[OFAndroidVideoPlayer]", "Couldn't load " + moviePath + " (" + e.getMessage() + ")", e);
    }
}
After registering the MediaPlayer listeners, we use onPrepared to know when the MediaPlayer is ready:
@Override
public void onPrepared(MediaPlayer mp) {
    Log.d("[ofxAndroidVideo]", "[onPrepared] Entering method.");
    duration = (0.1f < duration) ? duration : mediaPlayer.getDuration();
    width = (0 != width) ? width : mediaPlayer.getVideoWidth();
    height = (0 != height) ? height : mediaPlayer.getVideoHeight();
    bIsLoaded = true;
    if (bAutoResume) {
        setPositionMS(movieResumeTime);
        bAutoResume = false;
        play();
    }
}
And we update the texture in the SurfaceTexture frame-available callback:
@Override
public void onFrameAvailable(SurfaceTexture arg0) {
    synchronized (this) {
        if (surfaceTexture != null) {
            surfaceTexture.updateTexImage();
        }
    }
}
What is the problem with Android 6?
No listener callbacks are called after MediaPlayer creation and prepare(); IMHO it seems as if the internal state of the MediaPlayer were wrong or broken, or always stuck in the idle state, and some internal worker threads were never created/initialized.
There are no error traces in logcat; all of this is very frustrating.
Any ideas? Thanks in advance.
kabo
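One Android 6-specific change worth ruling out (an assumption on my part, since the post doesn't say where the media files live): API 23 introduced runtime permissions, so reading a movie from external storage now requires READ_EXTERNAL_STORAGE to be granted at runtime, not merely declared in the manifest. A minimal pre-flight check before calling loadMovie() might look like:

    // Sketch only; the request code 1 is arbitrary.
    if (Build.VERSION.SDK_INT >= 23
            && activity.checkSelfPermission(Manifest.permission.READ_EXTERNAL_STORAGE)
                    != PackageManager.PERMISSION_GRANTED) {
        activity.requestPermissions(new String[]{ Manifest.permission.READ_EXTERNAL_STORAGE }, 1);
        return; // retry loadMovie() from onRequestPermissionsResult()
    }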
I'm modifying an audio encoder example to write both audio and video. I get raw video data from the device display via MediaProjection and raw audio data from the microphone via AudioRecord, then feed both to MediaCodec (two separate codec instances).
After that I send the encoded video and audio data to MediaMuxer to produce an mp4 file. I have two problems:
The result is a file with a perfect video track but a terrible audio track: the audio plays with gaps at the beginning, then runs without pauses but far too fast towards the end.
When I play the video with Android MX Player everything is OK, but when I play it with PC players (Windows Media Player or Media Player Classic) the audio plays as in MX Player while the video doesn't play; only the first frame is shown.
Sample of result video:
Part of the code:
/**
 * Method run of EncoderTask.
 */
public void run() {
    if (mIsInitialized) {
        switch (type) {
            case ENCODE_AUDIO_FRAME:
                if (!mStopReceived) {
                    _offerAudioEncoder(mAudioData, presentationTimeNs);
                    mDrainHandler.postDelayed(new Runnable() {
                        @Override
                        public void run() {
                            mEncodingService.submit(new EncoderTask(Encoder.this, EncoderTaskType.ENCODER_VIDEO_FRAME));
                        }
                    }, DELAY_MILLIS); // 10 milliseconds
                }
                break;
            case ENCODER_VIDEO_FRAME:
                if (!mStopReceived) {
                    encoder._offerVideoEncoder();
                    mDrainHandler.postDelayed(new Runnable() {
                        @Override
                        public void run() {
                            mEncodingService.submit(new EncoderTask(Encoder.this, EncoderTaskType.ENCODE_AUDIO_FRAME));
                        }
                    }, DELAY_MILLIS); // 10 milliseconds
                }
                break;
            case FINALIZE_ENCODER:
                finalizeEncoder();
                break;
        }
        // prevent multiple execution of same task
        mIsInitialized = false;
        mEncodingServiceQueueLength -= 1;
    } else {
        Log.e(TAG, "run() called but EncoderTask not initialized");
    }
}
public void _offerVideoEncoder() {
    Log.d(TAG, "Offer video");
    if (mStopReceived) {
        closeVideoEncoder();
        Log.d(TAG, "Offer video - stop");
    } else {
        drainEncoder(mVideoEncoder, mVideoBufferInfo, mVideoTrackIndex, false, "video");
        Log.d(TAG, "Offer video - drain");
        if (mStopReceived) {
            closeVideoEncoder();
            Log.d(TAG, "Offer video - stop");
        }
    }
}
public void processAudioFrame() {
    long audioPresentationTimeNs = System.nanoTime();
    byte[] thisBuffer;
    if (mDataBuffer.isEmpty()) {
        thisBuffer = new byte[mSamplesPerFrame];
    } else {
        thisBuffer = mDataBuffer.poll();
    }
    mReadResult = mAudioRecorder.read(thisBuffer, 0, mSamplesPerFrame);
    if (VERBOSE) Log.i(TAG, "FillBuffer real: " + String.valueOf(mBufferWriteIndex)
            + " - " + String.valueOf(mBufferWriteIndex + mReadResult - 1));
    if (mReadResult != AudioRecord.ERROR_BAD_VALUE && mReadResult != AudioRecord.ERROR_INVALID_OPERATION) {
        mBufferWriteIndex = mBufferWriteIndex + mReadResult - 1;
        mTotalFramesWritten++;
        if (mAudioEncoder != null) {
            mAudioEncoder.offerEncoder(thisBuffer, audioPresentationTimeNs);
        }
        if (!mIsRecording && mAudioRecorder != null) {
            mAudioRecorder.setRecordPositionUpdateListener(null);
            mAudioRecorder.release();
            mAudioRecorder = null;
            Log.i(TAG, "stopped");
        }
    } else {
        Log.e(TAG, "Read error");
    }
}
private void _offerAudioEncoder(byte[] input, long presentationTimeNs) {
    if (audioBytesReceived == 0) {
        mAudioStartTime = presentationTimeNs;
    }
    mTotalInputAudioFrameCount++;
    audioBytesReceived += input.length;
    if (mEosSentToAudioEncoder && mStopReceived || input == null) {
        logStatistics();
        if (mEosReceived) {
            Log.d(TAG, "EOS received in offerAudioEncoder");
            closeAudioEncoder();
            mEosSentToAudioEncoder = true;
            if (!mStopReceived) {
                prepareAudioEncoder();
            } else {
                mEncodingService.shutdown();
            }
        }
        return;
    }
    // transfer previously encoded data to muxer
    drainEncoder(mAudioEncoder, mAudioBufferInfo, mAudioTrackIndex, false, "audio");
    sendFrameToEncoder(input, presentationTimeNs); // send current frame data to encoder
}
private void sendFrameToEncoder(byte[] input, long presentationTimeNs) {
    try {
        ByteBuffer[] inputBuffers = mAudioEncoder.getInputBuffers();
        int inputBufferIndex = mAudioEncoder.dequeueInputBuffer(-1);
        if (inputBufferIndex >= 0) {
            ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
            inputBuffer.clear();
            inputBuffer.put(input);
            if (mAudioSoftwarePoller != null) {
                mAudioSoftwarePoller.recycleInputBuffer(input);
            }
            long presentationTimeUs = (presentationTimeNs - mAudioStartTime) / 1000; // in microseconds
            if (mEosReceived) {
                mAudioEncoder.queueInputBuffer(inputBufferIndex, 0, input.length, presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                closeAudioEncoder();
                mEosSentToAudioEncoder = true;
                if (mStopReceived) {
                    mEncodingService.shutdown();
                }
            } else {
                mAudioEncoder.queueInputBuffer(inputBufferIndex, 0, input.length, presentationTimeUs, 0);
            }
        }
    } catch (Throwable t) {
        t.printStackTrace();
    }
}
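One common cause of audio that starts gapped and ends too fast, for what it's worth, is deriving presentationTimeNs from System.nanoTime() at read time, as processAudioFrame() above does, rather than from the number of PCM samples actually read; the clock then absorbs every scheduling hiccup. A sketch of sample-count-based timestamps, assuming 16-bit mono PCM and a SAMPLE_RATE field matching the AudioRecord configuration (both names are assumptions, not from the original code):

    // Hypothetical helper; SAMPLE_RATE and mTotalSamplesRead are assumed fields.
    private long samplePresentationTimeUs(int bytesRead) {
        long ptsUs = mTotalSamplesRead * 1000000L / SAMPLE_RATE;
        mTotalSamplesRead += bytesRead / 2; // 16-bit mono: 2 bytes per sample
        return ptsUs;
    }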
private void drainEncoder(MediaCodec encoder, MediaCodec.BufferInfo bufferInfo, TrackIndex trackIndex, boolean endOfStream, String type) {
    final int TIMEOUT_USEC = 100;
    ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
    while (true) {
        int encoderStatus = encoder.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            if (!endOfStream) {
                if (VERBOSE) Log.d(TAG, "INFO_TRY_AGAIN_LATER " + type + " out of while");
                break; // out of while
            } else {
                if (VERBOSE) Log.d(TAG, "no " + type + " output available, spinning to await EOS");
            }
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            // not expected for an encoder
            if (VERBOSE) Log.d(TAG, "INFO_OUTPUT_BUFFERS_CHANGED " + type);
            encoderOutputBuffers = encoder.getOutputBuffers();
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // should happen before receiving buffers, and should only happen once
            if (mMuxerStarted) {
                throw new RuntimeException("format changed after muxer start");
            }
            MediaFormat newFormat = encoder.getOutputFormat();
            Log.d(TAG, "encoder output format changed: " + newFormat + ".");
            // now that we have the Magic Goodies, start the muxer
            synchronized (mMuxer) {
                trackIndex.index = mMuxer.addTrack(newFormat);
                numTracksAdded++;
                Log.d(TAG, "Added " + type + " track index: " + trackIndex.index);
                if (numTracksAdded == TOTAL_NUM_TRACKS) {
                    mMuxer.start();
                    mMuxerStarted = true;
                    Log.d(TAG, numTracksAdded + " tracks added. Muxer started");
                    break;
                }
            }
        } else if (encoderStatus < 0) {
            Log.w(TAG, "unexpected result from " + type + " encoder.dequeueOutputBuffer: " + encoderStatus);
        } else {
            if (encodedData == null) {
                if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ") " + type + " encodedData == null");
                throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
            }
            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // The codec config data was pulled out and fed to the muxer when we got
                // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
                if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                bufferInfo.size = 0;
            }
            if (bufferInfo.size != 0) {
                if (!mMuxerStarted) {
                    if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ") " + type + " Muxer not started");
                    throw new RuntimeException("muxer hasn't started");
                }
                // adjust the ByteBuffer values to match BufferInfo (not needed?)
                encodedData.position(bufferInfo.offset);
                encodedData.limit(bufferInfo.offset + bufferInfo.size);
                synchronized (mMuxer) {
                    mMuxer.writeSampleData(trackIndex.index, encodedData, bufferInfo);
                }
            }
            encoder.releaseOutputBuffer(encoderStatus, false);
            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                if (!endOfStream) {
                    Log.w(TAG, "reached end of stream unexpectedly");
                } else {
                    if (VERBOSE) Log.d(TAG, "end of stream reached");
                }
                break;
            }
        }
    }
    long endTime = System.nanoTime();
}
I think in the drainEncoder function you missed
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
before
if (encodedData == null)
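Applied to the code above, the final else branch of drainEncoder would then begin like this (just a sketch of where the missing line goes):

    } else {
        // The missing line: map the status index to the actual output buffer.
        ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
        if (encodedData == null) {
            throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
        }
        // ... rest of the branch unchanged ...
    }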
I am trying to merge an mp4 and an mp3 on Android using JavaCV. Please check my code below first; I explain the strange error after it:
private void test2() throws Exception {
    String path = Environment.getExternalStorageDirectory().getAbsolutePath();
    int testId = 4;
    String videoPath = path + "/" + "sample" + testId + ".mp4";
    String audioPath = path + "/" + "love.mp3";
    String outputPath = path + "/" + "out" + testId + ".mp4";
    FrameGrabber grabber1 = new FFmpegFrameGrabber(videoPath);
    FrameGrabber grabber2 = new FFmpegFrameGrabber(audioPath);
    grabber1.start();
    grabber2.start();
    FrameRecorder recorder = new FFmpegFrameRecorder(outputPath,
            grabber1.getImageWidth(), grabber1.getImageHeight(),
            grabber2.getAudioChannels());
    double frameRate = grabber1.getFrameRate();
    recorder.setFrameRate(frameRate);
    recorder.setSampleRate(grabber2.getSampleRate());
    recorder.setVideoQuality(1);
    recorder.start();
    Frame frame1, frame2 = null;
    // getLengthInTime is correct, but getLengthInFrames is not accurate.
    Log.d(TAG, " Video lengthInTime:" + grabber1.getLengthInTime()
            + " Video frames:" + grabber1.getLengthInFrames());
    // Record video.
    int count = 0;
    while (true) {
        frame1 = grabber1.grabFrame();
        if (frame1 == null) {
            break;
        }
        frame1.samples = null;
        recorder.record(frame1);
        count++;
        Log.d(TAG, "Video frame timestamp:" + grabber1.getTimestamp());
    }
    Log.d(TAG, " Video frame count:" + count);
    // Record audio.
    long videoTimestamp = recorder.getTimestamp();
    while (true) {
        frame2 = grabber2.grabFrame();
        if (frame2 != null && grabber2.getTimestamp() <= videoTimestamp) {
            frame2.image = null;
            recorder.record(frame2);
            // Log.d(TAG, "Audio frame timestamp:" + grabber2.getTimestamp());
        } else {
            break;
        }
    }
    // Release.
    recorder.stop();
    recorder.release();
    grabber1.stop();
    grabber2.stop();
}
The output's audio is OK, but the video is strange: it plays for one second, then stops for one second, and repeats like that. The input video was recorded with the phone's camera.
I tried to count the real number of video frames, and I found that the real number is much bigger than the number returned by getLengthInFrames().
grabFrame() grabs both image frames and sound frames.
If you need to work on video frames only, or count them, do it like this:
while (true) {
    Frame grabFrame = frameGrabber.grabFrame();
    if (grabFrame == null) {
        // all frames are processed
        System.out.println("!!! Failed cvQueryFrame");
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                Toast.makeText(RecordActivity.this, "Done !!!", Toast.LENGTH_SHORT).show();
            }
        });
        break;
    }
    if (grabFrame.image != null) {
        // This is a video frame; do your work here
    }
}
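As for the play-one-second, stop-one-second playback, one thing that may help (an assumption on my part, not something the answer above covers) is keeping the recorder's clock aligned with the grabber's timestamps instead of relying on the frame rate alone:

    // Sketch: propagate source timestamps while recording the video frames.
    while ((frame1 = grabber1.grabFrame()) != null) {
        frame1.samples = null;
        if (grabber1.getTimestamp() > recorder.getTimestamp()) {
            recorder.setTimestamp(grabber1.getTimestamp()); // FrameRecorder.setTimestamp(long)
        }
        recorder.record(frame1);
    }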
My problem is that I capture audio by pressing the record button, but I can't see any audio file at the specified path.
I capture sound when I press the capture button:
private void startRecord() {
    ImageButton soundStop = (ImageButton) findViewById(R.id.soundstop);
    soundStop.setVisibility(View.VISIBLE);
    mediaRecorder = new MediaRecorder();
    mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
    //mediaRecorder.setOutputFormat(sound);
    //mediaRecorder.setOutputFile(soundFilePath);
    //mediaRecorder.setAudioEncoder(audioEncoder);
    //Log.d("hier ", "hier" + soundFilePath);
    try {
        mediaRecorder.prepare();
        mediaRecorder.start();
    } catch (IllegalStateException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
    // "Aufnahme gestartet" = "Recording started"
    Toast.makeText(getApplicationContext(), "Aufnahme gestartet", Toast.LENGTH_LONG).show();
}
When I press the stop-recording button the file should be saved at the specified path. I tried it with the Music and Downloads directories, but I can't find any file there:
final OnClickListener soundRecordStop = new OnClickListener() {
    @Override
    public void onClick(View v) {
        soundStop();
    }
};

public void soundStop() {
    SimpleDateFormat sdf = new SimpleDateFormat("dd.MM.yyyy hh:mm:ss");
    Timestamp time = new Timestamp(System.currentTimeMillis());
    String actualTime = sdf.format(time);
    SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this);
    String name = sharedPreferences.getString("soundformat", "format");
    int audioEncoder = sharedPreferences.getInt("audioEncoder", Property.getAudioEncoderInt());
    String dateiendung = ".aac"; // "Dateiendung" = file extension
    int sound = 6;
    if (name.equals("aac")) { // compare strings with equals(), not ==
        dateiendung = ".aac";
        sound = 6;
    } else if (name.equals("amr_nb")) {
        dateiendung = ".3gp";
        sound = 3;
    } else if (name.equals("amr_wb")) {
        dateiendung = ".3gp";
        sound = 4;
    } else if (name.equals("default")) {
        dateiendung = ".default";
        sound = 0;
    } else if (name.equals("mpeg")) {
        dateiendung = ".mp4";
        sound = 2;
    } else if (name.equals("gpp")) {
        Log.d("in gpp", "in gpp");
        dateiendung = ".3gp";
        sound = 1;
    }
    soundFile = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS);
    soundFilePath = soundFile.getAbsolutePath() + actualTime + dateiendung;
    Log.d("hier ", "hier1" + mediaRecorder);
    if (mediaRecorder != null) {
        Log.d("hier ", "hier2" + mediaRecorder);
        try {
            //mediaRecorder.prepare();
            mediaRecorder.stop();
            mediaRecorder.release();
            //mediaRecorder = null;
            Log.d("hier ", "hier4" + mediaRecorder);
        } catch (IllegalStateException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
    mediaRecorder.setOutputFormat(sound);
    mediaRecorder.setOutputFile(soundFilePath);
    Log.d("hier ", "hier2");
    String pfad = soundFile.getAbsolutePath(); // "Pfad" = path
    // "Aufnahme in ... gespeichert" = "Recording saved to ..."
    Toast.makeText(getApplicationContext(), "Aufnahme in " + pfad + " gespeichert", Toast.LENGTH_LONG).show();
    ImageButton soundStop = (ImageButton) findViewById(R.id.soundstop);
    soundStop.setVisibility(View.INVISIBLE);
}
The path seems to be correct: pfad /storage/sdcard0/Download17.07.2014 11:55:58.aac
Thanks for the comment; the missing file separator wasn't intentional. I inserted the missing separator in order to save the audio file in the Downloads directory, but there is still no file 😑
Please help me find my file ;)
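For reference (hedged, since only part of the code is shown): MediaRecorder requires setOutputFormat(), setAudioEncoder(), and setOutputFile() to be called before prepare() and start(), but in the code above those calls are commented out in startRecord() and only made in soundStop() after release(), so the recorder never has an output file to write. A minimal sketch of the documented call order:

    MediaRecorder recorder = new MediaRecorder();
    recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
    recorder.setOutputFormat(MediaRecorder.OutputFormat.AAC_ADTS);
    recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
    recorder.setOutputFile(soundFilePath); // must be set before prepare()
    recorder.prepare();
    recorder.start();
    // ... later, when stopping ...
    recorder.stop();
    recorder.release();

Note also that the timestamp format "dd.MM.yyyy hh:mm:ss" puts spaces and colons into the file name, which FAT-formatted external storage typically rejects.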