I am using the code below to record the current sound from the microphone. I now need some processing to detect the beat of the music, so that an LED turns on and off in time with the incoming audio. I think BeatDetect would be a useful library for my project, but I cannot find a good example. Can anyone give me a simple example to start from, or point me to a reference?
public void start() {
initializeMediaRecorder();
handler.postDelayed(new Runnable() {
@Override
public void run() {
notifySample(getAmplitude());
handler.postDelayed(this, 150);
}
}, 150);
}
public void stop() {
handler.removeCallbacksAndMessages(null);
if (mediaRecorder != null) {
mediaRecorder.stop();
mediaRecorder.reset();
mediaRecorder.release();
mediaRecorder = null;
}
}
private double getAmplitude() {
if (mediaRecorder != null) {
return mediaRecorder.getMaxAmplitude();
} else {
return 0;
}
}
private void initializeMediaRecorder(){
try {
if(mediaRecorder == null) {
mediaRecorder = new MediaRecorder();
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
mediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
mediaRecorder.setOutputFile("/dev/null");
mediaRecorder.prepare();
mediaRecorder.start();
}
} catch (IOException e) {
e.printStackTrace();
}
}
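As far as I know, BeatDetect comes from the Minim audio library and is aimed at Processing, so it does not drop straight into an Android MediaRecorder setup. A very rough alternative is to build on the amplitude samples the code above already produces: keep a running average of recent amplitudes and treat a sample that jumps well above that average as a beat. The sketch below is only a threshold-based approximation under that assumption; setLed(boolean) is a hypothetical method standing in for whatever drives your LED.
private static final int WINDOW = 20; // about 3 s of history at one sample every 150 ms
private final double[] history = new double[WINDOW];
private int index = 0;

// Call this from notifySample(getAmplitude()).
private void onSample(double amplitude) {
    double sum = 0;
    for (double h : history) {
        sum += h;
    }
    double average = sum / WINDOW;
    // A sample clearly above the recent average is treated as a beat.
    boolean beat = average > 0 && amplitude > average * 1.5;
    setLed(beat); // hypothetical: switch the LED on while a beat is detected
    history[index] = amplitude;
    index = (index + 1) % WINDOW;
}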
I'm developing a custom video recorder. I record a video in portrait mode, but after recording, the video is previewed in landscape.
My code
I am using a SurfaceView and a single capture button: the first tap starts recording, and the second tap stops it and moves to the next activity, which previews the recorded video.
public void startVideoRecord() {
initrecorder();
try {
Thread.sleep(1000);
mMediaRecorder.start();
isRecordStart = true;
} catch (Exception e) {
e.printStackTrace();
}
flashmode.setEnabled(false);
}
public void stopVideoRecord() {
try {
if (mMediaRecorder != null) {
mMediaRecorder.stop();
mMediaRecorder.reset();
mMediaRecorder.release();
mMediaRecorder = null;
}
if (mCamera != null) {
mCamera.stopPreview();
mCamera.lock();
mCamera.startPreview();
}
flashmode.setEnabled(true);
params.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
isRecordStart = false;
lightson = false;
startlayout.setVisibility(View.GONE);
savelayout.setVisibility(View.GONE);
} catch (RuntimeException stopException) {
}
Intent intent = new Intent(UnlVideoActivity.this, PreviewVideo.class);
intent.putExtra("Video_Path", filepath);
intent.putStringArrayListExtra("Term", terms);
intent.putStringArrayListExtra("Term_Id", termId);
startActivity(intent);
}
public void initrecorder() {
try {
mCamera.setParameters(parameters);
CamcorderProfile cp = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
Video_Size_Max_Width = cp.videoFrameWidth;
Video_Size_Max_Height = cp.videoFrameHeight;
int vframerate = cp.videoFrameRate;
startcameraservice();
mCamera.stopPreview();
mCamera.unlock();
if (mMediaRecorder == null)
mMediaRecorder = new MediaRecorder();
mMediaRecorder.setPreviewDisplay(mHolder.getSurface());
mMediaRecorder.setCamera(mCamera);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.DEFAULT);
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
enviromentState = Environment.getExternalStorageState();
File file = new File(Environment.getExternalStoragePublicDirectory(unl_videos), VIDEO_PATH_NAME);
if (Environment.MEDIA_REMOVED.equals(enviromentState) || Environment.MEDIA_UNMOUNTED.equals(enviromentState)) {
Toast.makeText(getApplicationContext(), enviromentState, Toast.LENGTH_LONG).show();
} else {
Toast.makeText(getApplicationContext(), enviromentState, Toast.LENGTH_LONG).show();
// "touch" the file
if (!file.exists()) {
File parent = file.getParentFile();
if (parent != null)
if (!parent.exists())
if (!parent.mkdirs())
throw new IOException("Cannot create " + "parent directories for file: " + file);
file.createNewFile();
}
}
mMediaRecorder.setVideoSize(Video_Size_Max_Width, Video_Size_Max_Height);
mMediaRecorder.setAudioEncodingBitRate(131072);
mMediaRecorder.setAudioSamplingRate(44100);
mMediaRecorder.setVideoEncodingBitRate(3567616);
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
mMediaRecorder.setVideoFrameRate(vframerate);
mMediaRecorder.setOutputFile(file.getAbsolutePath());
Toast.makeText(getApplicationContext(), "file location " + file.getAbsolutePath(), Toast.LENGTH_LONG)
.show();
filename = file.getName();
filepath = file.getAbsolutePath();
} catch (Exception e) {
e.printStackTrace();
}
try {
mMediaRecorder.prepare();
} catch (Exception e) {
e.printStackTrace();
}
}
private void startcameraservice() {
PackageManager pm = getApplicationContext().getPackageManager();
CameraInfo cinfo = new CameraInfo();
if (mCamera == null) {
if (pm.hasSystemFeature(PackageManager.FEATURE_CAMERA)) {
mCamera = Camera.open(cinfo.CAMERA_FACING_BACK);
}
else if (pm.hasSystemFeature(PackageManager.FEATURE_CAMERA_FRONT)) {
mCamera = Camera.open(cinfo.CAMERA_FACING_FRONT);
} else {
}
}
}
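Not a confirmed diagnosis, but a common reason for a portrait recording playing back in landscape is that the file never gets an orientation hint. MediaRecorder.setOrientationHint() can be called in initrecorder() before prepare(); the sketch below assumes the back camera (usually id 0) and reads the sensor orientation, which is typically 90 for a device held in portrait.
// Hedged sketch: write the rotation into the file so players show portrait video upright.
// Must be set before mMediaRecorder.prepare().
Camera.CameraInfo info = new Camera.CameraInfo();
int cameraId = 0; // assuming the back camera, which is usually id 0
Camera.getCameraInfo(cameraId, info);
mMediaRecorder.setOrientationHint(info.orientation); // typically 90 for back cameras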
I have read many reports about this, but still couldn't find an answer that works for me.
Sometimes, after capturing with camera2, an exception is thrown: java.lang.IllegalStateException: Session has been closed; further changes are illegal.
I tried to check that the session is not null before doing anything with it, but I still get this exception.
Any suggestions? Is there anything else I should be handling that I'm not?
private CameraCaptureSession.CaptureCallback mCaptureCallback
= new CameraCaptureSession.CaptureCallback() {
private void process(CaptureResult result) {
switch (mState) {
case STATE_PREVIEW: {
if(touchFocusEnabled) {
letSetCaptureSessionOnce = true;
mState = STATE_WAITING_LOCK;
try {
// Reset the auto-focus trigger
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback,
mHandler);
} catch (CameraAccessException e) {
L.e("CameraLolipop --- CameraCaptureSession.CaptureCallback " + e);
}
} else {
if(letSetCaptureSessionOnce) {
try {
if ((null != mCaptureSession) && (isCameraOpen)) {
mState = STATE_PREVIEW;
mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback,
mHandler);
letSetCaptureSessionOnce = true;
}
} catch (CameraAccessException e) {
L.e("CameraLolipop --- CameraCaptureSession.CaptureCallback " + e);
}
}
}
break;
}
case STATE_WAITING_LOCK: {
if(touchFocusEnabled) {
mState = STATE_PICTURE_TAKEN;
touchFocusEnabled = false;
try {
if((mCaptureSession != null) && (isCameraOpen)){
mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback,
mHandler);
}
} catch (CameraAccessException e) {
L.e("CameraLolipop --- STATE_WAITING_LOCK " + e);
}
return;
}
Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
L.d("Focus state ", "STATE_WAITING_LOCK");
boolean fixedFocus = isFixedFocus();
if (afState == null) {
// if ((burstMode) && (getSupportedHardwareLevel() == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL)) {
// captureStillPictureBurst();
// } else {
captureStillPicture();
// }
} else if ((CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED == afState ||
CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED == afState) || fixedFocus) {
L.d("Focus state ", "CONTROL_AF_STATE_FOCUSED_LOCKED or CONTROL_AF_STATE_NOT_FOCUSED_LOCKED");
// CONTROL_AE_STATE can be null on some devices
Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
if (aeState == null ||
aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) {
mState = STATE_PICTURE_TAKEN;
if ((burstMode) && (getSupportedHardwareLevel() == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL)) {
// captureStillPictureBurst();
captureStillPicture();
} else {
captureStillPicture();
}
} else {
runPrecaptureSequence();
}
}
break;
}
case STATE_WAITING_PRECAPTURE: {
L.d("Focus state ", "STATE_WAITING_PRECAPTURE");
// CONTROL_AE_STATE can be null on some devices
Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
if (aeState == null ||
aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE ||
aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED || isFixedFocus()) {
mState = STATE_WAITING_NON_PRECAPTURE;
}
break;
}
case STATE_WAITING_NON_PRECAPTURE: {
L.d("Focus state ", "STATE_WAITING_NON_PRECAPTURE");
// CONTROL_AE_STATE can be null on some devices
Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
mState = STATE_PICTURE_TAKEN;
if ((burstMode) && (getSupportedHardwareLevel() == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL)) {
// captureStillPictureBurst();
captureStillPicture();
} else {
captureStillPicture();
}
}
break;
}
case STATE_PICTURE_TAKEN: {
Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
L.d("TOUCH", " afState " + afState);
mState = STATE_PREVIEW;
if(afState == 4) {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
}
if(afState == 0) {
Handler handler = new Handler();
handler.postDelayed(new Runnable() {
@Override
public void run() {
mPreviewRequestBuilder
.set(CaptureRequest.CONTROL_AF_REGIONS, null);
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
}
}, 2500);
}
break;
}
}
}
@Override
public void onCaptureProgressed(CameraCaptureSession session,
CaptureRequest request,
CaptureResult partialResult) {
super.onCaptureProgressed(session, request, partialResult);
// process(partialResult);
}
@Override
public void onCaptureCompleted(CameraCaptureSession session,
CaptureRequest request,
TotalCaptureResult result) {
// if ((mState != STATE_PREVIEW) || (touchFocusEnabled)) {
super.onCaptureCompleted(session, request, result);
if(session != null) {
process(result);
}
}
};
and here is where session is being created:
private CameraCaptureSession.StateCallback mSessionPreviewStateCallback = new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
L.i("Thread", "onConfigured---->" + Thread.currentThread().getName());
// The camera is already closed
if(null == mCameraDevice) {
return;
}
try {
mCaptureSession = cameraCaptureSession;
mCameraSessionIsClosed = false;
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
if((isCameraOpen) && (mCaptureSession != null)){
mPreviewRequest = mPreviewRequestBuilder.build();
mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, mHandler);
}
L.d("ex0003", "Test... mSessionPreviewStateCallback ");
} catch (CameraAccessException e) {
L.e("CameraLolipop --- openCamera() " + e);
}
}
@Override
public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
Toast.makeText(getActivity(), "onConfigureFailed---Preview", Toast.LENGTH_SHORT).show();
}
};
and in onPause closeCamera() is being called:
try {
if(mCameraDevice == null && mCameraIsClosed) {
return;
}
if((mCaptureSession != null) && (isCameraOpen)) {
try {
mCaptureSession.abortCaptures();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
mCameraOpenCloseLock.acquire();
if (null != mCaptureSession) {
mCaptureSession.close();
mCaptureSession = null;
mCameraSessionIsClosed = true;
}
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
isCameraOpen = false;
mCameraIsClosed = true;
}
if (null != mImageReader) {
mImageReader.close();
mImageReader = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
} finally {
mCameraOpenCloseLock.release();
}
After that, stopBackgroundThread() and super.onPause() are called.
In my case the problem was createCaptureRequest being called again while another request was still in progress (before onConfigured/onConfigureFailed had been called). What I ended up doing was creating a boolean variable that keeps track of whether the camera preview is already being set up; if it is, I don't make another createCaptureRequest.
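A minimal sketch of that guard (the field and method names here are illustrative, not from the original code):
// Hedged sketch of the boolean guard described above.
private boolean previewSetupInProgress = false;

private void startPreviewGuarded() {
    if (previewSetupInProgress) {
        return; // a previous request/session setup has not finished yet
    }
    previewSetupInProgress = true;
    createCameraPreviewSession(); // your existing code that builds the request and session
}

// Reset the flag in onConfigured() and onConfigureFailed() of the session StateCallback:
//     previewSetupInProgress = false;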
Check with a flag whether all the camera configuration has been set, as @Rohit suggested, or give the repeatingRequest of your mCaptureSession some delay while the configuration is still being set up in parallel in the background. In my case I solved this exception by delaying setRepeatingRequest by 500 ms, like this:
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
try {
mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(),
mCaptureCallback, null);
} catch (CameraAccessException e) {
Log.e(TAG, "Failed to start camera preview because it couldn't access camera", e);
} catch (IllegalStateException e) {
Log.e(TAG, "Failed to start camera preview.", e);
}
}
}, 500);
Also, the same problem occurs the first time you grant permissions and start your camera preview (the app crashes), but there you will get a CameraAccessException stating "Failed to start camera session". In that case, you should add the same delay when creating the capture session, like below:
void startCaptureSession() {
if (!isCameraOpened() || !mPreview.isReady() || mImageReader == null) {
return;
}
previewSize = chooseOptimalSize();
mPreview.setBufferSize(previewSize.getWidth(), previewSize.getHeight());
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
try {
Surface surface = mPreview.getSurface();
mPreviewRequestBuilder = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.addTarget(surface);
mCamera.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()),
mSessionCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
throw new RuntimeException("Failed to start camera session");
}
}
}, 500);
}
I hope it helps.
In the documentation it says:
if this session is no longer active, either because the session was explicitly closed, a new session has been created or the camera device has been closed.
If you are using the Camera2 API sample project, this means you call openCamera(), createCameraPreviewSession() or closeCamera() while another createCameraPreviewSession() call is still in progress.
I solved this with a global variable that is set to true whenever createCameraPreviewSession() is called, and set back to false 10 ms after createCameraPreviewSession() has finished. While this variable is true, I block all new createCameraPreviewSession() calls and also all closeCamera() calls.
This solution worked for me. However, I don't know if there is a fundamental reason why you should not do this. If somebody knows better, please let me know.
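A rough sketch of that approach, assuming the createCameraPreviewSession()/closeCamera() names from the Camera2 sample; the Handler-based 10 ms reset is only illustrative.
// Hedged sketch of the global flag described above.
private volatile boolean previewSessionBusy = false;
private final Handler uiHandler = new Handler(Looper.getMainLooper());

private void createCameraPreviewSessionSafe() {
    if (previewSessionBusy) {
        return; // block re-entrant calls while a session is being configured
    }
    previewSessionBusy = true;
    createCameraPreviewSession(); // the sample's setup method
    // Clear the flag shortly after setup has finished.
    uiHandler.postDelayed(new Runnable() {
        @Override
        public void run() {
            previewSessionBusy = false;
        }
    }, 10);
}

private void closeCameraSafe() {
    if (previewSessionBusy) {
        return; // also block closeCamera() while the flag is set
    }
    closeCamera();
}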
I'm not sure if this will work, but try it and let me know if it works.
private CameraCaptureSession.StateCallback mSessionPreviewStateCallback = new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
L.i("Thread", "onConfigured---->" + Thread.currentThread().getName());
// The camera is already closed
if(null == mCameraDevice) {
return;
}
try {
mCaptureSession = cameraCaptureSession;
mCameraSessionIsClosed = false;
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
if((isCameraOpen) && (mCaptureSession != null)){
mPreviewRequest = mPreviewRequestBuilder.build();
mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, mHandler);
}
L.d("ex0003", "Test... mSessionPreviewStateCallback ");
} catch (CameraAccessException e) {
Log.e(TAG, "Failed to start camera preview because it couldn't access camera", e);
} catch (IllegalStateException e) {
Log.e(TAG, "Failed to start camera preview.", e);
}
}
@Override
public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
Toast.makeText(getActivity(), "onConfigureFailed---Preview", Toast.LENGTH_SHORT).show();
}
@Override
public void onClosed(@NonNull CameraCaptureSession session) {
if (mCaptureSession != null && mCaptureSession.equals(session)) {
mCaptureSession = null;
}
}
};
This error occurs if the camera is opened before all the configuration has been set on the preview request builder. Hence you must first set all the parameters, such as the TextureView size and the camera output size.
Here is code from Google's sample app:
setUpCameraOutputs(width, height);
configureTransform(width, height);
Activity activity = getActivity();
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
manager.openCamera(mCameraId, mStateCallback, mBackgroundHandler);
I'm trying to record video using a GLSurfaceView in Android, but every time I tap to record, I get a NullPointerException in onPause.
Here is my code to record video:
mCamera = new CameraLoader();
buttonn_capture.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
try{
if (recording) {
mediaRecorder.stop(); // stop the recording
recording = false;
} else {
// Release Camera before MediaRecorder start
mCamera.releaseCamera();
if (!prepareMediaRecorder()) {
finish();
}
try {
mediaRecorder.prepare();
} catch (IllegalStateException | IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
mediaRecorder.start();
recording = true;
// myButton.setText("Cancel");
}
}catch(Exception ex){
Toast.makeText(getApplicationContext(), "Please tap and hold to record!",
Toast.LENGTH_LONG).show();
reload();
}
}
});
private boolean prepareMediaRecorder() {
mCamera.mCameraInstance.unlock();
mediaRecorder.setCamera(mCamera.mCameraInstance);
// Step 2: Set sources
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
mediaStorageDir = new File(
Environment
.getExternalStoragePublicDirectory(Environment.DIRECTORY_MOVIES),
"MusicDubs");
// Create the storage directory if it does not exist
if (!mediaStorageDir.exists()) {
mediaStorageDir.mkdirs();
}
timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss", Locale.getDefault())
.format(new Date());
CameraInfo caminfo = new CameraInfo();
mCamera.mCameraInstance.getCameraInfo(0, caminfo);
if (caminfo.facing == CameraInfo.CAMERA_FACING_FRONT) {
mediaRecorder.setProfile(CamcorderProfile.get(0, CamcorderProfile.QUALITY_HIGH));
mediaRecorder.setOrientationHint(270);
} else if (caminfo.facing == CameraInfo.CAMERA_FACING_BACK) {
mediaRecorder.setProfile(CamcorderProfile.get(0, CamcorderProfile.QUALITY_HIGH));
mediaRecorder.setOrientationHint(270);
mediaRecorder.setOrientationHint(90);
}
//mediaRecorder.setCaptureRate(20);
mediaRecorder.setVideoFrameRate(120);
mediaRecorder.setOutputFile(mediaStorageDir.getPath() + "/"
+ "_" + timeStamp + ".mp4");
// Step 5: Set the preview output
mediaRecorder.setPreviewDisplay(glSurfaceView.getHolder()
.getSurface());
try {
mediaRecorder.prepare();
} catch (IllegalStateException e) {
releaseMediaRecorder();
// releaseMediaPlayer();
return false;
} catch (IOException e) {
releaseMediaRecorder();
// releaseMediaPlayer();
return false;
}
return true;
}
private void releaseMediaRecorder() {
if (mediaRecorder != null) {
mediaRecorder.reset(); // clear recorder configuration
mediaRecorder.release(); // release the recorder object
mediaRecorder = null;
mCamera.mCameraInstance.lock(); // lock camera for later use
}
}
In the activity's onPause and onResume I added this code:
@Override
protected void onResume() {
super.onResume();
mCamera.onResume();
}
@Override
protected void onPause() {
mCamera.onPause();
super.onPause();
}
Here is my camera class that loads the camera:
private class CameraLoader {
private int mCurrentCameraId = 0;
private Camera mCameraInstance;
public void onResume() {
setUpCamera(mCurrentCameraId);
}
public void onPause() {
releaseCamera();
}
public void switchCamera() {
releaseCamera();
mCurrentCameraId = (mCurrentCameraId + 1) % mCameraHelper.getNumberOfCameras();
setUpCamera(mCurrentCameraId);
}
private void setUpCamera(final int id) {
mCameraInstance = getCameraInstance(id);
Parameters parameters = mCameraInstance.getParameters();
// TODO adjust by getting supportedPreviewSizes and then choosing
// the best one for screen size (best fill screen)
if (parameters.getSupportedFocusModes().contains(
Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
parameters.setFocusMode(Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCameraInstance.setParameters(parameters);
int orientation = mCameraHelper.getCameraDisplayOrientation(
ActivityCamera.this, mCurrentCameraId);
CameraInfo2 cameraInfo = new CameraInfo2();
mCameraHelper.getCameraInfo(mCurrentCameraId, cameraInfo);
boolean flipHorizontal = cameraInfo.facing == CameraInfo.CAMERA_FACING_FRONT;
mGPUImage.setUpCamera(mCameraInstance, orientation, flipHorizontal, false);
}
/** A safe way to get an instance of the Camera object. */
private Camera getCameraInstance(final int id) {
Camera c = null;
try {
c = mCameraHelper.openCamera(id);
} catch (Exception e) {
e.printStackTrace();
}
return c;
}
private void releaseCamera() {
mCameraInstance.setPreviewCallback(null);
mCameraInstance.release();
mCameraInstance = null;
}
}
Its behavior is very strange; I don't understand why it gives me a NullPointerException in onPause, because I see no reason for the camera to be null there. Please tell me where I'm going wrong. Any help would be much appreciated. Thank you :)
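One guess based on the code above, not a confirmed fix: releaseCamera() runs twice, once from the capture button handler (mCamera.releaseCamera() before preparing the MediaRecorder) and again from onPause() via mCamera.onPause(). After the first call mCameraInstance is already null, so the second call crashes on mCameraInstance.setPreviewCallback(null). A null guard would at least avoid that NullPointerException:
// Hedged sketch: make releaseCamera() safe to call more than once.
private void releaseCamera() {
    if (mCameraInstance == null) {
        return; // already released, e.g. before the MediaRecorder was started
    }
    mCameraInstance.setPreviewCallback(null);
    mCameraInstance.release();
    mCameraInstance = null;
}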
I am stuck with a weird problem. I have an activity with a MediaPlayer that should play a just-recorded audio file. At first the MediaPlayer is initialized fine and the file can be played.
When I rotate the screen, the activity is destroyed and then reinitialized for the new orientation. Therefore, I re-initialize the MediaPlayer too.
This works a couple of times, but at some point mediaPlayer.setDataSource() throws a NullPointerException because the file is suddenly gone. Sadly, I haven't seen any other errors in the logs.
Here are some code snippets:
player creation:
/**
* Creates and initializes the player with the proper file.
*/
private void createPlayer() {
synchronized (playerMutex) {
player = new MediaPlayer();
player.setLooping(false);
player.setOnPreparedListener(this);
player.setOnErrorListener(this);
player.setOnCompletionListener(this);
}
readGreeting();
}
player initialization:
isPrepared = false;
try {
final File file = new File(audioFilename);
in = new FileInputStream(file);
synchronized (playerMutex) {
player.setDataSource(in.getFD());
}
// using a runnable instead of prepareAsync to not accidentally call pause on media player while preparing
Runnable preparer = new Runnable() {
@Override
public void run() {
try {
synchronized (playerMutex) {
if (player != null) {
player.prepare();
}
}
} catch (Exception ex) {
Log.e(TAG, "Error preparing player for file " + file.getAbsolutePath(), ex);
}
}
};
new Thread(preparer).start();
} catch (Exception ex) {
btnPlayback.setEnabled(false);
Log.e(TAG, "Error preparing player", ex);
} finally {
if (in != null) {
try {
in.close();
} catch (IOException e) {
Log.e(TAG, "initPlayer: ", e);
}
}
}
saving instance state...
@Override
protected void onPause() {
synchronized (playerMutex) {
if (isPlaying()) {
getIntent().putExtra(EXTRA_KEY_SEEK, player.getCurrentPosition());
pause();
}
}
setAudioModeBackToNormal();
super.onPause();
}
private void pause() {
synchronized (playerMutex) {
if (isPlaying()) {
player.pause();
}
}
btnPlayback.setVisibility(View.VISIBLE);
btnPause.setVisibility(View.GONE);
}
@Override
protected void onSaveInstanceState(Bundle outState) {
final Bundle extras = getIntent().getExtras();
outState.putBundle("extras", extras);
super.onSaveInstanceState(outState);
}
@Override
protected void onRestoreInstanceState(Bundle savedInstanceState) {
super.onRestoreInstanceState(savedInstanceState);
getIntent().putExtras(savedInstanceState.getBundle("extras"));
}
cleanup:
private void stopPlayerAndFreeResources() {
synchronized (playerMutex) {
isPrepared = false;
if (player != null) {
player.stop();
player.release();
player = null;
}
}
if (in != null) {
try {
in.close();
in = null;
} catch (IOException e) {
Log.e(TAG, "Unexpected error", e);
}
}
}
Maybe I'm looking at the problem from the wrong angle and it has nothing to do with the player. Has anybody ever had issues with disappearing files?
I had playlist files disappear once. The cause turned out to be certain media players that had an option to "manage" my playlists. In this case, managing meant deleting the playlists I already had :(