Screen recording in Android using the MediaRecorder API

Hey, I'm trying to build a screencast app on Android Lollipop using the MediaRecorder API. The problem is that my application acts strangely: whenever I call start recording on the VideoRecorder class, the phone reboots, even when I hardcode the configuration such as the video size and output file. The app previously worked fine: it saved the recording in the correct place and the video itself looked good, but then I changed something in the code and now it doesn't work. Any idea what I'm missing?
Here is my code:
public class VideoRecorder {
private int screenDensity, screenHeight, screenWidth;
private MediaRecorder mMediaRecorder;
private VirtualDisplay mVirtualDisplay;
private MediaProjection mediaProjection;
private String directory, filename;
private Display defaultDisplay = null;
private DisplayMetrics metrics;
public VideoRecorder(Display defaultDisplay) {
//this.defaultDisplay = defaultDisplay;
//metrics = new DisplayMetrics();
//defaultDisplay.getMetrics(metrics);
//screenDensity = metrics.densityDpi;
// screenHeight = metrics.heightPixels;
// screenWidth = metrics.widthPixels;
}
void prepareVideoRecorder() {
initRecorder();
prepareRecorder();
}
private void initRecorder() {
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
mMediaRecorder.setVideoEncodingBitRate(512 * 1000);
mMediaRecorder.setVideoFrameRate(30);
mMediaRecorder.setVideoSize(540, 888);
// mMediaRecorder.setOutputFile(directory + "/" + filename + ".mp4");
mMediaRecorder.setOutputFile(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MOVIES).getAbsoluteFile() + "/recorderrsr.mp4");
}
void startRecording(MediaProjection mediaProjection) {
mMediaRecorder = new MediaRecorder();
initRecorder();
prepareRecorder();
this.mediaProjection = mediaProjection;
mMediaRecorder.start();
mVirtualDisplay = createVirtualDisplay();
}
private VirtualDisplay createVirtualDisplay() {
return mediaProjection.createVirtualDisplay("MainActivity",
540, 888, 240,
DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
mMediaRecorder.getSurface(), null /*Callbacks*/, null /*Handler*/);
}
void setFilename(String filename) {
this.filename = filename;
}
void setDirectory(String directory) {
this.directory = directory;
}
void stopRecording() {
// guard so stop()/reset() aren't called on a recorder that was never created
if (mMediaRecorder != null) {
mMediaRecorder.stop();
mMediaRecorder.reset();
}
if (mediaProjection != null) {
mediaProjection.stop();
mediaProjection = null;
}
if (mMediaRecorder != null) {
mMediaRecorder.release();
mMediaRecorder = null;
}
if (mVirtualDisplay != null) {
mVirtualDisplay.release();
}
}
private void prepareRecorder() {
try {
mMediaRecorder.prepare();
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
void release() {
mMediaRecorder.release();
}
}
I'm sure that the output location exists and that I have the correct permissions; as I said, the app worked fine before.
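For comparison, the official MediaProjection sample (quoted in the last related question further down this page) creates the virtual display before calling start() on the recorder. Here is a minimal sketch of my startRecording() in that order, in case the ordering matters:

void startRecording(MediaProjection mediaProjection) {
    this.mediaProjection = mediaProjection;
    mMediaRecorder = new MediaRecorder();
    initRecorder();
    prepareRecorder();
    // The official sample attaches the recorder's surface to the virtual display
    // first and only then starts the recorder.
    mVirtualDisplay = createVirtualDisplay();
    mMediaRecorder.start();
}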
Thanks for the help, Jon

Related

Record screen Android 11 using MediaRecorder - file is created but not able to record video

I am creating a screen recording application. When I record the screen using the MediaRecorder class, the application works fine below Android 10, but it does not work on Android 11.
When I start recording with mediaRecorder.start(), the file is created, but its size is always 3.23 KB, so I assume the file is not being updated while the screen is recorded. Please check the code below.
Thanks in advance.
private void startProjection(int resultCode, Intent data) throws FileNotFoundException {
MediaProjectionManager mpManager =
(MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
if (mediaProjection == null) {
mediaProjection = mpManager.getMediaProjection(resultCode, data);
if (mediaProjection != null) {
density = Resources.getSystem().getDisplayMetrics().densityDpi;
if (mediaRecorder == null) {
mediaRecorder = new MediaRecorder();
}
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mediaRecorder.setVideoEncodingBitRate(10000000);
mediaRecorder.setVideoFrameRate(30);
mediaRecorder.setVideoSize(Resources.getSystem().getDisplayMetrics().widthPixels, Resources.getSystem().getDisplayMetrics().heightPixels);
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
mediaRecorder.setOutputFile(getFilePath());
} else {
mediaRecorder.setOutputFile(getFilePath());
}
try {
mediaRecorder.prepare();
mediaRecorder.start();
//ScreenCaptureReceiver.getInstance().registerIntent(getApplicationContext(), filename, true);
} catch (IllegalStateException | IOException e) {
//Log.d("milasdfjhsdf", e.getMessage());
e.printStackTrace();
}
createVirtualDisplay();
mediaProjection.registerCallback(new MediaProjectionStopCallback(), handler);
new SecurePreferences(getApplicationContext()).setFilePath(filename);
}
}
}
private String getFilePath() {
filename = getExternalCacheDir().getAbsolutePath();
filename += "/audiorecordtest.mp4";
return filename;
}
@SuppressLint("WrongConstant")
private void createVirtualDisplay() {
int width = Resources.getSystem().getDisplayMetrics().widthPixels;
int height = Resources.getSystem().getDisplayMetrics().heightPixels;
virtualDisplay = mediaProjection.createVirtualDisplay(ScreenshotConstant.REDPEN_CAPTURE, width, height,
density, getVirtualDisplayFlags(), mediaRecorder.getSurface(), null, handler);
}
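One thing I am still unsure about (this is my assumption, not something I have confirmed): from Android 10 onwards, MediaProjection has to be used from a foreground service whose manifest entry declares android:foregroundServiceType="mediaProjection", so the first thing I would double-check on Android 11 is whether the capture service is started like this:

// Sketch of a foreground service for MediaProjection capture; the manifest entry
// for this service is assumed to declare android:foregroundServiceType="mediaProjection".
public class RecordingService extends Service {

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        Notification notification = buildNotification();
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
            startForeground(1, notification,
                    ServiceInfo.FOREGROUND_SERVICE_TYPE_MEDIA_PROJECTION);
        } else {
            startForeground(1, notification);
        }
        // getMediaProjection()/createVirtualDisplay() should only be called
        // after the service is in the foreground.
        return START_NOT_STICKY;
    }

    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }

    private Notification buildNotification() {
        // Placeholder: a real implementation creates a notification channel (API 26+)
        // and returns an ongoing notification for the foreground service.
        throw new UnsupportedOperationException("stub");
    }
}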

Audio and video capture of an activity with KitKat version support

I am developing an Android application, "User album", in which the user's pictures scroll in a ViewPager with background music. If the user wants to make a video of the scrolling pictures and the audio, we should provide a "video screencast" option to create one.
I wrote some code like this to make a video cast.
My code:
public void onToggleScreenShare(View view) {
if (((ToggleButton) view).isChecked()) {
initRecorder();
shareScreen();
} else {
mMediaRecorder.stop();
mMediaRecorder.reset();
Log.v(TAG, "Stopping Recording");
stopScreenSharing();
}
}
private void shareScreen() {
if (mMediaProjection == null) {
startActivityForResult(mProjectionManager.createScreenCaptureIntent(), REQUEST_CODE);
return;
}
mVirtualDisplay = createVirtualDisplay();
mMediaRecorder.start();
}
private VirtualDisplay createVirtualDisplay() {
return mMediaProjection.createVirtualDisplay("MainActivity",
DISPLAY_WIDTH, DISPLAY_HEIGHT, mScreenDensity,
DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
mMediaRecorder.getSurface(), null /*Callbacks*/, null
/*Handler*/);
}
private void initRecorder() {
try {
/*****here mMediaRecorder is not supported for kitkat version*******/
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
mMediaRecorder.setOutputFile(Environment
.getExternalStoragePublicDirectory(Environment
.DIRECTORY_DOWNLOADS) + "/video_"+System.currentTimeMillis()+".mp4");
mMediaRecorder.setVideoSize(DISPLAY_WIDTH, DISPLAY_HEIGHT);
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
mMediaRecorder.setVideoEncodingBitRate(512 * 1000);
mMediaRecorder.setVideoFrameRate(30);
int rotation = getWindowManager().getDefaultDisplay().getRotation();
int orientation = ORIENTATIONS.get(rotation + 90);
mMediaRecorder.setOrientationHint(orientation);
mMediaRecorder.prepare();
} catch (IOException e) {
e.printStackTrace();
}
}
private class MediaProjectionCallback extends MediaProjection.Callback {
@Override
public void onStop() {
if (mToggleButton.isChecked()) {
mToggleButton.setChecked(false);
mMediaRecorder.stop();
mMediaRecorder.reset();
Log.v(TAG, "Recording Stopped");
}
mMediaProjection = null;
stopScreenSharing();
}
}
private void stopScreenSharing() {
if (mVirtualDisplay == null) {
return;
}
mVirtualDisplay.release();
//mMediaRecorder.release(); //If used: mMediaRecorder object cannot
// be reused again
destroyMediaProjection();
}
Using the above code it works well from Lollipop onwards, but ON KITKAT it says that this use of MediaRecorder is not supported. I tried other code but didn't find a correct solution.
Can you suggest how to implement video casting for the KitKat OS version?
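From what I understand (my assumption, please correct me), MediaProjection and MediaRecorder.VideoSource.SURFACE only exist from API 21 (Lollipop), so this exact code path cannot run on KitKat at all. For now I am only gating the feature on the OS version like below, but I would still like a real KitKat alternative:

public void onToggleScreenShare(View view) {
    // The screen-cast path needs the MediaProjection API, which is API 21+ only.
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
        Toast.makeText(this, "Screen recording needs Android 5.0 or newer",
                Toast.LENGTH_SHORT).show();
        ((ToggleButton) view).setChecked(false);
        return;
    }
    if (((ToggleButton) view).isChecked()) {
        initRecorder();
        shareScreen();
    } else {
        mMediaRecorder.stop();
        mMediaRecorder.reset();
        stopScreenSharing();
    }
}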

Xamarin - How to set/change VideoView display preview orientation

I copied code from https://developer.xamarin.com/recipes/android/media/video/record_video/, an instruction on recording a video stream, and I am trying to change parts of it.
I already tried to change the output orientation to 90 using SetOrientationHint(90).
But since this code does not use the Camera class, just the MediaRecorder class, how can I rotate the display preview? It is giving me a preview rotated by +90 degrees and in landscape.
I already tried the rotation in XML and in code, but the preview became completely black.
This is the code
[Activity(Label = "App2", MainLauncher = true, Icon = "@drawable/icon")]
public class MainActivity : Activity, ISurfaceHolderCallback
{
string path = Android.OS.Environment.ExternalStorageDirectory.AbsolutePath + "/test.mp4";
MediaRecorder recorder;
protected override void OnCreate(Bundle bundle)
{
base.OnCreate(bundle);
//Set our view from the "main" layout resource
SetContentView(Resource.Layout.Main);
var record = FindViewById<Button>(Resource.Id.Record);
var stop = FindViewById<Button>(Resource.Id.Stop);
var play = FindViewById<Button>(Resource.Id.Play);
var video = FindViewById<VideoView>(Resource.Id.SampleVideoView);
//video.Rotation = 90;
record.Click += delegate
{
if (recorder == null)
recorder = startRecording(video);
else
Toast.MakeText(this, "Now recording", 0).Show();
};
stop.Click += delegate
{
if (recorder != null)
{
stopRecording(recorder);
recorder = null;
}
else
Toast.MakeText(this, "No video recording", 0).Show();
};
play.Click += delegate
{
if (path != null)
playVideo(video);
else
Toast.MakeText(this, "No video available", 0).Show();
};
//recorder = startRecording(video);
}
protected override void OnDestroy()
{
base.OnDestroy();
if (recorder != null)
{
recorder.Release();
recorder.Dispose();
recorder = null;
}
}
private void playVideo(VideoView video)
{
var uri = Android.Net.Uri.Parse(path);
video.SetVideoURI(uri);
video.Start();
}
private static void stopRecording(MediaRecorder recorder)
{
if (recorder != null)
{
recorder.Stop();
recorder.Release();
}
}
private MediaRecorder startRecording(VideoView video)
{
MediaRecorder recorder;
video.StopPlayback();
//video.Holder.AddCallback(this);
//video.Holder.SetType(SurfaceType.PushBuffers);
recorder = new MediaRecorder();
recorder.SetVideoSource(VideoSource.Camera);
recorder.SetAudioSource(AudioSource.Mic);
recorder.SetOutputFormat(OutputFormat.Default);
recorder.SetVideoEncoder(VideoEncoder.Default);
recorder.SetAudioEncoder(AudioEncoder.Default);
recorder.SetOutputFile(path);
recorder.SetOrientationHint(90);
recorder.SetPreviewDisplay(video.Holder.Surface);
if (recorder!=null)
{
try
{
recorder.Prepare();
recorder.Start();
}
catch (Exception)
{
Toast.MakeText(this, "Exception!", 0).Show();
}
}
return recorder;
}
public void SurfaceChanged(ISurfaceHolder holder, [GeneratedEnum] Format format, int width, int height)
{
throw new NotImplementedException();
}
public void SurfaceCreated(ISurfaceHolder holder)
{
throw new NotImplementedException();
}
public void SurfaceDestroyed(ISurfaceHolder holder)
{
throw new NotImplementedException();
}
}
UPDATE
@Elvis Xia's answer helped a lot.
Here is the new code
[Activity(Label = "App2", MainLauncher = true, Icon = "@drawable/icon")]
public class MainActivity : Activity, ISurfaceHolderCallback
{
string path = Android.OS.Environment.ExternalStorageDirectory.AbsolutePath + "/test.mp4";
MediaRecorder recorder;
Android.Hardware.Camera mCamera; // Android.Hardware is spelled out because Camera
// would otherwise clash with Android.Graphics.Camera
protected override void OnCreate(Bundle bundle)
{
base.OnCreate(bundle);
//Set our view from the "main" layout resource
SetContentView(Resource.Layout.Main);
var record = FindViewById<Button>(Resource.Id.Record);
var stop = FindViewById<Button>(Resource.Id.Stop);
var play = FindViewById<Button>(Resource.Id.Play);
var video = FindViewById<VideoView>(Resource.Id.SampleVideoView);
record.Click += delegate
{
if (recorder == null)
recorder = startRecording(video);
else
Toast.MakeText(this, "Now recording", 0).Show();
};
stop.Click += delegate
{
if (recorder != null)
{
stopRecording(recorder, mCamera);
recorder = null;
}
else
Toast.MakeText(this, "No video recording", 0).Show();
};
play.Click += delegate
{
if (path != null)
playVideo(video);
else
Toast.MakeText(this, "No video available", 0).Show();
};
//recorder = startRecording(video);
}
protected override void OnDestroy()
{
base.OnDestroy();
if (recorder != null)
{
recorder.Release();
recorder.Dispose();
recorder = null;
}
}
private void playVideo(VideoView video)
{
var uri = Android.Net.Uri.Parse(path);
video.SetVideoURI(uri);
video.Start();
}
private static void stopRecording(MediaRecorder recorder, Android.Hardware.Camera mCamera)
{
if (recorder != null)
{
recorder.Stop();
recorder.Release();
mCamera.StopPreview();
mCamera.Release();
}
}
private MediaRecorder startRecording(VideoView video)
{
MediaRecorder recorder;
video.StopPlayback();
//video.Holder.AddCallback(this);
//video.Holder.SetType(SurfaceType.PushBuffers);
recorder = new MediaRecorder();
mCamera = GetCameraInstance();
mCamera.SetDisplayOrientation(90);
mCamera.Unlock();
recorder.SetCamera(mCamera);
recorder.SetVideoSource(VideoSource.Camera);
recorder.SetAudioSource(AudioSource.Mic);
recorder.SetOutputFormat(OutputFormat.Default);
recorder.SetVideoEncoder(VideoEncoder.Default);
recorder.SetAudioEncoder(AudioEncoder.Default);
recorder.SetOutputFile(path);
recorder.SetOrientationHint(90);
recorder.SetPreviewDisplay(video.Holder.Surface);
if (recorder!=null)
{
try
{
recorder.Prepare();
recorder.Start();
}
catch (Exception)
{
Toast.MakeText(this, "Exception!", 0).Show();
}
}
return recorder;
}
public void SurfaceChanged(ISurfaceHolder holder, [GeneratedEnum] Format format, int width, int height)
{
throw new NotImplementedException();
}
public void SurfaceCreated(ISurfaceHolder holder)
{
throw new NotImplementedException();
}
public void SurfaceDestroyed(ISurfaceHolder holder)
{
throw new NotImplementedException();
}
public static Android.Hardware.Camera GetCameraInstance()
{
Android.Hardware.Camera c = null;
try
{
c = Android.Hardware.Camera.Open();
}
catch (Exception e)
{
}
return c;
}
}
But since this code does not use the Camera class, just the MediaRecorder class, how can I rotate the display preview? It is giving me a preview rotated by +90 degrees and in landscape.
You need to associate the Camera with the MediaRecorder after setting the rotation degree:
Get a Camera instance through GetCameraInstance:
public static Camera GetCameraInstance()
{
Camera c = null;
try
{
c = Camera.Open();
}
catch (Exception e)
{
}
return c;
}
In the record button click event in MainActivity.cs, associate the camera with the MediaRecorder, setting the display orientation before mCamera.Unlock():
Camera mCamera
...
recorder = new MediaRecorder();
mCamera = ClassName.GetCameraInstance(); //ClassName if in different class.
//else just GetCameraInstance();
mCamera.SetDisplayOrientation(90);
mCamera.Unlock();
recorder.SetCamera(mCamera);
recorder.SetVideoSource(VideoSource.Camera);

How to start capturing video automatically when the app starts

I want to show a camera preview and start capturing immediately when the preview is ready (based on the camera2 API).
To keep this simple, I built a sample app that should start capturing video automatically when the preview is ready, based on the Google camera2Video example, and modified it according to my needs.
Here is my code for the onCreate method:
if (null == savedInstanceState) {
getFragmentManager().beginTransaction()
.replace(R.id.container, camera2VideoFragment)
.commit();
}
Runnable runnable = new Runnable() {
@Override
public void run() {
while (!camera2VideoFragment.isCameraReady) {
try {
Log.d("camera status", String.valueOf(camera2VideoFragment.isCameraReady));
Thread.sleep(500);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
camera2VideoFragment.captureVideo();
}
};
runnable.run();
In the Camera2VideoFragment class I added a public static Boolean variable, initialized to false and set to true when the camera is ready, using CameraDevice.StateCallback, like this:
private CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice cameraDevice) {
mCameraDevice = cameraDevice;
startPreview();
mCameraOpenCloseLock.release();
isCameraReady = true; // Setting the Boolean to true here
if (null != mTextureView) {
configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
}
}
The problem: isCameraReady stays false forever when I'm using the while check (if I start the capture from a button event instead, the value is true and everything works fine).
The question: how can I know when the preview is ready and start capturing video automatically, without pressing any button? (Clicking the button will be used to stop the recording, not to start it.)
Thank you!
EDIT: Below is the relevant Camera2VideoFragment code; you can find the full code here.
public class Camera2VideoFragment extends Fragment
implements FragmentCompat.OnRequestPermissionsResultCallback {
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
private static final String TAG = "Camera2VideoFragment";
private static final int REQUEST_VIDEO_PERMISSIONS = 1;
private static final String FRAGMENT_DIALOG = "dialog";
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
private AutoFitTextureView mTextureView;
private CameraDevice mCameraDevice;
private CameraCaptureSession mPreviewSession;
private TextureView.SurfaceTextureListener mSurfaceTextureListener
= new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture,
int width, int height) {
openCamera(width, height);
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture,
int width, int height) {
configureTransform(width, height);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
}
};
private Size mPreviewSize;
private Size mVideoSize;
private CaptureRequest.Builder mPreviewBuilder;
private MediaRecorder mMediaRecorder;
private static boolean mIsRecordingVideo;
private HandlerThread mBackgroundThread;
private Handler mBackgroundHandler;
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
public static Boolean isCameraReady = false; // I add this variable in order to found when can I start recording
private CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice cameraDevice) {
mCameraDevice = cameraDevice;
startPreview();
mCameraOpenCloseLock.release();
isCameraReady = true; // Here I set the Boolean to true
if (null != mTextureView) {
configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
}
}
@Override
public void onDisconnected(CameraDevice cameraDevice) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
isCameraReady = false;
}
@Override
public void onError(CameraDevice cameraDevice, int error) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
Activity activity = getActivity();
if (null != activity) {
activity.finish();
}
isCameraReady = false;
}
};
public void captureVideo()
{
if (mIsRecordingVideo) {
stopRecordingVideo();
} else {
startRecordingVideo();
}
}
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
private void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private void openCamera(int width, int height) {
if (!hasPermissionsGranted(VIDEO_PERMISSIONS)) {
requestVideoPermissions();
return;
}
final Activity activity = getActivity();
if (null == activity || activity.isFinishing()) {
return;
}
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
Log.d(TAG, "tryAcquire");
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
String cameraId = manager.getCameraIdList()[0];
// Choose the sizes for camera preview and video recording
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics
.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
mVideoSize = chooseVideoSize(map.getOutputSizes(MediaRecorder.class));
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
width, height, mVideoSize);
int orientation = getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
} else {
mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
}
configureTransform(width, height);
mMediaRecorder = new MediaRecorder();
manager.openCamera(cameraId, mStateCallback, null);
} catch (CameraAccessException e) {
Toast.makeText(activity, "Cannot access the camera.", Toast.LENGTH_SHORT).show();
activity.finish();
} catch (NullPointerException e) {
// Currently an NPE is thrown when the Camera2API is used but not supported on the
// device this code runs.
ErrorDialog.newInstance(getString(R.string.camera_error))
.show(getChildFragmentManager(), FRAGMENT_DIALOG);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera opening.");
}
}
private void startPreview() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
return;
}
try {
setUpMediaRecorder();
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
List<Surface> surfaces = new ArrayList<Surface>();
Surface previewSurface = new Surface(texture);
surfaces.add(previewSurface);
mPreviewBuilder.addTarget(previewSurface);
Surface recorderSurface = mMediaRecorder.getSurface();
surfaces.add(recorderSurface);
mPreviewBuilder.addTarget(recorderSurface);
mCameraDevice.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback()
{
@Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
mPreviewSession = cameraCaptureSession;
updatePreview();
}
@Override
public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
Activity activity = getActivity();
if (null != activity) {
Toast.makeText(activity, "Failed", Toast.LENGTH_SHORT).show();
}
}
}, mBackgroundHandler);
isCameraReady = true;
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
private void updatePreview() {
if (null == mCameraDevice) {
return;
}
try {
setUpCaptureRequestBuilder(mPreviewBuilder);
HandlerThread thread = new HandlerThread("CameraPreview");
thread.start();
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void setUpCaptureRequestBuilder(CaptureRequest.Builder builder) {
builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
}
private void configureTransform(int viewWidth, int viewHeight) {
Activity activity = getActivity();
if (null == mTextureView || null == mPreviewSize || null == activity) {
return;
}
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale = Math.max(
(float) viewHeight / mPreviewSize.getHeight(),
(float) viewWidth / mPreviewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
}
mTextureView.setTransform(matrix);
}
private void setUpMediaRecorder() throws IOException {
final Activity activity = getActivity();
if (null == activity) {
return;
}
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mMediaRecorder.setOutputFile(getVideoFile(activity).getAbsolutePath());
mMediaRecorder.setVideoEncodingBitRate(10000000);
mMediaRecorder.setVideoFrameRate(30);
mMediaRecorder.setVideoSize(mVideoSize.getWidth(), mVideoSize.getHeight());
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
int orientation = ORIENTATIONS.get(rotation);
mMediaRecorder.setOrientationHint(orientation);
mMediaRecorder.prepare();
}
private void startRecordingVideo() {
try {
mIsRecordingVideo = true;
mMediaRecorder.start();
} catch (IllegalStateException e) {
e.printStackTrace();
}
}
private void stopRecordingVideo() {
mIsRecordingVideo = false;
mMediaRecorder.stop();
mMediaRecorder.reset();
Activity activity = getActivity();
if (null != activity) {
Toast.makeText(activity, "Video saved: " + getVideoFile(activity),
Toast.LENGTH_SHORT).show();
}
startPreview();
}
}
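One possible explanation, offered as an assumption from the code above rather than a confirmed diagnosis: runnable.run() in onCreate executes on the main thread, so the while loop blocks the very looper that onOpened() is posted to (openCamera was called with a null handler), and isCameraReady can never become true. A sketch of starting the capture from the capture session callback instead of polling:

// Sketch only: inside startPreview(), let the session callback kick off recording
// once the preview is actually running, instead of busy-waiting in the activity.
@Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
    mPreviewSession = cameraCaptureSession;
    updatePreview();
    isCameraReady = true;
    if (!mIsRecordingVideo) {
        captureVideo(); // starts MediaRecorder now that the TEMPLATE_RECORD session is live
    }
    // Note: startPreview() is also called from stopRecordingVideo(), so a separate
    // flag would be needed to keep the recording from restarting automatically.
}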
Try this; hope it works for you.
public class MainActivity extends AppCompatActivity {
private Uri fileUri;
public static final int MEDIA_TYPE_VIDEO = 2;
private static final int CAPTURE_VIDEO_ACTIVITY_REQUEST_CODE = 200;
public static MainActivity ActivityContext =null;
public static TextView output;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
ActivityContext = this;
Button buttonRecording = (Button)findViewById(R.id.recording);
output = (TextView)findViewById(R.id.output);
// buttonRecording.setOnClickListener(new Button.OnClickListener() {
// @Override
// public void onClick(View arg0) {
// create a new Intent with the standard action that can be
// sent to have the camera application capture a video and return it.
Intent intent = new Intent(MediaStore.ACTION_VIDEO_CAPTURE);
// create a file to save the video
fileUri = getOutputMediaFileUri(MEDIA_TYPE_VIDEO);
// set the output file URI
intent.putExtra(MediaStore.EXTRA_OUTPUT, fileUri);
// set the video quality to high
intent.putExtra(MediaStore.EXTRA_VIDEO_QUALITY, 1);
// start the Video Capture Intent
startActivityForResult(intent, CAPTURE_VIDEO_ACTIVITY_REQUEST_CODE);
// }
// });
}
/** Create a file Uri for saving an image or video */
private static Uri getOutputMediaFileUri(int type){
return Uri.fromFile(getOutputMediaFile(type));
}
/** Create a File for saving an image or video */
private static File getOutputMediaFile(int type){
// Check that the SDCard is mounted
File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES), "MyCameraVideo");
// Create the storage directory(MyCameraVideo) if it does not exist
if (! mediaStorageDir.exists()){
if (! mediaStorageDir.mkdirs()){
output.setText("Failed to create directory MyCameraVideo.");
Toast.makeText(ActivityContext, "Failed to create directory MyCameraVideo.",
Toast.LENGTH_LONG).show();
Log.d("MyCameraVideo", "Failed to create directory MyCameraVideo.");
return null;
}
}
// Create a media file name
// For unique file name appending current timeStamp with file name
java.util.Date date= new java.util.Date();
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss")
.format(date.getTime());
File mediaFile;
if(type == MEDIA_TYPE_VIDEO) {
// For unique video file name appending current timeStamp with file name
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"VID_"+ timeStamp + ".mp4");
} else {
return null;
}
return mediaFile;
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
// After the camera screen closes, this code will be executed
if (requestCode == CAPTURE_VIDEO_ACTIVITY_REQUEST_CODE) {
if (resultCode == RESULT_OK) {
output.setText("Video File : " +data.getData());
// Video captured and saved to fileUri specified in the Intent
Toast.makeText(this, "Video saved to:" +
data.getData(), Toast.LENGTH_LONG).show();
} else if (resultCode == RESULT_CANCELED) {
output.setText("User cancelled the video capture.");
// User cancelled the video capture
Toast.makeText(this, "User cancelled the video capture.",
Toast.LENGTH_LONG).show();
} else {
output.setText("Video capture failed.");
// Video capture failed, advise user
Toast.makeText(this, "Video capture failed.",
Toast.LENGTH_LONG).show();
}
}
}
}

How to use the MediaProjection library in Android to capture the screen and convert it into an mp4 file?

Since Android 5.0 the MediaProjection library has been available to capture screen content, but the sample demo application provided is not clear. You can find the sample app here. In that application the captured screen is projected using the virtual display method:
private void setUpVirtualDisplay() {
Log.i(TAG, "Setting up a VirtualDisplay: " +
mSurfaceView.getWidth() + "x" + mSurfaceView.getHeight() +
" (" + mScreenDensity + ")");
mVirtualDisplay = mMediaProjection.createVirtualDisplay("ScreenCapture",
mSurfaceView.getWidth(), mSurfaceView.getHeight(), mScreenDensity,
DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
mSurface, null, null);
mButtonToggle.setText(R.string.stop);
}
I want to convert the captured screen into an mp4 file for my screen recording application. Please help me get through this.
Here is the sample code for reference:
public class MainActivity extends AppCompatActivity {
private static final String TAG = "MainActivity";
private static final int PERMISSION_CODE = 1;
private int mScreenDensity;
private MediaProjectionManager mProjectionManager;
private static final int DISPLAY_WIDTH = 480;
private static final int DISPLAY_HEIGHT = 640;
private MediaProjection mMediaProjection;
private VirtualDisplay mVirtualDisplay;
private MediaProjectionCallback mMediaProjectionCallback;
private ToggleButton mToggleButton;
private MediaRecorder mMediaRecorder;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
DisplayMetrics metrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metrics);
mScreenDensity = metrics.densityDpi;
mMediaRecorder = new MediaRecorder();
initRecorder();
prepareRecorder();
mProjectionManager = (MediaProjectionManager) getSystemService
(Context.MEDIA_PROJECTION_SERVICE);
mToggleButton = (ToggleButton) findViewById(R.id.toggle);
mToggleButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
onToggleScreenShare(v);
}
});
mMediaProjectionCallback = new MediaProjectionCallback();
}
@Override
public void onDestroy() {
super.onDestroy();
if (mMediaProjection != null) {
mMediaProjection.stop();
mMediaProjection = null;
}
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode != PERMISSION_CODE) {
Log.e(TAG, "Unknown request code: " + requestCode);
return;
}
if (resultCode != RESULT_OK) {
Toast.makeText(this,
"Screen Cast Permission Denied", Toast.LENGTH_SHORT).show();
mToggleButton.setChecked(false);
return;
}
mMediaProjection = mProjectionManager.getMediaProjection(resultCode, data);
mMediaProjection.registerCallback(mMediaProjectionCallback, null);
mVirtualDisplay = createVirtualDisplay();
mMediaRecorder.start();
}
public void onToggleScreenShare(View view) {
if (((ToggleButton) view).isChecked()) {
shareScreen();
} else {
mMediaRecorder.stop();
mMediaRecorder.reset();
Log.v(TAG, "Recording Stopped");
stopScreenSharing();
initRecorder();
prepareRecorder();
}
}
private void shareScreen() {
if (mMediaProjection == null) {
startActivityForResult(mProjectionManager.createScreenCaptureIntent(), PERMISSION_CODE);
return;
}
mVirtualDisplay = createVirtualDisplay();
mMediaRecorder.start();
}
private void stopScreenSharing() {
if (mVirtualDisplay == null) {
return;
}
mVirtualDisplay.release();
//mMediaRecorder.release();
}
private VirtualDisplay createVirtualDisplay() {
return mMediaProjection.createVirtualDisplay("MainActivity",
DISPLAY_WIDTH, DISPLAY_HEIGHT, mScreenDensity,
DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
mMediaRecorder.getSurface(), null /*Callbacks*/, null /*Handler*/);
}
private class MediaProjectionCallback extends MediaProjection.Callback {
@Override
public void onStop() {
if (mToggleButton.isChecked()) {
mToggleButton.setChecked(false);
mMediaRecorder.stop();
mMediaRecorder.reset();
Log.v(TAG, "Recording Stopped");
initRecorder();
prepareRecorder();
}
mMediaProjection = null;
stopScreenSharing();
Log.i(TAG, "MediaProjection Stopped");
}
}
private void prepareRecorder() {
try {
mMediaRecorder.prepare();
} catch (IllegalStateException e) {
e.printStackTrace();
finish();
} catch (IOException e) {
e.printStackTrace();
finish();
}
}
private void initRecorder() {
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
mMediaRecorder.setVideoEncodingBitRate(512 * 1000);
mMediaRecorder.setVideoFrameRate(30);
mMediaRecorder.setVideoSize(DISPLAY_WIDTH, DISPLAY_HEIGHT);
mMediaRecorder.setOutputFile("/sdcard/capture.mp4");
}
}
You're passing the Surface from a SurfaceView into createVirtualDisplay(). Replace that with the Surface from a MediaRecorder.
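Applied to the setUpVirtualDisplay() snippet quoted at the top of this question, that change might look roughly like this (assuming mMediaRecorder is already configured and prepared before the display is created):

mVirtualDisplay = mMediaProjection.createVirtualDisplay("ScreenCapture",
        mSurfaceView.getWidth(), mSurfaceView.getHeight(), mScreenDensity,
        DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
        mMediaRecorder.getSurface(), // recorder's input surface instead of the SurfaceView's mSurface
        null /*Callbacks*/, null /*Handler*/);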
Look at this POST. There is a good explanation of how to use the MediaProjection API to actually record the screen to an mp4 file on external storage. That solution uses MediaRecorder to store the video.
You can find another solution on the page of Matt Snider. There the MediaMuxer is used to store the video on to the external storage. But note that the output of the MediaMuxer is not streamable.
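For reference, the MediaMuxer route mentioned above generally follows this pattern; this is only a minimal sketch, and the MediaFormat and encoded buffers would come from a MediaCodec video encoder fed by the virtual display surface (not shown):

// Minimal MediaMuxer sketch; the parameters stand in for MediaCodec encoder output.
private void writeWithMuxer(MediaFormat encodedFormat, ByteBuffer encodedBuffer,
                            MediaCodec.BufferInfo bufferInfo) throws IOException {
    MediaMuxer muxer = new MediaMuxer("/sdcard/capture.mp4",
            MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
    int trackIndex = muxer.addTrack(encodedFormat); // format from INFO_OUTPUT_FORMAT_CHANGED
    muxer.start();
    muxer.writeSampleData(trackIndex, encodedBuffer, bufferInfo); // repeated for every drained buffer
    muxer.stop();
    muxer.release();
}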
