MediaRecorder with Google Vision API - Android

I am using the FaceTracker sample from the Android Vision API. However, I am having difficulty recording video while the overlays are drawn on it.
One way is to store the bitmaps as images and process them with FFmpeg or Xuggler to merge them into a video, but I am wondering whether there is a better solution: recording the video at runtime, as the preview is displayed.
Update 1:
I updated the class below to use MediaRecorder, but the recording is still not working. It throws the following error when I call the triggerRecording() function:
MediaRecorder: start called in an invalid state: 4
and I do have the external storage permission in the manifest file.
Update 2:
I have fixed the above issue in the code and moved setUpMediaRecorder() into the surfaceCreated callback. However, when I stop recording it throws a RuntimeException. According to the documentation, a RuntimeException is thrown when there is no valid audio/video data.
So, what am I missing here?
public class CameraSourcePreview extends ViewGroup {
private static final String TAG = "CameraSourcePreview";
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
private MediaRecorder mMediaRecorder;
/**
* Whether the app is recording video now
*/
private boolean mIsRecordingVideo;
private Context mContext;
private SurfaceView mSurfaceView;
private boolean mStartRequested;
private boolean mSurfaceAvailable;
private CameraSource mCameraSource;
private GraphicOverlay mOverlay;
public CameraSourcePreview(Context context, AttributeSet attrs) {
super(context, attrs);
mContext = context;
mStartRequested = false;
mSurfaceAvailable = false;
mSurfaceView = new SurfaceView(context);
mSurfaceView.getHolder().addCallback(new SurfaceCallback());
addView(mSurfaceView);
mMediaRecorder = new MediaRecorder();
}
private void setUpMediaRecorder() throws IOException {
mMediaRecorder.setPreviewDisplay(mSurfaceView.getHolder().getSurface());
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mMediaRecorder.setOutputFile(Environment.getExternalStorageDirectory() + File.separator + Environment.DIRECTORY_DCIM + File.separator + System.currentTimeMillis() + ".mp4");
mMediaRecorder.setVideoEncodingBitRate(10000000);
mMediaRecorder.setVideoFrameRate(30);
mMediaRecorder.setVideoSize(480, 640);
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
//int rotation = mContext.getWindowManager().getDefaultDisplay().getRotation();
//int orientation = ORIENTATIONS.get(rotation);
mMediaRecorder.setOrientationHint(ORIENTATIONS.get(0));
mMediaRecorder.prepare();
mMediaRecorder.setOnErrorListener(new MediaRecorder.OnErrorListener() {
@Override
public void onError(MediaRecorder mr, int what, int extra) {
Timber.d(mr.toString() + " : what[" + what + "]" + " Extras[" + extra + "]");
}
});
}
public void start(CameraSource cameraSource) throws IOException {
if (cameraSource == null) {
stop();
}
mCameraSource = cameraSource;
if (mCameraSource != null) {
mStartRequested = true;
startIfReady();
}
}
public void start(CameraSource cameraSource, GraphicOverlay overlay) throws IOException {
mOverlay = overlay;
start(cameraSource);
}
public void stop() {
if (mCameraSource != null) {
mCameraSource.stop();
}
}
public void release() {
if (mCameraSource != null) {
mCameraSource.release();
mCameraSource = null;
}
}
private void startIfReady() throws IOException {
if (mStartRequested && mSurfaceAvailable) {
mCameraSource.start(mSurfaceView.getHolder());
if (mOverlay != null) {
Size size = mCameraSource.getPreviewSize();
int min = Math.min(size.getWidth(), size.getHeight());
int max = Math.max(size.getWidth(), size.getHeight());
if (isPortraitMode()) {
// Swap width and height sizes when in portrait, since it will be rotated by
// 90 degrees
mOverlay.setCameraInfo(min, max, mCameraSource.getCameraFacing());
} else {
mOverlay.setCameraInfo(max, min, mCameraSource.getCameraFacing());
}
mOverlay.clear();
}
mStartRequested = false;
}
}
private class SurfaceCallback implements SurfaceHolder.Callback {
@Override
public void surfaceCreated(SurfaceHolder surface) {
mSurfaceAvailable = true;
surface.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
// setup the media recorder
try {
setUpMediaRecorder();
} catch (IOException e) {
e.printStackTrace();
}
try {
startIfReady();
} catch (IOException e) {
Timber.e(e, "Could not start camera source.");
}
}
@Override
public void surfaceDestroyed(SurfaceHolder surface) {
mSurfaceAvailable = false;
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
int width = 320;
int height = 240;
if (mCameraSource != null) {
Size size = mCameraSource.getPreviewSize();
if (size != null) {
width = size.getWidth();
height = size.getHeight();
}
}
// Swap width and height sizes when in portrait, since it will be rotated 90 degrees
if (isPortraitMode()) {
int tmp = width;
width = height;
height = tmp;
}
final int layoutWidth = right - left;
final int layoutHeight = bottom - top;
// Computes height and width for potentially doing fit width.
int childWidth = layoutWidth;
int childHeight = (int) (((float) layoutWidth / (float) width) * height);
// If height is too tall using fit width, does fit height instead.
if (childHeight > layoutHeight) {
childHeight = layoutHeight;
childWidth = (int) (((float) layoutHeight / (float) height) * width);
}
for (int i = 0; i < getChildCount(); ++i) {
getChildAt(i).layout(0, 0, childWidth, childHeight);
}
try {
startIfReady();
} catch (IOException e) {
Timber.e(e, "Could not start camera source.");
}
}
private boolean isPortraitMode() {
int orientation = mContext.getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
return false;
}
if (orientation == Configuration.ORIENTATION_PORTRAIT) {
return true;
}
Timber.d("isPortraitMode returning false by default");
return false;
}
private void startRecordingVideo() {
try {
// Start recording
mMediaRecorder.start();
mIsRecordingVideo = true;
} catch (IllegalStateException e) {
e.printStackTrace();
}
}
private void stopRecordingVideo() {
// UI
mIsRecordingVideo = false;
// Stop recording
mMediaRecorder.stop();
mMediaRecorder.reset();
}
public void triggerRecording() {
if (mIsRecordingVideo) {
stopRecordingVideo();
Timber.d("Recording stopped");
} else {
startRecordingVideo();
Timber.d("Recording starting");
}
}
}

Solution 1: Since Android Lollipop, the MediaProjection API is available and, in conjunction with MediaRecorder, can be used to save the contents of a SurfaceView to a video file. This example shows how to output a SurfaceView to a video file.
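For a rough idea, here is a minimal sketch of that approach inside an Activity (assuming API 21+; the request code, the 720x1280 size, the bitrate and the output file name are illustrative placeholders, not values from the linked example):
// Hedged sketch, not the sample's code: capture the screen (SurfaceView preview
// plus overlays) by mirroring the display into a MediaRecorder input surface.
private static final int REQUEST_MEDIA_PROJECTION = 1; // placeholder request code
private MediaProjectionManager mProjectionManager;
private MediaProjection mProjection;
private VirtualDisplay mVirtualDisplay;
private MediaRecorder mScreenRecorder;

private void requestScreenCapture() {
    mProjectionManager = (MediaProjectionManager)
            getSystemService(Context.MEDIA_PROJECTION_SERVICE);
    // Shows the system consent dialog; the result arrives in onActivityResult().
    startActivityForResult(mProjectionManager.createScreenCaptureIntent(),
            REQUEST_MEDIA_PROJECTION);
}

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode != REQUEST_MEDIA_PROJECTION || resultCode != RESULT_OK) {
        return;
    }
    mScreenRecorder = new MediaRecorder();
    mScreenRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
    mScreenRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
    mScreenRecorder.setOutputFile(
            new File(getExternalFilesDir(null), "capture.mp4").getAbsolutePath());
    mScreenRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
    mScreenRecorder.setVideoEncodingBitRate(8000000);
    mScreenRecorder.setVideoFrameRate(30);
    mScreenRecorder.setVideoSize(720, 1280); // match your display/preview size
    try {
        mScreenRecorder.prepare();
    } catch (IOException e) {
        e.printStackTrace();
        return;
    }
    mProjection = mProjectionManager.getMediaProjection(resultCode, data);
    // Route everything drawn on the display into the recorder's surface.
    mVirtualDisplay = mProjection.createVirtualDisplay("overlay-capture",
            720, 1280, getResources().getDisplayMetrics().densityDpi,
            DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
            mScreenRecorder.getSurface(), null, null);
    mScreenRecorder.start();
    // When finished: mScreenRecorder.stop(); mScreenRecorder.release();
    // mVirtualDisplay.release(); mProjection.stop();
}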
Solution 2: Alternatively, you can use one of the neat encoder classes provided in the Grafika repository. Note that this will require you to port the FaceTracker application so that it uses OpenGL to perform all rendering, because the Grafika samples utilise the OpenGL pipeline for fast reads and writes of texture data.
There is a minimal example which achieves exactly what you want using a CircularEncoder in the ContinuousCaptureActivity class. It demonstrates frame blitting: simultaneously displaying frame-buffer data on screen and outputting it to a video.
The major change would be to use a Grafika WindowSurface instead of a SurfaceView for the FaceTracker application; this sets up the EGL context, allowing you to save frame-buffer data to a file via the encoder. Once you can render everything to the WindowSurface, it is trivial to set up recording in the same way as the ContinuousCaptureActivity class.
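To make the frame-blitting pattern concrete, here is a compressed, hedged sketch of the per-frame loop in ContinuousCaptureActivity (mDisplaySurface and mEncoderSurface are Grafika WindowSurfaces, mCameraTexture is the camera's SurfaceTexture, mCircEncoder is a Grafika CircularEncoder, and drawFrame() stands in for your own GL rendering of the camera frame plus overlays):
// Sketch, not verbatim Grafika code: the latched camera frame is drawn twice,
// once to the on-screen EGL window surface and once to the encoder's surface.
mCameraTexture.updateTexImage();   // latch the newest camera frame

// 1) Render to the on-screen surface.
mDisplaySurface.makeCurrent();
drawFrame();
mDisplaySurface.swapBuffers();

// 2) Render the same frame into the video encoder's input surface.
mEncoderSurface.makeCurrent();
drawFrame();
mCircEncoder.frameAvailableSoon(); // tell the encoder thread a frame is coming
mEncoderSurface.swapBuffers();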

Related

Video recording using Camera2 for Portrait and Landscape in Android

I am implementing video recording using the Camera2 API. The video gets recorded, uploaded, and plays back.
But it only works fine in portrait mode. When I record in landscape mode, the result is not shown in landscape: the orientation is odd, and the video plays back reversed.
My code snippet is here:
public class Camera2VideoFragment extends Fragment implements View.OnClickListener {
private static final int SENSOR_ORIENTATION_INVERSE_DEGREES = 270;
private static final int SENSOR_ORIENTATION_DEFAULT_DEGREES = 90;
private boolean orientation = true;
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
private static final String TAG = "Testing";
boolean flag = false;
private static final String VIDEO_DIRECTORY_NAME = "Powerconsent";
private static final SparseIntArray INVERSE_ORIENTATIONS = new SparseIntArray();
private static final SparseIntArray DEFAULT_ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
/**
* An {@link AutoFitTextureView} for camera preview.
*/
private AutoFitTextureView mTextureView;
/**
* Button to record video
*/
private ImageView mButtonVideo;
/**
* Button to record video
*/
private ImageView mSaveVideo;
private File mOutputFile;
/**
* A reference to the opened {@link android.hardware.camera2.CameraDevice}.
*/
private CameraDevice mCameraDevice;
/**
* A reference to the current {@link android.hardware.camera2.CameraCaptureSession} for preview.
*/
private CameraCaptureSession mPreviewSession;
/**
* {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a
* {@link TextureView}.
*/
private TextureView.SurfaceTextureListener mSurfaceTextureListener
= new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture,
int width, int height) {
openCamera(width, height);
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture,
int width, int height) {
Log.d("Testing", "onSurfaceTextureSizeChanged");
configureTransform(width, height);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
Log.d("Testing", "onSurfaceTextureDestroyed");
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
// Log.d("Testing", "onSurfaceTextureUpdated");
}
};
/**
* The {@link android.util.Size} of camera preview.
*/
private Size mPreviewSize;
String destVideoPath = null;
/**
* The {@link android.util.Size} of video recording.
*/
private Size mVideoSize;
PowerManager.WakeLock wl = null;
/**
* Camera preview.
*/
private CaptureRequest.Builder mPreviewBuilder;
/**
* MediaRecorder
*/
private MediaRecorder mMediaRecorder;
private File mCurrentFile;
Chronometer mChronometer;
/**
* Whether the app is recording video now
*/
private boolean mIsRecordingVideo;
private Integer mSensorOrientation;
/**
* An additional thread for running tasks that shouldn't block the UI.
*/
private HandlerThread mBackgroundThread;
/**
* A {@link Handler} for running tasks in the background.
*/
private Handler mBackgroundHandler;
/**
* A {@link Semaphore} to prevent the app from exiting before closing the camera.
*/
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
/**
* {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its status.
*/
private CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice cameraDevice) {
mCameraDevice = cameraDevice;
startPreview();
mCameraOpenCloseLock.release();
if (null != mTextureView) {
configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
}
}
@Override
public void onDisconnected(CameraDevice cameraDevice) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
}
@Override
public void onError(CameraDevice cameraDevice, int error) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
Activity activity = getActivity();
if (null != activity) {
activity.finish();
}
}
};
public static Camera2VideoFragment newInstance() {
Camera2VideoFragment fragment = new Camera2VideoFragment();
// fragment.setRetainInstance(true);
return fragment;
}
/**
* In this sample, we choose a video size with 3x4 aspect ratio. Also, we don't use sizes larger
* than 1080p, since MediaRecorder cannot handle such a high-resolution video.
*
* @param choices The list of available sizes
* @return The video size
*/
private static Size chooseVideoSize(Size[] choices) {
for (Size size : choices) {
if (size.getWidth() == size.getHeight() * 4 / 3 && size.getWidth() <= 1080) {
return size;
}
}
Log.e(TAG, "Couldn't find any suitable video size");
return choices[choices.length - 1];
}
private void screenAlive() {
PowerManager pm = (PowerManager)getActivity().getSystemService(Context.POWER_SERVICE);
wl = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, "Testing:");
wl.acquire(120*60*1000L);
}
/**
* Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
* width and height are at least as large as the respective requested values, and whose aspect
* ratio matches with the specified value.
*
* @param choices The list of sizes that the camera supports for the intended output class
* @param width The minimum desired width
* @param height The minimum desired height
* @param aspectRatio The aspect ratio
* @return The optimal {@code Size}, or an arbitrary one if none were big enough
*/
private static Size chooseOptimalSize(Size[] choices, int width, int height, Size aspectRatio) {
// Collect the supported resolutions that are at least as big as the preview Surface
List<Size> bigEnough = new ArrayList<Size>();
int w = aspectRatio.getWidth();
int h = aspectRatio.getHeight();
for (Size option : choices) {
if (option.getHeight() == option.getWidth() * h / w &&
option.getWidth() >= width && option.getHeight() >= height) {
bigEnough.add(option);
}
}
// Pick the smallest of those, assuming we found any
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizesByArea());
} else {
Log.e(TAG, "Couldn't find any suitable preview size");
return choices[0];
}
}
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
}
private int currentOrientation = -1;
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
Log.d("Testing","onCreateView !!!");
View view = inflater.inflate(R.layout.fragment_camera2_video, container, false);
RelativeLayout frameLayout = (RelativeLayout) view.findViewById(R.id.toplayout);
mChronometer = new Chronometer(getActivity());
// RelativeLayout.LayoutParams layoutParams = (RelativeLayout.LayoutParams) frameLayout.getLayoutParams();//new LinearLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT);
// layoutParams.addRule(RelativeLayout.CENTER_HORIZONTAL);
LinearLayout.LayoutParams layoutParams = new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT);
layoutParams.setMargins(50, 50, 0, 0);
mChronometer.setLayoutParams(layoutParams);
Typeface font = Typeface.createFromAsset(getActivity().getAssets(), "fonts/psans.ttf");
mChronometer.setTypeface(font, Typeface.NORMAL);
mChronometer.setTextSize(20);
mChronometer.setGravity(Gravity.CENTER_HORIZONTAL);
// mChronometer.setBackgroundColor(Color.BLACK);
mChronometer.setTextColor(Color.WHITE);
frameLayout.addView(mChronometer);
screenAlive();
return view;
}
@Override
public void onViewCreated(final View view, Bundle savedInstanceState) {
mTextureView = (AutoFitTextureView) view.findViewById(R.id.texture);
mButtonVideo = (ImageView) view.findViewById(R.id.mRecordVideo);
mButtonVideo.setOnClickListener(this);
mSaveVideo = (ImageView) view.findViewById(R.id.mSaveVideo);
mSaveVideo.setOnClickListener(this);
// view.findViewById(R.id.info).setOnClickListener(this);
}
@Override
public void onResume() {
super.onResume();
Log.d("Testing","onResume !!!");
startBackgroundThread();
if (mTextureView.isAvailable()) {
openCamera(mTextureView.getWidth(), mTextureView.getHeight());
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
@Override
public void onPause() {
closeCamera();
stopBackgroundThread();
super.onPause();
}
@Override
public void onClick(View view) {
switch (view.getId()) {
case R.id.mRecordVideo: {
if (mIsRecordingVideo) {
stopRecordingVideo();
} else {
mChronometer.setBase(SystemClock.elapsedRealtime());
mChronometer.start();
startRecordingVideo();
}
break;
}
case R.id.mSaveVideo:
File destinationPath = new File(String.valueOf(getActivity().getExternalFilesDir(Environment.DIRECTORY_DOWNLOADS)));
File file = new File(destinationPath.getAbsolutePath());
AppLogger.d( "Video destination Path:: "+file.toString()+ " "+Environment.getExternalStorageDirectory());
mOutputFile = getCurrentFile();
Log.d("Testing", "Output file path:: "+mOutputFile.getAbsolutePath());
// notificationBuilder.setProgress(PROGRESS_MAX, PROGRESS_CURRENT, false);
// notificationManager.notify(notificationID, notificationBuilder.build());
Uri videoURI = FileProvider.getUriForFile(getActivity(),
APPLICATION_ID + ".provider", mOutputFile);
Intent service = new Intent(getActivity(), VideoCompressionService.class);
// Add extras to the bundle
service.putExtra("videouri", videoURI);
// service.putExtra("destVideoPath", destVideoPath);
service.putExtra("destVideoPath", mOutputFile.getPath());
// Start the service
getActivity().startService(service);
getActivity().finish();
break;
}
}
protected File getCurrentFile() {
return mCurrentFile;
}
/**
* Starts a background thread and its {@link Handler}.
*/
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
/**
* Stops the background thread and its {@link Handler}.
*/
private void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
/**
* Tries to open a {@link CameraDevice}. The result is listened to by `mStateCallback`.
*/
private void openCamera(int width, int height) {
final Activity activity = getActivity();
if (null == activity || activity.isFinishing()) {
return;
}
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
String cameraId = manager.getCameraIdList()[0];
// Choose the sizes for camera preview and video recording
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics
.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
Log.d("Testing", "Sensor Orientation: "+mSensorOrientation);
mVideoSize = chooseVideoSize(map.getOutputSizes(MediaRecorder.class));
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
width, height, mVideoSize);
int orientation = getResources().getConfiguration().orientation;
Log.d("Testing", "normal Orientation: "+orientation);
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
} else {
mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
}
configureTransform(width, height);
mMediaRecorder = new MediaRecorder();
if (ActivityCompat.checkSelfPermission(getActivity(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
// TODO: Consider calling
// ActivityCompat#requestPermissions
// here to request the missing permissions, and then overriding
// public void onRequestPermissionsResult(int requestCode, String[] permissions,
// int[] grantResults)
// to handle the case where the user grants the permission. See the documentation
// for ActivityCompat#requestPermissions for more details.
return;
}
manager.openCamera(cameraId, mStateCallback, null);
} catch (CameraAccessException e) {
Toast.makeText(activity, "Cannot access the camera.", Toast.LENGTH_SHORT).show();
activity.finish();
} catch (NullPointerException e) {
// Currently an NPE is thrown when the Camera2API is used but not supported on the
// device this code runs.
new ErrorDialog().show(getFragmentManager(), "dialog");
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera opening.");
}
}
private void closeCamera() {
try {
mCameraOpenCloseLock.acquire();
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
if (null != mMediaRecorder) {
mMediaRecorder.release();
mMediaRecorder = null;
}
}
// catch (InterruptedException e) {
catch (Exception e) {
Log.d(TAG, "exception:: "+e.getMessage());
// throw new RuntimeException("Interrupted while trying to lock camera closing.");
} finally {
mCameraOpenCloseLock.release();
}
}
/**
* Start the camera preview.
*/
private void startPreview() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
return;
}
try {
setUpMediaRecorder();
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
List<Surface> surfaces = new ArrayList<Surface>();
Surface previewSurface = new Surface(texture);
surfaces.add(previewSurface);
mPreviewBuilder.addTarget(previewSurface);
Surface recorderSurface = mMediaRecorder.getSurface();
surfaces.add(recorderSurface);
mPreviewBuilder.addTarget(recorderSurface);
mCameraDevice.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
mPreviewSession = cameraCaptureSession;
updatePreview();
}
@Override
public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
Activity activity = getActivity();
if (null != activity) {
Toast.makeText(activity, "Failed", Toast.LENGTH_SHORT).show();
}
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* Update the camera preview. {@link #startPreview()} needs to be called in advance.
*/
private void updatePreview() {
if (null == mCameraDevice) {
return;
}
try {
setUpCaptureRequestBuilder(mPreviewBuilder);
HandlerThread thread = new HandlerThread("CameraPreview");
thread.start();
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void setUpCaptureRequestBuilder(CaptureRequest.Builder builder) {
builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
}
/**
* Configures the necessary {@link android.graphics.Matrix} transformation to `mTextureView`.
* This method should not to be called until the camera preview size is determined in
* openCamera, or until the size of `mTextureView` is fixed.
*
* @param viewWidth The width of `mTextureView`
* @param viewHeight The height of `mTextureView`
*/
private void configureTransform(int viewWidth, int viewHeight) {
Activity activity = getActivity();
if (null == mTextureView || null == mPreviewSize || null == activity) {
return;
}
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale = Math.max(
(float) viewHeight / mPreviewSize.getHeight(),
(float) viewWidth / mPreviewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
}
mTextureView.setTransform(matrix);
}
private void setUpMediaRecorder() throws IOException {
final Activity activity = getActivity();
if (null == activity) {
return;
}
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mCurrentFile = getVideoFile(activity);
mMediaRecorder.setOutputFile(getVideoFile(activity).getAbsolutePath());
mMediaRecorder.setVideoEncodingBitRate(2000000);
// mMediaRecorder.setVideoEncodingBitRate(10000000);
mMediaRecorder.setVideoFrameRate(30);
Log.d("Testing", "Captured rate::: 15");
// mMediaRecorder.setCaptureRate(15.03);
mMediaRecorder.setVideoSize(1280, 720);
// mMediaRecorder.setVideoSize(mVideoSize.getWidth(), mVideoSize.getHeight());
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
Log.d("Testing", "rotation value:: "+rotation);
int orientation = ORIENTATIONS.get(rotation);
mMediaRecorder.setOrientationHint(orientation);
mMediaRecorder.prepare();
}
private int getOrientation(int rotation) {
return (ORIENTATIONS.get(rotation) + mSensorOrientation + 270) % 360;
}
private File getVideoFile(Context context) {
return getOutputMediaFile();
}
private File getOutputMediaFile() {
// External sdcard file location
File mediaStorageDir = new File(String.valueOf(getActivity().getExternalFilesDir(Environment.DIRECTORY_PICTURES)));
// new File(getE);
// Create storage directory if it does not exist
if (!mediaStorageDir.exists()) {
if (!mediaStorageDir.mkdirs()) {
// Log.d(TAG, "Oops! Failed create "+ VIDEO_DIRECTORY_NAME + " directory");
return null;
}
}
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss",
Locale.getDefault()).format(new Date());
File mediaFile;
//String vidFileName = Prefs.getString("docid", "null")+"_"+ timeStamp;
mediaFile = new File(mediaStorageDir.getPath() + File.separator
+ Prefs.getString("docid", "null") + "_"+ timeStamp + ".mp4");
return mediaFile;
}
private void startRecordingVideo() {
try {
// UI
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
mButtonVideo.setImageResource(R.drawable.ic_stop);
mSaveVideo.setVisibility(View.GONE);
}
});
mIsRecordingVideo = true;
// Start recording
if (flag == true){
closePreviewSession();
startPreview();
// setUpMediaRecorder();
// Log.d(TAG, "Media record stopped, setting up again");
// if (mTextureView.isAvailable()) {
// openCamera(mTextureView.getWidth(), mTextureView.getHeight());
// } else {
// mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
// }
// startPreview();
// setUpMediaRecorder();
}
mMediaRecorder.start();
} catch (Exception e) {
Log.d(TAG, "Exception:: "+e.getMessage());
e.printStackTrace();
}
}
private void closePreviewSession() {
if (mPreviewSession != null) {
mPreviewSession.close();
mPreviewSession = null;
}
}
private void stopRecordingVideo() {
// UI
mIsRecordingVideo = false;
mChronometer.stop();
mButtonVideo.setImageResource(R.drawable.ic_record);
mSaveVideo.setVisibility(View.VISIBLE);
try {
mMediaRecorder.stop();
mMediaRecorder.reset();
flag = true;
}
catch (Exception e) {
Log.d(TAG, "media recorder released "+e.getMessage());
e.printStackTrace();
}
}
/**
* Compares two {@code Size}s based on their areas.
*/
static class CompareSizesByArea implements Comparator<Size> {
@Override
public int compare(Size lhs, Size rhs) {
// We cast here to ensure the multiplications won't overflow
return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
(long) rhs.getWidth() * rhs.getHeight());
}
}
public static class ErrorDialog extends DialogFragment {
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
final Activity activity = getActivity();
return new AlertDialog.Builder(activity)
.setMessage("This device doesn't support Camera2 API.")
.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialogInterface, int i) {
activity.finish();
}
})
.create();
}
}
}
The camera preview and recording are done in a fragment. Can someone please check and help me here?
Thanks,
Arindam.
You're setting MediaRecorder.setOrientationHint() based purely on the display orientation of the device.
If you want it to be based also on how the device is oriented in the real world (so that down in the video is played back as down), you need to add more code to listen to orientation sensors of the device.
You can see code that should work for MediaRecorder as well in the reference docs for JPEG orientation in the camera2 API:
https://developer.android.com/reference/android/hardware/camera2/CaptureRequest?hl=en#JPEG_ORIENTATION
which uses OrientationEventListener
private int getJpegOrientation(CameraCharacteristics c, int deviceOrientation) {
if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN) return 0;
int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
// Round device orientation to a multiple of 90
deviceOrientation = (deviceOrientation + 45) / 90 * 90;
// Reverse device orientation for front-facing cameras
boolean facingFront = c.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT;
if (facingFront) deviceOrientation = -deviceOrientation;
// Calculate desired JPEG orientation relative to camera orientation to make
// the image upright relative to the device orientation
int jpegOrientation = (sensorOrientation + deviceOrientation + 360) % 360;
return jpegOrientation;
}
I added the code below, but it still did not solve the issue.
private int getVideoOrientation(CameraCharacteristics c, int deviceOrientation) {
if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN) return 0;
int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
// Round device orientation to a multiple of 90
deviceOrientation = (deviceOrientation + 45) / 90 * 90;
// Reverse device orientation for front-facing cameras
boolean facingFront = c.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT;
if (facingFront) deviceOrientation = -deviceOrientation;
// Calculate desired JPEG orientation relative to camera orientation to make
// the image upright relative to the device orientation
int videoOrientation = (sensorOrientation + deviceOrientation + 360) % 360;
return videoOrientation;
}
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
// Choose the sizes for camera preview and video recording
int orientation = getVideoOrientation(characteristics, rotation); // getResources().getConfiguration().orientation);
mMediaRecorder.setOrientationHint(orientation);
mMediaRecorder.prepare();
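One thing worth checking in the snippet above: getVideoOrientation() expects the device orientation in degrees, as delivered by an OrientationEventListener, whereas getRotation() returns a Surface.ROTATION_* constant (0 to 3), not degrees. A hedged sketch of feeding it a sensor-based value (field and method names here are illustrative):
// Sketch: track the physical device orientation in degrees so it can be
// passed to getVideoOrientation() instead of the display rotation constant.
private int mDeviceOrientation = OrientationEventListener.ORIENTATION_UNKNOWN;
private OrientationEventListener mOrientationListener;

private void startOrientationListener() {
    mOrientationListener = new OrientationEventListener(getActivity()) {
        @Override
        public void onOrientationChanged(int orientation) {
            mDeviceOrientation = orientation; // 0..359, or ORIENTATION_UNKNOWN
        }
    };
    if (mOrientationListener.canDetectOrientation()) {
        mOrientationListener.enable(); // call disable() in onPause()
    }
}

// Then, in setUpMediaRecorder():
// mMediaRecorder.setOrientationHint(
//         getVideoOrientation(characteristics, mDeviceOrientation));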

Face Tracker CameraSource Android: How to brighten front camera quality?

My Face Tracker app is based on Google Vision Face Tracker. By default, Face Tracker uses the rear/back camera, but I want to detect faces with the front camera.
This is the code for the CameraSourcePreview that Google Vision provides:
package com.google.android.gms.samples.vision.face.facetracker.ui.camera;
import android.content.Context;
import android.content.res.Configuration;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.ViewGroup;
import com.google.android.gms.common.images.Size;
import com.google.android.gms.vision.CameraSource;
import java.io.IOException;
public class CameraSourcePreview extends ViewGroup {
private static final String TAG = "CameraSourcePreview";
private Context mContext;
private SurfaceView mSurfaceView;
private boolean mStartRequested;
private boolean mSurfaceAvailable;
private CameraSource mCameraSource;
private GraphicOverlay mOverlay;
public CameraSourcePreview(Context context, AttributeSet attrs) {
super(context, attrs);
mContext = context;
mStartRequested = false;
mSurfaceAvailable = false;
mSurfaceView = new SurfaceView(context);
mSurfaceView.getHolder().addCallback(new SurfaceCallback());
addView(mSurfaceView);
}
public void start(CameraSource cameraSource) throws IOException {
if (cameraSource == null) {
stop();
}
mCameraSource = cameraSource;
if (mCameraSource != null) {
mStartRequested = true;
startIfReady();
}
}
public void start(CameraSource cameraSource, GraphicOverlay overlay) throws IOException {
mOverlay = overlay;
start(cameraSource);
}
public void stop() {
if (mCameraSource != null) {
mCameraSource.stop();
}
}
public void release() {
if (mCameraSource != null) {
mCameraSource.release();
mCameraSource = null;
}
}
private void startIfReady() throws IOException {
if (mStartRequested && mSurfaceAvailable) {
mCameraSource.start(mSurfaceView.getHolder());
if (mOverlay != null) {
Size size = mCameraSource.getPreviewSize();
int min = Math.min(size.getWidth(), size.getHeight());
int max = Math.max(size.getWidth(), size.getHeight());
if (isPortraitMode()) {
// Swap width and height sizes when in portrait, since it will be rotated by
// 90 degrees
mOverlay.setCameraInfo(min, max, mCameraSource.getCameraFacing());
} else {
mOverlay.setCameraInfo(max, min, mCameraSource.getCameraFacing());
}
mOverlay.clear();
}
mStartRequested = false;
}
}
private class SurfaceCallback implements SurfaceHolder.Callback {
@Override
public void surfaceCreated(SurfaceHolder surface) {
mSurfaceAvailable = true;
try {
startIfReady();
} catch (IOException e) {
Log.e(TAG, "Could not start camera source.", e);
}
}
@Override
public void surfaceDestroyed(SurfaceHolder surface) {
mSurfaceAvailable = false;
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
int width = 640;
int height = 480;
if (mCameraSource != null) {
Size size = mCameraSource.getPreviewSize();
if (size != null) {
width = size.getWidth();
height = size.getHeight();
}
}
// Swap width and height sizes when in portrait, since it will be rotated 90 degrees
if (isPortraitMode()) {
int tmp = width;
width = height;
height = tmp;
}
final int layoutWidth = right - left;
final int layoutHeight = bottom - top;
// Computes height and width for potentially doing fit width.
int childWidth = layoutWidth;
int childHeight = (int)(((float) layoutWidth / (float) width) * height);
// If height is too tall using fit width, does fit height instead.
if (childHeight > layoutHeight) {
childHeight = layoutHeight;
childWidth = (int)(((float) layoutHeight / (float) height) * width);
}
for (int i = 0; i < getChildCount(); ++i) {
getChildAt(i).layout(0, 0, childWidth, childHeight);
}
try {
startIfReady();
} catch (IOException e) {
Log.e(TAG, "Could not start camera source.", e);
}
}
private boolean isPortraitMode() {
int orientation = mContext.getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
return false;
}
if (orientation == Configuration.ORIENTATION_PORTRAIT) {
return true;
}
Log.d(TAG, "isPortraitMode returning false by default");
return false;
}
}
I call camera source with this method:
private void startCameraSource() {
// check that the device has play services available.
int code = GoogleApiAvailability.getInstance().isGooglePlayServicesAvailable(
getApplicationContext());
if (code != ConnectionResult.SUCCESS) {
Dialog dlg =
GoogleApiAvailability.getInstance().getErrorDialog(this, code, RC_HANDLE_GMS);
dlg.show();
}
if (mCameraSource != null) {
try {
mPreview.start(mCameraSource, mGraphicOverlay);
} catch (IOException e) {
Log.e(TAG, "Unable to start camera source.", e);
mCameraSource.release();
mCameraSource = null;
}
}
}
The Face Tracker front camera is still too dark compared with the default phone camera app.
How can I brighten the front camera in Face Tracker Google Vision? Is it related to the SurfaceView?
<com.google.android.gms.samples.vision.face.facetracker.ui.camera.CameraSourcePreview
android:id="@+id/preview"
android:layout_width="match_parent"
android:layout_height="0dp"
android:layout_weight="1.00"
android:weightSum="1">
<com.google.android.gms.samples.vision.face.facetracker.ui.camera.GraphicOverlay
android:id="@+id/faceOverlay"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_weight="0.79" />
</com.google.android.gms.samples.vision.face.facetracker.ui.camera.CameraSourcePreview>
It is not related to the SurfaceView at all; it is a Camera API misconfiguration. You will have to make some additional changes inside your CameraSource.java file.
You can find it on this GitHub repository
First, you need to know that this is an exposure problem: it concerns the amount of light the camera lets in through the lens. You need to find out whether your camera supports exposure compensation, by querying getMinExposureCompensation() and getMaxExposureCompensation() on your Camera.Parameters instance. As the documentation explains, if both methods return 0, exposure compensation is not supported and there's nothing you can do.
Luckily, this capability is supported on most phones. You can check the current exposure by calling getExposureCompensation(), which returns the default value (usually 0, meaning the exposure is not adjusted). To prevent dark images, you then only have to set a new exposure value between the min and max using setExposureCompensation() and apply the Camera.Parameters to your camera.
Finally, you can lock the exposure to avoid losing the configuration, using setAutoExposureLock() and getAutoExposureLock(). Most importantly, before setting the exposure lock you must make sure that isAutoExposureLockSupported() returns true.
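Putting those calls together, a minimal sketch, assuming you have access to the opened Camera instance inside CameraSource.java (the mCamera field name and the max / 2 starting value are illustrative):
Camera.Parameters params = mCamera.getParameters();
int minExposure = params.getMinExposureCompensation();
int maxExposure = params.getMaxExposureCompensation();
if (minExposure != 0 || maxExposure != 0) {
    // Exposure compensation is supported: raise it to brighten the image.
    params.setExposureCompensation(maxExposure / 2); // tune per device
}
if (params.isAutoExposureLockSupported()) {
    params.setAutoExposureLock(true); // keep the chosen exposure
}
mCamera.setParameters(params);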
Good Luck!

How can I change to the FRONT FACING CAMERA in this library?

I'm trying to use the QRCodeReaderView library from dlazaro66 to read QR codes, but I'm facing some problems. The first is that on phones like the Nexus 5X the image is shown upside down; that was solved with a new method added to the library. But now I need to change the selected camera to the FRONT one, because the default is the BACK/REAR camera. How can I achieve this?
The code of the class is below; thanks.
public class QRCodeReaderView extends SurfaceView implements SurfaceHolder.Callback,Camera.PreviewCallback {
public interface OnQRCodeReadListener {
public void onQRCodeRead(String text, PointF[] points);
public void cameraNotFound();
public void QRCodeNotFoundOnCamImage();
}
private OnQRCodeReadListener mOnQRCodeReadListener;
private static final String TAG = QRCodeReaderView.class.getName();
private QRCodeReader mQRCodeReader;
private int mPreviewWidth;
private int mPreviewHeight;
private SurfaceHolder mHolder;
private CameraManager mCameraManager;
public QRCodeReaderView(Context context) {
super(context);
init();
}
public QRCodeReaderView(Context context, AttributeSet attrs) {
super(context, attrs);
init();
}
public void setOnQRCodeReadListener(OnQRCodeReadListener onQRCodeReadListener) {
mOnQRCodeReadListener = onQRCodeReadListener;
}
public CameraManager getCameraManager() {
return mCameraManager;
}
@SuppressWarnings("deprecation")
private void init() {
if (checkCameraHardware(getContext())){
mCameraManager = new CameraManager(getContext());
mHolder = this.getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); // Need to set this flag even though it's deprecated
} else {
Log.e(TAG, "Error: Camera not found");
if (mOnQRCodeReadListener != null) {
mOnQRCodeReadListener.cameraNotFound();
}
}
}
/****************************************************
* SurfaceHolder.Callback,Camera.PreviewCallback
****************************************************/
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
// Indicate camera, our View dimensions
mCameraManager.openDriver(holder,this.getWidth(),this.getHeight());
} catch (IOException e) {
Log.w(TAG, "Can not openDriver: "+e.getMessage());
mCameraManager.closeDriver();
}
try {
mQRCodeReader = new QRCodeReader();
mCameraManager.startPreview();
} catch (Exception e) {
Log.e(TAG, "Exception: " + e.getMessage());
mCameraManager.closeDriver();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
Log.d(TAG, "surfaceDestroyed");
mCameraManager.getCamera().setPreviewCallback(null);
mCameraManager.getCamera().stopPreview();
mCameraManager.getCamera().release();
mCameraManager.closeDriver();
}
// Called when camera take a frame
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
PlanarYUVLuminanceSource source = mCameraManager.buildLuminanceSource(data, mPreviewWidth, mPreviewHeight);
HybridBinarizer hybBin = new HybridBinarizer(source);
BinaryBitmap bitmap = new BinaryBitmap(hybBin);
try {
Result result = mQRCodeReader.decode(bitmap);
// Notify we found a QRCode
if (mOnQRCodeReadListener != null) {
// Transform resultPoints to View coordinates
PointF[] transformedPoints = transformToViewCoordinates(result.getResultPoints());
mOnQRCodeReadListener.onQRCodeRead(result.getText(), transformedPoints);
}
} catch (ChecksumException e) {
Log.d(TAG, "ChecksumException");
e.printStackTrace();
} catch (NotFoundException e) {
// Notify QR not found
if (mOnQRCodeReadListener != null) {
mOnQRCodeReadListener.QRCodeNotFoundOnCamImage();
}
} catch (FormatException e) {
Log.d(TAG, "FormatException");
e.printStackTrace();
} finally {
mQRCodeReader.reset();
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Log.d(TAG, "surfaceChanged");
if (mHolder.getSurface() == null){
Log.e(TAG, "Error: preview surface does not exist");
return;
}
//preview_width = width;
//preview_height = height;
mPreviewWidth = mCameraManager.getPreviewSize().x;
mPreviewHeight = mCameraManager.getPreviewSize().y;
mCameraManager.stopPreview();
mCameraManager.getCamera().setPreviewCallback(this);
mCameraManager.getCamera().setDisplayOrientation(90); // Portrait mode
// Fix the camera sensor rotation
setCameraDisplayOrientation(this.getContext(), mCameraManager.getCamera());
mCameraManager.startPreview();
}
/**
* Transform result to surfaceView coordinates
*
* This method is needed because coordinates are given in landscape camera coordinates.
* It works now, but the transform operations aren't fully explained
*
* TODO re-write this method explaining each single value
*
* @return a new PointF array with transformed points
*/
private PointF[] transformToViewCoordinates(ResultPoint[] resultPoints) {
PointF[] transformedPoints = new PointF[resultPoints.length];
int index = 0;
if (resultPoints != null){
float previewX = mCameraManager.getPreviewSize().x;
float previewY = mCameraManager.getPreviewSize().y;
float scaleX = this.getWidth()/previewY;
float scaleY = this.getHeight()/previewX;
for (ResultPoint point :resultPoints){
PointF tmppoint = new PointF((previewY- point.getY())*scaleX, point.getX()*scaleY);
transformedPoints[index] = tmppoint;
index++;
}
}
return transformedPoints;
}
/** Check if this device has a camera */
private boolean checkCameraHardware(Context context) {
if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA)){
// this device has a camera
return true;
}
else if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FRONT)){
// this device has a front camera
return true;
}
else if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_ANY)){
// this device has any camera
return true;
}
else {
// no camera on this device
return false;
}
}
/**
* Fix for the camera sensor on some devices (e.g. Nexus 5X)
* https://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
*/
@SuppressWarnings("deprecation")
public static void setCameraDisplayOrientation(Context context, android.hardware.Camera camera) {
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.GINGERBREAD) {
Camera.CameraInfo info = new Camera.CameraInfo();
android.hardware.Camera.getCameraInfo(0, info);
WindowManager windowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
int rotation = windowManager.getDefaultDisplay().getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0: degrees = 0; break;
case Surface.ROTATION_90: degrees = 90; break;
case Surface.ROTATION_180: degrees = 180; break;
case Surface.ROTATION_270: degrees = 270; break;
}
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (info.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate the mirror
} else { // back-facing
result = (info.orientation - degrees + 360) % 360;
}
camera.setDisplayOrientation(result);
}
}
}
You must change the CameraManager.openCamera() method.
Alternatively, version 2.0.0 and above now has this functionality. Please update your library to the latest version: https://github.com/dlazaro66/QRCodeReaderView#add-it-to-your-project
and use these two methods to choose which camera you want to open:
mydecoderview.setFrontCamera();
mydecoderview.setBackCamera();
You can also specify a camera by ID:
public void setPreviewCameraId(int cameraId)
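For example (a usage sketch; the view ID is illustrative, and passing the facing constant as the camera ID mirrors what the library's setFrontCamera() does internally):
QRCodeReaderView mydecoderview =
        (QRCodeReaderView) findViewById(R.id.qrdecoderview);
mydecoderview.setFrontCamera();
// or, equivalently, by ID:
mydecoderview.setPreviewCameraId(Camera.CameraInfo.CAMERA_FACING_FRONT);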

ImageFormat set in surfaceChanged changes to something else in onPreviewFrame

Below is the code I am trying; the CamPreview class is used by the launcher activity. I was able to get the preview working fine before implementing PreviewCallback. When I try PreviewCallback by implementing onPreviewFrame, I am totally confused about how it works internally. The following things confuse me; kindly clarify them.
1) Though I set camera parameters like the image format and preview size, they don't seem to persist until the invocation of onPreviewFrame. For example, the Log.i statements in the surfaceChanged method (called at least once immediately after surfaceCreated, as per my understanding) print the preview size as 1056x864, yet onPreviewFrame reports the preview size as 1920x1080.
Even the picture format changes from NV21 (17 in surfaceChanged) to JPEG (256 in onPreviewFrame).
I have verified and confirmed that the Camera instance passed to onPreviewFrame is the same as the member variable mCamera declared in the CamPreview class.
If I am able to successfully get the preview format as NV21 in onPreviewFrame, how do I convert that to ARGB format? I have tried the methods posted on Stack Overflow, but the data passed to onPreviewFrame fails with an index-out-of-bounds error, which led me to check the image formats in the first place. If anyone has tried something similar, please let me know what I was missing during setup that is causing this mess :(.
I have tried to create a bitmap by first creating a YuvImage from the byte[] passed to onPreviewFrame, which gave me green-lantern images (all green, or sometimes garbage)!
2) You can see the other Log.i statements next to the ones I mentioned in point (1). They print the bits-per-pixel and bytes-per-pixel information of the preview in surfaceChanged and onPreviewFrame, which turn out to be 12 and 1 respectively. How is that even possible? Again, this could be a side effect of what is happening in (1).
public class CamPreview extends SurfaceView implements SurfaceHolder.Callback, Camera.PreviewCallback {
private static final String TAG = "CamPreview";
private SurfaceHolder mHolder;
private Camera mCamera;
private byte[] mVideoSource;
private Bitmap mBackBuffer;
private Paint mPaint;
private Context mContext;
public CamPreview(Context context) {
super(context);
mContext = context;
mCamera = getCameraInstance();
mHolder = getHolder();
mHolder.addCallback(this);
}
public void surfaceCreated(SurfaceHolder holder) {
try {
mCamera.setDisplayOrientation(90);
mCamera.setPreviewDisplay(null);
mCamera.setPreviewCallbackWithBuffer(this);
this.setWillNotDraw(false);
Log.i(TAG, "#SurfaceCreated: initilization finished");
} catch (IOException eIOException) {
Log.i(TAG, "Error setting camera preview: " + eIOException.getMessage());
throw new IllegalStateException();
}
}
private Size findBestResolution(int pWidth, int pHeight) {
List<Size> lSizes = mCamera.getParameters().getSupportedPreviewSizes();
Size lSelectedSize = mCamera.new Size(0, 0);
for (Size lSize : lSizes) {
if ((lSize.width <= pWidth)
&& (lSize.height <= pHeight)
&& (lSize.width >= lSelectedSize.width)
&& (lSize.height >= lSelectedSize.height)) {
lSelectedSize = lSize;
}
}
if ((lSelectedSize.width == 0)
|| (lSelectedSize.height == 0)) {
lSelectedSize = lSizes.get(0);
}
return lSelectedSize;
}
private void createBuffers(String caller, Size prefSize) {
Camera.Parameters camParams = mCamera.getParameters();
int previewWidth = prefSize.width;
int previewHeight = prefSize.height;
mBackBuffer = Bitmap.createBitmap(previewWidth,
previewHeight,
Bitmap.Config.ARGB_8888);
Log.i(TAG,"#"+caller+": Picture Width " + Integer.toString(previewWidth));
Log.i(TAG,"#"+caller+": Picture Height " + Integer.toString(previewHeight));
Log.i(TAG,"#"+caller+": Picture format " + Integer.toString(ImageFormat.NV21));
camParams.setPreviewSize(previewWidth,previewHeight);
camParams.setPreviewFormat(ImageFormat.NV21);
mCamera.setParameters(camParams);
PixelFormat pxlFrmt = new PixelFormat();
PixelFormat.getPixelFormatInfo(camParams.getPreviewFormat(), pxlFrmt);
Log.i(TAG,"#"+caller+": Bits per pixel " + Integer.toString(pxlFrmt.bitsPerPixel));
Log.i(TAG,"#"+caller+": Bytes per pixel " + Integer.toString(pxlFrmt.bytesPerPixel));
int sz = previewWidth * previewHeight * pxlFrmt.bitsPerPixel/8;
mVideoSource = new byte[sz];
mCamera.addCallbackBuffer(mVideoSource);
Log.i(TAG, "#"+caller+": backbuffer initilization finished");
try {
mCamera.setPreviewDisplay(mHolder);
mCamera.startPreview();
Log.i(TAG, "#SurfaceCreated: preview started");
} catch (Exception e){
Log.d(TAG, "Error starting camera preview: " + e.getMessage());
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
// If your preview can change or rotate, take care of those events here.
// Make sure to stop the preview before resizing or reformatting it.
if (mHolder.getSurface() == null) {
Log.i(TAG,"No proper holder");
return;
}
try {
mCamera.stopPreview();
} catch (Exception e){
Log.i(TAG,"tried to stop a non-existent preview");
return;
}
createBuffers("surfaceChanged",findBestResolution(w, h));
}
public void onPreviewFrame(byte[] data, Camera camera) {
Log.i(TAG,"#onPreviewFrame: Invoked");
Camera.Parameters params = camera.getParameters();
Camera.Size camSize = params.getPictureSize();
int w = camSize.width;
int h = camSize.height;
Log.i(TAG,"#onPreviewFrame: Picture Width " + Integer.toString(w));
Log.i(TAG,"#onPreviewFrame: Picture Height " + Integer.toString(h));
Log.i(TAG,"#onPreviewFrame: Picture format " + Integer.toString(params.getPictureFormat()));
PixelFormat pxlFrmt = new PixelFormat();
PixelFormat.getPixelFormatInfo(params.getPreviewFormat(), pxlFrmt);
Log.i(TAG,"#onPreviewFrame: Bits per pixel " + Integer.toString(pxlFrmt.bitsPerPixel));
Log.i(TAG,"#onPreviewFrame: Bytes per pixel " + Integer.toString(pxlFrmt.bytesPerPixel));
mBackBuffer = BitmapFactory.decodeByteArray(data, 0, data.length);
Log.i(TAG,"#onPreviewFrame: Back buffer set.");
invalidate();
}
@Override
protected void onDraw(Canvas pCanvas) {
super.onDraw(pCanvas);
Log.i(TAG,"#onDraw: Invoked");
if (mCamera != null) {
Log.i(TAG,"#onDraw: Bbefore draw call to canvas");
pCanvas.drawBitmap(mBackBuffer, 0, 0, mPaint);
mCamera.addCallbackBuffer(mVideoSource);
Log.i(TAG,"#onDraw: Draw finished");
}
}
/** Check if this device has a camera */
private boolean checkCameraHardware(Context context) {
if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA)){
// this device has a camera
return true;
} else {
// no camera on this device
return false;
}
}
/** A safe way to get an instance of the Camera object. */
private Camera getCameraInstance(){
Camera c = null;
if(checkCameraHardware(mContext)) {
try {
Log.i(TAG, "Trying to open the camera");
c = Camera.open(0);
Log.i(TAG, "Camera opened successfully.");
}
catch (Exception e){
Log.i(TAG, e.getMessage());
}
}
return c;
}
private void releaseCamera(){
if (mCamera != null){
mCamera.release(); // release the camera for other applications
mCamera = null;
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
if (mCamera != null) {
mCamera.stopPreview();
releaseCamera();
mVideoSource = null;
mBackBuffer = null;
}
}
}
OK, I figured out a couple of things after carefully rereading the Android docs. Apparently preview[Size|Format] is completely different from picture[Size|Format], which I had assumed to be one and the same. That fixed my rendering issues and the crashes caused by the incorrect data format, and it also cleared up my confusion about the camera parameters changing automatically.
The whole example is working now. However, I am seeing two layers of preview: the one rendered directly by the camera, and the one I am rendering through onDraw. I am not sure whether I should be seeing both of them or not. Below is the fixed code.
Thank you to anyone who might have spent time on this. Now I will work on moving the whole onPreviewFrame logic to native code to speed things up! :)
public class CamPreview extends SurfaceView implements SurfaceHolder.Callback, Camera.PreviewCallback {
private static final String TAG = "CamPreview";
private static final int mPreviewWidth = 1280;
private static final int mPreviewHeight = 720;
private static final int mFPSXOff = 72;
private static final int mFPSYOff = 72;
private static final int mFPSSize = 64;
private float mTotalTime;
private float mFrameCount;
private String mFPS;
private long mStart;
private Context mContext;
private SurfaceHolder mHolder;
private Camera mCamera;
private byte[] mVideoSource;
private Bitmap mBackBuffer;
private Paint mPaint;
public CamPreview(Context context) {
super(context);
mContext = context;
mHolder = getHolder();
mCamera = getCameraInstance();
mHolder.addCallback(this);
mFrameCount = 0;
mTotalTime = 0;
mFPS = "0 FPS";
mStart = 0;
mPaint = new Paint();
mPaint.setColor(0xFFFF0000);
mPaint.setTextSize(mFPSSize);
}
public void surfaceCreated(SurfaceHolder holder) {
try {
mCamera.setPreviewDisplay(null);
mCamera.setPreviewCallbackWithBuffer(this);
this.setWillNotDraw(false);
} catch (IOException eIOException) {
Log.i(TAG, "Error setting camera preview: " + eIOException.getMessage());
throw new IllegalStateException();
}
}
private Size findBestResolution(int pWidth, int pHeight) {
List<Size> lSizes = mCamera.getParameters().getSupportedPreviewSizes();
Size lSelectedSize = mCamera.new Size(0, 0);
for (Size lSize : lSizes) {
if ((lSize.width <= pWidth)
&& (lSize.height <= pHeight)
&& (lSize.width >= lSelectedSize.width)
&& (lSize.height >= lSelectedSize.height)) {
lSelectedSize = lSize;
}
}
if ((lSelectedSize.width == 0)
|| (lSelectedSize.height == 0)) {
lSelectedSize = lSizes.get(0);
}
return lSelectedSize;
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
if (mHolder.getSurface() == null) {
Log.i(TAG,"No proper holder");
return;
}
try {
mCamera.stopPreview();
} catch (Exception e) {
Log.i(TAG,"tried to stop a non-existent preview");
return;
}
PixelFormat pxlFrmt = new PixelFormat();
Camera.Parameters camParams = mCamera.getParameters();
Size previewSize = findBestResolution(w, h);
int previewWidth = previewSize.width;
int previewHeight = previewSize.height;
camParams.setPreviewSize(previewWidth,previewHeight);
camParams.setPreviewFormat(ImageFormat.NV21);
mCamera.setParameters(camParams);
mCamera.setDisplayOrientation(90);
mBackBuffer = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
PixelFormat.getPixelFormatInfo(camParams.getPreviewFormat(), pxlFrmt);
int sz = previewWidth * previewHeight * pxlFrmt.bitsPerPixel/8;
mVideoSource = new byte[sz];
mCamera.addCallbackBuffer(mVideoSource);
try {
mCamera.setPreviewDisplay(mHolder);
mCamera.startPreview();
Log.i(TAG, "#SurfaceChanged: preview started");
} catch (Exception e){
Log.d(TAG, "#SurfaceChanged:Error starting camera preview: " + e.getMessage());
}
mFrameCount = 0;
mTotalTime = 0;
mStart = SystemClock.elapsedRealtime();
}
public void onPreviewFrame(byte[] data, Camera camera) {
Log.i(TAG,"#onPreviewFrame: Invoked");
Camera.Parameters params = camera.getParameters();
Camera.Size camSize = params.getPreviewSize();
int w = camSize.width;
int h = camSize.height;
PixelFormat pxlFrmt = new PixelFormat();
PixelFormat.getPixelFormatInfo(params.getPreviewFormat(), pxlFrmt);
try {
YuvImage yuv = new YuvImage(data,ImageFormat.NV21,w,h,null);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
yuv.compressToJpeg(new Rect(0,0,w,h), 100, baos);
byte[] jpgData = baos.toByteArray();
mBackBuffer = BitmapFactory.decodeByteArray(jpgData, 0, jpgData.length);
} catch (Exception e) {
;
}
Log.i(TAG,"#onPreviewFrame: Backbuffer set.");
postInvalidate();
mFrameCount++;
long end = SystemClock.elapsedRealtime();
mTotalTime += (end-mStart);
mStart = end;
mFPS = Float.toString((1000*mFrameCount/mTotalTime))+" fps";
}
@Override
protected void onDraw(Canvas pCanvas) {
Log.i(TAG,"#onDraw: Invoked");
if (mCamera != null) {
if(mBackBuffer==null) {
Log.i(TAG, "Back buffer is null :((((((( ");
} else {
pCanvas.drawBitmap(mBackBuffer, 0, 0, null);
pCanvas.drawText(mFPS, mFPSXOff, mFPSYOff, mPaint);
mCamera.addCallbackBuffer(mVideoSource);
}
}
}
private boolean checkCameraHardware(Context context) {
if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA)){
return true;
} else {
return false;
}
}
private Camera getCameraInstance(){
Camera c = null;
if(checkCameraHardware(mContext)) {
try {
c = Camera.open(0);
Log.i(TAG, "Camera opened successfully");
Camera.Parameters params = c.getParameters();
params.setPreviewFormat(ImageFormat.NV21);
params.setPreviewSize(mPreviewWidth, mPreviewHeight);
c.setParameters(params);
Log.i(TAG, "NV21 format set to camera with resolution 1280x720");
}
catch (Exception e){
Log.i(TAG, e.getMessage());
}
}
return c;
}
private void releaseCamera(){
if (mCamera != null){
mCamera.release();
Log.i(TAG,"#releaseCamera:");
mCamera = null;
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
releaseCamera();
mVideoSource = null;
mBackBuffer = null;
}
}
}

Camera preview slowing down on re-adding callback buffer in onPreviewFrame()

I'm trying to capture frames from a camera preview so that I can do some image processing on the frames in the background while the user sees the camera preview.
For that, I initially add 60 buffers using addCallbackBuffer() in the surfaceChanged() method of the SurfaceView, and then on each onPreviewFrame() call I re-add the used buffer.
The problem is that re-adding the buffer in onPreviewFrame() slows down the preview.
I'm also counting the number of calls to onPreviewFrame() every second. In the first second I get more than 70 calls to onPreviewFrame(), which drops to fewer than 25 from the second second onwards.
Here is the code
public class MySurfaceView extends SurfaceView implements
SurfaceHolder.Callback, Camera.PreviewCallback {
private static final int BUFFER_COUNT = 60;
private SurfaceHolder mHolder;
private Camera mCamera;
private boolean isPreviewRunning;
private final FPSCounter fpscounter = new FPSCounter();
private int frameWidth, frameHeight;
private byte[] prevFrameByteArr, currFrameByteArr;
public MySurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
mHolder = getHolder();
mHolder.addCallback(this);
}
public byte[] getPrevFrameByteArray() {
return prevFrameByteArr;
}
public byte[] getCurrFrameByteArray() {
return currFrameByteArr;
}
public int getFrameRate() {
return fpscounter.getLastFrameCount();
}
public int getFrameWidth() {
return frameWidth;
}
public int getFrameHeight() {
return frameHeight;
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
synchronized (this) {
prevFrameByteArr = currFrameByteArr;
currFrameByteArr = data;
}
mCamera.addCallbackBuffer(data);
fpscounter.logFrame();
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
synchronized (this) {
if (isPreviewRunning)
mCamera.stopPreview();
Camera.Parameters parameters = mCamera.getParameters();
parameters.setRecordingHint(true);
parameters.setPreviewFormat(ImageFormat.NV21);
/* To get better frame rate, get the least resolution that matches the current aspect ratio */
List<Size> sizes = parameters.getSupportedPreviewSizes();
Size currPreviewSize = parameters.getPreviewSize();
float ar = (float) (Math.floor(((float) currPreviewSize.width / currPreviewSize.height) * 10) / 10);
for (Size s : sizes) {
int w = s.width, h = s.height;
float resAr = (float) (Math.floor(((float) w / h) * 10) / 10);
if (ar == resAr) {
this.frameWidth = w;
this.frameHeight = h;
parameters.setPreviewSize(w, h);
currPreviewSize = s;
for (int i = 0; i < BUFFER_COUNT; i++) {
byte[] buffer = new byte[w * h *
ImageFormat.getBitsPerPixel(ImageFormat.NV21) / 8];
mCamera.addCallbackBuffer(buffer);
}
break;
}
}
mCamera.setParameters(parameters);
try {
mCamera.setPreviewDisplay(holder);
mCamera.setPreviewCallbackWithBuffer(this);
mCamera.startPreview();
isPreviewRunning = true;
} catch (IOException e) {
e.printStackTrace();
}
}
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
synchronized (this) {
setWillNotDraw(true);
mCamera = Camera.open();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
synchronized (this) {
try {
if (mCamera != null) {
mCamera.setPreviewCallback(null);
mCamera.stopPreview();
mCamera.release();
isPreviewRunning = false;
}
} catch (Exception e) {
Log.e("cam error", e.getMessage());
}
}
}
}
and the FPSCounter class
public class FPSCounter {
private long startTime;
private int frames, lastFrameCount;
public void logFrame() {
frames++;
if (System.nanoTime() - startTime >= 1000000000) {
lastFrameCount = frames;
frames = 0;
startTime = System.nanoTime();
}
}
public int getLastFrameCount() {
return lastFrameCount;
}
}
Does anybody know how this could be resolved?
I have not seen Android devices that reliably deliver more than 30 FPS. But one caveat that may cause slowdown is when onPreviewFrame() arrives on the main (UI) thread and thus competes for time with UI events like touch, layout, or even rendering. Please see how you can painlessly offload the preview callbacks to a secondary thread: https://stackoverflow.com/a/19154438/192373.
At any rate, pre-allocation of 60 buffers smells wrong. If you capture preview frames for more than a second, you must process and recycle the frames in real time. So, 3 buffers should be enough: one is processed by your program, one is free and can be locked by the camera at any moment, and one is locked by the camera and receives the current frame.
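A hedged sketch of that offloading suggestion, adapted to the MySurfaceView class above: with the old Camera API, preview callbacks are delivered on the looper thread that called Camera.open(), so opening the camera from a HandlerThread keeps onPreviewFrame() off the UI thread (names here are illustrative).
// Sketch: replace the Camera.open() call in surfaceCreated() with a post to a
// background HandlerThread, so preview callbacks no longer run on the UI thread.
private HandlerThread mCameraThread;
private Handler mCameraHandler;

private void openCameraInBackground() {
    mCameraThread = new HandlerThread("CameraHandlerThread");
    mCameraThread.start();
    mCameraHandler = new Handler(mCameraThread.getLooper());
    mCameraHandler.post(new Runnable() {
        @Override
        public void run() {
            mCamera = Camera.open(); // callbacks now arrive on this thread
            mCamera.setPreviewCallbackWithBuffer(MySurfaceView.this);
        }
    });
}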
