Capturing image using camera2 API - Android

I am working on an application in which I need to capture images in LANDSCAPE mode. The functionality works, but whenever an image is captured, the saved image contains extra area at the sides that is not visible in the preview. This happens when I use the transform method (code below). How do I remove the extra area from the captured image? Please look at the images below:
1) Before Capturing Image
2) After Capturing Image
Code for camera landscape mode
private void transformImage(int viewWidth, int viewHeight) {
if (null == textureView || null == previewSize) {
return;
}
int rotation = getWindowManager().getDefaultDisplay().getRotation();
matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, previewSize.getHeight(), previewSize.getWidth());
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale = Math.max(
(float) viewHeight / previewSize.getHeight(),
(float) viewWidth / previewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
} else if (Surface.ROTATION_180 == rotation) {
matrix.postRotate(180, centerX, centerY);
}
textureView.setTransform(matrix);
}
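The extra area most likely appears because the transform above scales with Math.max(...) (a fill/center-crop), so the TextureView only shows a centered crop of the frame while the saved JPEG keeps the sensor's full field of view. A minimal sketch (not part of the original code) of one way to match them is to center-crop the captured bitmap to the view's aspect ratio; it assumes the decoded bitmap and the TextureView have the same orientation and that viewWidth/viewHeight are the values passed to transformImage():
// Sketch: center-crop the captured JPEG to the preview's aspect ratio.
// Uses android.graphics.Bitmap and android.graphics.BitmapFactory.
private Bitmap cropToPreviewAspect(byte[] jpegBytes, int viewWidth, int viewHeight) {
    Bitmap full = BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.length);
    float viewAspect = (float) viewWidth / viewHeight;
    float imageAspect = (float) full.getWidth() / full.getHeight();
    int cropWidth = full.getWidth();
    int cropHeight = full.getHeight();
    if (imageAspect > viewAspect) {
        // Image is wider than the view: trim the left/right sides.
        cropWidth = Math.round(full.getHeight() * viewAspect);
    } else {
        // Image is taller than the view: trim the top/bottom.
        cropHeight = Math.round(full.getWidth() / viewAspect);
    }
    int x = (full.getWidth() - cropWidth) / 2;
    int y = (full.getHeight() - cropHeight) / 2;
    return Bitmap.createBitmap(full, x, y, cropWidth, cropHeight);
}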
Code for Capturing Images
void getPicture() {
if (cameraDevice == null) {
return;
}
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
assert manager != null;
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraDevice.getId());
if (characteristics != null) {
jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.JPEG);
}
int width = 640, height = 480;
if (jpegSizes != null && jpegSizes.length > 0) {
width = jpegSizes[0].getWidth();
height = jpegSizes[0].getHeight();
}
final ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
List<Surface> outputSurfaces = new ArrayList<Surface>(2);
outputSurfaces.add(reader.getSurface());
outputSurfaces.add(new Surface(textureView.getSurfaceTexture()));
final CaptureRequest.Builder capturebuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
capturebuilder.addTarget(reader.getSurface());
capturebuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
int rotation = getWindowManager().getDefaultDisplay().getRotation();
capturebuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATION.get(rotation));
ImageReader.OnImageAvailableListener imageAvailableListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = null;
try {
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
bytes = new byte[buffer.capacity()];
buffer.get(bytes);
save(bytes);
} catch (Exception ee) {
Log.e("ImageReader Error ", ee.getMessage());
} finally {
if (image != null)
image.close();
}
}
void save(byte[] bytes) {
File file12 = getOutputMediaFile();
OutputStream outputStream = null;
try {
assert file12 != null;
//file12 = new Compressor(VehicleEpaCamera.this).compressToFile(file12);
outputStream = new FileOutputStream(file12);
outputStream.write(bytes);
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
if (outputStream != null)
outputStream.close();
} catch (Exception e) {
}
}
}
};
reader.setOnImageAvailableListener(imageAvailableListener, handler);
previewSSession = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
super.onCaptureStarted(session, request, timestamp, frameNumber);
}
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
try {
session.stopRepeating();
} catch (CameraAccessException e) {
Toast.makeText(VehicleEpaCamera.this, e.getMessage(), Toast.LENGTH_SHORT).show();
e.printStackTrace();
}
if (bytes != null) {
callCloudVision(bitmap, feature);
} else {
Toast.makeText(VehicleEpaCamera.this, "Something went wrong", Toast.LENGTH_SHORT).show();
getPicture();
}
}
};
cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
session.capture(capturebuilder.build(), previewSSession, handler);
} catch (Exception e) {
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
}
}, handler);
} catch (Exception e) {
}
}
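getPicture() above reads JPEG_ORIENTATION from an ORIENTATION lookup that is not shown in the question. A common definition (an assumption here, matching the android.util.SparseIntArray used later on this page for a sensor mounted at 90 degrees) is:
// Assumed definition of the ORIENTATION map used by capturebuilder above.
private static final SparseIntArray ORIENTATION = new SparseIntArray();
static {
    ORIENTATION.append(Surface.ROTATION_0, 90);
    ORIENTATION.append(Surface.ROTATION_90, 0);
    ORIENTATION.append(Surface.ROTATION_180, 270);
    ORIENTATION.append(Surface.ROTATION_270, 180);
}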

Related

Camera 2 API reduces the quality after capture

I am using the camera in my app to take pictures of ID cards, and I have a rectangular overlay to which the image is cropped. The issue is that the image quality is reduced once the image is captured.
I am unable to figure out where exactly this happens. In the cutImage method I crop the image, but I don't think I change its resolution there.
Can anyone suggest where the quality might be going down?
takePicture is called when the user taps to take the picture.
Once the picture is taken there is a 'use picture' button; tapping it calls usePicture.
The cutImage method is used to crop the image based on the preview.
Any suggestions on how to stop the resolution from going down would be very helpful.
protected void takePicture() {
Log.e(TAG, "takePicture started");
if(null == cameraDevice) {
Log.e(TAG, "cameraDevice is null");
return;
}
try {
ImageReader reader = ImageReader.newInstance(textureViewWidth, textureViewHeight, ImageFormat.JPEG, 1);
List<Surface> outputSurfaces = new ArrayList<Surface>(2);
outputSurfaces.add(reader.getSurface());
outputSurfaces.add(new Surface(textureView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
// Orientation
int rotation = getActivity().getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = null;
try {
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
takenPictureBytes = bytes;
Log.d(TAG, "takenPictureBytes length - " + takenPictureBytes.length);
} catch (Exception e) {
Log.d(TAG, " onImageAvailable exception ");
e.printStackTrace();
} finally {
if (image != null) {
Log.d(TAG, " image closing");
image.close();
}
}
}
};
reader.setOnImageAvailableListener(readerListener, mBackgroundHandler);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
Log.d(TAG, "takePicture - camera capture session");
switchPanels(true);
progress.setVisibility(View.GONE);
}
};
cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
session.capture(captureBuilder.build(), captureListener, mBackgroundHandler);
} catch (CameraAccessException e) {
Log.d(TAG, "takePicture - onConfigured- camera access exception ");
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
Log.d(TAG, "takePicture - onConfigureFailed");
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
Log.d(TAG, "takePicture - CameraAccessException ");
e.printStackTrace();
}
}
private void usePicture() {
Log.d(TAG, "usePicture - started ");
if(null != takenPictureBytes ){
try{
String imagePath = null;
Bitmap bitmap = BitmapFactory.decodeByteArray(takenPictureBytes, 0, takenPictureBytes.length);
int bitmapByteCountUsePic = byteSizeOf(bitmap);
Matrix matrix = new Matrix();
matrix.postRotate(90);
Bitmap rotatedBitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
if (isFrameMode) {
float withRatio = (float) rotatedBitmap.getWidth() / (float) textureViewWidth;
float heightRatio = (float) rotatedBitmap.getHeight() / (float) textureViewHeight;
Bitmap newImage = cutImage(rotatedBitmap, (int) (photoFrameView.getWidth() * withRatio), (int) (photoFrameView.getHeight() * heightRatio), withRatio);
int bitmapByteCountNewImage = byteSizeOf(newImage);
imagePath = saveBitmap(newImage);
} else {
imagePath = saveBitmap(rotatedBitmap);
}
TakePhotoFragment.TakePhotoFragmentEvent takePhotoFragmentEvent = new TakePhotoFragment.TakePhotoFragmentEvent();
takePhotoFragmentEvent.setImagePath(imagePath);
// send rxjava
//pop backstack
RxBus.getInstance().post(takePhotoFragmentEvent);
getActivity().getSupportFragmentManager().popBackStack();
}catch (Exception e){
Log.d(TAG, "usePicture - exception ");
e.printStackTrace();
}
}else{
Log.d(TAG, "usePicture - takenPictureBytes is null");
DialogUtil.showErrorSnackBar(getView(), R.string.retake_photo );
}
}
public Bitmap cutImage(final Bitmap bitmap, final int pixepWidth, final int pixelsHeight, float widthRatio) {
int bitmapByteCountCutImage = byteSizeOf(bitmap);
Bitmap output = createBitmap(pixepWidth, pixelsHeight, Bitmap.Config.ARGB_8888);
Bitmap original = bitmap;
final Paint paint = new Paint();
Canvas canvas = new Canvas(output);
int padding = (int) ((float) getResources().getDimensionPixelSize(R.dimen.double_padding) * widthRatio);
Rect rect = new Rect(padding, (original.getHeight() - pixelsHeight) / 2, padding + pixepWidth, original.getHeight() - (original.getHeight() - pixelsHeight) / 2);
final RectF cutedRect = new RectF(0, 0, pixepWidth, pixelsHeight);
paint.setAntiAlias(true);
canvas.drawARGB(0, 0, 0, 0);
canvas.drawBitmap(original, rect, cutedRect, paint);
return output;
}
private String saveBitmap(Bitmap bitmap) {
File pictureFileDir = getDir();
if (!pictureFileDir.exists() && !pictureFileDir.mkdirs()) {
Toast.makeText(getActivity(), "Can't create directory to save image.", Toast.LENGTH_LONG).show();
return null;
}
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmssSSS"); // MM = month, HH = 24-hour clock; the original pattern reused minutes for the month
String date = dateFormat.format(new Date());
String photoFile = "Picture_" + date + ".jpg";
String filename = pictureFileDir.getPath() + File.separator + photoFile;
File pictureFile = new File(filename);
try {
ByteArrayOutputStream stream = new ByteArrayOutputStream();
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, stream);
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(stream.toByteArray());
fos.close();
return pictureFile.getAbsolutePath();
} catch (Exception error) {
Log.d(TAG, "File" + filename + "not saved: " + error.getMessage());
}
return null;
}
You are changing the bitmap size/resolution in this code:
float withRatio = (float) rotatedBitmap.getWidth() / (float) textureViewWidth;
float heightRatio = (float) rotatedBitmap.getHeight() / (float) textureViewHeight;
Bitmap newImage = cutImage(rotatedBitmap, (int) (photoFrameView.getWidth() * withRatio), (int) (photoFrameView.getHeight() * heightRatio), withRatio);
int bitmapByteCountNewImage = byteSizeOf(newImage);
imagePath = saveBitmap(newImage);
Put in a breakpoint and see what the new heightRatio and widthRatio are, and what photoFrameView.getWidth() * withRatio comes out to. I think you will find it is small compared to the original image. I'm not sure why you are calculating the ratios with textureViewWidth/Height; you shouldn't have to do that. Whatever you are displaying the image in should be able to 'fill' without changing the size of the underlying bitmap and thus losing resolution.
You might check out this method:
rawBitmap = ((BitmapDrawable)imageToLoad.getDrawable()).getBitmap();
theBitmap = Bitmap.createScaledBitmap(rawBitmap, 285, 313, false);
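Another place resolution is likely being lost is where the still-capture ImageReader is created with the TextureView's dimensions (ImageReader.newInstance(textureViewWidth, textureViewHeight, ...)), which caps the capture at preview size. A minimal sketch, assuming the CameraCharacteristics of the opened camera are at hand and using a CompareSizesByArea comparator like the one from the camera2 samples (also shown later on this page), of requesting the sensor's largest JPEG size instead:
// Sketch (not the asker's code): size the ImageReader from the sensor's JPEG
// output sizes rather than the TextureView, so the saved picture keeps full resolution.
StreamConfigurationMap map =
        characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size[] jpegSizes = map.getOutputSizes(ImageFormat.JPEG);
Size largest = Collections.max(Arrays.asList(jpegSizes), new CompareSizesByArea());
ImageReader reader = ImageReader.newInstance(
        largest.getWidth(), largest.getHeight(), ImageFormat.JPEG, 1);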

Android camera-2 API Recording Preview was squished in Portrait Recording in Samsung Galaxy S8

I am facing a squished preview on the Samsung S8. I'm using the camera2 API for recording in my project, with support for only the 1920x1080 resolution. The recording quality is good, but the preview is not correct on the S8. I tried this solution, Samsung Galaxy S8 full screen mode, but it doesn't help with my issue.
Please help and thanks in advance.
Here is my code which implemented for camera:
/*To maintain the aspect ratio 16:9 for record video*/
private Size chooseVideoSize(Size[] choices) {
for (Size size : choices) {
int orientation = getActivityContext.getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
if((size.getWidth()/16) == (size.getHeight()/9) && size.getWidth() <=3840 ) {
return size;
}
} else {
if((size.getWidth()/16) == (size.getHeight()/9) && (size.getWidth() <=1280 ) ) {
return size;
}
}
}
return choices[choices.length - 1];
}
/*
* Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
* width and height are at least as large as the respective requested values, and whose aspect
* ratio matches with the specified value.
*/
private Size chooseOptimalSize(Size[] choices, int width, int height, Size aspectRatio) {
List<Size> bigEnough = new ArrayList<>();
int w = aspectRatio.getWidth();
int h = aspectRatio.getHeight();
double ratio = (double) h / w;
for (Size size : choices) {
int orientation = getActivityContext.getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
if((size.getWidth()/16) == (size.getHeight()/9) && size.getWidth() <=3840 ) {
return size;
}
} else {
if((size.getWidth()/16) == (size.getHeight()/9) && (size.getWidth() <=1280 ) ) {
return size;
}
}
}
// Pick the smallest of those, assuming we found any
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizesByArea());
} else {
return choices[0];
}
}
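// Editor's note (illustration, not part of the question): the loop above returns
// the first size that passes a 16:9 check, so bigEnough is never filled and the
// javadoc does not match the behaviour. A sketch of the filtering the comment
// describes, using the CompareSizesByArea comparator defined below, would be:
private Size chooseSmallestBigEnough(Size[] choices, int width, int height, Size aspectRatio) {
    List<Size> bigEnough = new ArrayList<>();
    int w = aspectRatio.getWidth();
    int h = aspectRatio.getHeight();
    for (Size option : choices) {
        if (option.getHeight() == option.getWidth() * h / w
                && option.getWidth() >= width && option.getHeight() >= height) {
            bigEnough.add(option);
        }
    }
    if (bigEnough.size() > 0) {
        return Collections.min(bigEnough, new CompareSizesByArea());
    }
    return choices[0]; // fall back if nothing matches
}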
/*
* Compares two {@code Size}s based on their areas.
*/
static class CompareSizesByArea implements Comparator<Size> {
@Override
public int compare(Size lhs, Size rhs) {
// We cast here to ensure the multiplications won't overflow
return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
(long) rhs.getWidth() * rhs.getHeight());
}
}
/*
* Starts a background thread and its {@link Handler}.
*/
private void startBackgroundThread() {
try {
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}catch (Exception ex){ex.printStackTrace();}
}
/*
* Stops the background thread and its {@link Handler}.
*/
private void stopBackgroundThread() {
if(mBackgroundThread!=null){
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}catch (Exception e) {
e.printStackTrace();
}
}
}
private void openCamera(int width, int height) {
CameraManager manager = (CameraManager) getActivityContext.getSystemService(Context.CAMERA_SERVICE);
try {
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
String mCameraId = manager.getCameraIdList()[cameraId];
// Choose the sizes for camera preview and video recording
characteristics = manager.getCameraCharacteristics(mCameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
try {
mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
maximumZoomLevel = characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
if (map == null) {
throw new RuntimeException("Cannot get available preview/video sizes");
}
mVideoSize = chooseVideoSize(map.getOutputSizes(MediaRecorder.class));
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), width, height, mVideoSize);
int orientation = getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
} else {
mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
}
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
configureTransform(width, height);
}
boolean result = checkAccessCameraPermission();
if (result) {
manager.openCamera(mCameraId, mStateCallback, null);
}
}catch (Exception ex){ex.printStackTrace();}finally {
map=null;
Runtime.getRuntime().gc();
}
} catch (CameraAccessException e) {
Toast.makeText(getActivityContext, "Cannot access the camera.", Toast.LENGTH_SHORT).show();
e.printStackTrace();
} catch (NullPointerException e) {
e.printStackTrace();
} catch (InterruptedException e) {
e.printStackTrace();
throw new RuntimeException("Interrupted while trying to lock camera opening.");
}
}
/*
* Start the camera preview.
*/
private void startPreview() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
return;
}
try {
closePreviewSession();
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
Surface previewSurface = new Surface(texture);
mPreviewBuilder.addTarget(previewSurface);
mCameraDevice.createCaptureSession(Collections.singletonList(previewSurface),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
mPreviewSession = session;
updatePreview();
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
Toast.makeText(getActivityContext, "Failed", Toast.LENGTH_SHORT).show();
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
/*
* Update the camera preview. {@link #startPreview()} needs to be called in advance.
*/
private void updatePreview() {
if (null == mCameraDevice) {
return;
}
try {
setUpCaptureRequestBuilder(mPreviewBuilder);
HandlerThread thread = new HandlerThread("CameraPreview");
try {
thread.start();
if(zoom!=null){
try{
mPreviewBuilder.set(CaptureRequest.SCALER_CROP_REGION, zoom);
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), mPreviewSessionCallback, mBackgroundHandler);
}catch (Exception ex){ex.printStackTrace();}
}else{
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, mBackgroundHandler);
}
}catch (CameraAccessException e) {
e.printStackTrace();
}catch (Exception ex){ex.printStackTrace();}finally {
thread=null;
}
} catch (Exception e) {
e.printStackTrace();
}
}
private void setUpCaptureRequestBuilder(CaptureRequest.Builder builder) {
builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
}
/*
* Configures the necessary {@link android.graphics.Matrix} transformation to `mTextureView`.
* This method should not be called until the camera preview size is determined in
* openCamera and the size of `mTextureView` is fixed.
*/
private void configureTransform(int viewWidth, int viewHeight) {
if (null == mTextureView || null == mPreviewSize) {
return;
}
int rotation = getActivityContext.getWindowManager().getDefaultDisplay().getRotation();
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale = Math.max((float) viewHeight / mPreviewSize.getHeight(),(float) viewWidth / mPreviewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
}else if (Surface.ROTATION_0 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale=Math.max((float) viewWidth / mPreviewSize.getWidth(), (float) viewHeight / mPreviewSize.getHeight());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(0, centerX, centerY);
}else if(Surface.ROTATION_180== rotation){
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale=Math.max((float) viewWidth / mPreviewSize.getWidth(), (float) viewHeight / mPreviewSize.getHeight());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(0, centerX, centerY);
}
try {
mTextureView.setTransform(matrix);
}catch (Exception ex){ex.printStackTrace();}finally {
bufferRect=null;
viewRect=null;
matrix=null;
}
}
Sorry, it is a late reply, but it may help. On Android, CameraX helps avoid squeezing the screen; you choose the target aspect ratio with a helper such as the aspectRatio() method below.
Here is my code,
/**
* Detecting the most suitable aspect ratio for current dimensions
*
* @param width - preview width
* @param height - preview height
* @return suitable aspect ratio
*/
private fun aspectRatio(width: Int, height: Int): Int {
val previewRatio = max(width, height).toDouble() / min(width, height)
if (abs(previewRatio - RATIO_4_3_VALUE) <= abs(previewRatio - RATIO_16_9_VALUE)) {
return AspectRatio.RATIO_4_3
}
return AspectRatio.RATIO_16_9
}
I recommend migrating to CameraX; it is the best approach.
Credit goes to the author of this repository:
CameraX-Demo
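For completeness, a minimal Java sketch (an assumption based on the standard CameraX API, not taken from the linked demo) of feeding the chosen ratio into a Preview use case bound to a PreviewView; previewView, the aspectRatio() helper above (or a Java port of it), and the activity acting as LifecycleOwner are assumed:
// Sketch: bind a CameraX Preview at the chosen aspect ratio so CameraX picks a
// matching resolution and the PreviewView is not stretched.
ListenableFuture<ProcessCameraProvider> providerFuture =
        ProcessCameraProvider.getInstance(this);
providerFuture.addListener(() -> {
    try {
        ProcessCameraProvider cameraProvider = providerFuture.get();
        int ratio = aspectRatio(previewView.getWidth(), previewView.getHeight());
        Preview preview = new Preview.Builder()
                .setTargetAspectRatio(ratio) // AspectRatio.RATIO_4_3 or RATIO_16_9
                .build();
        preview.setSurfaceProvider(previewView.getSurfaceProvider());
        cameraProvider.unbindAll();
        cameraProvider.bindToLifecycle(this, CameraSelector.DEFAULT_BACK_CAMERA, preview);
    } catch (Exception e) {
        e.printStackTrace();
    }
}, ContextCompat.getMainExecutor(this));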

Samsung S9(18:9) camera preview stretched

I am using the Android Camera API and it works well on 16:9 devices in both portrait and landscape modes. But on the Samsung S9, an 18:9 device, the preview looks stretched in landscape mode. On the Samsung S9 I get the following supported preview sizes: 1920x1080, 1440x1080, 1088x1088, 1280x720, 1056x704, 1024x768, 960x720, 800x450, 720x720, 720x480, 640x480, 352x288, 320x240, 256x144, 176x144. So the optimal preview size is 1920x1080, but the actual resolution of the device is 2220x1080, which is why it looks stretched. But I need the preview to fill the screen. How does the default camera app show its preview full screen?
@SuppressLint("ClickableViewAccessibility")
@SuppressWarnings("deprecation")
public CameraPreview(Context context, Camera.PreviewCallback previewCallback) {
super(context);
this.previewCallback = previewCallback;
mContext = context;
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void setCameraDisplayOrientation(Context activity,
int cameraId, Camera camera) {
Camera.CameraInfo info =
new Camera.CameraInfo();
Camera.getCameraInfo(cameraId, info);
int rotation = ((AppCompatActivity) activity).getWindowManager().getDefaultDisplay()
.getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
}
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (info.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate the mirror
} else { // back-facing
result = (info.orientation - degrees + 360) % 360;
}
mDisplayOrientation = result;
Log.d(TAG, "setCameraDisplayOrientation: "+mDisplayOrientation);
camera.setDisplayOrientation(result);
}
public void takePhoto(final PictureCallback pCalback) {
mCamera.takePicture(null, null, pCalback);
}
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, acquire the camera and tell it where
// to draw.
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
mCamera = null;
//previewCount = 0;
}
try {
mCamera = Camera.open();
//setCameraDisplayOrientation(mContext, 0, mCamera);
/*mParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
DisplayMetrics displayMetrics = mContext.getResources().getDisplayMetrics();
int screenWidth = displayMetrics.widthPixels;
int screenHeight = displayMetrics.heightPixels;
optimalPreviewSize = getBestAspectPreviewSize(mParameters.getSupportedPreviewSizes(), screenWidth, screenHeight);//Bug Fix for Samsung A8
mParameters.setPreviewSize(optimalPreviewSize.width, optimalPreviewSize.height);
mParameters.setPictureSize(optimalPreviewSize.width, optimalPreviewSize.height);
mParameters.setPreviewFpsRange(30000, 30000);
mCamera.setParameters(mParameters);*/
/*mCamera.setPreviewDisplay(holder);
mCamera.setPreviewCallback(previewCallback);*/
mCamera.setPreviewDisplay(holder);
} catch (IOException exception) {
mCamera.release();
mCamera = null;
//previewCount = 0;
} catch (Exception exception) {
mCamera = null;
//previewCount = 0;
}
if (mCameraPreviewListener != null) {
mCameraPreviewListener.onCameraSurfaceCreated();
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
mCamera = null;
//previewCount = 0;
}
if (mCameraPreviewListener != null) {
mCameraPreviewListener.onCameraSurfaceDestroyed();
}
}
public void stopCamera() {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
mCamera = null;
//previewCount = 0;
}
}
@SuppressWarnings("null")
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
try {
// Now that the size is known, set up the camera parameters and begin
// the preview.
mParameters = mCamera.getParameters();
Log.d("CameraFix", "parameters -> " + mParameters.flatten());
setCameraDisplayOrientation(mContext, 0, mCamera);
mParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
//Size optimalPreviewSize = getOptimalPreviewSize(mParameters.getSupportedPreviewSizes(), getWidth(), getHeight());
DisplayMetrics displayMetrics = mContext.getResources().getDisplayMetrics();
int screenWidth = displayMetrics.widthPixels;
int screenHeight = displayMetrics.heightPixels;
//Size optimalPreviewSize = getOptimalPreviewSize(mParameters.getSupportedPreviewSizes(), screenWidth, screenHeight, getHeight());
mSupportedPreviewSizes = mParameters.getSupportedPreviewSizes();
optimalPreviewSize = getBestAspectPreviewSize(mParameters.getSupportedPreviewSizes(), screenWidth, screenHeight);//Bug Fix for Samsung A8
Log.d("CameraFix", "optimalPreviewSize.width -> " + optimalPreviewSize.width);
Log.d("CameraFix", "optimalPreviewSize.height -> " + optimalPreviewSize.height);
mParameters.setPreviewSize(optimalPreviewSize.width, optimalPreviewSize.height);
mParameters.setPictureSize(optimalPreviewSize.width, optimalPreviewSize.height);
mParameters.setPreviewFpsRange(30000, 30000);
/*if (mDisplayOrientation == 0 || mDisplayOrientation == 180) {
setLayoutParams(new FrameLayout.LayoutParams(optimalPreviewSize.width, optimalPreviewSize.height,Gravity.CENTER));
}*/
Log.d("CameraFix", "setPreviewFpsRange");
mCamera.setParameters(mParameters);
mCamera.setPreviewDisplay(holder);
//SurfaceTexture st = new SurfaceTexture(10);
//mCamera.setPreviewTexture(st);
mCamera.setPreviewCallback(previewCallback);
mCamera.startPreview();
Log.d("CameraFix", "start preview");
if (mCameraPreviewListener != null) {
mCameraPreviewListener.onCameraSurfaceChanged();
}
} catch (Exception e) {
e.printStackTrace();
Log.d("CameraFix", e.toString());
}
}
public void toggleFlash(boolean flashModeOn) {
if (mContext.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FLASH)) {
Parameters parameters = mCamera.getParameters();
if (flashModeOn) {
//parameters.setFlashMode(Parameters.FLASH_MODE_TORCH);
parameters.setFlashMode(Parameters.FLASH_MODE_ON);
mCamera.setParameters(parameters);
mCamera.startPreview();
//Toast.makeText(mContext, R.string.flash_mode_on, Toast.LENGTH_SHORT).show();
} else {
parameters.setFlashMode(Parameters.FLASH_MODE_OFF);
mCamera.setParameters(parameters);
//Toast.makeText(mContext, R.string.flash_mode_off, Toast.LENGTH_SHORT).show();
}
} else {
Toast.makeText(mContext, R.string.flash_not_available, Toast.LENGTH_SHORT).show();
}
}
/**
* Source for this solution - https://stackoverflow.com/questions/21354313/camera-preview-quality-in-android-is-poor/21354442#21354442
*
* @param supportedPreviewSizes
* @param screenWidth
* @param screenHeight
* @return
*/
private Size getBestAspectPreviewSize(List<Size> supportedPreviewSizes, int screenWidth, int screenHeight) {
double targetRatio = (double) screenWidth / screenHeight;
Camera.Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
for (int i = 0; i < supportedPreviewSizes.size(); i++) {
Size size = supportedPreviewSizes.get(i);
Log.d(TAG, "getBestAspectPreviewSize: supportedPreviewSizes -> "+size.width +"X"+size.height);
}
Log.d(TAG, "getBestAspectPreviewSize: supportedPreviewSizes -> "+supportedPreviewSizes.toString());
Log.d(TAG, "getBestAspectPreviewSize: mDisplayOrientation -> "+mDisplayOrientation);
if (mDisplayOrientation == 90 || mDisplayOrientation == 270) {
Log.d(TAG, "getBestAspectPreviewSize: inside 90 - 270 ");
targetRatio = (double) screenHeight / screenWidth;
}
Log.d(TAG, "getBestAspectPreviewSize: targetRatio -> "+targetRatio);
Collections.sort(supportedPreviewSizes,
Collections.reverseOrder(new SizeComparator()));
for (Size size : supportedPreviewSizes) {
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) < minDiff) {
optimalSize = size;
minDiff = Math.abs(ratio - targetRatio);
}
if (minDiff < 0.0d) {
break;
}
}
return (optimalSize);
/*if (mDisplayOrientation == 0 || mDisplayOrientation == 180) {
if (optimalSize != null) {
return mCamera.new Size(optimalSize.height, optimalSize.width);
} else {
return null;
}
}
else{
return (optimalSize);
}*/
//return mCamera.new Size(2220,1080);
}
public int getDisplayOrientation() {
return mDisplayOrientation;
}
public void setDisplayOrientation(int displayOrientation) {
this.mDisplayOrientation = displayOrientation;
}
public Parameters getCameraParameters() {
return mCamera.getParameters();
}
public void setCameraPreviewListener(CameraPreviewListener cameraPreviewListener) {
mCameraPreviewListener = cameraPreviewListener;
}
public interface CameraPreviewListener {
void onCameraSurfaceCreated();
void onCameraSurfaceChanged();
void onCameraSurfaceDestroyed();
void onCameraPreviewStarted();
}
private static class SizeComparator implements
Comparator<Size> {
@Override
public int compare(Size lhs, Size rhs) {
int left = lhs.width * lhs.height;
int right = rhs.width * rhs.height;
if (left < right) {
return (-1);
} else if (left > right) {
return (1);
}
return (0);
}
}
You should check the displayable area of the default camera app again. I don't think it can display fully without stretching at that resolution; it may have a black area, toolbar, status bar...
There is nothing wrong with your implementation. We have to find the best supported preview size compared with the surface view in which you want to display it. In this case, you should center the surface view (1920x1080) and add black padding areas at the top and bottom.
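A minimal sketch of that idea (an assumption, not the answerer's code): size the preview view to the 16:9 preview and center it in a FrameLayout parent, so an 18:9 screen shows black bars instead of a stretched image. previewSize is the Camera.Size chosen above (e.g. 1920x1080) and the screen is assumed to be portrait 1080x2220:
// Sketch: letterbox the preview instead of stretching it to fill the screen.
// Uses android.hardware.Camera.Size, android.view.Gravity, android.widget.FrameLayout.
private void letterboxPreview(View previewView, int screenWidth, int screenHeight,
                              Camera.Size previewSize) {
    // Camera sizes are landscape, so in portrait the displayed frame is
    // previewSize.height wide and previewSize.width tall.
    int viewWidth = screenWidth;
    int viewHeight = screenWidth * previewSize.width / previewSize.height; // 1080 * 1920 / 1080 = 1920
    if (viewHeight > screenHeight) {
        // Screen is shorter than the preview; fit the height instead.
        viewHeight = screenHeight;
        viewWidth = screenHeight * previewSize.height / previewSize.width;
    }
    previewView.setLayoutParams(
            new FrameLayout.LayoutParams(viewWidth, viewHeight, Gravity.CENTER));
}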
Please take a look at my answer; I hope it helps. I resolved the stretch issue with the following code (method names might differ). I am sharing my implementation because I know how hard it is to implement the camera on Android, so please don't hesitate to go through the following sections.
Invoke the loadCamera method from the button action:
private void loadCamera() {
if (CommonUtils.deviceHasCamera(getActivityContext)) {
startBackgroundThread();
mCameraTimeOut=(isPermissionGranted?2500:5000);
if (mTextureView.isAvailable()) {
openCamera(mTextureView.getWidth(), mTextureView.getHeight());
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}else{
ShowToastUtils.INSTANCE.showCustomToast(getActivityContext, getString(R.string.msg_no_camera));
}
}
Initially, the SurfaceTextureListener is invoked for the camera:
private TextureView.SurfaceTextureListener mSurfaceTextureListener
= new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture,
int width, int height) {
mCameraTimeOut=(isPermissionGranted?2500:5000);
Log.e(TAG1, "chooseOptimalSize"+"-SurfaceTextureListener ---=>Width---=>"+width);
Log.e(TAG1, "chooseOptimalSize"+"-SurfaceTextureListener ---=>Height---=>"+height);
openCamera(width, height);
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture,
int width, int height) {
configureTransform(width, height);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {}
};
To Choose Optimal Preview Size for Texture
//Samsung-S6-choices[0]
//Samsung-S7-edge-choices[6]
//OnePlus-5T-choices[15]
/*Following is used for Camera Preview in TextureView, based on device camera resolution*/
/*
* Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
* width and height are at least as large as the respective requested values, and whose aspect
* ratio matches with the specified value.
*
* @param choices The list of sizes that the camera supports for the intended output class
* @param width The minimum desired width
* @param height The minimum desired height
* @param aspectRatio The aspect ratio
* @return The optimal {@code Size}, or an arbitrary one if none were big enough
*/
private Size chooseOptimalSize(Size[] choices, int width, int height, Size aspectRatio) {
// Collect the supported resolutions that are at least as big as the preview Surface
int loopCounter=0;
Log.e(TAG1, "Screen-->Width x Height="+screenWidth+" x "+screenHeight);
for (Size size : choices) {
Log.e(TAG1, "chooseOptimalSize:"+size);
}
for (Size size : choices) {
int orientation = getActivityContext.getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
if((size.getWidth()/16) == (size.getHeight()/9) && size.getWidth() <=7680 ) {//8K UHDTV Super Hi-Vision
Log.e(TAG1, "chooseOptimalSize:"+size.getWidth()+"x"+size.getHeight()+"--LoopPosition---==>"+loopCounter);
return size;
}
} else {
Log.e(TAG1, "chooseOptimalSize:--given--"+size);
if((size.getWidth()/16) == (size.getHeight()/9) && ((size.getWidth() <=1280)||(size.getHeight()<=1920))) {
mCameraRatio=RATIO_16_9;
Log.e(TAG1, "chooseOptimalSize:"+size.getWidth()+"x"+size.getHeight()+"-16:9"+"--LoopPosition---==>"+loopCounter);
return size;
}else if((size.getWidth()/18) == (size.getHeight()/9) && ((size.getWidth() <=2160)||(size.getHeight()<=3840))) {
mCameraRatio=RATIO_18_9;
Log.e(TAG1, "chooseOptimalSize:"+size.getWidth()+"x"+size.getHeight()+"-18:9"+"--LoopPosition---==>"+loopCounter);
return size;
}else if((size.getWidth()/18.5) == (size.getHeight()/9) && ((size.getWidth() <=2160)||(size.getHeight()<=3840))) {
mCameraRatio=RATIO_18_9;
Log.e(TAG1, "chooseOptimalSize:"+size.getWidth()+"x"+size.getHeight()+"-18.5:9"+"--LoopPosition---==>"+loopCounter);
return size;
}else if((width/19) == (height/9) && ((width <=2208)||(height<=3216))) {
mCameraRatio=RATIO_19_9;
Log.e(TAG1, "chooseOptimalSize:"+size.getWidth()+"x"+size.getHeight()+"-19:9"+"--LoopPosition---==>"+loopCounter);
return size;
}else if((size.getWidth()/19.5) == (size.getHeight()/9) && ((size.getWidth() <=3840)||(size.getHeight()<=2160))) {
mCameraRatio=RATIO_19_9;
Log.e(TAG1, "chooseOptimalSize:"+size.getWidth()+"x"+size.getHeight()+"-19.5:9"+"--LoopPosition---==>"+loopCounter);
return size;
}else{
Log.e(TAG1, "chooseOptimalSize"+" not proper aspect resolution");
}
}
loopCounter++;
}
// Fallback so every path returns a value (the posted snippet was missing this return).
return choices[choices.length - 1];
}
To Open Camera
private void openCamera(int width, int height) {
CameraManager manager = (CameraManager) getActivityContext.getSystemService(Context.CAMERA_SERVICE);
try {
Log.e(TAG, "tryAcquire");
if (!mCameraOpenCloseLock.tryAcquire(mCameraTimeOut, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
String mCameraId = manager.getCameraIdList()[cameraId];
// Choose the sizes for camera preview and video recording
characteristics = manager.getCameraCharacteristics(mCameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
try {
mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
maximumZoomLevel = characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
if (map == null) {
throw new RuntimeException("Cannot get available preview/video sizes");
}
mVideoSize = chooseVideoSize(map.getOutputSizes(MediaRecorder.class));
/*This Line will configure the Texture size*/
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), width, height, mVideoSize);
int orientation = getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
Log.e(TAG1, "Width" + mPreviewSize.getWidth() + "X Height" + mPreviewSize.getHeight());
mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
} else {
Log.e(TAG1, "Width" + mPreviewSize.getHeight() + "X Height" + mPreviewSize.getWidth());
mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
//S10 preview Size
/* mTextureView.setAspectRatio(1080, 2280);*/
//mTextureView.setAspectRatio(2208, 2944);
}
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
configureTransform(width, height);
}
if (isPermissionGranted) {
manager.openCamera(mCameraId, mStateCallback, null);
}
}catch (Exception ex){ex.printStackTrace();}finally {
map=null;
Runtime.getRuntime().gc();
}
} catch (CameraAccessException e) {
Toast.makeText(getActivityContext, "Cannot access the camera.", Toast.LENGTH_SHORT).show();
//getActivityContext.finish();
e.printStackTrace();
} catch (NullPointerException e) {
e.printStackTrace();
} catch (InterruptedException e) {
e.printStackTrace();
throw new RuntimeException("Interrupted while trying to lock camera opening.");
}
}
The configureTransform method is used for orientation handling:
/*
* Configures the necessary {@link android.graphics.Matrix} transformation to `mTextureView`.
* This method should not be called until the camera preview size is determined in
* openCamera and the size of `mTextureView` is fixed.
*
* @param viewWidth The width of `mTextureView`
* @param viewHeight The height of `mTextureView`
*/
private void configureTransform(int viewWidth, int viewHeight) {
if (null == mTextureView || null == mPreviewSize) {
return;
}
int rotation = getActivityContext.getWindowManager().getDefaultDisplay().getRotation();
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale = Math.max((float) viewHeight / mPreviewSize.getHeight(),(float) viewWidth / mPreviewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
}else if (Surface.ROTATION_0 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale=Math.max((float) viewWidth / mPreviewSize.getWidth(), (float) viewHeight / mPreviewSize.getHeight());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(0, centerX, centerY);
}else if(Surface.ROTATION_180== rotation){
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale=Math.max((float) viewWidth / mPreviewSize.getWidth(), (float) viewHeight / mPreviewSize.getHeight());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(0, centerX, centerY);
}
try {
mTextureView.setTransform(matrix);
}catch (Exception ex){ex.printStackTrace();}finally {
bufferRect=null;
viewRect=null;
matrix=null;
}
}
Finally, to start the preview:
private void startPreview() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
return;
}
try {
closePreviewSession();
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
//texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
Surface previewSurface = new Surface(texture);
mPreviewBuilder.addTarget(previewSurface);
mCameraDevice.createCaptureSession(Collections.singletonList(previewSurface),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
mPreviewSession = session;
updatePreview();
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
Toast.makeText(getActivityContext, "Failed", Toast.LENGTH_SHORT).show();
}
}, mBackgroundHandler);
//previewSurface=null;
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
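startPreview() above calls closePreviewSession(), which is not shown in the answer. A minimal sketch of what it typically does (an assumption, using the mPreviewSession field from the code above):
// Assumed helper: close the current capture session before creating a new one.
private void closePreviewSession() {
    if (mPreviewSession != null) {
        mPreviewSession.close();
        mPreviewSession = null;
    }
}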

JavaCamera2View Change Preview Resolution

Using the OpenCV Android tutorial, I would like to change the preview resolution. Therefore I commented out part of the calcPreviewSize function and set the resolution to 640x480, which is supported and included in the camera characteristics. The change of preview resolution is successful, but the app often crashes, even though the line it points to does not contain any change from the official tutorial.
E/JavaCamera2View: createCaptureSession failed
java.lang.IllegalStateException: Session has been closed; further changes are illegal.
at android.hardware.camera2.impl.CameraCaptureSessionImpl.checkNotClosed(CameraCaptureSessionImpl.java:607)
at android.hardware.camera2.impl.CameraCaptureSessionImpl.setRepeatingRequest(CameraCaptureSessionImpl.java:227)
at org.opencv.android.JavaCamera2View$3.onConfigured(JavaCamera2View.java:220)
at java.lang.reflect.Method.invoke(Native Method)
public class JavaCamera2View extends CameraBridgeViewBase {
private static final String LOGTAG = "JavaCamera2View";
private ImageReader mImageReader;
private int mPreviewFormat = ImageFormat.YUV_420_888;
public int widthP = 640;
public int heightP = 480;
public CameraDevice mCameraDevice;
private CameraCaptureSession mCaptureSession;
private CaptureRequest.Builder mPreviewRequestBuilder;
private String mCameraID;
private android.util.Size mPreviewSize = new android.util.Size(-1, -1);
private HandlerThread mBackgroundThread;
private Handler mBackgroundHandler;
public JavaCamera2View(Context context, int cameraId) {
super(context, cameraId);
}
public JavaCamera2View(Context context, AttributeSet attrs) {
super(context, attrs);
}
private void startBackgroundThread() {
Log.i(LOGTAG, "startBackgroundThread");
stopBackgroundThread();
mBackgroundThread = new HandlerThread("OpenCVCameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
private void stopBackgroundThread() {
Log.i(LOGTAG, "stopBackgroundThread");
if (mBackgroundThread == null)
return;
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
Log.e(LOGTAG, "stopBackgroundThread", e);
}
}
protected boolean initializeCamera() {
Log.i(LOGTAG, "initializeCamera");
CameraManager manager = (CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE);
try {
String camList[] = manager.getCameraIdList();
if (camList.length == 0) {
Log.e(LOGTAG, "Error: camera isn't detected.");
return false;
}
if (mCameraIndex == CameraBridgeViewBase.CAMERA_ID_ANY) {
mCameraID = camList[0];
} else {
for (String cameraID : camList) {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraID);
if ((mCameraIndex == CameraBridgeViewBase.CAMERA_ID_BACK &&
characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_BACK) ||
(mCameraIndex == CameraBridgeViewBase.CAMERA_ID_FRONT &&
characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT)
) {
mCameraID = cameraID;
break;
}
}
}
if (mCameraID != null) {
//CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraID);
//characteristics = manager.
Log.i(LOGTAG, "Opening camera: " + mCameraID);
manager.openCamera(mCameraID, mStateCallback, mBackgroundHandler);
}
return true;
} catch (CameraAccessException e) {
Log.e(LOGTAG, "OpenCamera - Camera Access Exception", e);
} catch (IllegalArgumentException e) {
Log.e(LOGTAG, "OpenCamera - Illegal Argument Exception", e);
} catch (SecurityException e) {
Log.e(LOGTAG, "OpenCamera - Security Exception", e);
}
return false;
}
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice cameraDevice) {
mCameraDevice = cameraDevice;
createCameraPreviewSession();
}
@Override
public void onDisconnected(CameraDevice cameraDevice) {
cameraDevice.close();
mCameraDevice = null;
}
@Override
public void onError(CameraDevice cameraDevice, int error) {
cameraDevice.close();
mCameraDevice = null;
}
};
private void createCameraPreviewSession() {
final int w = widthP;//mPreviewSize.getWidth(), h = mPreviewSize.getHeight();
final int h = heightP;
Log.i(LOGTAG, "createCameraPreviewSession(" + w + "x" + h + ")");
if (w < 0 || h < 0)
return;
try {
if (null == mCameraDevice) {
Log.e(LOGTAG, "createCameraPreviewSession: camera isn't opened");
return;
}
if (null != mCaptureSession) {
Log.e(LOGTAG, "createCameraPreviewSession: mCaptureSession is already started");
return;
}
mImageReader = ImageReader.newInstance(w, h, mPreviewFormat, 2);
mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireLatestImage();
if (image == null)
return;
// sanity checks - 3 planes
Image.Plane[] planes = image.getPlanes();
assert (planes.length == 3);
assert (image.getFormat() == mPreviewFormat);
// see also https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888
// Y plane (0) non-interleaved => stride == 1; U/V plane interleaved => stride == 2
assert (planes[0].getPixelStride() == 1);
assert (planes[1].getPixelStride() == 2);
assert (planes[2].getPixelStride() == 2);
ByteBuffer y_plane = planes[0].getBuffer();
ByteBuffer uv_plane = planes[1].getBuffer();
Mat y_mat = new Mat(h, w, CvType.CV_8UC1, y_plane);
Mat uv_mat = new Mat(h / 2, w / 2, CvType.CV_8UC2, uv_plane);
JavaCamera2Frame tempFrame = new JavaCamera2Frame(y_mat, uv_mat, w, h);
deliverAndDrawFrame(tempFrame);
tempFrame.release();
image.close();
}
}, mBackgroundHandler);
Surface surface = mImageReader.getSurface();
mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.addTarget(surface);
mCameraDevice.createCaptureSession(Arrays.asList(surface),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
Log.i(LOGTAG, "createCaptureSession::onConfigured");
if (null == mCameraDevice) {
return; // camera is already closed
}
mCaptureSession = cameraCaptureSession;
try {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_OFF);
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
CaptureRequest.CONTROL_AE_MODE_OFF);
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<Integer>(30,30));
mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), null, mBackgroundHandler);
Log.i(LOGTAG, "CameraPreviewSession has been started");
} catch (Exception e) {
Log.e(LOGTAG, "createCaptureSession failed", e);
}
}
@Override
public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
Log.e(LOGTAG, "createCameraPreviewSession failed");
}
},
null
);
} catch (CameraAccessException e) {
Log.e(LOGTAG, "createCameraPreviewSession", e);
}
}
@Override
protected void disconnectCamera() {
Log.i(LOGTAG, "closeCamera");
try {
CameraDevice c = mCameraDevice;
mCameraDevice = null;
if (null != mCaptureSession) {
mCaptureSession.close();
mCaptureSession = null;
}
if (null != c) {
c.close();
}
if (null != mImageReader) {
mImageReader.close();
mImageReader = null;
}
} finally {
stopBackgroundThread();
}
}
boolean calcPreviewSize(final int width, final int height) {
Log.i(LOGTAG, "calcPreviewSize: " + width + "x" + height);
if (mCameraID == null) {
Log.e(LOGTAG, "Camera isn't initialized!");
return false;
}
CameraManager manager = (CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraID);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
int bestWidth = 0, bestHeight = 0;
float aspect = (float) width / height;
android.util.Size[] sizes = map.getOutputSizes(ImageReader.class);
bestWidth = sizes[0].getWidth();
bestHeight = sizes[0].getHeight();
for (android.util.Size sz : sizes) {
int w = sz.getWidth(), h = sz.getHeight();
Log.d(LOGTAG, "trying size: " + w + "x" + h);
if (width >= w && height >= h && bestWidth <= w && bestHeight <= h
&& Math.abs(aspect - (float) w / h) < 0.2) {
bestWidth = w;
bestHeight = h;
}
}
Log.i(LOGTAG, "best size: " + bestWidth + "x" + bestHeight);
assert(!(bestWidth == 0 || bestHeight == 0));
if (mPreviewSize.getWidth() == bestWidth && mPreviewSize.getHeight() == bestHeight)
return false;
else {
mPreviewSize = new android.util.Size(widthP, heightP);
return true;
}
} catch (CameraAccessException e) {
Log.e(LOGTAG, "calcPreviewSize - Camera Access Exception", e);
} catch (IllegalArgumentException e) {
Log.e(LOGTAG, "calcPreviewSize - Illegal Argument Exception", e);
} catch (SecurityException e) {
Log.e(LOGTAG, "calcPreviewSize - Security Exception", e);
}
return false;
}
@Override
protected boolean connectCamera(int width, int height) {
Log.i(LOGTAG, "setCameraPreviewSize(" + width + "x" + height + ")");
startBackgroundThread();
initializeCamera();
try {
boolean needReconfig = calcPreviewSize(width, height);
mFrameWidth = mPreviewSize.getWidth();
mFrameHeight = mPreviewSize.getHeight();
if ((getLayoutParams().width == LayoutParams.MATCH_PARENT) && (getLayoutParams().height == LayoutParams.MATCH_PARENT))
mScale = Math.min(((float)height)/mFrameHeight, ((float)width)/mFrameWidth);
else
mScale = 0;
AllocateCache();
if (needReconfig) {
if (null != mCaptureSession) {
Log.d(LOGTAG, "closing existing previewSession");
mCaptureSession.close();
mCaptureSession = null;
}
createCameraPreviewSession();
}
} catch (RuntimeException e) {
throw new RuntimeException("Interrupted while setCameraPreviewSize.", e);
}
return true;
}
private class JavaCamera2Frame implements CvCameraViewFrame {
@Override
public Mat gray() {
return mYuvFrameData.submat(0, mHeight, 0, mWidth);
}
@Override
public Mat rgba() {
if (mPreviewFormat == ImageFormat.NV21)
Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
else if (mPreviewFormat == ImageFormat.YV12)
Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4); // COLOR_YUV2RGBA_YV12 produces inverted colors
else if (mPreviewFormat == ImageFormat.YUV_420_888) {
assert (mUVFrameData != null);
Imgproc.cvtColorTwoPlane(mYuvFrameData, mUVFrameData, mRgba, Imgproc.COLOR_YUV420sp2BGRA);
// TODO: Here we had to change vom Imgproc.COLOR_YUV2RGBA_NV21 to Imgproc.COLOR_YUV420sp2BGRA to get the correct colors.
} else
throw new IllegalArgumentException("Preview Format can be NV21 or YV12");
return mRgba;
}
public JavaCamera2Frame(Mat Yuv420sp, int width, int height) {
super();
mWidth = width;
mHeight = height;
mYuvFrameData = Yuv420sp;
mUVFrameData = null;
mRgba = new Mat();
}
public JavaCamera2Frame(Mat Y, Mat UV, int width, int height) {
super();
mWidth = width;
mHeight = height;
mYuvFrameData = Y;
mUVFrameData = UV;
mRgba = new Mat();
}
public void release() {
mRgba.release();
}
private Mat mYuvFrameData;
private Mat mUVFrameData;
private Mat mRgba;
private int mWidth;
private int mHeight;
};
}
On the face of it, there may be a problem with the modified calcPreviewSize() method: it may return true even if the preview size should not change.
You will see
I/JavaCamera2View best size: WWWWxHHH
in your logcat, where WWWW is not 640 and HHH is not 480.
Changing the preview size involves closing and reopening the preview session, which takes time. The code that you use expects repeated calls to calcPreviewSize() to return false once the size has been applied.
A possible fix is to rewrite the method this way:
boolean calcPreviewSize(final int width, final int height) {
Log.i(LOGTAG, "calcPreviewSize: " + width + "x" + height);
if (mCameraID == null) {
Log.e(LOGTAG, "Camera isn't initialized!");
return false;
}
if (mPreviewSize.getWidth() == widthP && mPreviewSize.getHeight() == heightP) {
return false;
}
else {
mPreviewSize = new android.util.Size(widthP, heightP);
return true;
}
}

Error in initializing TextureView inside Service in Android

I am getting an error when initializing a TextureView inside a Service for the Camera2 API. I want to capture images with the Camera2 API using a Service. Below is my code:
Camera Service:
public class CameraService extends Service{
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
private static final int REQUEST_CAMERA_PERMISSION = 1;
private static final String FRAGMENT_DIALOG = "dialog";
int mPictureCounter = 0;
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
private static final String TAG = "Camera2BasicFragment";
private static final int STATE_PREVIEW = 0, STATE_WAITING_LOCK = 1, STATE_WAITING_PRECAPTURE = 2, STATE_WAITING_NON_PRECAPTURE = 3, STATE_PICTURE_TAKEN = 4, MAX_PREVIEW_WIDTH = 1920, MAX_PREVIEW_HEIGHT = 1080;
private final TextureView.SurfaceTextureListener mSurfaceTextureListener
= new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
openCamera(width, height);
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {
configureTransform(width, height);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) {
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture texture) {
}
};
private String mCameraId;
private AutoFitTextureView mTextureView;
private CameraCaptureSession mCaptureSession;
private CameraDevice mCameraDevice;
private Size mPreviewSize;
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice cameraDevice) {
mCameraOpenCloseLock.release();
mCameraDevice = cameraDevice;
createCameraPreviewSession();
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int error) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
Activity activity = (Activity) getApplicationContext();
if (null != activity) {
activity.finish();
}
}
};
private HandlerThread mBackgroundThread;
private Handler mBackgroundHandler;
private ImageReader mImageReader;
private File mFile;
private final ImageReader.OnImageAvailableListener mOnImageAvailableListener
= new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Calendar c = Calendar.getInstance();
mFile = new File(getApplicationContext().getExternalFilesDir(null), "pic" + c.get(Calendar.MILLISECOND) + "" + c.get(Calendar.SECOND) + "" + c.get(Calendar.MINUTE) + "" + c.get(Calendar.HOUR_OF_DAY) + "" + c.get(Calendar.DAY_OF_MONTH) + "" + c.get(Calendar.MONTH) + "" + c.get(Calendar.YEAR) + ".jpg");
Log.e("TestingProject", "\n\n\n--------------------------------**********************************\n" +
"Files Absolute Path : " + mFile.getAbsolutePath()
+ "\n**********************************--------------------------------\n\n\n");
mBackgroundHandler.post(new ImageSaver(reader.acquireNextImage(), mFile));
}
};
private CaptureRequest.Builder mPreviewRequestBuilder;
private CaptureRequest mPreviewRequest;
private int mState = STATE_PREVIEW;
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
private boolean mFlashSupported;
private int mSensorOrientation;
private CameraCaptureSession.CaptureCallback mCaptureCallback
= new CameraCaptureSession.CaptureCallback() {
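// This callback implements the usual Camera2 still-capture state machine: wait for
// auto-focus to lock, run the auto-exposure precapture sequence, then capture the still.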
private void process(CaptureResult result) {
switch (mState) {
case STATE_PREVIEW: {
break;
}
case STATE_WAITING_LOCK: {
Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
if (afState == null) {
captureStillPicture();
} else if (CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED == afState ||
CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED == afState) {
Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
if (aeState == null ||
aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) {
mState = STATE_PICTURE_TAKEN;
captureStillPicture();
} else {
runPrecaptureSequence();
}
}
break;
}
case STATE_WAITING_PRECAPTURE: {
Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
if (aeState == null ||
aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE ||
aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED) {
mState = STATE_WAITING_NON_PRECAPTURE;
}
break;
}
case STATE_WAITING_NON_PRECAPTURE: {
Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
mState = STATE_PICTURE_TAKEN;
captureStillPicture();
}
break;
}
}
}
@Override
public void onCaptureProgressed(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull CaptureResult partialResult) {
process(partialResult);
}
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
process(result);
}
};
private void showToast(final String text) {
final Activity activity = (Activity) getApplicationContext();
if (activity != null) {
activity.runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(activity, text, Toast.LENGTH_SHORT).show();
}
});
}
}
private static Size chooseOptimalSize(Size[] choices, int textureViewWidth,
int textureViewHeight, int maxWidth, int maxHeight, Size aspectRatio) {
List<Size> bigEnough = new ArrayList<>();
List<Size> notBigEnough = new ArrayList<>();
int w = aspectRatio.getWidth();
int h = aspectRatio.getHeight();
for (Size option : choices) {
if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight &&
option.getHeight() == option.getWidth() * h / w) {
if (option.getWidth() >= textureViewWidth &&
option.getHeight() >= textureViewHeight) {
bigEnough.add(option);
} else {
notBigEnough.add(option);
}
}
}
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizesByArea());
} else if (notBigEnough.size() > 0) {
return Collections.max(notBigEnough, new CompareSizesByArea());
} else {
Log.e(TAG, "Couldn't find any suitable preview size");
return choices[0];
}
}
public static test newInstance() {
return new test();
}
private void setUpCameraOutputs(int width, int height) {
Activity activity = (Activity) getApplicationContext();
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
for (String cameraId : manager.getCameraIdList()) {
CameraCharacteristics characteristics
= manager.getCameraCharacteristics(cameraId);
Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
continue;
}
StreamConfigurationMap map = characteristics.get(
CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map == null) {
continue;
}
Size largest = Collections.max(
Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
new CompareSizesByArea());
mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
ImageFormat.JPEG, /*maxImages*/2);
mImageReader.setOnImageAvailableListener(
mOnImageAvailableListener, mBackgroundHandler);
int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
boolean swappedDimensions = false;
switch (displayRotation) {
case Surface.ROTATION_0:
case Surface.ROTATION_180:
if (mSensorOrientation == 90 || mSensorOrientation == 270) {
swappedDimensions = true;
}
break;
case Surface.ROTATION_90:
case Surface.ROTATION_270:
if (mSensorOrientation == 0 || mSensorOrientation == 180) {
swappedDimensions = true;
}
break;
default:
Log.e(TAG, "Display rotation is invalid: " + displayRotation);
}
Point displaySize = new Point();
activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
int rotatedPreviewWidth = width;
int rotatedPreviewHeight = height;
int maxPreviewWidth = displaySize.x;
int maxPreviewHeight = displaySize.y;
if (swappedDimensions) {
rotatedPreviewWidth = height;
rotatedPreviewHeight = width;
maxPreviewWidth = displaySize.y;
maxPreviewHeight = displaySize.x;
}
if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
maxPreviewWidth = MAX_PREVIEW_WIDTH;
}
if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
maxPreviewHeight = MAX_PREVIEW_HEIGHT;
}
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight, largest);
int orientation = getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
} else {
mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
}
Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
mFlashSupported = available == null ? false : available;
mCameraId = cameraId;
return;
}
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (NullPointerException e) {
}
}
private void openCamera(int width, int height) {
if (ContextCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.CAMERA)
!= PackageManager.PERMISSION_GRANTED) {
requestCameraPermission();
return;
}
setUpCameraOutputs(width, height);
configureTransform(width, height);
Activity activity = (Activity) getApplicationContext();
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
manager.openCamera(mCameraId, mStateCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
}
}
private void closeCamera() {
try {
mCameraOpenCloseLock.acquire();
if (null != mCaptureSession) {
mCaptureSession.close();
mCaptureSession = null;
}
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
if (null != mImageReader) {
mImageReader.close();
mImageReader = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
} finally {
mCameraOpenCloseLock.release();
}
}
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
private void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private void createCameraPreviewSession() {
try {
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface surface = new Surface(texture);
mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.addTarget(surface);
mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
if (null == mCameraDevice) {
return;
}
mCaptureSession = cameraCaptureSession;
try {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
setAutoFlash(mPreviewRequestBuilder);
mPreviewRequest = mPreviewRequestBuilder.build();
mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(
@NonNull CameraCaptureSession cameraCaptureSession) {
showToast("Failed");
}
}, null
);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void configureTransform(int viewWidth, int viewHeight) {
Activity activity = (Activity) getApplicationContext();
if (null == mTextureView || null == mPreviewSize || null == activity) {
return;
}
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale = Math.max(
(float) viewHeight / mPreviewSize.getHeight(),
(float) viewWidth / mPreviewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
} else if (Surface.ROTATION_180 == rotation) {
matrix.postRotate(180, centerX, centerY);
}
mTextureView.setTransform(matrix);
}
private void takePicture() {
Log.e("TESTING", "taking Pic");
lockFocus();
}
private void lockFocus() {
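// Note: despite its name, this method does not trigger an AF lock (no CONTROL_AF_TRIGGER_START);
// it adds the ImageReader surface to the preview request and fires a burst of 10 captures,
// which cameraCaptureCallback below counts before calling unlockFocus().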
try {
List<CaptureRequest> captureList = new ArrayList<CaptureRequest>();
mPreviewRequestBuilder.addTarget(mImageReader.getSurface());
for (int i = 0; i < 10; i++) {
captureList.add(mPreviewRequestBuilder.build());
}
mCaptureSession.stopRepeating();
mCaptureSession.captureBurst(captureList, cameraCaptureCallback, null);
mPreviewRequestBuilder.removeTarget(mImageReader.getSurface());
} catch (CameraAccessException e) {e.printStackTrace();}
}
CameraCaptureSession.CaptureCallback cameraCaptureCallback = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
Log.e("TestingProject", "pic # " + (mPictureCounter + 1));
mPictureCounter++;
if (mPictureCounter >= 10)
unlockFocus();
}
};
private void runPrecaptureSequence() {
try {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
mState = STATE_WAITING_PRECAPTURE;
mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback,
mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void captureStillPicture() {
try {
final Activity activity = (Activity) getApplicationContext();
if (null == activity || null == mCameraDevice) {return;}
final CaptureRequest.Builder captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(mImageReader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
setAutoFlash(captureBuilder);
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getOrientation(rotation));
CameraCaptureSession.CaptureCallback CaptureCallback
= new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
showToast("Saved: " + mFile);
Log.d(TAG, mFile.toString());
unlockFocus();
}
};
mCaptureSession.stopRepeating();
mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private int getOrientation(int rotation) {
return (ORIENTATIONS.get(rotation) + mSensorOrientation + 270) % 360;
}
private void unlockFocus() {
try {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
setAutoFlash(mPreviewRequestBuilder);
mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback,mBackgroundHandler);
mState = STATE_PREVIEW;
mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback,mBackgroundHandler);
} catch (CameraAccessException e) {
Log.e("TestingProject", "BHAND... " + e.getMessage());
}
}
private void setAutoFlash(CaptureRequest.Builder requestBuilder) {
if (mFlashSupported) {
requestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
}
}
private static class ImageSaver implements Runnable {
private final Image mImage;
private final File mFile;
public ImageSaver(Image image, File file) {
mImage = image;
mFile = file;
}
@Override
public void run() {
ByteBuffer buffer = mImage.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.remaining()];
buffer.get(bytes);
FileOutputStream output = null;
try {
output = new FileOutputStream(mFile);
output.write(bytes);
} catch (IOException e) {
e.printStackTrace();
} finally {
mImage.close();
if (null != output) {
try {
output.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}
static class CompareSizesByArea implements Comparator<Size> {
@Override
public int compare(Size lhs, Size rhs) {
return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
(long) rhs.getWidth() * rhs.getHeight());
}
}
public static class ErrorDialog extends DialogFragment {
private static final String ARG_MESSAGE = "message";
public static ErrorDialog newInstance(String message) {
ErrorDialog dialog = new ErrorDialog();
Bundle args = new Bundle();
args.putString(ARG_MESSAGE, message);
dialog.setArguments(args);
return dialog;
}
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
final Activity activity = getActivity();
return new AlertDialog.Builder(activity)
.setMessage(getArguments().getString(ARG_MESSAGE))
.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialogInterface, int i) {
activity.finish();
}
}).create();
}
}
public static class ConfirmationDialog extends DialogFragment {
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
final Fragment parent = getParentFragment();
return new AlertDialog.Builder(getActivity())
.setMessage(R.string.request_permission)
.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
FragmentCompat.requestPermissions(parent,
new String[]{Manifest.permission.CAMERA},
REQUEST_CAMERA_PERMISSION);
}
}).setNegativeButton(android.R.string.cancel,
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
Activity activity = parent.getActivity();
if (activity != null) {
activity.finish();
}
}
}).create();
}
}
@TargetApi(Build.VERSION_CODES.M)
private void requestCameraPermission() {
String[] perms = {"android.permission.RECORD_AUDIO", "android.permission.CAMERA"};
int permsRequestCode = 200;
((Activity)getApplicationContext()).requestPermissions(perms, permsRequestCode);
}
public CameraService() {
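// Note: the Service's Context is not attached yet while this constructor runs, so
// getApplicationContext() and getExternalFilesDir() fail here (and the Activity cast can
// never succeed in a Service); mPreviewSize is also still null at this point. This setup
// would normally be done in onCreate() instead.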
mTextureView = new AutoFitTextureView((Activity)getApplicationContext(),null,0);
mTextureView.setLayoutParams(new FrameLayout.LayoutParams(
mPreviewSize.getWidth(), mPreviewSize.getHeight(), Gravity.CENTER));
mTextureView .setSurfaceTextureListener(mSurfaceTextureListener);
Calendar c = Calendar.getInstance();
mFile = new File(getExternalFilesDir(null), "proof_" + c.get(Calendar.MILLISECOND) + c.get(Calendar.SECOND) + c.get(Calendar.MINUTE) + c.get(Calendar.HOUR_OF_DAY) + c.get(Calendar.DAY_OF_MONTH) + c.get(Calendar.MONTH) + c.get(Calendar.YEAR) + ".jpg");
Log.e("TestingProject", "File Path : " + mFile.getAbsolutePath());
new Thread(new Runnable() {
@Override
public void run() {
try {
Thread.sleep(10000);
((Activity) getApplicationContext()).runOnUiThread(new Runnable() {
public void run() {
takePicture();
}
});
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}).start();
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
super.onStartCommand(intent, flags, startId);
return START_STICKY;
}
@Override
public IBinder onBind(Intent intent) {
throw new UnsupportedOperationException("Not yet implemented");
}
}
AutoFitTextureView Class
public class AutoFitTextureView extends TextureView {
private int mRatioWidth = 0;
private int mRatioHeight = 0;
public AutoFitTextureView(Context context) {
this(context, null);
}
public AutoFitTextureView(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
public AutoFitTextureView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
Log.e("TEST", "defStyle = " +defStyle);
for (int i = 0; i < attrs.getAttributeCount(); i++)
Log.e("TEST", "IN constructor === " + attrs.getAttributeName(i)
+ "---- Value = " + attrs.getAttributeValue(i));
}
public void setAspectRatio(int width, int height) {
if (width < 0 || height < 0) {
throw new IllegalArgumentException("Size cannot be negative.");
}
mRatioWidth = width;
mRatioHeight = height;
requestLayout();
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
int width = MeasureSpec.getSize(widthMeasureSpec);
int height = MeasureSpec.getSize(heightMeasureSpec);
if (0 == mRatioWidth || 0 == mRatioHeight) {
setMeasuredDimension(width, height);
} else {
if (width < height * mRatioWidth / mRatioHeight) {
setMeasuredDimension(width, width * mRatioHeight / mRatioWidth);
} else {
setMeasuredDimension(height * mRatioWidth / mRatioHeight, height);
}
}
}
}
This is the stack trace of the error:
