Android custom camera setFocusAreas and setMeteringAreas do not work on Samsung devices - android

This question may duplicate another one, but I have not found a solution in the others. I'm trying to write a custom Android camera. It works fine on my device (HTC One); I tried some different devices and it works on one of them, but I'm having problems on Samsung devices: the autofocus feature does not turn off, so the focus area cannot be changed. This is my touch event:
@Override
public boolean onTouchEvent(final MotionEvent event){
Camera.Parameters cameraParameters = camera.getParameters();
if (event.getAction() == MotionEvent.ACTION_UP){
focusAreas.clear();
meteringAreas.clear();
Rect focusRect = calculateTapArea(event.getX(), event.getY(), 1f);
Rect meteringRect = calculateTapArea(event.getX(), event.getY(), 1.5f);
focusAreas.add(new Camera.Area(focusRect, 800));
meteringAreas.add(new Camera.Area(meteringRect, 800));
cameraParameters.setFocusAreas(focusAreas);
cameraParameters.setMeteringAreas(meteringAreas);
cameraParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_MACRO);
try{
camera.setParameters(cameraParameters);
} catch(Exception e){
Log.e("Focus problem", e.toString());
return false;
}
camera.autoFocus(new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success, Camera camera) {
camera.cancelAutoFocus();
Camera.Parameters params = camera.getParameters();
if (!Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE.equals(params.getFocusMode())) { // compare focus-mode strings with equals(), not !=
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
camera.setParameters(params);
}
}
});
focusSound = new MediaPlayer();
showSquareFocus();
try {
AssetFileDescriptor descriptor = this.getApplicationContext().getAssets()
.openFd("focus.wav");
focusSound.setDataSource(descriptor.getFileDescriptor(),
descriptor.getStartOffset(), descriptor.getLength());
descriptor.close();
focusSound.prepare();
focusSound.setLooping(false);
focusSound.start();
focusSound.setVolume(10,10);
focusSound.setOnCompletionListener(new OnCompletionListener(){
public void onCompletion(MediaPlayer mp){
mp.release();
}
});
} catch (IllegalArgumentException e) {
e.printStackTrace();
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return true;
}
You may also need this code:
private Rect calculateTapArea(float x, float y, float coefficient) {
int areaSize = Float.valueOf(FOCUS_AREA_SIZE * coefficient).intValue();
int left = clamp((int) x - areaSize / 2, 0, width - areaSize);
int top = clamp((int) y - areaSize / 2, 0, height - areaSize);
RectF rectF = new RectF(left, top, left + areaSize, top + areaSize);
matrix.mapRect(rectF);
return new Rect(Math.round(rectF.left), Math.round(rectF.top), Math.round(rectF.right), Math.round(rectF.bottom));
}
private int clamp(int x, int min, int max) {
if (x > max) {
return max;
}
if (x < min) {
return min;
}
return x;
}

I've experienced the same problem on a Samsung Galaxy S3.
The solution I've found (even if it's dirty) is to set an alternative focus mode before the desired one:
private void setFocusMode(final Camera camera, final String newFocusMode)
{
// Apply an alternative focus mode
Camera.Parameters parameters = camera.getParameters();
final List<String> focusModes = parameters.getSupportedFocusModes();
if (focusModes.contains(Parameters.FOCUS_MODE_FIXED) && !newFocusMode.equalsIgnoreCase(Parameters.FOCUS_MODE_FIXED))
{
parameters.setFocusMode(Parameters.FOCUS_MODE_FIXED);
}
else if (focusModes.contains(Parameters.FOCUS_MODE_INFINITY) && !newFocusMode.equalsIgnoreCase(Parameters.FOCUS_MODE_INFINITY))
{
parameters.setFocusMode(Parameters.FOCUS_MODE_INFINITY);
}
else if (focusModes.contains(Parameters.FOCUS_MODE_MACRO) && !newFocusMode.equalsIgnoreCase(Parameters.FOCUS_MODE_MACRO))
{
parameters.setFocusMode(Parameters.FOCUS_MODE_MACRO);
}
else if (focusModes.contains(Parameters.FOCUS_MODE_AUTO) && !newFocusMode.equalsIgnoreCase(Parameters.FOCUS_MODE_AUTO))
{
parameters.setFocusMode(Parameters.FOCUS_MODE_AUTO);
}
camera.setParameters(parameters);
// Now apply the desired focus mode
parameters = camera.getParameters();
parameters.setFocusMode(newFocusMode);
camera.setParameters(parameters);
}
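For completeness, here is a minimal usage sketch (my illustration, not part of the original answer) of how the touch handler from the question could call this helper; the camera field and the calculated rectangles are assumed to come from the code above.
// Hedged sketch: apply the tap areas, force the mode change via setFocusMode(), then request autofocus.
private void applyTapFocus(Camera camera, Rect focusRect, Rect meteringRect) {
    Camera.Parameters p = camera.getParameters();
    p.setFocusAreas(Collections.singletonList(new Camera.Area(focusRect, 800)));
    p.setMeteringAreas(Collections.singletonList(new Camera.Area(meteringRect, 800)));
    camera.setParameters(p);
    // Cycling through an alternative mode forces some Samsung drivers to re-read the focus configuration
    setFocusMode(camera, Camera.Parameters.FOCUS_MODE_AUTO);
    camera.autoFocus(new Camera.AutoFocusCallback() {
        @Override
        public void onAutoFocus(boolean success, Camera camera) {
            // focus finished; update the UI or play the focus sound here
        }
    });
}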

Related

How to implement AutoFocus in CameraPageRenderer in Android Xamarin.Forms

I've used CameraPageRenderer, which has no AutoFocus implementation. I don't know how to implement it in Xamarin.Android.
public async void OnSurfaceTextureAvailable(SurfaceTexture surface, int width, int height)
{
camera = global::Android.Hardware.Camera.Open((int)cameraType);
textureView.LayoutParameters = new FrameLayout.LayoutParams(width, height);
surfaceTexture = surface;
camera.SetPreviewTexture(surface);
PrepareAndStartCamera();
}
In CameraPageRenderer, set up the focus mode in the camera parameters by implementing IAutoFocusCallback:
public class CameraPageRenderer : PageRenderer, TextureView.ISurfaceTextureListener, IAutoFocusCallback
{
Then implement the OnAutoFocus method, setting the focus mode:
public void OnAutoFocus(bool success, Camera camera)
{
var parameters = camera.GetParameters();
if (parameters.FocusMode != Android.Hardware.Camera.Parameters.FocusModeContinuousPicture)
{
parameters.FocusMode = Android.Hardware.Camera.Parameters.FocusModeContinuousPicture;
if (parameters.MaxNumFocusAreas > 0)
{
parameters.FocusAreas = null;
}
camera.SetParameters(parameters);
camera.StartPreview();
}
}
Update:
I followed this thread (How to implement visual indicator when camera is focused) to add tap-to-focus to CameraPageRenderer.
private void TextureView_Touch(object sender, TouchEventArgs e)
{
if (camera != null)
{
var parameters = camera.GetParameters();
camera.CancelAutoFocus();
Rect focusRect = CalculateTapArea(e.Event.GetX(), e.Event.GetY(), 1f);
if (parameters.FocusMode != Android.Hardware.Camera.Parameters.FocusModeAuto)
{
parameters.FocusMode = Android.Hardware.Camera.Parameters.FocusModeAuto;
}
if (parameters.MaxNumFocusAreas > 0)
{
List<Area> mylist = new List<Area>();
mylist.Add(new Android.Hardware.Camera.Area(focusRect, 1000));
parameters.FocusAreas = mylist;
}
try
{
camera.CancelAutoFocus();
camera.SetParameters(parameters);
camera.StartPreview();
camera.AutoFocus(this);
MarginLayoutParams margin = new MarginLayoutParams(new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WrapContent,
ViewGroup.LayoutParams.WrapContent));
margin.SetMargins(focusRect.Left, focusRect.Top,
focusRect.Right, focusRect.Bottom);
FrameLayout.LayoutParams layoutParams = new FrameLayout.LayoutParams(margin);
layoutParams.Height = 200;
layoutParams.Width = 200;
takePhotoButton.LayoutParameters = layoutParams;
takePhotoButton.Visibility = ViewStates.Visible;
}
catch (System.Exception ex)
{
Console.WriteLine(ex.ToString());
Console.Write(ex.StackTrace);
}
//return true;
}
else
{
//return false;
}
}
private Rect CalculateTapArea(object x, object y, float coefficient)
{
var focusAreaSize = Math.Max(textureView.Width, textureView.Height) / 8; //Recommended focus area size from the manufacturer is 1/8 of the image
int areaSize = (int)(focusAreaSize * coefficient);
int left = clamp(Convert.ToInt32(x) - areaSize / 2, 0, textureView.Width - areaSize);
int top = clamp(Convert.ToInt32(y) - areaSize / 2, 0, textureView.Height - areaSize);
RectF rectF = new RectF(left, top, left + areaSize, top + areaSize);
Matrix.MapRect(rectF);
return new Rect((int)System.Math.Round(rectF.Left), (int)System.Math.Round(rectF.Top), (int)System.Math.Round(rectF.Right), (int)System.Math.Round(rectF.Bottom));
}
private int clamp(int x, int min, int max)
{
if (x > max)
{
return max;
}
if (x < min)
{
return min;
}
return x;
}
public void OnAutoFocus(bool success, Camera camera)
{
var parameters = camera.GetParameters();
if (parameters.FocusMode != Android.Hardware.Camera.Parameters.FocusModeContinuousPicture)
{
parameters.FocusMode = Android.Hardware.Camera.Parameters.FocusModeContinuousPicture;
if (parameters.MaxNumFocusAreas > 0)
{
parameters.FocusAreas = null;
}
camera.SetParameters(parameters);
camera.StartPreview();
}
if(success)
{
Task.Delay(1000);
this.takePhotoButton.Visibility = ViewStates.Invisible;
}
}
Update:
If you're using the Camera API 1 (the old one) you can set the focus mode to continuous like this:
public void OnSurfaceTextureAvailable(SurfaceTexture surface, int width, int height)
{
try
{
camera = Camera.Open((int)cameraType); // assign the field so it is still usable outside the try block
var parameters = camera.GetParameters();
//SET FOCUS MODE HERE
parameters.FocusMode = Camera.Parameters.FocusModeContinuousPicture;
camera.SetParameters(parameters);
}
catch (Exception ex)
{
//log error
return;
}
//other code related to the camera
textureView.LayoutParameters = new FrameLayout.LayoutParams(width, height);
surfaceTexture = surface;
camera.SetPreviewTexture(surface);
PrepareAndStartCamera();
}

Android Camera2 API recording preview is squished in portrait recording on Samsung Galaxy S8

I am facing a squished preview on the Samsung S8. I'm using the Camera2 API for recording in my project, with support for only 1920x1080 resolution. The recording quality is good, but the preview is not right on the S8. I tried this solution (Samsung Galaxy S8 full screen mode), but it doesn't help with my issue.
Please help, and thanks in advance.
Here is the code I implemented for the camera:
/* Maintain a 16:9 aspect ratio for recorded video */
private Size chooseVideoSize(Size[] choices) {
for (Size size : choices) {
int orientation = getActivityContext.getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
if((size.getWidth()/16) == (size.getHeight()/9) && size.getWidth() <=3840 ) {
return size;
}
} else {
if((size.getWidth()/16) == (size.getHeight()/9) && (size.getWidth() <=1280 ) ) {
return size;
}
}
}
return choices[choices.length - 1];
}
/*
* Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
* width and height are at least as large as the respective requested values, and whose aspect
* ratio matches with the specified value.
*/
private Size chooseOptimalSize(Size[] choices, int width, int height, Size aspectRatio) {
List<Size> bigEnough = new ArrayList<>();
int w = aspectRatio.getWidth();
int h = aspectRatio.getHeight();
double ratio = (double) h / w;
for (Size size : choices) {
int orientation = getActivityContext.getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
if((size.getWidth()/16) == (size.getHeight()/9) && size.getWidth() <=3840 ) {
return size;
}
} else {
if((size.getWidth()/16) == (size.getHeight()/9) && (size.getWidth() <=1280 ) ) {
return size;
}
}
}
// Pick the smallest of those, assuming we found any
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizesByArea());
} else {
return choices[0];
}
}
/*
* Compares two {@code Size}s based on their areas.
*/
static class CompareSizesByArea implements Comparator<Size> {
@Override
public int compare(Size lhs, Size rhs) {
// We cast here to ensure the multiplications won't overflow
return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
(long) rhs.getWidth() * rhs.getHeight());
}
}
/*
* Starts a background thread and its {@link Handler}.
*/
private void startBackgroundThread() {
try {
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}catch (Exception ex){ex.printStackTrace();}
}
/*
* Stops the background thread and its {@link Handler}.
*/
private void stopBackgroundThread() {
if(mBackgroundThread!=null){
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}catch (Exception e) {
e.printStackTrace();
}
}
}
private void openCamera(int width, int height) {
CameraManager manager = (CameraManager) getActivityContext.getSystemService(Context.CAMERA_SERVICE);
try {
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
String mCameraId = manager.getCameraIdList()[cameraId];
// Choose the sizes for camera preview and video recording
characteristics = manager.getCameraCharacteristics(mCameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
try {
mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
maximumZoomLevel = characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
if (map == null) {
throw new RuntimeException("Cannot get available preview/video sizes");
}
mVideoSize = chooseVideoSize(map.getOutputSizes(MediaRecorder.class));
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), width, height, mVideoSize);
int orientation = getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
} else {
mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
}
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
configureTransform(width, height);
}
boolean result = checkAccessCameraPermission();
if (result) {
manager.openCamera(mCameraId, mStateCallback, null);
}
}catch (Exception ex){ex.printStackTrace();}finally {
map=null;
Runtime.getRuntime().gc();
}
} catch (CameraAccessException e) {
Toast.makeText(getActivityContext, "Cannot access the camera.", Toast.LENGTH_SHORT).show();
e.printStackTrace();
} catch (NullPointerException e) {
e.printStackTrace();
} catch (InterruptedException e) {
e.printStackTrace();
throw new RuntimeException("Interrupted while trying to lock camera opening.");
}
}
/*
* Start the camera preview.
*/
private void startPreview() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
return;
}
try {
closePreviewSession();
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
Surface previewSurface = new Surface(texture);
mPreviewBuilder.addTarget(previewSurface);
mCameraDevice.createCaptureSession(Collections.singletonList(previewSurface),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
mPreviewSession = session;
updatePreview();
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
Toast.makeText(getActivityContext, "Failed", Toast.LENGTH_SHORT).show();
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
/*
* Update the camera preview. {@link #startPreview()} needs to be called in advance.
*/
private void updatePreview() {
if (null == mCameraDevice) {
return;
}
try {
setUpCaptureRequestBuilder(mPreviewBuilder);
HandlerThread thread = new HandlerThread("CameraPreview");
try {
thread.start();
if(zoom!=null){
try{
mPreviewBuilder.set(CaptureRequest.SCALER_CROP_REGION, zoom);
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), mPreviewSessionCallback, mBackgroundHandler);
}catch (Exception ex){ex.printStackTrace();}
}else{
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, mBackgroundHandler);
}
}catch (CameraAccessException e) {
e.printStackTrace();
}catch (Exception ex){ex.printStackTrace();}finally {
thread=null;
}
} catch (Exception e) {
e.printStackTrace();
}
}
private void setUpCaptureRequestBuilder(CaptureRequest.Builder builder) {
builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
}
/*
* Configures the necessary {@link android.graphics.Matrix} transformation to `mTextureView`.
* This method should not be called until the camera preview size is determined in
* openCamera, or until the size of `mTextureView` is fixed.
*/
private void configureTransform(int viewWidth, int viewHeight) {
if (null == mTextureView || null == mPreviewSize) {
return;
}
int rotation = getActivityContext.getWindowManager().getDefaultDisplay().getRotation();
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale = Math.max((float) viewHeight / mPreviewSize.getHeight(),(float) viewWidth / mPreviewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
}else if (Surface.ROTATION_0 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale=Math.max((float) viewWidth / mPreviewSize.getWidth(), (float) viewHeight / mPreviewSize.getHeight());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(0, centerX, centerY);
}else if(Surface.ROTATION_180== rotation){
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale=Math.max((float) viewWidth / mPreviewSize.getWidth(), (float) viewHeight / mPreviewSize.getHeight());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(0, centerX, centerY);
}
try {
mTextureView.setTransform(matrix);
}catch (Exception ex){ex.printStackTrace();}finally {
bufferRect=null;
viewRect=null;
matrix=null;
}
}
Sorry, this is a late reply, but it may help. On Android, CameraX helps avoid squeezing the preview by letting you pick a target aspect ratio, for example with the aspectRatio() helper below.
Here is my code:
/**
* Detecting the most suitable aspect ratio for current dimensions
*
* @param width - preview width
* @param height - preview height
* @return suitable aspect ratio
*/
private fun aspectRatio(width: Int, height: Int): Int {
val previewRatio = max(width, height).toDouble() / min(width, height)
if (abs(previewRatio - RATIO_4_3_VALUE) <= abs(previewRatio - RATIO_16_9_VALUE)) {
return AspectRatio.RATIO_4_3
}
return AspectRatio.RATIO_16_9
}
I recommend migrating to CameraX; it is the best approach.
Credit goes to the author of this repository:
CameraX-Demo
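As an illustration only (not taken from the linked demo), here is a minimal Java sketch of how the ratio returned by aspectRatio() might feed into a CameraX Preview use case; previewView, cameraProvider and lifecycleOwner are assumed to exist in your activity.
// Hedged CameraX sketch: bind a Preview use case with the detected target aspect ratio.
int screenAspectRatio = aspectRatio(previewView.getWidth(), previewView.getHeight());
Preview preview = new Preview.Builder()
    .setTargetAspectRatio(screenAspectRatio)
    .setTargetRotation(previewView.getDisplay().getRotation())
    .build();
preview.setSurfaceProvider(previewView.getSurfaceProvider());
cameraProvider.unbindAll();
cameraProvider.bindToLifecycle(lifecycleOwner, CameraSelector.DEFAULT_BACK_CAMERA, preview);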

Samsung S9 (18:9) camera preview stretched

I am using the Android Camera API and it works well on 16:9 devices in both portrait and landscape. But on the Samsung S9, an 18:9 device, the preview looks stretched in landscape mode. On the Samsung S9 I got the following supported preview sizes: 1920x1080, 1440x1080, 1088x1088, 1280x720, 1056x704, 1024x768, 960x720, 800x450, 720x720, 720x480, 640x480, 352x288, 320x240, 256x144, 176x144. So the optimal preview size is 1920x1080, but the actual resolution of the device is 2220x1080, which is why it looks stretched. But I need the preview full screen. How does the default camera app show its preview full screen?
@SuppressLint("ClickableViewAccessibility")
@SuppressWarnings("deprecation")
public CameraPreview(Context context, Camera.PreviewCallback previewCallback) {
super(context);
this.previewCallback = previewCallback;
mContext = context;
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void setCameraDisplayOrientation(Context activity,
int cameraId, Camera camera) {
Camera.CameraInfo info =
new Camera.CameraInfo();
Camera.getCameraInfo(cameraId, info);
int rotation = ((AppCompatActivity) activity).getWindowManager().getDefaultDisplay()
.getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
}
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (info.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate the mirror
} else { // back-facing
result = (info.orientation - degrees + 360) % 360;
}
mDisplayOrientation = result;
Log.d(TAG, "setCameraDisplayOrientation: "+mDisplayOrientation);
camera.setDisplayOrientation(result);
}
public void takePhoto(final PictureCallback pCalback) {
mCamera.takePicture(null, null, pCalback);
}
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, acquire the camera and tell it where
// to draw.
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
mCamera = null;
//previewCount = 0;
}
try {
mCamera = Camera.open();
//setCameraDisplayOrientation(mContext, 0, mCamera);
/*mParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
DisplayMetrics displayMetrics = mContext.getResources().getDisplayMetrics();
int screenWidth = displayMetrics.widthPixels;
int screenHeight = displayMetrics.heightPixels;
optimalPreviewSize = getBestAspectPreviewSize(mParameters.getSupportedPreviewSizes(), screenWidth, screenHeight);//Bug Fix for Samsung A8
mParameters.setPreviewSize(optimalPreviewSize.width, optimalPreviewSize.height);
mParameters.setPictureSize(optimalPreviewSize.width, optimalPreviewSize.height);
mParameters.setPreviewFpsRange(30000, 30000);
mCamera.setParameters(mParameters);*/
/*mCamera.setPreviewDisplay(holder);
mCamera.setPreviewCallback(previewCallback);*/
mCamera.setPreviewDisplay(holder);
} catch (IOException exception) {
mCamera.release();
mCamera = null;
//previewCount = 0;
} catch (Exception exception) {
mCamera = null;
//previewCount = 0;
}
if (mCameraPreviewListener != null) {
mCameraPreviewListener.onCameraSurfaceCreated();
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
mCamera = null;
//previewCount = 0;
}
if (mCameraPreviewListener != null) {
mCameraPreviewListener.onCameraSurfaceDestroyed();
}
}
public void stopCamera() {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
mCamera = null;
//previewCount = 0;
}
}
@SuppressWarnings("null")
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
try {
// Now that the size is known, set up the camera parameters and begin
// the preview.
mParameters = mCamera.getParameters();
Log.d("CameraFix", "parameters -> " + mParameters.flatten());
setCameraDisplayOrientation(mContext, 0, mCamera);
mParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
//Size optimalPreviewSize = getOptimalPreviewSize(mParameters.getSupportedPreviewSizes(), getWidth(), getHeight());
DisplayMetrics displayMetrics = mContext.getResources().getDisplayMetrics();
int screenWidth = displayMetrics.widthPixels;
int screenHeight = displayMetrics.heightPixels;
//Size optimalPreviewSize = getOptimalPreviewSize(mParameters.getSupportedPreviewSizes(), screenWidth, screenHeight, getHeight());
mSupportedPreviewSizes = mParameters.getSupportedPreviewSizes();
optimalPreviewSize = getBestAspectPreviewSize(mParameters.getSupportedPreviewSizes(), screenWidth, screenHeight);//Bug Fix for Samsung A8
Log.d("CameraFix", "optimalPreviewSize.width -> " + optimalPreviewSize.width);
Log.d("CameraFix", "optimalPreviewSize.height -> " + optimalPreviewSize.height);
mParameters.setPreviewSize(optimalPreviewSize.width, optimalPreviewSize.height);
mParameters.setPictureSize(optimalPreviewSize.width, optimalPreviewSize.height);
mParameters.setPreviewFpsRange(30000, 30000);
/*if (mDisplayOrientation == 0 || mDisplayOrientation == 180) {
setLayoutParams(new FrameLayout.LayoutParams(optimalPreviewSize.width, optimalPreviewSize.height,Gravity.CENTER));
}*/
Log.d("CameraFix", "setPreviewFpsRange");
mCamera.setParameters(mParameters);
mCamera.setPreviewDisplay(holder);
//SurfaceTexture st = new SurfaceTexture(10);
//mCamera.setPreviewTexture(st);
mCamera.setPreviewCallback(previewCallback);
mCamera.startPreview();
Log.d("CameraFix", "start preview");
if (mCameraPreviewListener != null) {
mCameraPreviewListener.onCameraSurfaceChanged();
}
} catch (Exception e) {
e.printStackTrace();
Log.d("CameraFix", e.toString());
}
}
public void toggleFlash(boolean flashModeOn) {
if (mContext.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FLASH)) {
Parameters parameters = mCamera.getParameters();
if (flashModeOn) {
//parameters.setFlashMode(Parameters.FLASH_MODE_TORCH);
parameters.setFlashMode(Parameters.FLASH_MODE_ON);
mCamera.setParameters(parameters);
mCamera.startPreview();
//Toast.makeText(mContext, R.string.flash_mode_on, Toast.LENGTH_SHORT).show();
} else {
parameters.setFlashMode(Parameters.FLASH_MODE_OFF);
mCamera.setParameters(parameters);
//Toast.makeText(mContext, R.string.flash_mode_off, Toast.LENGTH_SHORT).show();
}
} else {
Toast.makeText(mContext, R.string.flash_not_available, Toast.LENGTH_SHORT).show();
}
}
/**
* Source for this solution - https://stackoverflow.com/questions/21354313/camera-preview-quality-in-android-is-poor/21354442#21354442
*
* @param supportedPreviewSizes
* @param screenWidth
* @param screenHeight
* @return
*/
private Size getBestAspectPreviewSize(List<Size> supportedPreviewSizes, int screenWidth, int screenHeight) {
double targetRatio = (double) screenWidth / screenHeight;
Camera.Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
for (int i = 0; i < supportedPreviewSizes.size(); i++) {
Size size = supportedPreviewSizes.get(i);
Log.d(TAG, "getBestAspectPreviewSize: supportedPreviewSizes -> "+size.width +"X"+size.height);
}
Log.d(TAG, "getBestAspectPreviewSize: supportedPreviewSizes -> "+supportedPreviewSizes.toString());
Log.d(TAG, "getBestAspectPreviewSize: mDisplayOrientation -> "+mDisplayOrientation);
if (mDisplayOrientation == 90 || mDisplayOrientation == 270) {
Log.d(TAG, "getBestAspectPreviewSize: inside 90 - 270 ");
targetRatio = (double) screenHeight / screenWidth;
}
Log.d(TAG, "getBestAspectPreviewSize: targetRatio -> "+targetRatio);
Collections.sort(supportedPreviewSizes,
Collections.reverseOrder(new SizeComparator()));
for (Size size : supportedPreviewSizes) {
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) < minDiff) {
optimalSize = size;
minDiff = Math.abs(ratio - targetRatio);
}
if (minDiff < 0.0d) {
break;
}
}
return (optimalSize);
/*if (mDisplayOrientation == 0 || mDisplayOrientation == 180) {
if (optimalSize != null) {
return mCamera.new Size(optimalSize.height, optimalSize.width);
} else {
return null;
}
}
else{
return (optimalSize);
}*/
//return mCamera.new Size(2220,1080);
}
public int getDisplayOrientation() {
return mDisplayOrientation;
}
public void setDisplayOrientation(int displayOrientation) {
this.mDisplayOrientation = displayOrientation;
}
public Parameters getCameraParameters() {
return mCamera.getParameters();
}
public void setCameraPreviewListener(CameraPreviewListener cameraPreviewListener) {
mCameraPreviewListener = cameraPreviewListener;
}
public interface CameraPreviewListener {
void onCameraSurfaceCreated();
void onCameraSurfaceChanged();
void onCameraSurfaceDestroyed();
void onCameraPreviewStarted();
}
private static class SizeComparator implements
Comparator<Size> {
@Override
public int compare(Size lhs, Size rhs) {
int left = lhs.width * lhs.height;
int right = rhs.width * rhs.height;
if (left < right) {
return (-1);
} else if (left > right) {
return (1);
}
return (0);
}
}
You should check the displayable area of the default camera app again. I don't think it can display full screen without stretching at that resolution; it may have a black area, a toolbar, a status bar...
There is nothing wrong with your implementation. We have to find the best supported preview size compared with the surface view we want to display it in. In this case, you should center the 1920x1080 surface view and add black padding areas at the top and bottom.
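A minimal sketch of that idea (my illustration, not the answerer's code): size the SurfaceView to the preview's own aspect ratio and center it, so the parent layout shows black bars instead of stretching. The view and size names are assumptions.
// Assuming the SurfaceView sits in a full-screen FrameLayout and the preview size is landscape (e.g. 1920x1080).
private void centerPreview(SurfaceView surfaceView, int screenWidth, int screenHeight,
                           int previewWidth, int previewHeight) {
    // In portrait the preview is rotated, so its on-screen width/height ratio is previewHeight/previewWidth (1080/1920 = 0.5625)
    float displayedRatio = (float) previewHeight / previewWidth;
    int viewWidth = screenWidth;
    int viewHeight = Math.round(screenWidth / displayedRatio);
    if (viewHeight > screenHeight) {
        // Shrink instead of cropping, leaving black bars on the sides
        viewHeight = screenHeight;
        viewWidth = Math.round(screenHeight * displayedRatio);
    }
    // Center the correctly proportioned view; the remaining screen area stays black
    surfaceView.setLayoutParams(new FrameLayout.LayoutParams(viewWidth, viewHeight, Gravity.CENTER));
}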
Please take a look at my answer, I hope it will help. I resolved the stretch issue with the following code (method names might differ). I'm sharing my implementation because I know how hard it is to implement the camera on Android, so please don't hesitate to go through the following sections.
Invoke the loadCamera method from your button action:
private void loadCamera() {
if (CommonUtils.deviceHasCamera(getActivityContext)) {
startBackgroundThread();
mCameraTimeOut=(isPermissionGranted?2500:5000);
if (mTextureView.isAvailable()) {
openCamera(mTextureView.getWidth(), mTextureView.getHeight());
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}else{
ShowToastUtils.INSTANCE.showCustomToast(getActivityContext, getString(R.string.msg_no_camera));
}
}
Initially, the SurfaceTextureListener is invoked for the camera:
private TextureView.SurfaceTextureListener mSurfaceTextureListener
= new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture,
int width, int height) {
mCameraTimeOut=(isPermissionGranted?2500:5000);
Log.e(TAG1, "chooseOptimalSize"+"-SurfaceTextureListener ---=>Width---=>"+width);
Log.e(TAG1, "chooseOptimalSize"+"-SurfaceTextureListener ---=>Height---=>"+height);
openCamera(width, height);
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture,
int width, int height) {
configureTransform(width, height);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {}
};
To choose the optimal preview size for the texture:
//Samsung-S6-choices[0]
//Samsung-S7-edge-choices[6]
//OnePlus-5T-choices[15]
/*Following is used for Camera Preview in TextureView, based on device camera resolution*/
/*
* Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
* width and height are at least as large as the respective requested values, and whose aspect
* ratio matches with the specified value.
*
* @param choices The list of sizes that the camera supports for the intended output class
* @param width The minimum desired width
* @param height The minimum desired height
* @param aspectRatio The aspect ratio
* @return The optimal {@code Size}, or an arbitrary one if none were big enough
*/
private Size chooseOptimalSize(Size[] choices, int width, int height, Size aspectRatio) {
// Collect the supported resolutions that are at least as big as the preview Surface
int loopCounter=0;
Log.e(TAG1, "Screen-->Width x Height="+screenWidth+" x "+screenHeight);
for (Size size : choices) {
Log.e(TAG1, "chooseOptimalSize:"+size);
}
for (Size size : choices) {
int orientation = getActivityContext.getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
if((size.getWidth()/16) == (size.getHeight()/9) && size.getWidth() <=7680 ) {//8K UHDTV Super Hi-Vision
Log.e(TAG1, "chooseOptimalSize:"+size.getWidth()+"x"+size.getHeight()+"--LoopPosition---==>"+loopCounter);
return size;
}
} else {
Log.e(TAG1, "chooseOptimalSize:--given--"+size);
if((size.getWidth()/16) == (size.getHeight()/9) && ((size.getWidth() <=1280)||(size.getHeight()<=1920))) {
mCameraRatio=RATIO_16_9;
Log.e(TAG1, "chooseOptimalSize:"+size.getWidth()+"x"+size.getHeight()+"-16:9"+"--LoopPosition---==>"+loopCounter);
return size;
}else if((size.getWidth()/18) == (size.getHeight()/9) && ((size.getWidth() <=2160)||(size.getHeight()<=3840))) {
mCameraRatio=RATIO_18_9;
Log.e(TAG1, "chooseOptimalSize:"+size.getWidth()+"x"+size.getHeight()+"-18:9"+"--LoopPosition---==>"+loopCounter);
return size;
}else if((size.getWidth()/18.5) == (size.getHeight()/9) && ((size.getWidth() <=2160)||(size.getHeight()<=3840))) {
mCameraRatio=RATIO_18_9;
Log.e(TAG1, "chooseOptimalSize:"+size.getWidth()+"x"+size.getHeight()+"-18.5:9"+"--LoopPosition---==>"+loopCounter);
return size;
}else if((width/19) == (height/9) && ((width <=2208)||(height<=3216))) {
mCameraRatio=RATIO_19_9;
Log.e(TAG1, "chooseOptimalSize:"+size.getWidth()+"x"+size.getHeight()+"-19:9"+"--LoopPosition---==>"+loopCounter);
return size;
}else if((size.getWidth()/19.5) == (size.getHeight()/9) && ((size.getWidth() <=3840)||(size.getHeight()<=2160))) {
mCameraRatio=RATIO_19_9;
Log.e(TAG1, "chooseOptimalSize:"+size.getWidth()+"x"+size.getHeight()+"-19.5:9"+"--LoopPosition---==>"+loopCounter);
return size;
}else{
Log.e(TAG1, "chooseOptimalSize"+" not proper aspect resolution");
}
}
loopCounter++;
}
// Fallback in case no size matched any of the aspect-ratio checks above
return choices[choices.length - 1];
}
To open the camera:
private void openCamera(int width, int height) {
CameraManager manager = (CameraManager) getActivityContext.getSystemService(Context.CAMERA_SERVICE);
try {
Log.e(TAG, "tryAcquire");
if (!mCameraOpenCloseLock.tryAcquire(mCameraTimeOut, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
String mCameraId = manager.getCameraIdList()[cameraId];
// Choose the sizes for camera preview and video recording
characteristics = manager.getCameraCharacteristics(mCameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
try {
mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
maximumZoomLevel = characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
if (map == null) {
throw new RuntimeException("Cannot get available preview/video sizes");
}
mVideoSize = chooseVideoSize(map.getOutputSizes(MediaRecorder.class));
/*This Line will configure the Texture size*/
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), width, height, mVideoSize);
int orientation = getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
Log.e(TAG1, "Width" + mPreviewSize.getWidth() + "X Height" + mPreviewSize.getHeight());
mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
} else {
Log.e(TAG1, "Width" + mPreviewSize.getHeight() + "X Height" + mPreviewSize.getWidth());
mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
//S10 preview Size
/* mTextureView.setAspectRatio(1080, 2280);*/
//mTextureView.setAspectRatio(2208, 2944);
}
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
configureTransform(width, height);
}
if (isPermissionGranted) {
manager.openCamera(mCameraId, mStateCallback, null);
}
}catch (Exception ex){ex.printStackTrace();}finally {
map=null;
Runtime.getRuntime().gc();
}
} catch (CameraAccessException e) {
Toast.makeText(getActivityContext, "Cannot access the camera.", Toast.LENGTH_SHORT).show();
//getActivityContext.finish();
e.printStackTrace();
} catch (NullPointerException e) {
e.printStackTrace();
} catch (InterruptedException e) {
e.printStackTrace();
throw new RuntimeException("Interrupted while trying to lock camera opening.");
}
}
The configureTransform method is used for orientation handling:
/*
* Configures the necessary {@link android.graphics.Matrix} transformation to `mTextureView`.
* This method should not be called until the camera preview size is determined in
* openCamera, or until the size of `mTextureView` is fixed.
*
* @param viewWidth The width of `mTextureView`
* @param viewHeight The height of `mTextureView`
*/
private void configureTransform(int viewWidth, int viewHeight) {
if (null == mTextureView || null == mPreviewSize) {
return;
}
int rotation = getActivityContext.getWindowManager().getDefaultDisplay().getRotation();
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale = Math.max((float) viewHeight / mPreviewSize.getHeight(),(float) viewWidth / mPreviewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
}else if (Surface.ROTATION_0 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale=Math.max((float) viewWidth / mPreviewSize.getWidth(), (float) viewHeight / mPreviewSize.getHeight());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(0, centerX, centerY);
}else if(Surface.ROTATION_180== rotation){
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale=Math.max((float) viewWidth / mPreviewSize.getWidth(), (float) viewHeight / mPreviewSize.getHeight());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(0, centerX, centerY);
}
try {
mTextureView.setTransform(matrix);
}catch (Exception ex){ex.printStackTrace();}finally {
bufferRect=null;
viewRect=null;
matrix=null;
}
}
Finally, to start the preview:
private void startPreview() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
return;
}
try {
closePreviewSession();
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
//texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
Surface previewSurface = new Surface(texture);
mPreviewBuilder.addTarget(previewSurface);
mCameraDevice.createCaptureSession(Collections.singletonList(previewSurface),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
mPreviewSession = session;
updatePreview();
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
Toast.makeText(getActivityContext, "Failed", Toast.LENGTH_SHORT).show();
}
}, mBackgroundHandler);
//previewSurface=null;
} catch (CameraAccessException e) {
e.printStackTrace();
}
}

camera2 preview with SurfaceTexture works perfectly in SDK v21, but stays black in v23

I tested SDK v21 on a Samsung Galaxy A3 (2014) and v23 on a Nexus 5X. I also tested the Camera2Basic example, and it works on both devices. I wonder how my small code restructuring (I use an Activity instead of a Fragment) causes this error on only one of the two devices:
// ...
protected void onCreate() {
// setContentView ...
viewfinder = (AutoFitTextureView)findViewById(R.id.viewfinder);
// Choose back camera device, choose maxPictureSize (for JPEG), find out whether dimensions must be swapped
if (!swappedDimensions)
viewfinder.setAspectRatio(maxPictureSize.getWidth(), maxPictureSize.getHeight());
else
viewfinder.setAspectRatio(maxPictureSize.getHeight(), maxPictureSize.getWidth());
// initialize image reader
}
protected void onResume() {
if (viewfinder.isAvailable()) {
// Alternative to call in onSurfaceTextureAvailable()
init();
}
else {
viewfinder.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
Log.d(tag(), "surfaceTexture available: " + width + " x " + height);
configureSurface(width, height);
init();
}
public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {
Log.d(tag(), "surfaceTexture size changed: " + width + " x " + height);
// Currently, don't react to changes. In opposite to the surface change listener, this method is not called after onSurfaceTextureAvailable and only needs to be implemented if the texture view size will change during the app is running
}
public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) {
Log.d(tag(), "surfaceTexture destroyed");
return true;
}
public void onSurfaceTextureUpdated(SurfaceTexture texture) {
Log.d(tag(), "surfaceTexture updated");
}
});
}
try {
if (!openCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
cameraManager.openCamera(deviceId, new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
openCloseLock.release();
device = camera;
init();
}
// ...
}, null);
}
catch(InterruptedException e) {
// ...
}
}
protected void configureSurface(int view_width, int view_height) {
SurfaceTexture surfaceTexture = viewfinder.getSurfaceTexture();
if (surfaceTexture == null)
return;
// Can this be changed after the session has been started?:
Size rotatedSurfaceSize = swappedDimensions
? new Size(view_height, view_width)
: new Size(view_width, view_height);
Size previewSize = bestOutputSize(SurfaceTexture.class, rotatedSurfaceSize, Sizes.aspectRatio(maxPictureSize));
Log.i(tag(), "Preview size for " + rotatedSurfaceSize + " (" + Sizes.aspectRatio(maxPictureSize) + ":1): "+previewSize);
surfaceTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
//
// Copied from Camera2Basic, just changed var names:
int rotation = getWindowManager().getDefaultDisplay().getRotation();
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, view_width, view_height);
RectF bufferRect = new RectF(0, 0, previewSize.getHeight(), previewSize.getWidth());
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale = Math.max(
(float) view_height / previewSize.getHeight(),
(float) view_width / previewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
} else if (Surface.ROTATION_180 == rotation) {
matrix.postRotate(180, centerX, centerY);
}
viewfinder.setTransform(matrix);
//
}
/**
* Prerequisites:
* - The device must be opened.
* - The surface texture must be available.
*/
protected void init() {
// Executed only after second call, loadingState makes sure both prerequisites are true
if (++loadingState != 2)
return;
final CameraDevice d = device;
final Surface surface = new Surface(viewfinder.getSurfaceTexture());
try {
previewRequestBuilder = d.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
previewRequestBuilder.addTarget(surface);
} catch (CameraAccessException e) {
e.printStackTrace();
}
CameraCaptureSession.StateCallback cb = new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
// The camera is already closed
if (d == null) {
return;
}
Log.d(tag(), "Configured!");
// When the session is ready, we start displaying the preview.
captureSession = cameraCaptureSession;
try {
Log.d(tag(), "Surface: " + surface);
previewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
previewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_OFF);
previewRequest = previewRequestBuilder.build();
captureSession.setRepeatingRequest(previewRequest, previewCallback, null);
Log.d(tag(), "Preview started!");
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
Log.e(tag(), "Configure failed!");
}
};
try {
d.createCaptureSession(Arrays.asList(surface, imageReader.getSurface()), cb, null);
Log.d(tag(), "Session started");
}
catch (CameraAccessException e) {
}
}
// ...
My own log output is the same on both devices, with no errors or exceptions thrown; both CameraCaptureSession.CaptureCallback.onCaptureCompleted and TextureView.SurfaceTextureListener.onSurfaceTextureUpdated are called regularly. (There are probably entries from the camera service that tell another story, but then those errors should be reflected by explicit errors thrown in the activity.)
The preview is only live on my Samsung device; on my Nexus it has the right size but stays black.
I had the same issue once.
I used the same code you did.
As you know, viewfinder.isAvailable() returns false.
Finally I found out that the AutoFitTextureView class is not available at API level 23.
So you should create a new TextureView class that works at API level 23.
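For reference, a minimal sketch of such an aspect-ratio-aware TextureView (my sketch, modelled on the AutoFitTextureView from the Camera2Basic sample, not the answerer's code):
public class AspectTextureView extends TextureView {
    private int ratioWidth = 0;
    private int ratioHeight = 0;

    public AspectTextureView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    // Remember the desired aspect ratio and trigger a re-layout
    public void setAspectRatio(int width, int height) {
        ratioWidth = width;
        ratioHeight = height;
        requestLayout();
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        int width = MeasureSpec.getSize(widthMeasureSpec);
        int height = MeasureSpec.getSize(heightMeasureSpec);
        if (ratioWidth == 0 || ratioHeight == 0) {
            setMeasuredDimension(width, height);
        } else if (width < height * ratioWidth / ratioHeight) {
            setMeasuredDimension(width, width * ratioHeight / ratioWidth);
        } else {
            setMeasuredDimension(height * ratioWidth / ratioHeight, height);
        }
    }
}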

Android setFocusArea and Auto Focus

I've been battling with this feature for a couple of days now...
It seems that the camera is ignoring(?) the focus areas that I've defined. Here are snippets of the code:
Focusing:
protected void focusOnTouch(MotionEvent event) {
if (camera != null) {
Rect rect = calculateFocusArea(event.getX(), event.getY());
Parameters parameters = camera.getParameters();
parameters.setFocusMode(Parameters.FOCUS_MODE_AUTO);
parameters.setFocusAreas(Lists.newArrayList(new Camera.Area(rect, 500)));
camera.setParameters(parameters);
camera.autoFocus(this);
}
}
Focus area calculation:
private Rect calculateFocusArea(float x, float y) {
int left = clamp(Float.valueOf((x / getSurfaceView().getWidth()) * 2000 - 1000).intValue(), focusAreaSize);
int top = clamp(Float.valueOf((y / getSurfaceView().getHeight()) * 2000 - 1000).intValue(), focusAreaSize);
return new Rect(left, top, left + focusAreaSize, top + focusAreaSize);
}
A couple of log events from Camera.AutoFocusCallback#onAutoFocus:
Log.d(TAG, String.format("Auto focus success=%s. Focus mode: '%s'. Focused on: %s",
focused,
camera.getParameters().getFocusMode(),
camera.getParameters().getFocusAreas().get(0).rect.toString()));
08-27 11:19:42.240: DEBUG/MyCameraActivity(26268): Auto focus success=true. Focus mode: 'auto'. Focused on: Rect(-109, 643 - -13, 739)
08-27 11:19:55.514: DEBUG/MyCameraActivity(26268): Auto focus success=true. Focus mode: 'auto'. Focused on: Rect(20, 457 - 116, 553)
08-27 11:19:58.037: DEBUG/MyCameraActivity(26268): Auto focus success=true. Focus mode: 'auto'. Focused on: Rect(-159, 536 - -63, 632)
08-27 11:20:00.129: DEBUG/MyCameraActivity(26268): Auto focus success=true. Focus mode: 'auto'. Focused on: Rect(-28, 577 - 68, 673)
Visually it looks like the focus succeeds on the logged area, but then it suddenly loses focus and refocuses on the center (0, 0), or on whatever takes up the bigger part of the SurfaceView.
The focusAreaSize used in the calculation is about 210px (96dp).
Testing on HTC One where Camera.getParameters().getMaxNumFocusAreas() is 1.
Initial focus mode (before first tap) is set to FOCUS_MODE_CONTINUOUS_PICTURE.
Am I doing something wrong here?
Tinkering with Camera.Area rectangle size or weight doesn't show any noticeable effect.
My problem was much simpler :)
All I had to do was cancel the previously requested autofocus. Basically, the correct order of actions is this:
protected void focusOnTouch(MotionEvent event) {
if (camera != null) {
camera.cancelAutoFocus();
Rect focusRect = calculateTapArea(event.getX(), event.getY(), 1f);
Rect meteringRect = calculateTapArea(event.getX(), event.getY(), 1.5f);
Parameters parameters = camera.getParameters();
parameters.setFocusMode(Parameters.FOCUS_MODE_AUTO);
parameters.setFocusAreas(Lists.newArrayList(new Camera.Area(focusRect, 1000)));
if (meteringAreaSupported) {
parameters.setMeteringAreas(Lists.newArrayList(new Camera.Area(meteringRect, 1000)));
}
camera.setParameters(parameters);
camera.autoFocus(this);
}
}
Update
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
...
Parameters p = camera.getParameters();
if (p.getMaxNumMeteringAreas() > 0) {
this.meteringAreaSupported = true;
}
...
}
/**
* Convert touch position x:y to {@link Camera.Area} position -1000:-1000 to 1000:1000.
*/
private Rect calculateTapArea(float x, float y, float coefficient) {
int areaSize = Float.valueOf(focusAreaSize * coefficient).intValue();
int left = clamp((int) x - areaSize / 2, 0, getSurfaceView().getWidth() - areaSize);
int top = clamp((int) y - areaSize / 2, 0, getSurfaceView().getHeight() - areaSize);
RectF rectF = new RectF(left, top, left + areaSize, top + areaSize);
matrix.mapRect(rectF);
return new Rect(Math.round(rectF.left), Math.round(rectF.top), Math.round(rectF.right), Math.round(rectF.bottom));
}
private int clamp(int x, int min, int max) {
if (x > max) {
return max;
}
if (x < min) {
return min;
}
return x;
}
Besides setting:
parameters.setFocusMode(Parameters.FOCUS_MODE_AUTO);
you need to set:
parameters.setFocusMode(Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
if you want real 'live' autofocus. Also, it is good to check the available focus modes:
List<String> focusModes = parameters.getSupportedFocusModes();
LLog.d("focusModes=" + focusModes);
if (focusModes.contains(Parameters.FOCUS_MODE_CONTINUOUS_PICTURE))
parameters.setFocusMode(Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
On the Samsung S6 you must set this with a little delay (~500 ms) after starting the camera preview.
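A hedged sketch of that delay (my illustration, assuming a camera field that is already previewing): re-apply the continuous focus mode roughly half a second after startPreview() using a Handler.
new Handler(Looper.getMainLooper()).postDelayed(new Runnable() {
    @Override
    public void run() {
        Camera.Parameters p = camera.getParameters();
        List<String> modes = p.getSupportedFocusModes();
        if (modes != null && modes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
            p.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
            camera.setParameters(p);
        }
    }
}, 500); // ~500 ms delay, as suggested above for the Samsung S6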
I had this problem today :/
And after hours of struggling, I found the solution!
It's strange, but it appears that setting focus-mode to "macro" right before setting focus-areas solved the problem ;)
params.setFocusMode(Camera.Parameters.FOCUS_MODE_MACRO);
params.setFocusAreas(focusAreas);
mCamera.setParameters(params);
I have a Galaxy S3 with Android 4.1.2.
I hope this will work for you too :)
Use FOCUS_MODE_FIXED:
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
mCamera = Camera.open(mCameraId);
} else {
mCamera = Camera.open();
}
cameraParams = mCamera.getParameters();
// set the focus mode
cameraParams.setFocusMode(Camera.Parameters.FOCUS_MODE_FIXED);
// set Camera parameters
mCamera.setParameters(cameraParams);
Hi, try the code below; copy it and adapt it for yourself:
public class CameraActivity extends AppCompatActivity implements Camera.AutoFocusCallback {
private Camera camera;
private FrameLayout fl_camera_preview;
...
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView( R.layout.camera_activity );
//this View, is lens camera
fl_camera_preview = findViewById( R.id.fl_camera_preview );
Button someButtonCapturePicture = findViewById(R.id.someButtonCapturePicture);
pictureCall = getPictureCallback();
//check camera access
if ( getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA) ) {
if ( safeCameraOpen(0) ) {
cameraPreview = new CameraPreview( this, camera );
fl_camera_preview.addView( cameraPreview );
someButtonCapturePicture.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
camera.takePicture(null, null, pictureCall);
}
});
} else {
Log.w(TAG, "getCameraInstance: Camera is not available (in use or does not exist)." );
}
}
}
private boolean safeCameraOpen(int id) {
boolean qOpened = false;
try {
camera = Camera.open( id );
// set some parameters
Camera.Parameters par = camera.getParameters();
List<Camera.Size> supportedPreviewSizes = par.getSupportedPreviewSizes();
for ( Camera.Size cs : supportedPreviewSizes ) {
if ( cs.height == 720 ) {
par.setPictureSize(cs.width, cs.height);
par.setPreviewSize(cs.width, cs.height);
break;
}
}
camera.setParameters(par);
qOpened = ( camera != null );
} catch (Exception e) {
Log.e(TAG, "safeCameraOpen: failed to open Camera");
e.printStackTrace();
}
return qOpened;
}
public void touchFocusCamera( final Rect touchFocusRect ) {
//Convert touch coordinates, from view width and height, to the -/+1000 range
final Rect targetFocusRect = new Rect(
touchFocusRect.left * 2000/fl_camera_preview.getWidth() - 1000,
touchFocusRect.top * 2000/fl_camera_preview.getHeight() - 1000,
touchFocusRect.right * 2000/fl_camera_preview.getWidth() - 1000,
touchFocusRect.bottom * 2000/fl_camera_preview.getHeight() - 1000);
final List<Camera.Area> focusList = new ArrayList<Camera.Area>();
Camera.Area focusArea = new Camera.Area(targetFocusRect, 1000);
focusList.add(focusArea);
Camera.Parameters para = camera.getParameters();
List<String> supportedFocusModes = para.getSupportedFocusModes();
if ( supportedFocusModes != null &&
supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO) ) {
try {
para.setFocusAreas(focusList);
para.setMeteringAreas(focusList);
camera.setParameters(para);
camera.autoFocus( CameraActivity.this );
} catch (Exception e) {
Log.e(TAG, "handleFocus: " + e.getMessage() );
}
}
}
@Override
public void onAutoFocus(boolean success, Camera camera) {
if ( success ) {
camera.cancelAutoFocus();
}
float focusDistances[] = new float[3];
camera.getParameters().getFocusDistances(focusDistances);
}
/**
* Get Bitmap from camera
* @return picture
*/
private Camera.PictureCallback getPictureCallback() {
Camera.PictureCallback picture = new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
Log.i(TAG, "onPictureTaken: size bytes photo: " + data.length );
}
};
return picture;
}
...
}
//And SurfaceView with Callback
public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback {
private static final String TAG = "CameraPreview";
SurfaceHolder holder;
Camera camera;
public CameraPreview( Context context, Camera _camera ) {
super(context);
camera = _camera;
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
holder = getHolder();
holder.addCallback(this);
// deprecated setting, but required on Android versions prior to 3.0
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, now tell the camera where to draw the preview.
try {
camera.setPreviewDisplay(holder);
camera.startPreview();
} catch (IOException e) {
Log.d(TAG, "Error setting camera preview: " + e.getMessage());
}
}
@Override
public boolean onTouchEvent(MotionEvent event) {
if( event.getAction() == MotionEvent.ACTION_DOWN ) {
// Get the pointer's current position
float x = event.getX();
float y = event.getY();
float touchMajor = event.getTouchMajor();
float touchMinor = event.getTouchMinor();
Rect touchRect = new Rect(
(int)(x - touchMajor/2),
(int)(y - touchMinor/2),
(int)(x + touchMajor/2),
(int)(y + touchMinor/2));
((CameraActivity)getContext())
.touchFocusCamera( touchRect );
}
return true;
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
// If your preview can change or rotate, take care of those events here.
// Make sure to stop the preview before resizing or reformatting it.
if (this.holder.getSurface() == null) {
// preview surface does not exist
return;
}
// stop preview before making changes
try {
camera.stopPreview();
} catch (Exception e) {
// ignore: tried to stop a non-existent preview
}
// set preview size and make any resize, rotate or
// reformatting changes here
// start preview with new settings
try {
camera.setPreviewDisplay(this.holder);
camera.startPreview();
} catch (Exception e) {
Log.d(TAG, "Error starting camera preview: " + e.getMessage());
}
}
...
}
