I'm working with the new Camera2 API on a Samsung S5. The supported hardware level this device is reporting is LEGACY, which is fine.
However, I can't seem to get auto-focus to work on this device. The request to trigger auto-focus looks like this:
previewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
previewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
state = STATE_PREVIEW;
try {
captureSession.setRepeatingRequest(previewRequestBuilder.build(), captureCallback, backgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
After the request is sent, the result of the request is always CONTROL_AF_STATE_ACTIVE_SCAN and occasionally CONTROL_AF_STATE_NOT_FOCUSED_LOCKED.
The strange thing is that, when the state is CONTROL_AF_STATE_NOT_FOCUSED_LOCKED, the auto-focus goes back into the CONTROL_AF_STATE_ACTIVE_SCAN state for a while and then back to CONTROL_AF_STATE_NOT_FOCUSED_LOCKED, resulting in an infinite focus loop. According to the docs, when the state is CONTROL_AF_STATE_NOT_FOCUSED_LOCKED...
The lens will remain stationary until the AF mode (android.control.afMode) is changed or a new AF trigger is sent to the camera device (android.control.afTrigger).
I'm wondering if this discrepancy is because the hardware level is LEGACY and whether I should go back to using the deprecated Camera API, but that seems crazy for such a prevalent feature as auto-focus.
Are there any recommendations on how to treat devices that report LEGACY?
I branched from Google's Camera2Basic sample and changed it to use CaptureRequest.CONTROL_AF_MODE_AUTO instead of CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE.
You can take the project from GitHub and test it: https://github.com/pinhassi/android-Camera2Basic
Or just add this to Camera2BasicFragment:
private static final long LOCK_FOCUS_DELAY_ON_FOCUSED = 5000;
private static final long LOCK_FOCUS_DELAY_ON_UNFOCUSED = 1000;
private Integer mLastAfState = null;
private Handler mUiHandler = new Handler(); // UI handler
private Runnable mLockAutoFocusRunnable = new Runnable() {
@Override
public void run() {
lockAutoFocus();
}
};
public void lockAutoFocus() {
try {
// This is how to tell the camera to lock focus.
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
CaptureRequest captureRequest = mPreviewRequestBuilder.build();
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, null); // prevent CONTROL_AF_TRIGGER_START from being sent over and over again
mCaptureSession.capture(captureRequest, mCaptureCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
/**
 * @return the minimum focus distance reported by the camera, or 0 if unavailable.
 */
private float getMinimumFocusDistance() {
if (mCameraId == null)
return 0;
Float minimumLens = null;
try {
CameraManager manager = (CameraManager) getActivity().getSystemService(Context.CAMERA_SERVICE);
CameraCharacteristics c = manager.getCameraCharacteristics(mCameraId);
minimumLens = c.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE);
} catch (Exception e) {
Log.e(TAG, "isHardwareLevelSupported Error", e);
}
if (minimumLens != null)
return minimumLens;
return 0;
}
/**
 * @return true if auto-focus is supported on this device.
 */
private boolean isAutoFocusSupported() {
return isHardwareLevelSupported(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) || getMinimumFocusDistance() > 0;
}
// Returns true if the device supports the required hardware level, or better.
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private boolean isHardwareLevelSupported(int requiredLevel) {
boolean res = false;
if (mCameraId == null)
return res;
try {
CameraManager manager = (CameraManager) getActivity().getSystemService(Context.CAMERA_SERVICE);
CameraCharacteristics cameraCharacteristics = manager.getCameraCharacteristics(mCameraId);
int deviceLevel = cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
switch (deviceLevel) {
case CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_3:
Log.d(TAG, "Camera support level: INFO_SUPPORTED_HARDWARE_LEVEL_3");
break;
case CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL:
Log.d(TAG, "Camera support level: INFO_SUPPORTED_HARDWARE_LEVEL_FULL");
break;
case CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY:
Log.d(TAG, "Camera support level: INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY");
break;
case CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED:
Log.d(TAG, "Camera support level: INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED");
break;
default:
Log.d(TAG, "Unknown INFO_SUPPORTED_HARDWARE_LEVEL: " + deviceLevel);
break;
}
if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
res = requiredLevel == deviceLevel;
} else {
// deviceLevel is not LEGACY, can use numerical sort
res = requiredLevel <= deviceLevel;
}
} catch (Exception e) {
Log.e(TAG, "isHardwareLevelSupported Error", e);
}
return res;
}
Then, add this to the STATE_PREVIEW block:
case STATE_PREVIEW: {
// We have nothing to do when the camera preview is working normally.
// TODO: handle auto focus
Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
if (afState != null && !afState.equals(mLastAfState)) {
switch (afState) {
case CaptureResult.CONTROL_AF_STATE_INACTIVE:
Log.d(TAG, "CaptureResult.CONTROL_AF_STATE_INACTIVE");
lockAutoFocus();
break;
case CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN:
Log.d(TAG, "CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN");
break;
case CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED:
Log.d(TAG, "CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED");
mUiHandler.removeCallbacks(mLockAutoFocusRunnable);
mUiHandler.postDelayed(mLockAutoFocusRunnable, LOCK_FOCUS_DELAY_ON_FOCUSED);
break;
case CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
mUiHandler.removeCallbacks(mLockAutoFocusRunnable);
mUiHandler.postDelayed(mLockAutoFocusRunnable, LOCK_FOCUS_DELAY_ON_UNFOCUSED);
Log.d(TAG, "CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED");
break;
case CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
mUiHandler.removeCallbacks(mLockAutoFocusRunnable);
//mUiHandler.postDelayed(mLockAutoFocusRunnable, LOCK_FOCUS_DELAY_ON_UNFOCUSED);
Log.d(TAG, "CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED");
break;
case CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN:
Log.d(TAG, "CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN");
break;
case CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED:
mUiHandler.removeCallbacks(mLockAutoFocusRunnable);
//mUiHandler.postDelayed(mLockAutoFocusRunnable, LOCK_FOCUS_DELAY_ON_FOCUSED);
Log.d(TAG, "CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED");
break;
}
}
mLastAfState = afState;
break;
}
And replace all occurrences of:
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
With:
if (isAutoFocusSupported())
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_AUTO);
else
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
I think the issue is with your setRepeatingRequest. The AF trigger should only be sent once, but setRepeatingRequest re-sends it with every frame, restarting the scan each time. Try sending the trigger with capture instead:
previewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
previewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
state = STATE_PREVIEW;
try {
mCaptureSession.capture(mPreviewRequestBuilder.build(), mPreCaptureCallback, mBackgroundHandler);
} catch (Exception e) {e.printStackTrace();}
I experience the same issue with a Galaxy Note 4 running Android 5.1.1 - while the same code works fine on a variety of other Android devices. There have been reports of similar issues with Galaxy-S4/S5/S6.
http://developer.samsung.com/forum/board/thread/view.do?boardName=SDK&messageId=289824&startId=zzzzz~
https://www.youtube.com/watch?v=lnMoYZwVaFM
So to answer your question: this is most likely a bug in Samsung's Camera2 implementation, which unfortunately seems to be of very low quality.
The Samsung S5 with autofocus returned INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY, which means it only supports the Camera2 API in legacy (compatibility) mode.
I have the filter below for using the camera in my application.
if (Build.VERSION.SDK_INT >= 21 && isDeviceCompatibleOfCamera2()) {
// Use camera2
} else {
// Use old camera
}
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public boolean isDeviceCompatibleOfCamera2() {
try {
CameraManager manager = (CameraManager) getActivity().getSystemService(Context.CAMERA_SERVICE);
String backCameraId = manager.getCameraIdList()[0];
CameraCharacteristics backCameraInfo = manager.getCameraCharacteristics(backCameraId);
int level = backCameraInfo.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
return level == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
} catch (CameraAccessException e) {
ETLog.d(TAG, "Device not compatible of camera2 api" + e);
}
return false;
}
Related
I have a Camera App developed for Android 26 SDK. I have been using it happily with a Motorola G5 and G6 but when I move to a Motorola G7 the app crashes when I press the button to take a picture in my app.
The G7 is running Android 9. I have another Android 9 phone a Samsung S10 plus. The S10 plus does not crash when I press the button for taking a picture.
While debugging, I noticed that the G7 doesn't call ImageReader.OnImageAvailableListener while the S10 does. Looking at the code, this is where the image is saved for use later on in CameraCaptureSession.CaptureCallback. The callback expects bytes to be populated and crashes when it isn't (I haven't included the stack trace because it's not very helpful, but I can if you would like to see it).
I can get the G7 to save the image if I run it slowly through debug on 'some' occasions.
So I have a button that calls the function onImageCaptureClick(). Inside, it does a bunch of stuff, but one of the things it does is create an ImageReader.OnImageAvailableListener. The OnImageAvailableListener saves the image and populates a variable, bytes, from the image buffer. This listener is attached to my reader using reader.setOnImageAvailableListener(readerListener, null), but it is never invoked. When I get into the CaptureCallback, the class variable bytes is not populated and the app crashes.
Do you have any idea where I would look to solve this?
protected void onImageCaptureClick() {
if (null == mCameraDevice) {
logger.debug("null == mCameraDevice");
Log.e(TAG, "cameraDevice is null");
return;
}
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraDevice.getId());
Size[] jpegSizes = null;
if (characteristics != null) {
jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.JPEG);
}
int width = 640;
int height = 480;
if (jpegSizes != null && 0 < jpegSizes.length) {
width = jpegSizes[0].getWidth();
height = jpegSizes[0].getHeight();
}
ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
List<Surface> outputSurfaces = new ArrayList<>(2);
outputSurfaces.add(reader.getSurface());
outputSurfaces.add(new Surface(mTextureView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
if (mFlashMode == FLASH_MODE_OFF) {
captureBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
logger.debug("FLASH OFF");
}
if (mFlashMode == CONTROL_AE_MODE_ON) {
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE,
CaptureRequest.CONTROL_AE_MODE_ON);
captureBuilder.set(CaptureRequest.FLASH_MODE,
CaptureRequest.FLASH_MODE_TORCH);
logger.debug("FLASH ON");
}
if (mFlashMode == CONTROL_AE_MODE_ON_AUTO_FLASH) {
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
captureBuilder.set(CaptureRequest.FLASH_MODE,
CaptureRequest.FLASH_MODE_OFF);
logger.debug("FLASH AUTO");
}
captureBuilder.set(CaptureRequest.SCALER_CROP_REGION, zoom);
int rotation = getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
final File file = new File(_pictureUri.getPath());
logger.debug("OnImageCaptureClick: _pictureUri is: " + _pictureUri.getPath());
// ************************************
// this listener is not used on the G7,
// and so the image isn't saved.
// ************************************
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = null;
try {
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
bytes = new byte[buffer.capacity()];
buffer.get(bytes);
logger.debug("onImageCaptureClick, the filesize to save is: " + bytes.toString());
save();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
if (image != null) {
image.close();
}
}
}
private void save() throws IOException {
OutputStream output = null;
try {
output = new FileOutputStream(file);
output.write(bytes);
} finally {
if (null != output) {
output.close();
}
}
}
};
// ********************************************************
// the reader sets the listener here but it is never called
// and when I get in to the CaptureCallback the BitmapUtils
// expects bytes to be populated and crashes the app
// ********************************************************
reader.setOnImageAvailableListener(readerListener, null);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
try {
BitmapUtils.addTimeStampAndRotate(_pictureUri, bytes);
Intent intent = new Intent(CameraActivity.this, CameraReviewPhotoActivity.class);
intent.putExtra(MediaStore.EXTRA_OUTPUT, _pictureUri);
startActivityForResult(intent, CameraActivity.kRequest_Code_Approve_Image);
} catch (IOException e) {
e.printStackTrace();
} catch (ImageReadException e) {
e.printStackTrace();
} catch (ImageWriteException e) {
e.printStackTrace();
}
}
};
mCameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
session.capture(captureBuilder.build(), captureListener, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
Log.w(TAG, "Failed to configure camera");
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
} finally {
takePictureButton.setEnabled(false);
mTextureView.setEnabled(false);
}
The API makes no guarantee about the order of onCaptureCompleted and OnImageAvailableListener. They may arrive in arbitrary order, depending on the device, capture settings, the load on the device, or even the particular OS build you have.
Please don't make any assumptions about it.
Instead, if you need both callbacks to fire before you process something, then wait for both to happen before you move forward. For example, check if the other callback has fired in each callback, and if so, call the method to do the processing.
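For example, a minimal sketch of that idea (the field and method names here are hypothetical, not taken from the code above) might look like this:
private final Object captureLock = new Object();
private boolean imageSaved = false;
private boolean captureCompleted = false;

// Call this at the end of onImageAvailable(), after the JPEG bytes have been saved.
private void onImageSaved() {
    synchronized (captureLock) {
        imageSaved = true;
        if (captureCompleted) {
            processCapturedImage(); // hypothetical method that manipulates the saved image
        }
    }
}

// Call this from onCaptureCompleted().
private void onCaptureDone() {
    synchronized (captureLock) {
        captureCompleted = true;
        if (imageSaved) {
            processCapturedImage();
        }
    }
}
Whichever callback fires second triggers the processing, so the order no longer matters.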
I think I have found the solution to this.
I have the following phones:
Samsung S10 Plus
Motorola G7
Motorola G6
My app works on the S10 and the G6.
The S10 and G6 both call the OnImageAvailableListener function before the onCaptureCompleted callback. The G7, however, calls them the other way around: onCaptureCompleted, then OnImageAvailableListener.
According to https://proandroiddev.com/understanding-camera2-api-from-callbacks-part-1-5d348de65950 the correct way is onCaptureCompleted then OnImageAvailableListener.
In my code I am assuming that OnImageAvailableListener has saved the image and then OnCaptureCompleted tries to manipulate it, which causes the crash.
Looking at the INFO_SUPPORTED_HARDWARE_LEVEL of each device, I get the following reported values (note that the integer constants are LIMITED = 0, FULL = 1, LEGACY = 2 and LEVEL_3 = 3, so the numbers don't simply order capability).
Samsung S10 Plus reports device level support level 1
Motorola G7 reports device level support level 3
Motorola G6 reports device level support level 2
My assumption at this point is that the events fire in a different order when you support the android-camera2 API at Level 3 compared to other levels.
Hope this helps
I'm working on an Android camera app that needs to use a fixed (manual) focus, and always uses the flash. I'm having some issues that seem to be with the flash timing. The flash always fires and an image is always acquired, but sometimes the flash doesn't actually illuminate the captured frame. Some frames have flash, some are overexposed, and some are dark; basically it's inconsistent and unpredictable.
I based my code off the Camera2Basic example. I think I've shown all the relevant parts here:
My preview request builder has the following setup
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF);
float minimumLens = mCameraCharacteristics.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE);
mPreviewRequestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, minimumLens);
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AWB_MODE,CaptureRequest.CONTROL_AWB_MODE_OFF);
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE,CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
mPreviewRequestBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_OFF);
Then the actual sequence that acquires the pictures (almost straight from Camera2Basic) is:
private void takePicture() {
runPrecaptureSequence();
}
private CameraCaptureSession.CaptureCallback mCaptureCallback = new CameraCaptureSession.CaptureCallback() {
private void process(CaptureResult result) {
switch (mState) {
case STATE_PREVIEW: {
break;
}
case STATE_WAITING_PRECAPTURE: {
Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
if (aeState == null ||
aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE ||
aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED) {
mState = STATE_CAPTURE;
}
break;
}
case STATE_CAPTURE: {
// CONTROL_AE_STATE can be null on some devices
Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
mState = STATE_PICTURE_TAKEN;
captureStillPicture();
}
break;
}
}
}
@Override
public void onCaptureProgressed(**ARGS**) {
process(partialResult);
}
@Override
public void onCaptureCompleted(**ARGS**) {
process(result);
}
};
private void runPrecaptureSequence() {
try {
    mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
mState = STATE_WAITING_PRECAPTURE;
mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void captureStillPicture() {
try {
final Activity activity = getActivity();
if (null == activity || null == mCameraDevice) {
return;
}
final CaptureRequest.Builder captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(mImageReader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF);
captureBuilder.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_OFF);
float minimumLens = mCameraCharacteristics.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE);
captureBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, minimumLens);
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE,CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
captureBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_OFF);
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getOrientation(rotation));
mFileName = getFileNameFromTime() + ".jpg";
CameraCaptureSession.CaptureCallback CaptureCallback
= new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                               @NonNull CaptureRequest request,
                               @NonNull TotalCaptureResult result) {
resumePreview();
}
};
mCaptureSession.stopRepeating();
mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
Saving the image takes place in the ImageReader onImageAvailableListener call and works just fine.
It does seem like the flash is firing before the image is acquired, so I tried the suggestion in this answer, but the FLASH_FIRED conditional suggested never triggered.
Can anybody with better familiarity with Camera2 than me see where I'm screwing up?
I have seen the code of Camera2Basic https://github.com/googlearchive/android-Camera2Basic and its newer Java version https://github.com/android/camera-samples/tree/master/Camera2BasicJava.
So basically there are three places in the code where the flash is set (I am talking about the newer Java version):
1) private void createCameraPreviewSession() function
2) private void captureStillPicture()
3) private void unlockFocus()
1) The createCameraPreviewSession() function gets called when the preview is about to be displayed (so if you want the flash on right from app start, call this function accordingly).
2) The captureStillPicture() function gets called when an HD (full-resolution) picture is about to be captured.
3) The unlockFocus() function gets called after you have captured an image and want to unlock focus.
So if the flash blinks or the image gets washed out, you may be setting torch mode in only one of the last two functions.
Set the flash mode consistently in the last two functions, and try to discard the first frame you capture just after starting the flash.
I was setting the flash like this:
int flashFlag=1;
private void setAutoFlash(CaptureRequest.Builder requestBuilder) {
if (mFlashSupported) {
if (flashFlag==1)
{
requestBuilder.set(CaptureRequest.CONTROL_AE_MODE,CaptureRequest.CONTROL_AE_MODE_ON);
requestBuilder.set(CaptureRequest.FLASH_MODE,CaptureRequest.FLASH_MODE_TORCH);
}
else
{
requestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
CaptureRequest.CONTROL_AE_MODE_ON);
}
}
}
I was making flashFlag a global variable, setting it in both places together, and rejecting the first flash HD frame because it usually gets washed out.
Also, do not try to turn on the flash without capturing an HD picture, because it will lock you in focus.
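As a rough sketch, keeping the last two functions consistent in the Camera2Basic sample looks something like this (setAutoFlash, captureBuilder and mPreviewRequestBuilder are names from that sample; the flashFlag logic is the version shown above):
// In captureStillPicture(): apply the same flashFlag-driven AE/flash settings to the still capture.
setAutoFlash(captureBuilder);

// In unlockFocus(): restore the same settings before resuming the repeating preview request.
setAutoFlash(mPreviewRequestBuilder);
// And, as noted above, consider discarding the very first frame captured right after the flash turns on.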
Add the following three lines to the Camera2Basic Sample:
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_OFF);
mPreviewRequestBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, 4000);
mPreviewRequestBuilder.set(CaptureRequest.FLASH_MODE, FLASH_MODE_TORCH);
When the flash is activated and a repeating request is started, the manual settings are overwritten by some kind of default settings (sensor sensitivity: 100), even though the request clearly states the sensor sensitivity should be 4000.
I verified this with these two lines in the onCaptureCompleted method:
Log.d(TAG, "request: "+request.get(CaptureRequest.SENSOR_SENSITIVITY));
Log.d(TAG, "result: "+result.get(CaptureResult.SENSOR_SENSITIVITY));
The code snippet below fails to create a CaptureRequest for a preview with a fixed focus distance. The image displayed in the preview is always focused at infinity, even though the report from the TotalCaptureResult says otherwise.
Here is the code snippet:
mPreviewBuilder = mCameraDevice.createCaptureRequest(SCameraDevice.TEMPLATE_PREVIEW);
mPreviewBuilder.set(SCaptureRequest.LENS_FOCUS_DISTANCE, 9.5f);
mPreviewBuilder.set(SCaptureRequest.CONTROL_AF_MODE, SCaptureRequest.CONTROL_AF_MODE_OFF);
My device is a Samsung Galaxy S7 which reports the following properties for the back camera:
CamId: 0 LensFace: BACK HwdSupportLevel: FULL SensorOrientation: 90 LensCalibration: CALIBRATED LensMinFocusDist: 10.0
Here is my log captured during the onCaptureCompleted() callback:
AF_STATE: 0 Lens Focus Distance: 9.555555 LENS_STATE: 0 HAS FOCUS RANGE: true Focus Range Min: 9.762765, Max: 9.348346
I have tried this with the plain-old Android Camera2 API as well as with the Samsung SDK (found here: http://developer.samsung.com/galaxy/camera)
Samsung's own camera app as well as Camera FV-5 support this functionality without any issue.
How do I get this to work?
Adding Some More Code For Context
/**
* Start the camera preview.
*/
private void startPreview() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
return;
}
try {
closePreviewSession();
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mPreviewBuilder = mCameraDevice.createCaptureRequest(SCameraDevice.TEMPLATE_PREVIEW);
//mPreviewBuilder.set(SCaptureRequest.CONTROL_CAPTURE_INTENT, SCaptureRequest.CONTROL_CAPTURE_INTENT_MANUAL);
//mPreviewBuilder.set(SCaptureRequest.CONTROL_MODE, SCaptureRequest.CONTROL_MODE_OFF);
mPreviewBuilder.set(SCaptureRequest.CONTROL_AF_MODE, SCaptureRequest.CONTROL_AF_MODE_OFF);
mPreviewBuilder.set(SCaptureRequest.CONTROL_AE_MODE, SCaptureRequest.CONTROL_AE_MODE_ON);
mPreviewBuilder.set(SCaptureRequest.LENS_FOCUS_DISTANCE, 9.5f);
Surface previewSurface = new Surface(texture);
mPreviewBuilder.addTarget(previewSurface);
mCameraDevice.createCaptureSession(Arrays.asList(previewSurface), new SCameraCaptureSession.StateCallback() {
@Override
public void onConfigured(SCameraCaptureSession cameraCaptureSession) {
mPreviewSession = cameraCaptureSession;
updatePreview();
}
@Override
public void onConfigureFailed(SCameraCaptureSession cameraCaptureSession) {
Toast.makeText(getApplicationContext(),"Failed", Toast.LENGTH_SHORT).show();
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
/**
* Update the camera preview. {@link #startPreview()} needs to be called in advance.
*/
private void updatePreview() {
if (null == mCameraDevice) {
return;
}
try {
setUpCaptureRequestBuilder(mPreviewBuilder);
HandlerThread thread = new HandlerThread("CameraPreview");
thread.start();
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), mCaptureCallBack, mBackgroundHandler);
Log.i(TAG, "New Session values, control mode " + mPreviewBuilder.get(SCaptureRequest.CONTROL_MODE) +
", AF mode: " + mPreviewBuilder.get(SCaptureRequest.CONTROL_AF_MODE) +
", Focus value: " + mPreviewBuilder.get(SCaptureRequest.LENS_FOCUS_DISTANCE));
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
Did you add a surface to your mPreviewBuilder?
Surface surface = new Surface(texture);
//Add surfaces
mPreviewBuilder.addTarget(surface);
surfaces.add(surface);
Second, set your AF_MODE to OFF before setting the LENS_FOCUS_DISTANCE, and finally update your session with a setRepeatingRequest on your CaptureSession. With that it should work.
mCaptureSession.setRepeatingRequest(mPreviewBuilder.build(),
callback, mHandler);
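Putting those steps together, a minimal ordering sketch (using the builder and session names from the question, with the plain Camera2 classes; substitute the Samsung SCamera* equivalents if you are on that SDK) could be:
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewBuilder.addTarget(previewSurface);                      // add the preview surface first
mPreviewBuilder.set(CaptureRequest.CONTROL_AF_MODE,
        CaptureRequest.CONTROL_AF_MODE_OFF);                    // turn AF off...
mPreviewBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, 9.5f);  // ...then set the fixed focus distance
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), mCaptureCallBack, mBackgroundHandler);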
Check the values of your CaptureRequest with a log to verify that everything is right and nothing is overriding your AF settings:
Log.i(TAG, "New Session values, control mode " + mPreviewRequestBuilder.get(CaptureRequest.CONTROL_MODE) +
", AF mode: " + mPreviewRequestBuilder.get(CaptureRequest.CONTROL_AF_MODE) +
", Focus value: " + mPreviewRequestBuilder.get(CaptureRequest.LENS_FOCUS_DISTANCE));
I have a problem regarding the camera in the most recent Marshmallow build, more specifically the flashlight.
On any pre-Marshmallow version, all I needed to do to turn the flash on/off was the following:
private void turnFlashOn(final Camera camera, int flashLightDurationMs) {
if (!isFlashOn()) {
final List<String> supportedFlashModes = camera.getParameters().getSupportedFlashModes();
if (supportedFlashModes != null && supportedFlashModes.contains(Camera.Parameters.FLASH_MODE_TORCH)) {
mParams.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH);
camera.setParameters(mParams);
}
}
}
and
private void turnFlashOff(Camera camera) {
if (camera != null) {
final List<String> supportedFlashModes = camera.getParameters().getSupportedFlashModes();
if (supportedFlashModes != null && supportedFlashModes.contains(Camera.Parameters.FLASH_MODE_OFF)) {
mParams.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
camera.setParameters(mParams);
}
}
}
Unfortunately, Marshmallow devices began to crash in the wild. Somehow camera.getParameters() and camera.setParameters() began to fail with messages such as:
RuntimeException: getParameters failed (empty parameters)
RuntimeException: setParameters failed
I tried starting and stopping the preview before getting the parameters, which no longer throws errors. However, the preview is not resumed when I call camera.startPreview().
I fear releasing the camera and reopening it is out of the question as this takes some seconds and would produce a bad experience.
Any suggestions on how to turn the flashlight on/off in Marshmallow reliably?
Google introduced torch mode in Android 6 (Marshmallow).
If your purpose is only to turn the flash on/off, the code below can help you with that:
private static void handleActionTurnOnFlashLight(Context context){
try{
CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
String[] list = manager.getCameraIdList();
manager.setTorchMode(list[0], true);
}
catch (CameraAccessException cae){
Log.e(TAG, cae.getMessage());
cae.printStackTrace();
}
}
private static void handleActionTurnOffFlashLight(Context context){
try{
CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
manager.setTorchMode(manager.getCameraIdList()[0], false);
}
catch (CameraAccessException cae){
Log.e(TAG, cae.getMessage());
cae.printStackTrace();
}
}
All you have to do is get the camera ID list; camera ID zero (0) is usually the primary (back) camera whose flash you want to turn on/off. Simply pass the camera ID to the setTorchMode API with a boolean value to turn it on or off.
Do note that this piece of code will only work on Android 6 and above, so you need to check the device OS version and, based on that, select which APIs to call for pre-Marshmallow devices.
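A minimal sketch of that version dispatch (reusing the two handleAction... methods above; the helper name setFlash is hypothetical):
public static void setFlash(Context context, boolean on) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
        // Android 6.0+: use CameraManager.setTorchMode() as shown above
        if (on) {
            handleActionTurnOnFlashLight(context);
        } else {
            handleActionTurnOffFlashLight(context);
        }
    } else {
        // Pre-Marshmallow: fall back to the deprecated Camera API
        // (Camera.open(), Parameters.FLASH_MODE_TORCH / FLASH_MODE_OFF, setParameters()).
    }
}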
Kindly mark this as solution if it solves your problem.
As Saurabh7474 has answered, checking the Android version and using the setTorchMode API is correct.
However, you can also use params.setFlashMode(...) on Marshmallow by calling
mCamera.setPreviewTexture(new SurfaceTexture(100))
after Camera.open(...) and before calling mCamera.startPreview():
try {
Log.i(TAG, "getCamera");
int requestedCameraId = getIdForRequestedCamera(mFacing);
if (requestedCameraId == -1) {
throw new RuntimeException("Could not find requested camera.");
}
mCamera = Camera.open(requestedCameraId);
mCamera.setPreviewTexture(new SurfaceTexture(DUMMY_TEXTURE_NAME));
params = mCamera.getParameters();
} catch (RuntimeException e) {
Log.e("Failed to Open. Error:", e.getMessage());
} catch (IOException e) {
Log.e("Failed to Open. can't setPreviewTexture:", e.getMessage());
}
Then, whenever you want, you can use:
mParams.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH);
camera.setParameters(mParams);
My answer is based on the CameraSource example from the Vision API, which uses params.setFlashMode(...) and works on API 23 and above.
If you decide to inspect CameraSource, the key method that solves the same problem is start(), at line 312:
https://github.com/googlesamples/android-vision/blob/master/visionSamples/barcode-reader/app/src/main/java/com/google/android/gms/samples/vision/barcodereader/ui/camera/CameraSource.java
You can find the reason here:
https://stackoverflow.com/a/33333046/4114846
Update your app to check for permissions at runtime. You have to have android.permission.CAMERA granted. Including it in the manifest of your app is not going to grant it to you on Marshmallow. You'll need to detect whether or not it has been granted and request it.
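A minimal sketch of that runtime check (using the support library's ContextCompat/ActivityCompat helpers; the request code is arbitrary):
private static final int REQUEST_CAMERA_PERMISSION = 200; // arbitrary request code

private void ensureCameraPermission(Activity activity) {
    if (ContextCompat.checkSelfPermission(activity, Manifest.permission.CAMERA)
            != PackageManager.PERMISSION_GRANTED) {
        // Not granted yet: ask the user. The result arrives in onRequestPermissionsResult().
        ActivityCompat.requestPermissions(activity,
                new String[]{Manifest.permission.CAMERA},
                REQUEST_CAMERA_PERMISSION);
    }
}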
Building off of Saurabh7474's answer, you can toggle Marshmallow's torchMode by registering a torchCallback:
final CameraManager mCameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
CameraManager.TorchCallback torchCallback = new CameraManager.TorchCallback() {
@Override
public void onTorchModeUnavailable(String cameraId) {
super.onTorchModeUnavailable(cameraId);
}
@Override
public void onTorchModeChanged(String cameraId, boolean enabled) {
super.onTorchModeChanged(cameraId, enabled);
boolean currentTorchState = enabled;
try {
mCameraManager.setTorchMode(cameraId, !currentTorchState);
} catch (CameraAccessException e){}
}
};
mCameraManager.registerTorchCallback(torchCallback, null);//fires onTorchModeChanged upon register
mCameraManager.unregisterTorchCallback(torchCallback);
I am integrating barcode scanning functionality using ZXing in Android; my app includes the ability to turn the flashlight on/off with a button press.
When the flash button is pressed, it sets the flash mode to torch and applies that as a camera parameter. I get a "setParameters failed" exception.
Here is my code.
Parameters p = camera.getParameters();
List<String> supportedFlashModes =p.getSupportedFlashModes();
if (active)
{
if(supportedFlashModes!= null)
{
if(supportedFlashModes.contains(Parameters.FLASH_MODE_TORCH))
{
p.setFlashMode(Parameters.FLASH_MODE_TORCH);
}
}
CaptureActivity.flashLightON=true;
}
else
{
p.setFlashMode(Parameters.FLASH_MODE_OFF);
}
camera.setParameters(p);
I got the following exception:
06-07 12:15:26.107: E/AndroidRuntime(24642): FATAL EXCEPTION: main
06-07 12:15:26.107: E/AndroidRuntime(24642): java.lang.RuntimeException:
setParameters failed
06-07 12:15:26.107: E/AndroidRuntime(24642): at
android.hardware.Camera.native_setParameters(Native Method)
Please advise me on how to resolve this issue.
I had the same problem with my Google Nexus One.
The problem was solved for me by canceling autofocus, setting the parameters, and then restarting autofocus:
camera.cancelAutoFocus();
camera.setParameters(parameters);
camera.autoFocus(autoFocusCallback);
Unfortunately, it's not a universal workaround, since it does not work for the S3 or the Galaxy Nexus, which totally lose it... the S-G starts to flash and the S3 stops autofocusing.
I solved the issue for the S3 by stopping the preview and then restarting it after the parameters were set.
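A minimal sketch of that stop/set/start sequence (old Camera API; camera, parameters and autoFocusCallback are assumed to already exist):
camera.stopPreview();                // pause the preview so setParameters() can't collide with autofocus
camera.setParameters(parameters);    // apply the new flash mode
camera.startPreview();               // resume the preview
camera.autoFocus(autoFocusCallback); // re-arm autofocus if you were using it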
I had the same problem: turning on the flashlight while the camera is running causes an autofocus collision when the parameters are set at the same time (especially when your autofocus interval is low).
Paste this right before enabling/disabling the flashlight; it will slow down the enabling event, but hey, no crash (not 100% crash-proof though).
// sleep time should be long; 3000 ms should be enough to prevent the crash on some devices, 2000 may be too little (still crashes on Sony Xperia devices) - I have no idea why this works that way :D
try {
Thread.sleep(3000); // sleep() is static; calling it on currentThread() is misleading
}
catch (InterruptedException e) {
e.printStackTrace();
}
Edit:
A better way to solve this problem is to put your setParameters call in a loop and catch the runtime exception each time. Exit the loop when there is no exception, or when your (optional) loop counter reaches its maximum value.
final int MAX_TRIES = 100;
boolean success = false;
int triesCounter = 0;
while (!success) {
try {
triesCounter++;
if (triesCounter > MAX_TRIES) {
success = true;
}
parameters = camera.getParameters();
if (parameters != null) {
parameters.setFlashMode(Parameters.FLASH_MODE_OFF);
camera.setParameters(parameters);
}
success = true;
} catch (Exception exception) {
}
}
Some devices do not support this and give such problems. You can refer to the link below to get a clearer idea about it:
How to turn on camera flash light programmatically in Android?
One solution that works is to create a queue of camera parameters. Setting the torch would add a torch parameter to the queue.
Inside the onAutoFocus callback, call a function that iterates through all of the queue items and commits them. This way you are guaranteed that you are not autofocusing.
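A minimal sketch of that queue idea (names here are hypothetical; the old Camera API is assumed):
// Pending parameter changes, committed only from the autofocus callback.
private final Queue<Camera.Parameters> pendingParams = new ConcurrentLinkedQueue<>();

public void requestTorch(Camera camera, boolean on) {
    Camera.Parameters p = camera.getParameters();
    p.setFlashMode(on ? Camera.Parameters.FLASH_MODE_TORCH
                      : Camera.Parameters.FLASH_MODE_OFF);
    pendingParams.add(p); // don't apply now; wait until autofocus is idle
}

private final Camera.AutoFocusCallback autoFocusCallback = new Camera.AutoFocusCallback() {
    @Override
    public void onAutoFocus(boolean success, Camera camera) {
        // Safe point: no autofocus scan is running, so commit everything queued.
        Camera.Parameters p;
        while ((p = pendingParams.poll()) != null) {
            camera.setParameters(p);
        }
        // ...re-schedule the next autofocus here if needed
    }
};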
turning the flashlight on with the camera on/torch (FLASH_MODE_TORCH) causes some autofocus collision when setting the parameters at the same time (especially when your autofocus interval is low).
I don't like the thread workaround, so I fixed it like this:
...
private Boolean _flashOn = null;
...
private Runnable doAutoFocus = new Runnable() {
public void run() {
if (previewing) {
if (_flashOn != null) {
Parameters p = mCamera.getParameters();
if (_flashOn) {
p.setFlashMode(Parameters.FLASH_MODE_TORCH);
}else{
p.setFlashMode(Parameters.FLASH_MODE_OFF);
}
mCamera.setParameters(p);
_flashOn = null;
}
mCamera.autoFocus(autoFocusCB);
}
}
};
No RuntimeException now.
But there is still a FLASH_MODE_TORCH + autoFocus bug on some Android devices, e.g. Motorola Milestone / Samsung I9100G / etc...
See also a declined issue for Android: https://code.google.com/p/android/issues/detail?id=14360
BTW. ZBar is faster than ZXing :)
I wanted to turn the flashlight on at the start of scanning and solved it with this.
I changed the openDriver method in com.google.zxing.client.android.camera:
Camera theCamera = camera;
if (theCamera == null) {
theCamera = Camera.open();
if (theCamera == null) {
throw new IOException();
}
final Parameters p = theCamera.getParameters();
p.setFlashMode(Parameters.FLASH_MODE_TORCH);
theCamera.setParameters(p);
camera = theCamera;
}
Then I removed this from com.google.zxing.client.android.camera.CameraConfigurationManager:
initializeTorch(parameters, prefs);
Finally, I changed AndroidManifest.xml:
<uses-feature android:name="android.hardware.camera.flash" android:required="true"/>
Try out this code. It worked fine for me.
private void turnOnFlashLight(){
if (!pm.hasSystemFeature(PackageManager.FEATURE_CAMERA))
{
//Check for Device Camera
Toast.makeText(this, getString(R.string.no_device_camera_msg), Toast.LENGTH_SHORT).show();
flashLightControl.setChecked(false);
return;
}else if (!pm.hasSystemFeature(PackageManager.FEATURE_CAMERA_FLASH))
{
//Check for Camera flash
Toast.makeText(this, getString(R.string.no_camera_flash), Toast.LENGTH_SHORT).show();
flashLightControl.setChecked(false);
return;
}else
{
//BIG ISSUE: Flash mode is device specific
//Turn On Flash
String _model = android.os.Build.MODEL;
String _manufaturer = android.os.Build.MANUFACTURER;
if((_model.contains("GT-S5830") && _manufaturer.contains("samsung"))) //|| (_manufaturer.contains("lge")))
{
new Thread(new Runnable()
{
@Override
public void run()
{
// TODO Auto-generated method stub
Log.d(TAG, "From TimerTask..!!!");
cameraParams = CameraManager.camera.getParameters();
//cameraParams.setFlashMode(Parameters.FLASH_MODE_TORCH);
cameraParams.set("flash-mode", "on");
CameraManager.camera.setParameters(cameraParams);
CameraManager.camera.startPreview();
isFlash_On_Mode_Device = true;
isLightOn = true;
try{
Thread.sleep(2000);
Log.d(TAG, "From TimerTask After sleep!!!");
cameraParams = CameraManager.camera.getParameters();
cameraParams.setFlashMode(Parameters.FLASH_MODE_OFF);
CameraManager.camera.setParameters(cameraParams);
CameraManager.camera.startPreview();
isLightOn = true;
}
catch(Exception e){}
}
}).start();
}else if(_manufaturer.contains("lge"))
{
//Log.d(TAG, "From LG");
cameraParams = CameraManager.camera.getParameters();
cameraParams.setFlashMode(Parameters.FLASH_MODE_ON);
CameraManager.camera.setParameters(cameraParams);
CameraManager.camera.startPreview();
isLightOn = true;
}
else if(CameraManager.camera != null)
{
cameraParams = CameraManager.camera.getParameters();
//cameraParams.setFlashMode(Parameters.FLASH_MODE_TORCH);
cameraParams.set("flash-mode", "torch");
CameraManager.camera.setParameters(cameraParams);
CameraManager.camera.startPreview();
isLightOn = true;
}
    }
}
private void turnOffFlashLight()
{
if (isLightOn)
{
if(CameraManager.camera != null)
{
if(isFlash_On_Mode_Device)
{
CameraManager.get().closeDriver();
SurfaceHolder surfaceHolder = surfaceView.getHolder();
if (hasSurface)
{
initCamera(surfaceHolder);
if(CameraManager.camera != null)
CameraManager.camera.startPreview();
}
//Log.d(TAG, "Stopping camera..!!!");
}else
{
cameraParams = CameraManager.camera.getParameters();
cameraParams.setFlashMode(Parameters.FLASH_MODE_OFF);
CameraManager.camera.setParameters(cameraParams);
CameraManager.camera.startPreview();
}
isLightOn = false;
isFlash_On_Mode_Device = false;
}
}
}