I need to turn on the torch while the camera is open.
class FlashLight(private val context: Context) {
private val cameraManager by lazy { context.getSystemService(Context.CAMERA_SERVICE) as CameraManager }
private val backCameraId by lazy { cameraManager.cameraIdList[0] }
fun turnOnFlashLight() = changeFlashStatus(shouldBeTurnedOn = true)
fun turnOffFlashLight() = changeFlashStatus(shouldBeTurnedOn = false)
private fun changeFlashStatus(shouldBeTurnedOn: Boolean) {
try {
cameraManager.setTorchMode(backCameraId, shouldBeTurnedOn)
} catch (e: Exception){
L.e(e)
}
}
}
My code works while the camera is closed, but when I open the camera inside my app and then try to turn the torch on, I get this error:
android.hardware.camera2.CameraAccessException: CAMERA_IN_USE (4):
setTorchMode:2325: Torch for camera "0" is not available
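As an aside (not part of the original question): setTorchMode() only works while no client holds the camera, so once the camera is open the torch is usually driven through the capture request instead. A minimal Camera2 sketch, assuming you already hold the open CameraDevice, its CameraCaptureSession, and the preview Surface used by that session:
import android.hardware.camera2.CameraCaptureSession
import android.hardware.camera2.CameraDevice
import android.hardware.camera2.CaptureRequest
import android.view.Surface

// Hypothetical helper: toggles the torch on an already-open camera by
// rebuilding the repeating preview request with FLASH_MODE_TORCH.
fun setTorchWhileCameraOpen(
    device: CameraDevice,
    session: CameraCaptureSession,
    previewSurface: Surface,
    enabled: Boolean
) {
    val request = device.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW).apply {
        addTarget(previewSurface)
        set(
            CaptureRequest.FLASH_MODE,
            if (enabled) CaptureRequest.FLASH_MODE_TORCH else CaptureRequest.FLASH_MODE_OFF
        )
    }.build()
    session.setRepeatingRequest(request, null, null)
}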
I am creating a simple QR scanner using CameraX and the Google ML Kit. I open an intent after the string value is extracted from the QR code. The problem I'm facing is that the intent opens multiple times. How do I resolve this?
The following is the setup for image analysis. The DisplayQR intent will open after the string value is received from the QR code.
val imageAnalysis = ImageAnalysis.Builder()
.setTargetResolution(Size(640, 480))
.setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
.build()
imageAnalysis.setAnalyzer(
ContextCompat.getMainExecutor(this),
CodeAnalyzer(this, object : CallBackInterface {
override fun onSuccess(qrString: String?) {
imageAnalysis.clearAnalyzer()
Toast.makeText(this@ActivityQR, qrString, Toast.LENGTH_SHORT).show()
Log.d("rty",qrString.toString())
//the following intent is opening multiple times
val visitordetails =
Intent(this@ActivityQR, DisplayQR::class.java)
visitordetails.putExtra("VISITOR_QR", qrString)
startActivity(visitordetails)
}
override fun onFailed() {
}
})
)
cameraProvider.bindToLifecycle(this, selectedCamera, imageAnalysis, cameraPreview)
Code for analyzing the image
class CodeAnalyzer(context: Context, callBackInterface: CallBackInterface) : ImageAnalysis.Analyzer {
private val context: Context = context
private val callback: CallBackInterface = callBackInterface
@SuppressLint("UnsafeOptInUsageError")
override fun analyze(image: ImageProxy) {
var scanner: BarcodeScanner = BarcodeScanning.getClient()
val scannedIMage = image.image
if (scannedIMage != null) {
var scannedInputImage = InputImage.fromMediaImage(
scannedIMage,
image.imageInfo.rotationDegrees
)
scanner.process(scannedInputImage).addOnSuccessListener { barCodes ->
for (qrCode in barCodes) {
when (qrCode.valueType) {
Barcode.TYPE_TEXT -> {
val qrString: String? = qrCode.rawValue
if (qrString != null) {
callback.onSuccess(qrString) //Here I am calling the callback
}
}
}
}
}.addOnFailureListener {
}.addOnCompleteListener {
image.close()
}
}
}
}
Edit: Corrected activity name
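An editorial aside: a likely cause is that several ML Kit process() calls are already in flight when the first result arrives, so onSuccess() fires again before clearAnalyzer() takes effect. A minimal sketch (the qrHandled flag is an assumption, not part of the original code) that latches on the first result:
import java.util.concurrent.atomic.AtomicBoolean // at the top of ActivityQR

// Declared once in ActivityQR: set to true the first time a QR code is handled.
private val qrHandled = AtomicBoolean(false)

override fun onSuccess(qrString: String?) {
    // Ignore any results delivered after the first one.
    if (!qrHandled.compareAndSet(false, true)) return
    imageAnalysis.clearAnalyzer()
    val visitorDetails = Intent(this@ActivityQR, DisplayQR::class.java)
    visitorDetails.putExtra("VISITOR_QR", qrString)
    startActivity(visitorDetails)
}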
I have searched all over the internet and haven't found any solution. I am using the Camera2 API to record a video from the front camera. I have tested on multiple devices and it works fine, but on my Samsung Galaxy S3, after I press the record button the recording sometimes works and sometimes the camera preview freezes. You can find the code I have implemented below.
Creating the preview and record requests via lazy initialization:
private val previewRequest: CaptureRequest? by lazy {
mCaptureSession.device.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW).apply {
addTarget(viewFinder.holder.surface)
}.build()
}
private val recordRequest: CaptureRequest by lazy {
mCaptureSession.device.createCaptureRequest(CameraDevice.TEMPLATE_RECORD).apply {
addTarget(viewFinder.holder.surface)
addTarget(mMediaRecorder.surface)
}.build()
}
I am using AutoFitSurfaceView; in onSurfaceCreated I'm doing the following:
when (cameraDirection) { // I am getting this variable to see what camera I should open
CameraDirection.BACK -> { // getCameraPosition returns the cameraId for the given LENS_FACING direction
mCameraId = getCameraPosition(CameraCharacteristics.LENS_FACING_BACK)
}
CameraDirection.FRONT -> {
mCameraId = getCameraPosition(CameraCharacteristics.LENS_FACING_FRONT)
}
else -> {
mCameraId = getCameraPosition(CameraCharacteristics.LENS_FACING_BACK)
}
}
characteristics = cameraManager.getCameraCharacteristics(mCameraId!!)
// Selects appropriate preview size and configures view finder
mPreviewSize = getPreviewOutputSize(
viewFinder.display, characteristics, SurfaceHolder::class.java
)
// Selects appropriate video size
mVideoSize = getPreviewOutputSize(
viewFinder.display, characteristics, MediaRecorder::class.java
)
viewFinder.setAspectRatio(mPreviewSize.width, mPreviewSize.height)
// To ensure that size is set, initialize camera in the view's thread
viewFinder.post {
initializeCamera()
}
The initializeCamera() function looks like this:
private fun initializeCamera() = lifecycleScope.launch(Dispatchers.Main) {
//viewFinder is the AutoFitSurfaceView
camera = openCamera(cameraManager, mCameraId!!, cameraHandler)
setupMediaRecorder()
val targets = listOf(viewFinder.holder.surface)
camera.createCaptureSession(targets, object : CameraCaptureSession.StateCallback() {
override fun onConfigured(session: CameraCaptureSession) {
mCaptureSession = session
session.setRepeatingRequest(previewRequest!!, null, cameraHandler)
}
override fun onConfigureFailed(session: CameraCaptureSession) {
}
}, cameraHandler)
}
openCamera() looks like the following
private suspend fun openCamera(
manager: CameraManager,
cameraId: String,
handler: Handler? = null
): CameraDevice = suspendCancellableCoroutine { cont ->
manager.openCamera(cameraId, object : CameraDevice.StateCallback() {
override fun onOpened(device: CameraDevice) = cont.resume(device)
override fun onDisconnected(device: CameraDevice) {
finish()
}
override fun onError(device: CameraDevice, error: Int) {
val msg = when (error) {
ERROR_CAMERA_DEVICE -> "Fatal (device)"
ERROR_CAMERA_DISABLED -> "Device policy"
ERROR_CAMERA_IN_USE -> "Camera in use"
ERROR_CAMERA_SERVICE -> "Fatal (service)"
ERROR_MAX_CAMERAS_IN_USE -> "Maximum cameras in use"
else -> "Unknown"
}
val exc = RuntimeException("Camera $cameraId error: ($error) $msg")
if (cont.isActive) cont.resumeWithException(exc)
}
}, handler)
}
setupMediaRecorder() looks like this
private fun setupMediaRecorder() {
mMediaRecorder = MediaRecorder()
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE)
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4)
mMediaRecorder.setOutputFile(outputFile.absolutePath)
Log.i("CAMERA_INFO", mCameraId!!)
val profile = CamcorderProfile.get(mCameraId!!.toInt(), CamcorderProfile.QUALITY_LOW)
Log.i("CAMERA_INFO", "Frame Rate: " + profile.videoFrameRate)
mMediaRecorder.setVideoEncodingBitRate(profile.videoBitRate)
mMediaRecorder.setVideoFrameRate(profile.videoFrameRate)
mMediaRecorder.setVideoSize(mPreviewSize.width, mPreviewSize.height)
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264)
when (mCameraDirection) {
CameraDirection.BACK -> {
mMediaRecorder.setOrientationHint(90)
}
CameraDirection.FRONT -> {
mMediaRecorder.setOrientationHint(270)
}
else -> {
}
}
mMediaRecorder.prepare()
}
Explanation:
First, initializeCamera() is called; in this function I set up the camera and the preview session and prepare the MediaRecorder so that recording can start when the user presses the record button.
After the user presses the record button I do the following:
Close the previewSession
Create the recordSession
After the session is successfully configured, set the lazily initialized recordRequest as the repeating request
Here is the code:
button_record_video.setOnClickListener {
mCaptureSession.close() //Closing the previewSession
try {
camera.createCaptureSession( //Creating the record session passing the viewFinder surface //and the MediaRecorder session
listOf(
viewFinder.holder.surface,
mMediaRecorder.surface
), object : CameraCaptureSession.StateCallback() {
override fun onConfigured(session: CameraCaptureSession) {
mCaptureSession = session
session.setRepeatingRequest(recordRequest, null, cameraHandler)
mMediaRecorder.start()
}
override fun onConfigureFailed(p0: CameraCaptureSession) {
}
}, cameraHandler
)
} catch (e: Exception) {
}
}
PS: This code works when capturing from the back camera; for the front camera it works on some devices and fails occasionally on others (a device on which it fails occasionally is the Samsung Galaxy S3).
If any more information is needed, I can gladly provide it.
Thanks in advance
Since you are preparing the media recorder right away, you could just have one capture session, shared between preview and recording. Then just add in the recording target Surface to the capture request once you want to start recording.
That avoids the glitch from creating a new capture session, and may be more compatible with the devices you're seeing an issue on. In addition, you might want to look at persistent recording surfaces from MediaCodec, to avoid having to create a new session for the second recording (if that's something you want to support).
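A rough Kotlin sketch of that suggestion, reusing the question's names (viewFinder, mMediaRecorder, previewRequest, recordRequest, mCaptureSession, cameraHandler); treat it as an outline rather than drop-in code:
// Configure one session up front with both surfaces; the MediaRecorder
// surface is simply not targeted by the preview request yet.
private fun createSharedSession(camera: CameraDevice) {
    camera.createCaptureSession(
        listOf(viewFinder.holder.surface, mMediaRecorder.surface),
        object : CameraCaptureSession.StateCallback() {
            override fun onConfigured(session: CameraCaptureSession) {
                mCaptureSession = session
                // Preview-only request until the user presses record.
                session.setRepeatingRequest(previewRequest!!, null, cameraHandler)
            }
            override fun onConfigureFailed(session: CameraCaptureSession) {}
        },
        cameraHandler
    )
}

// On the record button: same session, new repeating request that also
// targets the recorder surface -- no session teardown, no reconfiguration.
private fun startRecording() {
    mCaptureSession.setRepeatingRequest(recordRequest, null, cameraHandler)
    mMediaRecorder.start()
}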
I did not see any handling of the camera on a thread other than the main/UI thread in your code. In fact, I have never seen a good tutorial on how to do that, so this is what I have done:
Declare the handler thread and handler
private HandlerThread mBackgroundHandlerThread;
private Handler mBackgroundHandler;
Start and stop the background thread
private void startBackgroundThread() {
mBackgroundHandlerThread = new HandlerThread("CameraBackground");
mBackgroundHandlerThread.start();
mBackgroundHandler = new Handler(mBackgroundHandlerThread.getLooper());
}
private void stopBackgroundThread() {
if(mBackgroundHandlerThread == null)
return;
try {
mBackgroundHandlerThread.quitSafely();
mBackgroundHandlerThread.join();
mBackgroundHandlerThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
Log.e(TAG, e.toString());
}
catch (Exception e) {
Log.e(TAG, e.toString());
}
}
The key part is updating the GUI from the callback of the background thread:
mCameraDevice.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback()
{
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession)
{
mPreviewSession = cameraCaptureSession;
State = Constants.RecordingState.RECORDING;
// Start recording
if (mMediaRecorder != null)
{
mMediaRecorder.start();
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
// Updating the GUI
mButtonVideo.setEnabled(true);
txtMainMessage.setText("Recording");
}
});
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
Activity activity = getActivity();
if (null != activity) {
Toast.makeText(activity, "Failed", Toast.LENGTH_SHORT).show();
}
}
}, mBackgroundHandler);
I noticed strange behavior on a Xiaomi Redmi Note 9 Pro. I have tested the application on hundreds of phones, but this problem appears only on this device, and only when an ImageReader with the YUV_420_888 format and a 176x144 preview resolution is used (for example, with 320x240, or with JPEG, or without the ImageReader as a capture surface, everything works well). The onImageAvailable method is never called, the preview shows only 8 frames in slow motion and then freezes, and the app slows down. onCaptureCompleted() in CameraCurrentParamsReceiver is also called only 8 times.
I get the smallest resolution by using getMinPreviewSize() (176x144 on this Xiaomi phone).
const val PREVIEW_IMAGE_FORMAT = ImageFormat.YUV_420_888
const val IMAGE_READER_MAX_SIMULTANEOUS_IMAGES = 4
val previewCaptureCallback = CameraCurrentParamsReceiver(this)
private fun startPreview(cameraDevice: CameraDevice, cameraProperties: CameraProperties)
{
val imageReader = ImageReader.newInstance(cameraProperties.previewSize.width,
cameraProperties.previewSize.height,
PREVIEW_IMAGE_FORMAT,
IMAGE_READER_MAX_SIMULTANEOUS_IMAGES)
this.imageReader = imageReader
bufferedImageConverter = BufferedImageConverter(cameraProperties.previewSize.width, cameraProperties.previewSize.height)
val previewSurface = previewSurface
val previewSurfaceForCamera =
if (previewSurface != null)
{
if (previewSurface.isValid)
{
previewSurface
}
else
{
Log.w(TAG, "Invalid preview surface - camera preview display is not available")
null
}
}
else
{
null
}
val captureSurfaces = listOfNotNull(imageReader.surface, previewSurfaceForCamera)
cameraDevice.createCaptureSession(
captureSurfaces,
object : CameraCaptureSession.StateCallback()
{
override fun onConfigureFailed(cameraCaptureSession: CameraCaptureSession)
{
Log.e(TAG, "onConfigureFailed() cannot configure camera")
if (isCameraOpened(cameraDevice))
{
shutDown("onConfigureFailed")
}
}
override fun onConfigured(cameraCaptureSession: CameraCaptureSession)
{
Log.d(TAG, "onConfigured()")
if (!isCameraOpened(cameraDevice))
{
cameraCaptureSession.close()
shutDown("onConfigured.isCameraOpened")
return
}
captureSession = cameraCaptureSession
try
{
val request = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW)
captureSurfaces.forEach { request.addTarget(it) }
CameraPreviewRequestInitializer.initializePreviewRequest(request, cameraProperties, controlParams, isControlParamsStrict)
captureRequestBuilder = request
val previewCallback = PreviewFrameHandler(this#Camera2)
this#Camera2.previewFrameHandler = previewCallback
imageReader.setOnImageAvailableListener(previewCallback, previewCallback.backgroundHandler)
cameraCaptureSession.setRepeatingRequest(request.build(), previewCaptureCallback, null)
}
catch (ex: CameraAccessException)
{
Log.e(TAG, "onConfigured() failed with exception", ex)
shutDown("onConfigured.CameraAccessException")
}
}
},
null)
}
private fun chooseCamera(manager: CameraManager): CameraProperties?
{
val cameraIdList = manager.cameraIdList
if (cameraIdList.isEmpty())
{
return null
}
for (cameraId in cameraIdList)
{
val characteristics = manager.getCameraCharacteristics(cameraId)
val facing = characteristics.get(CameraCharacteristics.LENS_FACING)
if (facing != null && facing == CameraCharacteristics.LENS_FACING_BACK)
{
val minPreviewSize = getMinPreviewSize(characteristics)
if (minPreviewSize == null)
{
Log.e(TAG, "chooseCamera() Cannot determine the preview size")
return null
}
Log.d(TAG, "chooseCamera() chosen camera id: $cameraId, preview size: $minPreviewSize")
return CameraProperties(cameraId,
minPreviewSize,
characteristics)
}
}
return null
}
private fun getMinPreviewSize(characteristics: CameraCharacteristics): Size?
{
val map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)
if (map == null)
{
Log.e(TAG, "getMinPreviewSize() Map is empty")
return null
}
return map.getOutputSizes(Constants.Camera.PREVIEW_IMAGE_FORMAT)?.minBy { it.width * it.height }
}
PreviewFrameHandler and CameraCurrentParamsReceiver (previewCaptureCallback variable)
private class PreviewFrameHandler(private val parent: Camera2) : ImageReader.OnImageAvailableListener, Handler.Callback
{
val backgroundHandler: Handler
private val backgroundHandlerThread: HandlerThread = HandlerThread("Camera2.PreviewFrame.HandlerThread")
private val mainHandler: Handler = Handler(Looper.getMainLooper(), this)
/**
* Main thread.
*/
init
{
backgroundHandlerThread.start()
backgroundHandler = Handler(backgroundHandlerThread.looper)
}
fun shutDown()
{
backgroundHandlerThread.quit()
mainHandler.removeMessages(0)
}
override fun handleMessage(msg: Message?): Boolean
{
msg ?: return false
parent.cameraFrameListener.onFrame(msg.obj as RGBImage)
return true
}
/**
* Background thread.
*/
private val relativeTimestamp = RelativeTimestamp()
override fun onImageAvailable(reader: ImageReader)
{
var image: Image? = null
try
{
image = reader.acquireNextImage()
image ?: return
val rgbImage = parent.bufferedImageConverter?.convertYUV420spToRGB(image, relativeTimestamp.updateAndGetSeconds(image.timestamp))
rgbImage ?: return
mainHandler.sendMessage(mainHandler.obtainMessage(0, rgbImage))
}
catch (ex: Exception)
{
Log.e(TAG, "onImageAvailable()", ex)
}
finally
{
image?.close()
}
}
private class RelativeTimestamp
{
private var initialNanos = 0L
fun updateAndGetSeconds(currentNanos: Long): Double
{
if (initialNanos == 0L)
{
initialNanos = currentNanos
}
return nanosToSeconds(currentNanos - initialNanos)
}
}
}
/**
* Class used to read current camera params.
*/
private class CameraCurrentParamsReceiver(private val parent: Camera2) : CameraCaptureSession.CaptureCallback()
{
private var isExposureTimeExceptionLogged = false
private var isIsoExceptionLogged = false
override fun onCaptureSequenceAborted(session: CameraCaptureSession, sequenceId: Int)
{
}
override fun onCaptureCompleted(session: CameraCaptureSession, request: CaptureRequest, result: TotalCaptureResult)
{
try
{
val exposureTimeNanos = result.get(CaptureResult.SENSOR_EXPOSURE_TIME)
if (exposureTimeNanos != null)
{
parent.currentExposureTimeNanos = exposureTimeNanos
}
}
catch (ex: IllegalArgumentException)
{
if (!isExposureTimeExceptionLogged)
{
isExposureTimeExceptionLogged = true
}
}
try
{
val iso = result.get(CaptureResult.SENSOR_SENSITIVITY)
if (iso != null)
{
parent.currentIso = iso
}
}
catch (ex: IllegalArgumentException)
{
if (!isIsoExceptionLogged)
{
Log.i(TAG, "Cannot get current SENSOR_SENSITIVITY, exception: " + ex.message)
isIsoExceptionLogged = true
}
}
}
override fun onCaptureFailed(session: CameraCaptureSession, request: CaptureRequest, failure: CaptureFailure)
{
}
override fun onCaptureSequenceCompleted(session: CameraCaptureSession, sequenceId: Int, frameNumber: Long)
{
}
override fun onCaptureStarted(session: CameraCaptureSession, request: CaptureRequest, timestamp: Long, frameNumber: Long)
{
}
override fun onCaptureProgressed(session: CameraCaptureSession, request: CaptureRequest, partialResult: CaptureResult)
{
}
override fun onCaptureBufferLost(session: CameraCaptureSession, request: CaptureRequest, target: Surface, frameNumber: Long)
{
}
}
As I understand it, something is wrong with the preview size, but I cannot find the correct way to choose this value, and the strangest thing is that this problem appears only on this Xiaomi device. Any thoughts?
176x144 is sometimes a problematic resolution for devices. It's really only listed by camera devices because it's sometimes required for recording videos for MMS (multimedia text message) messages. These videos, frankly, look awful, but it's still frequently a requirement by cellular carriers that they work.
But on modern devices with 12 - 50 MP cameras, the camera hardware actually struggles to scale images down to 176x144 from the sensor full resolution (> 20x downscale!), so sometimes certain combinations of sizes can cause problems.
I'd generally recommend not using preview resolutions below 320x240 to minimize issues, and definitely not mixing a 176x144 preview with a high-resolution still capture.
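As a sketch of how that advice could be applied to the poster's getMinPreviewSize(), filter out sizes below 320x240 before taking the minimum (the 320x240 floor comes from the recommendation above, not from any documented requirement):
private fun getMinPreviewSize(characteristics: CameraCharacteristics): Size?
{
    val map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)
        ?: return null
    return map.getOutputSizes(Constants.Camera.PREVIEW_IMAGE_FORMAT)
        ?.filter { it.width >= 320 && it.height >= 240 } // skip 176x144 and smaller
        ?.minByOrNull { it.width * it.height }
}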
I'm working with CameraX for the first time and I can't find a way to check at runtime whether a device has a front or rear camera.
I only need to use the preview; I'm not capturing images, so I can't use a button for it.
private var lensFacing = CameraX.LensFacing.FRONT
val viewFinderConfig = PreviewConfig.Builder().apply {
setLensFacing(lensFacing)
setTargetAspectRatio(screenAspectRatio)
setTargetRotation(viewFinder.display.rotation)
}.build()
How can I make sure that the app won't crash if the user's device has no front camera?
Thanks in advance!
Check if the device supports at least one camera with the specified lens facing:
version 1.0.0-alpha06:
val hasFrontCamera = CameraX.hasCameraWithLensFacing(CameraX.LensFacing.FRONT)
EDIT :
version >= 1.0.0-alpha07:
From https://developer.android.com/jetpack/androidx/releases/camera:
hasCamera() calls previously provided by the CameraX class are now available via the ProcessCameraProvider.
override fun onCreate(savedInstanceState: Bundle?) {
cameraProviderFuture = ProcessCameraProvider.getInstance(this);
}
cameraProviderFuture.addListener(Runnable {
val cameraProvider = cameraProviderFuture.get()
try {
var hasCamera = cameraProvider.hasCamera(CameraSelector.DEFAULT_FRONT_CAMERA)
} catch (e: CameraInfoUnavailableException) {
e.printStackTrace()
}
}, ContextCompat.getMainExecutor(this))
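A short usage sketch building on that: pick a selector that is known to exist before binding, so a preview-only app does not crash on a device without a front camera (cameraProvider, preview, and the lifecycle owner come from your own setup):
// hasCamera() can throw CameraInfoUnavailableException; wrap in try/catch if needed.
val selector =
    if (cameraProvider.hasCamera(CameraSelector.DEFAULT_FRONT_CAMERA))
        CameraSelector.DEFAULT_FRONT_CAMERA
    else
        CameraSelector.DEFAULT_BACK_CAMERA
cameraProvider.bindToLifecycle(this, selector, preview)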
I've followed the steps here to get CameraX set up, and now I am trying to get a front-facing camera switch button working.
Here is my set up code:
private lateinit var preview: Preview
private fun startCamera() {
// Create configuration object for the viewfinder use case
val previewConfig = PreviewConfig.Builder().apply {
setLensFacing(CameraX.LensFacing.BACK)
}.build()
// Build the viewfinder use case
preview = Preview(previewConfig)
// Every time the viewfinder is updated, recompute layout
preview.setOnPreviewOutputUpdateListener {
// To update the SurfaceTexture, we have to remove it and re-add it
val parent = viewFinder.parent as ViewGroup
parent.removeView(viewFinder)
parent.addView(viewFinder, 0)
viewFinder.surfaceTexture = it.surfaceTexture
updateTransform()
}
// Bind use cases to lifecycle
CameraX.bindToLifecycle(this, preview)
}
When a user clicks the "switch" button I re-configure the preview to use the front camera, then reinitialize the Preview.
private fun initSwitchButton(view: View) {
switchButton = view.findViewById(R.id.switch_button)
switchButton.setOnClickListener {
val previewConfig = PreviewConfig.Builder().apply { setLensFacing(CameraX.LensFacing.FRONT) }.build()
preview = Preview(previewConfig)
}
}
However, this doesn't switch to the front camera. What am I missing?
Since 2021, an update to CameraX has rendered CameraX.LensFacing unusable. Use CameraSelector instead.
private CameraSelector lensFacing = CameraSelector.DEFAULT_FRONT_CAMERA;
private void flipCamera() {
if (lensFacing == CameraSelector.DEFAULT_FRONT_CAMERA) lensFacing = CameraSelector.DEFAULT_BACK_CAMERA;
else if (lensFacing == CameraSelector.DEFAULT_BACK_CAMERA) lensFacing = CameraSelector.DEFAULT_FRONT_CAMERA;
startCamera();
}
private void startCamera() {
ListenableFuture<ProcessCameraProvider> cameraFuture = ProcessCameraProvider.getInstance(requireContext());
cameraFuture.addListener(() -> {
imageCapture = new ImageCapture.Builder()
.setTargetRotation(cameraPreview.getDisplay().getRotation())
.setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
.build();
videoCapture = new VideoCapture.Builder().build();
try {
ProcessCameraProvider processCameraProvider = cameraFuture.get();
Preview preview = new Preview.Builder().build();
preview.setSurfaceProvider(cameraPreview.getSurfaceProvider());
processCameraProvider.unbindAll();
// lensFacing is used here
processCameraProvider.bindToLifecycle(getViewLifecycleOwner(), lensFacing, imageCapture, videoCapture, preview);
} catch (ExecutionException e) {
e.printStackTrace();
} catch (InterruptedException e) {
e.printStackTrace();
}
}, ContextCompat.getMainExecutor(requireContext()));
}
It looks like the recommended way to achieve this is to store the LensFacing position as an instance variable and then call bindToLifecycle() to switch the camera.
Here is a code snippet that worked for me:
private var lensFacing = CameraX.LensFacing.BACK
private var imageCapture: ImageCapture? = null
#SuppressLint("RestrictedApi")
private fun startCamera() {
bindCameraUseCases()
// Listener for button used to switch cameras
switchButton = view.findViewById(R.id.switch_button)
switchButton.setOnClickListener {
lensFacing = if (CameraX.LensFacing.FRONT == lensFacing) {
CameraX.LensFacing.BACK
} else {
CameraX.LensFacing.FRONT
}
try {
// Only bind use cases if we can query a camera with this orientation
CameraX.getCameraWithLensFacing(lensFacing)
bindCameraUseCases()
} catch (exc: Exception) {
// Do nothing
}
}
}
private fun bindCameraUseCases() {
// Make sure that there are no other use cases bound to CameraX
CameraX.unbindAll()
val previewConfig = PreviewConfig.Builder().apply {
setLensFacing(lensFacing)
}.build()
val preview = Preview(previewConfig)
val imageCaptureConfig = ImageCaptureConfig.Builder().apply {
setLensFacing(lensFacing)
}.build()
imageCapture = ImageCapture(imageCaptureConfig)
// Apply declared configs to CameraX using the same lifecycle owner
CameraX.bindToLifecycle(this, preview, imageCapture)
}
Java version:
private LensFacing lensFacing = CameraX.LensFacing.BACK;
private ImageCapture imageCapture = null;
private Button switchButton;
#SuppressLint("RestrictedApi")
private void startCamera() {
bindCameraUseCases();
// Listener for button used to switch cameras
switchButton = view.findViewById(R.id.switch_button);
switchButton.setOnClickListener(v -> {
lensFacing = lensFacing == LensFacing.FRONT ? LensFacing.BACK : LensFacing.FRONT;
try {
// Only bind use cases if we can query a camera with this orientation
CameraX.getCameraWithLensFacing(lensFacing);
bindCameraUseCases();
} catch (CameraInfoUnavailableException e) {
// Do nothing
}
});
}
private void bindCameraUseCases() {
// Make sure that there are no other use cases bound to CameraX
CameraX.unbindAll();
PreviewConfig previewConfig = new PreviewConfig.Builder().
setLensFacing(lensFacing)
.build();
Preview preview = new Preview(previewConfig);
ImageCaptureConfig imageCaptureConfig = new ImageCaptureConfig.Builder()
.setLensFacing(lensFacing)
.build();
imageCapture = new ImageCapture(imageCaptureConfig);
// Apply declared configs to CameraX using the same lifecycle owner
CameraX.bindToLifecycle(this, preview, imageCapture);
}
Here is how I did mine:
private var defaultCameraFacing = CameraSelector.DEFAULT_BACK_CAMERA
btnFlipCamera.setOnClickListener {
Log.d("CameraFacing", defaultCameraFacing.toString())
defaultCameraFacing = if(defaultCameraFacing == CameraSelector.DEFAULT_FRONT_CAMERA){
CameraSelector.DEFAULT_BACK_CAMERA
}else{
CameraSelector.DEFAULT_FRONT_CAMERA
}
try {
// Only bind use cases if we can query a camera with this orientation
startCamera(defaultCameraFacing)
} catch (exc: Exception) {
// Do nothing
}
}
private fun startCamera(defaultCameraFacing: CameraSelector) {
llPictureCaptured.visibility = View.GONE
tvLocationLabel.visibility= View.GONE
pgLoadingLocation.visibility = View.GONE
openCamera.visibility = View.GONE
llCameraControl.visibility = View.VISIBLE
viewFinder.visibility = View.VISIBLE
val cameraProviderFuture = ProcessCameraProvider.getInstance(this)
cameraProviderFuture.addListener({
// Used to bind the lifecycle of cameras to the lifecycle owner
val cameraProvider: ProcessCameraProvider = cameraProviderFuture.get()
// Preview
val preview = Preview.Builder()
.build()
.also {
it.setSurfaceProvider(viewFinder.surfaceProvider)
}
imageCapture = ImageCapture.Builder()
.build()
//set image analysis, i.e luminosity analysis
val imageAnalyzer = ImageAnalysis.Builder()
.build()
.also {
it.setAnalyzer(cameraExecutor, LuminosityAnalyzer { luma ->
Log.d(TAG, "Average luminosity: $luma")
})
}
// Set camera facing
val cameraSelector = defaultCameraFacing
try {
// Unbind use cases before rebinding
cameraProvider.unbindAll()
// Bind use cases to camera
cameraProvider.bindToLifecycle(
this, cameraSelector, preview, imageCapture, imageAnalyzer)
} catch (exc: Exception) {
Log.e(TAG, "Use case binding failed", exc)
}
}, ContextCompat.getMainExecutor(this))
}