I've followed the steps here to get CameraX set up, and now I am trying to get a front-facing camera button working.
Here is my setup code:
private lateinit var preview: Preview

private fun startCamera() {
    // Create configuration object for the viewfinder use case
    val previewConfig = PreviewConfig.Builder().apply {
        setLensFacing(CameraX.LensFacing.BACK)
    }.build()

    // Build the viewfinder use case
    preview = Preview(previewConfig)

    // Every time the viewfinder is updated, recompute layout
    preview.setOnPreviewOutputUpdateListener {
        // To update the SurfaceTexture, we have to remove it and re-add it
        val parent = viewFinder.parent as ViewGroup
        parent.removeView(viewFinder)
        parent.addView(viewFinder, 0)
        viewFinder.surfaceTexture = it.surfaceTexture
        updateTransform()
    }

    // Bind use cases to lifecycle
    CameraX.bindToLifecycle(this, preview)
}
When the user clicks the "switch" button, I reconfigure the preview to use the front camera and then reinitialize the Preview.
private fun initSwitchButton(view: View) {
    switchButton = view.findViewById(R.id.switch_button)
    switchButton.setOnClickListener {
        val previewConfig = PreviewConfig.Builder().apply { setLensFacing(CameraX.LensFacing.FRONT) }.build()
        preview = Preview(previewConfig)
    }
}
However, this doesn't switch to the front camera. What am I missing?
Since 2021, an update to CameraX has rendered CameraX.LensFacing unusable. Use CameraSelector instead.
private CameraSelector lensFacing = CameraSelector.DEFAULT_FRONT_CAMERA;

private void flipCamera() {
    if (lensFacing == CameraSelector.DEFAULT_FRONT_CAMERA) lensFacing = CameraSelector.DEFAULT_BACK_CAMERA;
    else if (lensFacing == CameraSelector.DEFAULT_BACK_CAMERA) lensFacing = CameraSelector.DEFAULT_FRONT_CAMERA;
    startCamera();
}

private void startCamera() {
    ListenableFuture<ProcessCameraProvider> cameraFuture = ProcessCameraProvider.getInstance(requireContext());
    cameraFuture.addListener(() -> {
        imageCapture = new ImageCapture.Builder()
                .setTargetRotation(cameraPreview.getDisplay().getRotation())
                .setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
                .build();
        videoCapture = new VideoCapture.Builder().build();
        try {
            ProcessCameraProvider processCameraProvider = cameraFuture.get();
            Preview preview = new Preview.Builder().build();
            preview.setSurfaceProvider(cameraPreview.getSurfaceProvider());
            processCameraProvider.unbindAll();
            // lensFacing is used here
            processCameraProvider.bindToLifecycle(getViewLifecycleOwner(), lensFacing, imageCapture, videoCapture, preview);
        } catch (ExecutionException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }, ContextCompat.getMainExecutor(requireContext()));
}
It looks like the recommended way to achieve this is to store the LensFacing position as an instance variable and then call bindToLifecycle() to switch the camera.
Here is a code snippet that worked for me:
private var lensFacing = CameraX.LensFacing.BACK
private var imageCapture: ImageCapture? = null

@SuppressLint("RestrictedApi")
private fun startCamera() {
    bindCameraUseCases()

    // Listener for button used to switch cameras
    switchButton = view.findViewById(R.id.switch_button)
    switchButton.setOnClickListener {
        lensFacing = if (CameraX.LensFacing.FRONT == lensFacing) {
            CameraX.LensFacing.BACK
        } else {
            CameraX.LensFacing.FRONT
        }
        try {
            // Only bind use cases if we can query a camera with this orientation
            CameraX.getCameraWithLensFacing(lensFacing)
            bindCameraUseCases()
        } catch (exc: Exception) {
            // Do nothing
        }
    }
}

private fun bindCameraUseCases() {
    // Make sure that there are no other use cases bound to CameraX
    CameraX.unbindAll()

    val previewConfig = PreviewConfig.Builder().apply {
        setLensFacing(lensFacing)
    }.build()
    val preview = Preview(previewConfig)

    val imageCaptureConfig = ImageCaptureConfig.Builder().apply {
        setLensFacing(lensFacing)
    }.build()
    imageCapture = ImageCapture(imageCaptureConfig)

    // Apply declared configs to CameraX using the same lifecycle owner
    CameraX.bindToLifecycle(this, preview, imageCapture)
}
private LensFacing lensFacing = CameraX.LensFacing.BACK;
private ImageCapture imageCapture = null;
private Button switchButton;

@SuppressLint("RestrictedApi")
private void startCamera() {
    bindCameraUseCases();

    // Listener for button used to switch cameras
    switchButton = view.findViewById(R.id.switch_button);
    switchButton.setOnClickListener(v -> {
        lensFacing = lensFacing == LensFacing.FRONT ? LensFacing.BACK : LensFacing.FRONT;
        try {
            // Only bind use cases if we can query a camera with this orientation
            CameraX.getCameraWithLensFacing(lensFacing);
            bindCameraUseCases();
        } catch (CameraInfoUnavailableException e) {
            // Do nothing
        }
    });
}

private void bindCameraUseCases() {
    // Make sure that there are no other use cases bound to CameraX
    CameraX.unbindAll();

    PreviewConfig previewConfig = new PreviewConfig.Builder()
            .setLensFacing(lensFacing)
            .build();
    Preview preview = new Preview(previewConfig);

    ImageCaptureConfig imageCaptureConfig = new ImageCaptureConfig.Builder()
            .setLensFacing(lensFacing)
            .build();
    imageCapture = new ImageCapture(imageCaptureConfig);

    // Apply declared configs to CameraX using the same lifecycle owner
    CameraX.bindToLifecycle(this, preview, imageCapture);
}
The snippet above is the Java version of the same approach.
Here is how I did mine:
private var defaultCameraFacing = CameraSelector.DEFAULT_BACK_CAMERA

btnFlipCamera.setOnClickListener {
    Log.d("CameraFacing", defaultCameraFacing.toString())
    defaultCameraFacing = if (defaultCameraFacing == CameraSelector.DEFAULT_FRONT_CAMERA) {
        CameraSelector.DEFAULT_BACK_CAMERA
    } else {
        CameraSelector.DEFAULT_FRONT_CAMERA
    }
    try {
        // Only bind use cases if we can query a camera with this orientation
        startCamera(defaultCameraFacing)
    } catch (exc: Exception) {
        // Do nothing
    }
}
private fun startCamera(defaultCameraFacing: CameraSelector) {
    llPictureCaptured.visibility = View.GONE
    tvLocationLabel.visibility = View.GONE
    pgLoadingLocation.visibility = View.GONE
    openCamera.visibility = View.GONE
    llCameraControl.visibility = View.VISIBLE
    viewFinder.visibility = View.VISIBLE

    val cameraProviderFuture = ProcessCameraProvider.getInstance(this)
    cameraProviderFuture.addListener({
        // Used to bind the lifecycle of cameras to the lifecycle owner
        val cameraProvider: ProcessCameraProvider = cameraProviderFuture.get()

        // Preview
        val preview = Preview.Builder()
            .build()
            .also {
                it.setSurfaceProvider(viewFinder.surfaceProvider)
            }

        imageCapture = ImageCapture.Builder()
            .build()

        // Set up image analysis, i.e. luminosity analysis
        val imageAnalyzer = ImageAnalysis.Builder()
            .build()
            .also {
                it.setAnalyzer(cameraExecutor, LuminosityAnalyzer { luma ->
                    Log.d(TAG, "Average luminosity: $luma")
                })
            }

        // Set camera facing
        val cameraSelector = defaultCameraFacing

        try {
            // Unbind use cases before rebinding
            cameraProvider.unbindAll()
            // Bind use cases to camera
            cameraProvider.bindToLifecycle(
                this, cameraSelector, preview, imageCapture, imageAnalyzer)
        } catch (exc: Exception) {
            Log.e(TAG, "Use case binding failed", exc)
        }
    }, ContextCompat.getMainExecutor(this))
}
I am using two capture sessions: one is previewSession() and the second is recordSession().
private fun previewSession() {
    val surfaceTexture = textureView.surfaceTexture
    MAX_PREVIEW_HEIGHT = textureView.height
    MAX_PREVIEW_WIDTH = textureView.width
    surfaceTexture?.setDefaultBufferSize(MAX_PREVIEW_HEIGHT, MAX_PREVIEW_WIDTH)

    val surface = Surface(surfaceTexture)
    captureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW)
    captureRequestBuilder.addTarget(surface)

    mCameraDevice.createCaptureSession(
        listOf(surface),
        object : CameraCaptureSession.StateCallback() {
            override fun onConfigured(session: CameraCaptureSession) {
                if (session != null) {
                    captureSession = session
                    captureSession.setRepeatingRequest(
                        captureRequestBuilder.build(),
                        null,
                        null
                    )
                }
            }

            override fun onConfigureFailed(session: CameraCaptureSession) {
                showLog(TAG, "Failed to create CameraCaptureSession")
            }
        },
        mBackgroundHandler
    )
}
And recordSession() looks like this:
private fun recordSession() {
    setUpMediaRecorder()

    val surfaceTexture = textureView.surfaceTexture
    MAX_PREVIEW_HEIGHT = textureView.height
    MAX_PREVIEW_WIDTH = textureView.width
    surfaceTexture?.setDefaultBufferSize(MAX_PREVIEW_HEIGHT, MAX_PREVIEW_WIDTH)

    val textureSurface = Surface(surfaceTexture)
    val recordSurface = mediaRecorder.surface

    captureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD)
    captureRequestBuilder.addTarget(textureSurface)
    captureRequestBuilder.addTarget(recordSurface)

    val surfaces = ArrayList<Surface>().apply {
        add(textureSurface)
        add(recordSurface)
    }

    mCameraDevice.createCaptureSession(
        surfaces,
        object : CameraCaptureSession.StateCallback() {
            override fun onConfigured(session: CameraCaptureSession) {
                captureSession = session
                captureSession.setRepeatingRequest(
                    captureRequestBuilder.build(),
                    null,
                    null
                )
                isRecording = true
                mediaRecorder.start()
            }

            override fun onConfigureFailed(session: CameraCaptureSession) {
                showLog(TAG, "Failed to create CameraRecordSession!")
            }
        },
        mBackgroundHandler
    )
}
And here is the toggleFlashState() method that turns the flash on and off:
fun toggleFlashState(view: View) {
    if (isFlashOn) {
        captureRequestBuilder.set(
            CaptureRequest.FLASH_MODE,
            CaptureRequest.FLASH_MODE_OFF
        )
        captureSession.setRepeatingRequest(captureRequestBuilder.build(), null, null)
        isFlashOn = false
    } else {
        captureRequestBuilder.set(
            CaptureRequest.FLASH_MODE,
            CameraMetadata.FLASH_MODE_TORCH
        )
        captureSession.setRepeatingRequest(captureRequestBuilder.build(), null, null)
        isFlashOn = true
    }
}
The problem is that when I turn on the flash and then start recording, the flash turns off. Can anyone help me keep the flash on permanently, so that it does not turn off automatically? It should only turn on and off when toggleFlashState() is invoked.
The problem looks to be that you're overwriting your old captureRequestBuilder whenever you call recordSession (or previewSession). So whatever changes the last call to toggleFlashState did to the captureRequestBuilder will be lost at that point.
So in recordSession and previewSession, you need to look at the value of isFlashOn, and set the FLASH_MODE to the right value after you create the new request builder.
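For example, a minimal sketch of that idea, reusing the captureRequestBuilder and isFlashOn fields from your code (the helper name applyFlashState() is purely illustrative, not part of any API):

// Sketch: re-apply the last toggled flash state to a freshly created builder,
// so that rebuilding the session does not silently reset the torch.
private fun applyFlashState(builder: CaptureRequest.Builder) {
    builder.set(
        CaptureRequest.FLASH_MODE,
        if (isFlashOn) CameraMetadata.FLASH_MODE_TORCH else CameraMetadata.FLASH_MODE_OFF
    )
}

Call applyFlashState(captureRequestBuilder) in both previewSession() and recordSession(), after the addTarget() calls and before setRepeatingRequest(), so the state chosen by toggleFlashState() survives the switch between sessions.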
I am working with the ML Kit face detector: https://developers.google.com/ml-kit/vision/face-detection/android#kotlin_2
I take an image from Camera2 and process it, but I constantly get the error "FaceDetector was not released with FaceDetector.release()".
More details:
W/FaceDetector: FaceDetector was not released with FaceDetector.release()
E/BufferQueueProducer: [ImageReader-960x1280f23m2-7166-0](id:1bfe00000000,api:4,p:386,c:7166) dequeueBuffer: BufferQueue has been abandoned
I followed this tutorial to set up Camera2: https://medium.com/@tylerwalker/integrating-camera2-api-on-android-feat-kotlin-4a4e65dc593f
Here is the code I wrote for the ML part:
val realTimeOps = FaceDetectorOptions.Builder()
    .setContourMode(FaceDetectorOptions.CONTOUR_MODE_ALL)
    .build()

val detector = FaceDetection.getClient(realTimeOps)

imageReader.setOnImageAvailableListener({
    imageReader.acquireLatestImage()?.let { image ->
        val mlImage = InputImage.fromMediaImage(image, 0) // TODO change image for calculation
        val result = detector.process(mlImage)
            .addOnSuccessListener { faces ->
                Log.d("photo", "Face found!")
            }
            .addOnFailureListener { e ->
                Log.d("photo", "Error: $e")
            }
        image.close()
    }
}, Handler { true })
Also, my detector val has no release() function :'(
I hope that someone can help me with that :)
I finally figured out how this error happened!
There must be only one instance of the detector, and the error was in another part of my code (I am new to Android and this is my first time working with Camera2):
cameraCharacteristics[CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP]?.let { streamConfigurationMap ->
    streamConfigurationMap.getOutputSizes(ImageFormat.YUV_420_888)
        ?.let { yuvSizes ->
            val previewSize = yuvSizes.last()
            val displayRotation = windowManager.defaultDisplay.rotation
            val swappedDimensions = areDimensionsSwapped(displayRotation, cameraCharacteristics)
            val rotatedPreviewWidth = if (swappedDimensions) previewSize.height else previewSize.width
            val rotatedPreviewHeight = if (swappedDimensions) previewSize.width else previewSize.height
            surfaceView.holder.setFixedSize(rotatedPreviewWidth, rotatedPreviewHeight)

            val imageReader = ImageReader.newInstance(rotatedPreviewWidth, rotatedPreviewHeight,
                ImageFormat.YUV_420_888, 2)

            val realTimeOps = FaceDetectorOptions.Builder()
                .setContourMode(FaceDetectorOptions.CONTOUR_MODE_ALL)
                .build()

            val detector: FaceDetector = FaceDetection.getClient(realTimeOps)

            imageReader.setOnImageAvailableListener({
                imageReader.acquireLatestImage()?.let { image ->
                    val mlImage = InputImage.fromMediaImage(image, getRotationCompensation(cameraDevice.id, getInstance(), true))
                    val result = detector.process(mlImage)
                        .addOnSuccessListener { faces ->
                            if (faces.size > 0)
                                Log.d("photo", "Face found!")
                            else
                                Log.d("photo", "No face has been found")
                        }
                        .addOnFailureListener { e ->
                            Log.d("photo", "Error: $e")
                        }
                        .addOnCompleteListener {
                            image.close()
                        }
                }
            }, Handler { true })

            val previewSurface = surfaceView.holder.surface
            val recordingSurface = imageReader.surface

            val captureCallback = object : CameraCaptureSession.StateCallback() {
                override fun onConfigureFailed(session: CameraCaptureSession) {
                }

                override fun onConfigured(session: CameraCaptureSession) {
                    val previewRequestBuilder = cameraDevice.createCaptureRequest(
                        TEMPLATE_PREVIEW
                    ).apply {
                        addTarget(previewSurface)
                        addTarget(recordingSurface)
                    }
                    session.setRepeatingRequest(
                        previewRequestBuilder.build(),
                        object : CameraCaptureSession.CaptureCallback() {},
                        Handler { true }
                    )
                }
            }

            cameraDevice.createCaptureSession(mutableListOf(previewSurface, recordingSurface), captureCallback, Handler { true })
        }
}
I had put the creation of the FaceDetector inside the cameraCharacteristics block, but that code was called every time an image was captured. I needed to move the creation of the FaceDetector out of it (since only one instance is allowed, Google was understandably yelling at me).
Now I create it at the beginning of the onOpened listener of CameraDevice.StateCallback().
I think the best approach is to keep the detector as a nullable private property of the class, initialize it in the onOpened() listener, and close it in the onDisconnected() listener.
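For reference, here is a rough sketch of that structure (the cameraDevice field comes from the code above; everything else is just an illustration of the idea, not a drop-in implementation):

private var detector: FaceDetector? = null

private val cameraStateCallback = object : CameraDevice.StateCallback() {
    override fun onOpened(camera: CameraDevice) {
        cameraDevice = camera
        // Create the single FaceDetector instance once, when the camera opens
        detector = FaceDetection.getClient(
            FaceDetectorOptions.Builder()
                .setContourMode(FaceDetectorOptions.CONTOUR_MODE_ALL)
                .build()
        )
        // ... set up the ImageReader / capture session here, reusing `detector` ...
    }

    override fun onDisconnected(camera: CameraDevice) {
        // Release the detector so ML Kit does not complain about a leaked instance
        detector?.close()
        detector = null
        camera.close()
    }

    override fun onError(camera: CameraDevice, error: Int) {
        detector?.close()
        detector = null
        camera.close()
    }
}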
I'm trying to explore the CameraX beta version.
I'm stuck in my implementation: when I call imageCapture.takePicture(), imageCapture is null.
// Bind the CameraProvider to the LifeCycleOwner
val cameraSelector = CameraSelector.Builder().requireLensFacing(lensFacing).build()
val cameraProviderFuture = ProcessCameraProvider.getInstance(this)
cameraProviderFuture.addListener(Runnable {
    // CameraProvider
    val cameraProvider: ProcessCameraProvider = cameraProviderFuture.get()

    // ImageCapture
    imageCapture = ImageCapture.Builder()
        .setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
        .build()

    // Must unbind the use-cases before rebinding them
    cameraProvider.unbindAll()

    try {
        // A variable number of use-cases can be passed here -
        // camera provides access to CameraControl & CameraInfo
        camera = cameraProvider.bindToLifecycle(
            this, cameraSelector, imageCapture)
    } catch (exc: Exception) {
        Log.e("TAG", "Use case binding failed", exc)
    }
}, ContextCompat.getMainExecutor(this))

// Create output file to hold the image
photoFile = createFile(externalMediaDirs.first(), FILENAME, PHOTO_EXTENSION)

// Setup image capture metadata
val metadata = Metadata().apply {
    // Mirror image when using the front camera
    isReversedHorizontal = lensFacing == CameraSelector.LENS_FACING_FRONT
}

// Create output options object which contains file + metadata
outputOptions = ImageCapture.OutputFileOptions.Builder(photoFile)
    .setMetadata(metadata)
    .build()

// Setup image capture listener which is triggered after photo has been taken
imageCapture?.takePicture(
    outputOptions, cameraExecutor, object : ImageCapture.OnImageSavedCallback {
        override fun onError(exc: ImageCaptureException) {
            Log.e(TAG, "Photo capture failed: ${exc.message}", exc)
        }

        override fun onImageSaved(output: ImageCapture.OutputFileResults) {
            val savedUri = output.savedUri ?: Uri.fromFile(photoFile)
            Log.d(TAG, "Photo capture succeeded: $savedUri")
        }
    })
}
(I don't use an onClickListener; my function is called by a service.)
If I remove the cameraProviderFuture.addListener(Runnable { ... }) wrapper, I get "Not bound to a valid Camera".
I am using the CameraX beta version.
The ImageCapture use case doesn't currently work on its own. It has to be used in combination with at least a Preview or ImageAnalysis use case. This might change in future versions of CameraX. For now, you can check the documentation on supported use case combinations.
A simple fix to your problem would be to add an ImageAnalysis use case whose Analyzer simply closes the images it receives immediately:
val imageAnalysis = ImageAnalysis.Builder()
    .build()
    .apply {
        setAnalyzer(executor, ImageAnalysis.Analyzer { image ->
            image.close()
        })
    }

// Then bind both the ImageAnalysis and the ImageCapture use cases
cameraProvider.bindToLifecycle(this, cameraSelector, imageCapture, imageAnalysis)
I'm trying out the new CameraX API and ran into this problem: when I capture an image, it is stored with the wrong rotation. For example, I capture in portrait orientation, but the resulting image is in landscape orientation.
Here is my code:
private fun startCamera() {
    val previewConfig = PreviewConfig.Builder().apply {
        setTargetResolution(Size(textureView.width, textureView.height))
        setTargetRotation(textureView.display.rotation)
    }.build()

    val imageCaptureConfig = ImageCaptureConfig.Builder().apply {
        setCaptureMode(CaptureMode.MIN_LATENCY)
        setTargetAspectRatio(RATIO_4_3)
        setTargetRotation(textureView.display.rotation)
    }.build()
    imageCapture = ImageCapture(imageCaptureConfig)

    val preview = Preview(previewConfig)
    preview.setOnPreviewOutputUpdateListener { previewOutput ->
        removeView(textureView)
        addViewMatchParent(textureView, position = 0)
        textureView.surfaceTexture = previewOutput.surfaceTexture
        textureView.updateTransformForCameraFinderView()
    }

    (context as? LifecycleOwner)?.let { lifecycleOwner ->
        CameraX.bindToLifecycle(lifecycleOwner, preview, imageCapture)
    }
}

private fun capturePhoto() {
    tempImageFile = generateTmpFile(false)
    val executor = Executor { it.run() }
    imageCapture.takePicture(tempImageFile!!, executor, object : OnImageSavedListener {
        override fun onError(error: ImageCaptureError, message: String, exc: Throwable?) {
            exc?.printStackTrace()
        }

        override fun onImageSaved(photoFile: File) {
            post {
                // Load the image into an ImageView with Glide
                showCapturedPhotoPreview(photoFile)
            }
        }
    })
}
Please give me advice on how I can fix this.
P.S. I did try to find a solution first, so please don't just copy-paste the first answer that looks similar. :)
Update: I tried to set up my CameraView like in this sample, but in their case it works and in mine it does not.
Try this:
val imageCaptureConfig = ImageCaptureConfig.Builder().apply {
    setCaptureMode(CaptureMode.MIN_LATENCY)
    setTargetAspectRatio(RATIO_4_3)
    // Play with this line! Using a fixed rotation fixed it for me:
    setTargetRotation(Surface.ROTATION_0)
    // instead of:
    // setTargetRotation(textureView.display.rotation)
}.build()
To highlight the line that fixed my problem once more:
setTargetRotation(Surface.ROTATION_0)
I am trying to implement face detection using Firebase ML Kit and the CameraX ImageAnalysis use case. It works fine with the back camera, but when I tried the front camera, it detected nothing:
val config = PreviewConfig.Builder()
    .setLensFacing(CameraX.LensFacing.FRONT)
    .build()
val previewUseCase = Preview(config)
previewUseCase.setOnPreviewOutputUpdateListener { previewOutput ->
    viewFinder.post {
        removeView(viewFinder)
        addView(viewFinder, 0)
        viewFinder.surfaceTexture = previewOutput.surfaceTexture
        updateTransform(previewOutput)
    }
}

val highAccuracyOpts = FirebaseVisionFaceDetectorOptions.Builder()
    .setPerformanceMode(FirebaseVisionFaceDetectorOptions.ACCURATE)
    .build()
val detector = FirebaseVision.getInstance().getVisionFaceDetector(highAccuracyOpts)

val imageAnalysisConfig = ImageAnalysisConfig.Builder()
    .setImageReaderMode(ImageAnalysis.ImageReaderMode.ACQUIRE_LATEST_IMAGE)
    .build()
val imageAnalysis = ImageAnalysis(imageAnalysisConfig).apply {
    setAnalyzer(
        Executors.newSingleThreadExecutor(),
        ImageAnalysis.Analyzer { image, rotationDegrees ->
            if (image.image != null && isBusy.compareAndSet(false, true)) {
                val visionImage = FirebaseVisionImage.fromMediaImage(image.image!!, degreesToFirebaseRotation(rotationDegrees))
                detector.detectInImage(visionImage)
                    .addOnSuccessListener { faces ->
                        // faces.size is always zero when using the front camera
                        Timber.d("${faces.size}")
                        isBusy.set(false)
                    }
                    .addOnFailureListener { error ->
                        Timber.d("$error")
                    }
            }
        })
}

CameraX.bindToLifecycle(lifecycleOwner, previewUseCase, imageAnalysis)
I tested on a Nokia 8.1 with Android 10. I also tried https://github.com/firebase/quickstart-android/tree/master/mlkit, which does not use CameraX, and it works fine with the front camera.
Solved it by also setting the lens facing for ImageAnalysis to CameraX.LensFacing.FRONT:
val imageAnalysisConfig = ImageAnalysisConfig.Builder()
    .setLensFacing(CameraX.LensFacing.FRONT)
    .setImageReaderMode(ImageAnalysis.ImageReaderMode.ACQUIRE_LATEST_IMAGE)
    .build()