I am excited to use CameraX in my app.
My reference is Google's official CameraX sample on GitHub,
but something is wrong — please help me.
This is my code to set the aspect ratio:
/**
 * Chooses the CameraX [AspectRatio] constant (4:3 or 16:9) whose value is
 * closest to the ratio of the given preview dimensions.
 */
private fun aspectRatio(width: Int, height: Int): Int {
    val ratio = max(width, height).toDouble() / min(width, height)
    val closerTo43 = abs(ratio - RATIO_4_3_VALUE) <= abs(ratio - RATIO_16_9_VALUE)
    return if (closerTo43) AspectRatio.RATIO_4_3 else AspectRatio.RATIO_16_9
}
And this is my preview code:
// NOTE(review): the original "#SuppressLint" is not valid Kotlin — the
// annotation marker is '@' (garbled during extraction); restored below.
@SuppressLint("UnsafeExperimentalUsageError")
private fun startCamera() {
    val cameraProviderFuture = ProcessCameraProvider.getInstance(this)
    viewFinder = binding.surfaceView
    cameraProviderFuture.addListener({
        // Used to bind the lifecycle of cameras to the lifecycle owner
        val cameraProvider: ProcessCameraProvider = cameraProviderFuture.get()

        val metrics = DisplayMetrics().also { viewFinder.display.getRealMetrics(it) }
        Log.d(TAG, "Screen metrics: ${metrics.widthPixels} x ${metrics.heightPixels}")
        val screenAspectRatio = aspectRatio(metrics.widthPixels, metrics.heightPixels)
        Log.d(TAG, "Preview aspect ratio: $screenAspectRatio")
        // Removed a bare `Size(metrics.widthPixels, metrics.heightPixels)`
        // expression here: its result was never used (dead code).
        val rotation = viewFinder.display.rotation

        // Preview use case targeting the screen's aspect ratio and rotation.
        val preview = Preview.Builder()
            .setTargetRotation(rotation)
            .setTargetAspectRatio(screenAspectRatio)
            .build()

        // Capture use case. NOTE(review): the aspect ratio is only applied when
        // flashStatus is true — this asymmetry looks unintentional; confirm.
        imageCapture = if (flashStatus) {
            ImageCapture.Builder()
                .setCaptureMode(ImageCapture.CAPTURE_MODE_MAXIMIZE_QUALITY)
                .setTargetRotation(rotationimage)
                .setTargetAspectRatio(screenAspectRatio)
                .build()
        } else {
            ImageCapture.Builder()
                .setCaptureMode(ImageCapture.CAPTURE_MODE_MAXIMIZE_QUALITY)
                .setTargetRotation(rotationimage)
                .build()
        }

        // Select back camera as a default
        val cameraSelector = CameraSelector.DEFAULT_BACK_CAMERA

        // Unbind use cases before rebinding
        cameraProvider.unbindAll()
        viewFinder.scaleType = PreviewView.ScaleType.FILL_CENTER
        try {
            // Bind use cases to camera
            val cam = cameraProvider.bindToLifecycle(
                this, cameraSelector, preview, imageCapture)
            // NOTE(review): the torch is enabled when flashStatus is FALSE —
            // this looks inverted; verify the intended meaning of flashStatus.
            if (!flashStatus) {
                if (cam.cameraInfo.hasFlashUnit()) {
                    cam.cameraControl.enableTorch(true)
                }
            } else {
                cam.cameraControl.enableTorch(false)
            }
            // Attach the viewfinder's surface provider to preview use case
            preview.setSurfaceProvider(viewFinder.surfaceProvider)
        } catch (exc: Exception) {
            Log.e(TAG, "Use case binding failed", exc)
        }
    }, ContextCompat.getMainExecutor(this))
}
And this is the output preview.
The output preview is stretched.
What is wrong in my code?
Thank you for your help, folks.
UPDATE
When my device is in dark mode, the preview is normal.
The preview is normal in dark mode.
How can I fix it in normal (light) mode?
Related
This is my stripped down sourcecode for barcode scanning
build.gradle
dependencies {
.....
// MLKit Dependencies
implementation 'com.google.android.gms:play-services-vision:20.1.3'
implementation 'com.google.mlkit:barcode-scanning:17.0.2'
def camerax_version = "1.1.0-beta01"
implementation "androidx.camera:camera-core:${camerax_version}"
implementation "androidx.camera:camera-camera2:${camerax_version}"
implementation "androidx.camera:camera-lifecycle:${camerax_version}"
implementation "androidx.camera:camera-video:${camerax_version}"
......
}
ScanCameraFragment.kt
// Fragment hosting a CameraX preview plus an ML Kit barcode analyzer.
class ScanCameraFragment : BaseFragment() {
private lateinit var binding: FragmentScanCameraBinding
// Single-threaded executor on which ImageAnalysis frames are processed.
private lateinit var cameraExecutor: ExecutorService
//region Lifecycle Methods
override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?,
savedInstanceState: Bundle?): View? {
binding = FragmentScanCameraBinding.inflate(inflater, container, false)
cameraExecutor = Executors.newSingleThreadExecutor()
// NOTE(review): called before the view is attached; previewView.width/height
// read inside startCamera() may still be 0 here — confirm against the caller.
startCamera()
return binding.root
}
override fun onDestroyView() {
super.onDestroyView()
// Stop the analyzer thread so no further frames are processed.
cameraExecutor.shutdown()
}
companion object {
fun newInstance() = ScanCameraFragment().apply {}
}
// Asynchronously obtains the camera provider, then binds the preview and
// barcode-analysis use cases to this fragment's lifecycle.
private fun startCamera() {
context?.let { context ->
val cameraProviderFuture = ProcessCameraProvider.getInstance(context)
cameraProviderFuture.addListener({
val cameraProvider = cameraProviderFuture.get()
// Preview
val preview = Preview.Builder()
.build()
.also {
it.setSurfaceProvider(binding.previewView.surfaceProvider)
}
// Image analyzer: keep only the latest frame so slow analysis never
// queues up stale frames.
val imageAnalyzer = ImageAnalysis.Builder()
.setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
.build()
.also {
it.setAnalyzer(cameraExecutor,
QrCodeAnalyzer(context, binding.barcodeBoxView,
binding.previewView.width.toFloat(),
binding.previewView.height.toFloat()
)
)
}
// Select back camera as a default
val cameraSelector = CameraSelector.DEFAULT_BACK_CAMERA
try {
// Unbind use cases before rebinding
cameraProvider.unbindAll()
// Bind use cases to camera
var camera = cameraProvider.bindToLifecycle(this, cameraSelector,
preview, imageAnalyzer)
} catch (exc: Exception) {
exc.printStackTrace()
}
}, ContextCompat.getMainExecutor(context))
}
}
}
QRCodeAnalyzer.kt
/**
 * [ImageAnalysis.Analyzer] that scans each camera frame for barcodes with
 * ML Kit and draws the (scaled) bounding box of the first non-blank barcode.
 */
class QrCodeAnalyzer(
    private val context: Context,
    private val barcodeBoxView: BarcodeBoxView,
    private val previewViewWidth: Float,
    private val previewViewHeight: Float
) : ImageAnalysis.Analyzer {

    // Scale factors from analysis-image coordinates to preview-view coordinates.
    private var scaleX = 1f
    private var scaleY = 1f

    private fun translateX(x: Float) = x * scaleX
    private fun translateY(y: Float) = y * scaleY

    // Maps a bounding box from image space into preview-view space.
    private fun adjustBoundingRect(rect: Rect) = RectF(
        translateX(rect.left.toFloat()),
        translateY(rect.top.toFloat()),
        translateX(rect.right.toFloat()),
        translateY(rect.bottom.toFloat())
    )

    // Restored garbled '#SuppressLint' — Kotlin annotations use '@'.
    @SuppressLint("UnsafeOptInUsageError")
    override fun analyze(image: ImageProxy) {
        val img = image.image
        if (img != null) {
            // Update scale factors; width/height are swapped because the
            // analysis image arrives rotated relative to the preview.
            scaleX = previewViewWidth / img.height.toFloat()
            scaleY = previewViewHeight / img.width.toFloat()
            val inputImage = InputImage.fromMediaImage(img,
                image.imageInfo.rotationDegrees)
            // Process image searching for barcodes
            val options = BarcodeScannerOptions.Builder()
                .build()
            val scanner = BarcodeScanning.getClient(options)
            scanner.process(inputImage)
                .addOnSuccessListener { barcodes ->
                    for (barcode in barcodes) {
                        barcode?.rawValue?.let {
                            if (it.trim().isNotBlank()) {
                                Scanner.updateBarcode(it)
                                barcode.boundingBox?.let { rect ->
                                    barcodeBoxView.setRect(adjustBoundingRect(rect))
                                }
                            }
                            // Restored garbled 'return#...' — labeled returns use '@'.
                            return@addOnSuccessListener
                        }
                    }
                    // coming here means no satisfiable barcode was found
                    barcodeBoxView.setRect(RectF())
                }
                .addOnFailureListener {
                    image.close()
                }
                // NOTE(review): this second failure listener is redundant.
                .addOnFailureListener { }
        }
        // NOTE(review): closing here races the asynchronous scanner above —
        // the ImageProxy should be closed only once processing completes.
        image.close()
    }
}
This code works and I am able to scan barcodes. But sometimes, the barcode detection is slow. The documentation says one way to increase performance is to limit the image resolution.
Don't capture input at the camera’s native resolution. On some
devices, capturing input at the native resolution produces extremely
large (10+ megapixels) images, which results in very poor latency with
no benefit to accuracy. Instead, only request the size from the camera
that's required for barcode detection, which is usually no more than 2
megapixels.
If scanning speed is important, you can further lower the image
capture resolution. However, bear in mind the minimum barcode size
requirements outlined above.
Unfortunately, the documentation doesn't specify how to reduce the image resolution. And some of my end users are using high end devices with powerful camera, so we assume the poor performance is because of the image size.
How can I reduce the resolution of the image to a fixed value (something like 1024 x 768) rather than the default camera resolution?
You can set it on the imageAnalyzer builder by using
.setTargetResolution(Size)
val imageAnalysisUseCaseBuilder = ImageAnalysis.Builder()
imageAnalysisUseCaseBuilder.setTargetResolution(Size(1024, 768))
imageAnalysisUseCase = imageAnalysisUseCaseBuilder.build()
or in your case:
val imageAnalyzer = ImageAnalysis.Builder()
.setTargetResolution(Size(1024, 768))
.setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
.build()
.also {
it.setAnalyzer(cameraExecutor,
QrCodeAnalyzer(context, binding.barcodeBoxView,
binding.previewView.width.toFloat(),
binding.previewView.height.toFloat()
)
)
}
User HarmenH's answer correctly tells how to set the image resolution, so I am not repeating it here.
As it turns out, the performance issue on my end was not because of image resolution. It seems I was closing the imageProxy prematurely.
// Analyzes one camera frame with the ML Kit barcode scanner. The ImageProxy is
// closed only from the scanner's completion callbacks — never prematurely —
// which was the performance fix described above.
override fun analyze(image: ImageProxy) {
    val img = image.image
    if (img != null) {
        // Update scale factors
        scaleX = previewViewWidth / img.height.toFloat()
        scaleY = previewViewHeight / img.width.toFloat()
        val inputImage = InputImage.fromMediaImage(img,
            image.imageInfo.rotationDegrees)
        // Process image searching for barcodes
        val options = BarcodeScannerOptions.Builder()
            .build()
        val scanner = BarcodeScanning.getClient(options)
        scanner.process(inputImage)
            // Restored garbled '- >' tokens — the lambda arrow is '->'.
            .addOnSuccessListener { barcodes ->
                for (barcode in barcodes) {
                    barcode?.rawValue?.let {
                        if (it.trim().isNotBlank()) {
                            Scanner.updateBarcode(it)
                            barcode.boundingBox?.let { rect ->
                                barcodeBoxView.setRect(adjustBoundingRect(rect))
                            }
                        }
                        // Restored garbled 'return #...' — labeled returns use '@'.
                        return@addOnSuccessListener
                    }
                }
                // coming here means no satisfiable barcode was found
                barcodeBoxView.setRect(RectF())
            }
            .addOnFailureListener {
                image.close()
            }
            .addOnFailureListener {
                //added this here.
                image.close()
            }
        // NOTE(review): nothing closes the ImageProxy on success; with
        // STRATEGY_KEEP_ONLY_LATEST that can stall frame delivery — confirm.
    }
    //Removed this because we don't close the
    //imageProxy before analysis completes
    //image.close()
}
I am creating an application which must implement its own camera.
I use the cameraX library provided by google.
I noticed that there is a difference between the quality of the image captured by my own application, and the image captured by the camera application installed on my phone.
although the 2 photos are captured with the same conditions (light, position...)
Especially when I zoom in, the details of the image captured by my application become blurrier.
(in my own case, my phone is Google Pixel 5)
Please see these 2 photos to see the difference
Image by phone camera
Image by my app
And this is my code
/**
 * Initialize CameraX, and prepare to bind the camera use cases.
 *
 * Obtains the process camera provider asynchronously, picks the back camera
 * when available (otherwise the front camera), then binds the use cases and
 * installs the gesture handlers. Throws if neither camera exists.
 */
private fun setupCamera()
{
val cameraProviderFuture : ListenableFuture<ProcessCameraProvider> = ProcessCameraProvider.getInstance(this)
cameraProviderFuture.addListener({
cameraProvider = cameraProviderFuture.get()
// Prefer the back camera; fall back to the front camera.
lensFacing = when
{
hasBackCamera() -> CameraSelector.LENS_FACING_BACK
hasFrontCamera() -> CameraSelector.LENS_FACING_FRONT
else -> throw IllegalStateException("Back and front camera are unavailable")
}
bindCameraUseCases()
setupCameraGestures()
}, ContextCompat.getMainExecutor(this))
}
/**
 * Declare and bind preview, capture and analysis use cases.
 * Enables the AUTO extension (HDR/night etc.) when the device supports it.
 */
private fun bindCameraUseCases()
{
lifecycleScope.launch {
val cameraProvider : ProcessCameraProvider = cameraProvider ?: throw IllegalStateException("Camera initialization failed.")
// Try to apply extensions like HDR, NIGHT ##########################################
// NOTE(review): restored garbled 'this#ImageCaptureActivity' — a qualified
// this-expression uses '@' (this@ImageCaptureActivity).
val extensionsManager : ExtensionsManager = ExtensionsManager.getInstanceAsync(this@ImageCaptureActivity, cameraProvider).await()
val defaultCameraSelector : CameraSelector = CameraSelector.Builder()
.requireLensFacing(lensFacing)
.build()
val finalCameraSelector : CameraSelector = if (extensionsManager.isExtensionAvailable(defaultCameraSelector, ExtensionMode.AUTO))
{
extensionsManager.getExtensionEnabledCameraSelector(defaultCameraSelector, ExtensionMode.AUTO)
}
else
{
defaultCameraSelector
}
// Get screen metrics used to setup camera for full screen resolution
val metrics : DisplayMetrics = resources.displayMetrics
val screenAspectRatio : Int = aspectRatio(metrics.widthPixels, metrics.heightPixels)
val rotation : Int = binding.cameraPreview.display.rotation
preview = Preview.Builder()
// We request aspect ratio but no resolution
.setTargetAspectRatio(screenAspectRatio)
// Set initial target rotation
.setTargetRotation(rotation)
.build()
imageCapture = ImageCapture.Builder()
// We request aspect ratio but no resolution to match preview config, but letting
// CameraX optimize for whatever specific resolution best fits our use cases
.setTargetAspectRatio(screenAspectRatio)
// Set initial target rotation, we will have to call this again if rotation changes
// during the lifecycle of this use case
.setTargetRotation(rotation)
.setCaptureMode(ImageCapture.CAPTURE_MODE_MAXIMIZE_QUALITY)
.setJpegQuality(100)
.build()
imageAnalyzer = ImageAnalysis.Builder()
// We request aspect ratio but no resolution
.setTargetAspectRatio(screenAspectRatio)
.build()
imageAnalyzer?.setAnalyzer(cameraExecutor, LuminosityAnalyzer {})
// Must unbind the use-cases before rebinding them
cameraProvider.unbindAll()
try
{
// A variable number of use-cases can be passed here -
// camera provides access to CameraControl & CameraInfo
camera = cameraProvider.bindToLifecycle(this@ImageCaptureActivity, finalCameraSelector, preview, imageCapture, imageAnalyzer)
// Attach the viewfinder's surface provider to preview use case
preview?.setSurfaceProvider(binding.cameraPreview.surfaceProvider)
}
catch (exception : Exception)
{
exception.printStackTrace()
}
}
}
/**
 * [androidx.camera.core.ImageAnalysisConfig] requires enum value of [androidx.camera.core.AspectRatio].
 * Currently it has values of 4:3 & 16:9.
 *
 * Detects the most suitable ratio for the provided dimensions by comparing the
 * absolute distance of the preview ratio to each of the two supported values.
 * (Garbled '#param'/'#return' KDoc tags restored to '@param'/'@return'.)
 *
 * @param width - preview width
 * @param height - preview height
 * @return suitable aspect ratio
 */
private fun aspectRatio(width : Int, height : Int) : Int
{
val previewRatio : Double = max(width, height).toDouble() / min(width, height)
return if (abs(previewRatio - RATIO_4_3_VALUE) <= abs(previewRatio - RATIO_16_9_VALUE))
{
AspectRatio.RATIO_4_3
}
else
{
AspectRatio.RATIO_16_9
}
}
// Captures a photo to a new file, tagging it with location and (for the front
// camera) horizontal-mirror metadata, then shows it in the gallery/list UI.
fun captureImage()
{
if (!permissionsOk()) return
// Get a stable reference of the modifiable image capture use case
imageCapture?.let { imageCapture ->
// Create output file to hold the image
val photoFile : File = storageUtils.createFile(
baseFolder = getOutputPath(),
fileName = System.currentTimeMillis().toString(),
fileExtension = StorageUtils.PHOTO_EXTENSION)
// Setup image capture metadata
val metadata : Metadata = Metadata().also {
// Mirror image when using the front camera
it.isReversedHorizontal = lensFacing == CameraSelector.LENS_FACING_FRONT
it.location = locationManager.lastKnownLocation
}
// Create output options object which contains file + metadata
val outputOptions : ImageCapture.OutputFileOptions = ImageCapture.OutputFileOptions.Builder(photoFile)
.setMetadata(metadata)
.build()
// NOTE(review): the file is added to the adapter here AND again in
// onImageSaved below — this looks like a duplicate entry; confirm.
imagesAdapter.addImage(photoFile)
// Setup image capture listener which is triggered after photo has been taken
imageCapture.takePicture(outputOptions, cameraExecutor, object : ImageCapture.OnImageSavedCallback
{
override fun onImageSaved(output : ImageCapture.OutputFileResults)
{
val savedUri : Uri = output.savedUri ?: return
StorageUtils.showInGallery(savedUri.path)
// Runs on cameraExecutor; post to the list view to touch UI safely.
binding.list.post {
imagesAdapter.addImage(savedUri.toFile())
binding.list.smoothScrollToPosition(imagesAdapter.itemCount)
}
}
override fun onError(exception : ImageCaptureException)
{
exception.printStackTrace()
}
})
// Brief flash overlay as visual shutter feedback.
binding.cameraPreview.postDelayed({
binding.backgroundEffect.isVisible = true
binding.cameraPreview.postDelayed({
binding.backgroundEffect.isVisible = false
}, AppUtils.VERY_FAST_ANIMATION_MILLIS)
}, AppUtils.FAST_ANIMATION_MILLIS)
}
}
How can I improve the quality of my images? Is there anything I should do? Is there a special filter or algorithm?
I need your help, please.
If you took the photo on a Pixel, you probably used the default camera app (GCam) — that app is packed with quality improvements backed by AI, so it is a tough task to compete with the biggest players in quality. Try taking a photo with a third-party app like OpenCamera and compare that picture with the one produced by your app.
You can use CameraX Extension feature to enable HDR & Low light.
this improves the image quality significantly.
My use case is to take two images at one time. first one at 2x zoom and second one at 1x zoom level. Also, I want to save images to files.
My idea of doing it was to take the first image at 2x zoom and when the image is saved set the zoom level at 1x and take the second image when the lens has zoomed to 1x zoom level.
However, when I take the first image the preview is stuck at the first image and callback from setting 1x zoom never happens.
This is how I create the capture use cases.
private void createImageCaptureUseCases() {
ImageCapture imageCapture1 = new ImageCapture.Builder()
.setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
.build();
ImageCapture imageCapture2 = new ImageCapture.Builder()
.setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
.build();
imageCaptureUseCases.clear();
imageCaptureUseCases.add(imageCapture1);
imageCaptureUseCases.add(imageCapture2);
This is how I first start the camera session.
ListenableFuture<ProcessCameraProvider> cameraProviderFuture = ProcessCameraProvider.getInstance(getContext());
cameraProviderFuture.addListener(() -> {
try {
cameraProvider = cameraProviderFuture.get();
preview = new Preview.Builder().build();
cameraSelector = new CameraSelector.Builder()
.requireLensFacing(CameraSelector.LENS_FACING_BACK)
.build();
Camera camera = cameraProvider.bindToLifecycle(
((LifecycleOwner) this),
cameraSelector,
preview,
imageCapture);
camera.getCameraControl().setZoomRatio(2f);
preview.setSurfaceProvider(previewView.createSurfaceProvider(camera.getCameraInfo()));
} catch (InterruptedException | ExecutionException e) {}
}, ContextCompat.getMainExecutor(getContext()));
this is how the capture images is called.
private void captureImage(ImageCapture imageCapture) {
File pictureFile = ImageUtils.createImageFile(getActivity());
ImageCapture.OutputFileOptions options = new
ImageCapture.OutputFileOptions.Builder(pictureFile).build();
final Activity activity = getActivity();
imageCapture.takePicture(options, ContextCompat.getMainExecutor(activity),
new ImageCapture.OnImageSavedCallback() {
#Override
public void onImageSaved(#NonNull ImageCapture.OutputFileResults outputFileResults){
Log.i("my tag", "image Saved: " + pictureFile.getAbsolutePath());
int index = imageCaptureUseCases.indexOf(imageCapture);
cameraProvider.unbind(imageCapture);
if (index < imageCaptureUseCases.size() - 1) {
Camera camera = cameraProvider.bindToLifecycle(
(LifecycleOwner) activity,
cameraSelector,
imageCaptureUseCases.get(index + 1));
ListenableFuture future = camera.getCameraControl().setZoomRatio(1f);
future.addListener(() -> captureImage(imageCaptureUseCases.get(index + 1)),
ContextCompat.getMainExecutor(activity));
} else {
createImageCaptureUseCases();
cameraProvider.unbindAll();
Camera camera = cameraProvider.bindToLifecycle(
(LifecycleOwner) activity,
cameraSelector,
preview,
imageCaptureUseCases.get(0));
camera.getCameraControl().setZoomRatio(2f);
}
}
#Override
public void onError(#NonNull ImageCaptureException exception) {
Log.i("my tag", "image save error: " + pictureFile.getAbsolutePath());
}
});
}
You don't need multiple ImageCapture instances to capture an image with 2 zoom ratios, you can use the same instance, ImageCapture handles taking a picture and saving it/providing it, irrelevant to parameters such as the zoom ratio.
Looking at your code sample, it seems you might not being binding a Preview use case the second time you try to capture an image (with a different zoom ratio). This would explain why your preview is getting stuck after the first image capture. Keep in mind that an ImageCapture use case cannot be bound on its own, it must be bound with at least 1 Preview or ImageAnalysis use case.
Below is a sample to capture 2 images, each with a different zoom ratio. The code contains some repetition, and is all in 1 block, so it can definitely be improved.
// Sets up the camera and, when the preview is tapped, captures two photos
// back-to-back with a single ImageCapture instance: the first at 1x zoom,
// the second at 2x zoom.
private fun setUpCamera() {
val mainExecutor = ContextCompat.getMainExecutor(this)
val cameraProviderFuture = ProcessCameraProvider.getInstance(this)
cameraProviderFuture.addListener(Runnable {
// Wait for the camera provider to be retrieved
val cameraProvider = cameraProviderFuture.get()
// Build your use cases
val preview = Preview.Builder().build()
val imageCapture = ImageCapture.Builder().build()
// Get a camera selector to use
val cameraSelector = CameraSelector.DEFAULT_BACK_CAMERA
// Bind the use cases to a lifecycle
val camera = cameraProvider.bindToLifecycle(this, cameraSelector, preview, imageCapture)
// Set the preview surface provider
preview.setSurfaceProvider(previewView.createSurfaceProvider(camera.cameraInfo))
// Set the zoom ratio for the first photo
val cameraControl = camera.cameraControl
cameraControl.setZoomRatio(1F)
// When the previewView is clicked, take the photos
previewView.setOnClickListener {
imageCapture.takePicture(createOutputFilesOptions(), mainExecutor, object : ImageCapture.OnImageSavedCallback {
override fun onImageSaved(outputFileResults: ImageCapture.OutputFileResults) {
// First image captured and saved successfully
Log.d(TAG, "OnImageSavedCallback.onImageSaved: Image saved with zoom ratio 1F")
// Set a new zoom ratio for the second image capture
// NOTE(review): the second takePicture is issued without awaiting the
// ListenableFuture returned by setZoomRatio — confirm the zoom has
// been applied before the capture on slower devices.
cameraControl.setZoomRatio(2F)
// Capture the second picture with a different zoom ratio
imageCapture.takePicture(createOutputFilesOptions(), mainExecutor, object : ImageCapture.OnImageSavedCallback {
override fun onImageSaved(outputFileResults: ImageCapture.OutputFileResults) {
// Second image captured and saved successfully
Log.d(TAG, "OnImageSavedCallback.onImageSaved: Image saved with zoom ratio 2F")
}
override fun onError(exception: ImageCaptureException) {
Log.e(TAG, "OnImageSavedCallback.onError", exception)
}
})
}
override fun onError(exception: ImageCaptureException) {
Log.e(TAG, "OnImageSavedCallback.onError", exception)
}
})
}
}, mainExecutor)
}
}
I'm willing to implement zoom feature in my app with CameraX API. I followed this medium post to implement pinch to zoom and it works.
The problem is when I retrieve the captured image in onCaptureSuccess callback, The image is not zoomed.
Here is the code I use to implement zoom on Camera in onCreate():
//ZOOM
val listener = object : ScaleGestureDetector.SimpleOnScaleGestureListener() {
override fun onScale(detector: ScaleGestureDetector): Boolean {
val zoomRatio = camera?.cameraInfo?.zoomState?.value?.zoomRatio ?: 0f
val scale = zoomRatio * detector.scaleFactor
camera?.cameraControl?.setZoomRatio(scale)
return true
}
}
scaleDetector = ScaleGestureDetector(context, listener)
And in method "bindCameraUseCases()" :
previewCamera.setOnTouchListener { _, event ->
scaleDetector.onTouchEvent(event)
}
The full method if needed :
/**
 * Declare and bind preview, capture and analysis use cases.
 * Also installs the pinch-to-zoom touch listener on the preview view.
 */
fun bindCameraUseCases() {
// Get screen metrics used to setup camera for full screen resolution
val metrics = DisplayMetrics().also { previewCamera.display.getRealMetrics(it) }
Log.d(TAG, "Screen metrics: ${metrics.widthPixels} x ${metrics.heightPixels}")
val rotation = previewCamera.display.rotation
// Bind the CameraProvider to the LifeCycleOwner
val cameraSelector = CameraSelector.Builder().requireLensFacing(lensFacing).build()
val cameraProviderFuture = ProcessCameraProvider.getInstance(requireContext())
cameraProviderFuture.addListener(Runnable {
// CameraProvider
val cameraProvider: ProcessCameraProvider = cameraProviderFuture.get()
// Preview
preview = Preview.Builder()
.setTargetRotation(rotation)
.build()
previewCamera.preferredImplementationMode =
PreviewView.ImplementationMode.TEXTURE_VIEW // when setting to TEXTURE_VIEW, preview doesnt take full screen on back pressed
// Forward touch events to the pinch-to-zoom gesture detector.
previewCamera.setOnTouchListener { _, event ->
scaleDetector.onTouchEvent(event)
}
// Default PreviewSurfaceProvider
// NOTE(review): this reads `camera?.cameraInfo` BEFORE `camera` is assigned
// below in this same pass — on the first call it is null; confirm intended.
preview?.setSurfaceProvider(previewCamera.createSurfaceProvider(camera?.cameraInfo))
val screenAspectRatio = aspectRatio(metrics.widthPixels, metrics.heightPixels)
// ImageCapture
imageCapture = ImageCapture.Builder()
.setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
.setTargetAspectRatio(screenAspectRatio)
.setTargetRotation(rotation)
.build()
// ImageAnalysis
imageAnalyzer = ImageAnalysis.Builder()
.setTargetAspectRatio(screenAspectRatio)
.setTargetRotation(rotation)
.build()
cameraProvider.unbindAll()
try {
camera = cameraProvider.bindToLifecycle(
this as LifecycleOwner, cameraSelector, preview, imageCapture, imageAnalyzer
)
} catch (exc: Exception) {
Log.e(TAG, "Use case binding failed", exc)
}
}, mainExecutor)
}
As I mentioned, zoom is working, but then in onCaptureSuccess the ImageProxy is not zoomed.
// ImageCapture callback: converts the captured frame to a Bitmap.
// NOTE(review): the `use` lambda parameter `image` shadows the method
// parameter of the same name; `use` closes the ImageProxy on exit.
override fun onCaptureSuccess(image: ImageProxy) {
image.use { image ->
savedBitmap = image.imageProxyToBitmap()
///...
}
}
Here is the extension function to retrieve bitmap from imageProxy :
// Decodes plane 0 of the ImageProxy into a Bitmap and rotates it 90°.
// Assumes plane 0 holds a compressed (JPEG) image — TODO confirm.
// NOTE(review): the 90° rotation is hard-coded instead of using
// imageInfo.rotationDegrees, so it will be wrong in some orientations.
fun ImageProxy.imageProxyToBitmap(): Bitmap {
val buffer = this.planes[0].buffer
buffer.rewind()
val bytes = ByteArray(buffer.capacity())
buffer.get(bytes)
val bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.size)
val matrix = Matrix()
matrix.postRotate(90f)
return Bitmap.createBitmap(bitmap, 0, 0,bitmap.width,bitmap.height, matrix, true)
}
Here are my dependencies :
// CameraX core library
def camerax_version = "1.0.0-beta02"
implementation "androidx.camera:camera-core:$camerax_version"
// CameraX Camera2 extensions
implementation "androidx.camera:camera-camera2:$camerax_version"
// CameraX Lifecycle library
implementation "androidx.camera:camera-lifecycle:$camerax_version"
// CameraX View class
implementation "androidx.camera:camera-view:1.0.0-alpha09"
Thank you for your help 🙏
not sure why this happens but we are unable to reproduce the issue.
In my test, ImageCapture always captures the image with zoom applied.
Currently I suspect this could a device issue. It could be helpful if you can provide the device name. It will also be helpful if you can verify it on other devices.
When I try to switch the camera preview from BACK to FRONT, my screen freezes. If I minimize the app and restart it, the camera preview works perfectly.
below is the camera code.
// Builds and binds preview, image-capture and video-capture use cases for the
// current lensFacing value. Uses the legacy pre-1.0 CameraX config API
// (PreviewConfig/ImageCaptureConfig/VideoCaptureConfig).
private fun startCamera() {
CameraX.unbindAll()
val metrics = DisplayMetrics().also { viewFinder.display.getRealMetrics(it) }
val screenSize = Size(metrics.widthPixels, metrics.heightPixels)
val screenAspectRatio = Rational(metrics.widthPixels, metrics.heightPixels)
val previewConfig = PreviewConfig.Builder().apply {
setLensFacing(lensFacing)
setTargetResolution(screenSize)
setTargetAspectRatio(screenAspectRatio)
// NOTE(review): setTargetRotation is called twice; the second call
// (viewFinder.display.rotation) overrides the first.
setTargetRotation(windowManager.defaultDisplay.rotation)
setTargetRotation(viewFinder.display.rotation)
}.build()
preview = Preview(previewConfig)
// Re-attach the preview's SurfaceTexture to the TextureView on each update.
preview.setOnPreviewOutputUpdateListener {
viewFinder.surfaceTexture = it.surfaceTexture
updateTransform()
}
// Create configuration object for the image capture use case
val imageCaptureConfig = ImageCaptureConfig.Builder()
.apply {
setLensFacing(lensFacing)
setTargetAspectRatio(screenAspectRatio)
setTargetRotation(viewFinder.display.rotation)
setCaptureMode(ImageCapture.CaptureMode.MIN_LATENCY)
}.build()
// Build the image capture use case and attach button click listener
imageCapture = ImageCapture(imageCaptureConfig)
//for recording the video
val videoCaptureConfig = VideoCaptureConfig.Builder().apply {
setLensFacing(lensFacing)
setTargetAspectRatio(screenAspectRatio)
setTargetRotation(viewFinder.display.rotation)
}.build()
videoCapture = VideoCapture(videoCaptureConfig)
CameraX.bindToLifecycle(this, preview, imageCapture, videoCapture)
}
and the updateTransform code is
// Applies a rotation transform to the TextureView so the preview output
// compensates for the current display rotation.
private fun updateTransform() {
    // Map the display rotation to degrees; bail out on unknown values.
    val degrees = when (viewFinder.display.rotation) {
        Surface.ROTATION_0 -> 0f
        Surface.ROTATION_90 -> 90f
        Surface.ROTATION_180 -> 180f
        Surface.ROTATION_270 -> 270f
        else -> return
    }
    // Rotate about the center of the view finder, counter to the display.
    val pivotX = viewFinder.width / 2f
    val pivotY = viewFinder.height / 2f
    viewFinder.setTransform(Matrix().apply { postRotate(-degrees, pivotX, pivotY) })
}
This is how I try to switch between the cameras:
lensFacing = if (lensFacing == CameraX.LensFacing.BACK) {
CameraX.LensFacing.FRONT
} else
CameraX.LensFacing.BACK
try {
CameraX.getCameraWithLensFacing(lensFacing)
CameraX.unbind(preview, imageCapture, videoCapture)
startCamera()
} catch (e: Exception) {
e.printStackTrace()
}
After calling the above code on a button click, the preview freezes.
You may need to re-attach the textureView in the onUpdated callback.
Refer this,
https://android.googlesource.com/platform/frameworks/support/+/refs/heads/androidx-master-dev/camera/integration-tests/coretestapp/src/main/java/androidx/camera/integration/core/CameraXActivity.java#168
Let me know if it works.