Barcode reader with ML Kit - Android Kotlin

I am trying to implement a barcode reader with the new ML Kit. I don't know what I am doing wrong here. I was following the official guides:
https://developers.google.com/ml-kit/vision/barcode-scanning/android
https://codelabs.developers.google.com/codelabs/camerax-getting-started/#0
I also cloned the sample apps from the official GitHub page, and there everything works like a charm. I have no idea what is wrong here. I pass every frame to the scanner and also clean up after processing each image:
typealias QrListener = (qrValue: String) -> Unit
class MainActivity : AppCompatActivity() {
private var imageCapture: ImageCapture? = null
private lateinit var outputDirectory: File
private lateinit var cameraExecutor: ExecutorService
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
// Request camera permissions
if (allPermissionsGranted()) {
startCamera()
} else {
ActivityCompat.requestPermissions(
this, REQUIRED_PERMISSIONS, REQUEST_CODE_PERMISSIONS
)
}
// Setup the listener for take photo button
camera_capture_button.setOnClickListener { takePhoto() }
outputDirectory = getOutputDirectory()
cameraExecutor = Executors.newSingleThreadExecutor()
}
private fun takePhoto() {
// Get a stable reference of the modifiable image capture use case
val imageCapture = imageCapture ?: return
// Create timestamped output file to hold the image
val photoFile = File(
outputDirectory,
SimpleDateFormat(
FILENAME_FORMAT, Locale.US
).format(System.currentTimeMillis()) + ".jpg"
)
// Create output options object which contains file + metadata
val outputOptions = ImageCapture.OutputFileOptions.Builder(photoFile).build()
// Setup image capture listener which is triggered after photo has
// been taken
imageCapture.takePicture(
outputOptions,
ContextCompat.getMainExecutor(this),
object : ImageCapture.OnImageSavedCallback {
override fun onError(exc: ImageCaptureException) {
Log.e(TAG, "Photo capture failed: ${exc.message}", exc)
}
override fun onImageSaved(output: ImageCapture.OutputFileResults) {
val savedUri = Uri.fromFile(photoFile)
val msg = "Photo capture succeeded: $savedUri"
Toast.makeText(baseContext, msg, Toast.LENGTH_SHORT).show()
Log.d(TAG, msg)
}
})
}
private fun startCamera() {
val cameraProviderFuture = ProcessCameraProvider.getInstance(this)
cameraProviderFuture.addListener(Runnable {
// Used to bind the lifecycle of cameras to the lifecycle owner
val cameraProvider: ProcessCameraProvider = cameraProviderFuture.get()
// Preview
val preview = Preview.Builder()
.build()
.also {
it.setSurfaceProvider(viewFinder.createSurfaceProvider())
}
imageCapture = ImageCapture.Builder()
.build()
val imageAnalyzer = ImageAnalysis.Builder()
.build()
.also {
it.setAnalyzer(cameraExecutor, QrAnalyzer { luma ->
Log.d(TAG, "Average luminosity: $luma")
})
}
// Select back camera as a default
val cameraSelector = CameraSelector.DEFAULT_BACK_CAMERA
try {
// Unbind use cases before rebinding
cameraProvider.unbindAll()
// Bind use cases to camera
cameraProvider.bindToLifecycle(
this, cameraSelector, preview, imageCapture, imageAnalyzer
)
} catch (exc: Exception) {
Log.e(TAG, "Use case binding failed", exc)
}
}, ContextCompat.getMainExecutor(this))
}
private fun allPermissionsGranted() = REQUIRED_PERMISSIONS.all {
ContextCompat.checkSelfPermission(
baseContext, it
) == PackageManager.PERMISSION_GRANTED
}
private fun getOutputDirectory(): File {
val mediaDir = externalMediaDirs.firstOrNull()?.let {
File(it, resources.getString(R.string.app_name)).apply { mkdirs() }
}
return if (mediaDir != null && mediaDir.exists())
mediaDir else filesDir
}
override fun onDestroy() {
super.onDestroy()
cameraExecutor.shutdown()
}
override fun onRequestPermissionsResult(
requestCode: Int, permissions: Array<String>, grantResults:
IntArray
) {
if (requestCode == REQUEST_CODE_PERMISSIONS) {
if (allPermissionsGranted()) {
startCamera()
} else {
Toast.makeText(
this,
"Permissions not granted by the user.",
Toast.LENGTH_SHORT
).show()
finish()
}
}
}
companion object {
private const val TAG = "CameraXBasic"
private const val FILENAME_FORMAT = "yyyy-MM-dd-HH-mm-ss-SSS"
private const val REQUEST_CODE_PERMISSIONS = 10
private val REQUIRED_PERMISSIONS = arrayOf(Manifest.permission.CAMERA)
}
}
class QrAnalyzer(private val listener: QrListener) : ImageAnalysis.Analyzer {
private fun ByteBuffer.toByteArray(): ByteArray {
rewind() // Rewind the buffer to zero
val data = ByteArray(remaining())
get(data) // Copy the buffer into a byte array
return data // Return the byte array
}
@SuppressLint("UnsafeExperimentalUsageError")
override fun analyze(image: ImageProxy) {
val mediaImage = image.image
if (mediaImage != null) {
val img = InputImage.fromMediaImage(mediaImage, image.imageInfo.rotationDegrees)
// Pass image to an ML Kit Vision API
// ...
scan(img)
}
image.close()
}
private fun scan(image: InputImage) {
val options = BarcodeScannerOptions.Builder()
.setBarcodeFormats(
Barcode.FORMAT_ALL_FORMATS
)
.build()
val scanner = BarcodeScanning.getClient(options)
val result = scanner.process(image)
.addOnSuccessListener { barcodes ->
// Task completed successfully
// [START_EXCLUDE]
// [START get_barcodes]
for (barcode in barcodes) {
val bounds = barcode.boundingBox
val corners = barcode.cornerPoints
val rawValue = barcode.rawValue
val valueType = barcode.valueType
// See API reference for complete list of supported types
Log.v("TESTING: ", "Barcode detected: $rawValue")
}
}
.addOnFailureListener { }
.addOnCompleteListener { }
}
}
And my Gradle file:
apply plugin: 'com.android.application'
apply plugin: 'kotlin-android'
apply plugin: 'kotlin-android-extensions'
android {
compileSdkVersion 29
buildToolsVersion "29.0.3"
defaultConfig {
applicationId "com.example.test_mlkit"
minSdkVersion 25
multiDexEnabled true
targetSdkVersion 29
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
kotlinOptions {
jvmTarget = "1.8"
}
aaptOptions {
noCompress "tflite"
}
}
dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
implementation 'androidx.appcompat:appcompat:1.1.0'
implementation 'androidx.core:core-ktx:1.3.1'
implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
testImplementation 'junit:junit:4.12'
androidTestImplementation 'androidx.test.ext:junit:1.1.1'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0'
implementation 'com.google.mlkit:barcode-scanning:16.0.2'
def camerax_version = "1.0.0-beta07"
// CameraX core library using camera2 implementation
implementation "androidx.camera:camera-camera2:$camerax_version"
// CameraX Lifecycle Library
implementation "androidx.camera:camera-lifecycle:$camerax_version"
// CameraX View class
implementation "androidx.camera:camera-view:1.0.0-alpha14"
}
These are the logs I receive when I run the app:
W/mple.test_mlki: Accessing hidden method Lsun/misc/Unsafe;->putObject(Ljava/lang/Object;JLjava/lang/Object;)V (greylist, linking, allowed)
Accessing hidden method Lsun/misc/Unsafe;->putInt(Ljava/lang/Object;JI)V (greylist, linking, allowed)
W/mple.test_mlki: Accessing hidden method Lsun/misc/Unsafe;->putObject(Ljava/lang/Object;JLjava/lang/Object;)V (greylist, linking, allowed)
Accessing hidden method Lsun/misc/Unsafe;->putInt(Ljava/lang/Object;JI)V (greylist, linking, allowed)
W/mple.test_mlki: Accessing hidden method Lsun/misc/Unsafe;->putObject(Ljava/lang/Object;JLjava/lang/Object;)V (greylist, linking, allowed)
W/mple.test_mlki: Accessing hidden method Lsun/misc/Unsafe;->putInt(Ljava/lang/Object;JI)V (greylist, linking, allowed)
W/mple.test_mlki: Accessing hidden method Lsun/misc/Unsafe;->putInt(Ljava/lang/Object;JI)V (greylist, linking, allowed)

Related

Cannot access class 'com.google.common.util.concurrent.ListenableFuture'. Check your module classpath for missing or conflicting dependencies

I am trying to integrate CameraX in my Flutter app, but I get an error saying Cannot access class 'com.google.common.util.concurrent.ListenableFuture'. Check your module classpath for missing or conflicting dependencies
The error comes from the line below:
val cameraProviderFuture = ProcessCameraProvider.getInstance(context)
Below is my native view
class CealScanQrView(val context: Context, id: Int, creationParams: Map<String?, Any?>?) :
PlatformView {
private var mCameraProvider: ProcessCameraProvider? = null
private var preview: PreviewView
private var linearLayout: LinearLayout = LinearLayout(context)
private lateinit var cameraExecutor: ExecutorService
private lateinit var options: BarcodeScannerOptions
private lateinit var scanner: BarcodeScanner
private var analysisUseCase: ImageAnalysis = ImageAnalysis.Builder()
.build()
companion object {
private val REQUIRED_PERMISSIONS = mutableListOf(Manifest.permission.CAMERA).toTypedArray()
}
init {
val linearLayoutParams = ViewGroup.LayoutParams(
ViewGroup.LayoutParams.WRAP_CONTENT,
ViewGroup.LayoutParams.WRAP_CONTENT
)
linearLayout.layoutParams = linearLayoutParams
linearLayout.orientation = LinearLayout.VERTICAL
preview = PreviewView(context)
preview.layoutParams = ViewGroup.LayoutParams(
ViewGroup.LayoutParams.MATCH_PARENT,
ViewGroup.LayoutParams.MATCH_PARENT
)
linearLayout.addView(preview)
setUpCamera()
}
private fun setUpCamera(){
if (allPermissionsGranted()) {
startCamera()
}
cameraExecutor = Executors.newSingleThreadExecutor()
options = BarcodeScannerOptions.Builder()
.setBarcodeFormats(
Barcode.FORMAT_QR_CODE)
.build()
scanner = BarcodeScanning.getClient(options)
analysisUseCase.setAnalyzer(
// newSingleThreadExecutor() will let us perform analysis on a single worker thread
Executors.newSingleThreadExecutor()
) { imageProxy ->
processImageProxy(scanner, imageProxy)
}
}
override fun getView(): View {
return linearLayout
}
override fun dispose() {
cameraExecutor.shutdown()
}
@SuppressLint("UnsafeOptInUsageError")
private fun processImageProxy(
barcodeScanner: BarcodeScanner,
imageProxy: ImageProxy
) {
imageProxy.image?.let { image ->
val inputImage =
InputImage.fromMediaImage(
image,
imageProxy.imageInfo.rotationDegrees
)
barcodeScanner.process(inputImage)
.addOnSuccessListener { barcodeList ->
val barcode = barcodeList.getOrNull(0)
// `rawValue` is the decoded value of the barcode
barcode?.rawValue?.let { value ->
mCameraProvider?.unbindAll()
}
}
.addOnFailureListener {
// This failure will happen if the barcode scanning model
// fails to download from Google Play Services
}
.addOnCompleteListener {
// When the image is from CameraX analysis use case, must
// call image.close() on received images when finished
// using them. Otherwise, new images may not be received
// or the camera may stall.
imageProxy.image?.close()
imageProxy.close()
}
}
}
private fun allPermissionsGranted() = REQUIRED_PERMISSIONS.all {
ContextCompat.checkSelfPermission(context, it) == PackageManager.PERMISSION_GRANTED
}
private fun startCamera() {
val cameraProviderFuture = ProcessCameraProvider.getInstance(context)
cameraProviderFuture.addListener({
// Used to bind the lifecycle of cameras to the lifecycle owner
val cameraProvider: ProcessCameraProvider = cameraProviderFuture.get()
mCameraProvider = cameraProvider
// Preview
val surfacePreview = Preview.Builder()
.build()
.also {
it.setSurfaceProvider(preview.surfaceProvider)
}
// Select back camera as a default
val cameraSelector = CameraSelector.DEFAULT_BACK_CAMERA
try {
// Unbind use cases before rebinding
cameraProvider.unbindAll()
// Bind use cases to camera
cameraProvider.bindToLifecycle(
(context as FlutterActivity),
cameraSelector,
surfacePreview,
analysisUseCase,
)
} catch (exc: Exception) {
// Do nothing on exception
}
}, ContextCompat.getMainExecutor(context))
}
}
class CealScanQrViewFactory : PlatformViewFactory(StandardMessageCodec.INSTANCE) {
override fun create(context: Context, viewId: Int, args: Any?): PlatformView {
val creationParams = args as Map<String?, Any?>?
return CealScanQrView(context, viewId, creationParams)
}
}
Add this line to your app build.gradle dependencies:
implementation 'com.google.guava:guava:29.0-android'
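For context, the error appears because ProcessCameraProvider.getInstance() returns a ListenableFuture, and that class lives in Guava's com.google.common.util.concurrent package; adding Guava (or the standalone com.google.guava:listenablefuture artifact) puts it on the module classpath so the compiler can resolve the type.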

How to reduce image size captured with CameraX on Android?

I'm trying to understand how to use CameraX by studying this example:
class MainActivity : AppCompatActivity(), EasyPermissions.PermissionCallbacks {
private var imageCapture: ImageCapture? = null
private lateinit var outputDirectory: File
private lateinit var cameraExecutor: ExecutorService
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
requestPermission()
// Set up the listener for take photo button
camera_capture_button.setOnClickListener { takePhoto() }
outputDirectory = getOutputDirectory()
cameraExecutor = Executors.newSingleThreadExecutor()
}
private fun takePhoto() {
// Get a stable reference of the modifiable image capture use case
val imageCapture = imageCapture ?: return
// Create time-stamped output file to hold the image
val photoFile = File(
outputDirectory,
SimpleDateFormat(
FILENAME_FORMAT, Locale.US
).format(System.currentTimeMillis()) + ".jpg"
)
// Create output options object which contains file + metadata
val outputOptions = ImageCapture.OutputFileOptions.Builder(photoFile).build()
// Set up image capture listener, which is triggered after photo has
// been taken
imageCapture.takePicture(
outputOptions,
ContextCompat.getMainExecutor(this),
object : ImageCapture.OnImageSavedCallback {
override fun onError(exc: ImageCaptureException) {
Log.e(TAG, "Photo capture failed: ${exc.message}", exc)
}
override fun onImageSaved(output: ImageCapture.OutputFileResults) {
val savedUri = Uri.fromFile(photoFile)
val msg = "Photo capture succeeded: $savedUri"
Toast.makeText(baseContext, msg, Toast.LENGTH_SHORT).show()
Log.d(TAG, msg)
}
})
}
private fun startCamera() {
val cameraProviderFuture = ProcessCameraProvider.getInstance(this)
cameraProviderFuture.addListener({
// Used to bind the lifecycle of cameras to the lifecycle owner
val cameraProvider: ProcessCameraProvider = cameraProviderFuture.get()
// Preview
val preview = Preview.Builder()
.build()
.also {
it.setSurfaceProvider(viewFinder.surfaceProvider)
}
imageCapture = ImageCapture.Builder()
.build()
val imageAnalyzer = ImageAnalysis.Builder()
.build()
.also {
it.setAnalyzer(cameraExecutor, LuminosityAnalyzer { luma ->
Log.d(TAG, "Average luminosity: $luma")
})
}
// Select back camera as a default
val cameraSelector = CameraSelector.DEFAULT_BACK_CAMERA
try {
// Unbind use cases before rebinding
cameraProvider.unbindAll()
// Bind use cases to camera
cameraProvider.bindToLifecycle(
this, cameraSelector, preview, imageCapture, imageAnalyzer
)
} catch (exc: Exception) {
Log.e(TAG, "Use case binding failed", exc)
}
}, ContextCompat.getMainExecutor(this))
}
private fun getOutputDirectory(): File {
val mediaDir = externalMediaDirs.firstOrNull()?.let {
File(it, resources.getString(R.string.app_name)).apply { mkdirs() }
}
return if (mediaDir != null && mediaDir.exists())
mediaDir else filesDir
}
override fun onDestroy() {
super.onDestroy()
cameraExecutor.shutdown()
}
private fun requestPermission() {
if (CameraUtility.hasCameraPermissions(this)) {
startCamera()
return
}
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
EasyPermissions.requestPermissions(
this,
"You need to accept the camera permission to use this app",
REQUEST_CODE_CAMERA_PERMISSION,
Manifest.permission.CAMERA
)
} else {
EasyPermissions.requestPermissions(
this,
"You need to accept the camera permission to use this app",
REQUEST_CODE_CAMERA_PERMISSION,
Manifest.permission.CAMERA
)
}
}
It works. The images captured with this code are rather big. On a Pixel 3, it produces 4032x3024 JPEGs (usually around 4.5 to 6 MB). Does CameraX have a built-in feature to reduce the JPEG size?
I tried this:
imageCapture = ImageCapture.Builder()
.setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
.setTargetResolution(Size(800, 600))
.build()
and this:
val imageAnalyzer = ImageAnalysis.Builder()
.setTargetResolution(Size(800,600))
.build()
.also {
it.setAnalyzer(cameraExecutor, LuminosityAnalyzer { luma ->
})
}
It doesn't work; I still get the same 4032x3024 JPEG. I wonder if I don't understand the CameraX API properly.

CameraX not able to capture a photo

I added the code from
https://developer.android.com/training/camerax
The preview, permissions, and everything else are working just fine; however, I am not able to capture a photo with the button. I declared a listener on the button and connected it to takePhoto(), but it doesn't seem to work. It worked fine in the past, and I don't know whether it's because of the new CameraX version, but it doesn't seem to respond correctly. I have Logcat open, and when I press the button it only logs that it was pressed:
D/ViewRootImpl#70b5dc2[MainActivity]: ViewPostIme pointer 0
D/ViewRootImpl#70b5dc2[MainActivity]: ViewPostIme pointer 1
But that's it, nothing else. It's a small issue, but I believe I'm missing something simple and I don't know what is causing it. Here is the code:
Gradle:
plugins {
id 'com.android.application'
id 'kotlin-android'
}
android {
compileSdk 30
defaultConfig {
applicationId "com.example.camerax1"
minSdk 21
targetSdk 30
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
kotlinOptions {
jvmTarget = '1.8'
}
}
dependencies {
implementation 'androidx.core:core-ktx:1.6.0'
implementation 'androidx.appcompat:appcompat:1.3.1'
implementation 'com.google.android.material:material:1.4.0'
implementation 'androidx.constraintlayout:constraintlayout:2.1.0'
implementation 'androidx.camera:camera-camera2:1.1.0-alpha08'
implementation 'androidx.camera:camera-lifecycle:1.1.0-alpha08'
implementation 'androidx.camera:camera-view:1.0.0-alpha28'
testImplementation 'junit:junit:4.+'
androidTestImplementation 'androidx.test.ext:junit:1.1.3'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0'
}
activity_main.xml
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
xmlns:app="http://schemas.android.com/apk/res-auto"
tools:context=".MainActivity">
<androidx.camera.view.PreviewView
android:id="#+id/previewView"
android:layout_width="match_parent"
android:layout_height="match_parent" />
<Button
android:id="#+id/captureButton"
android:layout_width="85dp"
android:layout_height="85dp"
android:layout_marginBottom="25dp"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintEnd_toEndOf="parent"
android:importantForAccessibility="no" />
</androidx.constraintlayout.widget.ConstraintLayout>
Full code in one class, MainActivity.kt:
class MainActivity : AppCompatActivity() {
private var imageCapture: ImageCapture? = null
private lateinit var outputDirectory: File
private lateinit var cameraExecutor: ExecutorService
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
//Request camera permissions
if (allPermissions()) {
startCamera()
} else {
ActivityCompat.requestPermissions(this, REQUIRED_PERMISSIONS, REQUEST_CODE_PERMISSIONS)
}
val captureBtn = findViewById<Button>(R.id.captureButton)
captureBtn.setOnClickListener {
takePhoto()
}
outputDirectory = getOutputDirectory()
cameraExecutor = Executors.newSingleThreadExecutor()
}
private fun takePhoto() {
Toast.makeText(baseContext, "Processing..", Toast.LENGTH_SHORT).show()
//Get a stable reference of the modifiable image capture use case
val imageCapture = imageCapture ?: return
//Create time-stamped output file to hold the image
val photoFile = File(
outputDirectory,
SimpleDateFormat(FILENAME_FORMAT, Locale.ENGLISH).format(System.currentTimeMillis()) + ".jpg")
//Create output options object which contains file + metadata
val outputOptions = ImageCapture.OutputFileOptions.Builder(photoFile).build()
//Set up image capture listener, which is triggered after photo has been taken
imageCapture.takePicture(
outputOptions, ContextCompat.getMainExecutor(this), object : ImageCapture.OnImageSavedCallback {
override fun onImageSaved(outputFileResults: ImageCapture.OutputFileResults) {
val savedUri = Uri.fromFile(photoFile)
val msg = "Photo capture succeeded: $savedUri"
Toast.makeText(baseContext, msg, Toast.LENGTH_SHORT).show()
Log.d(TAG, msg)
}
override fun onError(exception: ImageCaptureException) {
Log.e(TAG, "Photo capture failed: ${exception.message}", exception)
}
}
)
}
private fun startCamera() {
val viewFiner = findViewById<PreviewView>(R.id.previewView)
val cameraProviderFuture = ProcessCameraProvider.getInstance(this)
cameraProviderFuture.addListener({
//Used to bind the lifecycle of camera to the lifecycle owner
val cameraProvider: ProcessCameraProvider = cameraProviderFuture.get()
//Preview
val preview = Preview.Builder().build().also {
it.setSurfaceProvider(viewFiner.surfaceProvider)
}
//Select back camera as a default
val cameraSelector = CameraSelector.DEFAULT_BACK_CAMERA
try {
//Unbind use cases before rebinding
cameraProvider.unbindAll()
//Bind use cases to camera
cameraProvider.bindToLifecycle(
this,
cameraSelector,
preview
)
} catch (exc: Exception) {
Log.e(TAG, "Use case binding failed", exc)
Toast.makeText(this, "Use case binding failed: $exc", Toast.LENGTH_SHORT).show()
}
}, ContextCompat.getMainExecutor(this))
}
private fun allPermissions() = REQUIRED_PERMISSIONS.all {
ContextCompat.checkSelfPermission(baseContext, it) == PackageManager.PERMISSION_GRANTED
}
override fun onRequestPermissionsResult(
requestCode: Int,
permissions: Array<out String>,
grantResults: IntArray
) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults)
if (requestCode == REQUEST_CODE_PERMISSIONS) {
if (allPermissions()) {
startCamera()
} else {
Toast.makeText(this, "Permissions not granted by the user.", Toast.LENGTH_SHORT).show()
finish()
}
}
}
private fun getOutputDirectory(): File {
val mediaDir = externalMediaDirs.firstOrNull().let {
File(it, resources.getString(R.string.app_name)).apply { mkdirs() }
}
return if (mediaDir.exists())
mediaDir else filesDir
}
override fun onDestroy() {
super.onDestroy()
cameraExecutor.shutdown()
}
companion object {
private const val TAG = "CameraXBasic"
private const val FILENAME_FORMAT = "dd.MM.yyyy - HH:mm:ss"
private const val REQUEST_CODE_PERMISSIONS = 10
private val REQUIRED_PERMISSIONS = arrayOf(
Manifest.permission.CAMERA,
Manifest.permission.WRITE_EXTERNAL_STORAGE
)
}
}
Your code never binds an ImageCapture use case to the camera provider, which is why it is not capturing images. You need to build imageCapture and include it in the bindToLifecycle() call. In your startCamera() function, bind imageCapture in the following way:
private fun startCamera() {
val viewFiner = findViewById<PreviewView>(R.id.previewView)
val cameraProviderFuture = ProcessCameraProvider.getInstance(this)
cameraProviderFuture.addListener({
//Used to bind the lifecycle of camera to the lifecycle owner
val cameraProvider: ProcessCameraProvider = cameraProviderFuture.get()
//Preview
val preview = Preview.Builder().build().also {
it.setSurfaceProvider(viewFiner.surfaceProvider)
}
//set Image Capture Builder
imageCapture = ImageCapture.Builder()
.setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
.build()
//Select back camera as a default
val cameraSelector = CameraSelector.DEFAULT_BACK_CAMERA
try {
//Unbind use cases before rebinding
cameraProvider.unbindAll()
//Bind use cases to camera
//Add imageCapture to lifecycle
cameraProvider.bindToLifecycle(
this,
cameraSelector,
preview,
imageCapture
)
} catch (exc: Exception) {
Log.e(TAG, "Use case binding failed", exc)
Toast.makeText(this, "Use case binding failed: $exc", Toast.LENGTH_SHORT).show()
}
}, ContextCompat.getMainExecutor(this))
}
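Once imageCapture is assigned and bound like this, the null guard in takePhoto() (val imageCapture = imageCapture ?: return) no longer returns early, so the capture callback actually runs.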

How can I recognize barcode with firebase ML Kit?

I am trying to recognize a QR code in my app while the camera is on, and then go to the corresponding activity depending on the QR code text.
I am doing it with Firebase ML Kit and the CameraX library with help from Google's documentation, but I have errors with ImageAnalyzer's analyze method.
The IDE asks me to implement the analyze method even though it's already implemented. My override has two parameters, (imageProxy: ImageProxy?, degrees: Int), and the compiler says it overrides nothing. If I delete the second parameter (degrees: Int), the override is accepted, but then "degrees" is not recognized.
I tried to follow some tutorials, but they use lots of third-party libraries. I'd like to use CameraX and ML Kit.
How can I fix this?
Here is my code:
package ge.softservice.nfcwithactivties
import android.Manifest
import android.annotation.SuppressLint
import android.content.pm.PackageManager
import android.os.Bundle
import android.util.Log
import android.widget.Toast
import androidx.appcompat.app.AppCompatActivity
import androidx.camera.core.*
import androidx.camera.lifecycle.ProcessCameraProvider
import androidx.core.app.ActivityCompat
import androidx.core.content.ContextCompat
import com.google.firebase.ml.vision.FirebaseVision
import com.google.firebase.ml.vision.barcode.FirebaseVisionBarcode
import com.google.firebase.ml.vision.barcode.FirebaseVisionBarcodeDetectorOptions
import com.google.firebase.ml.vision.common.FirebaseVisionImage
import com.google.firebase.ml.vision.common.FirebaseVisionImageMetadata
import kotlinx.android.synthetic.main.activity_qr.*
import java.io.File
import java.util.concurrent.ExecutorService
import java.util.concurrent.Executors
class QrActivity : AppCompatActivity() {
private var preview: Preview? = null
private var imageAnalyzer: ImageAnalysis? = null
private var camera: Camera? = null
internal var isDetected = false
private lateinit var outputDirectory: File
private lateinit var cameraExecutor: ExecutorService
override fun onRequestPermissionsResult(
requestCode: Int, permissions: Array<String>, grantResults:
IntArray
) {
if (requestCode == REQUEST_CODE_PERMISSIONS) {
if (allPermissionsGranted()) {
startCamera()
} else {
Toast.makeText(
this,
"Permissions not granted by the user.",
Toast.LENGTH_SHORT
).show()
finish()
}
}
}
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_qr)
// Request camera permissions
if (allPermissionsGranted()) {
startCamera()
} else {
ActivityCompat.requestPermissions(
this, REQUIRED_PERMISSIONS, REQUEST_CODE_PERMISSIONS
)
}
// outputDirectory = getOutputDirectory()
cameraExecutor = Executors.newSingleThreadExecutor()
}
private fun startCamera() {
val options = FirebaseVisionBarcodeDetectorOptions.Builder()
.setBarcodeFormats(
FirebaseVisionBarcode.FORMAT_QR_CODE,
FirebaseVisionBarcode.FORMAT_AZTEC
)
.build()
val detector = FirebaseVision.getInstance().getVisionBarcodeDetector(options)
val result = detector.detectInImage(image)
.addOnSuccessListener { barcodes ->
// Task completed successfully
// ...
}
.addOnFailureListener {
// Task failed with an exception
// ...
}
val cameraProviderFuture = ProcessCameraProvider.getInstance(this)
cameraProviderFuture.addListener(Runnable {
// Used to bind the lifecycle of cameras to the lifecycle owner
val cameraProvider: ProcessCameraProvider = cameraProviderFuture.get()
// Preview
preview = Preview.Builder()
.build()
// Select back camera
val cameraSelector =
CameraSelector.Builder().requireLensFacing(CameraSelector.LENS_FACING_BACK).build()
try {
// Unbind use cases before rebinding
cameraProvider.unbindAll()
// Bind use cases to camera
camera = cameraProvider.bindToLifecycle(
this, cameraSelector, preview
)
preview?.setSurfaceProvider(viewFinder.createSurfaceProvider(/*camera?.cameraInfo*/))
} catch (exc: Exception) {
Log.e(TAG, "Use case binding failed", exc)
}
}, ContextCompat.getMainExecutor(this))
}
private fun takePhoto() {
// TODO
}
private fun allPermissionsGranted() = REQUIRED_PERMISSIONS.all {
ContextCompat.checkSelfPermission(
baseContext, it
) == PackageManager.PERMISSION_GRANTED
}
/* fun getOutputDirectory(): File {
val mediaDir = externalMediaDirs.firstOrNull()?.let {
File(it, resources.getString(R.string.app_name)).apply { mkdirs() } }
return if (mediaDir != null && mediaDir.exists())
mediaDir else filesDir
}*/
companion object {
private const val TAG = "CameraXBasic"
private const val FILENAME_FORMAT = "yyyy-MM-dd-HH-mm-ss-SSS"
private const val REQUEST_CODE_PERMISSIONS = 10
private val REQUIRED_PERMISSIONS = arrayOf(Manifest.permission.CAMERA)
}
}
private class MyImageAnalyzer : ImageAnalysis.Analyzer {
private fun degreesToFirebaseRotation(degrees: Int): Int = when(degrees) {
0 -> FirebaseVisionImageMetadata.ROTATION_0
90 -> FirebaseVisionImageMetadata.ROTATION_90
180 -> FirebaseVisionImageMetadata.ROTATION_180
270 -> FirebaseVisionImageMetadata.ROTATION_270
else -> throw Exception("Rotation must be 0, 90, 180, or 270.")
}
@SuppressLint("UnsafeExperimentalUsageError")
override fun analyze(imageProxy: ImageProxy?, degrees: Int) {
val mediaImage = imageProxy?.image
val imageRotation = degreesToFirebaseRotation(degrees)
if (mediaImage != null) {
val image = FirebaseVisionImage.fromMediaImage(mediaImage, imageRotation)
// Pass image to an ML Kit Vision API
// ...
}
}
}
Looking at your code snippet, it seems you're using camera-camera2 version beta04 with camerax-view version alpha11.
The documentation may be out of date; the Analyzer now only receives an ImageProxy inside its analyze callback. The degrees information that was previously also passed in can now be accessed via ImageProxy.getImageInfo().getRotationDegrees().
So your Analyzer should look like this:
private class MyImageAnalyzer : ImageAnalysis.Analyzer {
private fun degreesToFirebaseRotation(degrees: Int): Int {
// ...
}
override fun analyze(imageProxy: ImageProxy) {
val mediaImage = imageProxy.image ?: return // the backing image can be null; skip such frames
val imageRotation = degreesToFirebaseRotation(imageProxy.imageInfo.rotationDegrees)
val image = FirebaseVisionImage.fromMediaImage(mediaImage, imageRotation)
// ...
}
}
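As a side note, here is a minimal sketch (my assumption, not part of the original answer) of wiring such an analyzer into an ImageAnalysis use case; the question's startCamera() only binds the preview, so analyze() would never be invoked there. With CameraX analysis, remember to close the ImageProxy once the frame (and any asynchronous detection on it) is finished, otherwise new frames stop arriving.
// Hypothetical wiring, reusing names from the question (cameraExecutor, cameraProvider, cameraSelector, preview).
val imageAnalysis = ImageAnalysis.Builder()
    .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
    .build()
    .also { it.setAnalyzer(cameraExecutor, MyImageAnalyzer()) }

camera = cameraProvider.bindToLifecycle(
    this, cameraSelector, preview, imageAnalysis
)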

Can I record video with CameraX (Android Jetpack)?

Google has released the new CameraX library as part of Jetpack. It looks great for taking pictures, but my use case also requires recording video. I tried googling for that, but couldn't find anything.
So, is it possible to record videos with the CameraX Jetpack library?
Yes, we can record video using CameraX. I have tried to implement it myself with the help of the GitHub demo for CameraX. Please refer to the code below; it may help you.
Config for video in CameraX:
val videoCaptureConfig = VideoCaptureConfig.Builder().apply {
setLensFacing(lensFacing)
setTargetAspectRatio(screenAspectRatio)
setTargetRotation(viewFinder.display.rotation)
}.build()
videoCapture = VideoCapture(videoCaptureConfig)
CameraX.bindToLifecycle(this, preview, imageCapture, videoCapture)
To Start video recording:
videoCapture?.startRecording(videoFile, object : VideoCapture.OnVideoSavedListener {
override fun onVideoSaved(file: File?) {
Log.i(javaClass.simpleName, "Video File : $file")
}
override fun onError(useCaseError: VideoCapture.UseCaseError?, message: String?, cause: Throwable?) {
Log.i(javaClass.simpleName, "Video Error: $message")
}
})
To Stop video recording:
videoCapture?.stopRecording()
I have mentioned the same in a GitHub issue comment: https://github.com/android/camera/issues/2#issuecomment-490773932
Note: your implementation of video recording with CameraX may differ, because the code above was developed by me with no reference other than the GitHub demo.
Please check the important comment by Oscar Wahltinez on this answer, as of 14 May 2019.
This is my solution
//Versions in Gradle
def camerax_version = "1.0.0-beta06"
def camera_extensions = "1.0.0-alpha13"
private lateinit var videoCapture: VideoCapture
private lateinit var viewFinder: PreviewView
private lateinit var outputDirectory: File
private var lensFacing: Int = CameraSelector.LENS_FACING_FRONT
private val executor = Executors.newSingleThreadExecutor()
private var isRecording = false
private var camera: Camera? = null
private lateinit var cameraProviderFuture: ListenableFuture<ProcessCameraProvider>
//onCreate
viewFinder = preview_video_view
runWithPermissions(*permissions) {
startCamera(view.context)
initClicks()
}
@SuppressLint("RestrictedApi", "UnsafeExperimentalUsageError")
private fun startCamera(context: Context) {
outputDirectory = getOutputDirectory(context)
cameraProviderFuture = ProcessCameraProvider.getInstance(context)
val cameraSelector = CameraSelector.Builder().requireLensFacing(lensFacing).build()
// Create a configuration object for the video use case
val videoCaptureConfig = VideoCaptureConfig.Builder().apply {
setTargetRotation(viewFinder.display.rotation)
setCameraSelector(cameraSelector)
}
//CameraX.initialize(context, this.cameraXConfig)
videoCapture = VideoCapture(videoCaptureConfig.useCaseConfig)
val preview: Preview = Preview.Builder().apply {
setTargetAspectRatio(AspectRatio.RATIO_16_9)
setTargetRotation(viewFinder.display.rotation)
}.build()
preview.setSurfaceProvider(viewFinder.createSurfaceProvider())
cameraProviderFuture.addListener(Runnable {
val cameraProvider = cameraProviderFuture.get()
camera = cameraProvider.bindToLifecycle(
viewLifecycleOwner,
cameraSelector,
preview,
videoCapture
)
}, ContextCompat.getMainExecutor(context))
}
@SuppressLint("RestrictedApi")
private fun startRecording() {
val file = createFile(
outputDirectory,
FILENAME,
VIDEO_EXTENSION
)
videoCapture.startRecording(
file,
executor,
object : VideoCapture.OnVideoSavedCallback {
override fun onVideoSaved(file: File) {
Handler(Looper.getMainLooper()).post {
showMessage(file.name + " is saved")
}
}
override fun onError(videoCaptureError: Int, message: String, cause: Throwable?) {
Handler(Looper.getMainLooper()).post {
showMessage(videoCaptureError.toString() + " " + message)
}
}
}
)
}
@SuppressLint("RestrictedApi")
private fun stopRecording() {
videoCapture.stopRecording()
}
override fun getCameraXConfig(): CameraXConfig {
return Camera2Config.defaultConfig()
}
companion object {
private const val FILENAME = "yyyy_MM_dd_HH_mm_ss"
private const val VIDEO_EXTENSION = ".mp4"
private val permissions = arrayOf(
Manifest.permission.WRITE_EXTERNAL_STORAGE,
Manifest.permission.READ_EXTERNAL_STORAGE,
Manifest.permission.CAMERA,
Manifest.permission.RECORD_AUDIO
)
fun getOutputDirectory(context: Context): File {
val appContext = context.applicationContext
val mediaDir = appContext.externalMediaDirs.firstOrNull()?.let {
File(it, appContext.resources.getString(R.string.app_name)).apply { mkdirs() }
}
return if (mediaDir != null && mediaDir.exists()) mediaDir else appContext.filesDir
}
fun createFile(baseFolder: File, format: String, extension: String) =
File(baseFolder, SimpleDateFormat(format, Locale.US)
.format(System.currentTimeMillis()) + extension)
}
Updating Patel Pinkal's answer.
Now that the beta is released, we can't use VideoCaptureConfig.Builder() anymore; instead you go with something like this:
videoCapture = VideoCapture.Builder().apply {
// init config here
}.build()
As of April 2021
val videoCapture = VideoCapture.Builder().build()
val outputDirectory = getOutputDirectory()
fun getOutputDirectory(): File {
val mediaDir = externalMediaDirs.firstOrNull()?.let {
File(it, resources.getString(R.string.app_name)).apply { mkdirs() } }
return if (mediaDir != null && mediaDir.exists())
mediaDir else filesDir
}
@SuppressLint("RestrictedApi")
private fun startRecording() {
val videoFile = File(
outputDirectory,
SimpleDateFormat("yyyy-MM-dd-HH-mm-ss-SSS", Locale.US
).format(System.currentTimeMillis()) + ".mp4")
val outputOptions = VideoCapture.OutputFileOptions.Builder(videoFile).build()
videoCapture?.startRecording(outputOptions, ContextCompat.getMainExecutor(this), object: VideoCapture.OnVideoSavedCallback {
override fun onError(videoCaptureError: Int, message: String, cause: Throwable?) {
Log.e(TAG, "Video capture failed: $message")
}
override fun onVideoSaved(outputFileResults: VideoCapture.OutputFileResults) {
val savedUri = Uri.fromFile(videoFile)
val msg = "Video capture succeeded: $savedUri"
Toast.makeText(baseContext, msg, Toast.LENGTH_SHORT).show()
Log.d(TAG, msg)
}
})
}
@SuppressLint("RestrictedApi")
private fun stopRecording() {
videoCapture?.stopRecording()
}
Please note that this API is still restricted and is still subject to change.
The CameraX VideoCapture use case is implemented in camera-video, released since 1.1.0-alpha30. Refer to the following for details:
the official documentation
the reference doc
the sample
the YouTube video (current CameraX status)
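For illustration, here is a rough sketch of what recording looks like with the camera-video Recorder API (a hypothetical example, not taken from the answers above; the quality, file name, and the surrounding cameraProvider/preview/lifecycleOwner setup are placeholder assumptions):
// Sketch of the androidx.camera.video API; requires the camera-video artifact.
val recorder = Recorder.Builder()
    .setQualitySelector(QualitySelector.from(Quality.HD)) // placeholder quality
    .build()
val videoCapture = VideoCapture.withOutput(recorder)

cameraProvider.bindToLifecycle(lifecycleOwner, cameraSelector, preview, videoCapture)

// Record into MediaStore; calling stop() on the returned Recording finalizes the file.
val outputOptions = MediaStoreOutputOptions
    .Builder(contentResolver, MediaStore.Video.Media.EXTERNAL_CONTENT_URI)
    .setContentValues(ContentValues().apply {
        put(MediaStore.Video.Media.DISPLAY_NAME, "camerax-recording.mp4") // placeholder name
    })
    .build()

val recording = videoCapture.output
    .prepareRecording(context, outputOptions)
    .withAudioEnabled() // needs the RECORD_AUDIO permission
    .start(ContextCompat.getMainExecutor(context)) { event ->
        if (event is VideoRecordEvent.Finalize) {
            Log.d(TAG, "Video saved to ${event.outputResults.outputUri}")
        }
    }
// Later: recording.stop()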
Regarding Sergei's answer: videoCapture.startRecording() receives VideoCapture.OutputFileOptions instead of a file for camerax_version = '1.0.0-rc01', so it should be used as:
videoCapture.startRecording(
VideoCapture.OutputFileOptions.Builder(file).build(),
executor,
object : VideoCapture.OnVideoSavedCallback {
override fun onVideoSaved(outputFileResults: VideoCapture.OutputFileResults) {
TODO("Not yet implemented")
}
override fun onError(videoCaptureError: Int, message: String, cause: Throwable?) {
TODO("Not yet implemented")
}
}
)
