CameraX error: Decrementing use count occurs more times than incrementing - android

I am using CameraX 1.0.0-beta01 to take pictures from a service. I am using ImageCapture and ImageAnalysis - i.e. no preview.
I am implementing LifecycleOwner and calling
mLifecycleRegistry = new LifecycleRegistry(this);
mLifecycleRegistry.markState(Lifecycle.State.STARTED);
when I start my service and mLifecycleRegistry.markState(Lifecycle.State.DESTROYED)
when I finish taking pictures.
I get the following error reported from users; it is not specific to any Android version or device.
Fatal Exception: java.lang.IllegalStateException: Decrementing use count occurs more times than incrementing
at androidx.camera.core.impl.DeferrableSurface.decrementUseCount(DeferrableSurface.java:256)
at androidx.camera.core.impl.DeferrableSurfaces.decrementAll(DeferrableSurfaces.java:173)
at androidx.camera.camera2.internal.CaptureSession.clearConfiguredSurfaces(CaptureSession.java:539)
at androidx.camera.camera2.internal.CaptureSession$StateCallback.onClosed(CaptureSession.java:928)
at androidx.camera.camera2.internal.CaptureSession.forceClose(CaptureSession.java:533)
at androidx.camera.camera2.internal.Camera2CameraImpl$StateCallback.onDisconnected(Camera2CameraImpl.java:1210)
at androidx.camera.camera2.internal.CameraDeviceStateCallbacks$ComboDeviceStateCallback.onDisconnected(CameraDeviceStateCallbacks.java:112)
at androidx.camera.camera2.internal.compat.CameraDeviceCompat$StateCallbackExecutorWrapper$2.run(CameraDeviceCompat.java:121)
at android.os.Handler.handleCallback(Handler.java:751)
at android.os.Handler.dispatchMessage(Handler.java:95)
at android.os.Looper.loop(Looper.java:154)
at android.os.HandlerThread.run(HandlerThread.java:61)
I haven't been able to reproduce the error myself. What could cause this?
Update:
I found the source for DeferrableSurface.java, it's happening here:
/**
* Decrements the use count.
*
* <p>If this causes the use count to go to zero and the surface has been closed, this will
* complete the future returned by {@link #getTerminationFuture()}.
*/
public void decrementUseCount() {
// If this gets set, then the surface will terminate
CallbackToFutureAdapter.Completer<Void> terminationCompleter = null;
synchronized (mLock) {
if (mUseCount == 0) {
throw new IllegalStateException("Decrementing use count occurs more times than "
+ "incrementing");
}
mUseCount--;
if (mUseCount == 0 && mClosed) {
terminationCompleter = mTerminationCompleter;
mTerminationCompleter = null;
}
// ...
which is being called from DeferrableSurfaces.java:
/**
* Decrements the usage counts of every surface in the provided list.
*
* @param surfaceList The list of surfaces whose usage count should be decremented.
*/
public static void decrementAll(@NonNull List<DeferrableSurface> surfaceList) {
for (DeferrableSurface surface : surfaceList) {
surface.decrementUseCount();
}
}
which I suppose gets called when I mark the current state as DESTROYED. Tempted to say that this looks like a bug. Thoughts?
Update 2:
My code, stripped down to what's relevant:
public class MyService extends Service implements LifecycleOwner{
private LifecycleRegistry mLifecycleRegistry;
public static boolean serviceRunning = false;
private boolean isCameraOff = true;
private ImageCapture imageCapture;
private int pendingImages = 0;
@NonNull
@Override
public Lifecycle getLifecycle() {
return mLifecycleRegistry;
}
/* called from activity when we should take a picture.
can be called several times, and we therefore increment an
int, keeping track of how many pictures should be taken. */
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
serviceRunning = true;
// Increment pending pictures
pendingImages++;
// if camera is not in use, setup camera and take picture, otherwise incrementing pendingImages var is enough
if(isCameraOff){
isCameraOff = false;
cameraSetup(); // takePicture called from the end of setup
}
return START_NOT_STICKY;
}
private void cameraSetup(){
// lifecycle for camera
mLifecycleRegistry = new LifecycleRegistry(this);
mLifecycleRegistry.setCurrentState(Lifecycle.State.STARTED);
ListenableFuture cameraProviderFuture = ProcessCameraProvider.getInstance(this);
cameraProviderFuture.addListener(() -> {
try {
// Camera provider is now guaranteed to be available
ProcessCameraProvider cameraProvider = (ProcessCameraProvider)cameraProviderFuture.get();
// Dummy Analyzer
ImageAnalysis imageAnalysis = new ImageAnalysis.Builder()
.setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
.build();
// empty analyzer, for the time
imageAnalysis.setAnalyzer(executor, ImageProxy::close);
// ImageCapture
imageCapture = new ImageCapture.Builder()
.setCaptureMode(ImageCapture.CAPTURE_MODE_MAXIMIZE_QUALITY)
.setTargetResolution(new Size(900,1200))
.build();
// Select front facing camera
CameraSelector cameraSelector = new CameraSelector.Builder()
.requireLensFacing(CameraSelector.LENS_FACING_FRONT)
.build();
// Bind Camera to lifecycle
cameraProvider.bindToLifecycle(this, cameraSelector, imageCapture, imageAnalysis);
// Give camera some time to open, 500 ms should do, before proceeding
Utils.sleepThread(500);
// take picture
takePicture();
} catch (InterruptedException | ExecutionException e) {
// Currently no exceptions thrown. cameraProviderFuture.get() should
// not block since the listener is being called, so no need to
// handle InterruptedException.
}
}, ContextCompat.getMainExecutor(this));
}
private Executor executor = Executors.newSingleThreadExecutor();
private void takePicture(){
// Create new file
File file = new File(Utils.createFilePath(context, getApplication()));
// Create Meta data
ImageCapture.Metadata meta = new ImageCapture.Metadata();
meta.setReversedHorizontal(false); // don't mirror photos
// Collect file options
ImageCapture.OutputFileOptions outputFileOptions =
new ImageCapture.OutputFileOptions.Builder(file)
.setMetadata(meta)
.build();
// Take picture
imageCapture.takePicture(outputFileOptions, executor, new ImageCapture.OnImageSavedCallback(){
@Override
public void onImageSaved(@NonNull ImageCapture.OutputFileResults outputFileResults) {
// got picture
// Decrement the number of pictures we need to take
pendingImages--;
// take another image if pending
if(pendingImages > 0){
takePicture();
}else{
closeSession();
}
}
@Override
public void onError(@NonNull ImageCaptureException exception) {
exception.printStackTrace();
closeSession();
}
});
}
private void closeSession(){
// tell lifecycle and thus camera to close (has to be called from main thread)
// closeSession is called from the camera callback and is not on main thread.
new Handler(context.getMainLooper()).post(() -> {
mLifecycleRegistry.setCurrentState(Lifecycle.State.DESTROYED);
});
// mark camera as off
isCameraOff = true;
stopSelf(); // stop service
}
@Override
public void onDestroy() {
super.onDestroy();
serviceRunning = false;
}
// Service Stuff
private final IBinder mBinder = new LocalBinder();
public class LocalBinder extends Binder {
public MyService getService() {
return MyService.this;
}
}
@Override
public IBinder onBind(Intent intent) {
return mBinder;
}
}

Related

Android Runnable not executed by MainLooper

Brief description of the application:
I have a Cordova/Ionic application and a custom Cordova plugin for native code execution.
The plugin contains a separate CameraActivity (extends FragmentActivity) to work with the camera (parts of the code are based on the Camera2Basic example).
On launch, the Activity displays AnaliseFragment, where the application captures every camera frame and passes the image to the analyser on a background thread.
Execution steps are:
User presses button on Cordova UI
Cordova executes native plugin method via cordova.exec(..)
Native plugin starts CameraActivity for result via cordova.startActivityForResult(..)
CameraActivity displays AnaliseFragment
AnaliseFragment starts a camera capture session with two surfaces: the first is displayed on a TextureView and the second is analysed by ImageAnalyser
Problem:
Rarely and randomly, the UI stops reacting to the user and runnables are not executed on the UI thread. At the same time, background threads continue working as normal: the camera output is visible on the TextureView and the ImageAnalyser continues to receive images from the camera.
Does anybody have a suggestion on how to find/debug the reason for this behavior? Or any ideas about what could cause it?
I already tried:
log every lifecycle event of CameraActivity/AnaliseFragment = no calls between the app's normal state and the ANR
add a WAKELOCK to keep the Cordova MainActivity alive = didn't help
log (trace) every method in AnaliseFragment and ImageAnalyser = nothing suspicious
Here is simplified code of AnaliseFragment:
public class AnaliseFragment extends Fragment {
private HandlerThread mBackgroundThread;
private Handler mBackgroundHandler;
private ImageAnalyser mImageAnalyser;
// listener is attached to camera capture session and receives every frame
private final ImageReader.OnImageAvailableListener mOnImageAvailableListener
= new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image nextImage = reader.acquireLatestImage();
mBackgroundHandler.post(() -> {
try {
mImageAnalyser.AnalizeNextImage(nextImage);
} finally {
nextImage.close();
}
});
}
};
@Override
public void onViewCreated(final View view, Bundle savedInstanceState) {
mImageAnalyser = new ImageAnalyser();
mImageAnalyser.onResultAvailable(boolResult -> {
// Runnable posted, but never executed
new Handler(Looper.getMainLooper()).post(() -> reportToActivityAndUpdateUI(boolResult));
});
}
@Override
public void onResume() {
super.onResume();
startBackgroundThread();
}
@Override
public void onPause() {
stopBackgroundThread();
super.onPause();
}
private void startBackgroundThread() {
if (mBackgroundThread == null) {
mBackgroundThread = new HandlerThread("MyBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
}
private void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
Simplified code for ImageAnalyser:
public class ImageAnalyser {
public interface ResultAvailableListener {
void onResult(boolean boolResult);
}
private ResultAvailableListener mResultAvailableListener;
public void onResultAvailable(ResultAvailableListener listener) { mResultAvailableListener = listener; }
public void AnalizeNextImage(Image image) {
// Do heavy analysis and put result into theResult
mResultAvailableListener.onResult(theResult);
}
}
There is some long-running operation on the UI thread. Try profiling your app to figure out what is blocking your main thread.
After hours of profiling, debugging and code review I found that the issue was caused by incorrect View invalidation from a background thread.
The View.postInvalidate() method must be used - it checks whether the View is still attached to a window and then performs the invalidation. Instead, I wrongly used View.invalidate() when processing my custom message from the MainLooper, which rarely caused failures and made the MainLooper stop processing any more messages.
For those who may have the same problem, I added both the correct and incorrect code.
CORRECT:
public class GraphicOverlayView extends View { ... }
// Somewhere in background thread logic:
private GraphicOverlayView mGraphicOverlayView;
private void invalidateGraphicOverlayViewFromBackgroundThread(){
mGraphicOverlayView.postInvalidate();
};
WRONG:
public class GraphicOverlayView extends View { ... }
// Somewhere in background thread logic:
private GraphicOverlayView mGraphicOverlayView;
private final int MSG_INVALIDATE_GRAPHICS_OVERLAY = 1;
private Handler mUIHandler = new Handler(Looper.getMainLooper()){
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case MSG_INVALIDATE_GRAPHICS_OVERLAY:{
GraphicOverlayView overlay = (GraphicOverlayView)msg.obj;
// Next line can cause MainLooper stop processing other messages
overlay.invalidate();
break;
}
default:
super.handleMessage(msg);
}
}
};
private void invalidateGraphicOverlayViewFromBackgroundThread(){
Message msg = new Message();
msg.obj = mGraphicOverlayView;
msg.what = MSG_INVALIDATE_GRAPHICS_OVERLAY;
mUIHandler.dispatchMessage(msg);
};

How to pause barcode scanning in android ML-kit when using ByteBuffer from SurfaceView

Context
I'm using the Android firebase-ml-vision library to scan barcodes using a SurfaceView with a continuous ByteBuffer of picture frames. I used the ML Kit quickstart project as a starting point and it works great.
The objective of my project is to recognise the product associated with the barcode and add it to the list of scanned items.
Problem
Once the camera focuses, the barcode processor would detect the same barcode multiple times, so you would scan 20 rather than 1 barcode in a second.
This is the javadoc from CameraSource.FrameProcessingRunnable.run:
* As long as the processing thread is active, this executes detection on frames continuously.
* The next pending frame is either immediately available or hasn't been received yet. Once it
* is available, we transfer the frame info to local variables and run detection on that frame.
* It immediately loops back for the next frame without pausing.
I have tried to add a "paused" check into the FrameProcessingRunnable, but I was still getting the same barcode recognised at least twice, as the next frame/frames were already being fed in for detection:
private class FrameProcessingRunnable implements Runnable {
private volatile boolean paused = false;
.
.
.
public void pause() {
synchronized (lock) {
this.paused = true;
lock.notifyAll();
}
}
public void resume() {
synchronized (lock) {
this.paused = false;
lock.notifyAll();
}
}
public void run() {
.
.
.
synchronized (processorLock) {
if (!paused) {
Log.d(TAG, "Process an image");
frameProcessor.process(...
Solution using stop & start
As I couldn't get it to pause, I opted for stop & start when a barcode is detected from the buffer:
private CameraSourcePreview preview;
public void pauseImageProcessing() {
preview.stop();
try {
preview.start(cameraSource, graphicOverlay);
} catch (IOException e) {
}
}
This works, but there is about 1 second delay before it starts up again, camera starts focusing and the next barcode can be detected. No doubt this approach also consumes unnecessary resources. You may say this is fine, but in this video you'll see the difference between the camera scanner with Stop&Start and a Bluetooth scanner:
Better approach
I'm looking for a solution that would simply discard any frames immediately after the one with successful detection and start again, but so far I have failed. I'm using 20 frames per second.
VisionProcessorBase does have code for throttling:
// Whether we should ignore process(). This is usually caused by feeding input data faster than
// the model can handle.
private final AtomicBoolean shouldThrottle = new AtomicBoolean(false);
But it doesn't go far enough for my needs :(
Once the camera focuses, the barcode processor would detect the same barcode multiple times, so you would scan 20 rather than 1 barcode in a second.
VisionProcessorBase.java
private void detectInVisionImage(
FirebaseVisionImage image,
final FrameMetadata metadata) {
detectInImage(image)
.addOnSuccessListener(
new OnSuccessListener<T>() {
@Override
public void onSuccess(final T results) {
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
shouldThrottle.set(false);
}
},1000);
VisionProcessorBase.this.onSuccess(results, metadata);
}
})
.addOnFailureListener(
new OnFailureListener() {
@Override
public void onFailure(@NonNull Exception e) {
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
shouldThrottle.set(false);
}
},1000);
VisionProcessorBase.this.onFailure(e);
}
});
// Begin throttling until this frame of input has been processed, either in onSuccess or
// onFailure.
shouldThrottle.set(true);
}
Reduced the speed by half.
class variable:
private var isLocked = true
scanning:
scanner.process(barcodeImage)
.addOnSuccessListener { barcodes ->
isLocked = !isLocked
if (isLocked) return@addOnSuccessListener // <-- return here
barcodes.forEach { barcode ->
if (barcode.valueType == Barcode.TYPE_TEXT) {
barcode.rawValue?.let { value -> mutableValue.postValue(value) }
}
}
}
.addOnCompleteListener {
image.close()
}

Take picture continuously with autofocus in android

I have made a custom camera view in my application. Basically, I need to do something for as long as the camera in my application is open, after focusing.
In my Camera.Parameters I have used FOCUS_MODE_CONTINUOUS_PICTURE, and it works perfectly as intended. Now I need a callback from this continuous autofocusing from which I can take the currently focused picture and do something with it.
An alternative approach was to hook up a 'Timer' technique, a function that would fire after every certain time period. Then I could call mCamera.autoFocus() inside it, take a picture and do my work. Unfortunately, this proved to be a very bad technique, since autofocusing would pose complications varying across devices.
So I was wondering what the right solution to this would be.
UPDATE: After making an attempt with threads
What I want to do is autofocus and take pictures again and again as long as the app is in the foreground.
After a number of different attempts, this is the farthest I have been able to come, but it is still not far enough.
Please see the runnable code:
public class OCRRunnable implements Runnable {
static final String TAG = "DBG_" + "OCRRunnable";
private final Object mPauseLockDummyObject;
private boolean mPaused;
private boolean mFinished;
Camera mCamera;
public OCRRunnable(Camera cameraParam) {
mPauseLockDummyObject = new Object();
mPaused = false;
mFinished = false;
mCamera = cameraParam;
}
@Override
public void run()
{
if (mCamera != null) { // since the main activity may have been late opening the camera
try {
mCamera.autoFocus(new mAutoFocusCallback());
Log.d(TAG, "run: mCamera.autofocus()");
} catch (Exception e) {
Log.e(TAG, "run: " + e.getMessage());
}
//sleep necessary //TODO: needs refinement
//try { Thread.sleep(3000); } catch (InterruptedException e) { e.printStackTrace(); }
//Runnable regulator
synchronized (mPauseLockDummyObject) {
while (mPaused) {
try {
mPauseLockDummyObject.wait();
} catch (InterruptedException e) {
Log.e(TAG, "run: " + e.getMessage());
}
}
}
}
}
/**
* Call this on pause of the activity.
*/
public void onPause() {
//Log.d(TAG, "onPause: called");
synchronized (mPauseLockDummyObject) {
mPaused = true;
}
}
/**
* Call this on resume of the activity
*/
public void onResume() {
//Log.d(TAG, "onResume: called");
synchronized (mPauseLockDummyObject) {
mPaused = false;
mPauseLockDummyObject.notifyAll();
}
}
//////////////////////////////////////
protected class mAutoFocusCallback implements Camera.AutoFocusCallback {
@Override
public void onAutoFocus(boolean success, final Camera camera) {
camera.takePicture(null, null, new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
Log.d(TAG, "onPictureTaken() called");
/* NEED TO RUN THIS CODE PART REPETITIVELY */
camera.cancelAutoFocus(); //be ready for next autofocus
camera.startPreview(); //re-start cameraPreview since taking a picture stops it
run(); //TODO
}
});
}
}
}
Here is my relevant fragment:
public class ODFragment extends Fragment {
View rootView;
static final String TAG = "DBG_" + MainActivity.class.getName();
private Camera mCamera;
private CameraPreview mCameraPreview;
int ROIHeight;
FrameLayout frameLayout_cameraLens;
TextView words_container;
Thread ocrThread;
OCRRunnable ocrRunnable; //TODO: this may cause problems because mCamera is null as of this moment
public ODFragment() {}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
rootView = inflater.inflate(R.layout.fragment_od, container, false);
//one time tasks
words_container = (TextView) rootView.findViewById(R.id.words_container);
setCameraViewDimensions();
ocrThread = new Thread(ocrRunnable); //start it in onResume()
return rootView;
}
@Override
public void onResume() {
super.onResume();
hookCamera();
}
@Override
public void onPause() {
super.onPause();
unhookCamera();
}
private void hookCamera() {
try {
// 1. open camera
mCamera = Camera.open();
// 2. initialize cameraPreview
mCameraPreview = new CameraPreview(this.getActivity(), mCamera);
// 3. add view to frameLayout
frameLayout_cameraLens.addView(mCameraPreview);
mCamera.startPreview();
// 4. hook camera related listeners and threads
ocrRunnable = new OCRRunnable(mCamera);
ocrThread = new Thread(ocrRunnable);
ocrThread.start();
ocrRunnable.onResume();
} catch (Exception e) {
e.printStackTrace();
Log.d(TAG, "Could not Camera.open(): " + e.getMessage());
}
}
private void unhookCamera() {
try {
// -4. unhook camera related listeners and threads
ocrRunnable.onPause();
ocrThread = null;
ocrRunnable = null;
// -3. remove view from frameLayout
frameLayout_cameraLens.removeView(mCameraPreview);
// -2. destroy cameraPreview
mCameraPreview = null;
// -1. close camera
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
} catch (Exception e) {
e.printStackTrace();
}
}
private void setCameraViewDimensions() {
//calculate and set dimensions of cameraLens
DisplayMetrics displaymetrics = new DisplayMetrics();
getActivity().getWindowManager().getDefaultDisplay().getMetrics(displaymetrics);
int width = displaymetrics.widthPixels;
int height = (int) (width * 1.3333333333333);
//Log.d(TAG, "frameLayout_cameraLens dimensions: "+height+"x"+width);
frameLayout_cameraLens = (FrameLayout) rootView.findViewById(R.id.frameLayout_cameraLens);
frameLayout_cameraLens.getLayoutParams().width = width;
frameLayout_cameraLens.getLayoutParams().height = height;
frameLayout_cameraLens.requestLayout();
//set height of ROI
ROIHeight = height / 5;
LinearLayout linearLayout = (LinearLayout) rootView.findViewById(R.id.ROI);
linearLayout.getLayoutParams().height = ROIHeight;
linearLayout.requestLayout();
}
}
Some points:
I think camera.autoFocus() happens on a separate thread itself. That is why, instead of putting a while(true) loop in run(), I have called run() at the end of mAutoFocusCallback.
The error at this point is that execution reaches Log.d(TAG, "run: mCamera.autofocus()"); exactly once. Additionally, Log.d(TAG, "onPictureTaken() called"); is never reached.
Here is my relevant log:
05-29 12:51:58.460 W/art: Before Android 4.1, method android.graphics.PorterDuffColorFilter android.support.graphics.drawable.VectorDrawableCompat.updateTintFilter(android.graphics.PorterDuffColorFilter, android.content.res.ColorStateList, android.graphics.PorterDuff$Mode) would have incorrectly overridden the package-private method in android.graphics.drawable.Drawable
05-29 12:51:58.573 D/OpenGLRenderer: Use EGL_SWAP_BEHAVIOR_PRESERVED: true
05-29 12:51:58.655 W/FragmentManager: moveToState: Fragment state for VTFragment{fd65ec0 #0 id=0x7f0c006d android:switcher:2131492973:1} not updated inline; expected state 3 found 2
05-29 12:51:58.962 D/DBG_CameraPreview: CameraPreview() initialized
05-29 12:51:59.079 D/DBG_OCRRunnable: run: mCamera.autofocus()
05-29 12:51:59.097 I/Adreno-EGL: <qeglDrvAPI_eglInitialize:379>: EGL 1.4 QUALCOMM build: Nondeterministic_AU_msm8974_LA.BF.1.1.3_RB1__release_AU (I3f4bae6ca5)
OpenGL ES Shader Compiler Version: E031.29.00.00
Build Date: 02/14/16 Sun
Local Branch: mybranch18261495
Remote Branch: quic/LA.BF.1.1.3_rb1.10
Local Patches: NONE
Reconstruct Branch: NOTHING
05-29 12:51:59.101 I/OpenGLRenderer: Initialized EGL, version 1.4
05-29 12:51:59.366 I/Choreographer: Skipped 46 frames! The application may be doing too much work on its main thread.
05-29 12:51:59.556 I/Timeline: Timeline: Activity_idle id: android.os.BinderProxy#c66e1c1 time:44964952
Use mCamera.autoFocus(callback), and the callback will be triggered when the focus is ready. This is your chance to take the picture. In onPictureTaken() you will probably call mCamera.cancelAutoFocus(); mCamera.autoFocus(callback); to allow the camera to re-focus.
Naturally, you may choose not to take a picture every time the camera gets focused.
To improve the performance of your camera app, make sure to call Camera.open() on a background HandlerThread - otherwise the focus and picture callbacks will block the UI thread. This may be acceptable when these callbacks happen once in a while, but in your scenario they will fire pretty often.
Update: Let me present the suggested loop more clearly (in pseudocode):
cameraInit() {
Open camera, set FOCUS_MODE_CONTINUOUS_PICTURE
camera.autofocus(autofocusCallback)
}
autofocusCallback() {
if (it_is_time_to_take_picture) {
takePicture(pictureCallback)
}
else {
autofocusAgain()
}
}
pictureCallback.onPictureTaken(image) {
autofocusAgain()
save image
}
autofocusAgain() {
camera.cancelAutoFocus()
wait(100ms)
camera.autofocus(autofocusCallback)
}
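For reference, here is a minimal Java sketch of that loop using the legacy android.hardware.Camera API that the question uses. The class name, the Handler, and savePicture() are illustrative assumptions, not part of the answer above.
import android.hardware.Camera;
import android.os.Handler;

// Minimal sketch of the pseudocode above using the legacy android.hardware.Camera API.
// FocusCaptureLoop, the handler, and savePicture() are illustrative names.
class FocusCaptureLoop {
    private final Camera camera;
    private final Handler handler; // handler on the (background) thread that opened the camera

    FocusCaptureLoop(Camera camera, Handler handler) {
        this.camera = camera;
        this.handler = handler;
    }

    void start() {
        camera.autoFocus(autoFocusCallback);
    }

    private final Camera.PictureCallback pictureCallback = new Camera.PictureCallback() {
        @Override
        public void onPictureTaken(byte[] data, Camera cam) {
            cam.startPreview();   // taking a picture stops the preview
            restartAutoFocus();   // re-arm focus for the next shot
            savePicture(data);    // placeholder: persist or process the JPEG off this thread
        }
    };

    private final Camera.AutoFocusCallback autoFocusCallback = new Camera.AutoFocusCallback() {
        @Override
        public void onAutoFocus(boolean success, Camera cam) {
            if (success /* && it is time to take a picture */) {
                cam.takePicture(null, null, pictureCallback);
            } else {
                restartAutoFocus();
            }
        }
    };

    private void restartAutoFocus() {
        camera.cancelAutoFocus();
        // small delay before re-arming autofocus, as in the pseudocode's wait(100ms)
        handler.postDelayed(() -> camera.autoFocus(autoFocusCallback), 100);
    }

    private void savePicture(byte[] jpegData) {
        // illustrative stub
    }
}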

Android Camera.PreviewCallback scheduling (with OpenGL and OpenCV)

I'm developing an application which requires heavy image processing using camera input and real-time results display. I've decided to use OpenGL and OpenCV along with Android's normal camera API. So far it has become a bit of a multithreading nightmare, and unfortunately I feel very restricted by the lack of documentation on the onPreviewFrame() callback.
I am aware from the documentation that onPreviewFrame() is called on the thread which acquires the camera using Camera.open(). What confuses me is how this callback is scheduled - it seems to be at a fixed framerate. My current architecture relies on the onPreviewFrame() callback to initiate the image processing/display cycle, and it seems to go into deadlock when I block the camera callback thread for too long, so I suspect that the callback is inflexible when it comes to scheduling. I'd like to slow down the framerate to test this, but my device doesn't support this.
I started with the code over at http://maninara.blogspot.ca/2012/09/render-camera-preview-using-opengl-es.html. This code is not very parallel, and it is only meant to display exactly the data which the camera returns. For my needs, I adapted the code to draw bitmaps, and I use a dedicated thread to buffer the camera data to another dedicated heavy-lifting image processing thread (all outside of the OpenGL thread).
Here is my code (simplified):
CameraSurfaceRenderer.java
class CameraSurfaceRenderer implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener,
Camera.PreviewCallback
{
static int[] surfaceTexPtr;
static CameraSurfaceView cameraSurfaceView;
static FloatBuffer pVertex;
static FloatBuffer pTexCoord;
static int hProgramPointer;
static Camera camera;
static SurfaceTexture surfaceTexture;
static Bitmap procBitmap;
static int[] procBitmapPtr;
static boolean updateSurfaceTex = false;
static ConditionVariable previewFrameLock;
static ConditionVariable bitmapDrawLock;
// MarkerFinder extends CameraImgProc
static MarkerFinder markerFinder = new MarkerFinder();
static Thread previewCallbackThread;
static
{
previewFrameLock = new ConditionVariable();
previewFrameLock.open();
bitmapDrawLock = new ConditionVariable();
bitmapDrawLock.open();
}
CameraSurfaceRenderer(Context context, CameraSurfaceView view)
{
rendererContext = context;
cameraSurfaceView = view;
// … // Load pVertex and pTexCoord vertex buffers
}
public void close()
{
// … // This code usually doesn’t have the chance to get called
}
@Override
public void onSurfaceCreated(GL10 unused, EGLConfig config)
{
// .. // Initialize a texture object for the bitmap data
surfaceTexPtr = new int[1];
surfaceTexture = new SurfaceTexture(surfaceTexPtr[0]);
surfaceTexture.setOnFrameAvailableListener(this);
//Initialize camera on its own thread so preview frame callbacks are processed in parallel
previewCallbackThread = new Thread()
{
@Override
public void run()
{
try {
camera = Camera.open();
} catch (RuntimeException e) {
// … // Bitch to the user through a Toast on the UI thread
}
assert camera != null;
//Callback set on CameraSurfaceRenderer class, but executed on worker thread
camera.setPreviewCallback(CameraSurfaceRenderer.this);
try {
camera.setPreviewTexture(surfaceTexture);
} catch (IOException e) {
Log.e(Const.TAG, "Unable to set preview texture");
}
Looper.prepare();
Looper.loop();
}
};
previewCallbackThread.start();
// … // More OpenGL initialization stuff
}
@Override
public void onDrawFrame(GL10 unused)
{
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
synchronized (this)
{
surfaceTexture.updateTexImage();
}
// Binds bitmap data to texture
bindBitmap(procBitmap);
// … // Acquire shader program attributes, render
GLES20.glFlush();
}
@Override
public synchronized void onFrameAvailable(SurfaceTexture surfaceTexture)
{
cameraSurfaceView.requestRender();
}
@Override
public void onPreviewFrame(byte[] data, Camera camera)
{
Bitmap bitmap = markerFinder.exchangeRawDataForProcessedImg(data, null, camera);
// … // Check for null bitmap
previewFrameLock.block();
procBitmap = bitmap;
previewFrameLock.close();
bitmapDrawLock.open();
}
void bindBitmap(Bitmap bitmap)
{
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, procBitmapPtr[0]);
bitmapDrawLock.block();
if (bitmap != null && !bitmap.isRecycled())
{
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
bitmap.recycle();
}
bitmapDrawLock.close();
previewFrameLock.open();
}
@Override
public void onSurfaceChanged(GL10 unused, int width, int height)
{
GLES20.glViewport(0, 0, width, height);
// … // Set camera parameters
camera.startPreview();
}
void deleteTexture()
{
GLES20.glDeleteTextures(1, surfaceTexPtr, 0);
}
}
CameraImgProc.java (abstract class)
public abstract class CameraImgProc
{
CameraImgProcThread thread = new CameraImgProcThread();
Handler handler;
ConditionVariable bufferSwapLock = new ConditionVariable(true);
Runnable processTask = new Runnable()
{
@Override
public void run()
{
imgProcBitmap = processImg(lastWidth, lastHeight, cameraDataBuffer, imgProcBitmap);
bufferSwapLock.open();
}
};
int lastWidth = 0;
int lastHeight = 0;
Mat cameraDataBuffer;
Bitmap imgProcBitmap;
public CameraImgProc()
{
thread.start();
handler = thread.getHandler();
}
protected abstract Bitmap allocateBitmapBuffer(int width, int height);
public final Bitmap exchangeRawDataForProcessedImg(byte[] data, Bitmap dirtyBuffer, Camera camera)
{
Camera.Parameters parameters = camera.getParameters();
Camera.Size size = parameters.getPreviewSize();
// Wait for worker thread to finish processing image
bufferSwapLock.block();
bufferSwapLock.close();
Bitmap freshBuffer = imgProcBitmap;
imgProcBitmap = dirtyBuffer;
// Reallocate buffers if size changes to avoid overflow
assert size != null;
if (lastWidth != size.width || lastHeight != size.height)
{
lastHeight = size.height;
lastWidth = size.width;
if (cameraDataBuffer != null) cameraDataBuffer.release();
//YUV format requires 1.5 times as much information in vertical direction
cameraDataBuffer = new Mat((lastHeight * 3) / 2, lastWidth, CvType.CV_8UC1);
imgProcBitmap = allocateBitmapBuffer(lastWidth, lastHeight);
// Buffers had to be resized, therefore no processed data to return
cameraDataBuffer.put(0, 0, data);
handler.post(processTask);
return null;
}
// If program did not pass a buffer
if (imgProcBitmap == null)
imgProcBitmap = allocateBitmapBuffer(lastWidth, lastHeight);
// Exchange data
cameraDataBuffer.put(0, 0, data);
// Give img processing task to worker thread
handler.post(processTask);
return freshBuffer;
}
protected abstract Bitmap processImg(int width, int height, Mat cameraData, Bitmap dirtyBuffer);
class CameraImgProcThread extends Thread
{
volatile Handler handler;
@Override
public void run()
{
Looper.prepare();
handler = new Handler();
Looper.loop();
}
Handler getHandler()
{
//noinspection StatementWithEmptyBody
while (handler == null)
{
try {
Thread.currentThread();
Thread.sleep(5);
} catch (Exception e) {
//Do nothing
}
};
return handler;
}
}
}
I want an application which is robust, no matter how long it takes for the CameraImgProc.processImg() function to finish. Unfortunately, the only possible solution when camera frames are being fed in at a fixed rate is to drop frames when the image processing hasn't finished yet, or else I'll quickly have a buffer overflow.
My questions are as follows:
Is there any way to slow down the Camera.PreviewCallback frequency on demand?
Is there an existing Android API for getting frames on demand from the camera?
Are there existing solutions to this problem which I can refer to?
onPreviewFrame() is called on the thread which acquires the camera using Camera.open()
That's a common misunderstanding. The key word that is missing from this description is "event". To schedule the camera callbacks to a non-UI thread, you need an "event thread", a synonym for HandlerThread. Please see my explanation and sample elsewhere on SO. Using a plain thread to open the camera, as in your code, is not useless, because the call itself may take a few hundred milliseconds on some devices, but an event thread is much, much better.
Now let me address your questions: no, you cannot control the schedule of camera callbacks.
You can use setOneShotPreviewCallback() if you want to receive callbacks at 1 FPS or less. Your mileage may vary, and it depends on the device, but I would recommend using setPreviewCallbackWithBuffer() and simply returning from onPreviewFrame() if you want to check the camera more often. The performance hit from these empty callbacks is minor.
Note that even when you offload the callbacks to a background thread, they are blocking: if it takes 200 ms to process a preview frame, the camera will wait. Therefore, I usually send the byte[] to a worker thread and quickly release the callback thread. I don't recommend slowing down the flow of preview callbacks by processing them in blocking mode, because after you release the thread, the next callback will deliver a frame with an undefined timestamp. Maybe it will be a fresh one, or maybe it will be one buffered a while ago.
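As an illustration of that hand-off (not code from the answer), here is a small sketch that posts each frame to a HandlerThread and returns immediately; the thread name and processFrame() are made-up placeholders.
import android.hardware.Camera;
import android.os.Handler;
import android.os.HandlerThread;

// Sketch: hand each preview frame to a background HandlerThread so the camera's
// callback thread is released immediately. processFrame() is a placeholder.
class PreviewOffloader {
    private final HandlerThread workerThread = new HandlerThread("frame-processing");
    private Handler workerHandler;

    void attach(Camera camera) {
        workerThread.start();
        workerHandler = new Handler(workerThread.getLooper());
        camera.setPreviewCallback(new Camera.PreviewCallback() {
            @Override
            public void onPreviewFrame(byte[] data, Camera cam) {
                // post the raw bytes and return right away; heavy work happens elsewhere
                workerHandler.post(() -> processFrame(data));
            }
        });
    }

    void processFrame(byte[] data) {
        // placeholder for the actual image processing
    }

    void detach(Camera camera) {
        camera.setPreviewCallback(null);
        workerThread.quitSafely();
    }
}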
You can schedule the callback in later platform releases (>4.0) indirectly. You can set up the buffers that the callback will use to deliver the data. Typically you set up two buffers: one to be written by the camera HAL while you read from the other. No new frame will be delivered to you (by calling your onPreviewFrame()) until you return a buffer that the camera can write to. It also means that the camera will drop frames.
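A rough sketch of that two-buffer setup might look like the following; it assumes the default NV21 preview format, and processFrame() is a placeholder for the real work.
// Sketch of the two-buffer setup described above, using setPreviewCallbackWithBuffer().
// The buffer size assumes the default NV21 preview format (12 bits per pixel).
void setUpBufferedPreview(Camera camera) {
    Camera.Size size = camera.getParameters().getPreviewSize();
    int bufferSize = size.width * size.height * 3 / 2;

    // two buffers: the camera HAL writes into one while the other is being read
    camera.addCallbackBuffer(new byte[bufferSize]);
    camera.addCallbackBuffer(new byte[bufferSize]);

    camera.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() {
        @Override
        public void onPreviewFrame(byte[] data, Camera cam) {
            processFrame(data); // placeholder for the real work
            // no new frame arrives until a buffer is returned; frames that
            // arrive in the meantime are simply dropped by the camera
            cam.addCallbackBuffer(data);
        }
    });
}

void processFrame(byte[] nv21Data) {
    // placeholder for the actual frame handling
}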

Android how do I wait until a service is actually connected?

I have an Activity calling a Service defined in IDownloaderService.aidl:
public class Downloader extends Activity {
IDownloaderService downloader = null;
// ...
In Downloader.onCreate(Bundle) I tried to bindService
Intent serviceIntent = new Intent(this, DownloaderService.class);
if (bindService(serviceIntent, sc, BIND_AUTO_CREATE)) {
// ...
and within the ServiceConnection object sc I did this
public void onServiceConnected(ComponentName name, IBinder service) {
Log.w("XXX", "onServiceConnected");
downloader = IDownloaderService.Stub.asInterface(service);
// ...
By adding all kinds of Log.xx calls I found that the code after if(bindService(...)) actually runs BEFORE ServiceConnection.onServiceConnected is called - that is, while downloader is still null - which gets me into trouble. All the samples in ApiDemos avoid this timing problem by only calling services when triggered by user actions. But what should I do to correctly use this service after bindService succeeds? How can I reliably wait for ServiceConnection.onServiceConnected to be called?
A related question: are all the event handlers - Activity.onCreate, any View.OnClickListener.onClick, ServiceConnection.onServiceConnected, etc. - actually called on the same thread (referred to in the docs as the "main thread")? Do they interleave, or does Android schedule all events to be handled one by one? And when exactly is ServiceConnection.onServiceConnected actually going to be called - upon completion of Activity.onCreate, or sometime while Activity.onCreate is still running?
How can I reliably wait for ServiceConnection.onServiceConnected to be called?
You don't. You exit out of onCreate() (or wherever you are binding) and you put your "needs the connection established" code in onServiceConnected().
Are all the event handlers - Activity.onCreate, any View.OnClickListener.onClick, ServiceConnection.onServiceConnected, etc. - actually called on the same thread?
Yes.
When exactly is ServiceConnection.onServiceConnected actually going to be called? Upon completion of Activity.onCreate, or sometime while Activity.onCreate is still running?
Your bind request probably is not even going to start until after you leave onCreate(). Hence, onServiceConnected() will be called sometime after you leave onCreate().
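To make the pattern concrete, here is a minimal sketch using the question's own IDownloaderService; startPendingDownloads() is just a stand-in for whatever code needs the established connection.
// Minimal sketch: the code that needs the connection lives in onServiceConnected(),
// not after bindService(). startPendingDownloads() is a stand-in method.
private IDownloaderService downloader;

private final ServiceConnection sc = new ServiceConnection() {
    @Override
    public void onServiceConnected(ComponentName name, IBinder service) {
        downloader = IDownloaderService.Stub.asInterface(service);
        startPendingDownloads(); // safe to use the service from here on
    }

    @Override
    public void onServiceDisconnected(ComponentName name) {
        downloader = null;
    }
};

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    bindService(new Intent(this, DownloaderService.class), sc, BIND_AUTO_CREATE);
    // do NOT touch `downloader` here; it is still null at this point
}

private void startPendingDownloads() {
    // placeholder for whatever actually needs the bound service
}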
I had the same problem. I didn't want to put my bound service dependent code in onServiceConnected, though, because I wanted to bind/unbind with onStart and onStop, but I didn't want the code to run again every time the activity came back to the front. I only wanted it to run when the activity was first created.
I finally got over my onStart() tunnel vision and used a Boolean to indicate whether this was the first onServiceConnected run or not. That way, I can unbindService in onStop and bindService again in onStart without running all the start up stuff each time.
I ended up with something like this:
1) To give the auxiliary stuff some scope, I created an internal class. At least the ugly internals are separated from the rest of the code. I needed a remote service doing something, hence the word Something in the class name:
private RemoteSomethingHelper mRemoteSomethingHelper = new RemoteSomethingHelper();
class RemoteSomethingHelper {
//...
}
2) There are two things necessary to invoke a remote service method: the IBinder and the code to execute. Since we don't know which one becomes known first, we store them:
private ISomethingService mISomethingService;
private Runnable mActionRunnable;
Each time we write to one of these fields, we invoke _startActionIfPossible():
private void _startActionIfPossible() {
if (mActionRunnable != null && mISomethingService != null) {
mActionRunnable.run();
mActionRunnable = null;
}
}
private void performAction(Runnable r) {
mActionRunnable = r;
_startActionIfPossible();
}
This, of course, assumes that the Runnable has access to mISomethingService, but this is true for runnables created within the methods of the RemoteSomethingHelper class.
It is really good that the ServiceConnection callbacks are called on the UI thread: if we are going to invoke the service methods from the main thread, we do not need to care about synchronization.
ISomethingService is, of course, defined via AIDL.
3) Instead of just passing arguments to methods, we create a Runnable that will invoke the method with these arguments later, when invocation is possible:
private boolean mServiceBound;
void startSomething(final String arg1) {
// ... starting the service ...
final String arg2 = ...;
performAction(new Runnable() {
@Override
public void run() {
try {
// arg1 and arg2 must be final!
mISomethingService.startSomething(arg1, arg2);
} catch (RemoteException e) {
e.printStackTrace();
}
}
});
}
4) Finally, we get:
private RemoteSomethingHelper mRemoteSomethingHelper = new RemoteSomethingHelper();
class RemoteSomethingHelper {
private ISomethingService mISomethingService;
private Runnable mActionRunnable;
private boolean mServiceBound;
private void _startActionIfPossible() {
if (mActionRunnable != null && mISomethingService != null) {
mActionRunnable.run();
mActionRunnable = null;
}
}
private ServiceConnection mServiceConnection = new ServiceConnection() {
// the methods on this class are called from the main thread of your process.
@Override
public void onServiceDisconnected(ComponentName name) {
mISomethingService = null;
}
@Override
public void onServiceConnected(ComponentName name, IBinder service) {
mISomethingService = ISomethingService.Stub.asInterface(service);
_startActionIfPossible();
}
};
private void performAction(Runnable r) {
mActionRunnable = r;
_startActionIfPossible();
}
public void startSomething(final String arg1) {
Intent intent = new Intent(context.getApplicationContext(),SomethingService.class);
if (!mServiceBound) {
mServiceBound = context.getApplicationContext().bindService(intent, mServiceConnection, 0);
}
ComponentName cn = context.getApplicationContext().startService(intent);
final String arg2 = ...;
performAction(new Runnable() {
@Override
public void run() {
try {
mISomethingService.startSomething(arg1, arg2);
} catch (RemoteException e) {
e.printStackTrace();
}
}
});
}
}
context is a field in my class; in an Activity, you can define it as Context context=this;
I did not need queuing actions; if you do, you can implement it.
You likely will need a result callback in startSomething(); I did, but this is not shown in this code.
I did something similar before; the only difference is that I was not binding to the service, but just starting it.
I would broadcast an intent from the service to notify the caller/activity that it has started.
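A minimal sketch of that broadcast approach could look like this; the action string and method names are illustrative, and on recent Android versions a context-registered receiver also needs an exported/not-exported flag.
// Sketch of the broadcast approach. The action string and names are illustrative.

// In the service, once it has finished starting up:
public static final String ACTION_SERVICE_READY = "com.example.ACTION_SERVICE_READY";

private void announceReady() {
    sendBroadcast(new Intent(ACTION_SERVICE_READY));
}

// In the activity: register a receiver, then start the service.
private final BroadcastReceiver readyReceiver = new BroadcastReceiver() {
    @Override
    public void onReceive(Context context, Intent intent) {
        // the service is up; it is now safe to talk to it
    }
};

@Override
protected void onStart() {
    super.onStart();
    registerReceiver(readyReceiver, new IntentFilter(ACTION_SERVICE_READY));
    startService(new Intent(this, DownloaderService.class));
}

@Override
protected void onStop() {
    unregisterReceiver(readyReceiver);
    super.onStop();
}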
I wanted to add some things you should or should not do:
Bind the service not in onCreate but in onResume, and unbind it in onPause. Your app can go into pause (background) at any time through user interaction or OS screens.
Use a distinct try/catch for each service unbinding, receiver unregistering, etc. in onPause, so that if one is not bound or registered the exception doesn't prevent the others from being cleaned up too.
I usually encapsulate binding in a public MyServiceBinder getService() method. I also always use a guard boolean variable so I don't have to keep an eye on all those calls using the service in the activity.
Example:
boolean isBindingOngoing = false;
MyService.Binder serviceHelp = null;
ServiceConnection myServiceCon = null;
public MyService.Binder getMyService()
{
if(serviceHelp==null)
{
//don't bind multiple times
//guard against getting null on fist getMyService calls!
if(isBindingOngoing)return null;
isBindingOngoing = true;
myServiceCon = new ServiceConnection(
public void onServiceConnected(ComponentName cName, IBinder binder) {
serviceHelp = (MyService.Binder) binder;
//or using aidl: serviceHelp = MyService.Stub.AsInterface(binder);
isServiceBindingOngoing = false;
continueAfterServiceConnect(); //I use a method like this to continue
}
public void onServiceDisconnected(ComponentName className) {
serviceHelp = null;
}
);
bindService(serviceStartIntent,myServiceCon);
}
return serviceHelp;
}
Android 10 introduced a new bindService method signature for binding to a service with an Executor (which can be created via Executors) that controls the ServiceConnection callbacks.
/**
* Same as {@link #bindService(Intent, ServiceConnection, int)} with executor to control
* ServiceConnection callbacks.
* @param executor Callbacks on ServiceConnection will be called on executor. Must use same
* instance for the same instance of ServiceConnection.
*/
public boolean bindService(@RequiresPermission @NonNull Intent service,
@BindServiceFlags int flags, @NonNull @CallbackExecutor Executor executor,
@NonNull ServiceConnection conn) {
throw new RuntimeException("Not implemented. Must override in a subclass.");
}
This allows to bind to the service in a thread and wait until it is connected. E.g. stub:
private final AtomicBoolean connected = new AtomicBoolean(false);
private final Object lock = new Object();
...
private void myConnectMethod() {
// bind to service
ExecutorService executorService = Executors.newSingleThreadExecutor();
context.bindService(new Intent(context, MyServiceClass.class), Context.BIND_AUTO_CREATE, executorService, new ServiceConnection() {
@Override
public void onServiceConnected(ComponentName name, IBinder binder) {
synchronized (lock) {
// TODO: store service instance for calls in case of AIDL or local services
connected.set(true);
lock.notify();
}
}
@Override
public void onServiceDisconnected(ComponentName name) {
connected.set(false);
}
});
synchronized (lock) {
while (!connected.get()) {
try {
lock.wait();
} catch (InterruptedException e) {
throw new RuntimeException();
}
}
}
}
It is also necessary to run the service in a separate process:
<service
android:name=".MyServiceClass"
android:process=":service"
android:enabled="true"
android:exported="true" />
I figured out that these workarounds are worth the effort and the wait only if your bound services run in a different process than your application's main process.
For accessing data and methods in the same process (or application), I ended up implementing singleton classes. If the classes need a context for some methods, I hand the application context to the singleton classes. There is, of course, a bad consequence: it breaks "Instant Run". But that is an overall better compromise, I think.
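A minimal sketch of such a singleton, assuming only the application context is retained; the class name and method are made up.
// Minimal sketch of the singleton approach. Only the application context is kept,
// so no Activity instance is retained. The class name is illustrative.
public final class DownloaderSingleton {
    private static DownloaderSingleton instance;
    private final Context appContext;

    private DownloaderSingleton(Context context) {
        this.appContext = context.getApplicationContext();
    }

    public static synchronized DownloaderSingleton getInstance(Context context) {
        if (instance == null) {
            instance = new DownloaderSingleton(context);
        }
        return instance;
    }

    public void download(String url) {
        // methods can use appContext wherever a Context is required
    }
}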
The basic idea is the same as in the answer by @18446744073709551615, but I will share my code as well.
As an answer to the main question:
But what should I do to correctly use this service after bindService succeeds?
[Original expectation (does not work)]
Wait until the service is connected, like below:
@Override
protected void onStart() {
super.onStart();
bindService(service, mWebServiceConnection, BIND_AUTO_CREATE);
synchronized (mLock) {
try { mLock.wait(40000); } catch (InterruptedException e) { /* ignored */ }
}
// rest of the code continues here, which uses the service stub interface
// ...
}
It won't work because both bindService() in onCreate()/onStart() and onServiceConnected() are called on the same main thread.
onServiceConnected() is never called before the wait finishes.
[Alternative solution]
Instead of "wait", define own Runnable to be called after Service Connected and execute this runnable after service connected.
Implement custom class of ServiceConnection as follows.
public class MyServiceConnection implements ServiceConnection {
private static final String TAG = MyServiceConnection.class.getSimpleName();
private Context mContext = null;
private IMyService mMyService = null;
private ArrayList<Runnable> runnableArrayList;
private Boolean isConnected = false;
public MyServiceConnection(Context context) {
mContext = context;
runnableArrayList = new ArrayList<>();
}
public IMyService getInterface() {
return mMyService;
}
@Override
public void onServiceConnected(ComponentName name, IBinder service) {
Log.v(TAG, "Connected Service: " + name);
mMyService = MyService.Stub.asInterface(service);
isConnected = true;
/* Execute runnables after Service connected */
for (Runnable action : runnableArrayList) {
action.run();
}
runnableArrayList.clear();
}
@Override
public void onServiceDisconnected(ComponentName name) {
try {
mMyService = null;
mContext.unbindService(this);
isConnected = false;
Log.v(TAG, "Disconnected Service: " + name);
} catch(Exception e) {
Log.e(TAG, e.toString());
}
}
public void executeAfterServiceConnected(Runnable action) {
Log.v(TAG, "executeAfterServiceConnected");
if(isConnected) {
Log.v(TAG, "Service already connected, execute now");
action.run();
} else {
// this action will be executed at the end of onServiceConnected method
Log.v(TAG, "Service not connected yet, execute later");
runnableArrayList.add(action);
}
}
}
And then use it in the following way (in your Activity class, etc.):
private MyServiceConnection myServiceConnection = null;
@Override
protected void onStart() {
Log.d(TAG, "onStart");
super.onStart();
Intent serviceIntent = new Intent(getApplicationContext(), MyService.class);
startService(serviceIntent);
myServiceConnection = new MyServiceConnection(getApplicationContext());
bindService(serviceIntent, myServiceConnection, BIND_AUTO_CREATE);
// Instead of "wait" here, create callback which will be called after service is connected
myServiceConnection.executeAfterServiceConnected(new Runnable() {
@Override
public void run() {
// Rest of the code comes here.
// This runnable will be executed after service connected, so we can use service stub interface
IMyService myService = myServiceConnection.getInterface();
// ...
}
});
}
It worked for me, but there may be a better way.
