How to change camera exposure on Android?

I want to do image processing with OpenCV on Android. As a first step, I need to change camera properties such as resolution and exposure. With OpenCV alone I can only change the resolution (mOpenCvCameraView.setMaxFrameSize(320, 240);), not the exposure.
I also tried combining OpenCV with camera2, but when I run it the app crashes (this code: pastebin.com/3XgvKGQN).
How can I change the camera exposure?
package com.williams.drew.opencvtest;
import android.graphics.Paint;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.SurfaceView;
import android.view.WindowManager;
import org.opencv.android.JavaCameraView;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;
public class MainActivity extends AppCompatActivity implements CvCameraViewListener2 {
//Prefixes for logging success and failure messages
private static final String TAG = "OCVSample::Activity";
//Loads camera view of OpenCV for us to use. This lets us see using OpenCV
private CameraBridgeViewBase mOpenCvCameraView;
//Preview Builder which changes exposure (i think)
private CaptureRequest.Builder mPreviewRequestBuilder;
private CaptureRequest mPreviewRequest;
private long exposureTime = 1000,frameDuration = 1000;
private int sensitivity = 200;
//OPENCV Variables
Mat matRGBA;
public MainActivity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.show_camera);
mOpenCvCameraView = (JavaCameraView) findViewById(R.id.show_camera_activity_java_surface_view);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
}
@Override
public void onPause() {
super.onPause();
if(mOpenCvCameraView != null) {
mOpenCvCameraView.disableView();
}
}
@Override
public void onResume() {
super.onResume();
if(!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for init");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_1_0, this, mLoaderCallback);
}
else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
@Override
public void onDestroy() {
super.onDestroy();
if(mOpenCvCameraView != null) {
mOpenCvCameraView.disableView();
}
}
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
@Override
public void onCameraViewStarted(int width, int height) {
// mPreviewRequestBuilder is never initialized anywhere in this activity,
// so the following calls throw a NullPointerException as soon as the view starts
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
mPreviewRequestBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, Long.valueOf(exposureTime));
mPreviewRequestBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, Integer.valueOf(sensitivity));
mPreviewRequestBuilder.set(CaptureRequest.SENSOR_FRAME_DURATION, Long.valueOf(frameDuration));
matRGBA = new Mat(width, height, CvType.CV_8UC4);
}
@Override
public void onCameraViewStopped() {
matRGBA.release();
}
@Override
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
matRGBA = inputFrame.rgba();
return matRGBA;
}
}
Thank you for your answers.

Yes, OpenCV does not expose all of the camera parameters. You can modify JavaCameraView and add a method that calls setExposureCompensation().
You would call it roughly like this:
Camera mCamera;
mCamera = Camera.open();
Camera.Parameters params = mCamera.getParameters();
params.setExposureCompensation(0);
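For a more complete picture, here is a minimal sketch of that approach: a JavaCameraView subclass with an exposure method added. It assumes the protected mCamera field that OpenCV's JavaCameraView exposes to subclasses, and clamps the value to the range the device reports; treat it as a starting point rather than a drop-in solution.

import android.content.Context;
import android.hardware.Camera;
import android.util.AttributeSet;
import org.opencv.android.JavaCameraView;

public class ExposureCameraView extends JavaCameraView {

    public ExposureCameraView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    // Exposure compensation is given in camera-specific "steps"; 0 means no compensation.
    public void setExposureCompensation(int value) {
        if (mCamera == null) return;                      // preview not started yet
        Camera.Parameters params = mCamera.getParameters();
        int min = params.getMinExposureCompensation();
        int max = params.getMaxExposureCompensation();
        if (min == 0 && max == 0) return;                 // device does not support exposure compensation
        value = Math.max(min, Math.min(max, value));      // clamp to the supported range
        params.setExposureCompensation(value);
        mCamera.setParameters(params);                    // apply the change to the running camera
    }
}

Reference the subclass in the layout instead of JavaCameraView and call setExposureCompensation() only after the view has started (for example from onCameraViewStarted()), since mCamera is only valid while the preview is running.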

Related

Android renderscript problem with copying byte array into allocation

I'm new to Renderscript and my app crashes every time at this line:
inAllocation.copyFromUnchecked(copyOfRange(nv21ByteArray, 0, size-1));
What I want to do is convert the YUV image that I get from the camera (nv21ByteArray) to a black-and-white image, so I just need the Y values.
This is the java code:
package org.rwca.anthe.robocup15;
import android.support.v8.renderscript.Allocation;
import android.support.v8.renderscript.Element;
import android.support.v8.renderscript.RenderScript;
import android.support.annotation.NonNull;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.support.v8.renderscript.Type;
import android.widget.ImageView;
import android.widget.Toast;
import org.jetbrains.annotations.NotNull;
import io.fotoapparat.Fotoapparat;
import io.fotoapparat.error.CameraErrorListener;
import io.fotoapparat.exception.camera.CameraException;
import io.fotoapparat.parameter.ScaleType;
import io.fotoapparat.preview.Frame;
import io.fotoapparat.preview.FrameProcessor;
import io.fotoapparat.view.CameraView;
import static io.fotoapparat.log.LoggersKt.fileLogger;
import static io.fotoapparat.log.LoggersKt.logcat;
import static io.fotoapparat.log.LoggersKt.loggers;
import static io.fotoapparat.selector.LensPositionSelectorsKt.back;
import static java.util.Arrays.copyOfRange;
public class MainActivity extends AppCompatActivity {
private CameraView cameraView;
private ImageView imageView;
private Fotoapparat fotoapparat;
public byte[] nv21ByteArray;
public int size = 960 * 1280;
public int thresholdValue;
public RenderScript rs;
public Allocation inAllocation, outAllocation;
public ScriptC_imageProc mScript;
@NonNull
private Fotoapparat createFotoapparat() {
return Fotoapparat
.with(this)
.into(cameraView)
.previewScaleType(ScaleType.CenterCrop)
.lensPosition(back())
.frameProcessor(new SampleFrameProcessor())
.logger(loggers(
logcat(),
fileLogger(this)
))
.cameraErrorCallback(new CameraErrorListener() {
@Override
public void onError(CameraException e) {
Toast.makeText(MainActivity.this, e.toString(), Toast.LENGTH_LONG).show();
}
})
.build();
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
cameraView = findViewById(R.id.cameraView);
imageView = findViewById(R.id.imageView);
fotoapparat = createFotoapparat();
rs = RenderScript.create(this);
mScript = new ScriptC_imageProc(rs);
Type t = Type.createX(rs, Element.U8(rs), size);
inAllocation = Allocation.createSized(rs, Element.U8(rs), size);
outAllocation = Allocation.createTyped(rs, t);
}
@Override
protected void onResume() {
super.onResume();
}
@Override
protected void onStart() {
super.onStart();
fotoapparat.start();
}
@Override
protected void onStop() {
super.onStop();
fotoapparat.stop();
}
private class SampleFrameProcessor implements FrameProcessor {
@Override
public void process(@NotNull Frame frame) {
nv21ByteArray = frame.getImage();
// copyOfRange's end index is exclusive, so this copies only size-1 bytes
// into an allocation that was created for 'size' elements
inAllocation.copyFromUnchecked(copyOfRange(nv21ByteArray, 0, size-1));
mScript.set_thresholdValue(thresholdValue);
mScript.forEach_root(inAllocation, outAllocation);
runOnUiThread(new Runnable() {
@Override
public void run() {
}
});
}
}
}
And this is my renderscript file:
#pragma version(1)
#pragma rs java_package_name(org.rwca.anthe.robocup15)
#include "rs_core.rsh"
#include "rs_time.rsh"
int thresholdValue;
uchar __attribute__((kernel)) root(uchar in, uint32_t x) {
//rsDebug("Called root", rsUptimeMillis());
// note: the kernel is declared to return uchar but never returns a value
}
What am I doing wrong?
Thank you in advance,
Anton
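For reference, the Y (luma) plane of an NV21 frame is simply its first width × height bytes, so a plain-Java grayscale extraction can be sketched like this (a minimal example; the variable names are illustrative and the dimensions match the size = 960 * 1280 used above):

// The NV21 layout is the full-resolution Y plane followed by interleaved VU chroma,
// so the grayscale image is just the leading width * height bytes of the buffer.
int width = 1280, height = 960;                               // swap if your preview reports the other orientation
byte[] nv21 = frame.getImage();                               // full NV21 preview buffer
byte[] gray = copyOfRange(nv21, 0, width * height);           // java.util.Arrays.copyOfRange, already statically imported above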

opencv get fps in variable

I'm new to developing on Android. I want to make an app with OpenCV.
For my app I want to have the FPS of the preview stored in a variable. I found a class in the OpenCV library called FpsMeter, but when I call fpsMeter.init(), my app crashes. I tried different things, like setting
cameraBridgeViewBase.enableFpsMeter();
or
cameraBridgeViewBase.disableFpsMeter();
or
fpsMeter.setResolution(320, 240);
But it crashes every time.
Can someone help me please? Thanks in advance.
package be.anton.OpenCV_test;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.SurfaceView;
import android.widget.Toast;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.FpsMeter;
import org.opencv.android.JavaCameraView;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;
public class MainActivity extends AppCompatActivity implements CameraBridgeViewBase.CvCameraViewListener2 {
CameraBridgeViewBase cameraBridgeViewBase;
BaseLoaderCallback baseLoaderCallback;
FpsMeter fpsMeter;
Mat mat1;
private static final String TAG = "MyActivity";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
cameraBridgeViewBase = (JavaCameraView) findViewById(R.id.cam);
cameraBridgeViewBase.setVisibility(SurfaceView.VISIBLE);
cameraBridgeViewBase.enableFpsMeter();
cameraBridgeViewBase.setCvCameraViewListener(this);
baseLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
// super.onManagerConnected(status);
switch (status) {
case BaseLoaderCallback.SUCCESS: {
cameraBridgeViewBase.enableView();
break;
}
default: {
super.onManagerConnected(status);
break;
}
}
}
};
fpsMeter.init(); // when I add this line my app crashes (fpsMeter is never assigned, so this call throws a NullPointerException)
}
@Override
protected void onResume() {
super.onResume();
if (!OpenCVLoader.initDebug()) {
Toast.makeText(getApplicationContext(), "OpenCV could not be loaded.", Toast.LENGTH_SHORT).show();
} else {
baseLoaderCallback.onManagerConnected(BaseLoaderCallback.SUCCESS);
}
}
@Override
protected void onPause() {
super.onPause();
if (cameraBridgeViewBase != null) {
cameraBridgeViewBase.disableView();
}
}
@Override
protected void onDestroy() {
super.onDestroy();
if (cameraBridgeViewBase != null) {
cameraBridgeViewBase.disableView();
}
}
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
mat1 = inputFrame.rgba();
return mat1;
}
@Override
public void onCameraViewStopped() {
mat1.release();
}
@Override
public void onCameraViewStarted(int width, int height) {
mat1 = new Mat(width, height, CvType.CV_8UC4);
}
}
SOLUTION:
I created my own FPS meter. This is the code:
The variables I use:
TextView txt1;
int mFPS;
long startTime = 0;
long currentTime = 1000;
Set this in onCreate():
runOnUiThread(new Runnable() {
@Override
public void run() {
txt1 = (TextView) findViewById(R.id.txt1);
}
});
And this is my onCameraFrame():
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
mat1 = inputFrame.rgba();
//Log.i(TAG, mat1.get(128, 128).toString());
runOnUiThread(new Runnable() {
@Override
public void run() {
if (currentTime - startTime >= 1000) {
txt1.setText("FPS: " + String.valueOf(mFPS));
mFPS = 0;
startTime = System.currentTimeMillis();
}
currentTime = System.currentTimeMillis();
mFPS += 1;
}
});
return mat1;
}
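A variant of the same idea, counting frames on the camera thread and only touching the UI thread for the TextView update (a sketch reusing the mat1 and txt1 fields from above):

private int frameCount = 0;
private long windowStart = System.currentTimeMillis();

@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    mat1 = inputFrame.rgba();
    frameCount++;
    long now = System.currentTimeMillis();
    if (now - windowStart >= 1000) {              // one-second measurement window
        final int fps = frameCount;               // frames seen in that window
        frameCount = 0;
        windowStart = now;
        runOnUiThread(new Runnable() {            // only the view update runs on the UI thread
            @Override
            public void run() {
                txt1.setText("FPS: " + fps);
            }
        });
    }
    return mat1;
}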

setAutoFocusEnabled(true) doesn't work although my device supports auto focus

I'm trying to create a barcode reader. The program works perfectly when the barcode is big. To read small barcodes I need autofocus enabled.
I added setAutoFocusEnabled(true) to the CameraSource, but it doesn't work.
How can I fix the following code to get autofocus working?
My testing device is a Samsung Galaxy J1 SM111M, Android 5.1.1, API 22.
package com.gutimore.android.pdf417;
import android.Manifest;
import android.content.Context;
import android.content.pm.PackageManager;
import android.hardware.Camera;
import android.os.Vibrator;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityCompat;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.SparseArray;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.widget.TextView;
import com.google.android.gms.vision.CameraSource;
import com.google.android.gms.vision.Detector;
import com.google.android.gms.vision.barcode.Barcode;
import com.google.android.gms.vision.barcode.BarcodeDetector;
import java.io.IOException;
public class MainActivity extends AppCompatActivity {
SurfaceView cameraPreview;
TextView txtResult;
BarcodeDetector barcodeDetector;
CameraSource cameraSource;
final int RequestCameraPermissionID = 1001;
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
switch (requestCode) {
case RequestCameraPermissionID: {
if (grantResults[0] == PackageManager.PERMISSION_GRANTED) {
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
return;
}
try {
cameraSource.start(cameraPreview.getHolder());
} catch (IOException e) {
e.printStackTrace();
}
}
}
break;
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
cameraPreview = findViewById(R.id.cameraPreview);
txtResult = findViewById(R.id.txtResult);
barcodeDetector = new BarcodeDetector.Builder(this)
.setBarcodeFormats(Barcode.PDF417)
.build();
cameraSource = new CameraSource
.Builder(this, barcodeDetector)
.setRequestedPreviewSize(640, 480)
.setAutoFocusEnabled(true)
.build();
//Add Event
cameraPreview.getHolder().addCallback(new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
if (ActivityCompat.checkSelfPermission(getApplicationContext(), android.Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
//Request permission
ActivityCompat.requestPermissions(MainActivity.this,
new String[]{Manifest.permission.CAMERA},RequestCameraPermissionID);
return;
}
try {
cameraSource.start(cameraPreview.getHolder());
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i1, int i2) {
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
cameraSource.stop();
}
});
barcodeDetector.setProcessor(new Detector.Processor<Barcode>() {
@Override
public void release() {
}
@Override
public void receiveDetections(Detector.Detections<Barcode> detections) {
final SparseArray<Barcode> qrcodes = detections.getDetectedItems();
if(qrcodes.size() != 0)
{
txtResult.post(new Runnable() {
@Override
public void run() {
//Create vibrate
Vibrator vibrator = (Vibrator)getApplicationContext().getSystemService(Context.VIBRATOR_SERVICE);
vibrator.vibrate(1000);
txtResult.setText(qrcodes.valueAt(0).displayValue);
}
});
}
}
});
}
}
Use the CameraSource from (link)
package com.google.android.gms.samples.vision.barcodereader.ui.camera;
and initialize it as below:
CameraSource camera = new CameraSource.Builder(requireContext(), barcodeDetector)
.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)
.build();
But on some old Samsung devices autofocus still won't work. Better to use ZXing.
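Before switching libraries it can also help to check which focus modes the device actually reports, since setAutoFocusEnabled(true) can only request a mode the camera supports. A small sketch using the old android.hardware.Camera API (which the vision CameraSource uses internally); run it while no other component holds the camera, and assume the usual java.util.List and android.util.Log imports:

// Query the focus modes the default (rear) camera advertises.
Camera camera = Camera.open();
List<String> modes = camera.getParameters().getSupportedFocusModes();
boolean hasAuto = modes.contains(Camera.Parameters.FOCUS_MODE_AUTO);
boolean hasContinuous = modes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
Log.d("FocusCheck", "auto=" + hasAuto + ", continuous=" + hasContinuous);
camera.release();   // release immediately so CameraSource can open the camera afterwards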

Android - Real-Time get image data from camera?

I want to do some image-processing jobs and get real-time image data from my camera (during the preview) instead of after taking pictures.
I looked at this post but don't know how to use it.
After setting this,
SurfaceHolder.Callback surfaceCallback=new SurfaceHolder.Callback()
{
public void surfaceCreated(SurfaceHolder holder) {
camera.setPreviewCallback(previewCallback);
}
}
and this
private Camera.PreviewCallback previewCallback= new Camera.PreviewCallback()
{
@Override
public void onPreviewFrame(byte[] data,Camera cam)
{
Camera.Size previewSize = cam.getParameters().getPreviewSize();
YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21,previewSize.width,previewSize.height, null);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
yuvImage.compressToJpeg(new Rect(0,0,previewSize.width,previewSize.height),80,baos);
byte[] jdata = baos.toByteArray();
Bitmap bitmap = BitmapFactory.decodeByteArray(jdata,0,jdata.length);
}
};
How can I get my image data in real time? Could anyone please give me a short example?
Thanks
For image-processing operations you can use the OpenCV library.
Here is sample code that processes the frames directly from the camera using OpenCV:
import android.Manifest;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.JavaCameraView;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;
import static org.opencv.core.CvType.CV_8UC4;
public class MainActivity extends AppCompatActivity implements CameraBridgeViewBase.CvCameraViewListener2 {
private static final String TAG = "MainActivity";
JavaCameraView javaCameraView;
Mat frame;
public static final int CAMERA_PERMISSION_REQUEST_CODE = 3;
static {
System.loadLibrary("MyOpenCVLibs");
}
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch(status){
case LoaderCallbackInterface.SUCCESS:
{
javaCameraView.enableView();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
if (ContextCompat.checkSelfPermission(this,Manifest.permission.CAMERA)!= PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(this,new String[]{Manifest.permission.CAMERA},CAMERA_PERMISSION_REQUEST_CODE);
}
javaCameraView = (JavaCameraView) findViewById(R.id.java_camera_view);
javaCameraView.setVisibility(View.VISIBLE);
javaCameraView.setCvCameraViewListener(this);
}
@Override
protected void onPause(){
super.onPause();
if(javaCameraView!=null)
javaCameraView.disableView();
}
@Override
protected void onDestroy(){
super.onDestroy();
if(javaCameraView!=null)
javaCameraView.disableView();
}
@Override
protected void onResume(){
super.onResume();
if (OpenCVLoader.initDebug()) {
Log.i(TAG, "OpenCV loaded successfully.");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
} else {
Log.i(TAG, "OpenCV not loaded.");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_1_0, this, mLoaderCallback);
}
}
@Override
public void onCameraViewStarted(int width, int height) {
frame=new Mat(height,width,CV_8UC4);
}
@Override
public void onCameraViewStopped() {
frame.release();
}
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
frame = inputFrame.rgba();
return frame; // the method must return the Mat to be displayed
}
}
This code checks whether OpenCV was imported properly and requests the camera permission.
Each input frame is saved in the frame variable.
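From there, any per-frame work happens inside onCameraFrame(). As a minimal illustration (a sketch, not part of the answer above; it additionally needs org.opencv.imgproc.Imgproc imported), converting each frame to grayscale could look like this:

@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat rgba = inputFrame.rgba();                             // current camera frame, RGBA
    Mat gray = new Mat();
    Imgproc.cvtColor(rgba, gray, Imgproc.COLOR_RGBA2GRAY);    // 4-channel RGBA -> 1-channel gray
    Imgproc.cvtColor(gray, rgba, Imgproc.COLOR_GRAY2RGBA);    // back to RGBA so the view can render it
    gray.release();
    return rgba;
}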

real time video processing with android openCV

I am a first-time Android programmer.
The project I am working on requires me to do (simple?) real-time video processing.
The app, once finished, needs to do this:
When I open the built-in camera application and choose the video recording option, I can see the surroundings without needing to record. What I am trying to accomplish is to delay that display by a few hundred milliseconds. A colleague of mine could do this pretty easily with the delay option using a laptop webcam and OpenCV (for computers). I am trying to accomplish the same on an Android phone.
Perhaps I am doing a poor job of explaining the situation. Kindly reply at the earliest.
I am working on the code now and, being a first-time programmer, it is taking some time.
Excited to start with Android programming!
No idea if this task actually needs OpenCV (it might be a bit of overkill), but if you opt for it, it's fairly easy.
All we do here is record frames continuously and toggle between realtime/playback mode on some event (onTouch, for simplicity):
package com.berak.echo;
import java.util.ArrayList;
import java.util.List;
import android.os.Bundle;
import android.view.Menu;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnTouchListener;
import android.app.Activity;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import com.berak.echo.R;
public class EchoActivity extends Activity implements CvCameraViewListener2, OnTouchListener {
CameraBridgeViewBase mOpenCvCameraView;
List<Mat> ring = new ArrayList<Mat>(); // recording buffer
int delay = 100; // delay == length of buffer
boolean delayed = false; // state
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_echo);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.cam3_surface_view);
mOpenCvCameraView.setCvCameraViewListener(this);
mOpenCvCameraView.setOnTouchListener(this); // setup as touchlistener
}
// lots of boilerplate, ugly, but needed.
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
mOpenCvCameraView.enableView();
break;
default:
super.onManagerConnected(status);
break;
}
}
};
@Override
public void onResume() {
super.onResume();
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_5,this, mLoaderCallback);
}
@Override
public void onPause() {
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onDestroy() {
super.onDestroy();
}
@Override
public void onCameraViewStarted(int width, int height) { }
@Override
public void onCameraViewStopped() { }
// here's the bread & butter stuff:
@Override
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
Mat mRgba = inputFrame.rgba();
ring.add(mRgba.clone()); // add one at the end
if ( ring.size() >= delay ) { // pop one from the front
ring.get(0).release();
ring.remove(0);
}
Mat ret;
String txt;
if ( delayed && ring.size()>0 ) { // depending on 'delayed' return either playback
ret = ring.get(0); // return the 'oldest'
txt = "playback";
} else {
ret = mRgba; // or realtime frame
txt = "realtime";
}
Core.putText(ret, txt, new Point(20,20), Core.FONT_HERSHEY_PLAIN, 1.2, new Scalar(200,0,0));
return ret;
}
@Override
public boolean onTouch(View v, MotionEvent event) {
// just toggle between delayed and realtime view:
delayed = ! delayed;
return false;
}
}
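One practical caveat with this approach (an estimate, not from the answer itself): the ring buffer holds delay full-resolution RGBA clones, which adds up quickly. A 1280 × 720 RGBA frame is about 3.5 MB, so 100 buffered frames come to roughly 350 MB. Buffering downscaled copies and scaling them back up on playback is one way to shrink that; a sketch of the idea using Imgproc.resize (needs org.opencv.imgproc.Imgproc and org.opencv.core.Size imported):

// Inside onCameraFrame(), when recording: store a half-resolution copy instead of a full clone (1/4 of the memory).
Mat small = new Mat();
Imgproc.resize(mRgba, small, new Size(mRgba.cols() / 2, mRgba.rows() / 2));
ring.add(small);

// Inside onCameraFrame(), when playing back: scale the oldest buffered frame back up to the preview size.
Mat full = new Mat();
Imgproc.resize(ring.get(0), full, mRgba.size());
return full;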
