How to add an image as a mask in the camera frame with OpenCV - android

I am new to OpenCV for Android and am trying to develop an application to detect objects. I used the sample face detection from OpenCV SDK 2.4.11. I want to add an image as a mask to the camera frame after detecting a specific logo. Here is my code:
public class MainActivity extends Activity implements CvCameraViewListener2, OnTouchListener {
private static final String TAG = "OCVSample::Activity";
private static final Scalar FACE_RECT_COLOR = new Scalar(0, 255, 0, 255);
private CameraBridgeViewBase mOpenCvCameraView;
private int mWidth;
private int mHeight;
private int mAbsoluteFaceSize =0;
private CascadeClassifier cascadeClassifier;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
mOpenCvCameraView.setOnTouchListener(MainActivity.this);
InputStream is = getResources().openRawResource(R.raw.cascade_a3_23);
File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
File mCascadeFile = new File(cascadeDir, "cascade_a3_23.xml");
FileOutputStream os = null;
try {
os = new FileOutputStream(mCascadeFile);
byte[] buffer = new byte[4096];
int bytesRead;
while ((bytesRead = is.read(buffer)) != -1) {
os.write(buffer, 0, bytesRead);
}
is.close();
os.close();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
cascadeClassifier= new CascadeClassifier(mCascadeFile.getAbsolutePath());
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public MainActivity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
LinearLayout linearLayout=new LinearLayout(this);
linearLayout.setLayoutParams(new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
mOpenCvCameraView = new JavaCameraView(this, CameraBridgeViewBase.CAMERA_ID_BACK);
mOpenCvCameraView.setLayoutParams(new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
linearLayout.addView(mOpenCvCameraView);
setContentView(linearLayout);
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_11, this, mLoaderCallback);
super.onResume();
}
public void onCameraViewStarted(int width, int height) {
if (mWidth != width || mHeight != height) {
mWidth = width;
mHeight = height;
}
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
Mat mRgba = inputFrame.rgba();
Mat mGrey = inputFrame.gray();
MatOfRect faces = new MatOfRect();
int height = mGrey.rows();
if (Math.round(height * 0.2) > 0) {
mAbsoluteFaceSize = (int) Math.round(height * 0.2);
}
cascadeClassifier.detectMultiScale(mGrey, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
Rect[] facesArray = faces.toArray();
for (int i = 0; i < facesArray.length; i++)
Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
return mRgba;
}
}
And I want to add this image mask into my camera view.
Can anyone help me?
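One approach (a hedged sketch, not a tested answer): load the overlay image once as a Mat, then in onCameraFrame resize it to each detected rectangle and copy it into that region of mRgba. The resource name R.drawable.mask_image is a placeholder for your mask image:
// Load the overlay once, e.g. in onCameraViewStarted (R.drawable.mask_image is a placeholder)
Bitmap bmp = BitmapFactory.decodeResource(getResources(), R.drawable.mask_image);
Mat maskMat = new Mat();
Utils.bitmapToMat(bmp, maskMat); // org.opencv.android.Utils, yields a CV_8UC4 Mat

// In the detection loop of onCameraFrame, paste the overlay instead of drawing a rectangle
for (Rect r : facesArray) {
    Mat roi = mRgba.submat(r);                    // view into the detected region of the frame
    Mat resized = new Mat();
    Imgproc.resize(maskMat, resized, roi.size()); // scale the overlay to the region size
    resized.copyTo(roi);                          // copy the pixels into the frame in place
    resized.release();
}
Both Mats are CV_8UC4 here, so copyTo writes straight into the frame; for transparency you would need to blend using the alpha channel instead.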

Related

How to play a sound on android with a delay?

I'm doing a small school project on object detection by color. When the object arrives at the center of the screen, it must play a sound. So that the sound does not play uninterrupted, I tried to put a delay on it. But every time the application reaches the point of playing the sound, it crashes. I have already researched here and in other forums, and the solutions presented did not work.
public class MainActivity extends AppCompatActivity implements CameraBridgeViewBase.CvCameraViewListener2{
static {
if(!OpenCVLoader.initDebug()){
Log.d("TAG", "OpenCV not loaded");
} else {
Log.d("TAG", "OpenCV loaded");
}
}
public void voltatela(View v) {
setContentView(R.layout.activity_main);
}
Mat imgHVS, imgThresholded;
Scalar sc1, sc2;
JavaCameraView cameraView;
int largura, altura;
public void Verde(View v) {
sc1 = new Scalar(45, 20, 10);
sc2 = new Scalar(75, 255, 255);
irTelaCamera();
}
public void Azul(View v) {
sc1 = new Scalar(80, 50, 50);
sc2 = new Scalar(100, 255, 255);
irTelaCamera();
}
public void Vermelho(View v) {
sc1 = new Scalar(110, 100, 50);
sc2 = new Scalar(130, 255, 255);
irTelaCamera();
}
public void irTelaCamera(){
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
setContentView(R.layout.telacamera);
cameraView = (JavaCameraView)findViewById(R.id.cameraview);
cameraView.setCameraIndex(0); // 0 for the back camera, 1 for the front
cameraView.setCvCameraViewListener(this);
cameraView.enableView();
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
DisplayMetrics displayMetrics = new DisplayMetrics();
WindowManager windowmanager = (WindowManager) getApplicationContext().getSystemService(Context.WINDOW_SERVICE);
windowmanager.getDefaultDisplay().getMetrics(displayMetrics);
}
@Override
protected void onPause() {
super.onPause();
cameraView.disableView();
}
@Override
public void onCameraViewStarted(int width, int height) {
imgHVS = new Mat(width,height, CvType.CV_16UC4);
imgThresholded = new Mat(width,height, CvType.CV_16UC4);
largura = width;
altura = height;
}
@Override
public void onCameraViewStopped() {
}
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
Point centrotela = new Point((largura*0.5),(altura*0.5));
final MediaPlayer som = MediaPlayer.create(this, R.raw.bip);
Imgproc.medianBlur(imgHVS,imgHVS,1);
Imgproc.cvtColor(inputFrame.rgba(), imgHVS,Imgproc.COLOR_BGR2HSV);
Core.inRange(imgHVS, sc1, sc2, imgThresholded);
Imgproc.GaussianBlur(imgThresholded, imgThresholded, new Size(3, 3), 1, 1);
Mat circles = new Mat();
double dp = 1.2d;
int minRadius = 20;
int maxRadius = 0;
double param1 = 100, param2 = 20;
int desvio = (int) (minRadius*0.5);
Imgproc.HoughCircles(imgThresholded, circles, Imgproc.HOUGH_GRADIENT, dp, imgThresholded.rows()/4, 100, 20, minRadius, maxRadius);
int numCircles = (circles.rows() == 0) ? 0 : circles.cols();
for (int i = 0; i < numCircles; i++) {
double[] circleCoordinates = circles.get(0, i);
int x = (int) circleCoordinates[0], y = (int) circleCoordinates[1];
Point center = new Point(x, y);
int radius = (int) circleCoordinates[2];
if((((center.x-desvio) <= centrotela.x) && ((center.x+desvio) >= centrotela.x))) {
if ((((center.y-desvio) <= centrotela.y) && ((center.y+desvio) >= centrotela.y))) {
som.start();
Imgproc.circle(imgThresholded, center, radius, new Scalar(100, 255, 255), 4);
// Play sound after 2 sec delay
final Handler handler = new Handler();
handler.postDelayed(new Runnable() {
@Override
public void run() {
som.stop();
}
}, 2000);
}}
}
Imgproc.circle(imgThresholded,centrotela,50, new Scalar(100,255,255),7);
Imgproc.circle(imgThresholded,centrotela,25, new Scalar(100,255,255),4);
Imgproc.circle(imgThresholded,centrotela,5, new Scalar(100,255,255),-1);
return imgThresholded;
}
}
You have to write som.start(); instead of som.stop(); inside the handler.
Have you tried playing the sound in a different thread (an AsyncTask? See example here)? You would have to keep track of whether you have already spawned a task, though; otherwise you will end up creating too many threads playing the same sound.
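For illustration, a minimal sketch of that throttling idea, assuming som is the MediaPlayer from the question; the field name isSoundScheduled is made up for this example:
// Declare as fields in the Activity
private volatile boolean isSoundScheduled = false;
private final Handler soundHandler = new Handler();

private void playBeepOnce(final MediaPlayer som) {
    if (isSoundScheduled) return;      // a play is already pending, skip this detection
    isSoundScheduled = true;
    soundHandler.postDelayed(new Runnable() {
        @Override
        public void run() {
            som.start();               // play after the delay instead of stopping
            isSoundScheduled = false;  // allow the next detection to schedule again
        }
    }, 2000);
}
Calling playBeepOnce from onCameraFrame then plays at most one sound per two-second window instead of piling up MediaPlayer instances.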

Data Timestamp Android

I am developing an application where I do some real-time image processing. In my camera view I do the image processing inside onPreviewFrame, where I loop through each pixel and then find the summed Y value of each frame. I then save this into a CSV file and everything works perfectly. Except, I also want to store the time elapsed between each frame in the CSV, along with each y-sum.
ImageProcessing
public abstract class ImageProcessing {
public static int YUV420SPtoYSum(byte[] yuv420sp, int width, int height){
if(yuv420sp == null)
return 0;
int sum = 0;
final int ii = 0;
final int ij = 0;
final int di = +1;
final int dj = +1;
int y = 0;
for (int i = 0, ci = ii; i < height; ++i, ci += di) {
for (int j = 0, cj = ij; j < width; ++j, cj += dj) {
y = (0xff & ((int) yuv420sp[ci * width + cj]));
//y = y < 16 ? 16 : y;
sum += y;
}
}
return sum;
}
}
CameraView Class
public class CameraView extends SurfaceView implements SurfaceHolder.Callback, Camera.PreviewCallback {
private static final String TAG = "CameraView";
Camera.Size mPreviewSize;
List<Camera.Size> mSupportedPreviewSizes;
private SurfaceHolder mHolder;
private Camera mCamera;
int img_Y_Avg, img_U_Avg, img_V_Avg;
public interface PreviewReadyCallback {
void onPreviewFrame(int yAverage, int uAverage, int vAverage); // Any value you want to get
}
PreviewReadyCallback mPreviewReadyCallback = null;
public void setOnPreviewReady(PreviewReadyCallback cb) {
mPreviewReadyCallback = cb;
}
public CameraView(Context context, Camera camera){
super(context);
mCamera = camera;
//mCamera.setDisplayOrientation(90);
mSupportedPreviewSizes = mCamera.getParameters().getSupportedPreviewSizes();
for(Camera.Size str: mSupportedPreviewSizes)
Log.e(TAG, str.width + "/" + str.height);
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
}
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder){
try{
mCamera.setPreviewDisplay(surfaceHolder);
mCamera.startPreview();
}catch(Exception e){
Log.d("ERROR","Camera error on SurfaceCreated" + e.getMessage());
}
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i2, int i3) {
if(mHolder.getSurface() == null)
return;
try{
mCamera.stopPreview();
}catch(Exception e) {
Log.d("ERROR","Camera error on SurfaceChanged" + e.getMessage());
}
try {
Camera.Parameters parameters = mCamera.getParameters();
parameters.setPreviewSize(176, 144);
mCamera.cancelAutoFocus();
//parameters.setAutoExposureLock(false);
mCamera.setDisplayOrientation(90);
//set fps
parameters.setPreviewFpsRange(16000, 16000);
//on flash
parameters.setFlashMode(parameters.FLASH_MODE_AUTO);
//parameters.setAutoWhiteBalanceLock(true);
parameters.setPreviewFormat(ImageFormat.NV21);
/*if (parameters.getMaxNumMeteringAreas() > 0){ // check that metering areas are supported
List<Camera.Area> meteringAreas = new ArrayList<Camera.Area>();
Rect areaRect1 = new Rect(-50, -50, 50, 50); // specify an area in center of image
meteringAreas.add(new Camera.Area(areaRect1, 1000)); // set weight to 60%
parameters.setMeteringAreas(meteringAreas);
}*/
//mCamera.setDisplayOrientation(90);
mCamera.setParameters(parameters);
mCamera.setPreviewDisplay(mHolder);
mCamera.setPreviewCallback(this);
mCamera.startPreview();
} catch (IOException e) {
Log.d("ERROR","Camera error on SurfaceChanged" + e.getMessage());
}
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
if (mCamera != null){
//mCamera.stopPreview();
//mCamera.release();
}
}
@Override
public void onPreviewFrame(byte[] data, Camera camera){
//check if data is null
if (data == null)
throw new NullPointerException();
Camera.Size size = camera.getParameters().getPreviewSize();
//check if size is null
if(size == null)
throw new NullPointerException();
//set resolution of camera view to optimal setting
int width = size.width;
int height = size.height;
Log.d("Resolution ", " "+String.valueOf(width)+" "+String.valueOf(height));
//call ImageProcess on the data to decode YUV420SP to RGB
img_Y_Avg = ImageProcessing.YUV420SPtoYSum(data, width, height);
img_U_Avg = ImageProcessing.YUV420SPtoUSum(data, width, height);
img_V_Avg = ImageProcessing.YUV420SPtoVSum(data, width, height);
mPreviewReadyCallback.onPreviewFrame(img_Y_Avg, img_U_Avg, img_V_Avg);
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec){
final int width = resolveSize(getSuggestedMinimumWidth(),widthMeasureSpec);
final int height = resolveSize(getSuggestedMinimumHeight(), heightMeasureSpec);
setMeasuredDimension(width, height);
if(mSupportedPreviewSizes != null){
mPreviewSize = getOptimalPreviewSize(mSupportedPreviewSizes, width, height);
Log.d("Resolution ", " "+mPreviewSize);
}
}
private Camera.Size getOptimalPreviewSize(List<Camera.Size> sizes, int w, int h){
final double ASPECT_TOLERANCE = 0.1;
double targetRatio = (double) h / w;
if (sizes == null) return null;
Camera.Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
int targetHeight = h;
for (Camera.Size size : sizes){
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
if (Math.abs(size.height - targetHeight) < minDiff){
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
if (optimalSize == null){
minDiff = Double.MAX_VALUE;
for (Camera.Size size : sizes){
if (Math.abs(size.height - targetHeight) < minDiff){
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
}
return optimalSize;
}
}
MainActivity
public class MainActivity extends AppCompatActivity implements CameraView.PreviewReadyCallback {
private static Camera camera = null;
private CameraView image = null;
private LineChart bp_graph;
private static int img_Y_Avg = 0, img_U_Avg = 0, img_V_Avg = 0;
double valueY, valueU, valueV;
Handler handler;
private int readingRemaining = 600;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
bp_graph = (LineChart)findViewById(R.id.graph);
graph_features();
//open camera
try {
camera = Camera.open();
handler = new Handler();
final Runnable runnable = new Runnable() {
@Override
public void run() {
camera.stopPreview();
camera.release();
}
};
handler.postDelayed(runnable, 30000);
} catch (Exception e) {
Log.d("ERROR", "Failed to get camera: " + e.getMessage());
}
if (camera != null) {
image = new CameraView(this, camera);
FrameLayout camera_view = (FrameLayout) findViewById(R.id.camera_view);
camera_view.addView(image);
image.setOnPreviewReady(this);
}
//close camera button
ImageButton imgClose = (ImageButton) findViewById(R.id.imgClose);
imgClose.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
System.exit(0);
}
});
}
@Override
protected void onResume(){
super.onResume();
}
@Override
protected void onPause() {
super.onPause();
}
private void graph_features(){
bp_graph.getDescription().setEnabled(false);
//enable touch gesture
bp_graph.setTouchEnabled(true);
//enable scaling
bp_graph.setDragEnabled(true);
//scale and drag
bp_graph.setScaleEnabled(true);
bp_graph.setDrawGridBackground(false);
//enable pinch zoom in
bp_graph.setPinchZoom(true);
//alternative background color
bp_graph.setBackgroundColor(Color.LTGRAY);
//work on data
LineData lineData = new LineData();
lineData.setValueTextColor(Color.WHITE);
//add data to line chart
bp_graph.setData(lineData);
//animate
bp_graph.animateX(600);
Legend legend = bp_graph.getLegend();
//custom legend
legend.setForm(Legend.LegendForm.LINE);
legend.setTextColor(Color.WHITE);
XAxis x1 = bp_graph.getXAxis();
x1.setTextColor(Color.WHITE);
x1.setDrawGridLines(false);
x1.setAvoidFirstLastClipping(true);
x1.setPosition(XAxis.XAxisPosition.BOTTOM);
YAxis y1 = bp_graph.getAxisLeft();
y1.setTextColor(Color.WHITE);
y1.setAxisMaximum(5000000);
y1.setAxisMinimum(100000);
y1.setDrawGridLines(true);
//y1.setInverted(true);
YAxis y2 = bp_graph.getAxisRight();
y2.setEnabled(false);
}
//method to create set
private ILineDataSet createSet() {
LineDataSet set = new LineDataSet(null, "PPG");
set.setLineWidth(1.0f);
set.setCircleRadius(1.0f);
set.setColor(Color.rgb(240, 99, 99));
set.setCircleColor(Color.rgb(240, 99, 99));
set.setHighLightColor(Color.rgb(190, 190, 190));
set.setAxisDependency(YAxis.AxisDependency.LEFT);
set.setValueTextSize(1.0f);
return set;
}
@Override
public void onPreviewFrame(int ySum, int uSum, int vSum) {
img_Y_Avg = ySum;
img_U_Avg = uSum;
img_V_Avg = vSum;
//set value of Y on the text view
TextView valueOfY = (TextView)findViewById(R.id.valueY);
valueY = img_Y_Avg;
valueOfY.setText(Double.toString(img_Y_Avg));
//set value of U on the text view
TextView valueOfU = (TextView)findViewById(R.id.valueU);
valueU = img_U_Avg;
valueOfU.setText(Double.toString(img_U_Avg));
//set value of V on the text view
TextView valueOfV = (TextView)findViewById(R.id.valueV);
valueV = img_V_Avg;
valueOfV.setText(Double.toString(img_V_Avg));
//store value to array list
ArrayList<Integer> yAverage = new ArrayList<Integer>();
yAverage.add(img_Y_Avg);
//Log.d("MyEntryData", String.valueOf(yAverage));
//store u values to array
ArrayList<Integer> uAverage = new ArrayList<Integer>();
uAverage.add(img_U_Avg);
//Log.d("MyEntryData", String.valueOf(uAverage));
//store u values to array
ArrayList<Integer> vAverage = new ArrayList<Integer>();
vAverage.add(img_V_Avg);
//Log.d("MyEntryData", String.valueOf(vAverage));
float start = System.nanoTime();
int diff = (int) ((System.currentTimeMillis()/1000) - start);
ArrayList<Integer> difference = new ArrayList<Integer>();
difference.add(diff);
Log.d("time", String.valueOf(start));
ArrayList<Integer> getValues = new ArrayList<Integer>();
for(int i = 0; i < uAverage.size(); i++) {
//getValues.add(difference.get(i));
getValues.add(yAverage.get(i));
getValues.add(uAverage.get(i));
getValues.add(vAverage.get(i));
}
String filename = new SimpleDateFormat("yyyyMMddHHmm'.csv'").format(new Date());
File directoryDownload = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS);
File logDir = new File (directoryDownload, "bpReader"); //Creates a new folder in DOWNLOAD directory
logDir.mkdirs();
File file = new File(logDir, filename);
FileOutputStream outputStream = null;
try {
outputStream = new FileOutputStream(file, true);
//outputStream = openFileOutput(filename, Context.MODE_PRIVATE);
for (int i = 0; i < uAverage.size(); i += 3) {
//outputStream.write((getValues.get(i) + ",").getBytes());
outputStream.write((getValues.get(i) + ",").getBytes());
outputStream.write((getValues.get(i + 1) + ",").getBytes());
outputStream.write((getValues.get(i + 2) + "\n").getBytes());
}
outputStream.close();
} catch (Exception e) {
e.printStackTrace();
}
Log.d("MyEntryData", String.valueOf(getValues));
handler = new Handler();
final Runnable runnable = new Runnable() {
@Override
public void run() {
readingRemaining = readingRemaining -1;
if (readingRemaining > 0){
plotGraph(img_Y_Avg);
//plotGraph(img_U_Avg);
//plotGraph(img_V_Avg);
}
}
};
handler.postDelayed(runnable, 100);
//Log.d("MyEntryData", String.valueOf(img_Y_Avg +" "+ img_U_Avg+" "+img_V_Avg));
}
private void plotGraph(double graph_data){
LineData data = bp_graph.getData();
if (data != null){
ILineDataSet set = data.getDataSetByIndex(0);
if (set == null){
set = createSet();
data.addDataSet(set);
}
//add a new value
int randomDataSetIndex = (int) (Math.random() * data.getDataSetCount());
float yValue = (float) graph_data;
data.addEntry(new Entry(data.getDataSetByIndex(randomDataSetIndex).getEntryCount(), yValue), randomDataSetIndex);
//notify chart data have changed
bp_graph.notifyDataSetChanged();
bp_graph.setVisibleXRangeMaximum(100);
//scroll to last entry
bp_graph.moveViewTo(data.getEntryCount() - 7, 50f, YAxis.AxisDependency.RIGHT);
}
}}
I am doing the image processing inside the CameraView class and then, with the help of an interface, sending the values to MainActivity, where I generate the .csv file and the graph.
How do I get the time difference between each frame (or two consecutive y-sums) that is being generated?
Like this:
long startTime = System.currentTimeMillis();
img_Y_Avg = ImageProcessing.YUV420SPtoYSum(data, width, height);
img_U_Avg = ImageProcessing.YUV420SPtoUSum(data, width, height);
img_V_Avg = ImageProcessing.YUV420SPtoVSum(data, width, height);
long finishTime = System.currentTimeMillis();
mPreviewReadyCallback.onPreviewFrame(img_Y_Avg, img_U_Avg, img_V_Avg,finishTime-startTime);
And add this parameter to the interface:
public interface PreviewReadyCallback {
void onPreviewFrame(int yAverage, int uAverage, int vAverage, long time);
}
Then save it to CSV like the other values...
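For example, a minimal sketch of that CSV step, assuming the long time value from the extended callback is a parameter of onPreviewFrame in MainActivity (outputStream and getValues are from the question's code):
// Inside the existing write loop, append the frame time as a fourth column
for (int i = 0; i < getValues.size(); i += 3) {
    outputStream.write((getValues.get(i) + ",").getBytes());     // y-sum
    outputStream.write((getValues.get(i + 1) + ",").getBytes()); // u-sum
    outputStream.write((getValues.get(i + 2) + ",").getBytes()); // v-sum
    outputStream.write((time + "\n").getBytes());                // ms between frames
}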
Update:
That was your calculation time. If you want the time between frames:
In the class, declare:
long oldTime=System.currentTimeMillis();
and then
img_Y_Avg = ImageProcessing.YUV420SPtoYSum(data, width, height);
img_U_Avg = ImageProcessing.YUV420SPtoUSum(data, width, height);
img_V_Avg = ImageProcessing.YUV420SPtoVSum(data, width, height);
long newTime = System.currentTimeMillis();
mPreviewReadyCallback.onPreviewFrame(img_Y_Avg, img_U_Avg, img_V_Avg,newTime-oldTime);
oldTime=newTime;

"JavaCameraView" Camera Orientation Issue

I am new to Android development. I'm making a simple app which has one Activity. In this Activity I'm trying to get frames from the camera and process them in real time, but I'm having a camera orientation issue, i.e. the received image is rotated 90 degrees. There are many solutions available for this problem, but I found none for the "JavaCameraView". So please help me solve the orientation issue specifically for "JavaCameraView".
This is my code:
public class MainActivity extends AppCompatActivity implements CameraBridgeViewBase.CvCameraViewListener2 {
private static final String TAG = "MainActivity";
JavaCameraView javaCameraView;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS: {
javaCameraView.enableView();
}
break;
default: {
super.onManagerConnected(status);
}
break;
}
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
//if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)!= PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, CAMERA_PERMISSION_REQUEST_CODE);
//}
javaCameraView = (JavaCameraView) findViewById(R.id.java_camera_view);
javaCameraView.setVisibility(View.VISIBLE);
javaCameraView.setCvCameraViewListener(this);
}
@Override
protected void onPause() {
super.onPause();
if (javaCameraView != null)
javaCameraView.disableView();
}
@Override
protected void onDestroy() {
super.onDestroy();
if (javaCameraView != null)
javaCameraView.disableView();
}
@Override
protected void onResume() {
super.onResume();
if (OpenCVLoader.initDebug()) {
Log.i(TAG, "OpenCV loaded successfully.");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
} else {
Log.i(TAG, "OpenCV not loaded.");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_1_0, this, mLoaderCallback);
}
}
@Override
public void onCameraViewStarted(int width, int height) {
frame = new Mat(height, width, CV_8UC4);
}
@Override
public void onCameraViewStopped() {
frame.release();
}
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
frame = inputFrame.rgba();
//frame=processFrame();
//Imgcodecs.imwrite("/storage/emulated/0/aaaaa+.jpg", frame);
return frame;
}
}
I have solved this issue.
Use the class below instead of JavaCameraView:
public class PortraitCameraView extends CameraBridgeViewBase implements Camera.PreviewCallback {
private static final int MAGIC_TEXTURE_ID = 10;
private static final String TAG = "JavaCameraView";
private byte mBuffer[];
private Mat[] mFrameChain;
private int mChainIdx = 0;
private Thread mThread;
private boolean mStopThread;
public Camera mCamera;
protected JavaCameraFrame[] mCameraFrame;
private SurfaceTexture mSurfaceTexture;
private int mCameraId;
Handler handler;
boolean callBuffer = false;
Camera.Size bestSize = null;
Camera.Size pictureSize = null;
private LayoutMode mLayoutMode;
private int mCenterPosX = -1;
private int mCenterPosY;
public static enum LayoutMode {
FitToParent, // Scale to the size that no side is larger than the parent
NoBlank // Scale to the size that no side is smaller than the parent
}
public static class JavaCameraSizeAccessor implements ListItemAccessor {
public int getWidth(Object obj) {
Camera.Size size = (Camera.Size) obj;
return size.width;
}
public int getHeight(Object obj) {
Camera.Size size = (Camera.Size) obj;
return size.height;
}
}
public PortraitCameraView(Context context, int cameraId) {
super(context, cameraId);
}
public PortraitCameraView(Context context, AttributeSet attrs) {
super(context, attrs);
}
protected boolean initializeCamera(int width, int height) {
handler = new Handler();
Log.d(TAG, "Initialize java camera");
boolean result = true;
synchronized (this) {
mCamera = null;
boolean connected = false;
int numberOfCameras = android.hardware.Camera.getNumberOfCameras();
android.hardware.Camera.CameraInfo cameraInfo = new android.hardware.Camera.CameraInfo();
for (int i = 0; i < numberOfCameras; i++) {
android.hardware.Camera.getCameraInfo(i, cameraInfo);
if (cameraInfo.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) {
try {
mCamera = Camera.open(i);
mCameraId = i;
connected = true;
} catch (RuntimeException e) {
Log.e(TAG, "Camera #" + i + " failed to open: " + e.getMessage());
}
if (connected) break;
}
}
if (mCamera == null) return false;
/* Now set camera parameters */
try {
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
List<Camera.Size> Picturesizes = params.getSupportedPictureSizes();
pictureSize = Picturesizes.get(0);
List<Camera.Size> sizeList = sizes;
bestSize = sizeList.get(0);
Log.d(TAG, "getSupportedPreviewSizes() " + bestSize.width + " " + bestSize.height);
Log.d(TAG, "Picturesizes() " + pictureSize.width + " " + pictureSize.height);
// bestSize.width = GlobalArea.display_width;
//// bestSize.height = GlobalArea.display_height;
for (int i = 1; i < sizeList.size(); i++) {
if ((sizeList.get(i).width * sizeList.get(i).height) > (bestSize.width * bestSize.height)) {
Log.d(TAG, "getSupportedPreviewSizes() " + sizeList.get(i).width + " " + sizeList.get(i).height);
bestSize = sizeList.get(i);
}
}
if (sizes != null) {
/* Select the size that fits surface considering maximum size allowed */
Size frameSize = calculateCameraFrameSize(sizes, new JavaCameraSizeAccessor(), height, width); //use turn around values here to get the correct prev size for portrait mode
params.setPreviewFormat(ImageFormat.NV21);
Log.e(TAG, "Set preview size to " + Integer.valueOf((int) bestSize.width) + " x " + Integer.valueOf((int) bestSize.height));
Log.e(TAG, "Set preview size to " + width + " x " + height);
params.setPreviewSize((int) bestSize.width, (int) bestSize.height);
params.setPictureSize((int) pictureSize.width, (int) pictureSize.height);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH)
params.setRecordingHint(true);
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes != null && FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
boolean hasFlash = SevenBitsDemo.getInstance().getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FLASH);
if (hasFlash) {
// mOpenCvCameraView.flashOn();
params.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH);
}
List<int[]> ints = params.getSupportedPreviewFpsRange();
for (int i = 0; i < ints.size(); i++) {
Log.e("privew size", String.valueOf(ints.get(i).length));
}
// params.setPreviewFpsRange(10000,10000);
mCamera.setParameters(params);
// boolean mSurfaceConfiguring = adjustSurfaceLayoutSize(bestSize, true, width, height);
params = mCamera.getParameters();
GlobalArea.preview_size = params.getPreviewSize();
mFrameWidth = params.getPreviewSize().height; //the frame width and height of the super class are used to generate the cached bitmap and they need to be the size of the resulting frame
mFrameHeight = params.getPreviewSize().width;
int realWidth = mFrameHeight; //the real width and height are the width and height of the frame received in onPreviewFrame ...
int realHeight = mFrameWidth;
if ((getLayoutParams().width == LinearLayout.LayoutParams.MATCH_PARENT) && (getLayoutParams().height == LinearLayout.LayoutParams.MATCH_PARENT))
mScale = Math.min(((float) height) / mFrameHeight, ((float) width) / mFrameWidth);
else
mScale = 0;
if (mFpsMeter != null) {
mFpsMeter.setResolution((int) pictureSize.width, (int) pictureSize.height);
}
int size = mFrameWidth * mFrameHeight;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
mCamera.addCallbackBuffer(mBuffer);
mCamera.setPreviewCallbackWithBuffer(this);
mFrameChain = new Mat[2];
mFrameChain[0] = new Mat(realHeight + (realHeight / 2), realWidth, CvType.CV_8UC1); //the frame chane is still in landscape
mFrameChain[1] = new Mat(realHeight + (realHeight / 2), realWidth, CvType.CV_8UC1);
AllocateCache();
mCameraFrame = new JavaCameraFrame[2];
mCameraFrame[0] = new JavaCameraFrame(mFrameChain[0], mFrameWidth, mFrameHeight); //the camera frame is in portrait
mCameraFrame[1] = new JavaCameraFrame(mFrameChain[1], mFrameWidth, mFrameHeight);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
mSurfaceTexture = new SurfaceTexture(MAGIC_TEXTURE_ID);
mCamera.setPreviewTexture(mSurfaceTexture);
} else
mCamera.setPreviewDisplay(null);
/* Finally we are ready to start the preview */
Log.d(TAG, "startPreview");
mCamera.startPreview();
} else
result = false;
} catch (Exception e) {
result = false;
e.printStackTrace();
}
}
return result;
}
protected void releaseCamera() {
synchronized (this) {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
}
mCamera = null;
if (mFrameChain != null) {
mFrameChain[0].release();
mFrameChain[1].release();
}
if (mCameraFrame != null) {
mCameraFrame[0].release();
mCameraFrame[1].release();
}
}
}
@Override
protected boolean connectCamera(int width, int height) {
/* 1. We need to instantiate camera
* 2. We need to start thread which will be getting frames
*/
/* First step - initialize camera connection */
Log.d(TAG, "Connecting to camera");
if (!initializeCamera(width, height))
return false;
/* now we can start update thread */
Log.d(TAG, "Starting processing thread");
mStopThread = false;
mThread = new Thread(new CameraWorker());
mThread.start();
return true;
}
protected void disconnectCamera() {
/* 1. We need to stop thread which updating the frames
* 2. Stop camera and release it
*/
Log.d(TAG, "Disconnecting from camera");
try {
mStopThread = true;
Log.d(TAG, "Notify thread");
synchronized (this) {
this.notify();
}
Log.d(TAG, "Wating for thread");
if (mThread != null)
mThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
} finally {
mThread = null;
}
/* Now release camera */
releaseCamera();
}
public void onPreviewFrame(byte[] frame, Camera arg1) {
synchronized (this) {
mFrameChain[1 - mChainIdx].put(0, 0, frame);
this.notify();
}
if (mCamera != null)
mCamera.addCallbackBuffer(mBuffer);
}
private class JavaCameraFrame implements CvCameraViewFrame {
private Mat mYuvFrameData;
private Mat mRgba;
private int mWidth;
private int mHeight;
private Mat mRotated;
public Mat gray() {
if (mRotated != null) mRotated.release();
mRotated = mYuvFrameData.submat(0, mWidth, 0, mHeight);
// submat with reversed width and height because it's done on the landscape frame
mRotated = mRotated.t();
Core.flip(mRotated, mRotated, 1);
return mRotated;
}
public Mat rgba() {
Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2BGR_NV12, 4);
if (mRotated != null) mRotated.release();
mRotated = mRgba.t();
Core.flip(mRotated, mRotated, 1);
return mRotated;
}
public JavaCameraFrame(Mat Yuv420sp, int width, int height) {
super();
mWidth = width;
mHeight = height;
mYuvFrameData = Yuv420sp;
mRgba = new Mat();
}
public void release() {
mRgba.release();
if (mRotated != null) mRotated.release();
}
}
private class CameraWorker implements Runnable {
public void run() {
do {
synchronized (PortraitCameraView.this) {
try {
PortraitCameraView.this.wait();
} catch (InterruptedException e) {
Log.e(TAG, "CameraWorker interrupted", e);
}
}
if (!mStopThread) {
if (!mFrameChain[mChainIdx].empty())
deliverAndDrawFrame(mCameraFrame[mChainIdx]);
mChainIdx = 1 - mChainIdx;
}
} while (!mStopThread);
Log.d(TAG, "Finish processing thread");
}
}
}
So now use PortraitCameraView in your XML and Java files, because I have converted the JavaCameraView to portrait mode in this class.
You can use the setMaxFrameSize() function:
javaCameraView.setMaxFrameSize(480, 640);
480 is the width and 640 is the height. Now the JavaCameraView is in portrait.
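As a lighter-weight alternative (a hedged sketch, not part of the answers above): you can rotate each frame inside onCameraFrame with a transpose plus flip. Note that the stock JavaCameraView caches a bitmap of the original frame size, so the rotated Mat may fail to render in the preview; this is mainly useful when you only need the rotated frame for processing or saving:
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat rgba = inputFrame.rgba();
    Mat rotated = rgba.t();         // transpose swaps rows and columns
    Core.flip(rotated, rotated, 1); // mirror horizontally to finish a 90-degree clockwise turn
    return rotated;
}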

Black Screenshot android OpenCv

Currently I am using OpenCV. What I want to happen is that when the user presses the screen, the screen/object from the displayed camera will highlight all necessary/similar colors; see: Color-Blob-Detector. Video without the screenshot.
Below there is a small camera icon for triggering a screenshot (an ImageButton); however, when it takes a screenshot, the resulting picture is always black. What I want is that while the necessary colors are being highlighted and I press the screenshot button programmatically (the ImageButton), it takes a picture of the screen (including those highlighted objects) and saves the image to a file.
I have also tried using the camera control from OpenCV for taking a picture, but it is hard for me to merge the two.
public class Camera extends Activity implements OnTouchListener, CvCameraViewListener2 {
private static final String TAG = "OCVSample::Activity";
private boolean mIsColorSelected = false;
private Mat mRgba;
private Scalar mBlobColorRgba;
private Scalar mBlobColorHsv;
private ColorBlobDetector mDetector;
private Mat mSpectrum;
private Size SPECTRUM_SIZE;
private Scalar CONTOUR_COLOR;
private CameraBridgeViewBase mOpenCvCameraView;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
mOpenCvCameraView.setOnTouchListener(Camera.this);
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public Camera() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.colorblobdetector);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.activity_java_surface_view);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
if (!OpenCVLoader.initDebug()) {
Log.e(this.getClass().getSimpleName(), " OpenCVLoader.initDebug(), not working.");
} else {
Log.d(this.getClass().getSimpleName(), " OpenCVLoader.initDebug(), working.");
}
}
public void camera_b(View v)
{
String path = Environment.getExternalStorageDirectory().toString() + "/" + "hellp.jpg";
v = getWindow().getDecorView().getRootView();
v.setDrawingCacheEnabled(true);
Bitmap bitmap = Bitmap.createBitmap(v.getDrawingCache());
v.setDrawingCacheEnabled(false);
OutputStream out = null;
File imageFile = new File(path);
try {
out = new FileOutputStream(imageFile);
// choose JPEG format
bitmap.compress(Bitmap.CompressFormat.JPEG, 90, out);
out.flush();
} catch (FileNotFoundException e) {
// manage exception
} catch (IOException e) {
// manage exception
} finally {
try {
if (out != null) {
out.close();
}
} catch (Exception exc) {
}
}
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
mRgba = new Mat(height, width, CvType.CV_8UC4);
mDetector = new ColorBlobDetector();
mSpectrum = new Mat();
mBlobColorRgba = new Scalar(255);
mBlobColorHsv = new Scalar(255);
SPECTRUM_SIZE = new Size(200, 64);
CONTOUR_COLOR = new Scalar(255,0,0,255);
}
public void onCameraViewStopped() {
mRgba.release();
}
public boolean onTouch(View v, MotionEvent event) {
int cols = mRgba.cols();
int rows = mRgba.rows();
int xOffset = (mOpenCvCameraView.getWidth() - cols) / 2;
int yOffset = (mOpenCvCameraView.getHeight() - rows) / 2;
int x = (int)event.getX() - xOffset;
int y = (int)event.getY() - yOffset;
Log.i(TAG, "Touch image coordinates: (" + x + ", " + y + ")");
if ((x < 0) || (y < 0) || (x > cols) || (y > rows)) return false;
Rect touchedRect = new Rect();
touchedRect.x = (x>4) ? x-4 : 0;
touchedRect.y = (y>4) ? y-4 : 0;
touchedRect.width = (x+4 < cols) ? x + 4 - touchedRect.x : cols - touchedRect.x;
touchedRect.height = (y+4 < rows) ? y + 4 - touchedRect.y : rows - touchedRect.y;
Mat touchedRegionRgba = mRgba.submat(touchedRect);
Mat touchedRegionHsv = new Mat();
Imgproc.cvtColor(touchedRegionRgba, touchedRegionHsv, Imgproc.COLOR_RGB2HSV_FULL);
// Calculate average color of touched region
mBlobColorHsv = Core.sumElems(touchedRegionHsv);
int pointCount = touchedRect.width*touchedRect.height;
for (int i = 0; i < mBlobColorHsv.val.length; i++)
mBlobColorHsv.val[i] /= pointCount;
mBlobColorRgba = converScalarHsv2Rgba(mBlobColorHsv);
Log.i(TAG, "Touched rgba color: (" + mBlobColorRgba.val[0] + ", " + mBlobColorRgba.val[1] +
", " + mBlobColorRgba.val[2] + ", " + mBlobColorRgba.val[3] + ")");
mDetector.setHsvColor(mBlobColorHsv);
Imgproc.resize(mDetector.getSpectrum(), mSpectrum, SPECTRUM_SIZE);
mIsColorSelected = true;
touchedRegionRgba.release();
touchedRegionHsv.release();
return false; // don't need subsequent touch events
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
if (mIsColorSelected) {
mDetector.process(mRgba);
List<MatOfPoint> contours = mDetector.getContours();
Log.e(TAG, "Contours count: " + contours.size());
Imgproc.drawContours(mRgba, contours, -1, CONTOUR_COLOR);
Mat colorLabel = mRgba.submat(4, 68, 4, 68);
colorLabel.setTo(mBlobColorRgba);
Mat spectrumLabel = mRgba.submat(4, 4 + mSpectrum.rows(), 70, 70 + mSpectrum.cols());
mSpectrum.copyTo(spectrumLabel);
}
return mRgba;
}
private Scalar converScalarHsv2Rgba(Scalar hsvColor) {
Mat pointMatRgba = new Mat();
Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);
return new Scalar(pointMatRgba.get(0, 0));
}
}
As you can see, camera_b is where all the screenshot logic happens. The XML for the color-blob detector: ColorBlobXML (I cannot paste the XML code here).
Image of the output. As you can see, it can take a screenshot of the button, but not the expected image. (This image was made using power + volume down.)
Put this in the AndroidManifest.xml
<uses-permission android:name="android.permission.CAMERA"/>
And add this in onCreate:
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.colorblobdetector);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.activity_java_surface_view);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
mOpenCvCameraView.setCameraIndex(0); //0 = back and 1 = frontal
mOpenCvCameraView.enableView();
if (!OpenCVLoader.initDebug()) {
Log.e(this.getClass().getSimpleName(), " OpenCVLoader.initDebug(), not working.");
} else {
Log.d(this.getClass().getSimpleName(), " OpenCVLoader.initDebug(), working.");
}
}
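A different approach worth noting (a hedged sketch, not the fix above): the preview is rendered to a SurfaceView, so the View drawing cache used in camera_b stays black. Saving the last processed Mat directly avoids that. This assumes you keep a field with the latest frame from onCameraFrame, and it uses Imgcodecs.imwrite (Highgui.imwrite on OpenCV 2.4):
// Field updated at the end of onCameraFrame: mLastFrame = mRgba;
private volatile Mat mLastFrame;

public void camera_b(View v) {
    Mat frame = mLastFrame;
    if (frame == null) return;                            // nothing processed yet
    Mat bgr = new Mat();
    Imgproc.cvtColor(frame, bgr, Imgproc.COLOR_RGBA2BGR); // imwrite expects BGR order
    String path = Environment.getExternalStorageDirectory().toString() + "/hellp.jpg";
    Imgcodecs.imwrite(path, bgr);                         // saves the highlighted frame
    bgr.release();
}
This captures exactly what onCameraFrame produced, including the drawn contours.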

OpenCV4Android: How to find the movements of head

I'm using the Viola-Jones method to detect faces; whenever the face is tilted, there is a possibility that the algorithm won't work properly.
I want to detect those movements when it is not able to detect the face.
Can I implement this using motion detection, or are there other methods to find the movements?
Thanks in advance,
Yes, you can do it by capturing the detection points and comparing them.
This will help you detect the movements, and then you can compare the x and y positions.
class DetectNose extends JPanel implements KeyListener, ActionListener {
private static final long serialVersionUID = 1L;
private static JFrame frame;
private BufferedImage image;
private CascadeClassifier face_cascade;
private Point center;
private JLabel label;
private Image scalledItemImage;
private double customY = 0;
private double customX = 0;
private Iterator<InputStream> iterator;
private ArrayList<BufferedImage> listOfCachedImages;
private int imageIndex = 1;
private int customZ = 0;
private Size size;
private Image scalledItemImageBackup;
private Point center1;
private int imgSize = 35;
private boolean isLocked;
public DetectNose(JFrame frame, List<Long> listOfOrnaments) {
super();
this.frame = frame;
this.frame.setFocusable(true);
this.frame.requestFocusInWindow();
this.frame.addKeyListener(this);
File f = null;
try {
System.out.println(System.getProperty("os.name"));
if (System.getProperty("os.name").contains("Windows")) {
f = new File("res/opencv_lib_win/opencv_java249.dll");
System.load(f.getAbsolutePath());
System.out.println("Loaded :" + f.getAbsolutePath());
} else {
f = new File("res/opencv_lib/libopencv_java246.so");
System.load(f.getAbsolutePath());
System.out.println("Loaded :" + f.getAbsolutePath());
}
} catch (Exception ex) {
ex.printStackTrace();
}
List<InputStream> ornaments = DatabaseHandler
.getOrnamentsImagesByListOfOrnaments(listOfOrnaments);
iterator = ornaments.iterator();
listOfCachedImages = new ArrayList<BufferedImage>();
try {
while (iterator.hasNext()) {
InputStream inputStream = iterator.next();
listOfCachedImages.add(ImageIO.read(inputStream));
}
setFirstOrnament();
} catch (IOException e) {
e.printStackTrace();
}
label = new JLabel(new ImageIcon(scalledItemImage));
add(label);
face_cascade = new CascadeClassifier(
"res/cascades/haarcascade_frontalface_alt_tree.xml");
if (face_cascade.empty()) {
System.out.println("--(!)Error loading A\n");
return;
} else {
System.out.println("Face classifier loaded up");
}
}
private void setFirstOrnament() {
scalledItemImage = listOfCachedImages.get(imageIndex - 1);
scalledItemImageBackup = scalledItemImage.getScaledInstance(700, 700,
BufferedImage.TYPE_INT_RGB);
scalledItemImage = scalledItemImage.getScaledInstance(imgSize, imgSize,
BufferedImage.TYPE_INT_RGB);
repaint();
System.out.println("imageIndex = " + imageIndex);
}
private void setPrevOrnament() {
if (imageIndex > 1) {
imageIndex--;
scalledItemImage = listOfCachedImages.get(imageIndex - 1);
scalledItemImageBackup = scalledItemImage.getScaledInstance(700,
700, BufferedImage.TYPE_INT_RGB);
scalledItemImage = scalledItemImage.getScaledInstance(imgSize,
imgSize, BufferedImage.TYPE_INT_RGB);
GoLiveIntermediator.nextButton.setEnabled(true);
repaint();
revalidate();
System.out.println("imageIndex = " + imageIndex);
} else {
GoLiveIntermediator.prevButton.setEnabled(false);
}
}
private void setNextOrnament() {
if (listOfCachedImages.size() > imageIndex) {
imageIndex++;
scalledItemImage = listOfCachedImages.get(imageIndex - 1);
scalledItemImageBackup = scalledItemImage.getScaledInstance(700,
700, BufferedImage.TYPE_INT_RGB);
scalledItemImage = scalledItemImage.getScaledInstance(imgSize,
imgSize, BufferedImage.TYPE_INT_RGB);
GoLiveIntermediator.prevButton.setEnabled(true);
repaint();
revalidate();
System.out.println("imageIndex = " + imageIndex);
} else {
GoLiveIntermediator.nextButton.setEnabled(false);
}
}
private BufferedImage getimage() {
return image;
}
public void setimage(BufferedImage newimage) {
image = newimage;
return;
}
public BufferedImage matToBufferedImage(Mat matrix) {
int cols = matrix.cols();
int rows = matrix.rows();
int elemSize = (int) matrix.elemSize();
byte[] data = new byte[cols * rows * elemSize];
int type;
matrix.get(0, 0, data);
switch (matrix.channels()) {
case 1:
type = BufferedImage.TYPE_BYTE_GRAY;
break;
case 3:
type = BufferedImage.TYPE_3BYTE_BGR;
// bgr to rgb
byte b;
for (int i = 0; i < data.length; i = i + 3) {
b = data[i];
data[i] = data[i + 2];
data[i + 2] = b;
}
break;
default:
return null;
}
BufferedImage image2 = new BufferedImage(cols, rows, type);
image2.getRaster().setDataElements(0, 0, cols, rows, data);
return image2;
}
public void paintComponent(Graphics g) {
try {
this.frame.requestFocusInWindow();
BufferedImage temp = getimage();
g.drawImage(temp, 0, 0, temp.getWidth(), temp.getHeight() + 50,
this);
} catch (Exception ex) {
System.out.print("Trying to load images...");
}
}
public Mat detect(Mat inputframe) {
Mat mRgba = new Mat();
Mat mGrey = new Mat();
MatOfRect faces = new MatOfRect();
inputframe.copyTo(mRgba);
inputframe.copyTo(mGrey);
Imgproc.cvtColor(mRgba, mGrey, Imgproc.COLOR_BGR2GRAY);
Imgproc.equalizeHist(mGrey, mGrey);
try {
face_cascade.detectMultiScale(mGrey, faces);
} catch (Exception e) {
System.out.print(".");
}
frame.setLocationRelativeTo(null);
frame.setResizable(false);
for (Rect rect : faces.toArray()) {
center = new Point(rect.x + rect.width * 0.5, rect.y + rect.height
* 0.5); // You can use this to point out as first detection and last detection
size = new Size(rect.width * 0.5, rect.height * 0.5);
Core.ellipse(mRgba, center, size, 0, 0, 360, new Scalar(255, 0,
255), 1, 8, 0);
repaint();
}
return mRgba;
}
Here, center is the first detection point; in the same way you can find the last detection point from the image.
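As a hedged sketch of that comparison (prevCenter is a field name invented for this example; center comes from the detect() loop above), you could classify the movement direction between two detections like this:
// Compare consecutive face centers to estimate head movement
private Point prevCenter;

private String classifyMovement(Point center) {
    if (prevCenter == null) {
        prevCenter = center;
        return "first detection";
    }
    double dx = center.x - prevCenter.x;
    double dy = center.y - prevCenter.y;
    prevCenter = center;
    if (Math.abs(dx) > Math.abs(dy))
        return dx > 0 ? "moved right" : "moved left";
    else
        return dy > 0 ? "moved down" : "moved up";
}
When the cascade stops detecting the face entirely, the last stored prevCenter tells you on which side it left the frame.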
