Black screenshot with OpenCV on Android

I am currently using OpenCV. What I want to happen is this: when the user touches the screen (that is, an object shown in the camera preview), the app highlights all similar colors (see: Color-Blob-Detector). The video itself works fine without the screenshot.
Below the preview there is a small camera icon (an ImageButton) for triggering a screenshot, but the resulting picture is always black. What I want is that, while the selected colors are being highlighted, pressing the screenshot button (the ImageButton) programmatically takes a picture of the screen (including the highlighted objects) and saves the image to a file.
I have also tried using the camera controls from OpenCV to take a picture, but it is hard for me to merge the two approaches.
public class Camera extends Activity implements OnTouchListener, CvCameraViewListener2 {
private static final String TAG = "OCVSample::Activity";
private boolean mIsColorSelected = false;
private Mat mRgba;
private Scalar mBlobColorRgba;
private Scalar mBlobColorHsv;
private ColorBlobDetector mDetector;
private Mat mSpectrum;
private Size SPECTRUM_SIZE;
private Scalar CONTOUR_COLOR;
private CameraBridgeViewBase mOpenCvCameraView;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
mOpenCvCameraView.setOnTouchListener(Camera.this);
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public Camera() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.colorblobdetector);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.activity_java_surface_view);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
if (!OpenCVLoader.initDebug()) {
Log.e(this.getClass().getSimpleName(), " OpenCVLoader.initDebug(), not working.");
} else {
Log.d(this.getClass().getSimpleName(), " OpenCVLoader.initDebug(), working.");
}
}
public void camera_b(View v)
{
String path = Environment.getExternalStorageDirectory().toString() + "/" + "hellp.jpg";
v = getWindow().getDecorView().getRootView();
v.setDrawingCacheEnabled(true);
Bitmap bitmap = Bitmap.createBitmap(v.getDrawingCache());
v.setDrawingCacheEnabled(false);
OutputStream out = null;
File imageFile = new File(path);
try {
out = new FileOutputStream(imageFile);
// choose JPEG format
bitmap.compress(Bitmap.CompressFormat.JPEG, 90, out);
out.flush();
} catch (FileNotFoundException e) {
// manage exception
} catch (IOException e) {
// manage exception
} finally {
try {
if (out != null) {
out.close();
}
} catch (Exception exc) {
}
}
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
mRgba = new Mat(height, width, CvType.CV_8UC4);
mDetector = new ColorBlobDetector();
mSpectrum = new Mat();
mBlobColorRgba = new Scalar(255);
mBlobColorHsv = new Scalar(255);
SPECTRUM_SIZE = new Size(200, 64);
CONTOUR_COLOR = new Scalar(255,0,0,255);
}
public void onCameraViewStopped() {
mRgba.release();
}
public boolean onTouch(View v, MotionEvent event) {
int cols = mRgba.cols();
int rows = mRgba.rows();
int xOffset = (mOpenCvCameraView.getWidth() - cols) / 2;
int yOffset = (mOpenCvCameraView.getHeight() - rows) / 2;
int x = (int)event.getX() - xOffset;
int y = (int)event.getY() - yOffset;
Log.i(TAG, "Touch image coordinates: (" + x + ", " + y + ")");
if ((x < 0) || (y < 0) || (x > cols) || (y > rows)) return false;
Rect touchedRect = new Rect();
touchedRect.x = (x>4) ? x-4 : 0;
touchedRect.y = (y>4) ? y-4 : 0;
touchedRect.width = (x+4 < cols) ? x + 4 - touchedRect.x : cols - touchedRect.x;
touchedRect.height = (y+4 < rows) ? y + 4 - touchedRect.y : rows - touchedRect.y;
Mat touchedRegionRgba = mRgba.submat(touchedRect);
Mat touchedRegionHsv = new Mat();
Imgproc.cvtColor(touchedRegionRgba, touchedRegionHsv, Imgproc.COLOR_RGB2HSV_FULL);
// Calculate average color of touched region
mBlobColorHsv = Core.sumElems(touchedRegionHsv);
int pointCount = touchedRect.width*touchedRect.height;
for (int i = 0; i < mBlobColorHsv.val.length; i++)
mBlobColorHsv.val[i] /= pointCount;
mBlobColorRgba = converScalarHsv2Rgba(mBlobColorHsv);
Log.i(TAG, "Touched rgba color: (" + mBlobColorRgba.val[0] + ", " + mBlobColorRgba.val[1] +
", " + mBlobColorRgba.val[2] + ", " + mBlobColorRgba.val[3] + ")");
mDetector.setHsvColor(mBlobColorHsv);
Imgproc.resize(mDetector.getSpectrum(), mSpectrum, SPECTRUM_SIZE);
mIsColorSelected = true;
touchedRegionRgba.release();
touchedRegionHsv.release();
return false; // don't need subsequent touch events
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
if (mIsColorSelected) {
mDetector.process(mRgba);
List<MatOfPoint> contours = mDetector.getContours();
Log.e(TAG, "Contours count: " + contours.size());
Imgproc.drawContours(mRgba, contours, -1, CONTOUR_COLOR);
Mat colorLabel = mRgba.submat(4, 68, 4, 68);
colorLabel.setTo(mBlobColorRgba);
Mat spectrumLabel = mRgba.submat(4, 4 + mSpectrum.rows(), 70, 70 + mSpectrum.cols());
mSpectrum.copyTo(spectrumLabel);
}
return mRgba;
}
private Scalar converScalarHsv2Rgba(Scalar hsvColor) {
Mat pointMatRgba = new Mat();
Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);
return new Scalar(pointMatRgba.get(0, 0));
}
}
As you can see, camera_b is where the screenshot happens. The XML for the color blob detector: ColorBlobXML (I cannot paste the XML code here).
Image of the output. As you can see, it captures the button but not the expected image. (This image was taken using Power + Volume Down.)

Put this in the AndroidManifest.xml
<uses-permission android:name="android.permission.CAMERA"/>
And add this in onCreate():
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.colorblobdetector);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.activity_java_surface_view);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
mOpenCvCameraView.setCameraIndex(0); // 0 = back camera, 1 = front camera
mOpenCvCameraView.enableView();
if (!OpenCVLoader.initDebug()) {
Log.e(this.getClass().getSimpleName(), " OpenCVLoader.initDebug(), not working.");
} else {
Log.d(this.getClass().getSimpleName(), " OpenCVLoader.initDebug(), working.");
}
}
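One more note on the black screenshot itself: getDrawingCache() only captures what the view hierarchy draws, and the camera preview of JavaCameraView lives on a SurfaceView surface that is composited outside the view hierarchy, which is why the saved bitmap comes out black. An alternative is to keep the last processed frame (mRgba already contains the drawn contours) and save that Mat. Below is a minimal sketch under these assumptions: a new field mLastFrame allocated as new Mat() in onCameraViewStarted() and filled with mRgba.copyTo(mLastFrame) at the end of onCameraFrame(); synchronization between the camera thread and the UI thread is omitted for brevity. Saving to external storage also needs the WRITE_EXTERNAL_STORAGE permission (and a runtime request on API 23+).
// Sketch: save the last processed OpenCV frame instead of the view drawing cache.
// Assumes a field "private Mat mLastFrame;" kept up to date by onCameraFrame().
// Extra imports needed: org.opencv.android.Utils, android.graphics.Bitmap.
public void camera_b(View v) {
    if (mLastFrame == null || mLastFrame.empty()) return;

    // Convert the RGBA Mat (camera image plus drawn contours) into a Bitmap.
    Bitmap bitmap = Bitmap.createBitmap(mLastFrame.cols(), mLastFrame.rows(),
            Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(mLastFrame, bitmap);

    File imageFile = new File(Environment.getExternalStorageDirectory(), "hellp.jpg");
    OutputStream out = null;
    try {
        out = new FileOutputStream(imageFile);
        bitmap.compress(Bitmap.CompressFormat.JPEG, 90, out);
        out.flush();
    } catch (IOException e) {
        Log.e(TAG, "Could not save the frame", e);
    } finally {
        try { if (out != null) out.close(); } catch (IOException ignored) {}
    }
}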

Related

Android OpenCV Color Blob Detector crashes with a fatal error

I'm using the Color Blob Detector sample code of OpenCV (Sample Code), but the app is crashing at Core.multiply(contour, new Scalar(4,4), contour); with a fatal error
libc: Fatal signal 11 (SIGSEGV), code 1, fault addr 0x0 in tid 11016 (Thread-4857)
and a detailed log is available to get an idea of the exact reason. In another part of the code I was using Core.add(fg, bg, markers); and the app gave the same fatal error there as well. What is the issue? Please guide me towards a solution. I'm using OpenCV 3.1.
The code is as follows:
package com.iu.kamraapp.utils;
import android.util.Log;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;
public class ColorBlobDetector {
// Lower and Upper bounds for range checking in HSV color space
private Scalar mLowerBound = new Scalar(0);
private Scalar mUpperBound = new Scalar(0);
// Minimum contour area in percent for contours filtering
private static double mMinContourArea = 0.1;
// Color radius for range checking in HSV color space
private Scalar mColorRadius = new Scalar(25,50,50,0);
private Mat mSpectrum = new Mat();
private List<MatOfPoint> mContours = new ArrayList<MatOfPoint>();
// Cache
Mat mPyrDownMat = new Mat();
Mat mHsvMat = new Mat();
Mat mMask = new Mat();
Mat mDilatedMask = new Mat();
Mat mHierarchy = new Mat();
public void setColorRadius(Scalar radius) {
mColorRadius = radius;
}
public void setHsvColor(Scalar hsvColor) {
double minH = (hsvColor.val[0] >= mColorRadius.val[0]) ? hsvColor.val[0]-mColorRadius.val[0] : 0;
double maxH = (hsvColor.val[0]+mColorRadius.val[0] <= 255) ? hsvColor.val[0]+mColorRadius.val[0] : 255;
mLowerBound.val[0] = minH;
mUpperBound.val[0] = maxH;
mLowerBound.val[1] = hsvColor.val[1] - mColorRadius.val[1];
mUpperBound.val[1] = hsvColor.val[1] + mColorRadius.val[1];
mLowerBound.val[2] = hsvColor.val[2] - mColorRadius.val[2];
mUpperBound.val[2] = hsvColor.val[2] + mColorRadius.val[2];
mLowerBound.val[3] = 0;
mUpperBound.val[3] = 255;
Mat spectrumHsv = new Mat(1, (int)(maxH-minH), CvType.CV_8UC3);
for (int j = 0; j < maxH-minH; j++) {
byte[] tmp = {(byte)(minH+j), (byte)255, (byte)255};
spectrumHsv.put(0, j, tmp);
}
Imgproc.cvtColor(spectrumHsv, mSpectrum, Imgproc.COLOR_HSV2RGB_FULL, 4);
}
public Mat getSpectrum() {
return mSpectrum;
}
public void setMinContourArea(double area) {
mMinContourArea = area;
}
public void process(Mat rgbaImage) {
try {
Imgproc.pyrDown(rgbaImage, mPyrDownMat);
Imgproc.pyrDown(mPyrDownMat, mPyrDownMat);
Imgproc.cvtColor(mPyrDownMat, mHsvMat, Imgproc.COLOR_RGB2HSV_FULL);
Core.inRange(mHsvMat, mLowerBound, mUpperBound, mMask);
Imgproc.dilate(mMask, mDilatedMask, new Mat());
List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
Imgproc.findContours(mDilatedMask, contours, mHierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
// Find max contour area
double maxArea = 0;
Iterator<MatOfPoint> each = contours.iterator();
while (each.hasNext()) {
MatOfPoint wrapper = each.next();
double area = Imgproc.contourArea(wrapper);
if (area > maxArea)
maxArea = area;
}
// Filter contours by area and resize to fit the original image size
mContours.clear();
each = contours.iterator();
while (each.hasNext()) {
MatOfPoint contour = each.next();
if (Imgproc.contourArea(contour) > mMinContourArea * maxArea) {
Core.multiply(contour, new Scalar(4, 4), contour); // issue is here
mContours.add(contour);
}
}
}
catch (Exception e) { e.printStackTrace(); }
}
public List<MatOfPoint> getContours() {
return mContours;
}
}
package com.iu.kamraapp;
import android.content.Context;
import android.content.res.Configuration;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.MotionEvent;
import android.view.SurfaceView;
import android.view.View;
import com.iu.kamraapp.utils.AppGlobal;
import com.iu.kamraapp.utils.ColorBlobDetector;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.JavaCameraView;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import java.util.ArrayList;
import java.util.List;
public class ColorBlobActivity extends AppCompatActivity implements View.OnTouchListener,CameraBridgeViewBase.CvCameraViewListener2 {
private static final String TAG = "MainActivity";
Context context;
int screenWidth, screenHeight;
private CameraBridgeViewBase mOpenCvCameraView;
private boolean mIsColorSelected = false;
private Mat mRgba;
private Scalar mBlobColorRgba;
private Scalar mBlobColorHsv;
private ColorBlobDetector mDetector;
private Mat mSpectrum;
private Size SPECTRUM_SIZE;
private Scalar CONTOUR_COLOR;
BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS: {
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
mOpenCvCameraView.setOnTouchListener(ColorBlobActivity.this);
}
break;
default: {
super.onManagerConnected(status);
}
break;
}
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
context = this;
setContentView(R.layout.activity_main);
screenWidth = AppGlobal.getScreenResolution(context, true);
screenHeight = AppGlobal.getScreenResolution(context, false);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_java_surface_view);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
if (newConfig.orientation == Configuration.ORIENTATION_LANDSCAPE) {
screenWidth = AppGlobal.getScreenResolution(context, false);
screenHeight = AppGlobal.getScreenResolution(context, true);
} else if (newConfig.orientation == Configuration.ORIENTATION_PORTRAIT) {
screenWidth = AppGlobal.getScreenResolution(context, true);
screenHeight = AppGlobal.getScreenResolution(context, false);
}
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_1_0, this, mLoaderCallback);
OpenCVLoader.initDebug(true);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onCameraViewStarted(int width, int height) {
mRgba = new Mat(height, width, CvType.CV_8UC4);
mDetector = new ColorBlobDetector();
mSpectrum = new Mat();
mBlobColorRgba = new Scalar(255);
mBlobColorHsv = new Scalar(255);
SPECTRUM_SIZE = new Size(200, 64);
CONTOUR_COLOR = new Scalar(255,0,0,255);
}
public void onCameraViewStopped() {
mRgba.release();
}
public boolean onTouch(View v, MotionEvent event) {
try {
int cols = mRgba.cols();
int rows = mRgba.rows();
int xOffset = (mOpenCvCameraView.getWidth() - cols) / 2;
int yOffset = (mOpenCvCameraView.getHeight() - rows) / 2;
int x = (int) event.getX() - xOffset;
int y = (int) event.getY() - yOffset;
Log.i(TAG, "Touch image coordinates: (" + x + ", " + y + ")");
if ((x < 0) || (y < 0) || (x > cols) || (y > rows)) return false;
Rect touchedRect = new Rect();
touchedRect.x = (x > 4) ? x - 4 : 0;
touchedRect.y = (y > 4) ? y - 4 : 0;
touchedRect.width = (x + 4 < cols) ? x + 4 - touchedRect.x : cols - touchedRect.x;
touchedRect.height = (y + 4 < rows) ? y + 4 - touchedRect.y : rows - touchedRect.y;
Mat touchedRegionRgba = mRgba.submat(touchedRect);
Mat touchedRegionHsv = new Mat();
Imgproc.cvtColor(touchedRegionRgba, touchedRegionHsv, Imgproc.COLOR_RGB2HSV_FULL);
// Calculate average color of touched region
mBlobColorHsv = Core.sumElems(touchedRegionHsv);
int pointCount = touchedRect.width * touchedRect.height;
for (int i = 0; i < mBlobColorHsv.val.length; i++)
mBlobColorHsv.val[i] /= pointCount;
mBlobColorRgba = converScalarHsv2Rgba(mBlobColorHsv);
Log.i(TAG, "Touched rgba color: (" + mBlobColorRgba.val[0] + ", " + mBlobColorRgba.val[1] +
", " + mBlobColorRgba.val[2] + ", " + mBlobColorRgba.val[3] + ")");
mDetector.setHsvColor(mBlobColorHsv);
Imgproc.resize(mDetector.getSpectrum(), mSpectrum, SPECTRUM_SIZE);
mIsColorSelected = true;
touchedRegionRgba.release();
touchedRegionHsv.release();
} catch (Exception e) { e.printStackTrace(); }
return false; // don't need subsequent touch events
}
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
if (mIsColorSelected) {
mDetector.process(mRgba);
List<MatOfPoint> contours = mDetector.getContours();
Log.d(TAG, "Contours count: " + contours.size());
Imgproc.drawContours(mRgba, contours, -1, CONTOUR_COLOR);
Mat colorLabel = mRgba.submat(4, 68, 4, 68);
colorLabel.setTo(mBlobColorRgba);
Mat spectrumLabel = mRgba.submat(4, 4 + mSpectrum.rows(), 70, 70 + mSpectrum.cols());
mSpectrum.copyTo(spectrumLabel);
}
return mRgba;
}
private Scalar converScalarHsv2Rgba(Scalar hsvColor) {
Mat pointMatRgba = new Mat();
Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);
return new Scalar(pointMatRgba.get(0, 0));
}
}
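If the crash really originates in the Core.multiply() call on the contour Mat, one possible workaround is to avoid that native call entirely and scale the contour points in plain Java instead; the factor of 4 compensates for the two pyrDown() calls, exactly as in the original sample. This is a sketch of a replacement for the marked loop in process() (it needs import org.opencv.core.Point), offered as a workaround rather than a confirmed fix:
// Hypothetical replacement for the contour loop: scale the down-sampled
// contours back to full resolution without calling Core.multiply().
mContours.clear();
for (MatOfPoint contour : contours) {
    if (Imgproc.contourArea(contour) > mMinContourArea * maxArea) {
        Point[] points = contour.toArray();     // copy the contour points into Java objects
        for (Point p : points) {
            p.x *= 4;                           // two pyrDown() calls shrank the image 4x
            p.y *= 4;
        }
        mContours.add(new MatOfPoint(points));  // rebuild the contour at full resolution
    }
}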

How to add an image as a mask in the camera frame (Android OpenCV)

I am new to OpenCV for Android and am trying to develop an application that detects objects. I used the face detection sample from OpenCV SDK 2.4.11. I want to add an image as a mask in the camera frame after detecting a specific logo. Here is my code:
public class MainActivity extends Activity implements CvCameraViewListener2, OnTouchListener {
private static final String TAG = "OCVSample::Activity";
private static final Scalar FACE_RECT_COLOR = new Scalar(0, 255, 0, 255);
private CameraBridgeViewBase mOpenCvCameraView;
private int mWidth;
private int mHeight;
private int mAbsoluteFaceSize =0;
private CascadeClassifier cascadeClassifier;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
mOpenCvCameraView.setOnTouchListener(MainActivity.this);
InputStream is = getResources().openRawResource(R.raw.cascade_a3_23);
File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
File mCascadeFile = new File(cascadeDir, "cascade_a3_23.xml");
FileOutputStream os = null;
try {
os = new FileOutputStream(mCascadeFile);
byte[] buffer = new byte[4096];
int bytesRead;
while ((bytesRead = is.read(buffer)) != -1) {
os.write(buffer, 0, bytesRead);
}
is.close();
os.close();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
cascadeClassifier= new CascadeClassifier(mCascadeFile.getAbsolutePath());
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public MainActivity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
LinearLayout linearLayout=new LinearLayout(this);
linearLayout.setLayoutParams(new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
mOpenCvCameraView = new JavaCameraView(this, CameraBridgeViewBase.CAMERA_ID_BACK);
mOpenCvCameraView.setLayoutParams(new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
linearLayout.addView(mOpenCvCameraView);
setContentView(linearLayout);
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_11, this, mLoaderCallback);
super.onResume();
}
public void onCameraViewStarted(int width, int height) {
if (mWidth != width || mHeight != height) {
mWidth = width;
mHeight = height;
}
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
Mat mRgba = inputFrame.rgba();
Mat mGrey = inputFrame.gray();
MatOfRect faces = new MatOfRect();
int height = mGrey.rows();
if (Math.round(height * 0.2) > 0) {
mAbsoluteFaceSize = (int) Math.round(height * 0.2);
}
cascadeClassifier.detectMultiScale(mGrey, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
Rect[] facesArray = faces.toArray();
for (int i = 0; i < facesArray.length; i++)
Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
return mRgba;
}
and I want to add this image as a mask in my camera view.
Can anyone help me?
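A common way to paste an overlay on each detected rectangle is to resize the overlay image to the detection and copy it into the corresponding submat of the RGBA frame. A rough sketch under these assumptions: a field mOverlay loaded once from a drawable (R.drawable.mask is a placeholder name), and an opaque paste; real transparency would need copyTo() with a mask built from the overlay's alpha channel.
// Sketch: overlay an image on every detected rectangle.
// Load the overlay once, e.g. in onCameraViewStarted():
//   Bitmap bmp = BitmapFactory.decodeResource(getResources(), R.drawable.mask); // placeholder drawable
//   mOverlay = new Mat();
//   Utils.bitmapToMat(bmp, mOverlay);  // org.opencv.android.Utils, gives an RGBA Mat
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    Mat mRgba = inputFrame.rgba();
    Mat mGrey = inputFrame.gray();
    MatOfRect faces = new MatOfRect();
    cascadeClassifier.detectMultiScale(mGrey, faces, 1.1, 2, 2,
            new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
    for (Rect rect : faces.toArray()) {
        Mat roi = mRgba.submat(rect);                   // region the overlay should cover
        Mat resized = new Mat();
        Imgproc.resize(mOverlay, resized, roi.size());  // scale the overlay to the detection
        resized.copyTo(roi);                            // opaque paste into the frame
        resized.release();
    }
    return mRgba;
}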

OpenCV4Android: How to detect the movements of the head

I'm using the Viola-Jones method to detect faces; whenever the face is tilted there is a chance the algorithm won't work properly.
I want to detect those movements when it cannot detect the face.
Can I implement this using motion detection, or is there another method to find the movements?
Thanks in advance.
Yes, you can do it by capturing the detections and comparing them.
The code below will help you detect the face; you can then compare the x and y positions across frames (see the short sketch after the listing).
class DetectNose extends JPanel implements KeyListener, ActionListener {
private static final long serialVersionUID = 1L;
private static JFrame frame;
private BufferedImage image;
private CascadeClassifier face_cascade;
private Point center;
private JLabel label;
private Image scalledItemImage;
private double customY = 0;
private double customX = 0;
private Iterator<InputStream> iterator;
private ArrayList<BufferedImage> listOfCachedImages;
private int imageIndex = 1;
private int customZ = 0;
private Size size;
private Image scalledItemImageBackup;
private Point center1;
private int imgSize = 35;
private boolean isLocked;
public DetectNose(JFrame frame, List<Long> listOfOrnaments) {
super();
this.frame = frame;
this.frame.setFocusable(true);
this.frame.requestFocusInWindow();
this.frame.addKeyListener(this);
File f = null;
try {
System.out.println(System.getProperty("os.name"));
if (System.getProperty("os.name").contains("Windows")) {
f = new File("res/opencv_lib_win/opencv_java249.dll");
System.load(f.getAbsolutePath());
System.out.println("Loaded :" + f.getAbsolutePath());
} else {
f = new File("res/opencv_lib/libopencv_java246.so");
System.load(f.getAbsolutePath());
System.out.println("Loaded :" + f.getAbsolutePath());
}
} catch (Exception ex) {
ex.printStackTrace();
}
List<InputStream> ornaments = DatabaseHandler
.getOrnamentsImagesByListOfOrnaments(listOfOrnaments);
iterator = ornaments.iterator();
listOfCachedImages = new ArrayList<BufferedImage>();
try {
while (iterator.hasNext()) {
InputStream inputStream = iterator.next();
listOfCachedImages.add(ImageIO.read(inputStream));
}
setFirstOrnament();
} catch (IOException e) {
e.printStackTrace();
}
label = new JLabel(new ImageIcon(scalledItemImage));
add(label);
face_cascade = new CascadeClassifier(
"res/cascades/haarcascade_frontalface_alt_tree.xml");
if (face_cascade.empty()) {
System.out.println("--(!)Error loading A\n");
return;
} else {
System.out.println("Face classifier loaded up");
}
}
private void setFirstOrnament() {
scalledItemImage = listOfCachedImages.get(imageIndex - 1);
scalledItemImageBackup = scalledItemImage.getScaledInstance(700, 700,
BufferedImage.TYPE_INT_RGB);
scalledItemImage = scalledItemImage.getScaledInstance(imgSize, imgSize,
BufferedImage.TYPE_INT_RGB);
repaint();
System.out.println("imageIndex = " + imageIndex);
}
private void setPrevOrnament() {
if (imageIndex > 1) {
imageIndex--;
scalledItemImage = listOfCachedImages.get(imageIndex - 1);
scalledItemImageBackup = scalledItemImage.getScaledInstance(700,
700, BufferedImage.TYPE_INT_RGB);
scalledItemImage = scalledItemImage.getScaledInstance(imgSize,
imgSize, BufferedImage.TYPE_INT_RGB);
GoLiveIntermediator.nextButton.setEnabled(true);
repaint();
revalidate();
System.out.println("imageIndex = " + imageIndex);
} else {
GoLiveIntermediator.prevButton.setEnabled(false);
}
}
private void setNextOrnament() {
if (listOfCachedImages.size() > imageIndex) {
imageIndex++;
scalledItemImage = listOfCachedImages.get(imageIndex - 1);
scalledItemImageBackup = scalledItemImage.getScaledInstance(700,
700, BufferedImage.TYPE_INT_RGB);
scalledItemImage = scalledItemImage.getScaledInstance(imgSize,
imgSize, BufferedImage.TYPE_INT_RGB);
GoLiveIntermediator.prevButton.setEnabled(true);
repaint();
revalidate();
System.out.println("imageIndex = " + imageIndex);
} else {
GoLiveIntermediator.nextButton.setEnabled(false);
}
}
private BufferedImage getimage() {
return image;
}
public void setimage(BufferedImage newimage) {
image = newimage;
return;
}
public BufferedImage matToBufferedImage(Mat matrix) {
int cols = matrix.cols();
int rows = matrix.rows();
int elemSize = (int) matrix.elemSize();
byte[] data = new byte[cols * rows * elemSize];
int type;
matrix.get(0, 0, data);
switch (matrix.channels()) {
case 1:
type = BufferedImage.TYPE_BYTE_GRAY;
break;
case 3:
type = BufferedImage.TYPE_3BYTE_BGR;
// bgr to rgb
byte b;
for (int i = 0; i < data.length; i = i + 3) {
b = data[i];
data[i] = data[i + 2];
data[i + 2] = b;
}
break;
default:
return null;
}
BufferedImage image2 = new BufferedImage(cols, rows, type);
image2.getRaster().setDataElements(0, 0, cols, rows, data);
return image2;
}
public void paintComponent(Graphics g) {
try {
this.frame.requestFocusInWindow();
BufferedImage temp = getimage();
g.drawImage(temp, 0, 0, temp.getWidth(), temp.getHeight() + 50,
this);
} catch (Exception ex) {
System.out.print("Trying to load images...");
}
}
public Mat detect(Mat inputframe) {
Mat mRgba = new Mat();
Mat mGrey = new Mat();
MatOfRect faces = new MatOfRect();
inputframe.copyTo(mRgba);
inputframe.copyTo(mGrey);
Imgproc.cvtColor(mRgba, mGrey, Imgproc.COLOR_BGR2GRAY);
Imgproc.equalizeHist(mGrey, mGrey);
try {
face_cascade.detectMultiScale(mGrey, faces);
} catch (Exception e) {
System.out.print(".");
}
frame.setLocationRelativeTo(null);
frame.setResizable(false);
for (Rect rect : faces.toArray()) {
center = new Point(rect.x + rect.width * 0.5, rect.y + rect.height
* 0.5); // You can use this to point out as first detection and last detection
size = new Size(rect.width * 0.5, rect.height * 0.5);
Core.ellipse(mRgba, center, size, 0, 0, 360, new Scalar(255, 0,
255), 1, 8, 0);
repaint();
}
return mRgba;
}
Here, center is the first detection point; in the same way you can find the last detection point from the image.
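To make the "compare the x and y positions" idea concrete, here is a small sketch that keeps the previous detection center and derives a rough movement direction from the difference. The field, the method name and the 5-pixel jitter threshold are made up for illustration:
// Hypothetical helper: call trackMovement(center) at the end of detect() for each face.
private Point lastCenter;

private void trackMovement(Point center) {
    if (lastCenter != null) {
        double dx = center.x - lastCenter.x;  // positive means the face moved right in the image
        double dy = center.y - lastCenter.y;  // positive means the face moved down
        if (Math.abs(dx) > 5 || Math.abs(dy) > 5) {       // ignore small jitter
            if (Math.abs(dx) > Math.abs(dy)) {
                System.out.println(dx > 0 ? "moved right" : "moved left");
            } else {
                System.out.println(dy > 0 ? "moved down" : "moved up");
            }
        }
    }
    lastCenter = new Point(center.x, center.y);           // remember for the next frame
}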

Android App Tracking Points: Video.calcOpticalFlow(...) method not working properly?

I am currently trying to create an Android app using OpenCV that allows the user to track points of moving objects with the smartphone camera. Analogous C++ code that does exactly what I am looking for can be found at the following link: OpticalFlow C++ Sample Code
I have been Googling and looking around on Stack Overflow, but I still can't figure out why my code is not working. I am able to place points on the screen every time I press on a certain spot, but the points stay motionless even as I move objects in front of the camera. The method used to calculate the optical flow is the following:
void org.opencv.video.Video.calcOpticalFlowPyrLK(Mat prevImg, Mat nextImg, MatOfPoint2f prevPts, MatOfPoint2f nextPts, MatOfByte status, MatOfFloat err)
I believe I am passing the exact parameters needed to calculate the optical flow of consecutive images, but for some reason it's not working. Below is my code:
package org.opencv.UActivity;
//INCLUDE FILES
...
public class U2Activity extends Activity implements OnTouchListener,CvCameraViewListener2{
private static final String TAG = "OCVSample::Activity";
private Mat nextGray,Rscale;
private Mat prevGray;
private MatOfPoint2f prev2D,next2D;
private MatOfByte status;
private MatOfFloat err;
private Scalar color;
private CameraBridgeViewBase mOpenCvCameraView;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
mOpenCvCameraView.setOnTouchListener(U2Activity.this);
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public U2Activity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.u2_surface_view);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.u2_activity_surface_view);
mOpenCvCameraView.setCvCameraViewListener(this);
color = new Scalar(0, 255, 0);
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
nextGray = new Mat(height, width, CvType.CV_8UC1); //unsigned char
Rscale = new Mat(height, width, CvType.CV_8UC1);
prevGray = new Mat(height, width, CvType.CV_8UC1);
prev2D = new MatOfPoint2f(new Point());
next2D = new MatOfPoint2f(new Point());
status = new MatOfByte();
err = new MatOfFloat();
}
public void onCameraViewStopped() {
nextGray.release();
Rscale.release();
}
public boolean onTouch(View v, MotionEvent event) {
int cols = nextGray.cols();
int rows = nextGray.rows();
int xOffset = (mOpenCvCameraView.getWidth() - cols) / 2;
int yOffset = (mOpenCvCameraView.getHeight() - rows) / 2;
int x = (int)event.getX() - xOffset;
int y = (int)event.getY() - yOffset;
if ((x < 0) || (y < 0) || (x > cols) || (y > rows)) return false;
prev2D.push_back(new MatOfPoint2f(new Point((double)x,(double)y)));
next2D.push_back(new MatOfPoint2f(new Point()));
return false; // don't need subsequent touch events
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
nextGray = inputFrame.gray(); //get current image
Rscale = nextGray; //make a copy of current image
if(prevGray.empty()) prevGray = nextGray; //on start there is no prevGray. Copy current.
Video.calcOpticalFlowPyrLK(prevGray,nextGray,prev2D,next2D,status,err); //Calc the Optical Flow
prevGray = nextGray; //Overwrite old Image (prevGray)
prev2D = next2D; //Overwrite old point coordinates
for(int i=0;i<next2D.toArray().length;i++){ //Draw the points in the image
Core.circle(Rscale, next2D.toArray()[i], 3, color);
}
return Rscale;
}
}
SOLVED:
I changed:
prevGray = nextGray;
prev2D = next2D;
to:
nextGray.copyTo(prevGray);
next2D.copyTo(prev2D);
The plain assignments only copy references, so prevGray ended up pointing at the very Mat that the camera view overwrites with the next frame, and the optical flow was computed between two identical images; copyTo() stores a real copy of the previous frame and the previous points. I hope this helps anyone encountering similar problems.

Unable to detect face using FaceDetector in android

I need to detect the user's face and also compare faces to authenticate in my application; for that I used the FaceDetector API to detect the user's face.
When I run my code it runs without any errors, but it always reports the detected face count as zero.
public class AndroidFaceDetectorActivity extends Activity {
private static final int TAKE_PICTURE_CODE = 100;
private static final int MAX_FACES = 5;
private Bitmap cameraBitmap = null;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
((Button)findViewById(R.id.take_picture)).setOnClickListener(btnClick);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if(TAKE_PICTURE_CODE == requestCode){
processCameraImage(data);
}
}
private void openCamera(){
Intent intent = new Intent(android.provider.MediaStore.ACTION_IMAGE_CAPTURE);
startActivityForResult(intent, TAKE_PICTURE_CODE);
}
private void processCameraImage(Intent intent){
setContentView(R.layout.detectlayout);
((Button)findViewById(R.id.detect_face)).setOnClickListener(btnClick);
ImageView imageView = (ImageView)findViewById(R.id.image_view);
cameraBitmap = (Bitmap)intent.getExtras().get("data");
imageView.setImageBitmap(cameraBitmap);
}
private void detectFaces(){
if(null != cameraBitmap){
Log.d("FACE_RECOGNITION","CHECK");
int width = cameraBitmap.getWidth();
int height = cameraBitmap.getHeight();
FaceDetector detector = new FaceDetector(width, height,AndroidFaceDetectorActivity.MAX_FACES);
Face[] faces = new Face[AndroidFaceDetectorActivity.MAX_FACES];
Bitmap bitmap565 = Bitmap.createBitmap(width, height, Config.RGB_565);
Paint ditherPaint = new Paint();
Paint drawPaint = new Paint();
ditherPaint.setDither(true);
drawPaint.setColor(Color.RED);
drawPaint.setStyle(Paint.Style.STROKE);
drawPaint.setStrokeWidth(2);
Canvas canvas = new Canvas();
canvas.setBitmap(bitmap565);
canvas.drawBitmap(cameraBitmap, 0, 0, ditherPaint);
int facesFound = detector.findFaces(bitmap565, faces);
PointF midPoint = new PointF();
float eyeDistance = 0.0f;
float confidence = 0.0f;
Log.i("FaceDetector", "Number of faces found: " + facesFound);
if(facesFound > 0)
{
for(int index=0; index<facesFound; ++index){
faces[index].getMidPoint(midPoint);
eyeDistance = faces[index].eyesDistance();
confidence = faces[index].confidence();
Log.i("FaceDetector",
"Confidence: " + confidence +
", Eye distance: " + eyeDistance +
", Mid Point: (" + midPoint.x + ", " + midPoint.y + ")");
canvas.drawRect((int)midPoint.x - eyeDistance ,
(int)midPoint.y - eyeDistance ,
(int)midPoint.x + eyeDistance,
(int)midPoint.y + eyeDistance, drawPaint);
}
}
String filepath = Environment.getExternalStorageDirectory() + "/facedetect" + System.currentTimeMillis() + ".jpg";
try {
FileOutputStream fos = new FileOutputStream(filepath);
bitmap565.compress(CompressFormat.JPEG, 90, fos);
fos.flush();
fos.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
ImageView imageView = (ImageView)findViewById(R.id.image_view);
imageView.setImageBitmap(bitmap565);
}
}
private View.OnClickListener btnClick = new View.OnClickListener() {
//@Override
public void onClick(View v) {
switch(v.getId()){
case R.id.take_picture: openCamera(); break;
case R.id.detect_face: detectFaces(); break;
}
}
};
}
What did I do wrong?
Or is there another way to do that?
Thanks
getExtras().get("data") for the MediaStore.ACTION_IMAGE_CAPTURE intent produces a very low-resolution bitmap (I believe it's 160x120 px), which works as a thumbnail but is not enough for face detection to do its job.
Normally face detection is OK on medium-res images (e.g. 640x480 px) that you can receive from Camera.PreviewCallback, but that way you need permissions and code that controls the camera in your app; you cannot use an intent for that.
Here is the official intro to face detection on Android: http://developer.android.com/guide/topics/media/camera.html#face-detection.
If you really prefer the intent approach, you may use getData() to find the captured image at its full resolution and convert it into a bitmap, like
cameraBitmap = BitmapFactory.decodeFile(data.getData().getPath());
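If the intent route is kept, here is a rough sketch of the full-resolution variant: pass an output file to the capture intent and decode that file in onActivityResult() instead of reading the thumbnail. The file location is an assumption, and on API 24+ the Uri would have to come from a FileProvider rather than Uri.fromFile():
// Sketch: request a full-resolution capture instead of the "data" thumbnail.
// Extra imports needed: android.net.Uri, android.provider.MediaStore,
// android.graphics.BitmapFactory, java.io.File.
private File outputFile;

private void openCamera() {
    outputFile = new File(getExternalFilesDir(null), "capture.jpg");     // hypothetical location
    Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
    intent.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(outputFile));  // full-size JPEG goes here
    startActivityForResult(intent, TAKE_PICTURE_CODE);
}

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == TAKE_PICTURE_CODE && resultCode == RESULT_OK) {
        cameraBitmap = BitmapFactory.decodeFile(outputFile.getAbsolutePath());
        // Downsample with BitmapFactory.Options.inSampleSize if memory is a concern.
    }
}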
