Android App Tracking Points: Video.calcOpticalFlowPyrLK(...) method not working properly?

I am currently trying to create an Android app using OpenCV that lets the user track points on moving objects with the smartphone camera. Analogous C++ code that does exactly what I am looking for can be found at the following link: OpticalFlow C++ Sample Code
I have been Googling and looking around on Stack Overflow, but I still can't figure out why my code is not working. I am able to place points on the screen every time I press a certain spot, but the points stay motionless even as I move objects in front of the camera. The method used to calculate the optical flow is the following:
void org.opencv.video.Video.calcOpticalFlowPyrLK(Mat prevImg, Mat nextImg, MatOfPoint2f prevPts, MatOfPoint2f nextPts, MatOfByte status, MatOfFloat err)
I believe I am passing the exact parameters needed to calculate the optical flow of consecutive images, but for some reason it's not working. Below is my code:
package org.opencv.UActivity;
//INCLUDE FILES
...
public class U2Activity extends Activity implements OnTouchListener,CvCameraViewListener2{
private static final String TAG = "OCVSample::Activity";
private Mat nextGray,Rscale;
private Mat prevGray;
private MatOfPoint2f prev2D,next2D;
private MatOfByte status;
private MatOfFloat err;
private Scalar color;
private CameraBridgeViewBase mOpenCvCameraView;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
mOpenCvCameraView.setOnTouchListener(U2Activity.this);
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public U2Activity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.u2_surface_view);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.u2_activity_surface_view);
mOpenCvCameraView.setCvCameraViewListener(this);
color = new Scalar(0, 255, 0);
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
nextGray = new Mat(height, width, CvType.CV_8UC1); //unsigned char
Rscale = new Mat(height, width, CvType.CV_8UC1);
prevGray = new Mat(height, width, CvType.CV_8UC1);
prev2D = new MatOfPoint2f(new Point());
next2D = new MatOfPoint2f(new Point());
status = new MatOfByte();
err = new MatOfFloat();
}
public void onCameraViewStopped() {
nextGray.release();
Rscale.release();
}
public boolean onTouch(View v, MotionEvent event) {
int cols = nextGray.cols();
int rows = nextGray.rows();
int xOffset = (mOpenCvCameraView.getWidth() - cols) / 2;
int yOffset = (mOpenCvCameraView.getHeight() - rows) / 2;
int x = (int)event.getX() - xOffset;
int y = (int)event.getY() - yOffset;
if ((x < 0) || (y < 0) || (x > cols) || (y > rows)) return false;
prev2D.push_back(new MatOfPoint2f(new Point((double)x,(double)y)));
next2D.push_back(new MatOfPoint2f(new Point()));
return false; // don't need subsequent touch events
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
nextGray = inputFrame.gray(); //get current image
Rscale = nextGray; //alias of the current image (not a deep copy)
if(prevGray.empty()) prevGray = nextGray; //on start there is no prevGray. Copy current.
Video.calcOpticalFlowPyrLK(prevGray,nextGray,prev2D,next2D,status,err); //Calc the Optical Flow
prevGray = nextGray; //Overwrite old Image (prevGray)
prev2D = next2D; //Overwrite old point coordinates
Point[] points = next2D.toArray(); //Draw the tracked points on the image
for (int i = 0; i < points.length; i++) {
Core.circle(Rscale, points[i], 3, color);
}
return Rscale;
}
}

SOLVED:
I changed:
prevGray = nextGray;
prev2D = next2D;
to:
nextGray.copyTo(prevGray);
next2D.copyTo(prev2D);
I hope it helps anyone encountering similar problems.
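For anyone wondering why the plain assignments broke the tracking: in OpenCV's Java bindings a Mat variable is only a handle to native memory, so prevGray = nextGray makes both names point at the same buffer, and the "previous" frame is silently overwritten by the next camera callback (calcOpticalFlowPyrLK then compares two identical images and finds no motion). A minimal desktop sketch of the difference, assuming the OpenCV native library is on the library path:
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
class MatAliasDemo {
static { System.loadLibrary(Core.NATIVE_LIBRARY_NAME); }
public static void main(String[] args) {
Mat next = Mat.zeros(2, 2, CvType.CV_8UC1);
Mat prevAlias = next; //same native buffer, no copy
Mat prevCopy = new Mat();
next.copyTo(prevCopy); //deep copy of the pixel data
next.setTo(new Scalar(255)); //simulate the next camera frame arriving
System.out.println("alias: " + prevAlias.get(0, 0)[0]); //255.0 - the "previous" frame is gone
System.out.println("copy: " + prevCopy.get(0, 0)[0]); //0.0 - the copy survived
}
}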

Related

Android Opencv save mat as a picture without drawn rectangle

I want to save the Mat mRgba as a picture with Imgcodecs.imwrite(Environment.getExternalStorageDirectory() + "/final-image.jpg", mRgba);, but it saves more than I want. I want to save the image without the rectangle Imgproc.rectangle(mRgba, new Point(touchedYD, touchedXL), new Point(touchedYU, touchedXR), Util.WHITE, 2); that is drawn on screen before saving. How can I achieve that?
Here is my code.
Fragment:
public class StageTwo extends Fragment implements CameraBridgeViewBase.CvCameraViewListener2, OnSwitchFragmentFromStageTwo {
@Override
public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
((Stages) getActivity()).onSwitchFragmentFromStageTwo = this;
view.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View v, MotionEvent event) {
// horizontal
if (-event.getX() + camLayHeight + (xCorrection * 10) < (camLayHeight / 2)) {
touchedXR = -event.getX() + camLayHeight + (xCorrection * 10);
if (touchedXR < 0) touchedXR = 0;
} else {
touchedXL = -event.getX() + camLayHeight + (xCorrection * 10);
if (touchedXL > camLayHeight) touchedXL = camLayHeight;
}
// vertical
if (event.getY() - (yCorrection * 10) < (camLayWidth / 2)) {
touchedYU = event.getY() - (yCorrection * 10);
if (touchedYU < 0) touchedYU = 0;
} else {
touchedYD = event.getY() - (yCorrection * 10);
if (touchedYD > camLayWidth) touchedYD = camLayWidth;
}
return true;
}
});
kamera = view.findViewById(R.id.java_surface_view);
kamera.setCvCameraViewListener(this);
Display display = getActivity().getWindowManager().getDefaultDisplay();
android.graphics.Point size = new android.graphics.Point();
display.getSize(size);
int height = size.y;
kamera.getLayoutParams().height = height / 2;
}
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
Core.transpose(mGray, mGray);
Core.flip(mGray, mGray, -1);
Imgproc.cvtColor(inputFrame.rgba(), mRgba, Imgproc.COLOR_RGBA2RGB, 1);
if (gridPref.equals(getString(R.string.yes))) {
Imgproc.line(mRgba, p1, p2, Util.BLUE);
Imgproc.line(mRgba, p3, p4, Util.BLUE);
}
Imgproc.rectangle(mRgba, new Point(touchedYD, touchedXL), new Point(touchedYU, touchedXR), Util.WHITE, 2);
rozmiar_y = (int) ((touchedYU - touchedYD));
rozmiar_x = (int) ((touchedXL - touchedXR));
if (rozmiar_x > rozmiar_y)
px_cm = (double) Math.round((rozmiar_x / Integer.parseInt(rozmiar)) * 100000) / 100000d;
if (rozmiar_x < rozmiar_y)
px_cm = (double) Math.round((rozmiar_y / Integer.parseInt(rozmiar)) * 100000) / 100000d;
return mRgba;
}
@Override
public void onCameraViewStarted(int width, int height) {
mRgba = new Mat(height, width, CvType.CV_8UC3);
new Mat(height, width, CvType.CV_8UC4);
mGray = new Mat(height, width, CvType.CV_8UC1);
camLayHeight = height; // smaller value, 480
camLayWidth = width;
touchedXL = camLayHeight / 2;
touchedXR = camLayHeight / 2;
touchedYD = camLayWidth / 2;
touchedYU = camLayWidth / 2;
}
@Override
public void onCameraViewStopped() {
}
@Override
public double onSwitchFragmentFromFragmentTwo() {
if (px_cm > 0.5) {
(...)
Imgcodecs.imwrite(Environment.getExternalStorageDirectory() + "/final-image.jpg", mRgba);
}
return px_cm;
}
}
Activity
OnSwitchFragmentFromStageTwo onSwitchFragmentFromStageTwo;
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.stages);
bottomNavigationView = findViewById(R.id.bottom_navigation);
bottomNavigationView.setItemIconTintList(null);
SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(Stages.this);
FragmentTransaction ft = getSupportFragmentManager().beginTransaction();
if (!sp.getBoolean("correctionDone", false))
ft.replace(R.id.content_frame, new StageZero(this));
else {
ft.replace(R.id.content_frame, new StageOne());
bottomNavigationView.setSelectedItemId(R.id.navigation_stage_one);
}
ft.commit();
SharedPreferences.Editor editor = sp.edit();
bottomNavigationView.setEnabled(false);
bottomNavigationView.setOnNavigationItemSelectedListener(new BottomNavigationView.OnNavigationItemSelectedListener() {
@Override
public boolean onNavigationItemSelected(@NonNull MenuItem item) {
(...)
if (bottomNavigationView.getSelectedItemId() == R.id.navigation_stage_two) {
if (onSwitchFragmentFromStageTwo.onSwitchFragmentFromFragmentTwo() <= 0.5) {
(...)
return false;
} else {
(...)
return true;
}
} else {
(...)
}
return true;
}
});
}
I was trying to solve this by setting touchedXL = 0; touchedXR = 0; touchedYD = 0; touchedYU = 0; right before saving, but it did not help; the picture is still saved with the rectangle. If you need anything more, just ask. Thank you in advance! :)
You may create a copy of mRgba before drawing the rectangle.
Add a new private class member mRgbNoRect:
private Mat mRgbNoRect; //mRgba before drawing rectangle
Initialize mRgbNoRect in onCameraViewStarted:
public void onCameraViewStarted(int width, int height) {
mRgba = new Mat(height, width, CvType.CV_8UC3);
mRgbNoRect = new Mat(height, width, CvType.CV_8UC3);
}
Create a copy of mRgba before drawing the rectangle in onCameraFrame:
Imgproc.cvtColor(inputFrameRgba, mRgba, Imgproc.COLOR_RGBA2RGB);
mRgba.copyTo(mRgbNoRect); //Copy mRgba content to mRgbNoRect before drawing a rectangle
Imgproc.rectangle(mRgba, new Point(20, 20), new Point(100, 100), new Scalar(255, 255, 255), 2);
Note: It's just an example (not your original code).
Add a "get" function getRgbNoRect():
public Mat getRgbNoRect() {
return mRgbNoRect;
}
Get mRgbNoRect and save it (example):
Mat rgbNoRect = sample.getRgbNoRect();
Imgcodecs.imwrite("rgbNoRect.png", rgbNoRect);
Here is a complete code sample (simple sample without a camera):
package myproject;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.CvType;
import org.opencv.core.Scalar;
import org.opencv.core.Point;
import org.opencv.imgproc.Imgproc;
import org.opencv.imgcodecs.Imgcodecs;
class Sample {
private Mat mRgba;
private Mat mRgbNoRect; //mRgba before drawing rectangle
static { System.loadLibrary(Core.NATIVE_LIBRARY_NAME); }
public Mat onCameraFrame() {
//Create RGBA matrix filled with green color - simulating inputFrame.rgba().
Mat inputFrameRgba = Mat.zeros(200, 250, CvType.CV_8UC4);
inputFrameRgba.setTo(new Scalar(0, 255, 0, 255));
Imgproc.cvtColor(inputFrameRgba, mRgba, Imgproc.COLOR_RGBA2RGB);
mRgba.copyTo(mRgbNoRect); //Copy mRgba content to mRgbNoRect before drawing a rectangle
Imgproc.rectangle(mRgba, new Point(20, 20), new Point(100, 100), new Scalar(255, 255, 255), 2);
return mRgba;
}
public Mat getRgbNoRect() {
return mRgbNoRect;
}
public void onCameraViewStarted(int width, int height) {
mRgba = new Mat(height, width, CvType.CV_8UC3);
mRgbNoRect = new Mat(height, width, CvType.CV_8UC3);
}
public static void main(String[] args) {
Sample sample = new Sample();
sample.onCameraViewStarted(250, 200);
Mat rgbWithRect = sample.onCameraFrame();
Mat rgbNoRect = sample.getRgbNoRect();
Imgcodecs.imwrite("rgbWithRect.png", rgbWithRect);
Imgcodecs.imwrite("rgbNoRect.png", rgbNoRect);
}
}
Notes:
The code sample was tested on Windows; I am not sure whether it can be executed on Android as is.
The last time I programmed in Java was many years ago, so I hope I didn't make any rookie mistakes.

How to play a sound on Android with a delay?

I'm doing a small school project on object detection by color. When the object arrives at the center of the screen, a sound must play. To keep the sound from playing continuously, I tried to put a delay on it. But every time the application reaches the point of playing the sound, it closes. I have already researched here and on other forums, and the solutions presented did not work.
public class MainActivity extends AppCompatActivity implements CameraBridgeViewBase.CvCameraViewListener2{
static {
if(!OpenCVLoader.initDebug()){
Log.d("TAG", "OpenCV not loaded");
} else {
Log.d("TAG", "OpenCV loaded");
}
}
public void voltatela(View v) {
setContentView(R.layout.activity_main);
}
Mat imgHVS, imgThresholded;
Scalar sc1, sc2;
JavaCameraView cameraView;
int largura, altura;
public void Verde(View v) {
sc1 = new Scalar(45, 20, 10);
sc2 = new Scalar(75, 255, 255);
irTelaCamera();
}
public void Azul(View v) {
sc1 = new Scalar(80, 50, 50);
sc2 = new Scalar(100, 255, 255);
irTelaCamera();
}
public void Vermelho(View v) {
sc1 = new Scalar(110, 100, 50);
sc2 = new Scalar(130, 255, 255);
irTelaCamera();
}
public void irTelaCamera(){
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
setContentView(R.layout.telacamera);
cameraView = (JavaCameraView)findViewById(R.id.cameraview);
cameraView.setCameraIndex(0); //0 for the back camera, 1 for the front camera
cameraView.setCvCameraViewListener(this);
cameraView.enableView();
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
DisplayMetrics displayMetrics = new DisplayMetrics();
WindowManager windowmanager = (WindowManager) getApplicationContext().getSystemService(Context.WINDOW_SERVICE);
windowmanager.getDefaultDisplay().getMetrics(displayMetrics);
}
@Override
protected void onPause() {
super.onPause();
cameraView.disableView();
}
@Override
public void onCameraViewStarted(int width, int height) {
imgHVS = new Mat(width,height, CvType.CV_16UC4);
imgThresholded = new Mat(width,height, CvType.CV_16UC4);
largura = width;
altura = height;
}
@Override
public void onCameraViewStopped() {
}
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
Point centrotela = new Point((largura*0.5),(altura*0.5));
final MediaPlayer som = MediaPlayer.create(this, R.raw.bip);
Imgproc.medianBlur(imgHVS,imgHVS,1);
Imgproc.cvtColor(inputFrame.rgba(), imgHVS,Imgproc.COLOR_BGR2HSV);
Core.inRange(imgHVS, sc1, sc2, imgThresholded);
Imgproc.GaussianBlur(imgThresholded, imgThresholded, new Size(3, 3), 1, 1);
Mat circles = new Mat();
double dp = 1.2d;
int minRadius = 20;
int maxRadius = 0;
double param1 = 100, param2 = 20;
int desvio = (int) (minRadius*0.5);
Imgproc.HoughCircles(imgThresholded, circles, Imgproc.HOUGH_GRADIENT, dp, imgThresholded.rows()/4, 100, 20, minRadius, maxRadius);
int numCircles = (circles.rows() == 0) ? 0 : circles.cols();
for (int i = 0; i < numCircles; i++) {
double[] circleCoordinates = circles.get(0, i);
int x = (int) circleCoordinates[0], y = (int) circleCoordinates[1];
Point center = new Point(x, y);
int radius = (int) circleCoordinates[2];
if((((center.x-desvio) <= centrotela.x) && ((center.x+desvio) >= centrotela.x))) {
if ((((center.y-desvio) <= centrotela.y) && ((center.y+desvio) >= centrotela.y))) {
som.start();
Imgproc.circle(imgThresholded, center, radius, new Scalar(100, 255, 255), 4);
// Stop the sound after a 2 second delay
final Handler handler = new Handler();
handler.postDelayed(new Runnable() {
@Override
public void run() {
som.stop();
}
}, 2000);
}}
}
Imgproc.circle(imgThresholded,centrotela,50, new Scalar(100,255,255),7);
Imgproc.circle(imgThresholded,centrotela,25, new Scalar(100,255,255),4);
Imgproc.circle(imgThresholded,centrotela,5, new Scalar(100,255,255),-1);
return imgThresholded;
}
}
You have to write som.start(); instead of som.stop(); inside the handler.
Have you tried playing the sound in a different thread (AsyncTask? See the example here)? You would have to keep track of whether you have already spawned a task, though; otherwise you will end up creating too many threads playing the same sound.
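A minimal sketch combining both suggestions, assuming the MediaPlayer field som is created once (e.g. in onCreate via MediaPlayer.create(this, R.raw.bip)) rather than on every frame, with a boolean guard so only one delayed reset is pending at a time (field and method names are hypothetical):
private MediaPlayer som; //created once in onCreate, not per frame
private final Handler handler = new Handler(Looper.getMainLooper());
private boolean beepPending = false; //guard: only one scheduled reset at a time
private void beepOnce() {
if (beepPending) return; //a beep is already playing or scheduled
beepPending = true;
som.start();
handler.postDelayed(new Runnable() {
@Override
public void run() {
som.pause(); //pause + rewind instead of stop(), so no re-prepare() is needed
som.seekTo(0);
beepPending = false;
}
}, 2000);
}
Calling beepOnce() from onCameraFrame then beeps at most once per two seconds, instead of stacking a new MediaPlayer per frame, which is the likely cause of the crash.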

Plot graph of Red vs Time: OpenCV Android

I am using OpenCV in my Android application to detect the RGB value of an object in front of the camera. I can detect the RGB value (not perfectly, because my phone camera keeps adjusting the brightness), and I want to plot a graph of the Red/Green/Blue value against time. I would also like to limit the video to 1 minute. I am using the GraphView library to plot the graph. The problem is that GraphView takes 2 parameters, x (time) and y (my R/G/B value), and since I could not find a solution to set the timer, I can't plot the graph.
Can anyone help me understand how to set the timer so that the video is recorded for a certain period of time?
public class VideoRecordingActivity extends AppCompatActivity implements CameraBridgeViewBase.CvCameraViewListener2{
//java camera view
JavaCameraView javaCameraView;
Mat mRgba, mHsv;
GraphView graph;
//callback loader
BaseLoaderCallback mCallBackLoader = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status){
case BaseLoaderCallback.SUCCESS:
javaCameraView.enableView();
break;
default:
super.onManagerConnected(status);
break;
}
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_video_recording);
//connect the camera
javaCameraView = (JavaCameraView)findViewById(R.id.java_camera_view);
//set visibility
javaCameraView.setVisibility(SurfaceView.VISIBLE);
javaCameraView.setMaxFrameSize(320, 240);
javaCameraView.enableFpsMeter();
javaCameraView.clearFocus();
//set callback function
javaCameraView.setCvCameraViewListener(this);
/*Graph*/
graph = (GraphView)findViewById(R.id.graphView);
}
@Override
protected void onPause() {
super.onPause();
if(javaCameraView!=null){
javaCameraView.disableView();
}
}
@Override
protected void onDestroy() {
super.onDestroy();
if (javaCameraView!=null){
javaCameraView.disableView();
}
}
@Override
protected void onResume() {
super.onResume();
if (OpenCVLoader.initDebug()){
Log.d("openCV", "Connected");
//called when the activity is resumed; invoke the callback loader directly
mCallBackLoader.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}else{
Log.d("openCV", "Not connected");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_3_0, this, mCallBackLoader);
}
}
@Override
public void onCameraViewStarted(int width, int height) {
//4 channel
mRgba = new Mat(width, height, CvType.CV_8UC4);
mHsv = new Mat(width, height, CvType.CV_8UC3);
}
@Override
public void onCameraViewStopped() {
//release
mRgba.release();
}
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
//get each frame from camera
mRgba = inputFrame.rgba();
/**********HSV conversion**************/
//convert mat rgb to mat hsv
Imgproc.cvtColor(mRgba, mHsv, Imgproc.COLOR_RGB2HSV);
//find scalar sum of hsv
Scalar mColorHsv = Core.sumElems(mHsv);
int pointCount = 320*240;
//divide each channel sum by the pixel count to get the average
for (int i = 0; i < mColorHsv.val.length; i++) {
mColorHsv.val[i] /= pointCount;
}
//convert hsv scalar to rgb scalar
Scalar mColorRgb = convertScalarHsv2Rgba(mColorHsv);
//print scalar value
Log.d("intensity", "R:"+ String.valueOf(mColorRgb.val[0])+" G:"+String.valueOf(mColorRgb.val[1])+" B:"+String.valueOf(mColorRgb.val[2]));
int R = (int) mColorRgb.val[0];
int G = (int) mColorRgb.val[1];
int B = (int) mColorRgb.val[2];
Log.d("intensity", "Y:"+ String.valueOf(Y)+" U:"+String.valueOf(U)+" V:"+String.valueOf(V));
/*>>>>>>>>GRAPH<<<<<<<<<<*/
LineGraphSeries<DataPoint> series = new LineGraphSeries<>(new DataPoint[]{
new DataPoint(0, R),
new DataPoint(1, R),
new DataPoint(2, R),
new DataPoint(3, R)
});
graph.addSeries(series);
return mRgba;
}
//convert an HSV scalar to an RGB scalar
private Scalar convertScalarHsv2Rgba(Scalar hsvColor) {
Mat pointMatRgba = new Mat();
Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB);
return new Scalar(pointMatRgba.get(0, 0));
}
}
The video runs at around 15-20 fps. I want to plot the R/G/B value of each frame against time.
Any help will be very much appreciated.
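A minimal sketch of one way to wire the timer, assuming a single LineGraphSeries field that is added to the graph once (in onCreate) and a start timestamp taken in onCameraViewStarted; all names below are hypothetical, and GraphView's appendData must run on the UI thread:
private LineGraphSeries<DataPoint> redSeries = new LineGraphSeries<>(); //graph.addSeries(redSeries); once in onCreate
private long startMs; //set in onCameraViewStarted: startMs = System.currentTimeMillis();
private static final long LIMIT_MS = 60000; //stop plotting after 1 minute
//at the end of onCameraFrame, instead of adding a new series every frame:
final double tSec = (System.currentTimeMillis() - startMs) / 1000.0;
if (tSec * 1000 <= LIMIT_MS) {
final int red = R; //the averaged red value computed above
runOnUiThread(new Runnable() {
@Override
public void run() {
//append (time, red); cap at ~1200 points (60 s at about 20 fps)
redSeries.appendData(new DataPoint(tSec, red), true, 1200);
}
});
}
Appending to one series also avoids allocating a new LineGraphSeries per frame, which would otherwise grow without bound.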

Black screenshot in Android OpenCV

Currently I am using OpenCV. What I want to happen is that when the user presses the screen, the object shown by the camera is highlighted in all necessary/similar colors; see: Color-Blob-Detector. Video without the screenshot.
Below there is a small camera icon for triggering a screenshot (an ImageButton); however, the resulting picture is always black. What I want is that while the necessary colors are being highlighted, pressing the screenshot button (the ImageButton) takes a picture of the screen (including the highlighted objects) and saves the image to a file.
I have also tried using the camera controls from OpenCV to take a picture, but it is hard for me to merge the two approaches.
public class Camera extends Activity implements OnTouchListener, CvCameraViewListener2 {
private static final String TAG = "OCVSample::Activity";
private boolean mIsColorSelected = false;
private Mat mRgba;
private Scalar mBlobColorRgba;
private Scalar mBlobColorHsv;
private ColorBlobDetector mDetector;
private Mat mSpectrum;
private Size SPECTRUM_SIZE;
private Scalar CONTOUR_COLOR;
private CameraBridgeViewBase mOpenCvCameraView;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
mOpenCvCameraView.setOnTouchListener(Camera.this);
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public Camera() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.colorblobdetector);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.activity_java_surface_view);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
if (!OpenCVLoader.initDebug()) {
Log.e(this.getClass().getSimpleName(), " OpenCVLoader.initDebug(), not working.");
} else {
Log.d(this.getClass().getSimpleName(), " OpenCVLoader.initDebug(), working.");
}
}
public void camera_b(View v)
{
String path = Environment.getExternalStorageDirectory().toString() + "/" + "hellp.jpg";
v = getWindow().getDecorView().getRootView();
v.setDrawingCacheEnabled(true);
Bitmap bitmap = Bitmap.createBitmap(v.getDrawingCache());
v.setDrawingCacheEnabled(false);
OutputStream out = null;
File imageFile = new File(path);
try {
out = new FileOutputStream(imageFile);
// choose JPEG format
bitmap.compress(Bitmap.CompressFormat.JPEG, 90, out);
out.flush();
} catch (FileNotFoundException e) {
// manage exception
} catch (IOException e) {
// manage exception
} finally {
try {
if (out != null) {
out.close();
}
} catch (Exception exc) {
}
}
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
mRgba = new Mat(height, width, CvType.CV_8UC4);
mDetector = new ColorBlobDetector();
mSpectrum = new Mat();
mBlobColorRgba = new Scalar(255);
mBlobColorHsv = new Scalar(255);
SPECTRUM_SIZE = new Size(200, 64);
CONTOUR_COLOR = new Scalar(255,0,0,255);
}
public void onCameraViewStopped() {
mRgba.release();
}
public boolean onTouch(View v, MotionEvent event) {
int cols = mRgba.cols();
int rows = mRgba.rows();
int xOffset = (mOpenCvCameraView.getWidth() - cols) / 2;
int yOffset = (mOpenCvCameraView.getHeight() - rows) / 2;
int x = (int)event.getX() - xOffset;
int y = (int)event.getY() - yOffset;
Log.i(TAG, "Touch image coordinates: (" + x + ", " + y + ")");
if ((x < 0) || (y < 0) || (x > cols) || (y > rows)) return false;
Rect touchedRect = new Rect();
touchedRect.x = (x>4) ? x-4 : 0;
touchedRect.y = (y>4) ? y-4 : 0;
touchedRect.width = (x+4 < cols) ? x + 4 - touchedRect.x : cols - touchedRect.x;
touchedRect.height = (y+4 < rows) ? y + 4 - touchedRect.y : rows - touchedRect.y;
Mat touchedRegionRgba = mRgba.submat(touchedRect);
Mat touchedRegionHsv = new Mat();
Imgproc.cvtColor(touchedRegionRgba, touchedRegionHsv, Imgproc.COLOR_RGB2HSV_FULL);
// Calculate average color of touched region
mBlobColorHsv = Core.sumElems(touchedRegionHsv);
int pointCount = touchedRect.width*touchedRect.height;
for (int i = 0; i < mBlobColorHsv.val.length; i++)
mBlobColorHsv.val[i] /= pointCount;
mBlobColorRgba = converScalarHsv2Rgba(mBlobColorHsv);
Log.i(TAG, "Touched rgba color: (" + mBlobColorRgba.val[0] + ", " + mBlobColorRgba.val[1] +
", " + mBlobColorRgba.val[2] + ", " + mBlobColorRgba.val[3] + ")");
mDetector.setHsvColor(mBlobColorHsv);
Imgproc.resize(mDetector.getSpectrum(), mSpectrum, SPECTRUM_SIZE);
mIsColorSelected = true;
touchedRegionRgba.release();
touchedRegionHsv.release();
return false; // don't need subsequent touch events
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
if (mIsColorSelected) {
mDetector.process(mRgba);
List<MatOfPoint> contours = mDetector.getContours();
Log.e(TAG, "Contours count: " + contours.size());
Imgproc.drawContours(mRgba, contours, -1, CONTOUR_COLOR);
Mat colorLabel = mRgba.submat(4, 68, 4, 68);
colorLabel.setTo(mBlobColorRgba);
Mat spectrumLabel = mRgba.submat(4, 4 + mSpectrum.rows(), 70, 70 + mSpectrum.cols());
mSpectrum.copyTo(spectrumLabel);
}
return mRgba;
}
private Scalar converScalarHsv2Rgba(Scalar hsvColor) {
Mat pointMatRgba = new Mat();
Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);
return new Scalar(pointMatRgba.get(0, 0));
}
}
As you can see, camera_b is where the screenshot happens. The XML for the color blob detector: ColorBlobXML (I cannot paste XML code here).
Image of the output. As you can see, it captures the button but not the expected image. (This image was made using power + volume down.)
Put this in the AndroidManifest.xml:
<uses-permission android:name="android.permission.CAMERA"/>
And change onCreate as follows:
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.colorblobdetector);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.activity_java_surface_view);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
mOpenCvCameraView.setCameraIndex(0); //0 = back and 1 = front
mOpenCvCameraView.enableView();
if (!OpenCVLoader.initDebug()) {
Log.e(this.getClass().getSimpleName(), " OpenCVLoader.initDebug(), not working.");
} else {
Log.d(this.getClass().getSimpleName(), " OpenCVLoader.initDebug(), working.");
}
}
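Note also that the black output is expected with the drawing-cache approach: the camera preview lives in a SurfaceView, whose pixels are composited outside the normal view hierarchy, so getDrawingCache() captures only the transparent hole (the overlaid ImageButton still shows, which matches the screenshot). A sketch of a workaround under that assumption: keep a copy of the last processed frame in a field (hypothetical name mLastFrame, updated with mRgba.copyTo(mLastFrame) at the end of onCameraFrame) and save that Mat directly:
private Mat mLastFrame = new Mat(); //updated in onCameraFrame: mRgba.copyTo(mLastFrame);
public void camera_b(View v) {
String path = Environment.getExternalStorageDirectory().toString() + "/" + "hellp.jpg";
Mat bgr = new Mat();
Imgproc.cvtColor(mLastFrame, bgr, Imgproc.COLOR_RGBA2BGR); //imwrite expects BGR; the frames are RGBA
Imgcodecs.imwrite(path, bgr);
bgr.release();
}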

Efficiently load 5 larger-than-fullscreen Bitmaps into my Android Live Wallpaper and scroll them with parallax

So I'm new to Java and this kind of coding.
I'm trying to make a parallax-scrolling live wallpaper, but I'm having memory issues.
I have made it, and it works on the phone I have, but I think the way I have done it is not very efficient, because it doesn't work on other phones: it breaks at my out-of-memory catcher. I added another layer and now it does the same thing on my phone too, so I am able to debug it. Basically I guess I'm using up to or over 16 MB of memory.
If someone could take a look at my code and help me load the bitmaps more efficiently, that would be greatly appreciated.
Here is how I'm currently doing it:
static class Layer {
public Bitmap bitmap;
private float scale = 1.0f;
private Matrix matrix = new Matrix();
public Layer(Bitmap b) {
this.bitmap = b;
}
public void setScale(float factor) {
scale = factor;
}
public Matrix getMatrix(float x, float y) {
if (scale == 1) {
matrix.reset();
} else {
matrix.setScale(scale, scale);
}
matrix.postTranslate(x, y);
return matrix;
}
}
public static List<Integer> findLayers(Integer path) {
List<Integer> files = new ArrayList<Integer>();
files.add(R.drawable.planet_layer4);
files.add(R.drawable.planet_layer3);
files.add(R.drawable.planet_layer2);
files.add(R.drawable.planet_layer1);
files.add(R.drawable.planet_layer0);
return files;
}
private void loadLayers() {
try {
clearLayers();
for (Integer file: layerFiles) {
addLayer(file);
}
recalibrateLayers();
} catch (IOException e) {
layers.clear();
Toast.makeText(LiveWallpaper.this, "There was a problem loading the wallpaper. Please contact the developer.", Toast.LENGTH_LONG).show();
} catch (OutOfMemoryError oom) {
layers.clear();
Toast.makeText(LiveWallpaper.this, "Whoops, we ran out of memory trying to load the images. ", Toast.LENGTH_LONG).show();
}
}
private void addLayer(int name) throws IOException {
Bitmap layer = BitmapFactory.decodeResource(getResources(), name);
if (layer == null) {
throw new IOException("BitmapFactory couldn't decode asset " + name);
}
synchronized(layers) {
layers.add(new Layer(layer));
}
}
private void clearLayers() {
synchronized(layers) {
layers.clear();
}
}
private void recalibrateLayers() {
for (Layer layer : layers) {
final int bitmapHeight = layer.bitmap.getHeight();
layer.setScale((float)mHeight / (float)bitmapHeight);
}
}
@Override
public void onDestroy() {
super.onDestroy();
mHandler.removeCallbacks(mDrawParallax);
}
@Override
public void onSurfaceChanged(SurfaceHolder holder, int format, int width, int height) {
super.onSurfaceChanged(holder, format, width, height);
mHeight = height;
recalibrateLayers();
drawBackgrounds();
}
And here is where I draw them.
/*
* Draw one frame of the animation. This method gets called repeatedly
* by posting a delayed Runnable. You can do any drawing you want in
* here.
*/
void drawBackgrounds() {
final SurfaceHolder holder = getSurfaceHolder();
final Rect frame = holder.getSurfaceFrame();
mFrame = frame;
Canvas c = null;
try {
c = holder.lockCanvas();
if (c != null) {
// draw something
drawParallax(c);
}
} finally {
if (c != null) holder.unlockCanvasAndPost(c);
}
}
void drawParallax(Canvas c) {
int frameWidth = mFrame.width();
for (int i=layers.size()-1; i>=0; i--) {
Layer layer = layers.get(i);
Bitmap bitmap = layer.bitmap;
float bitmapWidth = bitmap.getWidth() * layer.scale;
float max = frameWidth - bitmapWidth;
float offset = mOffset * max;
final Matrix m = layer.getMatrix(offset, 0);
c.drawBitmap(bitmap, m, null);
}
}
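One common fix for the memory pressure here is to decode each resource at (or near) the size it will actually be drawn, instead of at full resolution, using a two-pass BitmapFactory decode. A minimal sketch that addLayer could call instead of the plain decodeResource, assuming mHeight holds the surface height as set in onSurfaceChanged (inSampleSize should be a power of two):
private Bitmap decodeScaledLayer(int resId, int targetHeight) throws IOException {
BitmapFactory.Options opts = new BitmapFactory.Options();
opts.inJustDecodeBounds = true; //first pass: read only the image bounds, no pixel allocation
BitmapFactory.decodeResource(getResources(), resId, opts);
int sample = 1;
while (opts.outHeight / (sample * 2) >= targetHeight) {
sample *= 2; //largest power-of-two subsampling that still covers targetHeight
}
opts = new BitmapFactory.Options();
opts.inSampleSize = sample; //second pass: decode for real, subsampled
//opts.inPreferredConfig = Bitmap.Config.RGB_565; //optional: halves memory, but drops alpha, so only for the opaque bottom layer
Bitmap b = BitmapFactory.decodeResource(getResources(), resId, opts);
if (b == null) {
throw new IOException("BitmapFactory couldn't decode resource " + resId);
}
return b;
}
With the layers subsampled toward the screen height before the pixels are allocated, the working set shrinks by roughly the square of the sample factor, which is usually enough to fit under the 16 MB heap of older devices.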
