I want to make logging time which a JNI's cpp system needed for every process.This is my code :
/*
* ImageProcessing.cpp
*/
#include <jni.h>
#include <stdlib.h>
#include <stdio.h>
#include <string>
#include <android/log.h>
#include <strstream>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc_c.h>
#include <opencv2/objdetect/objdetect.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <vector>
#define LOG_TAG "TourGuide"
#define LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__))
using namespace std;
using namespace cv;
std::vector<float> parse_delimeted_list_of_numbers(char* line, char delimeter)
{
std::vector<float> vector_of_numbers;
std::istrstream input_stream(line);
std::string text;
float number;
while (std::getline(input_stream, text, delimeter)) {
sscanf(text.c_str(), "%f", &number, text.size());
vector_of_numbers.push_back(number);
}
return vector_of_numbers;
}
extern "C"
jboolean
Java_com_example_franksyesipangkar_tourguide_CameraPreview_ImageProcessing
(JNIEnv* env, jobject thiz, jint width, jint height, jbyteArray NV21FrameData, jintArray outPixels, jbyteArray b)
{
LOGD("JNIEnv");
//convert jbyteArray to char
jbyte *cmd = env->GetByteArrayElements(b, 0);
LOGD("JNIEnvFeature");
char feature[90600];//[819000] sejumlah 800kb untuk ukuran file
memset(feature,0, sizeof(feature));
memcpy(feature, cmd, strlen((char*)cmd));
LOGD("OutFeature: %s", feature);
//LOGD("OutCMD: %s", cmd);
vector<float> vectorHOGSVM;
vectorHOGSVM = parse_delimeted_list_of_numbers(feature, ' ');
LOGD("Parsing Vector Success ");
/* Mengambil data pointer */
jbyte * pNV21FrameData = env->GetByteArrayElements(NV21FrameData, 0);
jint * poutPixels = env->GetIntArrayElements(outPixels, 0);
/* Membuat matrix dari input frame gambar */
Mat mGray(height, width, CV_8UC1, (unsigned char *)pNV21FrameData);
Mat mResult(height, width, CV_8UC4, (unsigned char *)poutPixels);
/* Mengubah matrix kembali menjadi frame gambar */
IplImage GrayImg = mGray;
IplImage ResultImg = mResult;
HOGDescriptor hog;
//hog.winSize = Size(56,40);
// Set our custom detecting vector
hog.setSVMDetector(vectorHOGSVM);
//hog.setSVMDetector(HOGDescriptor::getDefaultPeopleDetector());
/* Deklarasi variable vector untuk menggambar kotak deteksi */
vector<Rect> found, found_filtered;
size_t i, j;
hog.detectMultiScale(mGray, found, 0, Size(8,8), Size(32,32), 1.05, 2);
double t = (double)getTickCount();
t = (double)getTickCount() - t;
LOGD("Detection Time: %gms", t*1000./cv::getTickFrequency());
LOGD("Animal: %d", found.size());
for( i = 0; i < found.size(); i++ )
{
Rect r = found[i];
for( j = 0; j < found.size(); j++ )
if( j != i && (r & found[j]) == r)
break;
if( j == found.size() )
found_filtered.push_back(r);
}
if(found.size()) {
Rect r = found[0];
r.x += cvRound(r.width*0.1);
r.width = cvRound(r.width*0.8);
r.y += cvRound(r.height*0.07);
r.height = cvRound(r.height*0.8);
LOGD("c : %d, r : %d",r.height,r.width);
cvCvtColor(&GrayImg, &ResultImg, CV_GRAY2BGR);
env->ReleaseByteArrayElements(NV21FrameData, pNV21FrameData, 0);
env->ReleaseIntArrayElements(outPixels, poutPixels, 0);
env->ReleaseByteArrayElements(b, cmd, 0);
return true;
}
For detection process, I make logging time with cv::getTickFrequency() (OpenCV function) like this :
double t = (double)getTickCount();
t = (double)getTickCount() - t;
LOGD("Detection Time: %gms", t*1000./cv::getTickFrequency());
But, in another process I don't understand for making log time.So, I want to make logging time in every process, in process LOGD("OutFeature: %s", feature);, and process LOGD("Parsing Vector Success ");.Do you have an idea for it ?
Related
I'm trying the use the OpenSSL Rand library with DRBG in an android app, this library is implemented in native code of NDK. At starting, the OpenSSL works fine but in many times the app crashed and don't show any throw message. Here is the only error message that showwing:
A/libc: Fatal signal 7 (SIGBUS), code 1 (BUS_ADRALN), fault addr 0xb1aedca6f2d64adf in tid 22101 (RenderThread), pid 22075 (android.example)
My code is the follow:
libnative.cpp
#include <jni.h>
#include <string>
#include <cstdio>
#include <cstdlib>
#include <cassert>
#include <openssl/rand.h>
#include <openssl/rand_drbg.h>
#include <android/log.h>
#include <future>
#ifndef TAG
#define TAG "OpenSslApi"
#endif
#ifndef DEFAULT_VALUE_ERROR
#define DEFAULT_VALUE_ERROR 0
#endif
void thread_handler(union sigval sv) {}
extern "C"
JNIEXPORT void JNICALL
Java_com_android_random_OpenSslApi_initDrbgRandom(
JNIEnv * env,
jclass clazz) {
RAND_DRBG * randDrbgInstance = RAND_DRBG_new(NID_aes_256_ctr, RAND_DRBG_FLAG_CTR_NO_DF, nullptr);
RAND_DRBG_instantiate(randDrbgInstance, nullptr, 0);
RAND_DRBG_set_reseed_time_interval(randDrbgInstance, 0);
RAND_DRBG_set_reseed_interval(randDrbgInstance, 0);
}
std::pair < jint * , jint > generateRandomIntDrbg(jint * secureRandom, jint sizeKey) {
jint myStatus = RAND_DRBG_bytes(
RAND_DRBG_get0_public(),
(unsigned char * ) secureRandom,
sizeKey * sizeof(int)
);
return std::make_pair(secureRandom, myStatus);
}
extern "C"
JNIEXPORT jint JNICALL
Java_com_android_random_OpenSslApi_intDrbgGenerateSecureRandom(
JNIEnv * env,
jclass clazz,
jintArray empty_array,
jint size_key) {
struct sigevent sev {};
timer_t timerid;
memset( & sev, 0, sizeof(sev));
sev.sigev_notify = SIGEV_THREAD;
sev.sigev_notify_function = & thread_handler;
sev.sigev_value.sival_ptr = & timerid;
timer_create(CLOCK_MONOTONIC, & sev, & timerid);
JavaVM * javaVm = nullptr;
env -> GetJavaVM( & javaVm);
javaVm -> AttachCurrentThread( & env, (void ** ) & env);
jintArray array = env -> NewIntArray(size_key);
if (array == nullptr) {
return DEFAULT_VALUE_ERROR;
}
jint * secureRandomIntArray = env -> GetIntArrayElements(array, nullptr);
if (secureRandomIntArray == nullptr) {
return DEFAULT_VALUE_ERROR;
}
std::future < std::pair < jint * , jint >> futureIntRandom = std::async (generateRandomIntDrbg, secureRandomIntArray, size_key);
std::pair < jint * , jint > result = futureIntRandom.get();
jint * resultSecureRandom = std::get < jint * > (result);
if (resultSecureRandom == nullptr) {
return DEFAULT_VALUE_ERROR;
}
memcpy(secureRandomIntArray, empty_array, size_key);
env -> ReleaseIntArrayElements(empty_array, secureRandomIntArray, 0);
return std::get < jint > (result);
}
OpenSslApi.java
static {
System.loadLibrary("libnative");
}
public OpenSslApi() {
initDrbgRandom();
}
public static native void initDrbgRandom();
public static native int intDrbgGenerateSecureRandom(
int[] emptyArray,
final int sizeKey
);
Thanks for either suggestions about the solution of this error.
Project is a realtime image processer same as my Project but it uses two values which are input and output(I remember that these projects are using frame for processing like that).
I changed its native-lib.cpp file this
#include <jni.h>
#include "opencv2/objdetect.hpp"
#include "opencv2/highgui.hpp"
#include <android/log.h>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#define TAG "NativeLib"
using namespace std;
using namespace cv;
extern "C" {
//void detect(Mat& input);
void JNICALL Java_com_example_nativeopencvandroidtemplate_MainActivity_adaptiveThresholdFromJNI(JNIEnv *env, jobject instance, jlong inputAddr, jlong outputAddr) {
Mat &input = *(Mat *) inputAddr;
Mat &output = *(Mat *) outputAddr;
clock_t begin = clock();
cv::adaptiveThreshold(input, output, 255, ADAPTIVE_THRESH_MEAN_C, THRESH_BINARY_INV, 21, 5);
double total_time = double (clock() - begin ) / CLOCKS_PER_SEC;
__android_log_print(ANDROID_LOG_INFO, TAG, "adaptiveThreshold computation time = %f seconds\n", total_time);
}
}
to this
#include <jni.h>
#include "opencv2/objdetect.hpp"
#include "opencv2/highgui.hpp"
#include <android/log.h>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
using namespace std;
using namespace cv;
extern "C" {
void detect(Mat &input);
void JNICALL
Java_com_example_nativeopencvandroidtemplate_MainActivity_adaptiveThresholdFromJNI(JNIEnv *env, jobject instance,
jlong inputAddr) {
Mat &input = *(Mat *) inputAddr;
detect(input);
}
void detect(Mat &input) {
String face_cascade_name = "/storage/emulated/0/ony.xml";
String eyes_cascade_name = "/storage/emulated/0/moe.xml";
CascadeClassifier face_cascade;
CascadeClassifier eyes_cascade;
if (!face_cascade.load(face_cascade_name)) {
printf("--(!)Error loading\n");
return;
};
if (!eyes_cascade.load(eyes_cascade_name)) {
printf("--(!)Error loading\n");
return;
};
std::vector<Rect> faces;
Mat frame_gray;
cvtColor( input, frame_gray, COLOR_RGB2GRAY );
equalizeHist(frame_gray, frame_gray);
//-- Detect faces
face_cascade.detectMultiScale(frame_gray, faces, 1.1, 2, 0 | CASCADE_SCALE_IMAGE, Size(30, 30));
for (size_t i = 0; i < faces.size(); i++) {
Point center(faces[i].x + faces[i].width * 0.5, faces[i].y + faces[i].height * 0.5);
ellipse(input, center, Size(faces[i].width * 0.5, faces[i].height * 0.5), 0, 0, 360, Scalar(255, 0, 255), 4, 8,
0);
Mat faceROI = frame_gray(faces[i]);
std::vector<Rect> eyes;
//-- In each face, detect eyes
eyes_cascade.detectMultiScale(faceROI, eyes, 1.1, 2, 0 | CASCADE_SCALE_IMAGE, Size(30, 30));
for (size_t j = 0; j < eyes.size(); j++) {
Point center(faces[i].x + eyes[j].x + eyes[j].width * 0.5, faces[i].y + eyes[j].y + eyes[j].height * 0.5);
int radius = cvRound((eyes[j].width + eyes[j].height) * 0.25);
circle(input, center, radius, Scalar(255, 0, 0), 4, 8, 0);
}
}
}
}
But in my phone there was a black screen for probably five seconds and app stopping repeatedly.
Note: Sync and build were successfully and before I changed cpp file app was working successfully
Please help me about my Project.
Thanks
Here you made some changes to the definition of the C++ method Java_com_example_nativeopencvandroidtemplate_MainActivity_adaptiveThresholdFromJNI, you will therefore have to reflect these changes on the Kotlin side, as this method is called from your MainActivity.kt using JNI. Here is the code you'll have to adapt in MainActivity.kt:
class MainActivity : Activity(), CameraBridgeViewBase.CvCameraViewListener2 {
...
override fun onCameraFrame(inputFrame: CameraBridgeViewBase.CvCameraViewFrame): Mat {
val mat = inputFrame.gray()
adaptiveThresholdFromJNI(mat.nativeObjAddr)
return mat
}
private external fun adaptiveThresholdFromJNI(matAddr: Long)
...
}
Here adaptiveThresholdFromJNI was adapted to handle only one argument (as you did with the C++ equivalent) and that one argument is then returned from onCameraFrame to be displayed on the screen.
I see in your C++ code that the first thing you try to do is to convert your input Mat to gray, but this is not necessary because the Mat passed to your C++ code is already gray (see val mat = inputFrame.gray()).
If you want to keep your C++ code intact, you can also pass the colorized version of the camera view frame to your C++ code by using val mat = inputFrame.rgba().
When I ran my code under debugger, it showed an <unknown> error in:
detector->detectAndCompute( matInput, noArray(), keypoints2, descriptors2 );
like this
`matInput` is from `inputFrame.rgba()`
Java code in :
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame)
and this is not null.
This is a part of
native-lib.cpp:
std::vector<Mat> trainImages;
std::vector< std::vector<DMatch> > knn_matches;
std::vector<KeyPoint> keypoints1;
std::vector<KeyPoint> keypoints2;
Mat matImg;
static void createKeypointsAndDescriptors(const Mat& matInput) {
int minHessian = 400;
cv::Ptr<SURF> detector = SURF::create( minHessian );
std::vector<KeyPoint> temp_keypoints;
std::vector<std::vector<KeyPoint>> temp_keypoints1;
Mat temp_descriptors;
std::vector<Mat> temp_descriptors1;
Mat descriptors2;
std::vector< std::vector<DMatch> > temp_knn_matches;
for(int i = 0; i < 2; i++) {
detector->detectAndCompute( trainImages[i], noArray(), temp_keypoints, temp_descriptors );
temp_keypoints1.push_back(temp_keypoints);
temp_descriptors1.push_back(temp_descriptors);
}
detector->detectAndCompute( matInput, noArray(), keypoints2, descriptors2 );
Ptr<DescriptorMatcher> matcher = DescriptorMatcher::create(DescriptorMatcher::FLANNBASED);
int max = 0;
for(int i = 0; i < 2; i++) {
if(temp_keypoints[i].size >= 2 && keypoints2.size() >= 2)
matcher->knnMatch( temp_descriptors1[i], descriptors2, temp_knn_matches, 2 );
if(max < temp_knn_matches.size()) {
max = temp_knn_matches.size();
keypoints1 = temp_keypoints1[i];
matImg = trainImages[i];
knn_matches = temp_knn_matches;
}
}
}
EDIT
Here's my example input image
This is a code related image. I send images from the asset directory to JNI by using java code.
extern "C"
JNIEXPORT void JNICALL
Java_com_example_surfwithflann2_MainActivity_sendImages(JNIEnv *env, jobject instance,
jlongArray tempAddrObj_) {
if(trainImages.size() == 0) {
int length = env->GetArrayLength(tempAddrObj_);
jlong *tempAddrObj = env->GetLongArrayElements(tempAddrObj_, NULL);
for(int i = 0; i < length; i++) {
cv::Mat &tempImage = *(cv::Mat *)tempAddrObj[i];
trainImages.push_back(tempImage);
}
env->ReleaseLongArrayElements(tempAddrObj_, tempAddrObj, 0);
} else {
__android_log_print(ANDROID_LOG_DEBUG, "TAG", "already received from java");
}
}
To narrow it down, try splitting detectAndCompute() into detect() and compute():
#include <stdio.h>
#include <iostream>
#include "opencv2/core.hpp"
#include "opencv2/features2d.hpp"
#include "opencv2/xfeatures2d.hpp"
#include "opencv2/highgui.hpp"
using namespace cv;
using namespace cv::xfeatures2d;
std::vector<cv::KeyPoint> keypoints2;
cv::Mat descriptors2;
static void createKeypointsAndDescriptors(const cv::Mat& matInput) {
//-- Step 1: Detect the keypoints using SURF Detector, compute the descriptors
double minHessian = 400;
Ptr<SURF> detector = SURF::create();
detector->setHessianThreshold(minHessian);
detector->detect(matInput, keypoints2);
detector->compute(matInput, keypoints2, descriptors2);
// detector->detectAndCompute( matInput, noArray(), keypoints2, descriptors2 );
...
}
links:
cv::xfeatures2d::SURF Class Reference
cv::Feature2D Class Reference
cv::xfeatures2d::AffineFeature2D Class Reference
I am implementing the following project:
https://github.com/matthill/darwinwallet
I am getting the following error when I am executing a OpenCV android project in Eclipse:
jni/jni_recognizer.cpp:2:33: fatal error: opencv2/core/core.hpp: No such file or directory
#include <opencv2/core/core.hpp>
^
compilation terminated.
make.exe: *** [obj/local/armeabi-v7a/objs/native_wallet/jni_recognizer.o] Error 1
in my jni/include folder in Eclipse,there is no openCV files.There are only NDK files, so, maybe that is the issue. SO, please tell me how to add openCV files in the include folder.
This is my recognizer.cpp file
`
#include <jni.h>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <vector>
#include "NativeVision/vision.h"
#include <time.h>
#include <android/log.h>
using namespace std;
using namespace cv;
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, "JNI_DEBUGGING", __VA_ARGS__)
double diffclock(clock_t clock1,clock_t clock2)
{
double diffticks=clock2-clock1;
double diffms=(diffticks*1000)/ CLOCKS_PER_SEC;
return diffms;
}
static Ptr<ORB> detector;
static Ptr<DescriptorMatcher> descriptorMatcher;
static vector<Mat> trainImages;
static bool training_complete = false;
static vector<string> billMapping;
extern "C" {
JNIEXPORT void JNICALL Java_com_ndu_mobile_darwinwallet_Recognizer_nvInitialize(JNIEnv* env, jobject thiz)
{
LOGD( "Started nvInitialize" );
detector = getQueryDetector();
descriptorMatcher = getMatcher();
LOGD( "Finished nvInitialize" );
}
}
extern "C" {
JNIEXPORT void JNICALL Java_com_ndu_mobile_darwinwallet_Recognizer_nvResetTrainedDatabase(JNIEnv* env, jobject thiz)
{
LOGD( "Started nvResetTrainedDatabase" );
training_complete = false;
descriptorMatcher = getMatcher();
trainImages.clear();
billMapping.clear();
LOGD( "Finished nvResetTrainedDatabase" );
}
}
extern "C" {
JNIEXPORT void JNICALL Java_com_ndu_mobile_darwinwallet_Recognizer_nvTrainImage(JNIEnv* env, jobject thiz, jstring billname, jstring billpath)
{
//char* _imgBytes = (char*) env->GetPrimitiveArrayCritical(imgBytes, 0);
const char * _billpath = env->GetStringUTFChars(billpath, 0);
const char * _billname = env->GetStringUTFChars(billname, 0);
LOGD( "Started nvTrainImage" );
std::ostringstream out;
//std::ostringstream out;
//out << " : billname: " << _billname << ": BILLPATH: " << billpath << endl;
LOGD( out.str().c_str() );
//LOGD( "nvTrainImage: 1" );
//Mat mgray(1, bytelength, CV_8U, (unsigned char *)_imgBytes);
//LOGD( "nvTrainImage: 2" );
Mat img = imread(_billpath, 0);
//Mat img = imread("/sdcard/wallet/us/100b/full_pic.jpg", 0);
//LOGD( "nvTrainImage: 3" );
Mat trainData = trainImage( img, detector, descriptorMatcher );
out << "nvTrainImage: " << _billpath << " (" << trainData.rows << " x " << trainData.cols << ")" << endl;
LOGD( out.str().c_str() );
trainImages.push_back(trainData);
string billstr(_billname);
billMapping.push_back(billstr);
LOGD( "Finished nvTrainImage" );
env->ReleaseStringUTFChars(billpath, _billpath);
env->ReleaseStringUTFChars(billname, _billname);
//env->ReleasePrimitiveArrayCritical(imgBytes, _imgBytes, 0);
}
}
extern "C" {
JNIEXPORT void JNICALL Java_com_ndu_mobile_darwinwallet_Recognizer_nvFinalizeTraining(JNIEnv* env, jobject thiz)
{
LOGD( "Started nvFinalizeTraining" );
descriptorMatcher->add(trainImages);
descriptorMatcher->train();
training_complete = true;
LOGD( "Finished nvFinalizeTraining" );
}
}
extern "C" {
JNIEXPORT jstring JNICALL Java_com_ndu_mobile_darwinwallet_Recognizer_nvRecognize(JNIEnv* env, jobject thiz, jint width, jint height, jbyteArray yuv)
{
jbyte* _yuv = env->GetByteArrayElements(yuv, 0);
//jint* _bgra = env->GetIntArrayElements(bgra, 0);
LOGD( "Started nvFindFeatures" );
jstring response = env->NewStringUTF("");
if (training_complete == true)
{
clock_t begin;
clock_t end;
//Mat myuv(height + height/2, width, CV_8UC1, (unsigned char *)_yuv);
//Mat mbgra(height, width, CV_8UC4);
Mat mgray(height, width, CV_8UC1, (unsigned char *)_yuv);
//Mat myuv(width, 1, CV_8U, (unsigned char *)_yuv);
//Mat mgray = imdecode(myuv, 0);
//Please make attention about BGRA byte order
//ARGB stored in java as int array becomes BGRA at native level
//cvtColor(myuv, mbgra, CV_YUV420sp2BGR, 4);
vector<KeyPoint> v;
//FastFeatureDetector detector(50);
//detector.detect(mgray, v);
float divisor = 1;
if (height < width)
{
divisor = (((double) height) / 240);
}
else
{
divisor = (((double) width) / 240);
}
if (divisor == 0)
divisor = 1;
Size idealSize(width/divisor, height/divisor);
Mat mgray_small;
resize(mgray, mgray_small, idealSize);
Mat descriptors;
vector<DMatch> matches;
begin=clock();
//detector->detect(mgray_small, v);
//descriptorExtractor->compute( mgray_small, v, descriptors );
//surfStyleMatching( descriptorMatcher, descriptors, matches );
Mat* dummy;
//imwrite("/sdcard/wallet_debug.jpg", mgray_small );
bool debug_on = true;
int debug_matches[billMapping.size()];
RecognitionResult result = recognize( mgray_small, false, dummy, detector, descriptorMatcher, billMapping,
debug_on, debug_matches);
end=clock();
std::ostringstream out;
out << "time: " << diffclock(begin, end) << " ms | matches: " << matches.size() << endl;
if (debug_on)
{
for (int k = 0; k < billMapping.size(); k++)
out << " --" << billMapping[k] << " : " << debug_matches[k] << endl;
}
out << "orig_width: " << width << "orig_height: " << height << endl;
out << "divisor: " << divisor << endl;
//LOGD( (char*) out.str().c_str());
if (result.haswinner == false)
out << "No winner frown emoticon" << endl;
else
{
out << "Big Winner! " << result.winner << " : " << result.confidence << endl;
std::ostringstream responsetext;
responsetext << result.winner << "," << result.confidence;
response = env->NewStringUTF(responsetext.str().c_str());
}
LOGD( (char*) out.str().c_str());
//for( size_t i = 0; i < v.size(); i++ )
// circle(mbgra, Point(v[i].pt.x, v[i].pt.y), 10, Scalar(0,0,255,255));
}
LOGD( "Finished nvFindFeatures" );
//env->ReleaseIntArrayElements(bgra, _bgra, 0);
env->ReleaseByteArrayElements(yuv, _yuv, 0);
return response;
}
}
`
And this is my android.mk file:
LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
OPENCV_CAMERA_MODULES:= off
#OPENCV_MK_PATH:D:\FYP\darwinwallet-master\OpenCV-2.4.9-android-sdk\sdk\native\jni
OPENCV_MK_PATH:D:\FYP\darwinwallet-master\OpenCV-2.4.9-android-sdk\sdk\native\jni\OpenCV.mk
OPENCV_LIB_TYPE:=STATIC
OPENCV_INSTALL_MODULES:=on
include $(OPENCV_MK_PATH)
#Profiler
#-include android-ndk-profiler.mk
#include ../includeOpenCV.mk
#ifeq ("$(wildcard $(OPENCV_MK_PATH))","")
#try to load OpenCV.mk from default install location
#include $(TOOLCHAIN_PREBUILT_ROOT)/user/share/OpenCV/OpenCV.mk
#else
#include $(OPENCV_MK_PATH)
#endif
#LOCAL_C_INCLUDES:=D:\FYP\darwinwallet-master\OpenCV-2.4.9-android-sdk\sdk\native\jni\include
LOCAL_MODULE:=native_wallet
LOCAL_SRC_FILES:=jni_recognizer.cpp NativeVision\vision.cpp
LOCAL_CFLAGS=-ffast-math -O3 -funroll-loops
#LOCAL_CFLAGS=-O3 -funroll-loops
LOCAL_LDLIBS+=-llog -ldl
#Profiling
#LOCAL_CFLAGS:=-pg
#LOCAL_STATIC_LIBRARIES:=andprof
include $(BUILD_SHARED_LIBRARY)
There is a character missing
OPENCV_MK_PATH:D…
should read
OPENCV_MK_PATH:=D…
:)
I'm programming an algorithm for detect symmetric radial center of an image, to detect eyes position into face framePicture. I know that already exist a project of public domain that do this work, but i would base my work about another kind of studies.
This is the scenario:
Doing by hand this manipulation frame by frame, i have seen that writting code on java layer, like this:
private Mat mGray = new Mat(height,width,CvType.CV_8U);
private Mat mOut = new Mat(height,width,CvType.CV_8U);
private Mat mIntermediateMat = Mat.zeros(height,width,CvType.CV_32F);
[...common methods of opencv app...]
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
switch (ImageManipulationsActivity.viewMode) {
case ImageManipulationsActivity.VIEW_MODE_RGBA:
mOut = inputFrame.rgba();
break;
case ImageManipulationsActivity.VIEW_MODE_MODIFY:
mGray = inputFrame.gray();
int h = mGray.rows();
int w = mGray.cols();
int sobxVal,sobyVal;
/**
* Appling manually sobel filtering to calculate dx and dy image,
* moreover calculate magnitudo matrix and cosValue and sinValue
* matrices for computing, using lut techniques.
*/
for(int i = 1; i < h-1; i++)
for(int j = 1; j < w-1; j++) {
sobxVal = (int) (
((int)mGray.get(i-1,j)[0] << 1) +
mGray.get(i-1,j-1)[0] +
mGray.get(i-1,j+1)[0] - (
((int)mGray.get(i+1,j)[0] << 1) +
mGray.get(i+1,j-1)[0] +
mGray.get(i+1,j+1)[0] ) );
sobyVal = (int) (
((int)mGray.get(i,j-1)[0] << 1) +
mGray.get(i-1,j-1)[0] +
mGray.get(i+1,j-1)[0] - (
((int)mGray.get(i,j+1)[0] << 1) +
mGray.get(i-1,j+1)[0] +
mGray.get(i+1,j+1)[0] ) );
// compute magnitudo and atan2
}
// ...other calculations...
Core.convertScaleAbs(mIntermediateMat, mOut);
}
return mOut;
}
is not all efficient! So I decided to write c++ for a native function to manipulate matrix in this way:
Code by c++ side
#include <jni.h>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <math.h>
#include <vector>
#include <android/log.h>
#define LOG_TAG "Example Filter"
#define LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__))
#define RADIUS 10
#define _K 9.9
#define _A 2 //radial strictness parameter, found experimentally
using namespace std;
using namespace cv;
extern "C" {
JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial2_Tutorial2Activity_FindFeatures(
JNIEnv* env,
jobject,
jlong addrGray,
jlong addrRgba,
jlong addrlutCosSin,
jlong addrlutM );
JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial2_Tutorial2Activity_FindFeatures(
JNIEnv* env,
jobject,
jlong addrGray,
jlong addrOut,
jlong addrlutCosSin,
jlong addrlutM )
{
Mat& mGr = *(Mat*)addrGray;
Mat& mOut = *(Mat*)addrOut;
Mat& lutCosSin = *(Mat*)addrlutCosSin;
Mat& lutM = *(Mat*)addrlutM;
int w = mGr.cols;
int h = mGr.rows;
double sobelxVal,sobelyVal,angle;
Mat magnitudo(h,w,CV_32F,Scalar(0));
Mat xMat(h,w,CV_8S,Scalar(0));
Mat yMat(h,w,CV_8S,Scalar(0));
Mat oMat(h,w,CV_32F,Scalar(0));
Mat mMat(h,w,CV_32F,Scalar(0));
/*
* Convolves Matrix with Sobel ky kernel and Sobel kx kernel
*ky = [ 1 2 1 ;
* 0 0 0 ;
* -1 -2 -1 ]
*
*kx = [ 1 0 -1 ;
* 2 0 -2 ;
* 1 0 -1 ]
*
* doing dedicated computation
*/
for( int i = 1; i < h-1; i++ )
{
for (int j = 1; j < w-1; j++ )
{
sobelxVal = (mGr.at<int>(i-1,j) << 1) +
mGr.at<int>(i-1,j-1) +
mGr.at<int>(i-1,j+1) - (
(mGr.at<int>(i+1,j) << 1) +
mGr.at<int>(i+1,j-1) +
mGr.at<int>(i+1,j+1) );
sobelyVal = (mGr.at<int>(i,j-1) << 1) +
mGr.at<int>(i-1,j-1) +
mGr.at<int>(i+1,j-1) - (
(mGr.at<int>(i,j+1) << 1) +
mGr.at<int>(i-1,j+1) +
mGr.at<int>(i+1,j+1) );
magnitudo.at<double>(i,j) = lutM.at<double>((int)sobelxVal+255/4,(int)sobelxVal+255/4);
angle = floor(atan2(sobelyVal,sobelxVal)*180/M_PI);
xMat.at<double>(i,j) = lutCosSin.at<double>(0,angle);
yMat.at<double>(i,j) = lutCosSin.at<double>(1,angle);
}
}
// other code calculation to determine mOut matrix values
}
}
By java code side
private Mat mRgba;
private Mat mGray;
/*
* Matrix of 360 cols and 2 rows
* row[0] cos values
* row[1] sin values
*/
private static Mat lutCosSin;
/*
* Matrix 510 x 510
* where lutM(i,j) = atan2(i,j)
*/
private static Mat lutMagnitudo
// common methods and declarations...
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
final int viewMode = mViewMode;
switch (viewMode) {
case VIEW_MODE_RGBA:
// input frame has RBGA format
mRgba = inputFrame.rgba();
break;
case VIEW_MODE_FEATURES:
// input frame has RGBA format
mGray = inputFrame.gray();
mRgba = Mat.zeros(mGray.rows(), mGray.cols(), mGray.type());
FindFeatures(
mGray.getNativeObjAddr(),
mRgba.getNativeObjAddr(),
lutCosSin.getNativeObjAddr(),
lutMagnitudo.getNativeObjAddr()
);
//Core.convertScaleAbs(mRgba, mRgba);
// Log.d(TAG, "Called native function :"+mRgba.submat(new Range(0,5), new Range(0,5)).toString()+
// "\nAngles matrix:"+mGray);
break;
}
return mRgba;
}
public native void FindFeatures(long matAddrGr, long matAddrRgba, long matAddrlutCS, long matAddrlutM);
the first important issue of this snipped code is that on accessing to mGr cells with "at" method in this way:
mGr.at<int>(i,j)
previus check that mGr type is int, the returned values isn't the effective gray level pixel of gray frame (i saw it by log).
I suppose that there are linkin bug of matrix from java code to c++ code, but i'm not sure for this.
I hope may anyone help me to solve this issue XD !!