I'm a beginner in the C language and I need to copy pixels to an Android Bitmap. I'm using a piece of code from OpenCV for Android, used in a JNI function:
AndroidBitmapInfo info;
void* pixels;
int ret;
cv::Mat* mat;
if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0 ){
LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
return false; // can't get info
}
if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888){
LOGE("Bitmap format is not RGB_8888 !");
return false; // incompatible format
}
if ( (ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0 ){
LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
return false; // can't get pixels
}
memcpy(pixels, mat->data, info.height * info.width * 4);
AndroidBitmap_unlockPixels(env, bitmap);
So, I have an IplImage* called pImage, but I don't know how to convert an IplImage* to a cv::Mat*. I see a way to convert to a cv::Mat, like this:
cv::Mat mat(pImage);
But I need a cv::Mat*, not a cv::Mat. Any help?
Answering your question title:
cv::Mat* mat = new cv::Mat(pImage);
The OpenCV tutorials include an article on Interoperability with OpenCV 1.
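If you are on a newer OpenCV where the Mat(IplImage*) constructor no longer exists, a minimal sketch of an alternative (my own, using cv::cvarrToMat) could look like this:
// Minimal sketch, assuming pImage is a valid IplImage*.
// The cv::Mat(IplImage*) constructor exists in OpenCV 2.x; in OpenCV 3.x and later
// it was removed and cv::cvarrToMat() is the usual replacement.
#include "opencv2/core/core.hpp"
#include "opencv2/core/core_c.h"   // C API compatibility header (IplImage, cvarrToMat)
cv::Mat* matFromIplImage(IplImage* pImage)
{
    // copyData=true makes the Mat own its pixels, so it stays valid even if
    // the IplImage is released afterwards; pass false to just wrap the data.
    return new cv::Mat(cv::cvarrToMat(pImage, /*copyData=*/true));
}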
There are several posts about converting Mat to Bitmap using the Utils.matToBitmap() function. But I'm assuming this function can only be called in the Java layer after importing the Utils class.
I want to transfer the data to a memory address pointed to by uint32_t* bmpContent; in the code below.
JNIEXPORT void JNICALL Java_com_nod_nodcv_NodCVActivity_runfilter(
JNIEnv *env, jclass clazz, jobject outBmp, jbyteArray inData,
jint width, jint height, jint choice, jint filter)
{
int outsz = width*height;
int insz = outsz + outsz/2;
AndroidBitmapInfo bmpInfo;
if (AndroidBitmap_getInfo(env, outBmp, &bmpInfo) < 0) {
throwJavaException(env,"gaussianBlur","Error retrieving bitmap meta data");
return;
}
if (bmpInfo.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
throwJavaException(env,"gaussianBlur","Expecting RGBA_8888 format");
return;
}
uint32_t* bmpContent;
if (AndroidBitmap_lockPixels(env, outBmp,(void**)&bmpContent) < 0) {
throwJavaException(env,"gaussianBlur","Unable to lock bitmap pixels");
return;
}
//This function runs the kernel on the inData and gives a matrix
tester(env, clazz, bmpContent, outsz, inData, insz, width, height);
AndroidBitmap_unlockPixels(env, outBmp);
}
This is roughly what happens in the tester function:
jbyte* b_mat = env->GetByteArrayElements(inData, 0);
cv::Mat mdata(h, w, CV_8UC4, (unsigned char *)b_mat);
cv::Mat mat_src = imdecode(mdata,1);
cv::UMat umat_src = mat_src.getUMat(cv::ACCESS_READ, cv::USAGE_ALLOCATE_DEVICE_MEMORY);
cv::UMat umat_dst (mat_src.size(), mat_src.type(), cv::ACCESS_WRITE, cv::USAGE_ALLOCATE_DEVICE_MEMORY);
kernel.args(cv::ocl::KernelArg::ReadOnlyNoSize(umat_src), cv::ocl::KernelArg::ReadWrite(umat_dst));
size_t globalThreads[3] = {static_cast<unsigned int>(mat_src.cols), static_cast<unsigned int>(mat_src.rows), 1 };
bool success = kernel.run(3, globalThreads, NULL, true);
cv::Mat mat_dst = umat_dst.getMat(cv::ACCESS_READ);
mat_dst holds the results I need and that I need to display on my phone.
How can I do that?
I'm assuming I'll need to copy the data from mat_dst to the buffer that bmpContent points to, but I'm not sure.
If you really need to call this method from the JNI layer, you can simply use OpenCV's original C++ implementation here.
Example code would look like this:
#include <jni.h>
#include <string>
#include <android/bitmap.h>
#include "opencv2/opencv.hpp"
// using namespace cv;
void MatToBitmap2 (JNIEnv * env, cv::Mat src, jobject bitmap, bool needPremultiplyAlpha)
{
AndroidBitmapInfo info;
void* pixels = 0;
try {
// LOGD("nMatToBitmap");
CV_Assert( AndroidBitmap_getInfo(env, bitmap, &info) >= 0 );
CV_Assert( info.format == ANDROID_BITMAP_FORMAT_RGBA_8888 ||
info.format == ANDROID_BITMAP_FORMAT_RGB_565 );
CV_Assert( src.dims == 2 && info.height == (uint32_t)src.rows && info.width == (uint32_t)src.cols );
CV_Assert( src.type() == CV_8UC1 || src.type() == CV_8UC3 || src.type() == CV_8UC4 );
CV_Assert( AndroidBitmap_lockPixels(env, bitmap, &pixels) >= 0 );
CV_Assert( pixels );
if( info.format == ANDROID_BITMAP_FORMAT_RGBA_8888 )
{
cv::Mat tmp(info.height, info.width, CV_8UC4, pixels);
if(src.type() == CV_8UC1)
{
cvtColor(src, tmp, cv::COLOR_GRAY2RGBA);
} else if(src.type() == CV_8UC3){
cvtColor(src, tmp, cv::COLOR_RGB2RGBA);
} else if(src.type() == CV_8UC4){
if(needPremultiplyAlpha) cvtColor(src, tmp, cv::COLOR_RGBA2mRGBA);
else src.copyTo(tmp);
}
} else {
// info.format == ANDROID_BITMAP_FORMAT_RGB_565
cv::Mat tmp(info.height, info.width, CV_8UC2, pixels);
if(src.type() == CV_8UC1)
{
cvtColor(src, tmp, cv::COLOR_GRAY2BGR565);
} else if(src.type() == CV_8UC3){
cvtColor(src, tmp, cv::COLOR_RGB2BGR565);
} else if(src.type() == CV_8UC4){
cvtColor(src, tmp, cv::COLOR_RGBA2BGR565);
}
}
AndroidBitmap_unlockPixels(env, bitmap);
return;
} catch(const cv::Exception& e) {
AndroidBitmap_unlockPixels(env, bitmap);
jclass je = env->FindClass("java/lang/Exception");
env->ThrowNew(je, e.what());
return;
} catch (...) {
AndroidBitmap_unlockPixels(env, bitmap);
jclass je = env->FindClass("java/lang/Exception");
env->ThrowNew(je, "Unknown exception in JNI code {nMatToBitmap}");
return;
}
}
The function was adapted directly from the OpenCV sources and contains some extra checks for different formats. You can strip the checks out if you know which matrix format you're going to use.
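For example, if you know the matrix is CV_8UC4 and the Bitmap is RGBA_8888, a stripped-down sketch (my own, not from the OpenCV sources) could look like the following; wrapping the locked pixel buffer in a cv::Mat that uses the bitmap stride handles any row padding:
// Minimal sketch: copy a CV_8UC4 cv::Mat into an ARGB_8888 Android Bitmap.
// Same headers as above (<jni.h>, <android/bitmap.h>, "opencv2/opencv.hpp").
// Assumes src has the same dimensions as the bitmap; no format checks.
void MatToBitmapRGBA(JNIEnv* env, const cv::Mat& src, jobject bitmap)
{
    AndroidBitmapInfo info;
    void* pixels = nullptr;
    if (AndroidBitmap_getInfo(env, bitmap, &info) < 0) return;
    if (AndroidBitmap_lockPixels(env, bitmap, &pixels) < 0) return;
    // Wrap the locked buffer; info.stride accounts for possible row padding.
    cv::Mat dst((int)info.height, (int)info.width, CV_8UC4, pixels, info.stride);
    src.copyTo(dst);
    AndroidBitmap_unlockPixels(env, bitmap);
}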
mat_dst holds the results I need and that I need to display on my phone. How can I do that?
You can call something like:
extern "C"
JNIEXPORT void JNICALL
Java_com_your_package_MainActivity_DoStuff(JNIEnv *env, jobject thiz,
jobject bitmap) {
// Do your stuff with mat_dst.
try {
MatToBitmap2(env, mat_dst, bitmap, false);
}
catch(const cv::Exception& e)
{
jclass je = env->FindClass("java/lang/Exception");
env->ThrowNew(je, e.what());
}
}
and declare it on the Java side like:
public native void DoStuff(Bitmap bitmap);
You don't need to return anything back to the Java side, since Bitmap is a reference type and the MatToBitmap2 method already takes care of locking and unlocking the pixel buffer.
Use this to convert your Mat to Bitmap.
jclass java_bitmap_class = (jclass)env->FindClass("android/graphics/Bitmap");
jmethodID mid = env->GetMethodID(java_bitmap_class, "getConfig", "()Landroid/graphics/Bitmap$Config;");
jobject bitmap_config = env->CallObjectMethod(bitmap, mid);
jobject _bitmap = mat_to_bitmap(env,dst,false,bitmap_config);
AndroidBitmap_unlockPixels(env, bitmap);
return _bitmap;
I want to apply a grayscale effect to an image using the NDK.
For that I have googled a lot, but everything I found gives the same result, which looks somewhat like a negative of the image (this is what I believe).
What I want ::
For example ::
I have this original image
After applying the grayscale effect it should be like this ::
What I have tried ::
I want to achieve this functionality using the NDK, so I have created a function in a .cpp file:
JNIEXPORT void JNICALL Java_com_example_ndksampleproject_MainActivity_jniConvertToGray(JNIEnv * env, jobject obj, jobject bitmapcolor,jobject bitmapgray)
{
AndroidBitmapInfo infocolor;
void* pixelscolor;
AndroidBitmapInfo infogray;
void* pixelsgray;
int ret;
int y;
int x;
LOGI("convertToGray");
if ((ret = AndroidBitmap_getInfo(env, bitmapcolor, &infocolor)) < 0) {
LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
return;
}
if ((ret = AndroidBitmap_getInfo(env, bitmapgray, &infogray)) < 0) {
LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
return;
}
LOGI("color image :: width is %d; height is %d; stride is %d; format is %d;flags is %d",infocolor.width,infocolor.height,infocolor.stride,infocolor.format,infocolor.flags);
if (infocolor.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
LOGE("Bitmap format is not RGBA_8888 !");
return;
}
LOGI("gray image :: width is %d; height is %d; stride is %d; format is %d;flags is %d",infogray.width,infogray.height,infogray.stride,infogray.format,infogray.flags);
if (infogray.format != ANDROID_BITMAP_FORMAT_A_8) {
LOGE("Bitmap format is not A_8 !");
return;
}
if ((ret = AndroidBitmap_lockPixels(env, bitmapcolor, &pixelscolor)) < 0) {
LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
}
if ((ret = AndroidBitmap_lockPixels(env, bitmapgray, &pixelsgray)) < 0) {
LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
}
LOGI("unlocking pixels height = %d",infocolor.height);
// modify pixels with image processing algorithm
for (y=0;y<infocolor.height;y++) {
argb * line = (argb *) pixelscolor;
uint8_t * grayline = (uint8_t *) pixelsgray;
for (x=0;x<infocolor.width;x++) {
grayline[x] = 0.3 * line[x].red + 0.59 * line[x].green + 0.11*line[x].blue;
}
pixelscolor = (char *)pixelscolor + infocolor.stride;
pixelsgray = (char *) pixelsgray + infogray.stride;
}
LOGI("unlocking pixels");
AndroidBitmap_unlockPixels(env, bitmapcolor);
AndroidBitmap_unlockPixels(env, bitmapgray);
}
The above function returns a result like this ::
This effect looks something like a negative of the image.
Let me know if you need anything from my side.
Please help me solve this issue, as I have been stuck on it for many hours.
Many Thanks in Advance...
EDIT ::
floppy12's Suggestion ::
for (y=0;y<infocolor.height;y++) {
argb * line = (argb *) pixelscolor;
uint8_t * grayline = (uint8_t *) pixelsgray;
for (x=0;x<infocolor.width;x++) {
grayline[x] = (255-0.3 * line[x].red) + (255-0.59 * line[x].green) + (255-0.11*line[x].blue)/3;
}
pixelscolor = (char *)pixelscolor + infocolor.stride;
pixelsgray = (char *) pixelsgray + infogray.stride;
}
Output ::
EDIT 2 ::
I have made a simple modification and it returns the image I wanted, but the image loses its brightness.
These are the changes I have made in the native function:
for (y=0;y<infocolor.height;y++) {
argb * line = (argb *) pixelscolor;
uint8_t * grayline = (uint8_t *) pixelsgray;
for (x=0;x<infocolor.width;x++) {
grayline[x] = ((255-0.3 * line[x].red) + (255-0.59 * line[x].green) + (255-0.11*line[x].blue))/3;
}
pixelscolor = (char *)pixelscolor + infocolor.stride;
pixelsgray = (char *) pixelsgray + infogray.stride;
}
Result (the image is grayscale but loses its brightness) ::
To obtain an image in grayscale, each pixel should have the same amount of red, green and blue.
Maybe use the red component and assign it to both green and blue in your grayline computation,
or use the formula (R+G+B)/3 = Gray.
Negative images are normally obtained by inverting each component:
NegR = 255 - grayR
and so on
So you could try to compute grayscale[x] = (255 - 0.3*line[x]) + ...
Edit for brightness:
To obtain better brightness, try to add a fixed amount to your grayscale computation:
G += Bness;
Here it seems that Bness should be negative, as long as you are going from 255 (black) to 0 (white) for some strange reason. You want to put a lower limit in place so your grayscale value does not go under 0, so try:
G = max(0, G+Bness);
I recommend something like Bness = -25
Edit, brightness implementation:
// Declare a global variable for your brightness - outside your class
static int16_t bness = -25;   // must be a signed type to hold a negative offset
// In your grayscale computation function
for (y = 0; y < infocolor.height; y++) {
    for (x = 0; x < infocolor.width; x++) {
        grayscale[x] = ((255-0.3*line[x].red) + (255-0.59*line[x].green) + (255-0.11*line[x].blue)) / 3;
        int16_t gBright = grayscale[x] + bness;
        grayscale[x] = MAX(0, gBright);
    }
}
I am passing the Bitmap with ARGB_8888 config.
I am able to apply the grayscale effect to the image, but after applying it, the image loses its brightness.
I have googled a lot but only found the same implementation as I have.
Here is my native implementation ::
JNIEXPORT void JNICALL Java_com_example_ndksampleproject_MainActivity_jniConvertToGray(JNIEnv * env, jobject obj, jobject bitmapcolor,jobject bitmapgray)
{
AndroidBitmapInfo infocolor;
void* pixelscolor;
AndroidBitmapInfo infogray;
void* pixelsgray;
int ret;
int y;
int x;
LOGI("convertToGray");
if ((ret = AndroidBitmap_getInfo(env, bitmapcolor, &infocolor)) < 0) {
LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
return;
}
if ((ret = AndroidBitmap_getInfo(env, bitmapgray, &infogray)) < 0) {
LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
return;
}
LOGI("color image :: width is %d; height is %d; stride is %d; format is %d;flags is %d",infocolor.width,infocolor.height,infocolor.stride,infocolor.format,infocolor.flags);
if (infocolor.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
LOGE("Bitmap format is not RGBA_8888 !");
return;
}
LOGI("gray image :: width is %d; height is %d; stride is %d; format is %d;flags is %d",infogray.width,infogray.height,infogray.stride,infogray.format,infogray.flags);
if (infogray.format != ANDROID_BITMAP_FORMAT_A_8) {
LOGE("Bitmap format is not A_8 !");
return;
}
if ((ret = AndroidBitmap_lockPixels(env, bitmapcolor, &pixelscolor)) < 0) {
LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
}
if ((ret = AndroidBitmap_lockPixels(env, bitmapgray, &pixelsgray)) < 0) {
LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
}
LOGI("unlocking pixels height = %d",infocolor.height);
// modify pixels with image processing algorithm
for (y=0;y<infocolor.height;y++) {
argb * line = (argb *) pixelscolor;
uint8_t * grayline = (uint8_t *) pixelsgray;
for (x=0;x<infocolor.width;x++) {
grayline[x] = ((255-0.3 * line[x].red) + (255-0.59 * line[x].green) + (255-0.11*line[x].blue))/3;
}
pixelscolor = (char *)pixelscolor + infocolor.stride;
pixelsgray = (char *) pixelsgray + infogray.stride;
}
LOGI("unlocking pixels");
AndroidBitmap_unlockPixels(env, bitmapcolor);
AndroidBitmap_unlockPixels(env, bitmapgray);
}
Result ::
Please let me know if you need anything from my side.
Please help me get rid of this issue, as I have been stuck on it for many hours.
Many thanks in Advance !!!
EDIT ::
After applying Mark Setchell's suggestion ::
EDITED
If you invert the image above, you get this - which looks correct to me:
Don't divide by 3 on the line where you calculate grayline[x]. Your answer is already correctly weighted because 0.3 + 0.59 + 0.11 = 1
grayline[x] = (255-0.3 * line[x].red) + (255-0.59 * line[x].green) + (255-0.11*line[x].blue);
There are two problems with your current code.
1) As mentioned by others, do not divide the final result by three. If you were calculating grayscale using the average method (e.g. gray = (R + G + B) / 3), the division would be necessary. For the ITU conversion formula you are using, there is no need for this extra division, because the fractional amounts already sum to 1.
2) The inversion occurs because you are subtracting each color value from 255. There is no need to do this.
The correct grayscale conversion code for your current formula would be:
grayline[x] = ((0.3 * line[x].red) + (0.59 * line[x].green) + (0.11*line[x].blue));
I am trying to do real-time image processing in Android using JNI. I have a native method to decode image data, and I call this method for every frame. After a few seconds I get an out-of-memory error and my app terminates.
LOG OUTPUT:
12-03 20:54:19.780: E/dalvikvm-heap(8119): Out of memory on a 3686416-byte allocation.
MY NATIVE METHOD:
JNIEXPORT jintArray JNICALL Java_net_oyunyazar_arcc_data_FrameManager_processImage(JNIEnv* env, jobject javaThis, jint width, jint height, jbyteArray arr) {
jint *convertedData;
convertedData = (jint*)malloc((width*height) * sizeof(jint));
jintArray result = (*env)->NewIntArray(env, width*height);
jint y,x;
jbyte grey;
jsize len = (*env)->GetArrayLength(env, arr);
jbyte *YUVData = (*env)->GetByteArrayElements(env, arr, 0);
for (y = 0; y < height; y++){
for (x = 0; x < width; x++){
grey = YUVData[y * width + x];
convertedData[y*width+x] =(jint) grey & 0xff;
}
}
LOGD("Random [%d]",len);
(*env)->SetIntArrayRegion(env, result, 0, (width*height),convertedData );
free(convertedData);
(*env)->ReleaseByteArrayElements(env, YUVData, (jbyte*)arr, 0);
return result;
}
Thanks for any help.
I have the same problem as yours.
In your specific case, since you are working with pixels (and probably a bitmap), you can pass a Bitmap instead of your byte array and modify it in place:
void *pixel_bm;
int retValue;
AndroidBitmapInfo info;
if ((retValue = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) return 0;
if ((retValue = AndroidBitmap_lockPixels(env, bitmap, &pixel_bm)) < 0) return 0;
// you can now read an write into pixel_bm
AndroidBitmap_unlockPixels(env, bitmap);
If you find a solution to correctly free a GetByteArrayElements result, I'm interested in the solution!
I have solved this problem by releasing the array elements (note the argument order):
(*env)->ReleaseByteArrayElements(env, arr, YUVData, 0);
It works great now.
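For reference, a minimal sketch (my own) of the Get/Release pairing with the arguments in the right order, assuming the Java array is only read:
jbyte* YUVData = (*env)->GetByteArrayElements(env, arr, NULL);
if (YUVData != NULL) {
    /* ... read from YUVData ... */
    // Note the argument order: first the jbyteArray, then the pointer that
    // GetByteArrayElements returned. JNI_ABORT frees the buffer without
    // writing changes back, which is fine for read-only access.
    (*env)->ReleaseByteArrayElements(env, arr, YUVData, JNI_ABORT);
}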
I have an app built against Android 2.2 and I'm using inPreferredConfig to switch a bitmap to ARGB_8888 format; however, this doesn't seem to work, as when checked immediately afterwards the bitmap is still in RGB_565 format. I've tried changing it to any of the other formats and none of those work either.
The function works fine if the phone or emulator is running Android 2.2, but anything above that fails. Does anyone know why this is happening? Is inPreferredConfig deprecated in later Android versions?
What I'm doing:
I'm using the NDK with some C code I've found to run some image processing functions (taken from http://www.ibm.com/developerworks/opensource/tutorials/os-androidndk/section5.html). The C code expects the image format to be ARGB_8888, and although the Android documentation says the format should already be 8888 by default, it's definitely 565, so I'm very confused.
I'm guessing I could convert it in C...but I'm terrible at C so I wouldn't know where to start.
My C function:
{
AndroidBitmapInfo infocolor;
void* pixelscolor;
AndroidBitmapInfo infogray;
void* pixelsgray;
int ret;
int y;
int x;
LOGI("convertToGray");
if ((ret = AndroidBitmap_getInfo(env, bitmapcolor, &infocolor)) < 0) {
LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
return;
}
if ((ret = AndroidBitmap_getInfo(env, bitmapgray, &infogray)) < 0) {
LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
return;
}
LOGI("color image :: width is %d; height is %d; stride is %d; format is %d;flags is %d",infocolor.width,infocolor.height,infocolor.stride,infocolor.format,infocolor.flags);
if (infocolor.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
LOGE("Bitmap format is not RGBA_8888 !");
return;
}
LOGI("gray image :: width is %d; height is %d; stride is %d; format is %d;flags is %d",infogray.width,infogray.height,infogray.stride,infogray.format,infogray.flags);
if (infogray.format != ANDROID_BITMAP_FORMAT_A_8) {
LOGE("Bitmap format is not A_8 !");
return;
}
if ((ret = AndroidBitmap_lockPixels(env, bitmapcolor, &pixelscolor)) < 0) {
LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
}
if ((ret = AndroidBitmap_lockPixels(env, bitmapgray, &pixelsgray)) < 0) {
LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
}
// modify pixels with image processing algorithm
for (y=0;y<infocolor.height;y++) {
argb * line = (argb *) pixelscolor;
uint8_t * grayline = (uint8_t *) pixelsgray;
for (x=0;x<infocolor.width;x++) {
grayline[x] = 0.3 * line[x].red + 0.59 * line[x].green + 0.11*line[x].blue;
}
pixelscolor = (char *)pixelscolor + infocolor.stride;
pixelsgray = (char *) pixelsgray + infogray.stride;
}
LOGI("unlocking pixels");
AndroidBitmap_unlockPixels(env, bitmapcolor);
AndroidBitmap_unlockPixels(env, bitmapgray);
}
My Java functions:
// load bitmap from resources
BitmapFactory.Options options = new BitmapFactory.Options();
// Make sure it is 24 bit color as our image processing algorithm expects this format
options.inPreferredConfig = Config.ARGB_8888;
bitmapOrig = BitmapFactory.decodeResource(this.getResources(), R.drawable.sampleimage,options);
if (bitmapOrig != null)
ivDisplay.setImageBitmap(bitmapOrig);
-
bitmapWip = Bitmap.createBitmap(bitmapOrig.getWidth(),bitmapOrig.getHeight(),Config.ALPHA_8);
convertToGray(bitmapOrig,bitmapWip);
ivDisplay.setImageBitmap(bitmapWip);
Thanks, N
P.S. My last question on the same subject got deleted, which is annoying as I can't find any answers to this anywhere.
Images are loaded with the ARGB_8888 config by default, according to the documentation, so my guess is that it recognizes the RGB_565 format of your bitmap and changes the config for that. I don't see why this should be a problem if the original image is of RGB_565 format and has no transparency.
Here's the documentation - read the last bit:
If this is non-null, the decoder will try to decode into this internal configuration. If it is null, or the request cannot be met, the decoder will try to pick the best matching config based on the system's screen depth, and characteristics of the original image such as if it has per-pixel alpha (requiring a config that also does). Image are loaded with the ARGB_8888 config by default.
http://developer.android.com/reference/android/graphics/BitmapFactory.Options.html#inPreferredConfig
This is old, but so far not satisfactorily answered:
Just ran into the same problem the other day.
So far I couldn't solve it, and I'm considering writing a converter, since I'm using OpenCV and it doesn't support the 565 format.
Saving the image and loading it again with a different configuration works, but unfortunately this is not feasible for a real-time camera application.
Have a look at this code:
How does one convert 16-bit RGB565 to 24-bit RGB888?
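For what it's worth, a minimal sketch (mine, not from the linked post) of the per-pixel conversion in C; the high bits of each channel are replicated into the low bits so that full-intensity 565 values map to 255:
#include <stdint.h>
// Expand one 16-bit RGB565 pixel (RRRRRGGGGGGBBBBB) into 8-bit R, G, B.
static void rgb565_to_rgb888(uint16_t p, uint8_t* r, uint8_t* g, uint8_t* b)
{
    uint8_t r5 = (p >> 11) & 0x1F;
    uint8_t g6 = (p >> 5)  & 0x3F;
    uint8_t b5 =  p        & 0x1F;
    *r = (uint8_t)((r5 << 3) | (r5 >> 2));   // 0x1F -> 0xFF
    *g = (uint8_t)((g6 << 2) | (g6 >> 4));   // 0x3F -> 0xFF
    *b = (uint8_t)((b5 << 3) | (b5 >> 2));   // 0x1F -> 0xFF
}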