Is converting float to matrix and matrix to bitmap possible in Android?

I have already converted the data from onPreviewFrame to RGB, which is an int array. Gaussian blur works on float/double values, and I wasn't able to find any method for converting float/double values to a bitmap.
The only way I can think of is to convert these float values into a matrix and then into a bitmap, but I am not really sure whether this is possible.
public Bitmap getProcessedImage(byte[] image) {
    // An NV21 frame has width*height*3/2 bytes but only width*height pixels.
    int rgb[] = new int[width * height];
    decodeYUV420SP(rgb, image);
    convertToGrayscale(rgb);
    applyGaussianBlur(rgb);
    Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    // Note: copyPixelsFromBuffer copies raw bytes, which swaps the R and B
    // channels of packed ARGB ints on little-endian devices; harmless here
    // because the image is grayscale (R == G == B).
    bmp.copyPixelsFromBuffer(IntBuffer.wrap(rgb));
    return bmp;
}
public void convertToGrayscale(int rgb[]) {
    for (int i = 0; i < rgb.length; i++) {
        int R = (rgb[i] >> 16) & 0xff;
        int G = (rgb[i] >> 8) & 0xff;
        int B = rgb[i] & 0xff;
        // Plain average; (299*R + 587*G + 114*B) / 1000 would track
        // perceived brightness more closely.
        int gray = (R + G + B) / 3;
        rgb[i] = 0xFF000000 | (gray << 16) | (gray << 8) | gray;
    }
}
// Method from the Ketai project!
void decodeYUV420SP(int[] rgb, byte[] yuv420sp) {
    final int frameSize = width * height;
    for (int j = 0, yp = 0; j < height; j++) {
        int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
        for (int i = 0; i < width; i++, yp++) {
            int y = (0xff & ((int) yuv420sp[yp])) - 16;
            if (y < 0) y = 0;
            if ((i & 1) == 0) {
                v = (0xff & yuv420sp[uvp++]) - 128;
                u = (0xff & yuv420sp[uvp++]) - 128;
            }
            int y1192 = 1192 * y;
            int r = (y1192 + 1634 * v);
            int g = (y1192 - 833 * v - 400 * u);
            int b = (y1192 + 2066 * u);
            if (r < 0) r = 0; else if (r > 262143) r = 262143;
            if (g < 0) g = 0; else if (g > 262143) g = 262143;
            if (b < 0) b = 0; else if (b > 262143) b = 262143;
            rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
        }
    }
}
public void applyGaussianBlur(int rgb[]) {
    final int kernelWidth = 3, kernelHeight = 3;
    float sigma = 3.0f;
    float matrix[][] = new float[kernelWidth][kernelHeight];
    float sum = 0;
    // Build the 3x3 Gaussian kernel.
    for (int x = -1; x <= 1; x++) {
        for (int y = -1; y <= 1; y++) {
            matrix[x + 1][y + 1] = (float) (Math.exp(-(x * x + y * y) / (2 * sigma * sigma)) / (2 * Math.PI * sigma * sigma));
            sum += matrix[x + 1][y + 1];
        }
    }
    // Normalize so the weights sum to 1 and brightness is preserved.
    for (int x = 0; x < kernelWidth; x++)
        for (int y = 0; y < kernelHeight; y++)
            matrix[x][y] /= sum;
    // Work on gray intensities, not the packed ARGB ints; the image is
    // already grayscale, so the low byte holds the value. The buffer is
    // row-major: height rows of width pixels.
    float darray[][] = new float[height][width];
    for (int row = 0; row < height; row++)
        for (int col = 0; col < width; col++)
            darray[row][col] = rgb[row * width + col] & 0xff;
    // Convolve: each output pixel is the weighted sum of its 3x3
    // neighborhood. (Multiplying neighbors in place, as the earlier version
    // did, destroys the image instead of blurring it.)
    for (int row = 1; row < height - 1; row++) {
        for (int col = 1; col < width - 1; col++) {
            float acc = 0;
            for (int ky = -1; ky <= 1; ky++)
                for (int kx = -1; kx <= 1; kx++)
                    acc += darray[row + ky][col + kx] * matrix[kx + 1][ky + 1];
            int gray = Math.min(255, Math.max(0, Math.round(acc)));
            rgb[row * width + col] = 0xFF000000 | (gray << 16) | (gray << 8) | gray;
        }
    }
}
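
For what it's worth, no intermediate matrix type is needed for the title question: Bitmap.createBitmap(int[], width, height, Config) accepts packed ARGB ints, so a float intensity array only has to be rounded and packed. A minimal sketch (the helper name is illustrative; values are assumed to lie in 0..255):

public static Bitmap floatsToBitmap(float[] values, int width, int height) {
    int[] pixels = new int[width * height];
    for (int i = 0; i < pixels.length; i++) {
        // Clamp and round each float to an 8-bit gray level.
        int g = Math.min(255, Math.max(0, Math.round(values[i])));
        pixels[i] = 0xFF000000 | (g << 16) | (g << 8) | g;
    }
    return Bitmap.createBitmap(pixels, width, height, Bitmap.Config.ARGB_8888);
}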

Related

How do I write rgb2yuv based on the existing yuv2rgb method?

I took over an Android project which already has a yuv2rgb method. Now I need to write the corresponding rgb2yuv method. Please help me, thank you!
public static int[] yuv2rgb(byte[] pYUV, int width, int height) {
    int[] pRGB = new int[width * height];
    int i, j, yp;
    int hfWidth = width >> 1;
    int size = width * height;
    int qtrSize = size >> 2;
    for (i = 0, yp = 0; i < height; i++) {
        int uvp = size + (i >> 1) * hfWidth, u = 0, v = 0;
        for (j = 0; j < width; j++, yp++) {
            int y = (0xff & pYUV[yp]) - 16;
            if ((j & 1) == 0) {
                u = (0xff & pYUV[uvp + (j >> 1)]) - 128;
                v = (0xff & pYUV[uvp + qtrSize + (j >> 1)]) - 128;
            }
            int y1192 = 1192 * y;
            int r = (y1192 + 1634 * v);
            int g = (y1192 - 833 * v - 400 * u);
            int b = (y1192 + 2066 * u);
            if (r < 0) r = 0;
            else if (r > 262143) r = 262143;
            if (g < 0) g = 0;
            else if (g > 262143) g = 262143;
            if (b < 0) b = 0;
            else if (b > 262143) b = 262143;
            pRGB[i * width + j] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
        }
    }
    return pRGB;
}
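
For orientation, the constants in yuv2rgb are the standard BT.601 limited-range conversion scaled by 1024 (2^10), which is why results are clamped to 262143 = (256 << 10) - 1 and shifted back down at the end:

R = 1.164 * (Y - 16) + 1.596 * (V - 128)                      -> 1192, 1634
G = 1.164 * (Y - 16) - 0.813 * (V - 128) - 0.391 * (U - 128)  -> 1192, 833, 400
B = 1.164 * (Y - 16) + 2.018 * (U - 128)                      -> 1192, 2066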
My colleague helped me find a way, and it works!
public static byte[] colorconvertRGB_IYUV_I420(int[] aRGB, int width, int height) {
    final int frameSize = width * height;
    final int chromasize = frameSize / 4;
    int yIndex = 0;
    int uIndex = frameSize;
    int vIndex = frameSize + chromasize;
    byte[] yuv = new byte[width * height * 3 / 2];
    int a, R, G, B, Y, U, V;
    int index = 0;
    for (int j = 0; j < height; j++) {
        for (int i = 0; i < width; i++) {
            //a = (aRGB[index] & 0xff000000) >> 24; // not using it right now
            R = (aRGB[index] & 0xff0000) >> 16;
            G = (aRGB[index] & 0xff00) >> 8;
            B = (aRGB[index] & 0xff);
            Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
            U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
            V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;
            yuv[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
            // Subsample chroma 2x2: one U and one V per even row and even
            // column (i % 2, rather than index % 2, also works for odd widths).
            if (j % 2 == 0 && i % 2 == 0) {
                yuv[uIndex++] = (byte) ((U < 0) ? 0 : ((U > 255) ? 255 : U));
                yuv[vIndex++] = (byte) ((V < 0) ? 0 : ((V > 255) ? 255 : V));
            }
            index++;
        }
    }
    return yuv;
}
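
A quick sanity check for the pair is a round trip (a sketch; yuvFrame is assumed to be an I420 frame of the given size, and the result will be close but not bit-exact, since both directions round and clamp):

// Round trip: I420 -> RGB -> I420 -> RGB should look identical.
int[] rgb = yuv2rgb(yuvFrame, width, height);
byte[] yuvAgain = colorconvertRGB_IYUV_I420(rgb, width, height);
int[] rgbAgain = yuv2rgb(yuvAgain, width, height);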

Convert YV12 (yuv420p) to JPEG / Bitmap

In onPreviewFrame using android.hardware.Camera, if I use the default NV21 format, I can use YuvImage to compress to JPEG, which works great. If I try to change the format using setPreviewFormat(ImageFormat.YV12), it no longer works, as YuvImage does not support the YV12 format. I've found only one solution somewhere, for converting a Bitmap to YV12, but I want to do the opposite and get a JPEG out of these bytes. Is there a library to do this?
If YUV420 to JPEG is what you're looking for then,
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // pixels is an int[previewSize.width * previewSize.height] allocated elsewhere
    decodeYUV420(pixels, data, previewSize.width, previewSize.height);
    mBitmap = Bitmap.createBitmap(pixels, previewSize.width, previewSize.height, Config.ARGB_8888);
    mBitmap.compress(CompressFormat.JPEG, 25, out);
    .......
where the decodeYUV420 method goes as follows:
public void decodeYUV420(int[] rgb, byte[] yuv420, int width, int height) {
    final int frameSize = width * height;
    for (int j = 0, yp = 0; j < height; j++) {
        int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
        for (int i = 0; i < width; i++, yp++) {
            int y = (0xff & ((int) yuv420[yp])) - 16;
            if (y < 0) y = 0;
            if ((i & 1) == 0) {
                v = (0xff & yuv420[uvp++]) - 128;
                u = (0xff & yuv420[uvp++]) - 128;
            }
            int y1192 = 1192 * y;
            int r = (y1192 + 1634 * v);
            int g = (y1192 - 833 * v - 400 * u);
            int b = (y1192 + 2066 * u);
            if (r < 0) r = 0; else if (r > 262143) r = 262143;
            if (g < 0) g = 0; else if (g > 262143) g = 262143;
            if (b < 0) b = 0; else if (b > 262143) b = 262143;
            rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
        }
    }
}
I've created a similar project on GitHub, check here, and the code implementation here.
And yes, it works!
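
As a footnote on the original YV12 question: YuvImage does accept NV21, and YV12 differs from NV21 only in plane layout (planar Y, then V, then U, versus Y plus interleaved V/U), so one route is a simple repack followed by YuvImage.compressToJpeg. A sketch, assuming an unpadded buffer (stride == width; note Android's YV12 may pad rows to 16-byte alignment):

// Repack planar YV12 (Y, then V, then U) as NV21 (Y, then interleaved V/U).
public static byte[] yv12ToNv21(byte[] yv12, int width, int height) {
    final int frameSize = width * height;
    final int quarter = frameSize / 4;
    byte[] nv21 = new byte[frameSize + 2 * quarter];
    System.arraycopy(yv12, 0, nv21, 0, frameSize);                    // Y plane as-is
    for (int i = 0; i < quarter; i++) {
        nv21[frameSize + 2 * i] = yv12[frameSize + i];                // V
        nv21[frameSize + 2 * i + 1] = yv12[frameSize + quarter + i];  // U
    }
    return nv21;
}
// Usage:
// YuvImage img = new YuvImage(yv12ToNv21(data, w, h), ImageFormat.NV21, w, h, null);
// img.compressToJpeg(new Rect(0, 0, w, h), 80, outputStream);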

How to detect average pixel intensity from live camera preview?

I am building a scanner app, and trying to determine the "preview quality" from the preview callback of the camera. I want to customize the camera's AUTO_FLASH_MODE where it will be turned on if the environment is too dark.
How can I detect whether a high proportion of the pixels are dark? That would mean the preview is dark and I therefore need to turn on the camera's flash light.
Either find out how to access pixel values of your image and calculate the average intensity yourself or use any image processing library to do so.
Dark pixels have low values, bright pixels have high values.
You want to sum all red, green and blue values and divide by three times your pixel count.
Define a threshold for when to turn on the flash, but keep in mind that you have to get a new exposure time then.
Prefer flash over exposure time increase as long exposure times yield higher image noise.
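
Since the default preview format (NV21) stores luminance as its first width * height bytes, the average brightness can also be read straight off the preview buffer, with no YUV-to-RGB decode or Bitmap at all. A minimal sketch (the threshold value is illustrative):

// Average luma straight from the NV21 preview buffer.
private static int averageLuma(byte[] nv21, int width, int height) {
    final int frameSize = width * height;
    long sum = 0;
    for (int i = 0; i < frameSize; i++) {
        sum += nv21[i] & 0xff; // bytes are signed in Java; mask to 0..255
    }
    return (int) (sum / frameSize);
}
// e.g. inside onPreviewFrame: if (averageLuma(data, w, h) < 40) setTorch(true);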
I tried decoding the frame to a Bitmap and averaging its pixels, as below, but I think it spends unnecessary time processing the bitmap just to get an average color:
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    Size cameraResolution = resolution;
    PreviewCallback callback = this.callback;
    if (cameraResolution != null && callback != null) {
        int format = camera.getParameters().getPreviewFormat();
        SourceData source = new SourceData(data, cameraResolution.width, cameraResolution.height, format, getCameraRotation());
        callback.onPreview(source);
        final int[] rgb = decodeYUV420SP(data, cameraResolution.width, cameraResolution.height);
        //Bitmap bmp = decodeBitmap(source.getData());
        Bitmap bmp = Bitmap.createBitmap(rgb, cameraResolution.width, cameraResolution.height, Bitmap.Config.ARGB_8888);
        if (bmp != null) {
            //bmp = decodeBitmap(source.getData());
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            // bmp.compress(Bitmap.CompressFormat.JPEG, 70, bytes);
            Bitmap resizebitmap = Bitmap.createBitmap(bmp, bmp.getWidth() / 2, bmp.getHeight() / 2, 60, 60);
            int color = getAverageColor(resizebitmap);
            Log.i("Color Int", color + "");
            // int color = resizebitmap.getPixel(resizebitmap.getWidth()/2, resizebitmap.getHeight()/2);
            String strColor = String.format("#%06X", 0xFFFFFF & color);
            //String colorname = sColorNameMap.get(strColor);
            Log.d("strColor", strColor);
            Log.i("strColor", color + "");
            if (!mIsOn) {
                if (color <= -16777216) { // minimum color code (full dark)
                    mIsOn = true;
                    setTorch(true);
                    Log.d("Yahooooo", "" + color);
                }
            }
            Log.i("Pixel Value", "Top Left pixel: " + Integer.toHexString(color));
        }
    } else {
        Log.d(TAG, "Got preview callback, but no handler or resolution available");
    }
}
private int[] decodeYUV420SP(byte[] yuv420sp, int width, int height) {
    final int frameSize = width * height;
    int rgb[] = new int[width * height];
    for (int j = 0, yp = 0; j < height; j++) {
        int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
        for (int i = 0; i < width; i++, yp++) {
            int y = (0xff & ((int) yuv420sp[yp])) - 16;
            if (y < 0) y = 0;
            if ((i & 1) == 0) {
                v = (0xff & yuv420sp[uvp++]) - 128;
                u = (0xff & yuv420sp[uvp++]) - 128;
            }
            int y1192 = 1192 * y;
            int r = (y1192 + 1634 * v);
            int g = (y1192 - 833 * v - 400 * u);
            int b = (y1192 + 2066 * u);
            if (r < 0) r = 0; else if (r > 262143) r = 262143;
            if (g < 0) g = 0; else if (g > 262143) g = 262143;
            if (b < 0) b = 0; else if (b > 262143) b = 262143;
            rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
        }
    }
    return rgb;
}
private int getAverageColor(Bitmap bitmap) {
    int redBucket = 0;
    int greenBucket = 0;
    int blueBucket = 0;
    int pixelCount = 0;
    for (int y = 0; y < bitmap.getHeight(); y++) {
        for (int x = 0; x < bitmap.getWidth(); x++) {
            int c = bitmap.getPixel(x, y);
            pixelCount++;
            redBucket += Color.red(c);
            greenBucket += Color.green(c);
            blueBucket += Color.blue(c);
            // does alpha matter?
        }
    }
    return Color.rgb(redBucket / pixelCount, greenBucket / pixelCount, blueBucket / pixelCount);
}
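
A small aside on getAverageColor: per-pixel getPixel() calls are slow; one bulk getPixels() copy does the same job much faster. A sketch over the same Bitmap:

// Same average, but with one bulk getPixels() call instead of
// width * height individual getPixel() calls.
private int getAverageColorFast(Bitmap bitmap) {
    int w = bitmap.getWidth(), h = bitmap.getHeight();
    int[] px = new int[w * h];
    bitmap.getPixels(px, 0, w, 0, 0, w, h);
    long r = 0, g = 0, b = 0;
    for (int c : px) {
        r += Color.red(c);
        g += Color.green(c);
        b += Color.blue(c);
    }
    int n = px.length;
    return Color.rgb((int) (r / n), (int) (g / n), (int) (b / n));
}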

How can I read the preview frames from the actual open-source Android Camera?

How can I read the preview frames from the actual camera source code? I am trying to modify the source code of the Android camera application to read the preview frames. Can anyone help me with this?
You shouldn't have to change the actual source code for reading frames. Implementing the Camera.PreviewCallback interface should suffice. It returns raw data from the camera.
Before messing with the source of the camera app, try the example here:
CameraPreview
Then implement the Camera.PreviewCallback.
The captured frame arrives in YUV420SP, so you have to convert it to RGB in order to build a colored bitmap and show it on screen.
Like this:
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    int imageWidth = camera.getParameters().getPreviewSize().width;
    int imageHeight = camera.getParameters().getPreviewSize().height;
    int RGBData[] = new int[imageWidth * imageHeight];
    byte[] mYUVData = new byte[data.length];
    System.arraycopy(data, 0, mYUVData, 0, data.length);
    decodeYUV420SP(RGBData, mYUVData, imageWidth, imageHeight);
    Bitmap bitmap = Bitmap.createBitmap(imageWidth, imageHeight, Bitmap.Config.ARGB_8888);
    bitmap.setPixels(RGBData, 0, imageWidth, 0, 0, imageWidth, imageHeight);
}
static public void decodeYUV420SP(int[] rgb, byte[] yuv420sp, int width, int height) {
    final int frameSize = width * height;
    for (int j = 0, yp = 0; j < height; j++) {
        int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
        for (int i = 0; i < width; i++, yp++) {
            int y = (0xff & ((int) yuv420sp[yp])) - 16;
            if (y < 0) y = 0;
            if ((i & 1) == 0) {
                v = (0xff & yuv420sp[uvp++]) - 128;
                u = (0xff & yuv420sp[uvp++]) - 128;
            }
            int y1192 = 1192 * y;
            int r = (y1192 + 1634 * v);
            int g = (y1192 - 833 * v - 400 * u);
            int b = (y1192 + 2066 * u);
            if (r < 0) r = 0; else if (r > 262143) r = 262143;
            if (g < 0) g = 0; else if (g > 262143) g = 262143;
            if (b < 0) b = 0; else if (b > 262143) b = 262143;
            rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
        }
    }
}
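
For completeness, the callback above still has to be registered with the camera. A minimal sketch, assuming mCamera is an opened android.hardware.Camera whose preview surface is already set:

mCamera.setPreviewCallback(new Camera.PreviewCallback() {
    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        // data arrives in the preview format (NV21 by default);
        // decode it with decodeYUV420SP(...) as shown above.
    }
});
mCamera.startPreview();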

YUV decode function error or hardware problem?

I got the two YUV-to-RGB functions below (functions 1 and 2, from Stack Overflow),
but the result is wrong, like this: http://163.18.62.32/device.jpg
I don't understand what is wrong in the steps.
My device is a Moto Milestone with 2.1 update 1.
function 1
public int[] decodeYUV420SP(byte[] yuv420sp, int width, int height) {
    final int frameSize = width * height;
    int rgb[] = new int[width * height];
    for (int j = 0, yp = 0; j < height; j++) {
        int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
        for (int i = 0; i < width; i++, yp++) {
            int y = (0xff & ((int) yuv420sp[yp])) - 16;
            if (y < 0) y = 0;
            if ((i & 1) == 0) {
                v = (0xff & yuv420sp[uvp++]) - 128;
                u = (0xff & yuv420sp[uvp++]) - 128;
            }
            int y1192 = 1192 * y;
            int r = (y1192 + 1634 * v);
            int g = (y1192 - 833 * v - 400 * u);
            int b = (y1192 + 2066 * u);
            if (r < 0) r = 0; else if (r > 262143) r = 262143;
            if (g < 0) g = 0; else if (g > 262143) g = 262143;
            if (b < 0) b = 0; else if (b > 262143) b = 262143;
            rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
        }
    }
    return rgb;
}
function 2
public void decodeYUV(int[] out, byte[] fg, int width, int height)
        throws NullPointerException, IllegalArgumentException {
    int sz = width * height;
    if (out == null)
        throw new NullPointerException("buffer out is null");
    if (out.length < sz)
        throw new IllegalArgumentException("buffer out size " + out.length + " < minimum " + sz);
    if (fg == null)
        throw new NullPointerException("buffer 'fg' is null");
    if (fg.length < sz * 3 / 2)
        throw new IllegalArgumentException("buffer fg size " + fg.length + " < minimum " + sz * 3 / 2);
    int i, j;
    int Y, Cr = 0, Cb = 0;
    for (j = 0; j < height; j++) {
        int pixPtr = j * width;
        final int jDiv2 = j >> 1;
        for (i = 0; i < width; i++) {
            Y = fg[pixPtr];
            if (Y < 0)
                Y += 255;
            if ((i & 0x1) != 1) {
                final int cOff = sz + jDiv2 * width + (i >> 1) * 2;
                Cb = fg[cOff];
                if (Cb < 0)
                    Cb += 127;
                else
                    Cb -= 128;
                Cr = fg[cOff + 1];
                if (Cr < 0)
                    Cr += 127;
                else
                    Cr -= 128;
            }
            int R = Y + Cr + (Cr >> 2) + (Cr >> 3) + (Cr >> 5);
            if (R < 0)
                R = 0;
            else if (R > 255)
                R = 255;
            int G = Y - (Cb >> 2) + (Cb >> 4) + (Cb >> 5) - (Cr >> 1) + (Cr >> 3) + (Cr >> 4) + (Cr >> 5);
            if (G < 0)
                G = 0;
            else if (G > 255)
                G = 255;
            int B = Y + Cb + (Cb >> 1) + (Cb >> 2) + (Cb >> 6);
            if (B < 0)
                B = 0;
            else if (B > 255)
                B = 255;
            // Note: this packs B into the high byte (ABGR order), unlike
            // function 1, which produces ARGB.
            out[pixPtr++] = 0xff000000 + (B << 16) + (G << 8) + R;
        }
    }
}
and I use them like this:
function 1
int[] rgbBuf =decodeYUV420SP(_data,height,width);
function 2
int[] rgbBuf = new int[height*width];
decodeYUV(rgbBuf,_height,.width);
then convert to a Bitmap and show it:
Bitmap bm = Bitmap.createBitmap(rgbBuf,width,height);
View01.setImageBitmap(bm);
As far as I can see, there is no Bitmap.createBitmap(...) overload with the argument set (int[] pixels, width, height); you also need to pass a Bitmap.Config.
Also, the signatures of both decode methods take width first, then height, but your calls appear to have the parameters flipped.
public int[] decodeYUV420SP( byte[] yuv420sp, int width, int height)
but you call:
decodeYUV420SP(_data,height,width);
and
public void decodeYUV(int[] out, byte[] fg, int width, int height)
but you call:
decodeYUV(rgbBuf,_height,.width);
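
Put together, corrected calls would look something like this (a sketch, keeping the question's variable names; note that createBitmap also needs a Bitmap.Config):

int[] rgbBuf = decodeYUV420SP(_data, width, height); // width first, then height
// or, for function 2:
int[] out = new int[width * height];
decodeYUV(out, _data, width, height);
Bitmap bm = Bitmap.createBitmap(rgbBuf, width, height, Bitmap.Config.ARGB_8888);
View01.setImageBitmap(bm);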
