I need help fixing these lines of code to find the brightest pixel, get its (x, y) coordinates, and draw a bitmap over it.
I have a live Android camera preview and want to scan the frame. My processing code runs inside onPreviewFrame: a for loop compares a preset float brightestValue against every value in pixels[]. At the moment my canvas statically draws the circle in the top-left corner of the screen.
I expect the circle's position to change dynamically on the camera SurfaceView as the camera changes direction.
Thank you.
public class CameraDemo extends Activity {
private static final String TAG = "CameraDemo";
Preview preview;
int brightestX = 0; // X-coordinate of the brightest video pixel
int brightestY = 0; // Y-coordinate of the brightest video pixel
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
Paint p = new Paint ();
p.setColor(Color.WHITE);
Bitmap bg = Bitmap.createBitmap(480,800,Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(bg);
canvas.drawCircle(brightestX,brightestY,20,p);
preview = new Preview(this);
preview.setBackground(new BitmapDrawable(bg));
((FrameLayout) findViewById(R.id.preview)).addView(preview);
}
class Preview extends SurfaceView implements SurfaceHolder.Callback, Camera.PreviewCallback {
SurfaceHolder mHolder;
Camera mCamera;
private Camera.Parameters parameters;
private Camera.Size previewSize;
private int[] pixels;
Preview(Context context) {
super(context);
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, acquire the camera and tell it where
// to draw.
mCamera = Camera.open();
try {
mCamera.startPreview();
mCamera.setPreviewDisplay(holder);
mCamera.setDisplayOrientation(90);
mCamera.setPreviewCallbackWithBuffer(this);
parameters = mCamera.getParameters();
previewSize = parameters.getPreviewSize();
} catch (IOException exception) {
mCamera.release();
mCamera = null;
// TODO: add more exception handling logic here
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
mCamera.release();
mCamera = null;
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
parameters.setPreviewSize(w, h);
//set the camera's settings
mCamera.setParameters(parameters);
mCamera.startPreview();
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
//transforms NV21 pixel data into RGB pixels
decodeYUV420SP(pixels, data, previewSize.width, previewSize.height);
float brightestValue = 0; // Brightness of the brightest video pixel
for (int y = 0; y < previewSize.height ; y++) {
for (int x = 0; x < previewSize.width; x++) {
// Get the color stored in the pixel
float pixelBrightness = pixels[previewSize.width*previewSize.height];
// If that value is brighter than any previous, then store the
// brightness of that pixel, as well as its (x,y) location
if (pixelBrightness > brightestValue) {
brightestValue = pixelBrightness;
brightestY = y;
brightestX = x;
}
}
}
}
}
//Method from Ketai project! Not mine! See below...
void decodeYUV420SP(int[] rgb, byte[] yuv420sp, int width, int height) {
final int frameSize = width * height;
for (int j = 0, yp = 0; j < height; j++) {
int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
for (int i = 0; i < width; i++, yp++) {
int y = (0xff & ((int) yuv420sp[yp])) - 16;
if (y < 0)
y = 0;
if ((i & 1) == 0) {
v = (0xff & yuv420sp[uvp++]) - 128;
u = (0xff & yuv420sp[uvp++]) - 128;
}
int y1192 = 1192 * y;
int r = (y1192 + 1634 * v);
int g = (y1192 - 833 * v - 400 * u);
int b = (y1192 + 2066 * u);
if (r < 0) r = 0; else if (r > 262143)
r = 262143;
if (g < 0) g = 0; else if (g > 262143)
g = 262143;
if (b < 0) b = 0; else if (b > 262143)
b = 262143;
rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
}
}
}
}
You need to call canvas.drawCircle(brightestX, brightestY, 20, p); again after updating the position in the onPreviewFrame method; drawing it once in onCreate only places the circle at the initial (0, 0). There is also a second bug: pixels[previewSize.width*previewSize.height] reads the same (out-of-range) index on every iteration instead of the current pixel; it should be pixels[y * previewSize.width + x]. With that fixed, the loop body looks like this:
if (pixelBrightness > brightestValue) {
brightestValue = pixelBrightness;
brightestY = y;
brightestX = x;
drawCircleOnNewPosition() // method that calls canvas.drawCircle with the new coordinates
}
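For reference, a minimal sketch of the corrected loop, assuming pixels has been allocated as new int[previewSize.width * previewSize.height] before decoding (the posted code never allocates it), and approximating brightness as the sum of the decoded R, G and B channels (one simple choice):
float brightestValue = 0;
for (int y = 0; y < previewSize.height; y++) {
    for (int x = 0; x < previewSize.width; x++) {
        int pixel = pixels[y * previewSize.width + x]; // the current pixel, not a fixed index
        // approximate brightness as the sum of the R, G and B channels
        float pixelBrightness = ((pixel >> 16) & 0xff) + ((pixel >> 8) & 0xff) + (pixel & 0xff);
        if (pixelBrightness > brightestValue) {
            brightestValue = pixelBrightness;
            brightestX = x;
            brightestY = y;
        }
    }
}
// then trigger a redraw of an overlay View whose onDraw draws the circle at
// (brightestX, brightestY), e.g. via postInvalidate()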
This question has been asked many times already, but I cannot seem to find exactly what I want.
I am trying to create a camera app that logs the YUV or RGB value of the color I point the camera at. The values must be in the 0..255 range for RGB, or the corresponding YUV color format. I can manage the conversion between them, as there are many such examples on Stack Overflow. However, I cannot work out how to store the values in three separate variables and display them in the log.
So far I have managed to get:
package com.example.virus.bpreader;
public class CaptureVideo extends SurfaceView implements SurfaceHolder.Callback, Camera.PreviewCallback {
private SurfaceHolder mHolder;
private Camera mCamera;
private int[] pixels;
public CaptureVideo(Context context, Camera cameraManager) {
super(context);
mCamera = cameraManager;
mCamera.setDisplayOrientation(90);
//get holder and set the class as callback
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
//when the surface is created, let the camera start the preview
try {
mCamera.setPreviewDisplay(holder);
mCamera.startPreview();
mCamera.cancelAutoFocus();
Camera.Parameters params = mCamera.getParameters();
//get fps
params.getSupportedPreviewFpsRange();
//get resolution
params.getSupportedPreviewSizes();
//stop auto exposure
params.setAutoExposureLock(false);
// Check what resolutions are supported by your camera
List<Camera.Size> sizes = params.getSupportedPictureSizes();
// Iterate through all available resolutions and choose one
for (Camera.Size size : sizes) {
Log.i("Resolution", "Available resolution: " + size.width + " " + size.height);
}
//set resolution at 320*240
params.setPreviewSize(320,240);
//set frame rate at 10 fps
List<int[]> frameRates = params.getSupportedPreviewFpsRange();
int last = frameRates.size() - 1;
params.setPreviewFpsRange(10000, 10000);
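// note: setParameters() rejects an fps range that is not one of the entries in
// getSupportedPreviewFpsRange(), so the hard-coded (10000, 10000) only works on
// devices that report a fixed 10 fps range; frameRates.get(last) would be the
// safest supported choice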
//set Image Format
//params.setPreviewFormat(ImageFormat.NV21);
mCamera.setParameters(params);
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
//need to stop the preview and restart on orientation change
if (mHolder.getSurface() == null) {
return;
}
//stop preview
mCamera.stopPreview();
//start again
try {
mCamera.setPreviewDisplay(holder);
mCamera.startPreview();
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
//stop and release
mCamera.stopPreview();
mCamera.release();
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
int frameHeight = camera.getParameters().getPreviewSize().height;
int frameWidth = camera.getParameters().getPreviewSize().width;
// number of pixels
int rgb[] = new int[frameWidth * frameHeight];
// transforms NV21 pixel data into RGB pixels
int[] myPixels = decodeYUV420SP(rgb, data, frameWidth, frameHeight);
Log.d("myPixel", String.valueOf(myPixels.length));
}
//yuv decode
int[] decodeYUV420SP(int[] rgb, byte[] yuv420sp, int width, int height) {
final int frameSize = width * height;
int r, g, b, y1192, y, i, uvp, u, v;
for (int j = 0, yp = 0; j < height; j++) {
uvp = frameSize + (j >> 1) * width;
u = 0;
v = 0;
for (i = 0; i < width; i++, yp++) {
y = (0xff & ((int) yuv420sp[yp])) - 16;
if (y < 0)
y = 0;
if ((i & 1) == 0) {
// the answer above is wrong in the following lines; just swap u and v
u = (0xff & yuv420sp[uvp++]) - 128;
v = (0xff & yuv420sp[uvp++]) - 128;
}
y1192 = 1192 * y;
r = (y1192 + 1634 * v);
g = (y1192 - 833 * v - 400 * u);
b = (y1192 + 2066 * u);
r = Math.max(0, Math.min(r, 262143));
g = Math.max(0, Math.min(g, 262143));
b = Math.max(0, Math.min(b, 262143));
// combine RGB
rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
}
}
return rgb;
}
}
Here the decode method should give RGB pixels packed in hex format (I am quite sure it is right, as most of the answers use the same code). The problem I am facing is that I am not sure how to call it inside the onPreviewFrame method so that it displays the R, G and B values separately in the log.
N.B. Like I said, I have seen a lot of similar questions but could not find a solution in them. I do not want to store a file (image/video), as I only need the RGB/YUV values from the live camera preview when I point the camera at some color.
I need the RGB or YUV values because I want to plot a graph of them against time.
Any help will be much appreciated.
Well if the problem is to get separate values of R, G And B from the RGB array, check this SO post here.
Hope it helps!
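In short, each int that decodeYUV420SP writes is packed as 0xAARRGGBB, so the channels separate with shifts and masks. A minimal sketch of logging them from onPreviewFrame, using the myPixels array from the question (the index 0 is just an example; any y * frameWidth + x works):
int pixel = myPixels[0];
int r = (pixel >> 16) & 0xff; // 0..255
int g = (pixel >> 8) & 0xff;  // 0..255
int b = pixel & 0xff;         // 0..255
Log.d("RGB", "r=" + r + " g=" + g + " b=" + b);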
I'm developing an Android app that gets the color of the current pixel on the screen.
The application concept is very simple: the CameraPreview class implements the SurfaceHolder.Callback interface, and onSurfaceChanged gets the color.
My problem is that the code works just fine on the Samsung Galaxy S6, but it doesn't work on the Nexus 6P: the surfaceCreated and surfaceChanged callbacks are never called.
Here is a sample of my code:
public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback {
private SurfaceHolder mHolder;
private Camera mCamera;
String TAG = "TAG";
Context context;
OnColorDetected onColorDetected;
ColorDetectionPresenter presenter;
Camera.Size previewSize;
int midPixxel;
public CameraPreview(Context context, Camera camera, ColorDetectionPresenter presenter) {
super(context);
mCamera = camera;
this.context = context;
this.presenter = presenter;
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = getHolder();
mHolder.addCallback(this);
// deprecated setting, but required on Android versions prior to 3.0
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void setOnColorDetected(OnColorDetected onColorDetected) {
this.onColorDetected = onColorDetected;
}
public void surfaceDestroyed(SurfaceHolder holder) {
// empty. Take care of releasing the Camera preview in your activity.
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, now tell the camera where to draw the preview.
try {
Log.d("SURFACE_STATE","created");
mCamera.setPreviewDisplay(holder);
mCamera.setPreviewCallback(new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
int frameHeight = camera.getParameters().getPreviewSize().height;
int frameWidth = camera.getParameters().getPreviewSize().width;
// number of pixels
int rgb[] = new int[frameWidth * frameHeight];
// transforms NV21 pixel data into RGB pixels
int[] myPixels = decodeYUV420SP(rgb, data, frameWidth, frameHeight);
for (int i = 0; i < myPixels.length; i++) {
//Toast.makeText(context, MainActivity.getBestMatchingColorName(myPixels[i]), Toast.LENGTH_SHORT).show();
}
}
});
mCamera.startPreview();
} catch (IOException e) {
Log.d(TAG, "Error setting camera preview: " + e.getMessage());
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
// If your preview can change or rotate, take care of those events here.
// Make sure to stop the preview before resizing or reformatting it.
Log.d("SURFACE_STATE","changed");
if (mHolder.getSurface() == null) {
// preview surface does not exist
return;
}
// stop preview before making changes
try {
mCamera.stopPreview();
} catch (Exception e) {
// ignore: tried to stop a non-existent preview
}
// set preview size and make any resize, rotate or
// reformatting changes here
// start preview with new settings
try {
Camera.Parameters parameters = mCamera.getParameters();
mCamera.setPreviewDisplay(mHolder);
List<Camera.Size> previewSizes = parameters.getSupportedPreviewSizes();
if (previewSizes == null)
previewSizes = parameters.getSupportedPictureSizes();
// You need to choose the most appropriate previewSize for your app
previewSize = previewSizes.get(0);
for(int i=0;i<previewSizes.size();i++){
if(previewSizes.get(i).width>previewSize.width)
previewSize=previewSizes.get(i);
}
// .... select one of previewSizes here
cameraSetup(width, height);
parameters.setPictureSize(previewSize.width, previewSize.height);
mCamera.setParameters(parameters);
mCamera.setPreviewCallback(new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
int frameHeight = camera.getParameters().getPreviewSize().height;
int frameWidth = camera.getParameters().getPreviewSize().width;
// number of pixels
int rgb[] = new int[frameWidth * frameHeight];
// transforms NV21 pixel data into RGB pixels
int[] myPixels = decodeYUV420SP(rgb, data, frameWidth, frameHeight);
String colorName = presenter.getBestMatchingColorName(myPixels[myPixels.length / 2]);
onColorDetected.colorDetected(colorName);
}
});
mCamera.startPreview();
} catch (Exception e) {
Log.d(TAG, "Error starting camera preview: " + e.getMessage());
}
}
int[] decodeYUV420SP(int[] rgb, byte[] yuv420sp, int width, int height) {
// Pulled directly from:
// http://ketai.googlecode.com/svn/trunk/ketai/src/edu/uic/ketai/inputService/KetaiCamera.java
final int frameSize = width * height;
for (int j = 0, yp = 0; j < height; j++) {
int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
for (int i = 0; i < width; i++, yp++) {
int y = (0xff & ((int) yuv420sp[yp])) - 16;
if (y < 0)
y = 0;
if ((i & 1) == 0) {
v = (0xff & yuv420sp[uvp++]) - 128;
u = (0xff & yuv420sp[uvp++]) - 128;
}
int y1192 = 1192 * y;
int r = (y1192 + 1634 * v);
int g = (y1192 - 833 * v - 400 * u);
int b = (y1192 + 2066 * u);
if (r < 0)
r = 0;
else if (r > 262143)
r = 262143;
if (g < 0)
g = 0;
else if (g > 262143)
g = 262143;
if (b < 0)
b = 0;
else if (b > 262143)
b = 262143;
rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
}
}
return rgb;
}
private void cameraSetup(int w, int h) {
// set the camera parameters, including the preview size
FrameLayout.LayoutParams lp = (FrameLayout.LayoutParams) getLayoutParams();
double cameraAspectRatio = ((double) previewSize.width) / previewSize.height;
if (((double) h) / w > cameraAspectRatio) {
lp.width = (int) (h / cameraAspectRatio + 0.5);
lp.height = h;
} else {
lp.height = (int) (w * cameraAspectRatio + 0.5);
lp.width = w;
lp.topMargin = (h - lp.height) / 2;
}
lp.gravity = Gravity.CENTER_HORIZONTAL | Gravity.TOP;
setLayoutParams(lp);
requestLayout();
}
}
And this is how I call it:
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
colorDetectionPresenter = new ColorDetectionPresenter();
camera = Camera.open();
cameraPreview = new CameraPreview(getActivity(), camera, colorDetectionPresenter);
params = camera.getParameters();
params.setPreviewFormat(ImageFormat.NV21);
camera.setDisplayOrientation(90);
siz = params.getSupportedPreviewSizes().get(0);
params.setPreviewSize(siz.width, siz.height);
}
I have tried initializing the camera preview in onCreate, onCreateView and onResume; none works.
Any suggestions?
It only doesn't work on my Nexus 6P.
I am writing an Android camera app.
The function I want to implement is a preview that preserves red on screen while the rest of the screen is grayscale,
like this picture.
I multiply the R, G and B components by the usual grayscale weights to get grayscale, and preserve the red part of the preview, after converting the raw data format to RGB.
But after I override the onDraw(Canvas canvas) method in my custom View, the result looks like a blue filter effect.
Can anybody give me a hint as to which step is wrong?
THANK YOU.
The code of my custom View is below
package com.example.macampreviewdemo;
public class CamPreviewDemoActivity extends Activity {
/** Called when the activity is first created. */
int h, w;
public String tag = "tag";
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Log.i(tag, "onCreate");
DisplayMetrics metrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metrics);
h = metrics.heightPixels;
w = metrics.widthPixels;
this.requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
Display display = ((WindowManager)
getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
setContentView(R.layout.main);
setRequestedOrientation(0);
ViewToDraw dtw = (ViewToDraw) findViewById(R.id.vtd);
CameraView cameraView = new CameraView(this, dtw, w, h);
((FrameLayout) findViewById(R.id.preview)).addView(cameraView);
}
}
And this:
package com.example.macampreviewdemo;
public class ViewToDraw extends View{
public String tag = "tag";
public byte[] image;
public boolean isCameraSet = false;
public int imgWidth, imgHeight;
Bitmap overlayBitmap;
Matrix matrix;
public ViewToDraw(Context context, AttributeSet attrs) {
super(context, attrs);
matrix = new Matrix();
}
public void cameraSet(){
isCameraSet = true;
}
public void putImage(byte[] img){
image = img;
}
@Override
protected void onDraw(Canvas canvas){
Log.i(tag, "onDraw() ");
int size = imgWidth * imgHeight;
int[] rgb = new int[imgWidth * imgHeight];
if(isCameraSet){
rgb = convertYUV420_NV21toARGB8888(image, imgWidth, imgHeight);
for (int k = 0; k < size; k++) {
if(Color.red(rgb[k]) == 255 &&
Color.green(rgb[k]) == 0 &&
Color.blue(rgb[k]) == 50){}
else{
rgb[k] = (int) (
(0.2126 * Color.red(rgb[k])) +
(0.7152 * Color.green(rgb[k])) +
(0.0722 * Color.blue(rgb[k]))
);
}
}
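// likely the bug behind the "blue filter" look: the luminance is stored as a bare
// int (0..255), which only fills the low (blue) byte and leaves alpha at 0; a gray
// pixel needs the value replicated into all three channels, e.g.
// rgb[k] = 0xff000000 | (lum << 16) | (lum << 8) | lum;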
Log.i("tag", "rgb length = " + rgb.length);
overlayBitmap =
Bitmap.createBitmap(rgb, 0, imgWidth,
imgWidth, imgHeight,
Bitmap.Config.RGB_565);
canvas.drawBitmap(overlayBitmap, matrix, null);
overlayBitmap.recycle();
}
}
static public void decodeYUV420SP(int[] rgb, byte[] yuv420sp, int width, int height) {
final int frameSize = width * height;
int rtmp, gtmp, btmp;
for (int j = 0, yp = 0; j < height; j++) {
int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
for (int i = 0; i < width; i++, yp++) {
int y = (0xff & ((int) yuv420sp[yp])) - 16;
if (y < 0)y = 0;
if ((i & 1) == 0) {
v = (0xff & yuv420sp[uvp++]) - 128;
u = (0xff & yuv420sp[uvp++]) - 128;
}
int y1192 = 1192 * y;
int r = (y1192 + 1634 * v);
int g = (y1192 - 833 * v - 400 * u);
int b = (y1192 + 2066 * u);
if (r < 0)r = 0;
else if (r > 262143)r = 262143;
if (g < 0)g = 0;
else if (g > 262143)g = 262143;
if (b < 0)b = 0;
else if (b > 262143)b = 262143;
rtmp = ((r << 6) & 0xff0000);
gtmp = ((g >> 2) & 0xff00);
btmp = ((b >> 10) & 0xff);
rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00)
| ((b >> 10) & 0xff);
}
}
}
int[] yuv420ToGrayScale(byte[] yuv420, int width, int height){
int size = width * height;
int y1, y2, y3, y4;
int[] pixel = new int[size];
for (int i = 0; i < size; i+=2) {
y1 = yuv420[i]&0xff;
y2 = yuv420[i + 1]&0xff;
y3 = yuv420[i + width]&0xff;
y4 = yuv420[i + width +1]&0xff;
pixel[i] = yuv420[i];
pixel[i + 1] = yuv420[i +1];
pixel[i + width ] = yuv420[width + i];
pixel[i + width + 1] = yuv420[i + width + 1];
if (i!=0 && (i+2)%width==0)
i+=width;
}
return pixel;
}
/**
* Converts YUV420 NV21 to ARGB8888
*
* @param data byte array in YUV420 NV21 format.
* @param width frame width in pixels
* @param height frame height in pixels
* @return an ARGB8888 int array, one packed ARGB value per pixel.
*/
public static int[] convertYUV420_NV21toARGB8888(byte [] data, int width, int height) {
int size = width*height;
int offset = size;
int[] pixels = new int[size];
int u, v, y1, y2, y3, y4;
// i along Y and the final pixels
// k along pixels U and V
for(int i=0, k=0; i < size; i+=2, k+=1) {
y1 = data[i ]&0xff;
y2 = data[i+1]&0xff;
y3 = data[width+i ]&0xff;
y4 = data[width+i+1]&0xff;
v = data[offset+k ]&0xff;
u = data[offset+k+1]&0xff;
v = v-128;
u = u-128;
pixels[i ] = convertYUVtoARGB(y1, u, v);
pixels[i+1] = convertYUVtoARGB(y2, u, v);
pixels[width+i ] = convertYUVtoARGB(y3, u, v);
pixels[width+i+1] = convertYUVtoARGB(y4, u, v);
if (i!=0 && (i+2)%width==0)
i+=width;
}
return pixels;
}
private static int convertYUVtoARGB(int y, int u, int v) {
int r,g,b;
r = y + (int)(1.402f*u);
g = y - (int)(0.344f*v + 0.714f*u);
b = y + (int)(1.772f*v);
r = r>255? 255 : r<0 ? 0 : r;
g = g>255? 255 : g<0 ? 0 : g;
b = b>255? 255 : b<0 ? 0 : b;
return 0xff000000 | (r<<16) | (g<<8) | b;
}
public static void myNV21ToRGB(int width, int height){
int yy, u, v;
int frame_size = width * height;
}
}
And this:
package com.example.macampreviewdemo;
public class CameraView extends SurfaceView implements SurfaceHolder.Callback{
public Camera mycamera;
List<Camera.Size> cameraSize;
private SurfaceHolder mHolder;
public ViewToDraw vtd;
int pickedH, pickedW;
int defaultH, defaultW;
public String tag = "tag";
public CameraView(Context context, ViewToDraw _vtd, int width, int height) {
super(context);
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
vtd = _vtd;
defaultH = height;
defaultW = width;
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
Log.i("tag"," surfaceCreated");
int i;
mycamera = Camera.open();
cameraSize = mycamera.getParameters().getSupportedPreviewSizes();
if(cameraSize != null){
// pick resolution
pickedH = defaultH;
pickedW = defaultW;
for(i=0;i<cameraSize.size();i++){
if(cameraSize.get(i).width < defaultW){
break;
}else{
pickedH = cameraSize.get(i).height;
pickedW = cameraSize.get(i).width;
}
}
}else{
Log.e("tag","null");
}
try {
mycamera.setPreviewDisplay(holder);
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
Log.i("tag","surfaceChanged");
Camera.Parameters parameters = mycamera.getParameters();
parameters.setPreviewSize(pickedW, pickedH);
mycamera.setParameters(parameters);
//create buffer
PixelFormat p = new PixelFormat();
PixelFormat.getPixelFormatInfo(parameters.getPreviewFormat(),p);
int bufSize = (pickedW*pickedH*p.bitsPerPixel)/8;
//add buffers
byte[] buffer = new byte[bufSize];
mycamera.addCallbackBuffer(buffer);
buffer = new byte[bufSize];
mycamera.addCallbackBuffer(buffer);
buffer = new byte[bufSize];
mycamera.addCallbackBuffer(buffer);
mycamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera camera) {
Log.i("tag", "onPreviewFrame");
Log.i("tag", "pickedH = " + pickedH);
Log.i("tag", "pickedW = " + pickedW);
vtd.putImage(data);
vtd.cameraSet();
vtd.imgHeight = pickedH;
vtd.imgWidth = pickedW;
vtd.invalidate();
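// caution: the next line hands the buffer back to the camera while ViewToDraw still
// holds a reference to it, so the frame may be overwritten before onDraw reads it;
// copying the bytes before requeueing the buffer would avoid that race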
mycamera.addCallbackBuffer(data);
}
});
mycamera.startPreview();
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i("tag", "surfaceDestroyed");
mycamera.setPreviewCallback(null);
mycamera.release();
mycamera = null;
}
}
And this:
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:paddingBottom="@dimen/activity_vertical_margin"
android:paddingLeft="@dimen/activity_horizontal_margin"
android:paddingRight="@dimen/activity_horizontal_margin"
android:paddingTop="@dimen/activity_vertical_margin"
tools:context=".CamPreviewDemoActivity" >
<FrameLayout android:id="@+id/FrameLayout01"
android:layout_height="fill_parent" android:layout_width="fill_parent">
<FrameLayout
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:id="@+id/preview">
</FrameLayout>
<com.example.macampreviewdemo.ViewToDraw
android:id="@+id/vtd"
android:layout_height="fill_parent"
android:layout_width="fill_parent"/>
</FrameLayout>
</RelativeLayout>
I have created a demo application which opens the camera. Now I want to get the color of the pixel the user touches on the live camera preview.
I have tried overriding onTouchEvent, and I succeeded in getting the position of the pixel in (x, y), but I am not getting the RGB color value from it: it always shows 0,0,0.
All suggestions are welcome, including any alternate way to achieve the same functionality (excluding OpenCV, because it requires installing the OpenCvManager apk to support my application).
Code:
public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {
private Camera camera;
private SurfaceHolder holder;
int[] myPixels;
public CameraPreview(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
}
public CameraPreview(Context context, AttributeSet attrs) {
super(context, attrs);
}
public CameraPreview(Context context) {
super(context);
}
public void init(Camera camera) {
this.camera = camera;
initSurfaceHolder();
}
@SuppressWarnings("deprecation") // needed for < 3.0
private void initSurfaceHolder() {
holder = getHolder();
holder.addCallback(this);
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
initCamera(holder);
}
private void initCamera(SurfaceHolder holder) {
try {
camera.setPreviewDisplay(holder);
camera.getParameters().setPreviewFormat(ImageFormat.NV21);
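// caution: getParameters() returns a copy, so this format change is never applied;
// to apply it: Camera.Parameters p = camera.getParameters();
// p.setPreviewFormat(ImageFormat.NV21); camera.setParameters(p);
// (NV21 is already the default preview format)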
camera.setPreviewCallback(this);
camera.startPreview();
} catch (Exception e) {
Log.d("Error setting camera preview", e);
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
}
@Override
public boolean onTouchEvent(MotionEvent event) {
if(event.getAction() == MotionEvent.ACTION_DOWN)
{
android.util.Log.d("touched", "called");
/* int x = (int)event.getX();
int y = (int)event.getY();
android.util.Log.d("touched pixel :", x+" "+y);
setDrawingCacheEnabled(true);
buildDrawingCache();
Bitmap mBmp = getDrawingCache();
int pixel = mBmp.getPixel(x, y);
int redValue = Color.red(pixel);
int blueValue = Color.blue(pixel);
int greenValue = Color.green(pixel);
android.util.Log.d("touched pixel color :", redValue+" "+greenValue+" "+blueValue);
android.util.Log.d("touched pixel color from preview:", redValue+" "+greenValue+" "+blueValue);
*/
//how to get particular pixel from myPixels[]
}
return false;
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
android.util.Log.d("onPreviewFrame", "called");
int frameHeight = camera.getParameters().getPreviewSize().height;
int frameWidth = camera.getParameters().getPreviewSize().width;
// number of pixels
int rgb[] = new int[frameWidth * frameHeight];
// transforms NV21 pixel data into RGB pixels
myPixels = decodeYUV420SP(rgb, data, frameWidth, frameHeight);
}
public int[] decodeYUV420SP(int[] rgb, byte[] yuv420sp, int width, int height) {
// here we're using our own internal PImage attributes
final int frameSize = width * height;
for (int j = 0, yp = 0; j < height; j++) {
int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
for (int i = 0; i < width; i++, yp++) {
int y = (0xff & ((int) yuv420sp[yp])) - 16;
if (y < 0)
y = 0;
if ((i & 1) == 0) {
v = (0xff & yuv420sp[uvp++]) - 128;
u = (0xff & yuv420sp[uvp++]) - 128;
}
int y1192 = 1192 * y;
int r = (y1192 + 1634 * v);
int g = (y1192 - 833 * v - 400 * u);
int b = (y1192 + 2066 * u);
if (r < 0)
r = 0;
else if (r > 262143)
r = 262143;
if (g < 0)
g = 0;
else if (g > 262143)
g = 262143;
if (b < 0)
b = 0;
else if (b > 262143)
b = 262143;
// use internal buffer instead of pixels for UX reasons
rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000)
| ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
}
}
return rgb;
}
}
I've followed a different approach to solve it. I'll post the code as soon as I get free.
Algorithm:
create an overlay on the live camera preview
when the user touches, update the overlay with the RGB data of the latest YUV buffer stream from the live camera
pick the RGB color from the overlay image
It seems like myPixels is a 1D representation of 2D data (width x height),
which means myPixels has a length of (width * height).
Let's say the pixel is (x, y); then, since the array is laid out row by row,
int idx = (width * y) + x;
int color = myPixels[idx];
With the above information you can modify the decodeYUV420SP method to output only the color of a particular pixel.
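One caveat: the touch coordinates from onTouchEvent are in view pixels, while myPixels is indexed in preview-frame pixels, so the coordinates need scaling first (ignoring any display rotation). A minimal sketch, assuming hypothetical fields frameWidth and frameHeight cached from onPreviewFrame:
if (event.getAction() == MotionEvent.ACTION_DOWN && myPixels != null) {
    // scale view coordinates to preview-frame coordinates
    int x = (int) (event.getX() * frameWidth / getWidth());
    int y = (int) (event.getY() * frameHeight / getHeight());
    int color = myPixels[y * frameWidth + x];
    android.util.Log.d("touched pixel color",
            Color.red(color) + " " + Color.green(color) + " " + Color.blue(color));
}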
I am using JavaCV in Android.
In my code, I have created an ImageComparator object (the class from the OpenCV2 Cookbook examples:
http://code.google.com/p/javacv/source/browse/OpenCV2_Cookbook/src/opencv2_cookbook/chapter04/ImageComparator.scala?repo=examples
http://code.google.com/p/javacv/wiki/OpenCV2_Cookbook_Examples_Chapter_4
) and use that object to compare images. If I use a file from the SD card, the comparator works:
File referenceImageFile = new File(absPath1); // Read an image.
IplImage reference = Util.loadOrExit(referenceImageFile,CV_LOAD_IMAGE_COLOR);
comparator = new ImageComparator(reference);
But when I create the IplImage from the camera preview, it does not work. I get the following exception during the comparison "score" calculation:
score = referenceComparator.compare(grayImage) / imageSize;
java.lang.RuntimeException: /home/saudet/android/OpenCV-2.4.2/modules/core/src/convert.cpp:1196: error: (-215) i < src.channels() in function void cvSplit(const void*, void*, void*, void*, void*)
For the camera preview I am using the code from FacePreview to create the IplImage, but it creates the image in grayscale:
int f = SUBSAMPLING_FACTOR;
if (grayImage == null || grayImage.width() != width / f
|| grayImage.height() != height / f) {
grayImage = IplImage.create(width / f, height / f, IPL_DEPTH_8U, 1);
}
int imageWidth = grayImage.width();
int imageHeight = grayImage.height();
int dataStride = f * width;
int imageStride = grayImage.widthStep();
ByteBuffer imageBuffer = grayImage.getByteBuffer();
for (int y = 0; y < imageHeight; y++) {
int dataLine = y * dataStride;
int imageLine = y * imageStride;
for (int x = 0; x < imageWidth; x++) {
imageBuffer.put(imageLine + x, data[dataLine + f * x]);
}
}
How do I create a color IplImage from the camera to use with ImageComparator?
The below code seems to be working fine.
public void onPreviewFrame(final byte[] data, final Camera camera) {
try {
Camera.Size size = camera.getParameters().getPreviewSize();
processImage(data, size.width, size.height);
camera.addCallbackBuffer(data);
} catch (RuntimeException e) {
// The camera has probably just been released, ignore.
Log.d("Exception", " " + e);
}
}
protected void processImage(byte[] data, int width, int height) {
score.clear();
// First, downsample our image
int f = SUBSAMPLING_FACTOR;
IplImage _4image = IplImage.create(width, height, IPL_DEPTH_8U, f);
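// note: f (the subsampling factor) is reused here as the channel count; this only
// lines up if SUBSAMPLING_FACTOR == 4, giving a 4-channel 8-bit image whose int
// buffer holds one packed ARGB pixel per int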
int[] _temp = new int[width * height];
if (_4image != null) {
decodeYUV420SP(_temp, data, width, height);
_4image.getIntBuffer().put(_temp);
}
//bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
//bitmap.copyPixelsFromBuffer(_4image.getByteBuffer());
Log.d("CompareAndroid", "processImage");
int imageSize = _4image.width() * _4image.height();
Iterator<ImageComparator> iterator = reference_List.iterator();
// Compute histogram match and normalize by image size.
// 1 means perfect match.
while(iterator.hasNext()){
score.add(((ImageComparator) iterator.next()).compare(_4image) / imageSize);
}
Log.d("CompareImages", "Score Size "+score.size());
postInvalidate();
}
This code seems to be working fine.
private void decodeYUV420SP(int[] rgb, byte[] yuv420sp, int width,
int height) {
int frameSize = width * height;
for (int j = 0, yp = 0; j < height; j++) {
int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
for (int i = 0; i < width; i++, yp++) {
int y = (0xff & ((int) yuv420sp[yp])) - 16;
if (y < 0)
y = 0;
if ((i & 1) == 0) {
v = (0xff & yuv420sp[uvp++]) - 128;
u = (0xff & yuv420sp[uvp++]) - 128;
}
int y1192 = 1192 * y;
int r = (y1192 + 1634 * v);
int g = (y1192 - 833 * v - 400 * u);
int b = (y1192 + 2066 * u);
if (r < 0)
r = 0;
else if (r > 262143)
r = 262143;
if (g < 0)
g = 0;
else if (g > 262143)
g = 262143;
if (b < 0)
b = 0;
else if (b > 262143)
b = 262143;
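// note: compared with the other decodeYUV420SP versions above, B and R are
// swapped in the line below, so pixels are packed in BGR order (OpenCV's
// default channel order) rather than ARGB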
rgb[yp] = 0xff000000 | ((b << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((r >> 10) & 0xff);
}
}
}
I haven't tested it, but something like this should work:
IplImage yuvimage = IplImage.create(width, height * 3 / 2, IPL_DEPTH_8U, 2);
IplImage rgbimage = IplImage.create(width, height, IPL_DEPTH_8U, 3);
cvCvtColor(yuvimage, rgbimage, CV_YUV2BGR_NV21);
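The snippet still needs the NV21 bytes copied into yuvimage before the conversion; a minimal, untested sketch, assuming data is the byte[] delivered by onPreviewFrame:
yuvimage.getByteBuffer().put(data, 0, width * height * 3 / 2); // NV21 holds 1.5 bytes per pixel
cvCvtColor(yuvimage, rgbimage, CV_YUV2BGR_NV21);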