SurfaceHolder callbacks are never called on Nexus 6P - Android

I'm developing an Android app that gets the color of the current pixel on the screen.
The application concept is very simple: the CameraPreview class implements the SurfaceHolder.Callback interface, and surfaceChanged gets the color.
My problem is that the code works just fine on the Samsung Galaxy S6, but it doesn't work on the Nexus 6P: the callbacks surfaceCreated and surfaceChanged are never called.
Here is a sample of my code:
public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback {
private SurfaceHolder mHolder;
private Camera mCamera;
String TAG = "TAG";
Context context;
OnColorDetected onColorDetected;
ColorDetectionPresenter presenter;
Camera.Size previewSize;
int midPixel;
public CameraPreview(Context context, Camera camera, ColorDetectionPresenter presenter) {
super(context);
mCamera = camera;
this.context = context;
this.presenter = presenter;
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = getHolder();
mHolder.addCallback(this);
// deprecated setting, but required on Android versions prior to 3.0
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void setOnColorDetected(OnColorDetected onColorDetected) {
this.onColorDetected = onColorDetected;
}
public void surfaceDestroyed(SurfaceHolder holder) {
// empty. Take care of releasing the Camera preview in your activity.
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, now tell the camera where to draw the preview.
try {
Log.d("SURFACE_STATE","created");
mCamera.setPreviewDisplay(holder);
mCamera.setPreviewCallback(new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
int frameHeight = camera.getParameters().getPreviewSize().height;
int frameWidth = camera.getParameters().getPreviewSize().width;
// number of pixels//transforms NV21 pixel data into RGB pixels
int rgb[] = new int[frameWidth * frameHeight];
// conversion
int[] myPixels = decodeYUV420SP(rgb, data, frameWidth, frameHeight);
for (int i = 0; i < myPixels.length; i++) {
//Toast.makeText(context, MainActivity.getBestMatchingColorName(myPixels[i]), Toast.LENGTH_SHORT).show();
}
}
});
mCamera.startPreview();
} catch (IOException e) {
Log.d(TAG, "Error setting camera preview: " + e.getMessage());
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
// If your preview can change or rotate, take care of those events here.
// Make sure to stop the preview before resizing or reformatting it.
Log.d("SURFACE_STATE","changed");
if (mHolder.getSurface() == null) {
// preview surface does not exist
return;
}
// stop preview before making changes
try {
mCamera.stopPreview();
} catch (Exception e) {
// ignore: tried to stop a non-existent preview
}
// set preview size and make any resize, rotate or
// reformatting changes here
// start preview with new settings
try {
Camera.Parameters parameters = mCamera.getParameters();
mCamera.setPreviewDisplay(mHolder);
List<Camera.Size> previewSizes = parameters.getSupportedPreviewSizes();
if (previewSizes == null)
previewSizes = parameters.getSupportedPictureSizes();
// You need to choose the most appropriate previewSize for your needs
previewSize = previewSizes.get(0);
for(int i=0;i<previewSizes.size();i++){
if(previewSizes.get(i).width>previewSize.width)
previewSize=previewSizes.get(i);
}
// .... select one of previewSizes here
cameraSetup(width, height);
parameters.setPreviewSize(previewSize.width, previewSize.height);
mCamera.setParameters(parameters);
mCamera.setPreviewCallback(new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
int frameHeight = camera.getParameters().getPreviewSize().height;
int frameWidth = camera.getParameters().getPreviewSize().width;
// number of pixels//transforms NV21 pixel data into RGB pixels
int rgb[] = new int[frameWidth * frameHeight];
// conversion
int[] myPixels = decodeYUV420SP(rgb, data, frameWidth, frameHeight);
String colorName = presenter.getBestMatchingColorName(myPixels[myPixels.length / 2]);
onColorDetected.colorDetected(colorName);
}
});
mCamera.startPreview();
} catch (Exception e) {
Log.d(TAG, "Error starting camera preview: " + e.getMessage());
}
}
int[] decodeYUV420SP(int[] rgb, byte[] yuv420sp, int width, int height) {
// Pulled directly from:
// http://ketai.googlecode.com/svn/trunk/ketai/src/edu/uic/ketai/inputService/KetaiCamera.java
final int frameSize = width * height;
for (int j = 0, yp = 0; j < height; j++) {
int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
for (int i = 0; i < width; i++, yp++) {
int y = (0xff & ((int) yuv420sp[yp])) - 16;
if (y < 0)
y = 0;
if ((i & 1) == 0) {
v = (0xff & yuv420sp[uvp++]) - 128;
u = (0xff & yuv420sp[uvp++]) - 128;
}
int y1192 = 1192 * y;
int r = (y1192 + 1634 * v);
int g = (y1192 - 833 * v - 400 * u);
int b = (y1192 + 2066 * u);
if (r < 0)
r = 0;
else if (r > 262143)
r = 262143;
if (g < 0)
g = 0;
else if (g > 262143)
g = 262143;
if (b < 0)
b = 0;
else if (b > 262143)
b = 262143;
rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
}
}
return rgb;
}
private void cameraSetup(int w, int h) {
// set the camera parameters, including the preview size
FrameLayout.LayoutParams lp = (FrameLayout.LayoutParams) getLayoutParams();
double cameraAspectRatio = ((double) previewSize.width) / previewSize.height;
if (((double) h) / w > cameraAspectRatio) {
lp.width = (int) (h / cameraAspectRatio + 0.5);
lp.height = h;
} else {
lp.height = (int) (w * cameraAspectRatio + 0.5);
lp.width = w;
lp.topMargin = (h - lp.height) / 2;
}
lp.gravity = Gravity.CENTER_HORIZONTAL | Gravity.TOP;
setLayoutParams(lp);
requestLayout();
}
}
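A note on the magic numbers in decodeYUV420SP, since they look arbitrary: this is the usual fixed-point BT.601 YUV-to-RGB conversion with the coefficients scaled by 1024 (1192 ≈ 1.164 * 1024, 1634 ≈ 1.596 * 1024, 833 ≈ 0.813 * 1024, 400 ≈ 0.391 * 1024, 2066 ≈ 2.018 * 1024). Each channel is clamped to 262143 = 2^18 - 1, the largest 18-bit value, and the final shifts (r << 6, g >> 2, b >> 10) are (x >> 10) << 16, (x >> 10) << 8 and (x >> 10) done in one step: they drop the 10 fractional bits and pack each byte into its ARGB position.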
And this is how I call it:
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
colorDetectionPresenter = new ColorDetectionPresenter();
camera = Camera.open();
cameraPreview = new CameraPreview(getActivity(), camera, colorDetectionPresenter);
params = camera.getParameters();
params.setPreviewFormat(ImageFormat.NV21);
camera.setDisplayOrientation(90);
siz = params.getSupportedPreviewSizes().get(0);
params.setPreviewSize(siz.width, siz.height);
}
I have tried initializing the camera preview in onCreate, onCreateView and onResume; none of them works.
Any suggestions?
It only fails on my Nexus 6P.
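One thing worth checking: surfaceCreated and surfaceChanged only fire once the SurfaceView is actually attached to a window and laid out, and the onCreate above never adds cameraPreview to the view hierarchy (it also never applies params with camera.setParameters). A minimal sketch of attaching it, assuming this code lives in a Fragment whose layout contains a FrameLayout with the hypothetical id preview_container:
@Override
public void onViewCreated(View view, Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
camera.setParameters(params); // apply the format/size chosen in onCreate
// the surface is only created after the view is attached and measured
FrameLayout container = (FrameLayout) view.findViewById(R.id.preview_container);
container.addView(cameraPreview);
}
If the view is attached but ends up with zero size (for example, its container is collapsed during layout), the surface is never created either, which would explain the callbacks firing on one device but not another.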

Related

Find the brightest pixel in the camera preview

I need help fixing these lines of code to find the brightest pixel and its coordinates (x, y) and draw a bitmap over it.
I have an Android live camera preview and want to scan the screen. My processing code is inside onPreviewFrame, where a for loop compares a preset float brightestValue against every value of pixels[]. My canvas statically draws a circle at the top left of the screen.
I expect the circle position to change dynamically on the camera SurfaceView as the direction of the camera changes. Sorry if my English is grammatically bad.
Thank you.
public class CameraDemo extends Activity {
private static final String TAG = "CameraDemo";
Preview preview;
int brightestX = 0; // X-coordinate of the brightest video pixel
int brightestY = 0; // Y-coordinate of the brightest video pixel
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
Paint p = new Paint ();
p.setColor(Color.WHITE);
Bitmap bg = Bitmap.createBitmap(480,800,Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(bg);
canvas.drawCircle(brightestX,brightestY,20,p);
preview = new Preview(this);
preview.setBackground(new BitmapDrawable(bg));
((FrameLayout) findViewById(R.id.preview)).addView(preview);
}
class Preview extends SurfaceView implements SurfaceHolder.Callback, Camera.PreviewCallback {
SurfaceHolder mHolder;
Camera mCamera;
private Camera.Parameters parameters;
private Camera.Size previewSize;
private int[] pixels;
Preview(Context context) {
super(context);
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, acquire the camera and tell it where
// to draw.
mCamera = Camera.open();
try {
mCamera.startPreview();
mCamera.setPreviewDisplay(holder);
mCamera.setDisplayOrientation(90);
mCamera.setPreviewCallbackWithBuffer(this);
parameters = mCamera.getParameters();
previewSize = parameters.getPreviewSize();
} catch (IOException exception) {
mCamera.release();
mCamera = null;
// TODO: add more exception handling logic here
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
mCamera.release();
mCamera = null;
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
parameters.setPreviewSize(w, h);
//set the camera's settings
mCamera.setParameters(parameters);
mCamera.startPreview();
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
//transforms NV21 pixel data into RGB pixels
decodeYUV420SP(pixels, data, previewSize.width, previewSize.height);
float brightestValue = 0; // Brightness of the brightest video pixel
for (int y = 0; y < previewSize.height ; y++) {
for (int x = 0; x < previewSize.width; x++) {
// Get the color stored in the pixel
float pixelBrightness = pixels[previewSize.width*previewSize.height];
// If that value is brighter than any previous, then store the
// brightness of that pixel, as well as its (x,y) location
if (pixelBrightness > brightestValue) {
brightestValue = pixelBrightness;
brightestY = y;
brightestX = x;
}
}
}
}
}
//Method from Ketai project! Not mine! See below...
void decodeYUV420SP(int[] rgb, byte[] yuv420sp, int width, int height) {
final int frameSize = width * height;
for (int j = 0, yp = 0; j < height; j++) {
int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
for (int i = 0; i < width; i++, yp++) {
int y = (0xff & ((int) yuv420sp[yp])) - 16;
if (y < 0)
y = 0;
if ((i & 1) == 0) {
v = (0xff & yuv420sp[uvp++]) - 128;
u = (0xff & yuv420sp[uvp++]) - 128;
}
int y1192 = 1192 * y;
int r = (y1192 + 1634 * v);
int g = (y1192 - 833 * v - 400 * u);
int b = (y1192 + 2066 * u);
if (r < 0) r = 0; else if (r > 262143) r = 262143;
if (g < 0) g = 0; else if (g > 262143) g = 262143;
if (b < 0) b = 0; else if (b > 262143) b = 262143;
rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
}
}
}
}
You need to call canvas.drawCircle(brightestX,brightestY,20,p); after updating the position in the onPreviewFrame method, so the code looks like this:
if (pixelBrightness > brightestValue) {
brightestValue = pixelBrightness;
brightestY = y;
brightestX = x;
drawCircleOnNewPosition(); // method that calls canvas.drawCircle
}
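Note two other problems in the posted loop, for completeness: pixels is never allocated before decodeYUV420SP writes into it (it needs new int[previewSize.width * previewSize.height]), and pixelBrightness always reads pixels[previewSize.width*previewSize.height], a constant index one past the end of the array, instead of the current pixel. A sketch of the corrected loop, using android.graphics.Color (already imported above) and the hypothetical drawCircleOnNewPosition() from this answer:
float brightestValue = 0;
for (int y = 0; y < previewSize.height; y++) {
for (int x = 0; x < previewSize.width; x++) {
// row-major index into the decoded RGB array
int pixel = pixels[y * previewSize.width + x];
// crude brightness proxy: average of the three channels
float pixelBrightness = (Color.red(pixel) + Color.green(pixel) + Color.blue(pixel)) / 3f;
if (pixelBrightness > brightestValue) {
brightestValue = pixelBrightness;
brightestX = x;
brightestY = y;
drawCircleOnNewPosition(); // hypothetical; see above
}
}
}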

Display the YUV value from the live camera preview on Android

This question has been asked many times already, but I cannot seem to find exactly what I want.
I am trying to create a camera app where I want to log the YUV or RGB value when I point my camera at some color. The values must be in the 0...255 range for RGB, or the corresponding YUV color format. I can manage the conversion between them, as there are many such examples on Stack Overflow. However, I cannot store the values in 3 separate variables and log them.
So far I have managed to get:
package com.example.virus.bpreader;
public class CaptureVideo extends SurfaceView implements SurfaceHolder.Callback, Camera.PreviewCallback {
private SurfaceHolder mHolder;
private Camera mCamera;
private int[] pixels;
public CaptureVideo(Context context, Camera cameraManager) {
super(context);
mCamera = cameraManager;
mCamera.setDisplayOrientation(90);
//get holder and set the class as callback
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
//when the surface is created, let the camera start the preview
try {
mCamera.setPreviewDisplay(holder);
mCamera.startPreview();
mCamera.cancelAutoFocus();
Camera.Parameters params = mCamera.getParameters();
//get fps
params.getSupportedPreviewFpsRange();
//get resolution
params.getSupportedPreviewSizes();
//stop auto exposure
params.setAutoExposureLock(false);
// Check what resolutions are supported by your camera
List<Camera.Size> sizes = params.getSupportedPictureSizes();
// Iterate through all available resolutions and choose one
for (Camera.Size size : sizes) {
Log.i("Resolution", "Available resolution: " + size.width + " " + size.height);
}
//set resolution at 320*240
params.setPreviewSize(320,240);
//set frame rate at 10 fps
List<int[]> frameRates = params.getSupportedPreviewFpsRange();
int last = frameRates.size() - 1;
params.setPreviewFpsRange(10000, 10000);
//set Image Format
//params.setPreviewFormat(ImageFormat.NV21);
mCamera.setParameters(params);
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
//need to stop the preview and restart on orientation change
if (mHolder.getSurface() == null) {
return;
}
//stop preview
mCamera.stopPreview();
//start again
try {
mCamera.setPreviewDisplay(holder);
mCamera.startPreview();
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
//stop and release
mCamera.stopPreview();
mCamera.release();
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
int frameHeight = camera.getParameters().getPreviewSize().height;
int frameWidth = camera.getParameters().getPreviewSize().width;
// number of pixels//transforms NV21 pixel data into RGB pixels
int rgb[] = new int[frameWidth * frameHeight];
// conversion
int[] myPixels = decodeYUV420SP(rgb, data, frameWidth, frameHeight);
Log.d("myPixel", String.valueOf(myPixels.length));
}
//yuv decode
int[] decodeYUV420SP(int[] rgb, byte[] yuv420sp, int width, int height) {
final int frameSize = width * height;
int r, g, b, y1192, y, i, uvp, u, v;
for (int j = 0, yp = 0; j < height; j++) {
uvp = frameSize + (j >> 1) * width;
u = 0;
v = 0;
for (i = 0; i < width; i++, yp++) {
y = (0xff & ((int) yuv420sp[yp])) - 16;
if (y < 0)
y = 0;
if ((i & 1) == 0) {
// above answer is wrong at the following lines. just swap u and v
u = (0xff & yuv420sp[uvp++]) - 128;
v = (0xff & yuv420sp[uvp++]) - 128;
}
y1192 = 1192 * y;
r = (y1192 + 1634 * v);
g = (y1192 - 833 * v - 400 * u);
b = (y1192 + 2066 * u);
r = Math.max(0, Math.min(r, 262143));
g = Math.max(0, Math.min(g, 262143));
b = Math.max(0, Math.min(b, 262143));
// combine RGB
rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
}
}
return rgb;
}
}
Here the decode method should give a hex format of the RGB color space (quite sure it's all right, as most of the answers are the same). The problem I am facing is that I am not quite sure how to call it inside the onPreviewFrame method so that it logs the RGB values separately.
N.B. Like I said, I have seen a lot of similar questions but could not find a solution in them. I do not want to store a file (image/video), as I only need the RGB/YUV values from the live camera preview when I point the camera at some color.
I need the RGB or YUV values because I want to plot a graph of them against time.
Any help will be much appreciated.
Well, if the problem is to get separate values of R, G and B from the RGB array, check this SO post here.
Hope it helps!
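For the concrete case above, a minimal sketch of pulling the channels apart inside onPreviewFrame, using android.graphics.Color (assumed imported) on the packed ARGB ints that decodeYUV420SP returns; here it samples the center pixel, but any index works:
int[] myPixels = decodeYUV420SP(rgb, data, frameWidth, frameHeight);
int center = myPixels[(frameHeight / 2) * frameWidth + frameWidth / 2]; // row-major center pixel
int r = Color.red(center); // 0..255
int g = Color.green(center); // 0..255
int b = Color.blue(center); // 0..255
Log.d("RGB", "r=" + r + " g=" + g + " b=" + b);
These are the values to plot against time; logging every pixel of every frame would flood logcat.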

Preserving the red part on the Android camera preview while the other part of the screen is grayscale

I am writing an Android app about the camera.
The function I want to implement is to let the preview preserve red color on screen,
while the other part of the screen is grayscale.
Like this picture
I multiply the R, G and B components by particular scalars to achieve grayscale
and preserve the red part of the preview after changing the raw data format to RGB,
but after I override the onDraw(Canvas canvas) method in my custom View,
the result is like a blue filter effect.
Can anybody give me some hint about which step I got wrong?
THANK YOU.
The code of my custom View is below
package com.example.macampreviewdemo;
public class CamPreviewDemoActivity extends Activity {
/** Called when the activity is first created. */
int h, w;
public String tag = "tag";
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Log.i(tag, "onCreate");
DisplayMetrics metrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metrics);
h = metrics.heightPixels;
w = metrics.widthPixels;
this.requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
Display display = ((WindowManager)
getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
setContentView(R.layout.main);
setRequestedOrientation(0);
ViewToDraw dtw = (ViewToDraw) findViewById(R.id.vtd);
CameraView cameraView = new CameraView(this, dtw, w, h);
((FrameLayout) findViewById(R.id.preview)).addView(cameraView);
}
}
And this:
package com.example.macampreviewdemo;
public class ViewToDraw extends View{
public String tag = "tag";
public byte[] image;
public boolean isCameraSet = false;
public int imgWidth, imgHeight;
Bitmap overlayBitmap;
Matrix matrix;
public ViewToDraw(Context context, AttributeSet attrs) {
super(context, attrs);
matrix = new Matrix();
}
public void cameraSet(){
isCameraSet = true;
}
public void putImage(byte[] img){
image = img;
}
@Override
protected void onDraw(Canvas canvas){
Log.i(tag, "onDraw() ");
int size = imgWidth * imgHeight;
int[] rgb = new int[imgWidth * imgHeight];
if(isCameraSet){
rgb = convertYUV420_NV21toARGB8888(image, imgWidth, imgHeight);
for (int k = 0; k < size; k++) {
if(Color.red(rgb[k]) == 255 &&
Color.green(rgb[k]) == 0 &&
Color.blue(rgb[k]) == 50){}
else{
rgb[k] = (int) (
(0.2126 * Color.red(rgb[k])) +
(0.7152 * Color.green(rgb[k])) +
(0.0722 * Color.blue(rgb[k]))
);
}
}
Log.i("tag", "rgb length = " + rgb.length);
overlayBitmap =
Bitmap.createBitmap(rgb, 0, imgWidth,
imgWidth, imgHeight,
Bitmap.Config.RGB_565);
canvas.drawBitmap(overlayBitmap, matrix, null);
overlayBitmap.recycle();
}
}
static public void decodeYUV420SP(int[] rgb, byte[] yuv420sp, int width, int height) {
final int frameSize = width * height;
int rtmp, gtmp, btmp;
for (int j = 0, yp = 0; j < height; j++) {
int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
for (int i = 0; i < width; i++, yp++) {
int y = (0xff & ((int) yuv420sp[yp])) - 16;
if (y < 0)y = 0;
if ((i & 1) == 0) {
v = (0xff & yuv420sp[uvp++]) - 128;
u = (0xff & yuv420sp[uvp++]) - 128;
}
int y1192 = 1192 * y;
int r = (y1192 + 1634 * v);
int g = (y1192 - 833 * v - 400 * u);
int b = (y1192 + 2066 * u);
if (r < 0)r = 0;
else if (r > 262143)r = 262143;
if (g < 0)g = 0;
else if (g > 262143)g = 262143;
if (b < 0)b = 0;
else if (b > 262143)b = 262143;
rtmp = ((r << 6) & 0xff0000);
gtmp = ((g >> 2) & 0xff00);
btmp = ((b >> 10) & 0xff);
rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00)
| ((b >> 10) & 0xff);
}
}
}
int[] yuv420ToGrayScale(byte[] yuv420, int width, int height){
int size = width * height;
int y1, y2, y3, y4;
int[] pixel = new int[size];
for (int i = 0; i < size; i+=2) {
y1 = yuv420[i]&0xff;
y2 = yuv420[i + 1]&0xff;
y3 = yuv420[i + width]&0xff;
y4 = yuv420[i + width +1]&0xff;
pixel[i] = yuv420[i];
pixel[i + 1] = yuv420[i +1];
pixel[i + width ] = yuv420[width + i];
pixel[i + width + 1] = yuv420[i + width + 1];
if (i!=0 && (i+2)%width==0)
i+=width;
}
return pixel;
}
/**
* Converts YUV420 NV21 to ARGB8888
*
* @param data byte array in YUV420 NV21 format.
* @param width pixels width
* @param height pixels height
* @return an ARGB8888 pixel int array, where each int is a pixel's ARGB.
*/
public static int[] convertYUV420_NV21toARGB8888(byte [] data, int width, int height) {
int size = width*height;
int offset = size;
int[] pixels = new int[size];
int u, v, y1, y2, y3, y4;
// i along Y and the final pixels
// k along pixels U and V
for(int i=0, k=0; i < size; i+=2, k+=1) {
y1 = data[i ]&0xff;
y2 = data[i+1]&0xff;
y3 = data[width+i ]&0xff;
y4 = data[width+i+1]&0xff;
v = data[offset+k ]&0xff;
u = data[offset+k+1]&0xff;
v = v-128;
u = u-128;
pixels[i ] = convertYUVtoARGB(y1, u, v);
pixels[i+1] = convertYUVtoARGB(y2, u, v);
pixels[width+i ] = convertYUVtoARGB(y3, u, v);
pixels[width+i+1] = convertYUVtoARGB(y4, u, v);
if (i!=0 && (i+2)%width==0)
i+=width;
}
return pixels;
}
private static int convertYUVtoARGB(int y, int u, int v) {
int r,g,b;
r = y + (int)(1.402f*u);
g = y - (int)(0.344f*v + 0.714f*u);
b = y + (int)(1.772f*v);
r = r>255? 255 : r<0 ? 0 : r;
g = g>255? 255 : g<0 ? 0 : g;
b = b>255? 255 : b<0 ? 0 : b;
return 0xff000000 | (r<<16) | (g<<8) | b;
}
public static void myNV21ToRGB(int width, int height){
int yy, u, v;
int frame_size = width * height;
}
}
And this:
package com.example.macampreviewdemo;
public class CameraView extends SurfaceView implements SurfaceHolder.Callback{
public Camera mycamera;
List<Camera.Size> cameraSize;
private SurfaceHolder mHolder;
public ViewToDraw vtd;
int pickedH, pickedW;
int defaultH, defaultW;
public String tag = "tag";
public CameraView(Context context, ViewToDraw _vtd, int width, int height) {
super(context);
// TODO Auto-generated constructor stub
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
vtd = _vtd;
defaultH = height;
defaultW = width;
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
Log.i("tag"," surfaceCreated");
int i;
mycamera = Camera.open();
cameraSize = mycamera.getParameters().getSupportedPreviewSizes();
if(cameraSize != null){
// pick resolution
pickedH = defaultH;
pickedW = defaultW;
for(i=0;i<cameraSize.size();i++){
if(cameraSize.get(i).width < defaultW){
break;
}else{
pickedH = cameraSize.get(i).height;
pickedW = cameraSize.get(i).width;
}
}
}else{
Log.e("tag","null");
};
try {
mycamera.setPreviewDisplay(holder);
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
Log.i("tag","surfaceChanged");
Camera.Parameters parameters = mycamera.getParameters();
parameters.setPreviewSize(pickedW, pickedH);
mycamera.setParameters(parameters);
//create buffer
PixelFormat p = new PixelFormat();
PixelFormat.getPixelFormatInfo(parameters.getPreviewFormat(),p);
int bufSize = (pickedW*pickedH*p.bitsPerPixel)/8;
//add buffers
byte[] buffer = new byte[bufSize];
mycamera.addCallbackBuffer(buffer);
buffer = new byte[bufSize];
mycamera.addCallbackBuffer(buffer);
buffer = new byte[bufSize];
mycamera.addCallbackBuffer(buffer);
mycamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera camera) {
Log.i("tag", "onPreviewFrame");
Log.i("tag", "pickedH = " + pickedH);
Log.i("tag", "pickedW = " + pickedW);
vtd.putImage(data);
vtd.cameraSet();
vtd.imgHeight = pickedH;
vtd.imgWidth = pickedW;
vtd.invalidate();
mycamera.addCallbackBuffer(data);
}
});
mycamera.startPreview();
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i("tag", "surfaceDestroyed");
mycamera.setPreviewCallback(null);
mycamera.release();
mycamera = null;
}
}
And this:
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:paddingBottom="@dimen/activity_vertical_margin"
android:paddingLeft="@dimen/activity_horizontal_margin"
android:paddingRight="@dimen/activity_horizontal_margin"
android:paddingTop="@dimen/activity_vertical_margin"
tools:context=".CamPreviewDemoActivity" >
<FrameLayout android:id="@+id/FrameLayout01"
android:layout_height="fill_parent" android:layout_width="fill_parent">
<FrameLayout
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:id="@+id/preview">
</FrameLayout>
<com.example.macampreviewdemo.ViewToDraw
android:id="@+id/vtd"
android:layout_height="fill_parent"
android:layout_width="fill_parent"/>
</FrameLayout>
</RelativeLayout>
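One likely culprit for the blue-filter effect, for what it's worth: in onDraw above, the grayscale branch stores the bare luminance (an int in 0..255) straight into rgb[k]. As an ARGB int that only populates the low byte, i.e. the blue channel, and leaves alpha at zero. A gray pixel needs the value replicated into all three channels, roughly:
int gray = (int) ((0.2126 * Color.red(rgb[k]))
+ (0.7152 * Color.green(rgb[k]))
+ (0.0722 * Color.blue(rgb[k])));
rgb[k] = 0xff000000 | (gray << 16) | (gray << 8) | gray; // opaque gray pixel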

Get color of touched pixel from camera preview

I have created a demo application which opens the camera. Now I want to get the color of a pixel when the user touches the live camera preview.
I have tried overriding onTouchEvent, and I succeeded in getting the position of the pixel in x, y, but I am not getting the RGB color value from it. It always shows 0,0,0.
All suggestions are welcome, including any alternate way to achieve the same functionality. [Excluding OpenCV, because it requires installing the OpenCvManager apk as well to support my application]
Code:
public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {
private Camera camera;
private SurfaceHolder holder;
int[] myPixels;
public CameraPreview(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
}
public CameraPreview(Context context, AttributeSet attrs) {
super(context, attrs);
}
public CameraPreview(Context context) {
super(context);
}
public void init(Camera camera) {
this.camera = camera;
initSurfaceHolder();
}
@SuppressWarnings("deprecation") // needed for < 3.0
private void initSurfaceHolder() {
holder = getHolder();
holder.addCallback(this);
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
initCamera(holder);
}
private void initCamera(SurfaceHolder holder) {
try {
camera.setPreviewDisplay(holder);
Camera.Parameters params = camera.getParameters();
params.setPreviewFormat(ImageFormat.NV21);
camera.setParameters(params); // getParameters() returns a copy; it must be set back to take effect
camera.setPreviewCallback(this);
camera.startPreview();
} catch (Exception e) {
Log.d("Error setting camera preview", e);
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
}
@Override
public boolean onTouchEvent(MotionEvent event) {
if(event.getAction() == MotionEvent.ACTION_DOWN)
{
android.util.Log.d("touched", "called");
/* int x = (int)event.getX();
int y = (int)event.getY();
android.util.Log.d("touched pixel :", x+" "+y);
setDrawingCacheEnabled(true);
buildDrawingCache();
Bitmap mBmp = getDrawingCache();
int pixel = mBmp.getPixel(x, y);
int redValue = Color.red(pixel);
int blueValue = Color.blue(pixel);
int greenValue = Color.green(pixel);
android.util.Log.d("touched pixel color :", redValue+" "+greenValue+" "+blueValue);
android.util.Log.d("touched pixel color from preview:", redValue+" "+greenValue+" "+blueValue);
*/
//how to get particular pixel from myPixels[]
}
return false;
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
android.util.Log.d("onPreviewFrame", "called");
int frameHeight = camera.getParameters().getPreviewSize().height;
int frameWidth = camera.getParameters().getPreviewSize().width;
// number of pixels//transforms NV21 pixel data into RGB pixels
int rgb[] = new int[frameWidth * frameHeight];
// conversion
myPixels = decodeYUV420SP(rgb, data, frameWidth, frameHeight);
}
public int[] decodeYUV420SP(int[] rgb, byte[] yuv420sp, int width, int height) {
// here we're using our own internal PImage attributes
final int frameSize = width * height;
for (int j = 0, yp = 0; j < height; j++) {
int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
for (int i = 0; i < width; i++, yp++) {
int y = (0xff & ((int) yuv420sp[yp])) - 16;
if (y < 0)
y = 0;
if ((i & 1) == 0) {
v = (0xff & yuv420sp[uvp++]) - 128;
u = (0xff & yuv420sp[uvp++]) - 128;
}
int y1192 = 1192 * y;
int r = (y1192 + 1634 * v);
int g = (y1192 - 833 * v - 400 * u);
int b = (y1192 + 2066 * u);
if (r < 0)
r = 0;
else if (r > 262143)
r = 262143;
if (g < 0)
g = 0;
else if (g > 262143)
g = 262143;
if (b < 0)
b = 0;
else if (b > 262143)
b = 262143;
// use internal buffer instead of pixels for UX reasons
rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000)
| ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
}
}
return rgb;
}
}
I've followed a different approach to solve it. I'll post the code as soon as I get free.
Algorithm:
create an overlay on the live camera
when the user touches, update the overlay with the RGB data of the latest YUV buffer stream from the live camera
pick the RGB color from the overlay image
It seems like myPixels is a 1D representation of 2D data (width x height),
which means myPixels has a length of (width * height).
Let's say the pixel is (x, y); then
int idx = ( width * y ) + x;
int color = myPixels[idx];
With the above information you can modify the decodeYUV420SP method to output only the color of a particular pixel.
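A sketch of applying that inside onTouchEvent, assuming android.graphics.Color is imported and, for simplicity, that the touch coordinates scale linearly from view space to frame space (a real implementation would also account for the display rotation):
@Override
public boolean onTouchEvent(MotionEvent event) {
if (event.getAction() == MotionEvent.ACTION_DOWN && myPixels != null) {
int frameWidth = camera.getParameters().getPreviewSize().width;
int frameHeight = camera.getParameters().getPreviewSize().height;
// naive mapping from view coordinates to frame coordinates
int px = (int) event.getX() * frameWidth / getWidth();
int py = (int) event.getY() * frameHeight / getHeight();
int color = myPixels[py * frameWidth + px];
android.util.Log.d("touched pixel color", Color.red(color) + " " + Color.green(color) + " " + Color.blue(color));
}
return false;
}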

Camera Preview is not FULL SCREEN in Android

I am trying to take the camera preview, alter it in onPreviewFrame(), and show it to the user. I have already achieved the required functionality, but the problem is the SIZE OF THE CAMERA PREVIEW. It always takes up a smaller part of the screen, and I want to make it fullscreen, i.e. I want the camera preview to fill the whole screen of the device. I have read and tried the solutions available on the net, but none of them works in my case. This is the SurfaceView class:
public class MySurfaceView extends SurfaceView implements Callback, Camera.PreviewCallback, android.view.SurfaceHolder.Callback {
private static final String TAG = "MySurfaceView";
private int width;
private int height;
public SurfaceHolder mHolder;
private Camera mCamera;
private int[] rgbints;
private int mMultiplyColor;
public MySurfaceView(Context context, AttributeSet attrs , Camera camera ,
int width , int height)
{
super(context, attrs);
mCamera = camera;
this.width = width;
this.height = height;
mHolder = getHolder();
mHolder.addCallback(this);
mMultiplyColor = getResources().getColor(R.color.honeydew);
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height)
{
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
synchronized (this) {
this.setWillNotDraw(false); // This allows us to make our own draw calls to this canvas
rgbints = new int[width * height];
// try { mCamera.setPreviewDisplay(holder); } catch (IOException e)
// { Log.e("Camera", "mCamera.setPreviewDisplay(holder);"); }
mCamera.startPreview();
mCamera.setPreviewCallback(this);
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
synchronized (this) {
try
{
CameraActivity cameraActivity = new CameraActivity();
cameraActivity.releaseCamera();
cameraActivity = null;
} catch (Exception e) {
Log.e("Camera", e.getMessage());
}
}
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
Canvas canvas = null;
if (mHolder == null)
{
return;
}
try {
synchronized (mHolder)
{
canvas = mHolder.lockCanvas(null);
int canvasWidth = canvas.getWidth();
int canvasHeight = canvas.getHeight();
decodeYUV(rgbints, data, width, height);
// draw the decoded image, centered on canvas
canvas.drawBitmap(rgbints, 0, width, canvasWidth-((width+canvasWidth)>>1), canvasHeight-((height+canvasHeight)>>1), width, height, false, null);
// use some color filter
canvas.drawColor(mMultiplyColor, Mode.MULTIPLY);
}
} catch (Exception e){
e.printStackTrace();
} finally {
// do this in a finally so that if an exception is thrown
// during the above, we don't leave the Surface in an
// inconsistent state
if (canvas != null)
{
mHolder.unlockCanvasAndPost(canvas);
canvas = null;
}
}
}
public void decodeYUV(int[] out, byte[] fg, int width, int height) throws NullPointerException, IllegalArgumentException {
int sz = width * height;
if (out == null)
throw new NullPointerException("buffer out is null");
if (out.length < sz)
throw new IllegalArgumentException("buffer out size " + out.length + " < minimum " + sz);
if (fg == null)
throw new NullPointerException("buffer 'fg' is null");
if (fg.length < sz * 3 / 2)
throw new IllegalArgumentException("buffer fg size " + fg.length + " < minimum " + sz * 3 / 2);
int i, j;
int Y, Cr = 0, Cb = 0;
for (j = 0; j < height; j++) {
int pixPtr = j * width;
final int jDiv2 = j >> 1;
for (i = 0; i < width; i++) {
Y = fg[pixPtr];
if (Y < 0)
Y += 255;
if ((i & 0x1) != 1) {
final int cOff = sz + jDiv2 * width + (i >> 1) * 2;
Cb = fg[cOff];
if (Cb < 0)
Cb += 127;
else
Cb -= 128;
Cr = fg[cOff + 1];
if (Cr < 0)
Cr += 127;
else
Cr -= 128;
}
int R = Y + Cr + (Cr >> 2) + (Cr >> 3) + (Cr >> 5);
if (R < 0)
R = 0;
else if (R > 255)
R = 255;
int G = Y - (Cb >> 2) + (Cb >> 4) + (Cb >> 5) - (Cr >> 1) + (Cr >> 3) + (Cr >> 4) + (Cr >> 5);
if (G < 0)
G = 0;
else if (G > 255)
G = 255;
int B = Y + Cb + (Cb >> 1) + (Cb >> 2) + (Cb >> 6);
if (B < 0)
B = 0;
else if (B > 255)
B = 255;
out[pixPtr++] = 0xff000000 + (B << 16) + (G << 8) + R;
}
}
}
public void showSupportedCameraFormats(Parameters p) {
List<Integer> supportedPictureFormats = p.getSupportedPreviewFormats();
Log.d(TAG, "preview format:" + cameraFormatIntToString(p.getPreviewFormat()));
for (Integer x : supportedPictureFormats) {
Log.d(TAG, "suppoterd format: " + cameraFormatIntToString(x.intValue()));
}
}
@SuppressWarnings("deprecation")
private String cameraFormatIntToString(int format) {
switch (format) {
case PixelFormat.JPEG:
return "JPEG";
case PixelFormat.YCbCr_420_SP:
return "NV21";
case PixelFormat.YCbCr_422_I:
return "YUY2";
case PixelFormat.YCbCr_422_SP:
return "NV16";
case PixelFormat.RGB_565:
return "RGB_565";
default:
return "Unknown:" + format;
}
}
}
This is the caller class:
public class CameraActivity extends Activity {
private Camera mCamera;
private MySurfaceView surfaceView;
private RelativeLayout relativeLayout;
@Override
protected void onCreate(Bundle savedInstanceState) {
// TODO Auto-generated method stub
super.onCreate(savedInstanceState);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.activity_main);
}
@Override
protected void onDestroy() {
// TODO Auto-generated method stub
super.onDestroy();
releaseCamera();
}
@Override
protected void onPause() {
// TODO Auto-generated method stub
super.onPause();
releaseCamera();
}
@Override
protected void onResume() {
// TODO Auto-generated method stub
super.onResume();
mCamera = Camera.open();
Camera.Parameters p = mCamera.getParameters();
Size size = p.getPreviewSize();
int width = size.width;
int height = size.height;
p.setPreviewFormat(ImageFormat.NV21); // preview frames must use a YUV format such as NV21; JPEG is not a valid preview format
mCamera.setParameters(p);
surfaceView = new MySurfaceView(this, null , mCamera ,width , height);
RelativeLayout.LayoutParams layoutParams = new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT);
surfaceView.setLayoutParams(layoutParams);
relativeLayout = (RelativeLayout)findViewById(R.id.relativeLayout);
relativeLayout.addView(surfaceView);
surfaceView.showSupportedCameraFormats(p);
}
public void releaseCamera()
{
if (mCamera != null)
{
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
surfaceView.getHolder().removeCallback(surfaceView);
mCamera.release(); // release the camera for other applications
mCamera = null;
surfaceView.mHolder.removeCallback(surfaceView);
surfaceView.mHolder = null;
surfaceView = null;
relativeLayout.removeAllViews();
relativeLayout.removeAllViewsInLayout();
relativeLayout = null;
}
}
}
Please help me. Thanks in advance.
EDIT:
The XML is:
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:id="#+id/relativeLayout"
android:layout_height="match_parent" >
</RelativeLayout>
Instead of the RelativeLayout, take a LinearLayout and put a FrameLayout in it:
<FrameLayout
android:id="#+id/preview"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:layout_weight="1" >
</FrameLayout>
and then set the camera preview in it:
surfaceView = new MySurfaceView(this, null , mCamera ,width , height);
((FrameLayout) findViewById(R.id.preview)).addView(surfaceView);
CameraPreview.java
class CameraPreview extends SurfaceView implements SurfaceHolder.Callback {
private static final String TAG = "Preview";
SurfaceHolder mHolder;
public Camera camera;
CameraPreview(Context context) {
super(context);
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, acquire the camera and tell it where
// to draw.
if(camera == null){
camera = Camera.open();
camera.setDisplayOrientation(90);
try {
camera.setPreviewDisplay(holder);
camera.setPreviewCallback(new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera arg1) {
CameraPreview.this.invalidate();
}
});
} catch (IOException e) {
camera.release();
camera = null;
}
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// Surface will be destroyed when we return, so stop the preview.
// Because the CameraDevice object is not a shared resource, it's very
// important to release it when the activity is paused.
if(camera!=null){
camera.stopPreview();
camera.setPreviewCallback(null);
camera.release();
camera = null;
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
// Now that the size is known, set up the camera parameters and begin
// the preview.
Camera.Parameters parameters = camera.getParameters();
// parameters.setPreviewSize(w, h);
camera.setParameters(parameters);
camera.startPreview();
}
@Override
public void draw(Canvas canvas) {
super.draw(canvas);
Paint p = new Paint();
p.setColor(Color.RED); // the Paint(int) constructor takes flags, not a color
Log.d(TAG,"draw");
canvas.drawText("PREVIEW", canvas.getWidth()/2, canvas.getHeight()/2, p );
}
public void releaseCameraAndPreview() {
if (camera != null) {
camera.release();
camera = null;
}
}
}
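If the preview still does not fill the screen after that, a common trick is to oversize the SurfaceView so the camera's aspect ratio is preserved while covering the whole display (center-crop, with the overflow clipped by the parent). A sketch of that, assumed to run in surfaceChanged once the preview size is known:
// scale the SurfaceView so the preview covers the full screen
Camera.Size previewSize = camera.getParameters().getPreviewSize();
// the sensor frame is landscape; swap the ratio for a display rotated 90 degrees
float previewRatio = (float) previewSize.height / previewSize.width;
int screenW = getResources().getDisplayMetrics().widthPixels;
int screenH = getResources().getDisplayMetrics().heightPixels;
ViewGroup.LayoutParams lp = getLayoutParams();
if ((float) screenW / screenH > previewRatio) {
lp.width = screenW;
lp.height = (int) (screenW / previewRatio); // taller than the screen; bottom is cropped
} else {
lp.height = screenH;
lp.width = (int) (screenH * previewRatio); // wider than the screen; sides are cropped
}
setLayoutParams(lp);
One screen dimension matches exactly and the other overflows, so nothing is letterboxed; the parent layout just has to allow a child larger than itself (a FrameLayout with fixed child LayoutParams does).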
