Android + OpenCV + Face detection + Custom Layout

I am using:
Android 4.0.3
OpenCV 2.4.2
Samsung Galaxy S2
The face-detection example (from OpenCV 2.4.2) works perfectly.
Now I would like to create a custom layout, work with just the data extracted from the face detection, and build a game on top of it, without the FdView surface necessarily taking up the entire screen.
I made the modifications below, but all I get is a black screen; nothing appears at all.
Added a fd.xml layout:
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="horizontal">
<org.opencv.samples.fd.FdView android:id="@+id/FdView"
android:layout_width="640dp"
android:layout_height="480dp"
android:visibility="visible"
/>
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:textColor="#FF0000"
android:text="hi"/>
Modified the baseLoaderCallback of FdActivity.java:
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
// Load native libs after OpenCV initialization
System.loadLibrary("detection_based_tracker");
//EXPERIMENT
setContentView(R.layout.fd);
FdView surface = (FdView) (findViewById(R.id.FdView));
surface = mView;
// Create and set View
mView = new FdView(mAppContext);
mView.setDetectorType(mDetectorType);
mView.setMinFaceSize(0.2f);
//setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton("OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
Added constructors in FdView.java:
public FdView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
// TODO Auto-generated constructor stub
}
public FdView(Context context, AttributeSet attrs) {
super(context, attrs);
// TODO Auto-generated constructor stub
}
Added constructors in SampleCvViewBase.java:
public SampleCvViewBase(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
// TODO Auto-generated constructor stub
}
public SampleCvViewBase(Context context, AttributeSet attrs) {
super(context, attrs);
// TODO Auto-generated constructor stub
}

I have precisely the same issue and am also trying to figure it out. I'm trying to display the image on a SurfaceView that doesn't take up the whole screen. Along the way I read that you can't have your Camera handler class and its linked SurfaceView in separate classes, so I merged everything into one.
At the moment I have the camera displaying on the SurfaceView and copying the frame data into an mFrame variable. I'm basically just struggling to get mFrame processed (in the threaded run() method) and to show the result on the SurfaceView.
This is the code I have, in case it helps (excuse the formatting, as my code is also a work in progress):
package org.opencv.samples.tutorial3;
import java.io.IOException;
import java.util.List;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.RectF;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.os.Bundle;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.Window;
import android.widget.TextView;
public class Sample3Native extends Activity implements SurfaceHolder.Callback,Runnable{
//Camera variables
private Camera cam;
private boolean previewing = false;
private SurfaceHolder mHolder;
private SurfaceView mViewer;
private int mFrameWidth;
private int mFrameHeight;
private byte[] mFrame;
private boolean mThreadRun;
private byte[] mBuffer;
Sample3View viewclass;
TextView text;
int value = 0;
//==========
int framecount = 0;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
// Load native library after(!) OpenCV initialization
System.loadLibrary("native_sample");
//constructor for viewclass that works on frames
viewclass = new Sample3View();
//setContentView(mView);
//OpenCam();
//setContentView(R.layout.main);
// Create and set View
CameraConstruct();
Camopen();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public Sample3Native()
{}
@Override
public void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(R.layout.main);
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack);
}
//Camera construction
public void CameraConstruct()
{
mViewer = (SurfaceView)findViewById(R.id.camera_view);
text = (TextView)findViewById(R.id.text);
mHolder = mViewer.getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
//calls camera screen setup when screen surface changes
public void surfaceChanged(SurfaceHolder holder, int format, int width,int height)
{
CamStartDisplay();
}
public void Camclose()
{
if(cam != null && previewing)
{
cam.setPreviewCallback(null);
cam.stopPreview();
cam.release();
cam = null;
previewing = false;
}
mThreadRun = false;
viewclass.PreviewStopped();
}
//only open camera, and get frame data
public void Camopen()
{
if(!previewing){
cam = Camera.open();
//rotate display
cam.setDisplayOrientation(90);
if (cam != null)
{
//copy viewed frame
cam.setPreviewCallbackWithBuffer(new PreviewCallback()
{
public void onPreviewFrame(byte[] data, Camera camera)
{
synchronized (this)
{
System.arraycopy(data, 0, mFrame, 0, data.length);
this.notify();
}
//text.setText(Integer.toString(value++));
camera.addCallbackBuffer(mBuffer);
}
});
}
}//if not previewing
}
//start preview
public void CamStartDisplay()
{
synchronized (this)
{
if(cam != null)
{
//stop previewing till after settings is changed
if(previewing == true)
{
cam.stopPreview();
previewing = false;
}
Camera.Parameters p = cam.getParameters();
for(Camera.Size s : p.getSupportedPreviewSizes())
{
p.setPreviewSize(s.width, s.height);
mFrameWidth = s.width;
mFrameHeight = s.height;
break;
}
p.setPreviewSize(mFrameWidth, mFrameHeight);
List<String> FocusModes = p.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
p.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
cam.setParameters(p);
//set the width and height for processing
viewclass.setFrame(mFrameWidth, mFrameHeight);
int size = mFrameWidth*mFrameHeight;
size = size * ImageFormat.getBitsPerPixel(p.getPreviewFormat()) / 8;
mBuffer = new byte[size];
mFrame = new byte [size];
cam.addCallbackBuffer(mBuffer);
viewclass.PreviewStarted(mFrameWidth, mFrameHeight);
//start display streaming
try
{
//cam.setPreviewDisplay(null);
cam.setPreviewDisplay(mHolder);
cam.startPreview();
previewing = true;
}
catch (IOException e)
{
e.printStackTrace();
}
}//end of if cam != null
}//synchronising
}
//thread gets started when the screen surface is created
public void surfaceCreated(SurfaceHolder holder) {
//Camopen();
//CamStartDisplay();
(new Thread(this)).start();
}
//called when the screen surface is stopped
public void surfaceDestroyed(SurfaceHolder holder)
{
Camclose();
}
//this is function that is run by thread
public void run()
{
mThreadRun = true;
while (mThreadRun)
{
//text.setText(Integer.toString(value++));
Bitmap bmp = null;
synchronized (this)
{
try
{
this.wait();
bmp = viewclass.processFrame(mFrame);
}
catch (InterruptedException e) {}
}
if (bmp != null)
{
Canvas canvas = mHolder.lockCanvas();
if (canvas != null)
{
canvas.drawBitmap(bmp, (canvas.getWidth() - mFrameWidth) / 2, (canvas.getHeight() - mFrameHeight) / 2, null);
mHolder.unlockCanvasAndPost(canvas);
}
}//if bmp != null
}// while thread in run
}
}//end class
Sample3View, as used in this class, just includes the processFrame function, as follows:
package org.opencv.samples.tutorial3;
import android.content.Context;
import android.graphics.Bitmap;
import android.widget.TextView;
class Sample3View {
private int mFrameSize;
private Bitmap mBitmap;
private int[] mRGBA;
private int frameWidth;
private int frameHeight;
private int count = 0;
Sample3Native samp;
//constructor
public Sample3View()
{
}
public void setFrame(int width,int height)
{
frameWidth = width;
frameHeight = height;
}
public void PreviewStarted(int previewWidtd, int previewHeight) {
mFrameSize = previewWidtd * previewHeight;
mRGBA = new int[mFrameSize];
mBitmap = Bitmap.createBitmap(previewWidtd, previewHeight, Bitmap.Config.ARGB_8888);
}
public void PreviewStopped() {
if(mBitmap != null) {
mBitmap.recycle();
mBitmap = null;
}
mRGBA = null;
}
public Bitmap processFrame(byte[] data) {
int[] rgba = mRGBA;
FindFeatures(frameWidth, frameHeight, data, rgba);
Bitmap bmp = mBitmap;
bmp.setPixels(rgba, 0, frameWidth, 0, 0, frameWidth, frameHeight);
//samp.setValue(count++);
return bmp;
}
public native void FindFeatures(int width, int height, byte yuv[], int[] rgba);
}
So yeah, hope this helps. If I get the complete solution working, I'll post that as well. Please post your solution too if you get there first! Enjoy.

Sorry, not a real answer (yet), but I also tried to make a custom layout with OpenCV 2.4.2.
I have a perfectly working solution for 2.4.0; if I remember right, it was enough to add the constructors, but that doesn't work with 2.4.2.
I'll try to figure something out and let you know.

I ran into the same problem when I wanted to create a custom view using a layout. OpenCV 2.4.2 does not seem to offer this feature.
OpenCV 2.4.3 does have it, but its tutorial doesn't say so (it still uses the old example from OpenCV 2.4.2). Its Android samples provide some insight, and I finally found the instructions in the OpenCV 2.4.9 documentation.
Hope it helps.
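For reference, here is a minimal sketch of the layout-based approach used by the newer OpenCV Android samples (class and method names as in the OpenCV 2.4.x Android SDK; the activity name, layout file, and view id below are made up, so adjust them to your project). The camera view is declared directly in the layout XML as an org.opencv.android.JavaCameraView and driven through CameraBridgeViewBase:
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;
import android.app.Activity;
import android.os.Bundle;
public class CustomLayoutActivity extends Activity implements CameraBridgeViewBase.CvCameraViewListener2 {
    private CameraBridgeViewBase mOpenCvCameraView;
    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            if (status == LoaderCallbackInterface.SUCCESS) {
                // Start the camera only once OpenCV has been loaded
                mOpenCvCameraView.enableView();
            } else {
                super.onManagerConnected(status);
            }
        }
    };
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // R.layout.fd (hypothetical) would contain an
        // <org.opencv.android.JavaCameraView android:id="@+id/fd_camera_view" ... />
        // next to whatever other widgets you need
        setContentView(R.layout.fd);
        mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.fd_camera_view);
        mOpenCvCameraView.setCvCameraViewListener(this);
    }
    @Override
    public void onResume() {
        super.onResume();
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
    }
    @Override
    public void onPause() {
        super.onPause();
        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();
    }
    public void onCameraViewStarted(int width, int height) { }
    public void onCameraViewStopped() { }
    public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
        // Run face detection on this Mat and return whatever should be displayed
        return inputFrame.rgba();
    }
}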

Hah, I figured out one way: you can simply separate the OpenCV loader from the custom layout.
Define the BaseLoaderCallback mOpenCVCallBack:
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS: {
Log.i(TAG, "OpenCV loaded successfully");
// Load native library after(!) OpenCV initialization
System.loadLibrary("native_sample");
}
break;
default: {
super.onManagerConnected(status);
}
break;
}
}
};
In onCreate(), build your custom layout, then start the OpenCV loader:
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
// /////////////////////////////////////////////////////////////////////
// // begin:
// // Create and set View
setContentView(R.layout.main);
mView = (Sample3View) findViewById(R.id.sample3view);
mcameraButton = (ImageView) findViewById(R.id.cameraButton);
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack)) {
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
}
Just that!
I did that, and it worked very well.
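For completeness, the R.layout.main referenced above would contain the custom view and the button, roughly like this (ids guessed from the findViewById() calls; this assumes Sample3View extends SurfaceView and has the (Context, AttributeSet) constructor):
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="vertical">
    <org.opencv.samples.tutorial3.Sample3View
        android:id="@+id/sample3view"
        android:layout_width="match_parent"
        android:layout_height="0dp"
        android:layout_weight="1" />
    <ImageView
        android:id="@+id/cameraButton"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_gravity="center_horizontal"
        android:src="@android:drawable/ic_menu_camera" />
</LinearLayout>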

Related

Qr and bar code scanner doesn't support portrait mode

I downloaded the QR and bar code scanner project from GitHub and am having difficulty changing the scanner from landscape to portrait mode. I have checked many Stack Overflow links but didn't find a proper solution. I am using the ZXing 2.1 jar in my code. Here is my code; please take a look and help me solve this problem. Thanks in advance!
Error: "java.lang.IllegalArgumentException: Crop rectangle does not fit within image data."
//BarcodeReader
package com.dynamsoft.barcodereader;
import android.app.Activity;
import android.os.Bundle;
import android.view.Display;
import android.widget.FrameLayout;
public class BarcodeReader extends Activity {
private CameraPreview mPreview;
private CameraManager mCameraManager;
private HoverView mHoverView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
Display display = getWindowManager().getDefaultDisplay();
mHoverView = (HoverView)findViewById(R.id.hover_view);
mHoverView.update(display.getWidth(), display.getHeight());
mCameraManager = new CameraManager(this);
mPreview = new CameraPreview(this, mCameraManager.getCamera());
mPreview.setArea(mHoverView.getHoverLeft(), mHoverView.getHoverTop(), mHoverView.getHoverAreaWidth(), display.getWidth());
FrameLayout preview = (FrameLayout) findViewById(R.id.camera_preview);
preview.addView(mPreview);
getActionBar().hide();
}
@Override
protected void onPause() {
super.onPause();
mPreview.onPause();
mCameraManager.onPause();
}
@Override
protected void onResume() {
// TODO Auto-generated method stub
super.onResume();
mCameraManager.onResume();
mPreview.setCamera(mCameraManager.getCamera());
}
}
//CameraManager .java
package com.dynamsoft.barcodereader;
import android.content.Context;
import android.hardware.Camera;
import android.widget.Toast;
public class CameraManager {
private Camera mCamera;
private Context mContext;
public CameraManager(Context context) {
mContext = context;
// Create an instance of Camera
mCamera = getCameraInstance();
}
public Camera getCamera() {
return mCamera;
}
private void releaseCamera() {
if (mCamera != null) {
mCamera.release(); // release the camera for other applications
mCamera = null;
}
}
public void onPause() {
releaseCamera();
}
public void onResume() {
if (mCamera == null) {
mCamera = getCameraInstance();
}
Toast.makeText(
mContext,
"preview size = "
+ mCamera.getParameters().getPreviewSize().width + ", "
+ mCamera.getParameters().getPreviewSize().height,
Toast.LENGTH_LONG).show();
}
/** A safe way to get an instance of the Camera object. */
private static Camera getCameraInstance() {
Camera c = null;
try {
c = Camera.open(); // attempt to get a Camera instance
} catch (Exception e) {
// Camera is not available (in use or does not exist)
}
return c; // returns null if camera is unavailable
}
}
//HoverView .java
package com.dynamsoft.barcodereader;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.util.AttributeSet;
import android.view.View;
public class HoverView extends View {
private Paint mPaint;
private int mLeft, mTop, mRight, mBottom;
public HoverView(Context context, AttributeSet attrs) {
super(context, attrs);
// TODO Auto-generated constructor stub
mPaint = new Paint();
mPaint.setColor(Color.RED);
mPaint.setStyle(Paint.Style.STROKE);
}
public void update(int width, int height) {
int centerX = width / 2;
int centerY = height / 2;
mLeft = centerX - 200;
mRight = centerX + 200;
mTop = centerY - 200;
mBottom = centerY + 200;
invalidate();
}
public int getHoverLeft() {
return mLeft;
}
public int getHoverTop() {
return mTop;
}
public int getHoverAreaWidth() {
return mRight - mLeft;
}
@Override
protected void onDraw(Canvas canvas) {
// TODO Auto-generated method stub
super.onDraw(canvas);
canvas.drawRect(mLeft, mTop, mRight, mBottom, mPaint);
}
}
//CameraPreview.java
package com.dynamsoft.barcodereader;
import java.io.IOException;
import android.app.AlertDialog;
import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.PreviewCallback;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import com.google.zxing.BinaryBitmap;
import com.google.zxing.LuminanceSource;
import com.google.zxing.MultiFormatReader;
import com.google.zxing.NotFoundException;
import com.google.zxing.PlanarYUVLuminanceSource;
import com.google.zxing.Result;
import com.google.zxing.common.HybridBinarizer;
public class CameraPreview extends SurfaceView implements
SurfaceHolder.Callback {
private SurfaceHolder mHolder;
private Camera mCamera;
private static final String TAG = "camera";
private int mWidth, mHeight;
private Context mContext;
private MultiFormatReader mMultiFormatReader;
private AlertDialog mDialog;
private int mLeft, mTop, mAreaWidth, mAreaHeight;
public CameraPreview(Context context, Camera camera) {
super(context);
mCamera = camera;
mContext = context;
mHolder = getHolder();
mHolder.addCallback(this);
// deprecated setting, but required on Android versions prior to 3.0
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
Parameters params = mCamera.getParameters();
mWidth = 640;
mHeight = 480;
params.setPreviewSize(mWidth, mHeight);
mCamera.setParameters(params);
mMultiFormatReader = new MultiFormatReader();
mDialog = new AlertDialog.Builder(mContext).create();
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
mCamera.setPreviewDisplay(holder);
mCamera.startPreview();
} catch (IOException e) {
Log.d(TAG, "Error setting camera preview: " + e.getMessage());
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
if (mHolder.getSurface() == null) {
return;
}
try {
mCamera.stopPreview();
} catch (Exception e) {
}
try {
mCamera.setPreviewCallback(mPreviewCallback);
mCamera.setPreviewDisplay(mHolder);
mCamera.startPreview();
} catch (Exception e) {
Log.d(TAG, "Error starting camera preview: " + e.getMessage());
}
}
public void setCamera(Camera camera) {
mCamera = camera;
}
public void onPause() {
if (mCamera != null) {
mCamera.setPreviewCallback(null);
mCamera.stopPreview();
}
}
private Camera.PreviewCallback mPreviewCallback = new PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
// TODO Auto-generated method stub
if (mDialog.isShowing())
return;
LuminanceSource source = new PlanarYUVLuminanceSource(data, mWidth,
mHeight, mLeft, mTop, mAreaWidth, mAreaHeight, false);
BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
Result result;
try {
result = mMultiFormatReader.decode(bitmap, null);
if (result != null) {
mDialog.setTitle("Result");
mDialog.setMessage(result.getText());
mDialog.show();
}
} catch (NotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
};
public void setArea(int left, int top, int areaWidth, int width) {
double ratio = width / mWidth;
mLeft = (int) (left / (ratio + 1));
mTop = (int) (top / (ratio + 1));
mAreaHeight = mAreaWidth = mWidth - mLeft * 2;
}
}
It does not work in portrait mode; you need to limit your barcode activity to landscape mode:
<activity
android:name=".BarcodeReader"
android:screenOrientation="landscape" />

How to draw on a surfaceview?

Hi guys, I was trying to make a QR code reader, so I used the QRCodeReaderView library by dlazaro66, which provides an easy implementation of the ZXing library. The code scans the QR code, but I wanted to draw a sort of reference box to indicate where on the camera SurfaceView the code is being scanned from. I tried to use the normal draw technique; it's not giving any error, but it's not drawing either. Could you help me figure out where the problem might be?
This is my activity class.
import android.app.Activity;
import android.content.Intent;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PointF;
import android.net.Uri;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.CompoundButton;
import android.widget.Switch;
import android.widget.Toast;
import com.dlazaro66.qrcodereaderview.QRCodeReaderView;
import com.dlazaro66.qrcodereaderview.QRCodeReaderView.OnQRCodeReadListener;
public class MyActivity extends Activity implements OnQRCodeReadListener{
QRCodeReaderView decoder;
Switch start_stop;
Paint paint;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_my);
decoder = (QRCodeReaderView) findViewById(R.id.view2);
decoder.setOnQRCodeReadListener(this);
start_stop=(Switch) findViewById(R.id.switch1);
start_stop.setChecked(true);
start_stop.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton compoundButton, boolean b) {
if(b){
decoder.getCameraManager().startPreview();
}
else{
decoder.getCameraManager().stopPreview();
}
}
});
paint= new Paint();
paint.setColor(Color.RED);
paint.setStrokeWidth(100);
paint.setAntiAlias(true);
paint.setStyle(Paint.Style.STROKE);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.my, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
@Override
public void onQRCodeRead(String text, PointF[] points) {
start_stop.setChecked(false);
if(text.startsWith("http")){
Toast.makeText(getApplicationContext(),text,Toast.LENGTH_SHORT).show();
final Intent intent = new Intent(Intent.ACTION_VIEW).setData(Uri.parse(text));
startActivity(intent);
}
else{
Toast.makeText(getApplicationContext(),text,Toast.LENGTH_SHORT).show();
}
Canvas canvas=new Canvas();
for(int i=0;i<points.length-1;i++){
canvas.drawLine(points[i].x,points[i].y,points[i+1].x,points[i+1].y,paint);
}
}
@Override
public void cameraNotFound() {
}
@Override
public void QRCodeNotFoundOnCamImage() {
}
}
This is the library project class from which I am getting the methods and the custom SurfaceView:
public class QRCodeReaderView extends SurfaceView implements SurfaceHolder.Callback,Camera.PreviewCallback {
public interface OnQRCodeReadListener {
public void onQRCodeRead(String text, PointF[] points);
public void cameraNotFound();
public void QRCodeNotFoundOnCamImage();
}
private OnQRCodeReadListener mOnQRCodeReadListener;
private static final String TAG = QRCodeReaderView.class.getName();
private QRCodeReader mQRCodeReader;
private int mPreviewWidth;
private int mPreviewHeight;
private SurfaceHolder mHolder;
private CameraManager mCameraManager;
public QRCodeReaderView(Context context) {
super(context);
init();
}
public QRCodeReaderView(Context context, AttributeSet attrs) {
super(context, attrs);
init();
}
public void setOnQRCodeReadListener(OnQRCodeReadListener onQRCodeReadListener) {
mOnQRCodeReadListener = onQRCodeReadListener;
}
public CameraManager getCameraManager() {
return mCameraManager;
}
@SuppressWarnings("deprecation")
private void init() {
if (checkCameraHardware(getContext())){
mCameraManager = new CameraManager(getContext());
mHolder = this.getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); // Need to set this flag despite it's deprecated
} else {
Log.e(TAG, "Error: Camera not found");
mOnQRCodeReadListener.cameraNotFound();
}
}
/****************************************************
* SurfaceHolder.Callback,Camera.PreviewCallback
****************************************************/
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
// Indicate camera, our View dimensions
mCameraManager.openDriver(holder,this.getWidth(),this.getHeight());
} catch (IOException e) {
Log.w(TAG, "Can not openDriver: "+e.getMessage());
mCameraManager.closeDriver();
}
try {
mQRCodeReader = new QRCodeReader();
mCameraManager.startPreview();
} catch (Exception e) {
Log.e(TAG, "Exception: " + e.getMessage());
mCameraManager.closeDriver();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
Log.d(TAG, "surfaceDestroyed");
mCameraManager.getCamera().setPreviewCallback(null);
mCameraManager.getCamera().stopPreview();
mCameraManager.getCamera().release();
mCameraManager.closeDriver();
}
// Called when camera take a frame
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
PlanarYUVLuminanceSource source = mCameraManager.buildLuminanceSource(data, mPreviewWidth, mPreviewHeight);
HybridBinarizer hybBin = new HybridBinarizer(source);
BinaryBitmap bitmap = new BinaryBitmap(hybBin);
try {
Result result = mQRCodeReader.decode(bitmap);
// Notify We're found a QRCode
if (mOnQRCodeReadListener != null) {
// Transform resultPoints to View coordinates
PointF[] transformedPoints = transformToViewCoordinates(result.getResultPoints());
mOnQRCodeReadListener.onQRCodeRead(result.getText(), transformedPoints);
}
} catch (ChecksumException e) {
Log.d(TAG, "ChecksumException");
e.printStackTrace();
} catch (NotFoundException e) {
// Notify QR not found
if (mOnQRCodeReadListener != null) {
mOnQRCodeReadListener.QRCodeNotFoundOnCamImage();
}
} catch (FormatException e) {
Log.d(TAG, "FormatException");
e.printStackTrace();
} finally {
mQRCodeReader.reset();
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Log.d(TAG, "surfaceChanged");
if (mHolder.getSurface() == null){
Log.e(TAG, "Error: preview surface does not exist");
return;
}
//preview_width = width;
//preview_height = height;
mPreviewWidth = mCameraManager.getPreviewSize().x;
mPreviewHeight = mCameraManager.getPreviewSize().y;
mCameraManager.stopPreview();
mCameraManager.getCamera().setPreviewCallback(this);
mCameraManager.getCamera().setDisplayOrientation(90); // Portrait mode
mCameraManager.startPreview();
}
/**
* Transform result to surfaceView coordinates
*
* This method is needed because coordinates are given in landscape camera coordinates.
* Now is working but transform operations aren't very explained
*
* TODO re-write this method explaining each single value
*
* @return a new PointF array with transformed points
*/
private PointF[] transformToViewCoordinates(ResultPoint[] resultPoints) {
PointF[] transformedPoints = new PointF[resultPoints.length];
int index = 0;
if (resultPoints != null){
float previewX = mCameraManager.getPreviewSize().x;
float previewY = mCameraManager.getPreviewSize().y;
float scaleX = this.getWidth()/previewY;
float scaleY = this.getHeight()/previewX;
for (ResultPoint point :resultPoints){
PointF tmppoint = new PointF((previewY- point.getY())*scaleX, point.getX()*scaleY);
transformedPoints[index] = tmppoint;
index++;
}
}
return transformedPoints;
}
/** Check if this device has a camera */
private boolean checkCameraHardware(Context context) {
if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA)){
// this device has a camera
return true;
}
else if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FRONT)){
// this device has a front camera
return true;
}
else if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_ANY)){
// this device has any camera
return true;
}
else {
// no camera on this device
return false;
}
}
}
A Surface is part of a producer-consumer buffer queue arrangement. Your application is on the producer end, and for a SurfaceView the system compositor (SurfaceFlinger) is on the consumer end.
A surface can have only one producer at a time. You've established the camera preview as the producer, so it's not possible to also connect a Canvas to perform drawing. You're not seeing failures because you're using new Canvas to create a Canvas in a vacuum -- it's not connected to anything. (Normally you'd use Surface#lockCanvas() to get the Canvas associated with the Surface.)
The surface is a completely separate layer, composited behind everything else by default, which means you can draw on top of it with a custom View. I don't think you need an additional view object though -- I believe you can do it with the 'view' part of the SurfaceView itself, which should have a transparent background. See the "custom drawing" documentation.
If you want to get fancy you can feed the camera preview to OpenGL ES, but that's probably excessive for what you need. (Some examples here.) Also, if you want to learn more about the Android graphics architecture, see this document.
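As a rough illustration of the "draw on top with a custom View" suggestion, here is a minimal sketch (the class name and wiring are hypothetical, not part of the QRCodeReaderView library) of a transparent View stacked above the SurfaceView, for example as the second child of a FrameLayout, that draws the detected points in its own onDraw() instead of on the camera's Surface:
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PointF;
import android.util.AttributeSet;
import android.view.View;
public class PointsOverlayView extends View {
    private final Paint paint = new Paint();
    private PointF[] points;
    public PointsOverlayView(Context context, AttributeSet attrs) {
        super(context, attrs);
        paint.setColor(Color.RED);
        paint.setStrokeWidth(10);
        paint.setAntiAlias(true);
        paint.setStyle(Paint.Style.STROKE);
    }
    // Call this from onQRCodeRead() with the transformed points;
    // invalidate() schedules a redraw (use postInvalidate() if called off the UI thread).
    public void setPoints(PointF[] points) {
        this.points = points;
        invalidate();
    }
    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        if (points == null) return;
        for (int i = 0; i < points.length - 1; i++) {
            canvas.drawLine(points[i].x, points[i].y,
                    points[i + 1].x, points[i + 1].y, paint);
        }
    }
}
In the activity you would then keep a reference to this overlay and call setPoints(points) from onQRCodeRead(), instead of creating a detached Canvas with new Canvas().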

Android: unresponsive thread when a new activity for result is started

I am new to Android, so I am making a coloring book app just to get acquainted with Android programming. I have looked up my problem extensively and implemented the suggested solutions, but still no progress.
I have an activity, 'ColoringActivity', which uses a class 'PaintView' that extends SurfaceView. I am trying to update the canvas in a separate thread. I also have a button in the layout which takes the user to another activity for picking colors. The problem is that when the user returns after choosing a color, the canvas becomes empty and I can't draw on it anymore. I think I somehow lose the thread between activities, and although the thread is running in the background, I have no access to it.
I read on this forum that I must implement pause() and resume() methods in the thread class, basically killing the thread when I go to another activity and restarting it when I return. I also read that I have to override the onPause() and onResume() methods in the activity class and construct the SurfaceView in onResume() so that it is constructed every time the user returns to this activity.
I am sorry if this doesn't make much sense; I am a bit lost as well.
My 'ColoringActivity':
package com.ali.coloryourself;
import android.app.Activity;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Toast;
public class ColoringActivity extends Activity {
private static final int COLOR_REQUEST_CODE = 100;
public static String file;
public static Bitmap bitmap;
BitmapFactory.Options options;
PaintView paintView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_draw);
Intent intent = getIntent();
file = intent.getStringExtra("fileName");
// paintView = (PaintView) findViewById(R.id.drawingSurface);
}
@Override
protected void onResume() {
paintView = (PaintView) findViewById(R.id.drawingSurface);
paintView.getThread().resume();
super.onResume();
}
@Override
protected void onPause() {
paintView.getThread().pause();
super.onPause();
}
public void pickColor(View v) {
paintView.getThread().pause();
Intent colorIntent = new Intent(this, ColorPickerActivity.class);
startActivityForResult(colorIntent, COLOR_REQUEST_CODE);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (resultCode != RESULT_CANCELED) {
if (requestCode == COLOR_REQUEST_CODE) {
int color = data.getIntExtra("Color", -1);
// paintView.getPaint().setColor(color);
}
}
}
}
My 'PaintView' class:
package com.ali.coloryourself;
import android.R.color;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Path;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
class PaintView extends SurfaceView implements SurfaceHolder.Callback {
private Paint paint = new Paint();
private Canvas canvas;
private PaintThread thread;
private Path path = new Path();
private Bitmap bitmap;
public PaintView(Context context, AttributeSet attrs) {
super(context, attrs);
SurfaceHolder holder = getHolder();
holder.addCallback(this);
paint.setColor(Color.WHITE);
paint.setStyle(Paint.Style.STROKE);
paint.setStrokeJoin(Paint.Join.ROUND);
paint.setStrokeCap(Paint.Cap.ROUND);
paint.setStrokeWidth(3);
setThread(new PaintThread(holder));
}
class PaintThread extends Thread {
private boolean mRun;
private SurfaceHolder mSurfaceHolder;
private int mMode;
public static final int STATE_PAUSE = 2;
public static final int STATE_RUNNING = 4;
public PaintThread(SurfaceHolder surfaceHolder) {
mSurfaceHolder = surfaceHolder;
}
@Override
public void run() {
while (mRun) {
try {
canvas = mSurfaceHolder.lockCanvas(null);
if (mMode == STATE_RUNNING) {
if (bitmap == null) {
bitmap = Bitmap.createBitmap(1, 1,
Bitmap.Config.ARGB_8888);
}
}
doDraw(canvas);
canvas.drawBitmap(bitmap, 0, 0, null);
} catch (Exception e) {
e.printStackTrace();
} finally {
if (canvas != null) {
mSurfaceHolder.unlockCanvasAndPost(canvas);
}
}
}
}
private void doDraw(Canvas canvas) {
canvas.drawPath(path, paint);
}
public void setRunning(boolean b) {
mRun = b;
}
public void pause() {
if (mMode == STATE_RUNNING)
setState(STATE_PAUSE);
}
public void resume() {
setState(STATE_RUNNING);
}
public void setState(int mode) {
synchronized (mSurfaceHolder) {
mMode = mode;
}
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
}
@Override
public boolean onTouchEvent(MotionEvent event) {
// Log.d("Touch", "I am touching");
float eventX = event.getX();
float eventY = event.getY();
int action = event.getAction();
switch (action) {
case MotionEvent.ACTION_DOWN:
path.moveTo(eventX, eventY);
return true;
case MotionEvent.ACTION_MOVE:
path.lineTo(eventX, eventY);
break;
case MotionEvent.ACTION_UP:
// nothing to do
break;
default:
return false;
}
return true;
}
public void surfaceCreated(SurfaceHolder holder) {
if (getThread().getState() == Thread.State.NEW) {
getThread().setRunning(true);
getThread().start();
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
boolean retry = true;
getThread().setRunning(false);
getThread().resume();
while (retry) {
try {
getThread().join();
retry = false;
} catch (InterruptedException e) {
}
}
}
public Paint getPaint() {
return paint;
}
public void setPaint(int color) {
this.paint.setColor(color);
}
public PaintThread getThread() {
return thread;
}
public void setThread(PaintThread thread) {
this.thread = thread;
}
}
my 'activity_draw.xml'
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical" >
<Button
android:id="#+id/button1"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentTop="true"
android:layout_centerHorizontal="true"
android:layout_marginTop="10dp"
android:onClick="pickColor"
android:text="Pick Color" />
<com.ali.coloryourself.PaintView
android:id="#+id/drawingSurface"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_below="#+id/button1"
android:layout_marginTop="10dp"/>
</RelativeLayout>
I know I am missing some very basic thread concept. I need to allow the user to pick a color, return, and continue drawing. I would be extremely grateful for your help.
I think I have found an answer, although I am not sure it is good programming practice. I found out that my SurfaceView and thread were created when 'ColoringActivity' was created and destroyed every time 'ColoringActivity' went to the background, but once 'ColoringActivity' was restarted and resumed, the SurfaceView and thread were not recreated. So I moved the following line
setContentView(R.layout.activity_draw);
to the onResume() method, and now I can draw on the canvas every time. Now I just need to save the canvas and reload it when the activity comes back, so the user can continue coloring where they left off.
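In code, the change boils down to something like this (a sketch of what is described above, not necessarily good practice):
@Override
protected void onResume() {
    super.onResume();
    // Re-inflate the layout so the PaintView (and its drawing thread)
    // are recreated every time the activity returns to the foreground.
    setContentView(R.layout.activity_draw);
    paintView = (PaintView) findViewById(R.id.drawingSurface);
}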

SurfaceView not drawing bitmaps anymore in Android 4.1+

In order to display a view with moving objects (drawn from bitmaps) and touch events, I've been using the following code for a SurfaceView in Android. It has always worked fine on my development devices, but it turned out that lots of users just see a black box in place of that View. After quite a long time of (unsuccessful) debugging, I've come to the conclusion that it must be Android 4.1 which causes the SurfaceView to stop working correctly.
My development devices run Android 4.0, but the users complaining about the black-only SurfaceView have Android 4.1. I checked this with an Android 4.1 emulator, and it's not working there either.
Can you see what is wrong with the code? Is it caused by the "Project Butter" things in Android 4.1, perhaps?
Of course, I've checked that the Bitmap objects are valid (saved them to SD card in appropriate lines) and all methods for drawing are periodically called as well - everything's normal there.
package com.my.package.util;
import java.util.ArrayList;
import java.util.List;
import com.my.package.Card;
import com.my.package.MyApp;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class MySurface extends SurfaceView implements SurfaceHolder.Callback {
private MyRenderThread mRenderThread;
private volatile List<Card> mGameObjects;
private volatile int mGameObjectsCount;
private int mScreenWidth;
private int mScreenHeight;
private int mGameObjectWidth;
private int mGameObjectHeight;
private int mHighlightedObject = -1;
private Paint mGraphicsPaint;
private Paint mShadowPaint;
private Rect mDrawingRect;
private int mTouchEventAction;
private Bitmap bitmapToDraw;
private int mOnDrawX1;
private BitmapFactory.Options bitmapOptions;
// ...
public MySurface(Context activityContext, AttributeSet attributeSet) {
super(activityContext, attributeSet);
getHolder().addCallback(this);
setFocusable(true); // touch events should be processed by this class
mGameObjects = new ArrayList<Card>();
mGraphicsPaint = new Paint();
mGraphicsPaint.setAntiAlias(true);
mGraphicsPaint.setFilterBitmap(true);
mShadowPaint = new Paint();
mShadowPaint.setARGB(160, 20, 20, 20);
mShadowPaint.setAntiAlias(true);
bitmapOptions = new BitmapFactory.Options();
bitmapOptions.inInputShareable = true;
bitmapOptions.inPurgeable = true;
mDrawingRect = new Rect();
}
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) { }
public void surfaceCreated(SurfaceHolder arg0) {
mScreenWidth = getWidth();
mScreenHeight = getHeight();
mGameObjectHeight = mScreenHeight;
mGameObjectWidth = mGameObjectHeight*99/150;
mCurrentSpacing = mGameObjectWidth;
setDrawingCacheEnabled(true);
mRenderThread = new MyRenderThread(getHolder(), this);
mRenderThread.setRunning(true);
mRenderThread.start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
boolean retry = true;
mRenderThread.setRunning(false); // stop thread
while (retry) { // wait for thread to close
try {
mRenderThread.join();
retry = false;
}
catch (InterruptedException e) { }
}
}
public void stopThread() {
if (mRenderThread != null) {
mRenderThread.setRunning(false);
}
}
@Override
public void onDraw(Canvas canvas) {
if (canvas != null) {
synchronized (mGameObjects) {
mGameObjectsCount = mGameObjects.size();
canvas.drawColor(Color.BLACK);
if (mGameObjectsCount > 0) {
mCurrentSpacing = Math.min(mScreenWidth/mGameObjectsCount, mGameObjectWidth);
for (int c = 0; c < mGameObjectsCount; c++) {
if (c != mHighlightedObject) {
try {
drawGameObject(canvas, mGameObjects.get(c).getDrawableID(), false, c*mCurrentSpacing, c*mCurrentSpacing+mGameObjectWidth);
}
catch (Exception e) { }
}
}
if (mHighlightedObject > -1) {
mOnDrawX1 = Math.min(mHighlightedObject*mCurrentSpacing, mScreenWidth-mGameObjectWidth);
try {
drawGameObject(canvas, mGameObjects.get(mHighlightedObject).getDrawableID(), true, mOnDrawX1, mOnDrawX1+mGameObjectWidth);
}
catch (Exception e) { }
}
}
}
}
}
private void drawGameObject(Canvas canvas, int resourceID, boolean highlighted, int xLeft, int xRight) {
if (canvas != null && resourceID != 0) {
try {
if (highlighted) {
canvas.drawRect(0, 0, mScreenWidth, mScreenHeight, mShadowPaint);
}
bitmapToDraw = MyApp.gameObjectCacheGet(resourceID);
if (bitmapToDraw == null) {
bitmapToDraw = BitmapFactory.decodeResource(getResources(), resourceID, bitmapOptions);
MyApp.gameObjectCachePut(resourceID, bitmapToDraw);
}
mDrawingRect.set(xLeft, 0, xRight, mGameObjectHeight);
canvas.drawBitmap(bitmapToDraw, null, mDrawingRect, mGraphicsPaint);
}
catch (Exception e) { }
}
}
@Override
public boolean onTouchEvent(MotionEvent event) {
synchronized (mRenderThread.getSurfaceHolder()) { // synchronized so that there are no concurrent accesses
mTouchEventAction = event.getAction();
if (mTouchEventAction == MotionEvent.ACTION_DOWN || mTouchEventAction == MotionEvent.ACTION_MOVE) {
if (event.getY() >= 0 && event.getY() < mScreenHeight) {
mTouchEventObject = (int) event.getX()/mCurrentSpacing;
if (mTouchEventObject > -1 && mTouchEventObject < mGameObjectsCount) {
mHighlightedObject = mTouchEventObject;
}
else {
mHighlightedObject = -1;
}
}
else {
mHighlightedObject = -1;
}
}
else if (mTouchEventAction == MotionEvent.ACTION_UP) {
if (mActivityCallback != null && mHighlightedObject > -1 && mHighlightedObject < mGameObjectsCount) {
try {
mActivityCallback.placeObject(mGameObjects.get(mHighlightedObject));
}
catch (Exception e) { }
}
mHighlightedObject = -1;
}
}
return true;
}
// ...
}
And this is the code for the thread that periodically calls the SurfaceView's onDraw():
package com.my.package.util;
import android.graphics.Canvas;
import android.view.SurfaceHolder;
public class MyRenderThread extends Thread {
private SurfaceHolder mSurfaceHolder;
private MySurface mSurface;
private boolean mRunning = false;
public MyRenderThread(SurfaceHolder surfaceHolder, MySurface surface) {
mSurfaceHolder = surfaceHolder;
mSurface = surface;
}
public SurfaceHolder getSurfaceHolder() {
return mSurfaceHolder;
}
public void setRunning(boolean run) {
mRunning = run;
}
@Override
public void run() {
Canvas c;
while (mRunning) {
c = null;
try {
c = mSurfaceHolder.lockCanvas(null);
synchronized (mSurfaceHolder) {
if (c != null) {
mSurface.onDraw(c);
}
}
}
finally { // when exception is thrown above we may not leave the surface in an inconsistent state
if (c != null) {
mSurfaceHolder.unlockCanvasAndPost(c);
}
}
}
}
}
The SurfaceView is included in my Activity's layout XML:
<com.my.package.util.MySurface
android:id="#+id/my_surface"
android:layout_width="fill_parent"
android:layout_height="#dimen/my_surface_height" />
Then in code it is accessed like this:
MySurface mySurface = (MySurface) findViewById(R.id.my_surface);
Rename your draw method to onDraw2() and change the thread code to call onDraw2(). This way you are not overriding the base class's onDraw(). I think you might be getting two hits on your onDraw(): one from the base class override and one from the thread.
This would also explain why setting the z-order helps: you reverse the order in which the two windows draw, thereby avoiding the problem. As to the "why now" part of the question: since you have two pathways into onDraw(), I suspect this is unsupported Android behavior, so there's no telling what might happen.
Also, I saw you called setDrawingCacheEnabled(). I don't think that is helping you; usually you would call getDrawingCache() at some point. Try removing it if it is not important.
The only other thing I see is that you create the thread and pass in the holder in surfaceCreated(). You might want to take action when surfaceChanged() occurs, or at least verify that nothing important has changed.
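A rough sketch of that rename, based on the run() loop shown above (only the method name changes; the body of the old onDraw(Canvas) moves into a plain public onDraw2(Canvas) method in MySurface, without the @Override annotation):
// In MyRenderThread.run(): call the renamed method so View.onDraw() is
// never invoked directly from the thread.
@Override
public void run() {
    Canvas c;
    while (mRunning) {
        c = null;
        try {
            c = mSurfaceHolder.lockCanvas(null);
            synchronized (mSurfaceHolder) {
                if (c != null) {
                    mSurface.onDraw2(c);   // was mSurface.onDraw(c)
                }
            }
        }
        finally {
            if (c != null) {
                mSurfaceHolder.unlockCanvasAndPost(c);
            }
        }
    }
}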

Basic surfaceView stuff - learning Android SDK

It's a bit cheeky - but I was wondering if anyone could tell me what's wrong below.
This is just me messing around trying to understand Android, not "real" code.
It's a SurfaceView which is laid out in the main activity layout.
It works until the phone's "off" button is tapped (sleep) and the phone is woken up again. Upon waking, it goes crazy and Android produces a "Force Close" dialog.
I've been trying to follow the path with LogCat, but for some reason some messages get dropped, or the path I think is being followed isn't the one actually taken.
E.g., on putting the phone to sleep I do get surfaceDestroyed() called (which seems reasonable), but on waking I do not get a surfaceCreated().
The basic logic is: the SurfaceView creates a thread which paints the system time in seconds as text. That's it.
I've got a real app I'd like to write - but until I really understand the basics, that won't happen. I've been through a fair number of tutorials too.
Any pointers most gratefully received :)
Cheers
Tim
package net.dionic.android.bouncingsquid;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.os.SystemClock;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import java.lang.System;
public class WidgetSeconds extends SurfaceView implements SurfaceHolder.Callback {
private class CanvasThread extends Thread {
private SurfaceHolder _surfaceHolder;
private WidgetSeconds _surfaceView;
private boolean _run = false;
public CanvasThread(SurfaceHolder surfaceHolder, WidgetSeconds surfaceView) {
Log.i("WidgetSecs.CanvasThread", "constructor");
_surfaceHolder = surfaceHolder;
_surfaceView = surfaceView;
}
public void setRunning(boolean run) {
_run = run;
}
@Override
public void run() {
Canvas c;
while (_run) {
c = null;
try {
c = _surfaceHolder.lockCanvas(null);
synchronized (_surfaceHolder) {
_surfaceView.onDraw(c);
try {
Thread.sleep(200);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
} finally {
// do this in a finally so that if an exception is thrown
// during the above, we don't leave the Surface in an
// inconsistent state
if (c != null) {
_surfaceHolder.unlockCanvasAndPost(c);
}
}
}
}
}
private CanvasThread canvasthread;
public void Initalise() {
Log.i("WidgetSecs", "Initialise");
}
public WidgetSeconds(Context context, AttributeSet attrs) {
super(context, attrs);
Log.i("WidgetSecs", "constructor");
this.Initalise();
getHolder().addCallback(this);
setFocusable(true);
}
@Override
public void onDraw(Canvas canvas) {
Paint textPaint;
canvas.drawColor(Color.GRAY);
textPaint = new Paint();
textPaint.setTextSize(32);
canvas.drawText(System.currentTimeMillis()/1000 + " S", 10, 50, textPaint);
canvas.restore();
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
Log.i("WidgetSecs", "surfaceChanged");
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
Log.i("WidgetSecs", "surfaceCreated");
Log.i("WidgetSecs.CanvasThread", "Thread create");
canvasthread = new CanvasThread(getHolder(), this);
canvasthread.setRunning(true);
canvasthread.start();
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i("WidgetSecs", "surfaceDestroyed");
boolean retry = true;
while (retry) {
try {
Log.i("WidgetSecs", "Thread destroyed");
canvasthread.join();
canvasthread = null;
retry = false;
} catch (InterruptedException e) {
Log.i("WidgetSecs", "Thread join failed");
// we will try it again and again...
}
}
}
}
