How to capture a screenshot of the camera preview with an overlay? - android

I saw the following link for the above query, but neither of the answers meets the expectation:
How to programmatically take a screenshot in Android?
Expectation: capture the camera preview together with an overlay.

I provided a solution to someone else; please check that answer. He had trouble getting the result, but the code copied below is what I have been using in production for about a year, so please try it.
The code captures the image in a SurfaceView that is fed by the Camera. You can overlay other views on it, and they will be captured along with the camera preview.
public class CameraSurfaceView extends SurfaceView implements SurfaceHolder.Callback {
private static final String TAG = "CameraSurfaceView";
private SurfaceHolder mSurfaceHolder;
private Camera mCamera = null;
private Bitmap mBitmap;
private Context mContext;
private Camera.Parameters mParameters;
private byte[] byteArray;
private List<Camera.Size> mSupportedPreviewSizes;
private Camera.Size mPreviewSize;
public CameraSurfaceView (Context context) {
this(context, null);
}
public CameraSurfaceView (Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
public CameraSurfaceView (Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
mContext = context;
try {
mSurfaceHolder = getHolder();
mSurfaceHolder.addCallback(this);
mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
} catch (Exception e) {
e.printStackTrace();
}
}
@Override
public void surfaceCreated(final SurfaceHolder surfaceHolder) {
if (mCamera == null) {
try {
mCamera = Camera.open();
} catch (RuntimeException ignored) {
}
}
try {
if (mCamera != null) {
WindowManager winManager = (WindowManager) mContext.getSystemService(Context.WINDOW_SERVICE);
mCamera.setPreviewDisplay(mSurfaceHolder);
}
} catch (Exception e) {
if (mCamera != null)
mCamera.release();
mCamera = null;
}
if (mCamera == null) {
return;
} else {
mCamera.setPreviewCallback(new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(byte[] bytes, Camera camera) {
if (mParameters == null)
{
return;
}
byteArray = bytes;
}
});
}
setWillNotDraw(false);
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int format, int width, int height) {
try {
mParameters = mCamera.getParameters();
List<Size> cameraSize = mParameters.getSupportedPreviewSizes();
mPreviewSize = cameraSize.get(0);
for (Size s : cameraSize) {
if ((s.width * s.height) > (mPreviewSize.width * mPreviewSize.height)) {
mPreviewSize = s;
}
}
mParameters.setPreviewSize(mPreviewSize.width, mPreviewSize.height);
mCamera.setParameters(mParameters);
mCamera.startPreview();
} catch (Exception e) {
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
if (mCamera != null) {
mCamera.setPreviewCallback(null);
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
}
public Bitmap getBitmap() {
try {
if (mParameters == null)
return null;
if (mPreviewSize == null)
return null;
int format = mParameters.getPreviewFormat();
YuvImage yuvImage = new YuvImage(byteArray, format, mPreviewSize.width, mPreviewSize.height, null);
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
Rect rect = new Rect(0, 0, mPreviewSize.width, mPreviewSize.height);
yuvImage.compressToJpeg(rect, 75, byteArrayOutputStream);
BitmapFactory.Options options = new BitmapFactory.Options();
options.inPurgeable = true;
options.inInputShareable = true;
mBitmap = BitmapFactory.decodeByteArray(byteArrayOutputStream.toByteArray(), 0, byteArrayOutputStream.size(), options);
byteArrayOutputStream.flush();
byteArrayOutputStream.close();
} catch (IOException ioe) {
ioe.printStackTrace();
}
return mBitmap;
}
public Camera getCamera() {
return mCamera;
}
}
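To get the screenshot with an overlay, one possible approach (a sketch, assuming the overlay is a View stacked above the CameraSurfaceView and covering the same on-screen area; captureWithOverlay is a hypothetical helper) is to grab the preview frame via getBitmap() and draw the overlay view on top of it. Note that the preview frame may come back rotated relative to the screen, so a rotation step may be needed in practice.
// Sketch: composite the latest preview frame with an overlay view.
// Assumes the overlay view sits above the CameraSurfaceView (e.g. in a
// FrameLayout) and covers the same area as the preview on screen.
public Bitmap captureWithOverlay(CameraSurfaceView cameraView, View overlayView) {
    Bitmap preview = cameraView.getBitmap();
    if (preview == null) {
        return null;
    }
    Bitmap result = preview.copy(Bitmap.Config.ARGB_8888, true);
    Canvas canvas = new Canvas(result);

    // Render the overlay view into its own bitmap, then scale it onto the frame.
    Bitmap overlay = Bitmap.createBitmap(overlayView.getWidth(), overlayView.getHeight(),
            Bitmap.Config.ARGB_8888);
    overlayView.draw(new Canvas(overlay));
    canvas.drawBitmap(overlay,
            new Rect(0, 0, overlay.getWidth(), overlay.getHeight()),
            new Rect(0, 0, result.getWidth(), result.getHeight()),
            null);
    return result;
}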

Related

Stream Camera Preview and display preview as Bitmap in ImageView

I need to convert the camera stream to a Bitmap and display it in an ImageView. Here is the code for CameraPreviewActivity:
public class CameraPreviewActivity extends Activity {
private MyCamera myCamera;
private Camera camera;
private ImageView camView;
private Runnable r;
private Bitmap bm;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
myCamera = new MyCamera();
myCamera.startCamera();
camView = (ImageView) findViewById(R.id.camViewId);
setContentView(R.layout.camera_preview_layout);
getCameraFrameView();
}
private void getCameraFrameView() {
r = new Runnable() {
@Override
public void run() {
bm = myCamera.getCameraFrameBitmap();
if(bm != null) {
runOnUiThread(new Runnable() {
@Override
public void run() {
camView.setImageBitmap(bm);
}
});
}
}
};
r.run();
}
}
And here is the class that initializes and runs the camera preview:
public class MyCamera {
private Camera mCamera = null;
private SurfaceTexture st = new SurfaceTexture(1);
private Bitmap cameraFrameBitmap;
private Camera.Parameters parameters;
private Camera.Size previewSize;
public void startCamera() {
Log.d("MyCamera", "Start Camera Initialized");
mCamera = Camera.open();
setupCameraParams();
try{
mCamera.setPreviewTexture(st);
mCamera.startPreview();
Log.d("MyCamera", "Camera Started Preview");
} catch (Exception e) {
e.printStackTrace();
}
}
private void setupCameraParams() {
if(mCamera != null) {
Log.d("MyCamera", "Setup Camera Params Started");
parameters = mCamera.getParameters();
List<Camera.Size> cameraSize = parameters.getSupportedPreviewSizes();
previewSize = cameraSize.get(0);
for(Camera.Size s : cameraSize) {
if((s.width * s.height) > (previewSize.height * previewSize.width)) {
previewSize = s;
}
}
parameters.setPreviewSize(previewSize.width, previewSize.height);
mCamera.setParameters(parameters);
mCamera.setPreviewCallback(previewCallback);
Log.d("MyCamera", "Setup Camera Done");
} else {
return;
}
}
private Camera.PreviewCallback previewCallback = new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
if(parameters == null && previewSize == null) {
return;
}
try{
int format = parameters.getPreviewFormat();
YuvImage yuvImage = new YuvImage(data, format, previewSize.width, previewSize.height, null);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
Rect rect = new Rect(0, 0, previewSize.width, previewSize.height);
yuvImage.compressToJpeg(rect, 100, baos);
BitmapFactory.Options options = new BitmapFactory.Options();
options.inPurgeable = true;
options.inInputShareable = true;
cameraFrameBitmap = BitmapFactory.decodeByteArray(baos.toByteArray(), 0, baos.size(), options);
baos.flush();
baos.close();
} catch (IOException e) {
e.printStackTrace();
}
}
};
public Bitmap getCameraFrameBitmap() {
return cameraFrameBitmap;
}
}
Is this the right way to do it, or is there another way to stream the camera preview into an ImageView as a bitmap? My app can't display the frame. I think there might be something wrong with the way I run the Runnable; I am not used to Threads and Runnables.
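For reference, the Runnable above runs only once, so camView is updated at most a single time (also note that findViewById is called before setContentView, so camView will be null as posted). A rough sketch of keeping the ImageView updated by re-posting the Runnable with a Handler on the main thread, using the same myCamera and camView fields as assumptions:
// Sketch: re-post the frame fetch so the ImageView keeps updating.
private final Handler handler = new Handler(Looper.getMainLooper());
private final Runnable frameUpdater = new Runnable() {
    @Override
    public void run() {
        Bitmap frame = myCamera.getCameraFrameBitmap();
        if (frame != null) {
            camView.setImageBitmap(frame); // the Handler runs this on the main thread
        }
        handler.postDelayed(this, 33);     // roughly 30 fps; tune as needed
    }
};

// start the loop (e.g. at the end of onCreate) and stop it in onPause():
// handler.post(frameUpdater);
// handler.removeCallbacks(frameUpdater);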

Camera Flash with OpenCV3.0.0 Android not working

I'm writing code that uses OpenCV video streaming, and I want to turn on the camera flash when the camera is first opened. I have implemented a new class that extends JavaCameraView, but the app crashes when I call this line:
mOpencvCameraView.setEffect(Camera.Parameters.FLASH_MODE_ON);
The class is as follows:
My Code
public class CameraCustomize extends JavaCameraView implements Camera.PictureCallback {
private String mPictureFileName;
public CameraCustomize(Context context, AttributeSet attrs) {
super(context, attrs);
}
public List<String> getEffectList() {
return mCamera.getParameters().getSupportedFlashModes();
}
public boolean isEffectSupported() {
return (mCamera.getParameters().getFlashMode() != null);
}
public String getEffect() {
return mCamera.getParameters().getFlashMode();
}
public void setEffect(String effect) {
mCamera.getParameters(); // this is where I get the error
Camera.Parameters params = mCamera.getParameters();
params.setFlashMode(effect);
mCamera.setParameters(params);
}
public List<Camera.Size> getResolutionList() {
return mCamera.getParameters().getSupportedPreviewSizes();
}
public void setResolution(int w, int h) {
disconnectCamera();
mMaxHeight = h;
mMaxWidth = w;
connectCamera(getWidth(), getHeight());
}
public Camera.Size getResolution() {
return mCamera.getParameters().getPreviewSize();
}
public void takePicture(final String fileName) {
this.mPictureFileName = fileName;
// Postview and jpeg are sent in the same buffers if the queue is not empty when performing a capture.
// Clear up buffers to avoid mCamera.takePicture getting stuck because of a memory issue
mCamera.setPreviewCallback(null);
// PictureCallback is implemented by the current class
mCamera.takePicture(null, null, this);
}
@Override
public void onPictureTaken(byte[] data, Camera camera) {
// The camera preview was automatically stopped. Start it again.
mCamera.startPreview();
mCamera.setPreviewCallback(this);
// Write the image in a file (in jpeg format)
try {
FileOutputStream fos = new FileOutputStream(mPictureFileName);
fos.write(data);
fos.close();
} catch (java.io.IOException e) {
Log.e("PictureDemo", "Exception in photoCallback", e);
}
}
public void cameraRelease() {
if(mCamera != null){
mCamera.release();
}
}
}
Can anyone please help me, or suggest another way to turn on the camera flash?
Thank you in advance.
I found the solution by applying the class from this link: Using Camera LED Flash with OpenCV on Android.
It works perfectly :)
Here is the class that I've implemented:
private static final String TAG = "Sample::Tutorial2View";
private Context myreference;
private static boolean isFlashLightON = false;
public Tutorial2View(Context context, AttributeSet attrs) {
super(context, attrs);
this.myreference = context;
}
public List<String> getEffectList() {
return mCamera.getParameters().getSupportedColorEffects();
}
public boolean isEffectSupported() {
return (mCamera.getParameters().getColorEffect() != null);
}
public String getEffect() {
return mCamera.getParameters().getColorEffect();
}
public void setEffect(String effect) {
Camera.Parameters params = mCamera.getParameters();
params.setColorEffect(effect);
mCamera.setParameters(params);
}
public List<Size> getResolutionList() {
return mCamera.getParameters().getSupportedPreviewSizes();
}
public void setResolution(Size resolution) {
disconnectCamera();
mMaxHeight = resolution.height;
mMaxWidth = resolution.width;
connectCamera(getWidth(), getHeight());
}
public Size getResolution() {
return mCamera.getParameters().getPreviewSize();
}
// Toggle the camera flash (torch) on or off
public void setupCameraFlashLight() {
Camera camera = mCamera;
if (camera != null) {
Parameters params = camera.getParameters();
if (params != null) {
if (isFlashLightON) {
isFlashLightON = false;
params.setFlashMode(Parameters.FLASH_MODE_OFF);
camera.setParameters(params);
camera.startPreview();
} else {
isFlashLightON = true;
params.setFlashMode(Parameters.FLASH_MODE_TORCH);
camera.setParameters(params);
camera.startPreview();
}
}
}
}
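For completeness, the original crash most likely happens because setEffect() touches mCamera before the OpenCV view has connected to the camera. A rough usage sketch (assuming the class above is the Tutorial2View declared in the layout, with the usual OpenCV loader/enableView wiring omitted) that toggles the flash only once the camera is connected:
// Sketch: only touch the flash after the OpenCV camera view has connected;
// otherwise mCamera inside the view is still null and getParameters() throws.
public class CameraActivity extends Activity implements CameraBridgeViewBase.CvCameraViewListener2 {
    private Tutorial2View mOpenCvCameraView; // assumed to be assigned in onCreate via findViewById

    @Override
    public void onCameraViewStarted(int width, int height) {
        mOpenCvCameraView.setupCameraFlashLight(); // safe here: the camera is connected
    }

    @Override
    public void onCameraViewStopped() { }

    @Override
    public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
        return inputFrame.rgba();
    }
}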

Android Camera SurfaceView take picture

I want to get a bitmap captured by calling a function in my camera SurfaceView class, but I always get an error like this (any help will be appreciated):
java.lang.NullPointerException
at com.etoff.appsopengl.CameraSurfaceView.setCapture(CameraSurfaceView.java:58)
at com.etoff.appsopengl.Stage$MyRenderer.onDrawFrame(Stage.java:168)
at android.opengl.GLSurfaceView$GLThread.guardedRun(GLSurfaceView.java:1467)
at android.opengl.GLSurfaceView$GLThread.run(GLSurfaceView.java:1221)
This is the Stage class with the calling code:
public class Stage extends GLSurfaceView {
CameraSurfaceView csv;
Bitmap imgB;
public Stage(Context context, AttributeSet attrs) {
super(context, attrs);
csv = new CameraSurfaceView(context);
}
//inside renderer I call the function
if(c==true){
csv.setCapture();
imgB = csv.getBitmap();
}
}
This is the CameraSurfaceView class code:
public class CameraSurfaceView extends SurfaceView implements SurfaceHolder.Callback {
Camera mCamera;
boolean isPreviewRunning = false;
Bitmap mBitmap;
CameraSurfaceView(Context context) {
super(context);
SurfaceHolder mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder holder) {
synchronized(this) {
mCamera = Camera.open();
try {
mCamera.setPreviewDisplay(holder);
} catch (IOException e) {
Log.e("Camera", "mCamera.setPreviewDisplay(holder);");
}
mCamera.setDisplayOrientation(90);
mCamera.startPreview();
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
synchronized(this) {
try {
if (mCamera!=null) {
mCamera.stopPreview();
isPreviewRunning=false;
mCamera.release();
}
} catch (Exception e) {
Log.e("Camera", e.getMessage());
}
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
}
public void setCapture(){
mCamera.takePicture(null,null,mPicture);
}
public Bitmap getBitmap(){
return mBitmap;
}
private Camera.PictureCallback mPicture = new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
BitmapFactory.Options opt = new BitmapFactory.Options();
opt.inMutable = true;
mBitmap = BitmapFactory.decodeByteArray(data, 0, data.length, opt);
}
};
}
Use this code to get your front-facing camera ID:
private int findFrontFacingCamera() {
int cameraId = -1;
// Search for the front facing camera
int numberOfCameras = Camera.getNumberOfCameras();
for (int i = 0; i < numberOfCameras; i++) {
CameraInfo info = new CameraInfo();
Camera.getCameraInfo(i, info);
if (info.facing == CameraInfo.CAMERA_FACING_FRONT) {
Log.d(DEBUG_TAG, "Camera found");
cameraId = i;
break;
}
}
return cameraId;
}
And use it like this:
// do we have a camera?
if (!getPackageManager()
.hasSystemFeature(PackageManager.FEATURE_CAMERA)) {
Toast.makeText(this, "No camera on this device", Toast.LENGTH_LONG)
.show();
} else {
int cameraId = findFrontFacingCamera();
if (cameraId < 0) {
Toast.makeText(this, "No front facing camera found.",
Toast.LENGTH_LONG).show();
} else {
mCamera = Camera.open(cameraId);
}
}

Android camera capture bitmap

In my Android code I have a camera SurfaceView class. When I show the preview for this camera class it's a live camera scene, but how can I create a function that produces a bitmap of the camera's current frame, so that I can get the captured bitmap from another class?
Any guidance will be appreciated.
This is my camera class coding:
import android.content.Context;
import android.hardware.Camera;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import java.io.IOException;
public class CameraSurfaceView extends SurfaceView implements SurfaceHolder.Callback {
Camera mCamera;
boolean isPreviewRunning = false;
CameraSurfaceView(Context context) {
super(context);
SurfaceHolder mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder holder) {
synchronized(this) {
mCamera = Camera.open();
try {
mCamera.setPreviewDisplay(holder);
} catch (IOException e) {
Log.e("Camera", "mCamera.setPreviewDisplay(holder);");
}
mCamera.setDisplayOrientation(90);
mCamera.startPreview();
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
synchronized(this) {
try {
if (mCamera!=null) {
mCamera.stopPreview();
isPreviewRunning=false;
mCamera.release();
}
} catch (Exception e) {
Log.e("Camera", e.getMessage());
}
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
}
}
Note that the code below captures the image at its displayed pixel size, not by taking a photo with the camera. That means the resulting image has roughly the actual pixel size of the screen (e.g. 1080x768), not the multi-megapixel high-resolution image the Camera app would produce. If you want an image like the Camera app provides, use the takePicture method instead.
To capture the live preview image in a SurfaceView:
public class CameraSurfaceView extends SurfaceView implements SurfaceHolder.Callback {
private static final String TAG = "CameraSurfaceView";
private SurfaceHolder mSurfaceHolder;
private Camera mCamera = null;
private Bitmap mBitmap;
private Context mContext;
private Camera.Parameters mParameters;
private byte[] byteArray;
private List<Camera.Size> mSupportedPreviewSizes;
private Camera.Size mPreviewSize;
public CameraSurfaceView (Context context) {
this(context, null);
}
public CameraSurfaceView (Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
public CameraSurfaceView (Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
mContext = context;
try {
mSurfaceHolder = getHolder();
mSurfaceHolder.addCallback(this);
mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
} catch (Exception e) {
e.printStackTrace();
}
}
@Override
public void surfaceCreated(final SurfaceHolder surfaceHolder) {
if (mCamera == null) {
try {
mCamera = Camera.open();
} catch (RuntimeException ignored) {
}
}
try {
if (mCamera != null) {
WindowManager winManager = (WindowManager) mContext.getSystemService(Context.WINDOW_SERVICE);
mCamera.setPreviewDisplay(mSurfaceHolder);
}
} catch (Exception e) {
if (mCamera != null)
mCamera.release();
mCamera = null;
}
if (mCamera == null) {
return;
} else {
mCamera.setPreviewCallback(new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(byte[] bytes, Camera camera) {
if (mParameters == null)
{
return;
}
byteArray = bytes;
}
});
}
setWillNotDraw(false);
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int format, int width, int height) {
try {
mParameters = mCamera.getParameters();
List<Size> cameraSize = mParameters.getSupportedPreviewSizes();
mPreviewSize = cameraSize.get(0);
for (Size s : cameraSize) {
if ((s.width * s.height) > (mPreviewSize.width * mPreviewSize.height)) {
mPreviewSize = s;
}
}
mParameters.setPreviewSize(mPreviewSize.width, mPreviewSize.height);
mCamera.setParameters(mParameters);
mCamera.startPreview();
} catch (Exception e) {
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
if (mCamera != null) {
mCamera.setPreviewCallback(null);
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
}
public Bitmap getBitmap() {
try {
if (mParameters == null)
return null;
if (mPreviewSize == null)
return null;
int format = mParameters.getPreviewFormat();
YuvImage yuvImage = new YuvImage(byteArray, format, mPreviewSize.width, mPreviewSize.height, null);
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
Rect rect = new Rect(0, 0, mPreviewSize.width, mPreviewSize.height);
yuvImage.compressToJpeg(rect, 75, byteArrayOutputStream);
BitmapFactory.Options options = new BitmapFactory.Options();
options.inPurgeable = true;
options.inInputShareable = true;
mBitmap = BitmapFactory.decodeByteArray(byteArrayOutputStream.toByteArray(), 0, byteArrayOutputStream.size(), options);
byteArrayOutputStream.flush();
byteArrayOutputStream.close();
} catch (IOException ioe) {
ioe.printStackTrace();
}
return mBitmap;
}
public Camera getCamera() {
return mCamera;
}
}
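A typical way to use this class from an activity (a sketch, e.g. inside onCreate(); the layout IDs below are hypothetical, and getBitmap() is called on the UI thread) would be:
// Sketch: show the preview and grab the current frame on a button tap.
// R.id.camera_container and R.id.capture_button are hypothetical layout IDs.
final CameraSurfaceView cameraView = new CameraSurfaceView(this);
final FrameLayout container = (FrameLayout) findViewById(R.id.camera_container);
container.addView(cameraView);

findViewById(R.id.capture_button).setOnClickListener(new View.OnClickListener() {
    @Override
    public void onClick(View v) {
        Bitmap frame = cameraView.getBitmap();
        if (frame != null) {
            // use the bitmap: display it, save it to disk, etc.
        }
    }
});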

java.lang.RuntimeException: Method call after release()

I have an Android application that has two camera activities which switch between each other from time to time. After switching several times, the first activity throws this exception. Any idea why this happens?
How can I fix it? Please help me. Thank you for reading my question, and have a nice day!
public abstract class SampleViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "LogcatActivity";
private Camera mCamera;
private SurfaceHolder mHolder;
private int mFrameWidth;
private int mFrameHeight;
private byte[] mFrame;
private boolean mThreadRun;
private int frameNumber=1;//my
public SampleViewBase(Context context) {///
super(context);
mHolder = getHolder();
mHolder.addCallback(this);
Log.i(TAG, "Instantiated new " + this.getClass());
}
public int getFrameWidth() {
return mFrameWidth;
}
public int getFrameHeight() {
return mFrameHeight;
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceCreated");
if (mCamera != null) {
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
//--
List<String> flashing_methords=params.getSupportedFlashModes();
params.setFlashMode(flashing_methords.get(3));
// List<String> color_effects=params.getSupportedColorEffects();
// params.setColorEffect(color_effects.get(2));
//--
mFrameWidth = width;
mFrameHeight = height;
// selecting optimal camera preview size
{
double minDiff = Double.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
mFrameHeight = size.height;
minDiff = Math.abs(size.height - height);
}
}
}
params.setPreviewSize(getFrameWidth(), getFrameHeight());
mCamera.setParameters(params);
mCamera.startPreview();
}
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
try{
mCamera.reconnect();
mCamera = Camera.open();
mCamera.setPreviewCallback(new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera camera) {
synchronized (SampleViewBase.this) {
mFrame = data;
SampleViewBase.this.notify();
}
/* if((frameNumber%120)==0){
synchronized (SampleViewBase.this) {
mFrame = data;
SampleViewBase.this.notify();
frameNumber=1;
}
}else{
frameNumber++;
}*/
}
});
(new Thread(this)).start();
}catch (Exception e) {
Log.v(TAG, "reconnect error" + e);
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
mThreadRun = false;
if (mCamera != null) {
synchronized (this) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
mCamera = null;
}
}
}
protected abstract Bitmap processFrame(byte[] data);
public void run() {
mThreadRun = true;
Log.i(TAG, "Starting processing thread");
while (mThreadRun) {
Bitmap bmp = null;
synchronized (this) {
try {
this.wait();
bmp = processFrame(mFrame);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
if (bmp != null) {
Canvas canvas = mHolder.lockCanvas();
if (canvas != null) {
canvas.drawBitmap(bmp, (canvas.getWidth() - getFrameWidth()) / 2, (canvas.getHeight() - getFrameHeight()) / 2, null);
mHolder.unlockCanvasAndPost(canvas);
}
bmp.recycle();
}
}
}
public void releaseAll() {
Log.i(TAG, "hardweare released");
mThreadRun = false;
if (mCamera != null) {
synchronized (this) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
mCamera = null;
}
}
}
}
Try releasing your camera resources from your activity by overriding the back key:
@Override
public void onBackPressed()
{
releaseCamera();
super.onBackPressed();
}
private void releaseCamera()
{
synchronized (this)
{
if(mCamera!=null)
{
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
mCamera = null;
}
}
}
Add the method releaseCamera() to your SurfaceView and call it from your activity when the user presses the back button.
The mCamera.release() call disconnects the camera and releases the Camera object's resources, which does essentially the same job as your following mCamera = null; statement. Why would you do that?
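Separately, since the two activities switch back and forth repeatedly, it can also help to tie the release to the activity lifecycle rather than only to the back key. A rough sketch (assuming mView is the SampleViewBase instance from the question) is:
// Sketch: release the camera whenever the activity leaves the foreground,
// not only on the back key; the surface callbacks reopen it on resume.
@Override
protected void onPause() {
    super.onPause();
    if (mView != null) {
        mView.releaseAll(); // stops the processing thread and releases the camera
    }
}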
