Take photo in service using Camera2 API - android

I am using the Camera2 API. I need to take a photo from a service, without a preview. It works, but the photos have bad exposure: they come out very dark or sometimes very bright. How can I fix my code so that the photos are correctly exposed? I'm using the front camera.
public class Camera2Service extends Service
{
protected static final String TAG = "myLog";
protected static final int CAMERACHOICE = CameraCharacteristics.LENS_FACING_BACK;
protected CameraDevice cameraDevice;
protected CameraCaptureSession session;
protected ImageReader imageReader;
protected CameraDevice.StateCallback cameraStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice camera) {
Log.d(TAG, "CameraDevice.StateCallback onOpened");
cameraDevice = camera;
actOnReadyCameraDevice();
}
@Override
public void onDisconnected(@NonNull CameraDevice camera) {
Log.w(TAG, "CameraDevice.StateCallback onDisconnected");
}
@Override
public void onError(@NonNull CameraDevice camera, int error) {
Log.e(TAG, "CameraDevice.StateCallback onError " + error);
}
};
protected CameraCaptureSession.StateCallback sessionStateCallback = new CameraCaptureSession.StateCallback() {
@Override
public void onReady(CameraCaptureSession session) {
Camera2Service.this.session = session;
try {
session.setRepeatingRequest(createCaptureRequest(), null, null);
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
}
}
@Override
public void onConfigured(CameraCaptureSession session) {
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
}
};
protected ImageReader.OnImageAvailableListener onImageAvailableListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Log.d(TAG, "onImageAvailable");
Image img = reader.acquireLatestImage();
if (img != null) {
processImage(img);
img.close();
}
}
};
public void readyCamera() {
CameraManager manager = (CameraManager) getSystemService(CAMERA_SERVICE);
try {
String pickedCamera = getCamera(manager);
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
return;
}
manager.openCamera(pickedCamera, cameraStateCallback, null);
imageReader = ImageReader.newInstance(1920, 1088, ImageFormat.JPEG, 2 /* images buffered */);
imageReader.setOnImageAvailableListener(onImageAvailableListener, null);
Log.d(TAG, "imageReader created");
} catch (CameraAccessException e){
Log.e(TAG, e.getMessage());
}
}
public String getCamera(CameraManager manager){
try {
for (String cameraId : manager.getCameraIdList()) {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
int cOrientation = characteristics.get(CameraCharacteristics.LENS_FACING);
if (cOrientation != CAMERACHOICE) {
return cameraId;
}
}
} catch (CameraAccessException e){
e.printStackTrace();
}
return null;
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
Log.d(TAG, "onStartCommand flags " + flags + " startId " + startId);
readyCamera();
return super.onStartCommand(intent, flags, startId);
}
@Override
public void onCreate() {
Log.d(TAG,"onCreate service");
super.onCreate();
}
public void actOnReadyCameraDevice()
{
try {
cameraDevice.createCaptureSession(Arrays.asList(imageReader.getSurface()), sessionStateCallback, null);
} catch (CameraAccessException e){
Log.e(TAG, e.getMessage());
}
}
@Override
public void onDestroy() {
try {
session.abortCaptures();
} catch (CameraAccessException e){
Log.e(TAG, e.getMessage());
}
session.close();
}
private void processImage(Image image){
//Process image data
ByteBuffer buffer;
byte[] bytes;
boolean success = false;
File file = new File(Environment.getExternalStorageDirectory() + "/Pictures/image.jpg");
FileOutputStream output = null;
if(image.getFormat() == ImageFormat.JPEG) {
buffer = image.getPlanes()[0].getBuffer();
bytes = new byte[buffer.remaining()]; // makes byte array large enough to hold image
buffer.get(bytes); // copies image from buffer to byte array
try {
output = new FileOutputStream(file);
output.write(bytes); // write the byte array to file
success = true;
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
image.close(); // close this to free up buffer for other images
if (null != output) {
try {
output.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}
protected CaptureRequest createCaptureRequest() {
try {
CaptureRequest.Builder builder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
builder.addTarget(imageReader.getSurface());
return builder.build();
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
return null;
}
}
@Override
public IBinder onBind(Intent intent) {
return null;
}
}

Sergey, I copied your code and indeed I was able to reproduce the issue. I got totally black pictures out of Google Pixel 2 (Android 8.1).
However, I have successfully resolved the black-pic issue as follows:
First, in case anyone is wondering: you actually do NOT need any Activity or any preview UI element, as many other threads about the Camera API claim. That used to be true for the deprecated Camera v1 API, but with the new Camera2 API, all I needed was a foreground service.
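For reference, here is a minimal sketch of the foreground part, to be called from onStartCommand() before opening the camera on Android 8+. The channel id, title, and icon below are placeholders of my own, not taken from the code in this thread:
NotificationChannel channel = new NotificationChannel(
        "camera_service", "Camera Service", NotificationManager.IMPORTANCE_LOW);
getSystemService(NotificationManager.class).createNotificationChannel(channel);
Notification notification = new Notification.Builder(this, "camera_service")
        .setContentTitle("Capturing")
        .setSmallIcon(android.R.drawable.ic_menu_camera)
        .build();
// Promote the service to the foreground so it can keep using the camera
// while no Activity is visible.
startForeground(1, notification);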
To start the capturing process, I used this code:
CaptureRequest.Builder builder = cameraDevice.createCaptureRequest (CameraDevice.TEMPLATE_VIDEO_SNAPSHOT);
builder.set (CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
builder.set (CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
builder.addTarget (imageReader.getSurface ());
captureRequest = builder.build ();
Then, in ImageReader.onImageAvailable, I skipped the first N pictures (meaning I did not save them). I just let the session run and capture more frames without saving them.
That gave the camera enough time to gradually adjust the exposure parameters on its own. After the N ignored frames I saved a photo, and it was normally exposed, not black at all.
The value of the N constant depends on the characteristics of your hardware, so you will need to determine the ideal value experimentally for your device. You can also automate this with a histogram-based heuristic. At the beginning of your experiments, don't be afraid to start saving only after several hundred milliseconds of calibration have passed.
Finally, many similar threads suggest just waiting, e.g., 500 ms after creating the session and then taking a single picture. That does not help. You really have to let the camera run and capture frames rapidly (at the fastest rate possible); for that, simply use the setRepeatingRequest method (as in your original code).
Hope this helps. :)
EDITED TO ADD: When skipping the initial N pictures, you need to call the acquireLatestImage method of ImageReader for each of those skipped pictures too. Otherwise, it won't work.
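For illustration, here is a count-based sketch of that skip logic (the SKIP_FRAMES constant and framesReceived counter are placeholder names of mine; the full code below uses a time-based delay instead, which achieves the same thing):
private static final int SKIP_FRAMES = 30; // tune experimentally per device
private int framesReceived = 0;

protected ImageReader.OnImageAvailableListener onImageAvailableListener = new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader reader) {
        // acquireLatestImage() must be called even for skipped frames,
        // otherwise the reader's buffer queue fills up and capture stalls.
        Image img = reader.acquireLatestImage();
        if (img != null) {
            if (++framesReceived > SKIP_FRAMES) {
                processImage(img);
            }
            img.close();
        }
    }
};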
Here is the full code with my changes incorporated, tested and confirmed working on a Google Pixel 2 running Android 8.1:
public class Camera2Service extends Service
{
protected static final int CAMERA_CALIBRATION_DELAY = 500;
protected static final String TAG = "myLog";
protected static final int CAMERACHOICE = CameraCharacteristics.LENS_FACING_BACK;
protected static long cameraCaptureStartTime;
protected CameraDevice cameraDevice;
protected CameraCaptureSession session;
protected ImageReader imageReader;
protected CameraDevice.StateCallback cameraStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice camera) {
Log.d(TAG, "CameraDevice.StateCallback onOpened");
cameraDevice = camera;
actOnReadyCameraDevice();
}
@Override
public void onDisconnected(@NonNull CameraDevice camera) {
Log.w(TAG, "CameraDevice.StateCallback onDisconnected");
}
@Override
public void onError(@NonNull CameraDevice camera, int error) {
Log.e(TAG, "CameraDevice.StateCallback onError " + error);
}
};
protected CameraCaptureSession.StateCallback sessionStateCallback = new CameraCaptureSession.StateCallback() {
@Override
public void onReady(CameraCaptureSession session) {
Camera2Service.this.session = session;
try {
session.setRepeatingRequest(createCaptureRequest(), null, null);
cameraCaptureStartTime = System.currentTimeMillis ();
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
}
}
@Override
public void onConfigured(CameraCaptureSession session) {
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
}
};
protected ImageReader.OnImageAvailableListener onImageAvailableListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Log.d(TAG, "onImageAvailable");
Image img = reader.acquireLatestImage();
if (img != null) {
if (System.currentTimeMillis () > cameraCaptureStartTime + CAMERA_CALIBRATION_DELAY) {
processImage(img);
}
img.close();
}
}
};
public void readyCamera() {
CameraManager manager = (CameraManager) getSystemService(CAMERA_SERVICE);
try {
String pickedCamera = getCamera(manager);
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
return;
}
manager.openCamera(pickedCamera, cameraStateCallback, null);
imageReader = ImageReader.newInstance(1920, 1088, ImageFormat.JPEG, 2 /* images buffered */);
imageReader.setOnImageAvailableListener(onImageAvailableListener, null);
Log.d(TAG, "imageReader created");
} catch (CameraAccessException e){
Log.e(TAG, e.getMessage());
}
}
public String getCamera(CameraManager manager){
try {
for (String cameraId : manager.getCameraIdList()) {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
int cOrientation = characteristics.get(CameraCharacteristics.LENS_FACING);
if (cOrientation == CAMERACHOICE) {
return cameraId;
}
}
} catch (CameraAccessException e){
e.printStackTrace();
}
return null;
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
Log.d(TAG, "onStartCommand flags " + flags + " startId " + startId);
readyCamera();
return super.onStartCommand(intent, flags, startId);
}
@Override
public void onCreate() {
Log.d(TAG,"onCreate service");
super.onCreate();
}
public void actOnReadyCameraDevice()
{
try {
cameraDevice.createCaptureSession(Arrays.asList(imageReader.getSurface()), sessionStateCallback, null);
} catch (CameraAccessException e){
Log.e(TAG, e.getMessage());
}
}
@Override
public void onDestroy() {
try {
session.abortCaptures();
} catch (CameraAccessException e){
Log.e(TAG, e.getMessage());
}
session.close();
}
private void processImage(Image image){
//Process image data
ByteBuffer buffer;
byte[] bytes;
boolean success = false;
File file = new File(Environment.getExternalStorageDirectory() + "/Pictures/image.jpg");
FileOutputStream output = null;
if(image.getFormat() == ImageFormat.JPEG) {
buffer = image.getPlanes()[0].getBuffer();
bytes = new byte[buffer.remaining()]; // makes byte array large enough to hold image
buffer.get(bytes); // copies image from buffer to byte array
try {
output = new FileOutputStream(file);
output.write(bytes); // write the byte array to file
success = true;
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
image.close(); // close this to free up buffer for other images
if (null != output) {
try {
output.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}
protected CaptureRequest createCaptureRequest() {
try {
CaptureRequest.Builder builder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
builder.addTarget(imageReader.getSurface());
return builder.build();
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
return null;
}
}
@Override
public IBinder onBind(Intent intent) {
return null;
}
}

Related

Not taking picture in background using textureview and camera2 api

Here is my Camera2Manager code.
I have added the permissions in the manifest file and I also request them at run time, but this code is still not working. I am calling this class from my other class like this:
Camera2Manager camera2Manager = new Camera2Manager(mTextureView,GestureSelfUnlockActivity.this);
camera2Manager.initCamera();
My manager class is below. I have checked the code and the SurfaceTextureListener is not working. Can anyone help me out? Thanks.
public class Camera2Manager {
private static final int SETIMAGE = 1;
private static final int MOVE_FOCK = 2;
private TextureView mTextureView;
private Context mContext;
private Handler mHandler;
private Handler mUIHandler;
private ImageReader mImageReader;
private CaptureRequest.Builder mPreViewBuidler;
private CameraCaptureSession mCameraSession;
private CameraCharacteristics mCameraCharacteristics;
private Size mPreViewSize;
private Rect maxZoomrect;
private int maxRealRadio;
// camera zoom related
private Rect picRect;
public Camera2Manager(TextureView textureView, Context context) {
mTextureView = textureView;
mContext = context;
}
public void initCamera() {
mUIHandler = new Handler(new InnerCallBack());
mTextureView.setSurfaceTextureListener(mSurfacetextlistener);
// Toast.makeText(mContext, "Caremmaa", Toast.LENGTH_SHORT).show();
}
@SuppressLint("NewApi")
private final ImageReader.OnImageAvailableListener onImageAvaiableListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader imageReader) {
mHandler.post(new ImageSaver(imageReader.acquireNextImage()));
}
};
private Surface surface;
@SuppressLint("NewApi")
private final CameraDevice.StateCallback cameraOpenCallBack = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice cameraDevice) {
try {
mPreViewBuidler = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
SurfaceTexture texture = mTextureView.getSurfaceTexture();
texture.setDefaultBufferSize(mPreViewSize.getWidth(), mPreViewSize.getHeight());
surface = new Surface(texture);
mPreViewBuidler.addTarget(surface);
cameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()), mSessionStateCallBack, mHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onDisconnected(CameraDevice cameraDevice) {
}
@Override
public void onError(CameraDevice cameraDevice, int i) {
}
};
@SuppressLint("NewApi")
private final CameraCaptureSession.StateCallback mSessionStateCallBack = new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
try {
mCameraSession = cameraCaptureSession;
cameraCaptureSession.setRepeatingRequest(mPreViewBuidler.build(), null, mHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
}
};
private final TextureView.SurfaceTextureListener mSurfacetextlistener = new TextureView
.SurfaceTextureListener() {
@SuppressLint("NewApi")
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int i, int i1) {
HandlerThread thread = new HandlerThread("Camera2");
thread.start();
mHandler = new Handler(thread.getLooper());
CameraManager manager = (CameraManager) mContext.getSystemService(Context.CAMERA_SERVICE);
String cameraid = CameraCharacteristics.LENS_FACING_FRONT + "";
Toast.makeText(mContext, "Caremmaa", Toast.LENGTH_SHORT).show();
try {
mCameraCharacteristics = manager.getCameraCharacteristics(cameraid);
//The area of the screen sensor, in pixels.
maxZoomrect = mCameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
//Maximum digital zoom
maxRealRadio = mCameraCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM).intValue();
picRect = new Rect(maxZoomrect);
StreamConfigurationMap map = mCameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)
), new CompareSizeByArea());
mPreViewSize = map.getOutputSizes(SurfaceTexture.class)[0];
mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(), ImageFormat.JPEG, 5);
mImageReader.setOnImageAvailableListener(onImageAvaiableListener, mHandler);
if (ActivityCompat.checkSelfPermission(mContext, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
// TODO: Consider calling
// ActivityCompat#requestPermissions
// here to request the missing permissions, and then overriding
// public void onRequestPermissionsResult(int requestCode, String[] permissions,
// int[] grantResults)
// to handle the case where the user grants the permission. See the documentation
// for ActivityCompat#requestPermissions for more details.
return;
}
manager.openCamera(cameraid, cameraOpenCallBack, mHandler);
//Set the monitor for clicking and taking pictures
takePhoto();
} catch (CameraAccessException e) {
ToastUtil.showToast(e.getMessage());
e.printStackTrace();
}
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int i, int i1) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
}
};
@SuppressLint("NewApi")
private void takePhoto() {
try {
Toast.makeText(mContext, "Camera started", Toast.LENGTH_SHORT).show();
mCameraSession.setRepeatingRequest(initDngBuilder().build(), null, mHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@SuppressLint("NewApi")
private CaptureRequest.Builder initDngBuilder() {
CaptureRequest.Builder captureBuilder = null;
try {
captureBuilder = mCameraSession.getDevice().createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(mImageReader.getSurface());
captureBuilder.addTarget(surface);
// Required for RAW capture
captureBuilder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE, CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE_ON);
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long) ((214735991 - 13231) / 2));
captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, (10000 - 100) / 2);//Set ISO, Sensitivity
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, 90);
//set 30 frames per second
CaptureRequest mCaptureRequest = captureBuilder.build();
mCameraSession.capture(mCaptureRequest, null, mHandler); //take a picture
CameraManager cameraManager = (CameraManager) mContext.getSystemService(Context.CAMERA_SERVICE);
String cameraid = CameraCharacteristics.LENS_FACING_FRONT + "";
CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraid);
Range<Integer>[] fps = cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
captureBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fps[fps.length - 1]);
} catch (CameraAccessException | NullPointerException e) {
e.printStackTrace();
}
return captureBuilder;
}
private class ImageSaver implements Runnable {
Image reader;
public ImageSaver(Image reader) {
this.reader = reader;
}
@SuppressLint("NewApi")
@Override
public void run() {
File dir = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS+"/lock").getAbsoluteFile();
if (!dir.exists()) {
dir.mkdirs();
}
File file = new File(dir, System.currentTimeMillis() + ".jpg");
FileOutputStream outputStream = null;
try {
outputStream = new FileOutputStream(file);
ByteBuffer buffer = reader.getPlanes()[0].getBuffer();
byte[] buff = new byte[buffer.remaining()];
buffer.get(buff);
BitmapFactory.Options ontain = new BitmapFactory.Options();
ontain.inSampleSize = 50;
Bitmap bm = BitmapFactory.decodeByteArray(buff, 0, buff.length, ontain);
Message.obtain(mUIHandler, SETIMAGE, bm).sendToTarget();
outputStream.write(buff);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
if (reader != null) {
reader.close();
}
if (outputStream != null) {
try {
outputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}
private class InnerCallBack implements Handler.Callback {
@SuppressLint("NewApi")
@Override
public boolean handleMessage(Message message) {
switch (message.what) {
case SETIMAGE:
Bitmap bm = (Bitmap) message.obj;
//preview avatar
break;
case MOVE_FOCK:
mPreViewBuidler.set(CaptureRequest.SCALER_CROP_REGION, picRect);
try {
mCameraSession.setRepeatingRequest(mPreViewBuidler.build(), null,
mHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
break;
}
return false;
}
}
@SuppressLint("NewApi")
public static class CompareSizeByArea implements java.util.Comparator<Size> {
@Override
public int compare(Size lhs, Size rhs) {
return Long.signum((long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
}
}
}
But this code is not working.

I am using a camera service, but on a pop-up camera the camera stays open continuously

Basically, I have a camera service which I use to capture an image, but on a pop-up camera the camera is open all the time, even though I have also called session.close(). Below I have pasted my code.
I have also created a service which runs in the background continuously. Please help me; I have tried all the alternatives but none of them worked.
public class OnplusRightService extends Service {
GPSTracker gps;
Intent cameraIntent;
protected static final String TAG = "ASA";
protected static final int CAMERACHOICE = CameraCharacteristics.LENS_FACING_FRONT;
protected CameraDevice cameraDevice;
protected CameraCaptureSession session;
protected ImageReader imageReader;
protected CameraDevice.StateCallback cameraStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice camera) {
Log.i(TAG, "CameraDevice.StateCallback onOpened");
cameraDevice = camera;
actOnReadyCameraDevice();
}
@Override
public void onDisconnected(@NonNull CameraDevice camera) {
Log.w(TAG, "CameraDevice.StateCallback onDisconnected");
}
@Override
public void onError(@NonNull CameraDevice camera, int error) {
Log.e(TAG, "CameraDevice.StateCallback onError " + error);
}
};
protected CameraCaptureSession.StateCallback sessionStateCallback = new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
Log.i(TAG, "CameraCaptureSession.StateCallback onConfigured");
OnplusRightService.this.session = session;
try {
session.setRepeatingRequest(createCaptureRequest(), null, null);
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
}
};
protected ImageReader.OnImageAvailableListener onImageAvailableListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Log.i(TAG, "onImageAvailable");
for (int i = 0; i < 1; i++) {
Image img = reader.acquireLatestImage();
if (img != null) {
try {
processImage(img);
} catch (CameraAccessException e) {
e.printStackTrace();
}
img.close();
}
}
stopSelf();
}
};
public void readyCamera() {
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED) {
CameraManager manager = (CameraManager) getSystemService(CAMERA_SERVICE);
try {
String pickedCamera = getCamera(manager);
manager.openCamera(pickedCamera, cameraStateCallback, null);
imageReader = ImageReader.newInstance(640, 480, ImageFormat.JPEG, 1 /* images buffered */);
imageReader.setOnImageAvailableListener(onImageAvailableListener, null);
Log.i(TAG, "imageReader created");
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
}
}
}
/**
* Return the Camera Id which matches the field CAMERACHOICE.
*/
public String getCamera(CameraManager manager) {
try {
for (String cameraId : manager.getCameraIdList()) {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
int cOrientation = characteristics.get(CameraCharacteristics.LENS_FACING);
if (cOrientation == CAMERACHOICE) {
return cameraId;
}
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
return null;
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
Log.i(TAG, "onStartCommand flags " + flags + " startId " + startId);
gps = new GPSTracker(OnplusRightService.this);
cameraIntent = intent;
readyCamera();
return super.onStartCommand(intent, flags, startId);
}
public void actOnReadyCameraDevice() {
try {
cameraDevice.createCaptureSession(Arrays.asList(imageReader.getSurface()), sessionStateCallback, null);
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
}
}
@Override
public void onDestroy() {
/*try {
session.abortCaptures();
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
}*/
session.close();
Intent intent = new Intent("custom-event-name");
// You can also include some extra data.
intent.putExtra("message", "This is my message!");
LocalBroadcastManager.getInstance(this).sendBroadcast(intent);
super.onDestroy();
}
private void processImage(Image image) throws CameraAccessException {
final ByteBuffer buffer = image.getPlanes()[0].getBuffer();
final byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
// session.abortCaptures();
saveImageToDisk(bytes);
session.abortCaptures();
image.close();
// session.abortCaptures();
}
protected CaptureRequest createCaptureRequest() {
try {
CaptureRequest.Builder builder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
builder.addTarget(imageReader.getSurface());
return builder.build();
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
return null;
}
}
@Override
public IBinder onBind(Intent intent) {
return null;
}
}

Take a photo with surfaceview

I've tried to create an app that lets me see in a SurfaceView what the front camera sees. I implemented a Button that should take a screenshot of the Activity, because over the camera preview I have a mask. When I take the screenshot, I only capture the mask that is drawn over the SurfaceView, not the camera image.
I don't know what I'm doing wrong.
Can anyone help me?
My intent is to create an app that lets me take a picture of myself with a mask over my face.
Here is an example from my virtual device.
I've already tried on a real phone, but nothing changed.
public class MainActivity extends AppCompatActivity implements SurfaceHolder.Callback, Handler.Callback {
static final String TAG = "CamTest";
static final int MY_PERMISSIONS_REQUEST_CAMERA = 1242;
private static final int MSG_CAMERA_OPENED = 1;
private static final int MSG_SURFACE_READY = 2;
private final Handler mHandler = new Handler(this);
SurfaceView mSurfaceView;
SurfaceHolder mSurfaceHolder;
CameraManager mCameraManager;
String[] mCameraIDsList;
CameraDevice.StateCallback mCameraStateCB;
CameraDevice mCameraDevice;
CameraCaptureSession mCaptureSession;
boolean mSurfaceCreated = true;
boolean mIsCameraConfigured = false;
private Surface mCameraSurface = null;
Button btt_scatta;
Bitmap immagine_screen;
ImageView img_view_screen;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//Richiedo il Fullscreen
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.activity_main);
//Get root view from Activity
final View rootView = getWindow().getDecorView().findViewById(android.R.id.content);
btt_scatta = (Button) findViewById(R.id.btt_scatta);
img_view_screen = (ImageView) findViewById(R.id.img_view_screen);
img_view_screen.setVisibility(View.GONE);
this.mSurfaceView = (SurfaceView) findViewById(R.id.surface_camera);
this.mSurfaceHolder = this.mSurfaceView.getHolder();
this.mSurfaceHolder.addCallback(this);
this.mCameraManager = (CameraManager) this.getSystemService(Context.CAMERA_SERVICE);
try {
mCameraIDsList = this.mCameraManager.getCameraIdList();
for (String id : mCameraIDsList) {
Log.v(TAG, "CameraID: " + id);
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
mCameraStateCB = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
Toast.makeText(getApplicationContext(), "onOpened", Toast.LENGTH_SHORT).show();
mCameraDevice = camera;
mHandler.sendEmptyMessage(MSG_CAMERA_OPENED);
}
@Override
public void onDisconnected(CameraDevice camera) {
Toast.makeText(getApplicationContext(), "onDisconnected", Toast.LENGTH_SHORT).show();
}
@Override
public void onError(CameraDevice camera, int error) {
Toast.makeText(getApplicationContext(), "onError", Toast.LENGTH_SHORT).show();
}
};
btt_scatta.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
btt_scatta.setVisibility(View.GONE);
immagine_screen = getScreenShot(rootView);
store(immagine_screen,"screenshot Honor");
img_view_screen.setVisibility(View.VISIBLE);
img_view_screen.setImageBitmap(immagine_screen);
}
});
}
@Override
protected void onStart() {
super.onStart();
//requesting permission
int permissionCheck = ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
if (permissionCheck != PackageManager.PERMISSION_GRANTED) {
if (ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.CAMERA)) {
} else {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, MY_PERMISSIONS_REQUEST_CAMERA);
Toast.makeText(getApplicationContext(), "request permission", Toast.LENGTH_SHORT).show();
}
} else {
Toast.makeText(getApplicationContext(), "PERMISSION_ALREADY_GRANTED", Toast.LENGTH_SHORT).show();
try {
mCameraManager.openCamera(mCameraIDsList[1], mCameraStateCB, new Handler());
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
}
@Override
protected void onStop() {
super.onStop();
try {
if (mCaptureSession != null) {
mCaptureSession.stopRepeating();
mCaptureSession.close();
mCaptureSession = null;
}
mIsCameraConfigured = false;
} catch (final CameraAccessException e) {
// Doesn't matter, closing device anyway
e.printStackTrace();
} catch (final IllegalStateException e2) {
// Doesn't matter, closing device anyway
e2.printStackTrace();
} finally {
if (mCameraDevice != null) {
mCameraDevice.close();
mCameraDevice = null;
mCaptureSession = null;
}
}
}
@Override
public boolean handleMessage(Message msg) {
switch (msg.what) {
case MSG_CAMERA_OPENED:
case MSG_SURFACE_READY:
// if both surface is created and camera device is opened
// - ready to set up preview and other things
if (mSurfaceCreated && (mCameraDevice != null)
&& !mIsCameraConfigured) {
configureCamera();
}
break;
}
return true;
}
private void configureCamera() {
// prepare list of surfaces to be used in capture requests
List<Surface> sfl = new ArrayList<Surface>();
sfl.add(mCameraSurface); // surface for viewfinder preview
// configure camera with all the surfaces to be ever used
try {
mCameraDevice.createCaptureSession(sfl,
new CaptureSessionListener(), null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
mIsCameraConfigured = true;
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
switch (requestCode) {
case MY_PERMISSIONS_REQUEST_CAMERA:
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED)
try {
mCameraManager.openCamera(mCameraIDsList[1], mCameraStateCB, new Handler());
} catch (CameraAccessException e) {
e.printStackTrace();
}
break;
}
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
mCameraSurface = holder.getSurface();
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
mCameraSurface = holder.getSurface();
mSurfaceCreated = true;
mHandler.sendEmptyMessage(MSG_SURFACE_READY);
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
mSurfaceCreated = false;
}
private class CaptureSessionListener extends
CameraCaptureSession.StateCallback {
@Override
public void onConfigureFailed(final CameraCaptureSession session) {
Log.d(TAG, "CaptureSessionConfigure failed");
}
@Override
public void onConfigured(final CameraCaptureSession session) {
Log.d(TAG, "CaptureSessionConfigure onConfigured");
mCaptureSession = session;
try {
CaptureRequest.Builder previewRequestBuilder = mCameraDevice
.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
previewRequestBuilder.addTarget(mCameraSurface);
mCaptureSession.setRepeatingRequest(previewRequestBuilder.build(),
null, null);
} catch (CameraAccessException e) {
Log.d(TAG, "setting up preview failed");
e.printStackTrace();
}
}
}
//Capture the root view
public static Bitmap getScreenShot(View view) {
View screenView = view.getRootView();
screenView.setDrawingCacheEnabled(true);
Bitmap bitmap = Bitmap.createBitmap(screenView.getDrawingCache());
screenView.setDrawingCacheEnabled(false);
return bitmap;
}
//Store the Bitmap into the phone
public static void store(Bitmap bm, String fileName){
final String dirPath = Environment.getExternalStorageDirectory().getAbsolutePath() + "/Screenshots";
File dir = new File(dirPath);
if(!dir.exists())
dir.mkdirs();
File file = new File(dirPath, fileName);
try {
FileOutputStream fOut = new FileOutputStream(file);
bm.compress(Bitmap.CompressFormat.PNG, 85, fOut);
fOut.flush();
fOut.close();
} catch (Exception e) {
e.printStackTrace();
}
}
}
I think a good solution would be to take a picture with the camera, put it in an ImageView under my mask, and then take a screenshot. But the problem is taking the photo from the SurfaceView preview: I don't know how to do it.

Camera returns a dark image

Using this code I am capturing a selfie, but it is always saved as a dark image.
I want a proper image, like the one I get from a normal capture.
I have tested on multiple phones and multiple Android versions.
Every time the result is a dark image.
public class CameraCapture extends APictureCapturingService {
private static final String TAG = CameraCapture.class.getSimpleName();
private boolean cameraClosed;
private CameraDevice cameraDevice;
private ImageReader imageReader;
private TreeMap<String, byte[]> picturesTaken;
private PictureCapturingListener capturingListener;
private String currentCameraId;
//camera ids queue;
private Queue<String> cameraIds;
/***
* constructor.
*
* @param activity the activity used to get display manager and the application context
*/
CameraCapture(Activity activity) {
super(activity);
}
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
@Override
public void startCapturing(PictureCapturingListener listener) {
this.picturesTaken = new TreeMap<>();
this.capturingListener = listener;
this.cameraIds = new LinkedList<>();
try {
final String[] cameraIds = manager.getCameraIdList();
if (cameraIds.length > 1) {
this.cameraIds.addAll(Arrays.asList(cameraIds));
this.currentCameraId = "1";
openCamera();
} else {
capturingListener.onDoneCapturingAllPhotos(picturesTaken);
}
} catch ( CameraAccessException e) {
Log.v("exception", "Exception occurred while accessing the list of cameras", e);
}
}
public static APictureCapturingService getInstance(final Activity activity) {
return new CameraCapture(activity);
}
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private void openCamera() {
Log.v("camera open", "opening camera " + currentCameraId);
try {
if (ActivityCompat.checkSelfPermission(context, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED) {
manager.openCamera(currentCameraId, stateCallback, null);
}
} catch ( CameraAccessException e) {
Log.v("exception", " exception occurred while opening camera " + currentCameraId, e);
}
}
private final ImageReader.OnImageAvailableListener onImageAvailableListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader imReader) {
final Image image = imReader.acquireLatestImage();
final ByteBuffer buffer = image.getPlanes()[0].getBuffer();
final byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
try {
CameraCapture.this.saveImageToDisk(bytes);
} catch (IOException e) {
e.printStackTrace();
}
image.close();
}
};
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private void saveImageToDisk(final byte[] bytes) throws IOException
{
String file_path = Environment.getExternalStorageDirectory() +
"/.Securityapptechno";
File dir = new File(file_path);
if(!dir.exists()){
dir.mkdirs();
}
String timeStamp = new SimpleDateFormat("yyyyMMddHHmmss",
java.util.Locale.getDefault()).format(new Date());
File file = new File(dir,timeStamp + "Intruder.jpg");
FileOutputStream fOut;
try {
fOut = new FileOutputStream(file);
Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, fOut);
this.picturesTaken.put(file.getPath(), bytes);
fOut.flush();
fOut.close();
} catch (Exception e) {
e.printStackTrace();
}
}
@SuppressLint("NewApi")
private void closeCamera() {
Log.v("camera close", "closing camera " + cameraDevice.getId());
if (null != cameraDevice && !cameraClosed) {
cameraDevice.close();
cameraDevice = null;
}
if (null != imageReader) {
imageReader.close();
imageReader = null;
}
}
@SuppressLint("NewApi")
private final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
if (picturesTaken.lastEntry() != null) {
capturingListener.onCaptureDone(picturesTaken.lastEntry().getKey(), picturesTaken.lastEntry().getValue());
Log.v("image done", "done taking picture from camera " + cameraDevice.getId());
}
closeCamera();
}
};
@SuppressLint("NewApi")
private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice camera) {
cameraClosed = false;
Log.v("opened camera", "camera " + camera.getId() + " opened");
cameraDevice = camera;
Log.i(TAG, "Taking picture from camera " + camera.getId());
//Take the picture after some delay. It may resolve getting a black dark photos.
new android.os.Handler().postDelayed(new Runnable() {
@Override
public void run() {
try {
takePicture();
} catch (CameraAccessException e) {
Log.e(TAG, " exception occurred while taking picture from " + currentCameraId, e);
}
}
}
, 500);
}
@Override
public void onDisconnected(@NonNull CameraDevice camera) {
Log.v("camera discon", " camera " + camera.getId() + " disconnected");
if (cameraDevice != null && !cameraClosed) {
cameraClosed = true;
cameraDevice.close();
}
}
@Override
public void onClosed(@NonNull CameraDevice camera) {
cameraClosed = true;
Log.v("camera closed", "camera " + camera.getId() + " closed");
//once the current camera has been closed, start taking another picture
}
@Override
public void onError(@NonNull CameraDevice camera, int error) {
Log.v("error", "camera in error, int code " + error);
if (cameraDevice != null && !cameraClosed) {
cameraDevice.close();
}
}
public void takePicture() throws CameraAccessException {
if (null == cameraDevice) {
Log.v("camera device", "cameraDevice is null");
return;
}
final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraDevice.getId());
Size[] jpegSizes = null;
StreamConfigurationMap streamConfigurationMap = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (streamConfigurationMap != null) {
jpegSizes = streamConfigurationMap.getOutputSizes(ImageFormat.JPEG);
}
final boolean jpegSizesNotEmpty = jpegSizes != null && 0 < jpegSizes.length;
int width = jpegSizesNotEmpty ? jpegSizes[0].getWidth() : 720;
int height = jpegSizesNotEmpty ? jpegSizes[0].getHeight() : 1280;
final ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
final List<Surface> outputSurfaces = new ArrayList<>();
outputSurfaces.add(reader.getSurface());
final CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE,CaptureRequest.CONTROL_AE_MODE_OFF);
captureBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_OFF);
captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, -20);
captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getOrientation());
captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY,200);
captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 22000L);
captureBuilder.set(CaptureRequest.JPEG_QUALITY, (byte) 95);
reader.setOnImageAvailableListener(onImageAvailableListener, null);
cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
try {
session.capture(captureBuilder.build(), captureListener, null);
} catch (CameraAccessException e) {
Log.v("exception", " exception occurred while accessing " + currentCameraId, e);
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
}
}, null);
}
};
}

Dual camera JPEG image capture using Camera2 API android

I am trying to get JPEG images from both cameras in parallel on the Snapdragon 820 platform.
I am not getting the first camera's image callback; I only get the second camera's JPEG callback.
Here is my code:
protected void takePictureBack() {
Log.d(TAG, "takePictureBack() called");
if (null == cameraDeviceBack) {
Log.e(TAG, "cameraDeviceBack is null");
return;
}
try {
final File file_back = new File(Environment.getExternalStorageDirectory() + "/pic_back.jpg");
final CaptureRequest.Builder captureBuilderBack = cameraDeviceBack.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
List<Surface> outputSurfaces = new ArrayList<Surface>(3);
outputSurfaces.add(new Surface(mTextureViewBack.getSurfaceTexture()));
ImageReader reader = ImageReader.newInstance(640, 480, ImageFormat.JPEG, 1);
outputSurfaces.add(reader.getSurface());
captureBuilderBack.addTarget(reader.getSurface());
ImageReader.OnImageAvailableListener readerListenerBack = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Log.d(TAG, "onImageAvailable() called with: reader = [" + reader + "]");
if (reader.getImageFormat() == ImageFormat.JPEG) {
Log.d(TAG, "onImageAvailable() called with back: reader = JPEG");
Image image = null;
try {
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
save(bytes);
} catch (IOException e) {
e.printStackTrace();
} finally {
if (image != null) {
image.close();
}
}
}
}
private void save(byte[] bytes) throws IOException {
OutputStream output = null;
try {
output = new FileOutputStream(file_back);
output.write(bytes);
} finally {
if (null != output) {
output.close();
}
}
}
};
reader.setOnImageAvailableListener(readerListenerBack, mBackgroundHandlerBack);
captureBuilderBack.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
final CameraCaptureSession.CaptureCallback captureListenerBack = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
if (DEBUG) Log.d(TAG, "onCaptureCompleted: take picture back successfully");
//Toast.makeText(getActivity(), "Take picture successfully", Toast.LENGTH_SHORT).show();
createCameraPreviewBack();
mCaptureResultBack = result;
}
};
cameraDeviceBack.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
session.capture(captureBuilderBack.build(), captureListenerBack, mBackgroundHandlerBack);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
}
}, mBackgroundHandlerBack);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
The front camera capture code is the same.
Individual single-camera JPEG capture works fine.
Any idea why I am not getting both JPEG image callbacks?
I found the solution: the ImageReader reader needs to be a class-level (global) variable instead of a local one.
With this change I am able to get two JPEGs, one from each camera.
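For illustration, a minimal sketch of that change (the field names are placeholders of mine; everything else mirrors the code above). The point is that the reader must be referenced from a field so it is not garbage collected before onImageAvailable() fires:
// Class-level fields instead of local variables, so the readers (and their
// listeners) stay alive until the JPEG callbacks are delivered.
private ImageReader mReaderBack;
private ImageReader mReaderFront;

// Inside takePictureBack(), use the field in place of the local "reader":
mReaderBack = ImageReader.newInstance(640, 480, ImageFormat.JPEG, 1);
outputSurfaces.add(mReaderBack.getSurface());
captureBuilderBack.addTarget(mReaderBack.getSurface());
mReaderBack.setOnImageAvailableListener(readerListenerBack, mBackgroundHandlerBack);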
