Mobile Vision QR code calls intent multiple times - Android

I made a QR scanner app using Google Mobile Vision. The app is simple: it scans a QR code, decodes it, delivers the value to a result class, and shows the result in the result layout. The problem is that when I try to scan a QR code, the result class somehow gets called multiple times. Here is my MainActivity code:
public class MainActivity extends AppCompatActivity {
private static final String TAG = "MainActivity";
SurfaceView surfaceView;
BarcodeDetector barcodeDetector;
CameraSource cameraSource;
final int RequestCameraID = 1001;
BoxDetector boxDetector;
@Override
protected void onStop() {
super.onStop();
Toast.makeText(getApplicationContext(),"Stop",Toast.LENGTH_LONG).show();
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
surfaceView = findViewById(R.id.cameraView);
surfaceView.setZOrderMediaOverlay(true);
if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, PERMISSION_GRANTED);
}
Casting();
}
private void Casting() {
barcodeDetector = new BarcodeDetector.Builder(this)
.setBarcodeFormats(Barcode.QR_CODE)
.build();
boxDetector = new BoxDetector(barcodeDetector, 300, 300);
if (!barcodeDetector.isOperational()) {
Toast.makeText(MainActivity.this, "Sorry couldn't setup the detector", Toast.LENGTH_LONG).show();
}
cameraSource = new CameraSource.Builder(MainActivity.this, boxDetector)
.setFacing(CameraSource.CAMERA_FACING_BACK)
.setRequestedFps(30)
.setAutoFocusEnabled(true)
.setRequestedPreviewSize(1280, 720)
.build();
surfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder holder) {
if (ActivityCompat.checkSelfPermission(MainActivity.this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(MainActivity.this, new String[]{Manifest.permission.CAMERA},RequestCameraID);
return;
}
try {
cameraSource.start(surfaceView.getHolder());
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { }
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
cameraSource.stop();
}
});
boxDetector.setProcessor(new Detector.Processor<Barcode>() {
@Override
public void release() {
}
@Override
public void receiveDetections(Detector.Detections<Barcode> detections) {
final SparseArray<Barcode> barcodeSparseArray = detections.getDetectedItems();
if(barcodeSparseArray.size()!=0){
Handler handler = new Handler(Looper.getMainLooper());
handler.post(new Runnable() {
@Override
public void run() {
cameraSource.release();
barcodeDetector.release();
Intent intent = new Intent(MainActivity.this, ResultActivity.class);
intent.putExtra("Result",barcodeSparseArray.valueAt(0).displayValue);
Casting();
startActivity(intent);
}
});
}
}
});
}
public void cS() {
barcodeDetector = new BarcodeDetector.Builder(this)
.setBarcodeFormats(Barcode.QR_CODE)
.build();
boxDetector = new BoxDetector(barcodeDetector, 300, 300);
if (!barcodeDetector.isOperational()) {
Toast.makeText(MainActivity.this, "Sorry couldn't setup the detector", Toast.LENGTH_LONG).show();
}
cameraSource = new CameraSource.Builder(MainActivity.this, boxDetector)
.setFacing(CameraSource.CAMERA_FACING_BACK)
.setRequestedFps(30)
.setAutoFocusEnabled(true)
.setRequestedPreviewSize(1280, 720)
.build();
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
switch (requestCode) {
case RequestCameraID: {
if (grantResults[0] == PackageManager.PERMISSION_GRANTED) {
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
return;
}
try {
cameraSource.start(surfaceView.getHolder());
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}}
The BoxDetector code is:
public class BoxDetector extends Detector {
private Detector mDelegate;
private int mBoxWidth, mBoxHeight;
BoxDetector(Detector delegate, int boxWidth, int boxHeight) {
mDelegate = delegate;
mBoxWidth = boxWidth;
mBoxHeight = boxHeight;
}
public SparseArray detect(Frame frame) {
int width = frame.getMetadata().getWidth();
int height = frame.getMetadata().getHeight();
int right = ((width / 2) + (mBoxHeight / 2)) -150 ;
int left = ((width / 2) - (mBoxHeight / 2)) - 150;
int bottom = ((height / 2) + (mBoxWidth / 2));
int top = ((height / 2) - ((mBoxWidth) / 2));
YuvImage yuvImage = new YuvImage(frame.getGrayscaleImageData().array(), ImageFormat.NV21, width, height, null);
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
yuvImage.compressToJpeg(new Rect(left, top, right, bottom), 100, byteArrayOutputStream);
byte[] jpegArray = byteArrayOutputStream.toByteArray();
Bitmap bitmap = BitmapFactory.decodeByteArray(jpegArray, 0, jpegArray.length);
Frame croppedFrame =
new Frame.Builder()
.setBitmap(bitmap)
.setRotation(frame.getMetadata().getRotation())
.build();
return mDelegate.detect(croppedFrame);
}
public void run(){
try {
Thread.sleep(300);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
public boolean setFocus(int id) {
return mDelegate.setFocus(id);
}
}
That's it. Thank you!
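For context, receiveDetections fires for every processed frame, so each frame that still contains the QR code posts another Runnable and starts ResultActivity again. A minimal sketch of one way to guard against this, assuming the same Mobile Vision setup as above (the scanHandled field is an illustrative addition and needs import java.util.concurrent.atomic.AtomicBoolean):
// Sketch only: gate the navigation with a flag so only the first detection wins.
private final AtomicBoolean scanHandled = new AtomicBoolean(false);
@Override
public void receiveDetections(Detector.Detections<Barcode> detections) {
    final SparseArray<Barcode> barcodes = detections.getDetectedItems();
    // compareAndSet returns true only once, for the first frame with a detection.
    if (barcodes.size() > 0 && scanHandled.compareAndSet(false, true)) {
        final String value = barcodes.valueAt(0).displayValue;
        new Handler(Looper.getMainLooper()).post(new Runnable() {
            @Override
            public void run() {
                cameraSource.release();    // stop delivering further frames
                barcodeDetector.release();
                Intent intent = new Intent(MainActivity.this, ResultActivity.class);
                intent.putExtra("Result", value);
                startActivity(intent);
            }
        });
    }
}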

Related

Not taking picture in background using TextureView and Camera2 API

Here is my Camera2Manager code.
I have added the permissions in the manifest file and I request them at run time, but this code is still not working. I am calling this class from my other class like this:
Camera2Manager camera2Manager = new Camera2Manager(mTextureView, GestureSelfUnlockActivity.this);
camera2Manager.initCamera();
My manager class is below. I have checked the code and the surface texture listener is not being called. Can anyone help me out? Thanks.
public class Camera2Manager {
private static final int SETIMAGE = 1;
private static final int MOVE_FOCK = 2;
private TextureView mTextureView;
private Context mContext;
private Handler mHandler;
private Handler mUIHandler;
private ImageReader mImageReader;
private CaptureRequest.Builder mPreViewBuidler;
private CameraCaptureSession mCameraSession;
private CameraCharacteristics mCameraCharacteristics;
private Size mPreViewSize;
private Rect maxZoomrect;
private int maxRealRadio;
// camera zoom related
private Rect picRect;
public Camera2Manager(TextureView textureView, Context context) {
mTextureView = textureView;
mContext = context;
}
public void initCamera() {
mUIHandler = new Handler(new InnerCallBack());
mTextureView.setSurfaceTextureListener(mSurfacetextlistener);
// Toast.makeText(mContext, "Caremmaa", Toast.LENGTH_SHORT).show();
}
@SuppressLint("NewApi")
private final ImageReader.OnImageAvailableListener onImageAvaiableListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader imageReader) {
mHandler.post(new ImageSaver(imageReader.acquireNextImage()));
}
};
private Surface surface;
@SuppressLint("NewApi")
private final CameraDevice.StateCallback cameraOpenCallBack = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice cameraDevice) {
try {
mPreViewBuidler = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
SurfaceTexture texture = mTextureView.getSurfaceTexture();
texture.setDefaultBufferSize(mPreViewSize.getWidth(), mPreViewSize.getHeight());
surface = new Surface(texture);
mPreViewBuidler.addTarget(surface);
cameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()), mSessionStateCallBack, mHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onDisconnected(CameraDevice cameraDevice) {
}
@Override
public void onError(CameraDevice cameraDevice, int i) {
}
};
@SuppressLint("NewApi")
private final CameraCaptureSession.StateCallback mSessionStateCallBack = new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
try {
mCameraSession = cameraCaptureSession;
cameraCaptureSession.setRepeatingRequest(mPreViewBuidler.build(), null, mHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
}
};
private final TextureView.SurfaceTextureListener mSurfacetextlistener = new TextureView
.SurfaceTextureListener() {
@SuppressLint("NewApi")
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int i, int i1) {
HandlerThread thread = new HandlerThread("Camera2");
thread.start();
mHandler = new Handler(thread.getLooper());
CameraManager manager = (CameraManager) mContext.getSystemService(Context.CAMERA_SERVICE);
String cameraid = CameraCharacteristics.LENS_FACING_FRONT + "";
Toast.makeText(mContext, "Caremmaa", Toast.LENGTH_SHORT).show();
try {
mCameraCharacteristics = manager.getCameraCharacteristics(cameraid);
//The area of the screen sensor, in pixels.
maxZoomrect = mCameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
//Maximum digital zoom
maxRealRadio = mCameraCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM).intValue();
picRect = new Rect(maxZoomrect);
StreamConfigurationMap map = mCameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)
), new CompareSizeByArea());
mPreViewSize = map.getOutputSizes(SurfaceTexture.class)[0];
mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(), ImageFormat.JPEG, 5);
mImageReader.setOnImageAvailableListener(onImageAvaiableListener, mHandler);
if (ActivityCompat.checkSelfPermission(mContext, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
// TODO: Consider calling
// ActivityCompat#requestPermissions
// here to request the missing permissions, and then overriding
// public void onRequestPermissionsResult(int requestCode, String[] permissions,
// int[] grantResults)
// to handle the case where the user grants the permission. See the documentation
// for ActivityCompat#requestPermissions for more details.
return;
}
manager.openCamera(cameraid, cameraOpenCallBack, mHandler);
//Set the monitor for clicking and taking pictures
takePhoto();
} catch (CameraAccessException e) {
ToastUtil.showToast(e.getMessage());
e.printStackTrace();
}
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int i, int i1) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
}
};
@SuppressLint("NewApi")
private void takePhoto() {
try {
Toast.makeText(mContext, "Camera started", Toast.LENGTH_SHORT).show();
mCameraSession.setRepeatingRequest(initDngBuilder().build(), null, mHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@SuppressLint("NewApi")
private CaptureRequest.Builder initDngBuilder() {
CaptureRequest.Builder captureBuilder = null;
try {
captureBuilder = mCameraSession.getDevice().createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(mImageReader.getSurface());
captureBuilder.addTarget(surface);
// Required for RAW capture
captureBuilder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE, CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE_ON);
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long) ((214735991 - 13231) / 2));
captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, (10000 - 100) / 2);//Set ISO, Sensitivity
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, 90);
//set 30 frames per second
CaptureRequest mCaptureRequest = captureBuilder.build();
mCameraSession.capture(mCaptureRequest, null, mHandler); //take a picture
CameraManager cameraManager = (CameraManager) mContext.getSystemService(Context.CAMERA_SERVICE);
String cameraid = CameraCharacteristics.LENS_FACING_FRONT + "";
CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraid);
Range<Integer>[] fps = cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
captureBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fps[fps.length - 1]);
} catch (CameraAccessException | NullPointerException e) {
e.printStackTrace();
}
return captureBuilder;
}
private class ImageSaver implements Runnable {
Image reader;
public ImageSaver(Image reader) {
this.reader = reader;
}
@SuppressLint("NewApi")
@Override
public void run() {
File dir = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS+"/lock").getAbsoluteFile();
if (!dir.exists()) {
dir.mkdirs();
}
File file = new File(dir, System.currentTimeMillis() + ".jpg");
FileOutputStream outputStream = null;
try {
outputStream = new FileOutputStream(file);
ByteBuffer buffer = reader.getPlanes()[0].getBuffer();
byte[] buff = new byte[buffer.remaining()];
buffer.get(buff);
BitmapFactory.Options ontain = new BitmapFactory.Options();
ontain.inSampleSize = 50;
Bitmap bm = BitmapFactory.decodeByteArray(buff, 0, buff.length, ontain);
Message.obtain(mUIHandler, SETIMAGE, bm).sendToTarget();
outputStream.write(buff);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
if (reader != null) {
reader.close();
}
if (outputStream != null) {
try {
outputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}
private class InnerCallBack implements Handler.Callback {
@SuppressLint("NewApi")
@Override
public boolean handleMessage(Message message) {
switch (message.what) {
case SETIMAGE:
Bitmap bm = (Bitmap) message.obj;
//preview avatar
break;
case MOVE_FOCK:
mPreViewBuidler.set(CaptureRequest.SCALER_CROP_REGION, picRect);
try {
mCameraSession.setRepeatingRequest(mPreViewBuidler.build(), null,
mHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
break;
}
return false;
}
}
@SuppressLint("NewApi")
public static class CompareSizeByArea implements java.util.Comparator<Size> {
@Override
public int compare(Size lhs, Size rhs) {
return Long.signum((long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
}
}
}
But this code is not working.
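Two things worth checking here (observations, not confirmed fixes): onSurfaceTextureAvailable only fires if the TextureView is not yet available when the listener is set, and CameraCharacteristics.LENS_FACING_FRONT + "" just produces the string "0", which on most devices is the back camera rather than the front one. A minimal sketch of a more defensive setup, assuming the same fields as the class above (the findCameraId helper is an illustrative addition):
// Sketch only: handle the case where the TextureView surface already exists.
public void initCamera() {
    mUIHandler = new Handler(new InnerCallBack());
    if (mTextureView.isAvailable()) {
        // The listener is never called for an already-available surface,
        // so run the same setup path directly.
        mSurfacetextlistener.onSurfaceTextureAvailable(
                mTextureView.getSurfaceTexture(),
                mTextureView.getWidth(), mTextureView.getHeight());
    } else {
        mTextureView.setSurfaceTextureListener(mSurfacetextlistener);
    }
}
// Sketch only: look the camera id up by lens facing instead of hard-coding it.
private String findCameraId(CameraManager manager, int facing) throws CameraAccessException {
    for (String id : manager.getCameraIdList()) {
        Integer lensFacing = manager.getCameraCharacteristics(id)
                .get(CameraCharacteristics.LENS_FACING);
        if (lensFacing != null && lensFacing == facing) {
            return id;
        }
    }
    return manager.getCameraIdList()[0]; // fall back to the first camera
}
For the front camera, facing would be CameraCharacteristics.LENS_FACING_FRONT.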

How to crop the region of a detected number (OCR) using Google Vision?

I wrote some code to recognize the value of banknotes. I'm using OCR from Mobile Vision to get the number and the words, and if they match (I put in some conditions) the app plays a voice saying the value.
Now I want to try an experiment: I want the app to crop the region around the number and show it on my activity. This is what I have done:
public class MainActivity extends AppCompatActivity {
SurfaceView cameraView;
TextView textView;
CameraSource cameraSource;
final int RequestCameraPermissionID = 1001;
String nominal = "";
String bilangan = "";
TextToSpeech tts;
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
switch (requestCode) {
case RequestCameraPermissionID: {
if (grantResults[0] == PackageManager.PERMISSION_GRANTED) {
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
return;
}
try {
cameraSource.start(cameraView.getHolder());
} catch (IOException e) {
e.printStackTrace();
}
}
}
break;
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
cameraView = (SurfaceView) findViewById(R.id.surface_view);
textView = (TextView) findViewById(R.id.text_view);
tts=new TextToSpeech(MainActivity.this, new TextToSpeech.OnInitListener() {
@Override
public void onInit(int status) {
// TODO Auto-generated method stub
if(status == TextToSpeech.SUCCESS){
int result=tts.setLanguage(Locale.US);
if(result==TextToSpeech.LANG_MISSING_DATA ||
result==TextToSpeech.LANG_NOT_SUPPORTED){
Log.e("error", "This Language is not supported");
}
else{
ConvertTextToSpeech();
}
}
else
Log.e("error", "Initilization Failed!");
}
});
TextRecognizer textRecognizer = new TextRecognizer.Builder(getApplicationContext()).build();
if (!textRecognizer.isOperational()) {
Log.w("MainActivity", "Detector dependencies are not yet available");
} else {
cameraSource = new CameraSource.Builder(getApplicationContext(), textRecognizer)
.setFacing(CameraSource.CAMERA_FACING_BACK)
.setRequestedPreviewSize(1280, 1024)
.setRequestedFps(2.0f)
.setAutoFocusEnabled(true)
.build();
cameraView.getHolder().addCallback(new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
try {
if (ActivityCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(MainActivity.this,
new String[]{Manifest.permission.CAMERA},
RequestCameraPermissionID);
return;
}
cameraSource.start(cameraView.getHolder());
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i1, int i2) {
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
cameraSource.stop();
}
});
textRecognizer.setProcessor(new Detector.Processor<TextBlock>() {
@Override
public void release() {
}
@Override
public void receiveDetections(Detector.Detections<TextBlock> detections) {
final SparseArray<TextBlock> items = detections.getDetectedItems();
if(items.size() != 0)
{
textView.post(new Runnable() {
@Override
public void run() {
StringBuilder stringBuilder = new StringBuilder();
for(int i =0;i<items.size();++i)
{
TextBlock item = items.valueAt(i);
/*stringBuilder.append(item.getValue());
stringBuilder.append("\n");*/
if (item.getValue().toLowerCase().contains("seribu")){
bilangan = "seribu rupiah";
}
else if(item.getValue().toLowerCase().contains("dua ribu")){
bilangan = "dua ribu rupiah";
}
else if(item.getValue().toLowerCase().contains("lima ribu")){
bilangan = "lima ribu rupiah";
}
else if(item.getValue().toLowerCase().contains("sepuluh ribu")){
bilangan = "sepuluh ribu rupiah";
}
else if(item.getValue().toLowerCase().contains("dua puluh ribu")){
bilangan = "dua puluh ribu rupiah";
}
else if(item.getValue().toLowerCase().contains("lima puluh ribu")){
bilangan = "lima puluh ribu rupiah";
}
else if(item.getValue().toLowerCase().contains("seratus ribu")){
bilangan = "seratus ribu rupiah";
}
if(item.getValue().equals("1000")){
nominal = "1000";
}
else if(item.getValue().equals("2000")){
}
else if(item.getValue().equals("5000")){
nominal = "5000";
}
else if(item.getValue().equals("10000")){
nominal = "10000";
}
else if(item.getValue().equals("20000")){
nominal = "20000";
}
else if(item.getValue().equals("50000")){
nominal = "50000";
}
else if(item.getValue().equals("100000")){
nominal = "100000";
}
stringBuilder.append(bilangan);
stringBuilder.append(nominal);
}
//textView.setText(stringBuilder.toString());
if (!bilangan.equals("") && !nominal.equals("")){
cameraSource.stop();
ConvertTextToSpeech();
}
}
});
}
}
});
}
}
private void ConvertTextToSpeech() {
tts.speak(bilangan,TextToSpeech.QUEUE_FLUSH,null,"Niel");
}
}
How can I crop just the number (the red box in the image below) so that I can then display it on my activity using an ImageView?
Thanks
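Each detected TextBlock exposes its bounding box via getBoundingBox(), so one approach (a sketch; it assumes you also have a Bitmap of the frame, for example from CameraSource.takePicture, and the frameBitmap parameter below is hypothetical) is to crop that rectangle out of the bitmap and show it in an ImageView:
// Sketch only: crop the region of a detected TextBlock out of a frame bitmap.
private Bitmap cropTextBlock(Bitmap frameBitmap, TextBlock item) {
    Rect box = item.getBoundingBox();
    // Clamp the rectangle to the bitmap so createBitmap never goes out of bounds.
    int left = Math.max(0, box.left);
    int top = Math.max(0, box.top);
    int width = Math.min(box.width(), frameBitmap.getWidth() - left);
    int height = Math.min(box.height(), frameBitmap.getHeight() - top);
    return Bitmap.createBitmap(frameBitmap, left, top, width, height);
}
The returned bitmap can then be passed to ImageView.setImageBitmap(). Note that the bounding box is in the coordinates of the frame that was analysed, so the bitmap has to come from that same frame.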

Images from Camera2 API on Nexus 5 appear blurry

This is my first attempt at accessing the camera and capturing images using the Camera2 API.
I'm using a Nexus 5 running API 23.
I can get the camera preview, but I cannot understand why the frame gets locked after the capture request and why the generated images are blurry/totally unclear.
I've been stuck with this issue for a while now. Any advice would be appreciated.
Attached is my MainActivity code:
public class MainActivity extends AppCompatActivity implements
View.OnClickListener{
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0,90);
ORIENTATIONS.append(Surface.ROTATION_90,0);
ORIENTATIONS.append(Surface.ROTATION_180,270);
ORIENTATIONS.append(Surface.ROTATION_270,180);
}
private static final String TAG = "MainActivity";
private static final int STATE_PREVIEW=0;
private static final int STATE_WAIT_AF_LOCK=1;
private static final int STATE_PICTURE_CAPTURED=2;
private static final int REQUEST_CAMERA_PERMISSION_RESULT = 0;
private static final int REQUEST_WRITE_EXTERNAL_STORAGE_PERMISSION_RESULT = 1;
private static final int REQUEST_READ_EXTERNAL_STORAGE_REQUEST = 2;
private int mState;
private static LruCache<String, Bitmap> mMemoryCache;
private HandlerThread mBackgroundThread;
private final Handler mUIHandler = new Handler(Looper.getMainLooper()){
@Override
public void handleMessage(Message msg){
swapImageAdapter();
}
};
private static File mImageFile;
private Handler mBackgroundHandler;
private TextureView mTextureView;
private Button mCaptureButton;
private File mImageFolder;
private static String mImageFileName;
private Size mPreviewSize; //mImageSize
private RecyclerView mRecycleView;
private String mCameraId;
private CameraDevice mCameraDevice;
private CaptureRequest mPreviewCaptureRequest;
private CaptureRequest.Builder mPreviewCaptureRequestBuilder;
private CameraCaptureSession mCameraCaptureSession;
private ImageReader mImageReader;
private ImageReader.OnImageAvailableListener mOnImageAvailableListener =
new ImageReader.OnImageAvailableListener(){
@Override
public void onImageAvailable(ImageReader reader){
Log.i(TAG,"OnImageAvailableListener!!!");
mBackgroundHandler.post(new
ImageSaver(reader.acquireNextImage(),mUIHandler));
}
};
private static class ImageSaver implements Runnable{
private final Image mImage;
private final Handler mHandler;
ImageSaver(Image image){
Log.i(TAG,"imgSaver const");
mImage=image;
mHandler=null;
}
ImageSaver(Image image,Handler handler){
Log.i(TAG,"imgSaver const");
mImage =image;
mHandler =handler;
}
@Override
public void run(){
Log.i(TAG,"In ImageSaver run()");
ByteBuffer byteBuffer=mImage.getPlanes()[0].getBuffer();
byteBuffer.rewind();
byte[] bytes = new byte[byteBuffer.capacity()];
byteBuffer.get(bytes);
FileOutputStream fileOutputStream = null;
try{
fileOutputStream =new FileOutputStream(mImageFileName);
fileOutputStream.write(bytes);
}catch(IOException e){
e.printStackTrace();
}
finally{
Log.i(TAG,"Closing Image!!");
mImage.close();
if(fileOutputStream!=null){
try {
fileOutputStream.close();
}catch(IOException e){
e.printStackTrace();
}
}
}
Message message = mHandler.obtainMessage();
Log.i(TAG,"sending Message from Image Saver run()");
message.sendToTarget();
}
}
private CameraCaptureSession.CaptureCallback mSessionCaptureCallback
= new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
super.onCaptureStarted(session, request, timestamp, frameNumber);
Log.i(TAG,"mSessionCaptureCallback+ onCaptureStarted");
}
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result){
super.onCaptureCompleted(session,request,result);
Log.i(TAG,"mSessionCaptureCallback+ onCaptureCompleted");
process(result); //process the result once capture session request is completed.
Log.i(TAG,"mSessionCaptureCallback+ onCaptureCompleted ENDS!");
}
@Override
public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure){
super.onCaptureFailed(session, request, failure);
Toast.makeText(getApplicationContext(),"Focus Lock UnSucessfull!", Toast.LENGTH_SHORT).show();
}
private void process(CaptureResult result){
Log.i(TAG,"mSTATE_PICTURE_CAPTURED"+mState);
switch (mState){
case STATE_PREVIEW:
break;
case STATE_WAIT_AF_LOCK:
Log.i(TAG,"process_ wait_AF_LOCK");
Integer afState=result.get(CaptureResult.CONTROL_AF_STATE);
if(afState==CaptureRequest.CONTROL_AF_STATE_FOCUSED_LOCKED || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED){
Log.i(TAG,"Lock Sucessfull!");
Toast.makeText(getApplicationContext(),"Lock Sucessfull!", Toast.LENGTH_SHORT).show();
mState=STATE_PICTURE_CAPTURED;
captureStillImage();
}
Log.i(TAG,"Out of process_WAIT_AF_LOCK");
break;
case STATE_PICTURE_CAPTURED:
break;
}
}
};
private CameraDevice.StateCallback mCameraDeviceStateCallback
= new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice cameraDevice) {
Log.i(TAG,"CameraDeviceStateCallback+ onOpened");
mCameraDevice = cameraDevice;
Toast.makeText(getApplicationContext(), "Cam Opened!", Toast.LENGTH_SHORT).show();
createCameraPreviewSession();
}
@Override
public void onClosed(CameraDevice dev){
mCameraDevice=null;
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
Log.i(TAG,"CameraDeviceStateCallback+ onDisconnected");
cameraDevice.close();
mCameraDevice = null;
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int error) {
cameraDevice.close();
mCameraDevice = null;
Log.i(TAG, "Error Number:" + error + "While opening camera!!!");
}
};
private TextureView.SurfaceTextureListener mSurfaceTexture = new
TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
Log.i(TAG, "SurfaceTexture Available");
setupCamera(width, height);
Log.i(TAG, "in onSurfaceTextureAvailable ");
transformImage(width, height);
Log.i(TAG, "in onSurfaceTextureAvailable ");
openCamera();
Log.i(TAG, "in onSurfaceTextureAvailable ");
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
Log.i(TAG, "in onSurfaceTextureSizeChanged ");
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
Log.i(TAG, "in onSurfaceTextureTextureUpdated");
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
Log.i(TAG, "SurfaceTexture Destroyed");
return false;
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
createImageFolder();
setContentView(R.layout.activity_main);
if(ContextCompat.checkSelfPermission(this,Manifest.permission.READ_EXTERNAL_STORAGE)==PackageManager.PERMISSION_GRANTED) {
mRecycleView = (RecyclerView) findViewById(R.id.galleryRecyclerView);
GridLayoutManager gridLayoutManager = new GridLayoutManager(this, 1);
mRecycleView.setLayoutManager(gridLayoutManager);
RecyclerView.Adapter imageAdapter = new ImageAdapter(mImageFolder);
Log.i(TAG, "RecyclerViewSetup!");
mTextureView = (TextureView) findViewById(R.id.textureView);
Log.i(TAG, "TextureViewSetup!");
mCaptureButton = (Button) findViewById(R.id.captureButton);
mCaptureButton.setOnClickListener(this);
}
else{
if(shouldShowRequestPermissionRationale(Manifest.permission.READ_EXTERNAL_STORAGE)) {
requestPermissions(new String[]{Manifest.permission.CAMERA}, REQUEST_READ_EXTERNAL_STORAGE_REQUEST);
}
}
Log.i(TAG,"onCreate Completed");
}
@Override
public void onResume() {
super.onResume();
openBackgroundThread();
if (mTextureView.isAvailable()) {
setupCamera(mTextureView.getWidth(), mTextureView.getHeight());
transformImage(mTextureView.getWidth(),mTextureView.getHeight());
if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
== PackageManager.PERMISSION_GRANTED) {
openCamera();
}
else {
if(shouldShowRequestPermissionRationale(Manifest.permission.CAMERA)) {
Toast.makeText(this, "App needs to be able to save Images", Toast.LENGTH_SHORT).show();
}
requestPermissions(new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSION_RESULT);
}
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTexture);
}
}
@Override
public void onPause() {
closeCamera();
closeBackgroundThread();
super.onPause();
}
public void setupCamera(int width, int height) {
Log.i(TAG,"setupcam");
CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
for (String cameraId : cameraManager.getCameraIdList()) {
CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
if (cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT) {
continue;
}
StreamConfigurationMap map = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size largestImageSize = Collections.max(
Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
new Comparator<Size>() {
@Override
public int compare(Size size, Size t1) {
return Long.signum(size.getWidth()*size.getHeight()- t1.getWidth()*t1.getHeight());
}
}
);
mImageReader = ImageReader.newInstance(largestImageSize.getWidth(),largestImageSize.getHeight(),ImageFormat.JPEG,1);
mImageReader.setOnImageAvailableListener(mOnImageAvailableListener,mBackgroundHandler);
mPreviewSize = getPrefferedPreviewSize(map.getOutputSizes(SurfaceTexture.class), width, height);
mCameraId = cameraId;
Log.i(TAG,"Setup Camera Ends!!");
return;
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private Size getPrefferedPreviewSize(Size[] mapSizes, int width, int height) {
List<Size> collectorSizes = new ArrayList<>();
for (Size size : mapSizes) {
if (width > height) {
if (size.getWidth() > width && size.getHeight() > height) {
collectorSizes.add(size);
} else {
if (size.getWidth() > height && size.getHeight() > width) {
collectorSizes.add(size);
}
}
}
}
if (collectorSizes.size() > 0) {
return Collections.min(collectorSizes, new Comparator<Size>() {
@Override
public int compare(Size lhs, Size rhs) {
return Long.signum(lhs.getWidth() * lhs.getHeight() - rhs.getHeight() * rhs.getWidth());
}
});
}
return mapSizes[0];
}
public void openCamera() {
Log.i(TAG,"In OpenCamera");
CameraManager cameraManager = (CameraManager) getSystemService(CAMERA_SERVICE);
try {
Log.i(TAG,"CamaeraManager.OpenCamera");
cameraManager.openCamera(mCameraId, mCameraDeviceStateCallback, mBackgroundHandler);
Log.i(TAG,"CamaeraManager.OpenCameraEnds");
}
catch(SecurityException s){
s.printStackTrace();
}
catch(CameraAccessException e){
e.printStackTrace();
}
Log.i(TAG,"OpenCamera Ends");
}
public void closeCamera(){
if(mImageReader!=null){
mImageReader.close();
mImageReader=null;
}
if(mCameraCaptureSession!=null) {
mCameraCaptureSession.close();
mCameraCaptureSession = null;
}
if(mCameraDevice!=null){
mCameraDevice.close();
mCameraDevice = null;
}
}
public void createCameraPreviewSession(){
try{
Log.i(TAG,"CREATE_PREVIEW_CAMERA_SESSION");
SurfaceTexture surfaceTexture=mTextureView.getSurfaceTexture();
surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(),mPreviewSize.getHeight());
Surface previewSurface = new Surface(surfaceTexture);
mPreviewCaptureRequestBuilder=mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewCaptureRequestBuilder.addTarget(previewSurface);
mCameraDevice.createCaptureSession(
Arrays.asList(previewSurface,mImageReader.getSurface()),
new CameraCaptureSession.StateCallback(){
@Override
public void onConfigured(CameraCaptureSession session){
Log.i(TAG,"mCameraDevice.careateCaptireSession onConfigured");
if(null == mCameraDevice){
return;
}
try{
mPreviewCaptureRequest=mPreviewCaptureRequestBuilder.build();
mCameraCaptureSession = session;
mCameraCaptureSession.setRepeatingRequest(mPreviewCaptureRequest,mSessionCaptureCallback,mBackgroundHandler);
}
catch(CameraAccessException e){
e.printStackTrace();
}
Log.i(TAG,"mCameraDevice.careateCaptireSession onConfigured setRepeatingRequests");
}
@Override
public void onConfigureFailed(CameraCaptureSession session){
Log.i(TAG,"mCameraDevice.careateCaptireSession onConfigurationFailed");
Toast.makeText(getApplicationContext(),"CreateCameraSession FAILED!!",Toast.LENGTH_SHORT).show();
}
}
,null);
}catch(CameraAccessException e){
e.printStackTrace();
}
}
private void clickPic(){
Log.i(TAG,"in ClickPic");
lockFocus();
Log.i(TAG,"exiting ClickPic");
}
private void lockFocus(){
try {
Log.i(TAG,"lockFocus");
mState = STATE_WAIT_AF_LOCK;
mPreviewCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,CaptureRequest.CONTROL_AF_TRIGGER_START);
mCameraCaptureSession.capture(mPreviewCaptureRequestBuilder.build(), mSessionCaptureCallback, mBackgroundHandler);
}catch(CameraAccessException e){
e.printStackTrace();
}
Log.i(TAG,"exiting lockFocus");
}
private void unlockFocus(){
try {
Log.i(TAG,"unlockFocus");
mPreviewCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,CaptureRequest.CONTROL_AF_TRIGGER_CANCEL);
mCameraCaptureSession.capture(mPreviewCaptureRequestBuilder.build(),mSessionCaptureCallback,mBackgroundHandler);
mState = STATE_PREVIEW;
mCameraCaptureSession.setRepeatingRequest(mPreviewCaptureRequestBuilder.build(), mSessionCaptureCallback, mBackgroundHandler);
}catch(CameraAccessException e){
e.printStackTrace();
}
Log.i(TAG,"exiting lockFocus");
}
private void openBackgroundThread(){
mBackgroundThread=new HandlerThread("Camera Background Thread");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper()) {
public void handleMessage(Message msg) {
Log.i(TAG,"Inside Background Handler");
}
};
}
private void closeBackgroundThread(){
Log.i(TAG,"closing Background Thread!");
mBackgroundThread.quitSafely();
try{
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
}catch(InterruptedException e){
e.printStackTrace();
}
}
private void captureStillImage(){
try {
Log.i(TAG,"captureStillImage");
mPreviewCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
mPreviewCaptureRequestBuilder.addTarget(mImageReader.getSurface());
int rotation = getWindowManager().getDefaultDisplay().getRotation();
mPreviewCaptureRequestBuilder.set(CaptureRequest.JPEG_ORIENTATION,ORIENTATIONS.get(rotation));
CameraCaptureSession.CaptureCallback captureCallback =
new CameraCaptureSession.CaptureCallback(){
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result){
Log.i(TAG,"captureStillImage+ onCaptureCompleted"+mImageFileName);
Toast.makeText(getApplicationContext(),"Image Captured!",Toast.LENGTH_SHORT).show();
unlockFocus();
}
public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
super.onCaptureStarted(session, request, timestamp, frameNumber);
Log.i(TAG,"captureStillImage+ onCapturestarted");
try {
mImageFile=createImageFileName();
} catch (IOException e) {
e.printStackTrace();
}
}
};
mCameraCaptureSession.stopRepeating();
mCameraCaptureSession.capture(mPreviewCaptureRequestBuilder.build(),captureCallback,null);
}catch(CameraAccessException e){
e.printStackTrace();
}
Log.i(TAG,"captureStillImage Ends!");
}
private void transformImage(int width,int height){
Matrix matrix = new Matrix();
if(mPreviewSize==null||mTextureView==null) {
return;
}
int rotation = getWindowManager().getDefaultDisplay().getRotation();
RectF textureRectF = new RectF(0,0,width,height);
RectF previewRectF = new RectF(0,0,mPreviewSize.getHeight(),mPreviewSize.getWidth());
float centerX = textureRectF.centerX();
float centerY = textureRectF.centerY();
if(rotation==Surface.ROTATION_90||rotation==Surface.ROTATION_270){
previewRectF.offset(centerX - previewRectF.centerX(),centerY-previewRectF.centerY());
matrix.setRectToRect(textureRectF,previewRectF,Matrix.ScaleToFit.FILL);
float scale = Math.max((float)width/mPreviewSize.getWidth(),
(float)height/mPreviewSize.getHeight());
matrix.postScale(scale,scale,centerX,centerY);
matrix.postRotate(90*(rotation-2),centerX,centerY);
}
mTextureView.setTransform(matrix);
}
@Override
public void onClick(View view){
switch(view.getId()){
case R.id.captureButton:
clickPic();
break;
}
}
private void checkPermission() {
if (ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE)
== PackageManager.PERMISSION_GRANTED) {
try {
createImageFileName();
} catch (IOException e) {
e.printStackTrace();
}
}
else {
if(shouldShowRequestPermissionRationale(Manifest.permission.WRITE_EXTERNAL_STORAGE)) {
Toast.makeText(this, "App needs to be able to save Images", Toast.LENGTH_SHORT).show();
}
requestPermissions(new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, REQUEST_WRITE_EXTERNAL_STORAGE_PERMISSION_RESULT);
}
}
private void createImageFolder() {
Log.i(TAG,"createImageFolder");
File imageFile = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES);
mImageFolder = new File(imageFile, "camera2Api");
if(!mImageFolder.exists()) {
mImageFolder.mkdirs();
}
}
private File createImageFileName() throws IOException {
Log.i(TAG,"createImageFileName");
String timestamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
String prepend = "IMG_" + timestamp + "_";
File imageFile = File.createTempFile(prepend, ".jpg", mImageFolder);
mImageFileName = imageFile.getAbsolutePath();
Log.i(TAG,"Image FILE NAME="+mImageFileName);
return imageFile;
}
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if(requestCode == REQUEST_CAMERA_PERMISSION_RESULT) {
if (grantResults[0] != PackageManager.PERMISSION_GRANTED) {
Toast.makeText(getApplicationContext(),
"Application will not run without camera services", Toast.LENGTH_SHORT).show();
}
}
if(requestCode == REQUEST_WRITE_EXTERNAL_STORAGE_PERMISSION_RESULT) {
if (grantResults[0] == PackageManager.PERMISSION_GRANTED) {
Toast.makeText(this,
"Permission successfully granted!", Toast.LENGTH_SHORT).show();
}
}
if(requestCode==REQUEST_READ_EXTERNAL_STORAGE_REQUEST){
if(grantResults[0]==PackageManager.PERMISSION_GRANTED){
Toast.makeText(this,
"Permission successfully granted!", Toast.LENGTH_SHORT).show();
}
}
}
private void swapImageAdapter(){
Log.i(TAG,"swapImageAdapter"+mImageFolder.toString()+"mGalleryFolder from MainActivity!!");
RecyclerView.Adapter newImageAdapter=new ImageAdapter(mImageFolder);
mRecycleView.swapAdapter(newImageAdapter,false);
}
private File[] sortFilesToLatest(File fileImagesDir) {
File[] files = fileImagesDir.listFiles();
Arrays.sort(files, new Comparator<File>() {
@Override
public int compare(File lhs, File rhs) {
return Long.valueOf(rhs.lastModified()).compareTo(lhs.lastModified());
}
});
return files;
}
}
And this is the ImageAdapter class used to inflate the ImageViews in the RecyclerView:
public class ImageAdapter extends RecyclerView.Adapter {
private static final String TAG="ImageAdapter";
private File imagesFile;
public ImageAdapter(File folderFile) {
Log.i(TAG,folderFile.toString()+"printing Folder File FROM IMAGE_ADAPTER!!!!");
imagesFile = folderFile;
}
@Override
public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
Log.i(TAG,"onCreateViewHolder");
View view = LayoutInflater.from(parent.getContext())
.inflate(R.layout.activity_image, parent, false);
return new ViewHolder(view);
}
@Override
public void onBindViewHolder(ViewHolder holder, int position) {
Log.i(TAG,"onBindViewHolder");
BitmapFactory.Options mBitOptions = new BitmapFactory.Options();
mBitOptions.inScaled=true;
mBitOptions.inSampleSize=2;
int size = imagesFile.listFiles().length;
Log.i(TAG,imagesFile.toString()+" its SIZE="+size+" position="+position);
Log.i(TAG,"CONSIDERING FILE:"+imagesFile.listFiles()[position].getPath());
File imageFile = imagesFile.listFiles()[position];
Bitmap imageBitmap = BitmapFactory.decodeFile(imageFile.getAbsolutePath(),mBitOptions);
holder.getImageView().setImageBitmap(imageBitmap);
}
@Override
public int getItemCount() {
Log.i(TAG,"getItemCount");
return imagesFile.listFiles().length;
}
public static class ViewHolder extends RecyclerView.ViewHolder {
private ImageView imageView;
public ViewHolder(View view) {
super(view);
imageView = (ImageView) view.findViewById(R.id.imageGalleryView);
}
public ImageView getImageView() {
return imageView;
}
}
}
Activity_main.xml
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
<android.support.v7.widget.RecyclerView
android:id="@+id/galleryRecyclerView"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignEnd="@+id/captureButton"
android:layout_alignParentStart="true"
android:layout_alignParentTop="true"
android:layout_marginStart="25dp"
android:layout_marginTop="28dp"
android:layout_toStartOf="@+id/captureButton" />
<Button
android:id="@+id/captureButton"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentBottom="true"
android:layout_centerHorizontal="true"
android:layout_marginBottom="24dp"
android:text="@string/CaptureButtonString" />
<TextureView
android:id="@+id/textureView"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentTop="true"
android:layout_alignParentStart="true" />
</RelativeLayout>
Activity_image.xml contains an ImageView inside a RelativeLayout; I could not include its code due to character restrictions.
Please advise: what am I doing wrong?
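One thing that stands out (a possible cause, not a verified fix): captureStillImage() reassigns mPreviewCaptureRequestBuilder to a TEMPLATE_STILL_CAPTURE builder whose only target is the ImageReader, so when unlockFocus() later calls setRepeatingRequest with that same builder, the repeating request no longer targets the preview surface, which would explain the frozen frame. A minimal sketch that keeps the still capture in its own builder (captureCallback refers to the callback already defined inside the question's captureStillImage()):
// Sketch only: use a separate builder for the still capture so the preview
// builder, and the preview surface it targets, stay intact for unlockFocus().
private void captureStillImage() {
    try {
        CaptureRequest.Builder stillBuilder =
                mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        stillBuilder.addTarget(mImageReader.getSurface());
        int rotation = getWindowManager().getDefaultDisplay().getRotation();
        stillBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
        mCameraCaptureSession.stopRepeating();
        mCameraCaptureSession.capture(stillBuilder.build(), captureCallback, mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
For the blur itself, giving autofocus time to converge before capturing (for example by also accepting CONTROL_AF_STATE_PASSIVE_FOCUSED, or running an AE precapture sequence as in Google's Camera2Basic sample) is a common refinement, but that part is only a suggestion.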

Why is video getting stretched in the Camera2 API?

I am building a video camera app that is always in landscape mode and always records from the front camera.
But when I record a video, it looks stretched. How do I solve this?
Where am I making a mistake?
public class MainActivity extends AppCompatActivity {
public static final int REQUEST_CAMERA_PERMISSION_RESULT = 0;
public static final int REQUEST_WRITE_EXTERNAL_STORAGE_PERMISSION_RESULT = 1;
private TextureView mTextureView;
//public ImageView mImageView;
private TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
//Toast.makeText(getApplicationContext(),"TextureView is Available",Toast.LENGTH_SHORT).show();
setupCamera(width, height);
connectCamera();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int width, int height) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
}
};
private CameraDevice mCameraDevice;
private CameraDevice.StateCallback mCameraDeviceStateCallback;
{
mCameraDeviceStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
mCameraDevice = camera;
mMediaRecorder = new MediaRecorder();
if (mIsRecording) {
try {
createVideoFileName();
} catch (IOException e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
startRecord();
mMediaRecorder.start();
runOnUiThread(new Runnable() {
@Override
public void run() {
mChronometer.setBase(SystemClock.elapsedRealtime());
mChronometer.setVisibility(View.VISIBLE);
mChronometer.start();
}
});
} else {
startPreview();
}
//Toast.makeText(getApplicationContext(),"Camera connected",Toast.LENGTH_SHORT).show();
}
@Override
public void onDisconnected(CameraDevice camera) {
camera.close();
mCameraDevice = null;
}
@Override
public void onError(CameraDevice camera, int i) {
}
};
}
private HandlerThread mBackgroundHandlerThread;
private Handler mBackgroundHandler;
private String mCameraId;
private Size mPreviewSize;
private Size mVideoSize;
private MediaRecorder mMediaRecorder;
private Chronometer mChronometer;
private ImageView thumb;
//private String V1, V2, V3, V4, V5;
// private Map<String, String> mapA = new HashMap<>();
// private ImageView[] IMGS = {mImageView1, mImageView2, mImageView3, mImageView4, mImageView5};
private int mTotalRotation;
private CaptureRequest.Builder mCaptureRequestBuilder;
public static int count;
public static int max = 5;
private ImageButton mRecordImageButton;
private boolean mIsRecording = false;
public static File mVideoFolder;
private static File mRawVideoFolder;
public static String mVideoFileName;
//Test
private List<Bitmap> bitMapsAvailable = new ArrayList<>();
private List<String> bitMapsFilePath = new ArrayList<>();
private int bitMapIndex;
CameraCaptureSession storedSession;
private ArrayAdapter bitMapAdapter;
private ArrayAdapter bitMapFileAdapter;
private static SparseIntArray ORIENTATIONS = new SparseIntArray();
public int index;
static {
ORIENTATIONS.append(Surface.ROTATION_0, 0);
ORIENTATIONS.append(Surface.ROTATION_90, 90);
ORIENTATIONS.append(Surface.ROTATION_180, 180);
ORIENTATIONS.append(Surface.ROTATION_270, 270);
}
private static class CompareSizeByArea implements Comparator<Size> {
@Override
public int compare(Size lhs, Size rhs) {
return Long.signum((long) lhs.getWidth() * lhs.getHeight() / (long) rhs.getWidth() * rhs.getHeight());
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
//setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
createVideoFolder();
mMediaRecorder = new MediaRecorder();
mChronometer = (Chronometer) findViewById(R.id.chronometer);
mTextureView = (TextureView) findViewById(R.id.textureView);
mRecordImageButton = (ImageButton) findViewById(R.id.videoButton);
mRecordImageButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (mIsRecording) {
mChronometer.stop();
mChronometer.setVisibility(View.INVISIBLE);
mIsRecording = false;
mRecordImageButton.setImageResource(R.mipmap.start_recording);
//Toast.makeText(getApplicationContext(),"Started",Toast.LENGTH_SHORT).show();
if(storedSession != null){
try {
storedSession.stopRepeating();
storedSession.abortCaptures();
}catch (CameraAccessException e){
throw new RuntimeException(e.getMessage());
}
}
mMediaRecorder.stop();
mMediaRecorder.reset();
startPreview();
//Create bitmap with current video file path
Bitmap bitMap = ThumbnailUtils.createVideoThumbnail(mVideoFileName, MediaStore.Video.Thumbnails.MICRO_KIND);
//Add bitmap to array list
bitMapsAvailable.add(bitMap);
bitMapsFilePath.add(mVideoFileName);
// Shows thumbnails
showThumbnails();
} else {
checkWriteStoragePermission();
}
}
});
}
private void showThumbnails() {
LinearLayout layout = (LinearLayout) findViewById(R.id.thumbnails);
bitMapAdapter = new ArrayAdapter(this, R.layout.activity_main, bitMapsAvailable);
bitMapFileAdapter = new ArrayAdapter(this, R.layout.activity_main, bitMapsFilePath);
bitMapIndex = 0;
if (layout.getChildCount() > 0) {
layout.removeAllViews();
}
for (Bitmap eachBitMap : bitMapsAvailable) {
bitMapIndex++;
ImageView thumb = new ImageView(this);
thumb.setId(new Integer(bitMapIndex+ 17));
thumb.setLayoutParams(new android.view.ViewGroup.LayoutParams(100, 80));
thumb.setImageBitmap(eachBitMap);
// Adds the view to the layout
thumb.setOnClickListener(previewThumb(thumb));
layout.addView(thumb);
}
}
View.OnClickListener previewThumb(final ImageView imageview) {
return new View.OnClickListener() {
public void onClick(View arg0) {
index = imageview.getId()-18;
imageview.setBackgroundColor(0xff999999);
// Start NewActivity.class
Intent myIntent = new Intent(MainActivity.this,
VideoViewActivity.class);
Bundle bundle = new Bundle();
bundle.putStringArrayList("bitMapsAvailable", new ArrayList(bitMapsAvailable));
bundle.putStringArrayList("bitMapsFilePath", new ArrayList(bitMapsFilePath));
//Add your data to bundle
bundle.putInt("urlIndex", index);
myIntent.putExtras(bundle);
bitMapAdapter.notifyDataSetChanged();
bitMapFileAdapter.notifyDataSetChanged();
// startActivity(myIntent);
startActivityForResult(myIntent, 111);
}
};
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (resultCode == RESULT_OK) {
if (requestCode == 111) {
int indexToBeDeleted = data.getIntExtra("indexToBeDeleted",index);
//bitMapsFilePath.remove(indexToBeDeleted);
bitMapsAvailable.remove(indexToBeDeleted);
}
}
}
@Override
protected void onResume() {
super.onResume();
startBackgroundThread();
if (mTextureView.isAvailable()) {
setupCamera(mTextureView.getWidth(), mTextureView.getHeight());
connectCamera();
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
@Override
public void onRequestPermissionsResult(int requestCode, String[] permission, int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permission, grantResults);
if (requestCode == REQUEST_CAMERA_PERMISSION_RESULT) {
if (grantResults[0] != PackageManager.PERMISSION_GRANTED) {
Toast.makeText(getApplicationContext(), "Application will not run without camera service", Toast.LENGTH_SHORT).show();
}
if (grantResults[0] != PackageManager.PERMISSION_GRANTED) {
Toast.makeText(getApplicationContext(), "Application will not have audio on record ", Toast.LENGTH_SHORT).show();
}
}
if (requestCode == REQUEST_WRITE_EXTERNAL_STORAGE_PERMISSION_RESULT) {
if (grantResults[0] == PackageManager.PERMISSION_GRANTED) {
mIsRecording = true;
mRecordImageButton.setImageResource(R.mipmap.ic_launcher);
try {
createVideoFileName();
} catch (IOException e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
Toast.makeText(this, "Permission Successfully Granted", Toast.LENGTH_SHORT).show();
} else {
Toast.makeText(this, "App needs to save video to run", Toast.LENGTH_SHORT).show();
}
}
}
@Override
protected void onPause() {
closeCamera();
stopBackgroundThread();
super.onPause();
}
@Override
public void onWindowFocusChanged(boolean hasFocus) {
super.onWindowFocusChanged(hasFocus);
View decorView = getWindow().getDecorView();
if (hasFocus) {
decorView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_STABLE
| View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY
| View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
| View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_FULLSCREEN
| View.SYSTEM_UI_FLAG_HIDE_NAVIGATION);
}
}
private void setupCamera(int width, int height) {
CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
for (String cameraId : cameraManager.getCameraIdList()) {
CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
if (cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_BACK) {
continue;
}
StreamConfigurationMap map = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
//int deviceOrientation = getWindowManager().getDefaultDisplay().getRotation();
mTotalRotation = sensorToDeviceRotation(cameraCharacteristics);
boolean swapRotation = mTotalRotation == 90 || mTotalRotation == 270;
int rotateWidth = width;
int rotateHeight = height;
if (swapRotation) {
rotateWidth = height;
rotateHeight = width;
}
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotateWidth, rotateHeight);
mVideoSize = chooseOptimalSize(map.getOutputSizes(MediaRecorder.class), rotateWidth, rotateHeight);
mCameraId = cameraId;
return;
}
} catch (CameraAccessException e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
}
private void connectCamera() {
CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED) {
cameraManager.openCamera(mCameraId, mCameraDeviceStateCallback, mBackgroundHandler);
} else {
if (shouldShowRequestPermissionRationale(Manifest.permission.CAMERA)) {
Toast.makeText(this, "Video app required access to camera", Toast.LENGTH_SHORT).show();
}
requestPermissions(new String[]{android.Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO}, REQUEST_CAMERA_PERMISSION_RESULT);
}
} else {
cameraManager.openCamera(mCameraId, mCameraDeviceStateCallback, mBackgroundHandler);
}
} catch (CameraAccessException e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
}
private void startRecord() {
if (this.bitMapsAvailable.size() < max) {
try {
setupMediaRecorder();
SurfaceTexture surfaceTexture = mTextureView.getSurfaceTexture();
surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface previewSurface = new Surface(surfaceTexture);
Surface recordSurface = mMediaRecorder.getSurface();
mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
mCaptureRequestBuilder.addTarget(previewSurface);
mCaptureRequestBuilder.addTarget(recordSurface);
mCameraDevice.createCaptureSession(Arrays.asList(previewSurface, recordSurface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
session.setRepeatingRequest(mCaptureRequestBuilder.build(), null, null);
storedSession = session;
} catch (CameraAccessException e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
}
}, null);
} catch (Exception e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
} else {
}
}
private void startPreview() {
SurfaceTexture surfaceTexture = mTextureView.getSurfaceTexture();
surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface previewSurface = new Surface(surfaceTexture);
try {
mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mCaptureRequestBuilder.addTarget(previewSurface);
mCameraDevice.createCaptureSession(Arrays.asList(previewSurface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
session.stopRepeating();
session.abortCaptures();
session.setRepeatingRequest(mCaptureRequestBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
Toast.makeText(getApplicationContext(), "Unable to setup Camera Preview", Toast.LENGTH_SHORT).show();
}
}, null);
} catch (CameraAccessException e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
}
private void closeCamera() {
if (mCameraDevice != null) {
mCameraDevice.close();
mCameraDevice = null;
}
}
private void startBackgroundThread() {
mBackgroundHandlerThread = new HandlerThread("AuthorTV");
mBackgroundHandlerThread.start();
mBackgroundHandler = new Handler(mBackgroundHandlerThread.getLooper());
}
private void stopBackgroundThread() {
mBackgroundHandlerThread.quitSafely();
try {
mBackgroundHandlerThread.join();
mBackgroundHandlerThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
}
private static int sensorToDeviceRotation(CameraCharacteristics cameraCharacteristics) {
int sensorOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
// deviceOrientation = ORIENTATIONS.get(deviceOrientation);
return (sensorOrientation + 180 + 360) % 360;
}
private static Size chooseOptimalSize(Size[] choices, int width, int height) {
List<Size> bigEnough = new ArrayList<Size>();
for (Size option : choices) {
if (option.getHeight() == option.getWidth() * height / width && option.getWidth() >= width && option.getHeight() >= height) {
bigEnough.add(option);
}
}
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizeByArea());
} else {
return choices[0];
}
}
private void setupMediaRecorder() throws IOException {
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mMediaRecorder.setOutputFile(mVideoFileName);
mMediaRecorder.setVideoEncodingBitRate(1000000);
mMediaRecorder.setVideoFrameRate(30);
mMediaRecorder.setVideoSize(mVideoSize.getWidth(), mVideoSize.getHeight());
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
mMediaRecorder.setOrientationHint(mTotalRotation);
mMediaRecorder.prepare();
}
chooseOptimalSize needs to be adjusted so that different sizes are chosen for mPreviewSize and mVideoSize. Check this link: Camera2VideoFragment.java
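The stretching usually comes from the chosen preview/video size having a different aspect ratio than the surface it is drawn on; note also that the CompareSizeByArea above divides the areas instead of subtracting them, so the comparison is not what the standard samples use. A sketch of an aspect-ratio-aware chooseOptimalSize in the spirit of the Camera2Video sample (an illustration, not the sample's exact code):
// Sketch only: keep only sizes with the requested aspect ratio that are big
// enough for the view, and pick the smallest of them.
private static Size chooseOptimalSize(Size[] choices, int width, int height, Size aspectRatio) {
    List<Size> bigEnough = new ArrayList<>();
    int w = aspectRatio.getWidth();
    int h = aspectRatio.getHeight();
    for (Size option : choices) {
        if (option.getHeight() == option.getWidth() * h / w
                && option.getWidth() >= width && option.getHeight() >= height) {
            bigEnough.add(option);
        }
    }
    if (!bigEnough.isEmpty()) {
        return Collections.min(bigEnough, new Comparator<Size>() {
            @Override
            public int compare(Size lhs, Size rhs) {
                // Subtract, not divide, so the smallest sufficient area wins.
                return Long.signum((long) lhs.getWidth() * lhs.getHeight()
                        - (long) rhs.getWidth() * rhs.getHeight());
            }
        });
    }
    return choices[0];
}
With this, mVideoSize could be chosen against a 16:9 (or the sensor's) ratio and mPreviewSize against the ratio of mVideoSize, so the preview and the recording agree.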

Lacking privileges to access camera service in Android 6.0

I am using Camera2API in Android 6.0. I was done without error in Android 5.0. However, when I used my code in the Android 6.0, I have a issue. The issue is that sometime I can open the camera successfully and take picture. However, sometime the camera cannot open and it has error
java.lang.SecurityException: Lacking privileges to access camera service
at android.hardware.camera2.utils.CameraBinderDecorator.throwOnError(CameraBinderDecorator.java:108)
I added the runtime permission request as follows:
String[] PERMISSIONS = {Manifest.permission.READ_EXTERNAL_STORAGE, Manifest.permission.WRITE_EXTERNAL_STORAGE,Manifest.permission.CAMERA};
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M){
if(!hasAllPermissions(this, PERMISSIONS)){
ActivityCompat.requestPermissions(this, PERMISSIONS, PERMISSION_ALL);
}
}
public static boolean hasAllPermissions(Context context, String... permissions) {
if (android.os.Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && context != null && permissions != null) {
for (String permission : permissions) {
if (ActivityCompat.checkSelfPermission(context, permission) != PackageManager.PERMISSION_GRANTED) {
return false;
}
}
}
return true;
}
This is the full log:
FATAL EXCEPTION: main
Process: com.example.camera2api, PID: 5376
java.lang.SecurityException: Lacking privileges to access camera service
at android.hardware.camera2.utils.CameraBinderDecorator.throwOnError(CameraBinderDecorator.java:108)
at android.hardware.camera2.utils.CameraBinderDecorator$CameraBinderDecoratorListener.onAfterInvocation(CameraBinderDecorator.java:73)
at android.hardware.camera2.utils.Decorator.invoke(Decorator.java:81)
at java.lang.reflect.Proxy.invoke(Proxy.java:393)
at $Proxy2.cancelRequest(Unknown Source)
at android.hardware.camera2.impl.CameraDeviceImpl.stopRepeating(CameraDeviceImpl.java:926)
at android.hardware.camera2.impl.CameraCaptureSessionImpl.close(CameraCaptureSessionImpl.java:378)
at android.hardware.camera2.impl.CameraCaptureSessionImpl$2.onDisconnected(CameraCaptureSessionImpl.java:514)
at android.hardware.camera2.impl.CameraDeviceImpl$7.run(CameraDeviceImpl.java:228)
at android.os.Handler.handleCallback(Handler.java:739)
at android.os.Handler.dispatchMessage(Handler.java:95)
at android.os.Looper.loop(Looper.java:158)
at android.app.ActivityThread.main(ActivityThread.java:7229)
at java.lang.reflect.Method.invoke(Native Method)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1230)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1120)
Update: for anyone who wants to look at my full code, I have uploaded it here:
public class AndroidCamera extends AppCompatActivity {
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
private static final String TAG = "Camera2App";
private String mImageFileLocation = "";
private static final int STATE_PREVIEW = 0;
private static final int STATE_WAIT_LOCK = 1;
private static final int STATE_WAITING_PRECAPTURE = 2;
/**
* Camera state: Waiting for the exposure state to be something other than precapture.
*/
private static final int STATE_WAITING_NON_PRECAPTURE = 3;
/**
* Camera state: Picture was taken.
*/
private static final int STATE_PICTURE_TAKEN = 4;
private int mState;
private TextureView mTextureView;
private Size mPreviewSize;
private String mCameraId;
String[] PERMISSIONS = {Manifest.permission.READ_EXTERNAL_STORAGE, Manifest.permission.WRITE_EXTERNAL_STORAGE,Manifest.permission.CAMERA};
private static final int PERMISSION_ALL = 105;
private static final int REQUEST_CAMERA_RESULT = 106;
private boolean isRegistred=false;
private int mSensorOrientation;
private TextureView.SurfaceTextureListener mSurfaceTextureListener =
new TextureView.SurfaceTextureListener() {
#Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
if (!TextUtils.isEmpty(BleUtils.getCameraLens(AndroidCamera.this)))
setupCamera(width, height,BleUtils.getCameraLens(AndroidCamera.this));
else
setupCamera(width, height,"1");
openCamera();
}
#Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
}
#Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
//closeCamera();
return false;
}
#Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
};
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
private CameraDevice mCameraDevice;
private CameraDevice.StateCallback mCameraDeviceStateCallback =
new CameraDevice.StateCallback() {
#Override
public void onOpened(CameraDevice camera) {
mCameraOpenCloseLock.release();
mCameraDevice = camera;
//Toast.makeText(getApplicationContext(),"Camera Opened!", Toast.LENGTH_SHORT).show();
createCameraPreviewSession();
}
#Override
public void onDisconnected(CameraDevice camera) {
camera.close();
mCameraDevice = null;
}
#Override
public void onError(CameraDevice camera, int error) {
camera.close();
mCameraDevice = null;
}
};
private CaptureRequest mPreviewCaptureRequest;
private CaptureRequest.Builder mPreviewCaptureRequestBuilder;
private CameraCaptureSession mCameraCaptureSession;
private CameraCaptureSession.CaptureCallback mSessionCaptureCallback
= new CameraCaptureSession.CaptureCallback() {
private void process(CaptureResult result){
switch (mState){
case STATE_PREVIEW:
break;
case STATE_WAIT_LOCK:
Integer afState=result.get(CaptureResult.CONTROL_AF_STATE);
if(afState==CaptureRequest.CONTROL_AF_STATE_FOCUSED_LOCKED){
captureStillImage();
}
else{
captureStillImage();
}
break;
}
}
#Override
public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
super.onCaptureStarted(session, request, timestamp, frameNumber);
}
#Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
process(result);
}
#Override
public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
super.onCaptureFailed(session, request, failure);
Handler mHandler = new Handler(getMainLooper());
mHandler.post(new Runnable() {
#Override
public void run() {
Toast.makeText(getApplicationContext(), "Focus Lock UnSuccesful", Toast.LENGTH_SHORT).show();
}
});
}
};
private HandlerThread mBackgroundThread;
private Handler mBackgroundHandler;
private static File mImageFile;
private ImageReader mImageReader;
private final ImageReader.OnImageAvailableListener mOnImageAvailableListener =
new ImageReader.OnImageAvailableListener() {
#Override
public void onImageAvailable(ImageReader reader) {
mBackgroundHandler.post(new ImageSaver(reader.acquireNextImage()));
}
};
private static class ImageSaver implements Runnable {
private final Image mImage;
private ImageSaver(Image image) {
mImage = image;
}
#Override
public void run() {
ByteBuffer byteBuffer = mImage.getPlanes()[0].getBuffer();
byte[] bytes = new byte[byteBuffer.remaining()];
byteBuffer.get(bytes);
FileOutputStream fileOutputStream = null;
try {
fileOutputStream = new FileOutputStream(mImageFile);
fileOutputStream.write(bytes);
} catch (IOException e) {
e.printStackTrace();
} finally {
mImage.close();
if (fileOutputStream != null) {
try {
fileOutputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.camera_activity);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M){
if(!hasAllPermissions(this, PERMISSIONS)){
ActivityCompat.requestPermissions(this, PERMISSIONS, PERMISSION_ALL);
}
}
mTextureView = (TextureView) findViewById(R.id.texture);
}
public static boolean hasAllPermissions(Context context, String... permissions) {
if (android.os.Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && context != null && permissions != null) {
for (String permission : permissions) {
if (ActivityCompat.checkSelfPermission(context, permission) != PackageManager.PERMISSION_GRANTED) {
return false;
}
}
}
return true;
}
#Override
public void onStart() {
super.onStart();
if (!EventBus.getDefault().isRegistered(this)) {
EventBus.getDefault().register(this);
}
}
//onStop
#Override
public void onStop() {
super.onStop();
EventBus.getDefault().unregister(this);
}
#Subscribe
public void onCaptureNumberReceived(OnCaptureEvent event) {
//get the phone number value here and do something with it
String capturecode = event.getCodeCapture();
Log.d(TAG, capturecode);
if (capturecode.equals("capture")) {
try {
mImageFile = createImageFile();
} catch (IOException e) {
e.printStackTrace();
}
lockFocus();
MediaActionSound sound = new MediaActionSound();
sound.play(MediaActionSound.SHUTTER_CLICK);
}
else if(capturecode.equals("end_capture")) {
finish(); // call this to finish the current activity
Intent homeIntent = new Intent(Intent.ACTION_MAIN);
homeIntent.addCategory( Intent.CATEGORY_HOME );
homeIntent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
startActivity(homeIntent);
}
}
public void takepicture(View view) {
try {
mImageFile = createImageFile();
Log.d("TAG","=====Take picture=====");
} catch (IOException e) {
e.printStackTrace();
}
lockFocus();
MediaActionSound sound = new MediaActionSound();
sound.play(MediaActionSound.SHUTTER_CLICK);
}
public void switch_camera(View view) {
closeCamera();
//swap the id of the camera to be used
if(mCameraId == String.valueOf(Camera.CameraInfo.CAMERA_FACING_BACK)){
mCameraId = String.valueOf(Camera.CameraInfo.CAMERA_FACING_FRONT);
}
else {
mCameraId = String.valueOf(Camera.CameraInfo.CAMERA_FACING_BACK);
}
BleUtils.setCameraLens(this, mCameraId);
if (mTextureView.isAvailable()) {
setupCamera(mTextureView.getWidth(), mTextureView.getHeight(),mCameraId);
openCamera();
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
File createImageFile() throws IOException {
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
String imageFileName = "BLE_" + timeStamp + "_";
File storageDirectory = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES);
if(!storageDirectory.exists()){
if(!storageDirectory.mkdirs()){
Log.e("Dir", "Failed to create directory");
Log.d("MAKE DIR", storageDirectory.mkdir() + "" + storageDirectory.getParentFile() + "");
return null;
}
}
File image = File.createTempFile(imageFileName, ".jpg", storageDirectory);
mImageFileLocation = image.getAbsolutePath();
return image;
}
#Override
public void onResume() {
super.onResume();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M){
if(!hasAllPermissions(this, PERMISSIONS)){
ActivityCompat.requestPermissions(this, PERMISSIONS, PERMISSION_ALL);
}
}
openBackgroundThread();
if (mTextureView.isAvailable()) {
if (!TextUtils.isEmpty(BleUtils.getCameraLens(AndroidCamera.this)))
setupCamera(mTextureView.getWidth(), mTextureView.getHeight(),BleUtils.getCameraLens(AndroidCamera.this));
else
setupCamera(mTextureView.getWidth(), mTextureView.getHeight(),"1");
closeCamera();
openCamera();
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
#Override
public void onDestroy() {
super.onDestroy();
Log.d(TAG,"onDestroy");
}
public void onPause() {
Log.d(TAG,"onPause");
closeCamera();
closeBackgroundThread();
super.onPause();
}
private void setupCamera(int width, int height, String cameraId) {
CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size largestImageSize = Collections.max(
Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
new Comparator<Size>() {
#Override
public int compare(Size lhs, Size rhs) {
return Long.signum(lhs.getWidth() * lhs.getHeight() -
rhs.getWidth() * rhs.getHeight());
}
}
);
mImageReader = ImageReader.newInstance(largestImageSize.getWidth(),
largestImageSize.getHeight(),
ImageFormat.JPEG,
1);
mImageReader.setOnImageAvailableListener(mOnImageAvailableListener,
mBackgroundHandler);
mPreviewSize = getPreferredPreviewSize(map.getOutputSizes(SurfaceTexture.class), width, height);
mCameraId = cameraId;
Log.d("CAMERA_ID",String.valueOf(mCameraId));
// }
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private Size getPreferredPreviewSize(Size[] mapSizes, int width, int height) {
List<Size> collectorSizes = new ArrayList<>();
for (Size option : mapSizes) {
if (width > height) {
if (option.getWidth() > width &&
option.getHeight() > height) {
collectorSizes.add(option);
}
} else {
if (option.getWidth() > height &&
option.getHeight() > width) {
collectorSizes.add(option);
}
}
}
if (collectorSizes.size() > 0) {
return Collections.min(collectorSizes, new Comparator<Size>() {
#Override
public int compare(Size lhs, Size rhs) {
return Long.signum(lhs.getWidth() * lhs.getHeight() - rhs.getWidth() * rhs.getHeight());
}
});
}
return mapSizes[0];
}
private void openCamera() {
CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
Log.v("CAMERA", mCameraId + " " + mCameraDeviceStateCallback);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M){
if(ContextCompat.checkSelfPermission(this, android.Manifest.permission.CAMERA)
== PackageManager.PERMISSION_GRANTED){
cameraManager.openCamera(mCameraId, mCameraDeviceStateCallback,mBackgroundHandler);
}
else {
if (shouldShowRequestPermissionRationale(android.Manifest.permission.CAMERA)){
Toast.makeText(this,"No Permission to use the Camera services", Toast.LENGTH_SHORT).show();
}
requestPermissions(new String[] {android.Manifest.permission.CAMERA},REQUEST_CAMERA_RESULT);
}
}
else {
cameraManager.openCamera(mCameraId, mCameraDeviceStateCallback, mBackgroundHandler);
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
#Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
switch (requestCode){
case REQUEST_CAMERA_RESULT:
if (grantResults[0] != PackageManager.PERMISSION_GRANTED){
Toast.makeText(this, "Cannot run application because camera service permission have not been granted", Toast.LENGTH_SHORT).show();
}
break;
default:
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
break;
}
}
private void closeCamera(){
if(mCameraCaptureSession!=null){
mCameraCaptureSession.close();
mCameraCaptureSession=null;
}
if (mCameraDevice!=null){
mCameraDevice.close();
mCameraDevice=null;
if(mImageReader!=null){
mImageReader.close();
mImageReader=null;
}
}
}
private void createCameraPreviewSession(){
try{
SurfaceTexture surfaceTexture = mTextureView.getSurfaceTexture();
surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(),mPreviewSize.getHeight());
Surface previewSurface= new Surface(surfaceTexture);
mPreviewCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewCaptureRequestBuilder.addTarget(previewSurface);
mPreviewCaptureRequestBuilder.set(CaptureRequest.JPEG_QUALITY, (byte)100);
mCameraDevice.createCaptureSession(Arrays.asList(previewSurface,mImageReader.getSurface()),
new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(CameraCaptureSession session) {
if(mCameraDevice==null){
return;
}
try {
mPreviewCaptureRequest = mPreviewCaptureRequestBuilder.build();
mCameraCaptureSession = session;
mCameraCaptureSession.setRepeatingRequest(
mPreviewCaptureRequest,
mSessionCaptureCallback,
mBackgroundHandler
);
}catch (CameraAccessException e){
e.printStackTrace();
}
}
#Override
public void onConfigureFailed(CameraCaptureSession session) {
Handler mHandler = new Handler(getMainLooper());
mHandler.post(new Runnable() {
#Override
public void run() {
Toast.makeText(
getApplicationContext(),
"create camera session failed!",
Toast.LENGTH_SHORT
).show();
}
});
}
},null);
}catch (CameraAccessException e){
e.printStackTrace();
}
}
private void openBackgroundThread(){
mBackgroundThread=new HandlerThread("Camera2 background thread");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
private void closeBackgroundThread(){
mBackgroundThread.quitSafely();
try{
mBackgroundThread.join();
mBackgroundThread=null;
mBackgroundHandler=null;
}catch (InterruptedException e){
e.printStackTrace();
}
}
private void lockFocus(){
try{
mState=STATE_WAIT_LOCK;
mPreviewCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
CaptureRequest.CONTROL_AF_TRIGGER_START);
mCameraCaptureSession.capture(mPreviewCaptureRequestBuilder.build(),
mSessionCaptureCallback,mBackgroundHandler);
}catch (CameraAccessException e){
e.printStackTrace();
}
}
private void unLockFocus(){
try{
mState=STATE_PREVIEW;
mPreviewCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
CaptureRequest.CONTROL_AF_TRIGGER_CANCEL);
mCameraCaptureSession.capture(mPreviewCaptureRequestBuilder.build(),
mSessionCaptureCallback,mBackgroundHandler);
}catch (CameraAccessException e){
e.printStackTrace();
}
}
private void captureStillImage(){
try {
CaptureRequest.Builder captureStillBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureStillBuilder.addTarget(mImageReader.getSurface());
// Use the same AE and AF modes as the preview.
captureStillBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
// setAutoFlash(captureBuilder);
// Orientation
int rotation=0;
//Front camera
if(mCameraId.equals("1")) {
rotation = this.getWindowManager().getDefaultDisplay().getRotation();
captureStillBuilder.set(CaptureRequest.JPEG_ORIENTATION, getOrientation(rotation));
}
else {
rotation = this.getWindowManager().getDefaultDisplay().getRotation();
captureStillBuilder.set(CaptureRequest.JPEG_ORIENTATION,
ORIENTATIONS.get(rotation));
}
CameraCaptureSession.CaptureCallback captureCallback =
new CameraCaptureSession.CaptureCallback() {
#Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
//Toast.makeText(getApplicationContext(),"Image Captured",Toast.LENGTH_SHORT).show();
unLockFocus();
}
};
mCameraCaptureSession.capture(
captureStillBuilder.build(),captureCallback,null
);
}catch (CameraAccessException e){
e.printStackTrace();
}
}
private int getOrientation(int rotation) {
return (ORIENTATIONS.get(rotation) + mSensorOrientation +180) % 360;
}
}
setupCamera() is called directly from onSurfaceTextureAvailable(), which can fire before the permissions are granted.
What you need to do is track whether the permissions have been granted and whether the surface texture is available, updating those flags from both callbacks.
Make a single entry point that checks these conditions and sets up the camera:
private boolean mSurfaceTextureAvailable;
private boolean mPermissionsGranted;
private boolean mCameraOpened;
private void setupCameraIfPossible() {
if (!mCameraOpened && mSurfaceTextureAvailable && mPermissionsGranted) {
String cameraLens = BleUtils.getCameraLens(AndroidCamera.this);
if (TextUtils.isEmpty(cameraLens)) {
cameraLens = "1";
}
setupCamera(mTextureView.getWidth(), mTextureView.getHeight(), cameraLens);
openCamera();
}
}
private final TextureView.SurfaceTextureListener mSurfaceTextureListener =
new TextureView.SurfaceTextureListener() {
#Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
mSurfaceTextureAvailable = true;
setupCameraIfPossible();
}
#Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
}
#Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
//closeCamera();
mSurfaceTextureAvailable = false;
return false;
}
#Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
};
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.camera_activity);
openBackgroundThread();
// Make sure the boolean flag is set. Will be true for lower SDK
mPermissionsGranted = hasAllPermissions(this, PERMISSIONS);
if (!mPermissionsGranted) {
ActivityCompat.requestPermissions(this, PERMISSIONS, PERMISSION_ALL);
}
mTextureView = (TextureView) findViewById(R.id.texture);
}
In onResume() you don't need to check for permissions. If they are revoked while the app is in the background, your Activity's process is killed and you will go through onCreate() again.
Remove the code in onPause() and onResume():
// #Override
// public void onResume() {
// super.onResume();
// if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M){
// if(!hasAllPermissions(this, PERMISSIONS)){
// ActivityCompat.requestPermissions(this, PERMISSIONS, PERMISSION_ALL);
// }
// }
// openBackgroundThread();
// if (mTextureView.isAvailable()) {
// if (!TextUtils.isEmpty(BleUtils.getCameraLens(AndroidCamera.this)))
// setupCamera(mTextureView.getWidth(), mTextureView.getHeight(),BleUtils.getCameraLens(AndroidCamera.this));
// else
// setupCamera(mTextureView.getWidth(), mTextureView.getHeight(),"1");
// closeCamera();
// openCamera();
// } else {
// mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
// }
// }
// public void onPause() {
// Log.d(TAG,"onPause");
// closeCamera();
//
// closeBackgroundThread();
// super.onPause();
// }
Add this to onStart():
#Override
public void onStart() {
super.onStart();
setupCameraIfPossible();
}
Move closing the camera to onStop():
#Override
public void onStop() {
super.onStop();
closeCamera();
}
private void closeCamera() {
mCameraOpened = false; // set a field indicating it is closed
...
}
private void openCamera() {
...
mCameraOpened = true; // If successful, set a field indicating it is opened
}
Another thing I discovered: you must actually re-check the permissions in onRequestPermissionsResult() instead of relying on the grantResults array:
#Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
switch (requestCode){
case REQUEST_CAMERA_RESULT:
mPermissionsGranted = hasAllPermissions(this, PERMISSIONS);
setupCameraIfPossible();
break;
default:
break;
}
}
