How to crop the region of a detected number (OCR) using Google Vision? - android

I wrote an app to recognize the value of a banknote. I'm using OCR from Mobile Vision to get the numbers and words, and when the text matches one of my conditions the app plays a voice announcing the value.
Now I want to try an experiment: I want the app to crop the region around the detected number and show it in my activity. This is what I've done so far.
public class MainActivity extends AppCompatActivity {
SurfaceView cameraView;
TextView textView;
CameraSource cameraSource;
final int RequestCameraPermissionID = 1001;
String nominal = "";
String bilangan = "";
TextToSpeech tts;
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
switch (requestCode) {
case RequestCameraPermissionID: {
if (grantResults[0] == PackageManager.PERMISSION_GRANTED) {
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
return;
}
try {
cameraSource.start(cameraView.getHolder());
} catch (IOException e) {
e.printStackTrace();
}
}
}
break;
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
cameraView = (SurfaceView) findViewById(R.id.surface_view);
textView = (TextView) findViewById(R.id.text_view);
tts=new TextToSpeech(MainActivity.this, new TextToSpeech.OnInitListener() {
@Override
public void onInit(int status) {
// TODO Auto-generated method stub
if(status == TextToSpeech.SUCCESS){
int result=tts.setLanguage(Locale.US);
if(result==TextToSpeech.LANG_MISSING_DATA ||
result==TextToSpeech.LANG_NOT_SUPPORTED){
Log.e("error", "This Language is not supported");
}
else{
ConvertTextToSpeech();
}
}
else
Log.e("error", "Initilization Failed!");
}
});
TextRecognizer textRecognizer = new TextRecognizer.Builder(getApplicationContext()).build();
if (!textRecognizer.isOperational()) {
Log.w("MainActivity", "Detector dependencies are not yet available");
} else {
cameraSource = new CameraSource.Builder(getApplicationContext(), textRecognizer)
.setFacing(CameraSource.CAMERA_FACING_BACK)
.setRequestedPreviewSize(1280, 1024)
.setRequestedFps(2.0f)
.setAutoFocusEnabled(true)
.build();
cameraView.getHolder().addCallback(new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
try {
if (ActivityCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(MainActivity.this,
new String[]{Manifest.permission.CAMERA},
RequestCameraPermissionID);
return;
}
cameraSource.start(cameraView.getHolder());
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i1, int i2) {
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
cameraSource.stop();
}
});
textRecognizer.setProcessor(new Detector.Processor<TextBlock>() {
@Override
public void release() {
}
@Override
public void receiveDetections(Detector.Detections<TextBlock> detections) {
final SparseArray<TextBlock> items = detections.getDetectedItems();
if(items.size() != 0)
{
textView.post(new Runnable() {
@Override
public void run() {
StringBuilder stringBuilder = new StringBuilder();
for(int i =0;i<items.size();++i)
{
TextBlock item = items.valueAt(i);
/*stringBuilder.append(item.getValue());
stringBuilder.append("\n");*/
if (item.getValue().toLowerCase().contains("seribu")){
bilangan = "seribu rupiah";
}
else if(item.getValue().toLowerCase().contains("dua ribu")){
bilangan = "dua ribu rupiah";
}
else if(item.getValue().toLowerCase().contains("lima ribu")){
bilangan = "lima ribu rupiah";
}
else if(item.getValue().toLowerCase().contains("sepuluh ribu")){
bilangan = "sepuluh ribu rupiah";
}
else if(item.getValue().toLowerCase().contains("dua puluh ribu")){
bilangan = "dua puluh ribu rupiah";
}
else if(item.getValue().toLowerCase().contains("lima puluh ribu")){
bilangan = "lima puluh ribu rupiah";
}
else if(item.getValue().toLowerCase().contains("seratus ribu")){
bilangan = "seratus ribu rupiah";
}
if(item.getValue().equals("1000")){
nominal = "1000";
}
else if(item.getValue().equals("2000")){
nominal = "2000"; // this assignment was missing, so 2000 notes were never handled
}
else if(item.getValue().equals("5000")){
nominal = "5000";
}
else if(item.getValue().equals("10000")){
nominal = "10000";
}
else if(item.getValue().equals("20000")){
nominal = "20000";
}
else if(item.getValue().equals("50000")){
nominal = "50000";
}
else if(item.getValue().equals("100000")){
nominal = "100000";
}
stringBuilder.append(bilangan);
stringBuilder.append(nominal);
}
//textView.setText(stringBuilder.toString());
if (!bilangan.equals("") && !nominal.equals("")){
cameraSource.stop();
ConvertTextToSpeech();
}
}
});
}
}
});
}
}
private void ConvertTextToSpeech() {
tts.speak(bilangan,TextToSpeech.QUEUE_FLUSH,null,"Niel");
}
}
My question is just how to crop the number (the red box in the image below) so that I can display it in my activity using an ImageView.
Thanks
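For the cropping step itself, a minimal sketch might look like the following. It assumes you can obtain the current camera frame as a Bitmap (for example by wrapping the detector so it keeps the last frame, or by calling cameraSource.takePicture() and decoding the JPEG bytes); showCroppedBlock, frameBitmap and target are hypothetical names, not part of the Mobile Vision API.
// Hypothetical helper: crop the detected block out of a frame bitmap and show it.
// Needs: android.graphics.Bitmap, android.graphics.Rect, com.google.android.gms.vision.text.TextBlock
private void showCroppedBlock(Bitmap frameBitmap, TextBlock block, final ImageView target) {
    Rect box = block.getBoundingBox();
    // Clamp the box to the bitmap bounds so createBitmap() cannot throw.
    int left = Math.max(0, box.left);
    int top = Math.max(0, box.top);
    int width = Math.min(box.width(), frameBitmap.getWidth() - left);
    int height = Math.min(box.height(), frameBitmap.getHeight() - top);
    final Bitmap cropped = Bitmap.createBitmap(frameBitmap, left, top, width, height);
    // ImageView may only be touched from the UI thread.
    target.post(new Runnable() {
        @Override
        public void run() {
            target.setImageBitmap(cropped);
        }
    });
}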

Related

Take a photo with SurfaceView

I'm trying to create an app that shows in a SurfaceView what the front camera sees. I implemented a Button that should take a screenshot of the Activity, because over the camera preview I draw a mask. But when I take the screenshot I only capture the mask that sits over the SurfaceView, not the camera preview itself.
I don't know what I'm doing wrong. Can anyone help me?
My intent is to create an app that lets me take a picture of myself with a mask over my face.
Here is an example from my virtual device. I've already tried on a real phone, but nothing changed.
public class MainActivity extends AppCompatActivity implements SurfaceHolder.Callback, Handler.Callback {
static final String TAG = "CamTest";
static final int MY_PERMISSIONS_REQUEST_CAMERA = 1242;
private static final int MSG_CAMERA_OPENED = 1;
private static final int MSG_SURFACE_READY = 2;
private final Handler mHandler = new Handler(this);
SurfaceView mSurfaceView;
SurfaceHolder mSurfaceHolder;
CameraManager mCameraManager;
String[] mCameraIDsList;
CameraDevice.StateCallback mCameraStateCB;
CameraDevice mCameraDevice;
CameraCaptureSession mCaptureSession;
boolean mSurfaceCreated = true;
boolean mIsCameraConfigured = false;
private Surface mCameraSurface = null;
Button btt_scatta;
Bitmap immagine_screen;
ImageView img_view_screen;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//Request fullscreen
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.activity_main);
//Get root view from Activity
final View rootView = getWindow().getDecorView().findViewById(android.R.id.content);
btt_scatta = (Button) findViewById(R.id.btt_scatta);
img_view_screen = (ImageView) findViewById(R.id.img_view_screen);
img_view_screen.setVisibility(View.GONE);
this.mSurfaceView = (SurfaceView) findViewById(R.id.surface_camera);
this.mSurfaceHolder = this.mSurfaceView.getHolder();
this.mSurfaceHolder.addCallback(this);
this.mCameraManager = (CameraManager) this.getSystemService(Context.CAMERA_SERVICE);
try {
mCameraIDsList = this.mCameraManager.getCameraIdList();
for (String id : mCameraIDsList) {
Log.v(TAG, "CameraID: " + id);
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
mCameraStateCB = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
Toast.makeText(getApplicationContext(), "onOpened", Toast.LENGTH_SHORT).show();
mCameraDevice = camera;
mHandler.sendEmptyMessage(MSG_CAMERA_OPENED);
}
@Override
public void onDisconnected(CameraDevice camera) {
Toast.makeText(getApplicationContext(), "onDisconnected", Toast.LENGTH_SHORT).show();
}
@Override
public void onError(CameraDevice camera, int error) {
Toast.makeText(getApplicationContext(), "onError", Toast.LENGTH_SHORT).show();
}
};
btt_scatta.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
btt_scatta.setVisibility(View.GONE);
immagine_screen = getScreenShot(rootView);
store(immagine_screen,"screenshot Honor");
img_view_screen.setVisibility(View.VISIBLE);
img_view_screen.setImageBitmap(immagine_screen);
}
});
}
@Override
protected void onStart() {
super.onStart();
//requesting permission
int permissionCheck = ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
if (permissionCheck != PackageManager.PERMISSION_GRANTED) {
if (ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.CAMERA)) {
} else {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, MY_PERMISSIONS_REQUEST_CAMERA);
Toast.makeText(getApplicationContext(), "request permission", Toast.LENGTH_SHORT).show();
}
} else {
Toast.makeText(getApplicationContext(), "PERMISSION_ALREADY_GRANTED", Toast.LENGTH_SHORT).show();
try {
mCameraManager.openCamera(mCameraIDsList[1], mCameraStateCB, new Handler());
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
}
@Override
protected void onStop() {
super.onStop();
try {
if (mCaptureSession != null) {
mCaptureSession.stopRepeating();
mCaptureSession.close();
mCaptureSession = null;
}
mIsCameraConfigured = false;
} catch (final CameraAccessException e) {
// Doesn't matter, closing device anyway
e.printStackTrace();
} catch (final IllegalStateException e2) {
// Doesn't matter, closing device anyway
e2.printStackTrace();
} finally {
if (mCameraDevice != null) {
mCameraDevice.close();
mCameraDevice = null;
mCaptureSession = null;
}
}
}
@Override
public boolean handleMessage(Message msg) {
switch (msg.what) {
case MSG_CAMERA_OPENED:
case MSG_SURFACE_READY:
// if both surface is created and camera device is opened
// - ready to set up preview and other things
if (mSurfaceCreated && (mCameraDevice != null)
&& !mIsCameraConfigured) {
configureCamera();
}
break;
}
return true;
}
private void configureCamera() {
// prepare list of surfaces to be used in capture requests
List<Surface> sfl = new ArrayList<Surface>();
sfl.add(mCameraSurface); // surface for viewfinder preview
// configure camera with all the surfaces to be ever used
try {
mCameraDevice.createCaptureSession(sfl,
new CaptureSessionListener(), null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
mIsCameraConfigured = true;
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
switch (requestCode) {
case MY_PERMISSIONS_REQUEST_CAMERA:
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED)
try {
mCameraManager.openCamera(mCameraIDsList[1], mCameraStateCB, new Handler());
} catch (CameraAccessException e) {
e.printStackTrace();
}
break;
}
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
mCameraSurface = holder.getSurface();
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
mCameraSurface = holder.getSurface();
mSurfaceCreated = true;
mHandler.sendEmptyMessage(MSG_SURFACE_READY);
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
mSurfaceCreated = false;
}
private class CaptureSessionListener extends
CameraCaptureSession.StateCallback {
@Override
public void onConfigureFailed(final CameraCaptureSession session) {
Log.d(TAG, "CaptureSessionConfigure failed");
}
@Override
public void onConfigured(final CameraCaptureSession session) {
Log.d(TAG, "CaptureSessionConfigure onConfigured");
mCaptureSession = session;
try {
CaptureRequest.Builder previewRequestBuilder = mCameraDevice
.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
previewRequestBuilder.addTarget(mCameraSurface);
mCaptureSession.setRepeatingRequest(previewRequestBuilder.build(),
null, null);
} catch (CameraAccessException e) {
Log.d(TAG, "setting up preview failed");
e.printStackTrace();
}
}
}
//Capture the root view
public static Bitmap getScreenShot(View view) {
View screenView = view.getRootView();
screenView.setDrawingCacheEnabled(true);
Bitmap bitmap = Bitmap.createBitmap(screenView.getDrawingCache());
screenView.setDrawingCacheEnabled(false);
return bitmap;
}
//Store the Bitmap into the phone
public static void store(Bitmap bm, String fileName){
final String dirPath = Environment.getExternalStorageDirectory().getAbsolutePath() + "/Screenshots";
File dir = new File(dirPath);
if(!dir.exists())
dir.mkdirs();
File file = new File(dirPath, fileName);
try {
FileOutputStream fOut = new FileOutputStream(file);
bm.compress(Bitmap.CompressFormat.PNG, 85, fOut);
fOut.flush();
fOut.close();
} catch (Exception e) {
e.printStackTrace();
}
}
I think a good solution would be to take a picture with the camera, put it in an ImageView under my mask, and then take the screenshot. But the problem is taking the photo with the camera in the first place: with the preview on a SurfaceView I don't know how to do it.
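One way to take that photo with camera2 is to add an ImageReader surface to the capture session and issue a still-capture request against it. The sketch below is only an outline under that assumption (it is not a recorded answer): mImageReader and the two methods are new names, setupImageReader() would be called before configureCamera(), and the ImageReader's surface would also have to be added to the surface list passed to createCaptureSession().
// Sketch: capture a still JPEG via an ImageReader (android.media.ImageReader / Image,
// android.graphics.ImageFormat / BitmapFactory, java.nio.ByteBuffer).
private ImageReader mImageReader;

private void setupImageReader(int width, int height) {
    mImageReader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 2);
    mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
        @Override
        public void onImageAvailable(ImageReader reader) {
            Image image = reader.acquireLatestImage();
            if (image == null) return;
            ByteBuffer buffer = image.getPlanes()[0].getBuffer();
            byte[] jpeg = new byte[buffer.remaining()];
            buffer.get(jpeg);
            image.close();
            final Bitmap photo = BitmapFactory.decodeByteArray(jpeg, 0, jpeg.length);
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    // Show the photo under the mask, then the existing screenshot code can run.
                    img_view_screen.setImageBitmap(photo);
                }
            });
        }
    }, null);
}

private void takeStillPicture() {
    try {
        CaptureRequest.Builder builder =
                mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        builder.addTarget(mImageReader.getSurface());
        mCaptureSession.capture(builder.build(), null, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}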

Mobile Vision QR code launches the result intent multiple times

I made a QR scanner app using Google Mobile Vision. The app is simple: it scans a QR code, decodes it, delivers the value to a result class, and the result is shown in the result layout. The problem is that when I try to scan a QR code, the result class is somehow called multiple times. Here's my MainActivity code:
public class MainActivity extends AppCompatActivity {
private static final String TAG = "MainActivity";
SurfaceView surfaceView;
BarcodeDetector barcodeDetector;
CameraSource cameraSource;
final int RequestCameraID = 1001;
BoxDetector boxDetector;
@Override
protected void onStop() {
super.onStop();
Toast.makeText(getApplicationContext(),"Stop",Toast.LENGTH_LONG).show();
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
surfaceView = findViewById(R.id.cameraView);
surfaceView.setZOrderMediaOverlay(true);
if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, PERMISSION_GRANTED);
}
Casting();
}
private void Casting() {
barcodeDetector = new BarcodeDetector.Builder(this)
.setBarcodeFormats(Barcode.QR_CODE)
.build();
boxDetector = new BoxDetector(barcodeDetector, 300, 300);
if (!barcodeDetector.isOperational()) {
Toast.makeText(MainActivity.this, "Sorry couldn't setup the detector", Toast.LENGTH_LONG).show();
}
cameraSource = new CameraSource.Builder(MainActivity.this, boxDetector)
.setFacing(CameraSource.CAMERA_FACING_BACK)
.setRequestedFps(30)
.setAutoFocusEnabled(true)
.setRequestedPreviewSize(1280, 720)
.build();
surfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder holder) {
if (ActivityCompat.checkSelfPermission(MainActivity.this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(MainActivity.this, new String[]{Manifest.permission.CAMERA},RequestCameraID);
return;
}
try {
cameraSource.start(surfaceView.getHolder());
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { }
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
cameraSource.stop();
}
});
boxDetector.setProcessor(new Detector.Processor<Barcode>() {
@Override
public void release() {
}
@Override
public void receiveDetections(Detector.Detections<Barcode> detections) {
final SparseArray<Barcode> barcodeSparseArray = detections.getDetectedItems();
if(barcodeSparseArray.size()!=0){
Handler handler = new Handler(Looper.getMainLooper());
handler.post(new Runnable() {
@Override
public void run() {
cameraSource.release();
barcodeDetector.release();
Intent intent = new Intent(MainActivity.this, ResultActivity.class);
intent.putExtra("Result",barcodeSparseArray.valueAt(0).displayValue);
Casting();
startActivity(intent);
}
});
}
}
});
}
public void cS() {
barcodeDetector = new BarcodeDetector.Builder(this)
.setBarcodeFormats(Barcode.QR_CODE)
.build();
boxDetector = new BoxDetector(barcodeDetector, 300, 300);
if (!barcodeDetector.isOperational()) {
Toast.makeText(MainActivity.this, "Sorry couldn't setup the detector", Toast.LENGTH_LONG).show();
}
cameraSource = new CameraSource.Builder(MainActivity.this, boxDetector)
.setFacing(CameraSource.CAMERA_FACING_BACK)
.setRequestedFps(30)
.setAutoFocusEnabled(true)
.setRequestedPreviewSize(1280, 720)
.build();
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
switch (requestCode) {
case RequestCameraID: {
if (grantResults[0] == PackageManager.PERMISSION_GRANTED) {
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
return;
}
try {
cameraSource.start(surfaceView.getHolder());
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}}
The BoxDetector code is:
public class BoxDetector extends Detector {
private Detector mDelegate;
private int mBoxWidth, mBoxHeight;
BoxDetector(Detector delegate, int boxWidth, int boxHeight) {
mDelegate = delegate;
mBoxWidth = boxWidth;
mBoxHeight = boxHeight;
}
public SparseArray detect(Frame frame) {
int width = frame.getMetadata().getWidth();
int height = frame.getMetadata().getHeight();
int right = ((width / 2) + (mBoxHeight / 2)) -150 ;
int left = ((width / 2) - (mBoxHeight / 2)) - 150;
int bottom = ((height / 2) + (mBoxWidth / 2));
int top = ((height / 2) - ((mBoxWidth) / 2));
YuvImage yuvImage = new YuvImage(frame.getGrayscaleImageData().array(), ImageFormat.NV21, width, height, null);
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
yuvImage.compressToJpeg(new Rect(left, top, right, bottom), 100, byteArrayOutputStream);
byte[] jpegArray = byteArrayOutputStream.toByteArray();
Bitmap bitmap = BitmapFactory.decodeByteArray(jpegArray, 0, jpegArray.length);
Frame croppedFrame =
new Frame.Builder()
.setBitmap(bitmap)
.setRotation(frame.getMetadata().getRotation())
.build();
return mDelegate.detect(croppedFrame);
}
public void run(){
try {
Thread.sleep(300);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
public boolean setFocus(int id) {
return mDelegate.setFocus(id);
}
}
That's it. Thank you!
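For reference, one common cause of this behaviour is that receiveDetections() fires for every processed frame while the detector is still attached, so the result intent is launched once per frame. A minimal sketch of a guard flag (an assumption, not the poster's code) could look like this; isHandlingResult would be a new field in MainActivity.
// Sketch: launch ResultActivity only once per scan by guarding with a flag.
private volatile boolean isHandlingResult = false;

@Override
public void receiveDetections(Detector.Detections<Barcode> detections) {
    final SparseArray<Barcode> barcodes = detections.getDetectedItems();
    if (barcodes.size() != 0 && !isHandlingResult) {
        isHandlingResult = true; // ignore further frames until scanning is restarted
        new Handler(Looper.getMainLooper()).post(new Runnable() {
            @Override
            public void run() {
                cameraSource.stop(); // stop feeding frames before leaving the screen
                Intent intent = new Intent(MainActivity.this, ResultActivity.class);
                intent.putExtra("Result", barcodes.valueAt(0).displayValue);
                startActivity(intent);
            }
        });
    }
}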

Why is the video stretched when recording with the camera2 API?

I am building a video camera app that is always in landscape mode and always records from the front camera.
But when I record, the video looks stretched. How do I solve this?
Where am I making a mistake?
public class MainActivity extends AppCompatActivity {
public static final int REQUEST_CAMERA_PERMISSION_RESULT = 0;
public static final int REQUEST_WRITE_EXTERNAL_STORAGE_PERMISSION_RESULT = 1;
private TextureView mTextureView;
//public ImageView mImageView;
private TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
//Toast.makeText(getApplicationContext(),"TextureView is Available",Toast.LENGTH_SHORT).show();
setupCamera(width, height);
connectCamera();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int width, int height) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
}
};
private CameraDevice mCameraDevice;
private CameraDevice.StateCallback mCameraDeviceStateCallback;
{
mCameraDeviceStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
mCameraDevice = camera;
mMediaRecorder = new MediaRecorder();
if (mIsRecording) {
try {
createVideoFileName();
} catch (IOException e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
startRecord();
mMediaRecorder.start();
runOnUiThread(new Runnable() {
@Override
public void run() {
mChronometer.setBase(SystemClock.elapsedRealtime());
mChronometer.setVisibility(View.VISIBLE);
mChronometer.start();
}
});
} else {
startPreview();
}
//Toast.makeText(getApplicationContext(),"Camera connected",Toast.LENGTH_SHORT).show();
}
@Override
public void onDisconnected(CameraDevice camera) {
camera.close();
mCameraDevice = null;
}
@Override
public void onError(CameraDevice camera, int i) {
}
};
}
private HandlerThread mBackgroundHandlerThread;
private Handler mBackgroundHandler;
private String mCameraId;
private Size mPreviewSize;
private Size mVideoSize;
private MediaRecorder mMediaRecorder;
private Chronometer mChronometer;
private ImageView thumb;
//private String V1, V2, V3, V4, V5;
// private Map<String, String> mapA = new HashMap<>();
// private ImageView[] IMGS = {mImageView1, mImageView2, mImageView3, mImageView4, mImageView5};
private int mTotalRotation;
private CaptureRequest.Builder mCaptureRequestBuilder;
public static int count;
public static int max = 5;
private ImageButton mRecordImageButton;
private boolean mIsRecording = false;
public static File mVideoFolder;
private static File mRawVideoFolder;
public static String mVideoFileName;
//Test
private List<Bitmap> bitMapsAvailable = new ArrayList<>();
private List<String> bitMapsFilePath = new ArrayList<>();
private int bitMapIndex;
CameraCaptureSession storedSession;
private ArrayAdapter bitMapAdapter;
private ArrayAdapter bitMapFileAdapter;
private static SparseIntArray ORIENTATIONS = new SparseIntArray();
public int index;
static {
ORIENTATIONS.append(Surface.ROTATION_0, 0);
ORIENTATIONS.append(Surface.ROTATION_90, 90);
ORIENTATIONS.append(Surface.ROTATION_180, 180);
ORIENTATIONS.append(Surface.ROTATION_270, 270);
}
private static class CompareSizeByArea implements Comparator<Size> {
@Override
public int compare(Size lhs, Size rhs) {
return Long.signum((long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
//setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
createVideoFolder();
mMediaRecorder = new MediaRecorder();
mChronometer = (Chronometer) findViewById(R.id.chronometer);
mTextureView = (TextureView) findViewById(R.id.textureView);
mRecordImageButton = (ImageButton) findViewById(R.id.videoButton);
mRecordImageButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (mIsRecording) {
mChronometer.stop();
mChronometer.setVisibility(View.INVISIBLE);
mIsRecording = false;
mRecordImageButton.setImageResource(R.mipmap.start_recording);
//Toast.makeText(getApplicationContext(),"Started",Toast.LENGTH_SHORT).show();
if(storedSession != null){
try {
storedSession.stopRepeating();
storedSession.abortCaptures();
}catch (CameraAccessException e){
throw new RuntimeException(e.getMessage());
}
}
mMediaRecorder.stop();
mMediaRecorder.reset();
startPreview();
//Create bitmap with current video file path
Bitmap bitMap = ThumbnailUtils.createVideoThumbnail(mVideoFileName, MediaStore.Video.Thumbnails.MICRO_KIND);
//Add bitmap to array list
bitMapsAvailable.add(bitMap);
bitMapsFilePath.add(mVideoFileName);
// Shows thumbnails
showThumbnails();
} else {
checkWriteStoragePermission();
}
}
});
}
private void showThumbnails() {
LinearLayout layout = (LinearLayout) findViewById(R.id.thumbnails);
bitMapAdapter = new ArrayAdapter(this, R.layout.activity_main, bitMapsAvailable);
bitMapFileAdapter = new ArrayAdapter(this, R.layout.activity_main, bitMapsFilePath);
bitMapIndex = 0;
if (layout.getChildCount() > 0) {
layout.removeAllViews();
}
for (Bitmap eachBitMap : bitMapsAvailable) {
bitMapIndex++;
ImageView thumb = new ImageView(this);
thumb.setId(new Integer(bitMapIndex+ 17));
thumb.setLayoutParams(new android.view.ViewGroup.LayoutParams(100, 80));
thumb.setImageBitmap(eachBitMap);
// Adds the view to the layout
thumb.setOnClickListener(previewThumb(thumb));
layout.addView(thumb);
}
}
View.OnClickListener previewThumb(final ImageView imageview) {
return new View.OnClickListener() {
public void onClick(View arg0) {
index = imageview.getId()-18;
imageview.setBackgroundColor(0xff999999);
// Start NewActivity.class
Intent myIntent = new Intent(MainActivity.this,
VideoViewActivity.class);
Bundle bundle = new Bundle();
bundle.putStringArrayList("bitMapsAvailable", new ArrayList(bitMapsAvailable));
bundle.putStringArrayList("bitMapsFilePath", new ArrayList(bitMapsFilePath));
//Add your data to bundle
bundle.putInt("urlIndex", index);
myIntent.putExtras(bundle);
bitMapAdapter.notifyDataSetChanged();
bitMapFileAdapter.notifyDataSetChanged();
// startActivity(myIntent);
startActivityForResult(myIntent, 111);
}
};
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (resultCode == RESULT_OK) {
if (requestCode == 111) {
int indexToBeDeleted = data.getIntExtra("indexToBeDeleted",index);
//bitMapsFilePath.remove(indexToBeDeleted);
bitMapsAvailable.remove(indexToBeDeleted);
}
}
}
@Override
protected void onResume() {
super.onResume();
startBackgroundThread();
if (mTextureView.isAvailable()) {
setupCamera(mTextureView.getWidth(), mTextureView.getHeight());
connectCamera();
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
@Override
public void onRequestPermissionsResult(int requestCode, String[] permission, int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permission, grantResults);
if (requestCode == REQUEST_CAMERA_PERMISSION_RESULT) {
if (grantResults[0] != PackageManager.PERMISSION_GRANTED) {
Toast.makeText(getApplicationContext(), "Application will not run without camera service", Toast.LENGTH_SHORT).show();
}
if (grantResults[0] != PackageManager.PERMISSION_GRANTED) {
Toast.makeText(getApplicationContext(), "Application will not have audio on record ", Toast.LENGTH_SHORT).show();
}
}
if (requestCode == REQUEST_WRITE_EXTERNAL_STORAGE_PERMISSION_RESULT) {
if (grantResults[0] == PackageManager.PERMISSION_GRANTED) {
mIsRecording = true;
mRecordImageButton.setImageResource(R.mipmap.ic_launcher);
try {
createVideoFileName();
} catch (IOException e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
Toast.makeText(this, "Permission Successfully Granted", Toast.LENGTH_SHORT).show();
} else {
Toast.makeText(this, "App needs to save video to run", Toast.LENGTH_SHORT).show();
}
}
}
@Override
protected void onPause() {
closeCamera();
stopBackgroundThread();
super.onPause();
}
@Override
public void onWindowFocusChanged(boolean hasFocus) {
super.onWindowFocusChanged(hasFocus);
View decorView = getWindow().getDecorView();
if (hasFocus) {
decorView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_STABLE
| View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY
| View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
| View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_FULLSCREEN
| View.SYSTEM_UI_FLAG_HIDE_NAVIGATION);
}
}
private void setupCamera(int width, int height) {
CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
for (String cameraId : cameraManager.getCameraIdList()) {
CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
if (cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_BACK) {
continue;
}
StreamConfigurationMap map = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
//int deviceOrientation = getWindowManager().getDefaultDisplay().getRotation();
mTotalRotation = sensorToDeviceRotation(cameraCharacteristics);
boolean swapRotation = mTotalRotation == 90 || mTotalRotation == 270;
int rotateWidth = width;
int rotateHeight = height;
if (swapRotation) {
rotateWidth = height;
rotateHeight = width;
}
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotateWidth, rotateHeight);
mVideoSize = chooseOptimalSize(map.getOutputSizes(MediaRecorder.class), rotateWidth, rotateHeight);
mCameraId = cameraId;
return;
}
} catch (CameraAccessException e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
}
private void connectCamera() {
CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED) {
cameraManager.openCamera(mCameraId, mCameraDeviceStateCallback, mBackgroundHandler);
} else {
if (shouldShowRequestPermissionRationale(Manifest.permission.CAMERA)) {
Toast.makeText(this, "Video app required access to camera", Toast.LENGTH_SHORT).show();
}
requestPermissions(new String[]{android.Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO}, REQUEST_CAMERA_PERMISSION_RESULT);
}
} else {
cameraManager.openCamera(mCameraId, mCameraDeviceStateCallback, mBackgroundHandler);
}
} catch (CameraAccessException e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
}
private void startRecord() {
if (this.bitMapsAvailable.size() < max) {
try {
setupMediaRecorder();
SurfaceTexture surfaceTexture = mTextureView.getSurfaceTexture();
surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface previewSurface = new Surface(surfaceTexture);
Surface recordSurface = mMediaRecorder.getSurface();
mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
mCaptureRequestBuilder.addTarget(previewSurface);
mCaptureRequestBuilder.addTarget(recordSurface);
mCameraDevice.createCaptureSession(Arrays.asList(previewSurface, recordSurface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
session.setRepeatingRequest(mCaptureRequestBuilder.build(), null, null);
storedSession = session;
} catch (CameraAccessException e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
}
}, null);
} catch (Exception e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
} else {
}
}
private void startPreview() {
SurfaceTexture surfaceTexture = mTextureView.getSurfaceTexture();
surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface previewSurface = new Surface(surfaceTexture);
try {
mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mCaptureRequestBuilder.addTarget(previewSurface);
mCameraDevice.createCaptureSession(Arrays.asList(previewSurface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
session.stopRepeating();
session.abortCaptures();
session.setRepeatingRequest(mCaptureRequestBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
Toast.makeText(getApplicationContext(), "Unable to setup Camera Preview", Toast.LENGTH_SHORT).show();
}
}, null);
} catch (CameraAccessException e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
}
private void closeCamera() {
if (mCameraDevice != null) {
mCameraDevice.close();
mCameraDevice = null;
}
}
private void startBackgroundThread() {
mBackgroundHandlerThread = new HandlerThread("AuthorTV");
mBackgroundHandlerThread.start();
mBackgroundHandler = new Handler(mBackgroundHandlerThread.getLooper());
}
private void stopBackgroundThread() {
mBackgroundHandlerThread.quitSafely();
try {
mBackgroundHandlerThread.join();
mBackgroundHandlerThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
}
private static int sensorToDeviceRotation(CameraCharacteristics cameraCharacteristics) {
int sensorOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
// deviceOrientation = ORIENTATIONS.get(deviceOrientation);
return (sensorOrientation + 180 + 360) % 360;
}
private static Size chooseOptimalSize(Size[] choices, int width, int height) {
List<Size> bigEnough = new ArrayList<Size>();
for (Size option : choices) {
if (option.getHeight() == option.getWidth() * height / width && option.getWidth() >= width && option.getHeight() >= height) {
bigEnough.add(option);
}
}
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizeByArea());
} else {
return choices[0];
}
}
private void setupMediaRecorder() throws IOException {
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mMediaRecorder.setOutputFile(mVideoFileName);
mMediaRecorder.setVideoEncodingBitRate(1000000);
mMediaRecorder.setVideoFrameRate(30);
mMediaRecorder.setVideoSize(mVideoSize.getWidth(), mVideoSize.getHeight());
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
mMediaRecorder.setOrientationHint(mTotalRotation);
mMediaRecorder.prepare();
}
chooseOptimalSize needs to be adjusted, and it should pick different sizes for mPreviewSize and mVideoSize. For reference, check Camera2VideoFragment.java in the official Camera2Video sample.
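As a rough sketch of that adjustment (adapted in spirit from the sample, not code from this page): pick the video size first, then pick a preview size whose aspect ratio matches it, so the TextureView is not fed frames with a different ratio.
// Sketch: choose a preview size that matches the aspect ratio of the chosen video size.
// CompareSizeByArea is the comparator already defined in the activity above.
private static Size chooseOptimalSize(Size[] choices, int width, int height, Size aspectRatio) {
    List<Size> bigEnough = new ArrayList<Size>();
    int w = aspectRatio.getWidth();
    int h = aspectRatio.getHeight();
    for (Size option : choices) {
        if (option.getHeight() == option.getWidth() * h / w
                && option.getWidth() >= width && option.getHeight() >= height) {
            bigEnough.add(option);
        }
    }
    if (bigEnough.size() > 0) {
        return Collections.min(bigEnough, new CompareSizeByArea());
    }
    return choices[0];
}

// In setupCamera(): choose mVideoSize first (e.g. with the sample's chooseVideoSize()),
// then derive the preview size from it:
// mVideoSize   = chooseVideoSize(map.getOutputSizes(MediaRecorder.class));
// mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
//         rotateWidth, rotateHeight, mVideoSize);
Even with matching sizes, the TextureView itself has to be laid out with the same aspect ratio (the sample uses an AutoFitTextureView for this), otherwise the preview can still appear stretched.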

Lacking privileges to access camera service in Android 6.0

I am using the Camera2 API on Android 6.0. The same code worked without errors on Android 5.0. However, on Android 6.0 I have an issue: sometimes I can open the camera successfully and take a picture, but sometimes the camera cannot open and I get this error:
java.lang.SecurityException: Lacking privileges to access camera service
at android.hardware.camera2.utils.CameraBinderDecorator.throwOnError(CameraBinderDecorator.java:108)
I added the runtime permission check as follows:
String[] PERMISSIONS = {Manifest.permission.READ_EXTERNAL_STORAGE, Manifest.permission.WRITE_EXTERNAL_STORAGE,Manifest.permission.CAMERA};
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M){
if(!hasAllPermissions(this, PERMISSIONS)){
ActivityCompat.requestPermissions(this, PERMISSIONS, PERMISSION_ALL);
}
}
public static boolean hasAllPermissions(Context context, String... permissions) {
if (android.os.Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && context != null && permissions != null) {
for (String permission : permissions) {
if (ActivityCompat.checkSelfPermission(context, permission) != PackageManager.PERMISSION_GRANTED) {
return false;
}
}
}
return true;
}
This is the full log:
FATAL EXCEPTION: main
Process: com.example.camera2api, PID: 5376
java.lang.SecurityException: Lacking privileges to access camera service
at android.hardware.camera2.utils.CameraBinderDecorator.throwOnError(CameraBinderDecorator.java:108)
at android.hardware.camera2.utils.CameraBinderDecorator$CameraBinderDecoratorListener.onAfterInvocation(CameraBinderDecorator.java:73)
at android.hardware.camera2.utils.Decorator.invoke(Decorator.java:81)
at java.lang.reflect.Proxy.invoke(Proxy.java:393)
at $Proxy2.cancelRequest(Unknown Source)
at android.hardware.camera2.impl.CameraDeviceImpl.stopRepeating(CameraDeviceImpl.java:926)
at android.hardware.camera2.impl.CameraCaptureSessionImpl.close(CameraCaptureSessionImpl.java:378)
at android.hardware.camera2.impl.CameraCaptureSessionImpl$2.onDisconnected(CameraCaptureSessionImpl.java:514)
at android.hardware.camera2.impl.CameraDeviceImpl$7.run(CameraDeviceImpl.java:228)
at android.os.Handler.handleCallback(Handler.java:739)
at android.os.Handler.dispatchMessage(Handler.java:95)
at android.os.Looper.loop(Looper.java:158)
at android.app.ActivityThread.main(ActivityThread.java:7229)
at java.lang.reflect.Method.invoke(Native Method)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1230)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1120)
Update: for anyone who wants to look at my full code, I have uploaded it here:
public class AndroidCamera extends AppCompatActivity {
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
private static final String TAG = "Camera2App";
private String mImageFileLocation = "";
private static final int STATE_PREVIEW = 0;
private static final int STATE_WAIT_LOCK = 1;
private static final int STATE_WAITING_PRECAPTURE = 2;
/**
* Camera state: Waiting for the exposure state to be something other than precapture.
*/
private static final int STATE_WAITING_NON_PRECAPTURE = 3;
/**
* Camera state: Picture was taken.
*/
private static final int STATE_PICTURE_TAKEN = 4;
private int mState;
private TextureView mTextureView;
private Size mPreviewSize;
private String mCameraId;
String[] PERMISSIONS = {Manifest.permission.READ_EXTERNAL_STORAGE, Manifest.permission.WRITE_EXTERNAL_STORAGE,Manifest.permission.CAMERA};
private static final int PERMISSION_ALL = 105;
private static final int REQUEST_CAMERA_RESULT = 106;
private boolean isRegistred=false;
private int mSensorOrientation;
private TextureView.SurfaceTextureListener mSurfaceTextureListener =
new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
if (!TextUtils.isEmpty(BleUtils.getCameraLens(AndroidCamera.this)))
setupCamera(width, height,BleUtils.getCameraLens(AndroidCamera.this));
else
setupCamera(width, height,"1");
openCamera();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
//closeCamera();
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
};
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
private CameraDevice mCameraDevice;
private CameraDevice.StateCallback mCameraDeviceStateCallback =
new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
mCameraOpenCloseLock.release();
mCameraDevice = camera;
//Toast.makeText(getApplicationContext(),"Camera Opened!", Toast.LENGTH_SHORT).show();
createCameraPreviewSession();
}
@Override
public void onDisconnected(CameraDevice camera) {
camera.close();
mCameraDevice = null;
}
@Override
public void onError(CameraDevice camera, int error) {
camera.close();
mCameraDevice = null;
}
};
private CaptureRequest mPreviewCaptureRequest;
private CaptureRequest.Builder mPreviewCaptureRequestBuilder;
private CameraCaptureSession mCameraCaptureSession;
private CameraCaptureSession.CaptureCallback mSessionCaptureCallback
= new CameraCaptureSession.CaptureCallback() {
private void process(CaptureResult result){
switch (mState){
case STATE_PREVIEW:
break;
case STATE_WAIT_LOCK:
Integer afState=result.get(CaptureResult.CONTROL_AF_STATE);
if(afState==CaptureRequest.CONTROL_AF_STATE_FOCUSED_LOCKED){
captureStillImage();
}
else{
captureStillImage();
}
break;
}
}
@Override
public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
super.onCaptureStarted(session, request, timestamp, frameNumber);
}
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
process(result);
}
@Override
public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
super.onCaptureFailed(session, request, failure);
Handler mHandler = new Handler(getMainLooper());
mHandler.post(new Runnable() {
@Override
public void run() {
Toast.makeText(getApplicationContext(), "Focus Lock UnSuccesful", Toast.LENGTH_SHORT).show();
}
});
}
};
private HandlerThread mBackgroundThread;
private Handler mBackgroundHandler;
private static File mImageFile;
private ImageReader mImageReader;
private final ImageReader.OnImageAvailableListener mOnImageAvailableListener =
new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
mBackgroundHandler.post(new ImageSaver(reader.acquireNextImage()));
}
};
private static class ImageSaver implements Runnable {
private final Image mImage;
private ImageSaver(Image image) {
mImage = image;
}
@Override
public void run() {
ByteBuffer byteBuffer = mImage.getPlanes()[0].getBuffer();
byte[] bytes = new byte[byteBuffer.remaining()];
byteBuffer.get(bytes);
FileOutputStream fileOutputStream = null;
try {
fileOutputStream = new FileOutputStream(mImageFile);
fileOutputStream.write(bytes);
} catch (IOException e) {
e.printStackTrace();
} finally {
mImage.close();
if (fileOutputStream != null) {
try {
fileOutputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.camera_activity);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M){
if(!hasAllPermissions(this, PERMISSIONS)){
ActivityCompat.requestPermissions(this, PERMISSIONS, PERMISSION_ALL);
}
}
mTextureView = (TextureView) findViewById(R.id.texture);
}
public static boolean hasAllPermissions(Context context, String... permissions) {
if (android.os.Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && context != null && permissions != null) {
for (String permission : permissions) {
if (ActivityCompat.checkSelfPermission(context, permission) != PackageManager.PERMISSION_GRANTED) {
return false;
}
}
}
return true;
}
@Override
public void onStart() {
super.onStart();
if (!EventBus.getDefault().isRegistered(this)) {
EventBus.getDefault().register(this);
}
}
//onStop
@Override
public void onStop() {
super.onStop();
EventBus.getDefault().unregister(this);
}
@Subscribe
public void onCaptureNumberReceived(OnCaptureEvent event) {
//get the phone number value here and do something with it
String capturecode = event.getCodeCapture();
Log.d(TAG, capturecode);
if (capturecode.equals("capture")) {
try {
mImageFile = createImageFile();
} catch (IOException e) {
e.printStackTrace();
}
lockFocus();
MediaActionSound sound = new MediaActionSound();
sound.play(MediaActionSound.SHUTTER_CLICK);
}
else if(capturecode.equals("end_capture")) {
finish(); // call this to finish the current activity
Intent homeIntent = new Intent(Intent.ACTION_MAIN);
homeIntent.addCategory( Intent.CATEGORY_HOME );
homeIntent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
startActivity(homeIntent);
}
}
public void takepicture(View view) {
try {
mImageFile = createImageFile();
Log.d("TAG","=====Take picture=====");
} catch (IOException e) {
e.printStackTrace();
}
lockFocus();
MediaActionSound sound = new MediaActionSound();
sound.play(MediaActionSound.SHUTTER_CLICK);
}
public void switch_camera(View view) {
closeCamera();
//swap the id of the camera to be used
if(mCameraId == String.valueOf(Camera.CameraInfo.CAMERA_FACING_BACK)){
mCameraId = String.valueOf(Camera.CameraInfo.CAMERA_FACING_FRONT);
}
else {
mCameraId = String.valueOf(Camera.CameraInfo.CAMERA_FACING_BACK);
}
BleUtils.setCameraLens(this, mCameraId);
if (mTextureView.isAvailable()) {
setupCamera(mTextureView.getWidth(), mTextureView.getHeight(),mCameraId);
openCamera();
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
File createImageFile() throws IOException {
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
String imageFileName = "BLE_" + timeStamp + "_";
File storageDirectory = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES);
if(!storageDirectory.exists()){
if(!storageDirectory.mkdirs()){
Log.e("Dir", "Failed to create directory");
Log.d("MAKE DIR", storageDirectory.mkdir() + "" + storageDirectory.getParentFile() + "");
return null;
}
}
File image = File.createTempFile(imageFileName, ".jpg", storageDirectory);
mImageFileLocation = image.getAbsolutePath();
return image;
}
@Override
public void onResume() {
super.onResume();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M){
if(!hasAllPermissions(this, PERMISSIONS)){
ActivityCompat.requestPermissions(this, PERMISSIONS, PERMISSION_ALL);
}
}
openBackgroundThread();
if (mTextureView.isAvailable()) {
if (!TextUtils.isEmpty(BleUtils.getCameraLens(AndroidCamera.this)))
setupCamera(mTextureView.getWidth(), mTextureView.getHeight(),BleUtils.getCameraLens(AndroidCamera.this));
else
setupCamera(mTextureView.getWidth(), mTextureView.getHeight(),"1");
closeCamera();
openCamera();
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
@Override
public void onDestroy() {
super.onDestroy();
Log.d(TAG,"onDestroy");
}
public void onPause() {
Log.d(TAG,"onPause");
closeCamera();
closeBackgroundThread();
super.onPause();
}
private void setupCamera(int width, int height, String cameraId) {
CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size largestImageSize = Collections.max(
Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
new Comparator<Size>() {
@Override
public int compare(Size lhs, Size rhs) {
return Long.signum(lhs.getWidth() * lhs.getHeight() -
rhs.getWidth() * rhs.getHeight());
}
}
);
mImageReader = ImageReader.newInstance(largestImageSize.getWidth(),
largestImageSize.getHeight(),
ImageFormat.JPEG,
1);
mImageReader.setOnImageAvailableListener(mOnImageAvailableListener,
mBackgroundHandler);
mPreviewSize = getPreferredPreviewSize(map.getOutputSizes(SurfaceTexture.class), width, height);
mCameraId = cameraId;
Log.d("CAMERA_ID",String.valueOf(mCameraId));
// }
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private Size getPreferredPreviewSize(Size[] mapSizes, int width, int height) {
List<Size> collectorSizes = new ArrayList<>();
for (Size option : mapSizes) {
if (width > height) {
if (option.getWidth() > width &&
option.getHeight() > height) {
collectorSizes.add(option);
}
} else {
if (option.getWidth() > height &&
option.getHeight() > width) {
collectorSizes.add(option);
}
}
}
if (collectorSizes.size() > 0) {
return Collections.min(collectorSizes, new Comparator<Size>() {
@Override
public int compare(Size lhs, Size rhs) {
return Long.signum(lhs.getWidth() * lhs.getHeight() - rhs.getWidth() * rhs.getHeight());
}
});
}
return mapSizes[0];
}
private void openCamera() {
CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
Log.v("CAMERA", mCameraId + " " + mCameraDeviceStateCallback);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M){
if(ContextCompat.checkSelfPermission(this, android.Manifest.permission.CAMERA)
== PackageManager.PERMISSION_GRANTED){
cameraManager.openCamera(mCameraId, mCameraDeviceStateCallback,mBackgroundHandler);
}
else {
if (shouldShowRequestPermissionRationale(android.Manifest.permission.CAMERA)){
Toast.makeText(this,"No Permission to use the Camera services", Toast.LENGTH_SHORT).show();
}
requestPermissions(new String[] {android.Manifest.permission.CAMERA},REQUEST_CAMERA_RESULT);
}
}
else {
cameraManager.openCamera(mCameraId, mCameraDeviceStateCallback, mBackgroundHandler);
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
switch (requestCode){
case REQUEST_CAMERA_RESULT:
if (grantResults[0] != PackageManager.PERMISSION_GRANTED){
Toast.makeText(this, "Cannot run application because camera service permission have not been granted", Toast.LENGTH_SHORT).show();
}
break;
default:
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
break;
}
}
private void closeCamera(){
if(mCameraCaptureSession!=null){
mCameraCaptureSession.close();
mCameraCaptureSession=null;
}
if (mCameraDevice!=null){
mCameraDevice.close();
mCameraDevice=null;
if(mImageReader!=null){
mImageReader.close();
mImageReader=null;
}
}
}
private void createCameraPreviewSession(){
try{
SurfaceTexture surfaceTexture = mTextureView.getSurfaceTexture();
surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(),mPreviewSize.getHeight());
Surface previewSurface= new Surface(surfaceTexture);
mPreviewCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewCaptureRequestBuilder.addTarget(previewSurface);
mPreviewCaptureRequestBuilder.set(CaptureRequest.JPEG_QUALITY, (byte)100);
mCameraDevice.createCaptureSession(Arrays.asList(previewSurface,mImageReader.getSurface()),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
if(mCameraDevice==null){
return;
}
try {
mPreviewCaptureRequest = mPreviewCaptureRequestBuilder.build();
mCameraCaptureSession = session;
mCameraCaptureSession.setRepeatingRequest(
mPreviewCaptureRequest,
mSessionCaptureCallback,
mBackgroundHandler
);
}catch (CameraAccessException e){
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
Handler mHandler = new Handler(getMainLooper());
mHandler.post(new Runnable() {
@Override
public void run() {
Toast.makeText(
getApplicationContext(),
"create camera session failed!",
Toast.LENGTH_SHORT
).show();
}
});
}
},null);
}catch (CameraAccessException e){
e.printStackTrace();
}
}
private void openBackgroundThread(){
mBackgroundThread=new HandlerThread("Camera2 background thread");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
private void closeBackgroundThread(){
mBackgroundThread.quitSafely();
try{
mBackgroundThread.join();
mBackgroundThread=null;
mBackgroundHandler=null;
}catch (InterruptedException e){
e.printStackTrace();
}
}
private void lockFocus(){
try{
mState=STATE_WAIT_LOCK;
mPreviewCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
CaptureRequest.CONTROL_AF_TRIGGER_START);
mCameraCaptureSession.capture(mPreviewCaptureRequestBuilder.build(),
mSessionCaptureCallback,mBackgroundHandler);
}catch (CameraAccessException e){
e.printStackTrace();
}
}
private void unLockFocus(){
try{
mState=STATE_PREVIEW;
mPreviewCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
CaptureRequest.CONTROL_AF_TRIGGER_CANCEL);
mCameraCaptureSession.capture(mPreviewCaptureRequestBuilder.build(),
mSessionCaptureCallback,mBackgroundHandler);
}catch (CameraAccessException e){
e.printStackTrace();
}
}
private void captureStillImage(){
try {
CaptureRequest.Builder captureStillBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureStillBuilder.addTarget(mImageReader.getSurface());
// Use the same AE and AF modes as the preview.
captureStillBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
// setAutoFlash(captureBuilder);
// Orientation
int rotation=0;
//Front camera
if(mCameraId.equals("1")) {
rotation = this.getWindowManager().getDefaultDisplay().getRotation();
captureStillBuilder.set(CaptureRequest.JPEG_ORIENTATION, getOrientation(rotation));
}
else {
rotation = this.getWindowManager().getDefaultDisplay().getRotation();
captureStillBuilder.set(CaptureRequest.JPEG_ORIENTATION,
ORIENTATIONS.get(rotation));
}
CameraCaptureSession.CaptureCallback captureCallback =
new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
//Toast.makeText(getApplicationContext(),"Image Captured",Toast.LENGTH_SHORT).show();
unLockFocus();
}
};
mCameraCaptureSession.capture(
captureStillBuilder.build(),captureCallback,null
);
}catch (CameraAccessException e){
e.printStackTrace();
}
}
private int getOrientation(int rotation) {
return (ORIENTATIONS.get(rotation) + mSensorOrientation +180) % 360;
}
}
setupCamera() is called right from onSurfaceTextureAvailable(), which can happen before the permissions are granted.
What you need to do is track, in both callbacks, whether the permissions have been granted and whether the surface texture is available.
Make a single entry point that checks these conditions and sets up the camera:
private boolean mSurfaceTextureAvailable;
private boolean mPermissionsGranted;
private boolean mCameraOpened;
private void setupCameraIfPossible() {
if (!mCameraOpened && mSurfaceTextureAvailable && mPermissionsGranted) {
String cameraLens = BleUtils.getCameraLens(AndroidCamera.this);
if (TextUtils.isEmpty(cameraLens)) {
cameraLens = "1";
}
setupCamera(mTextureView.getWidth(), mTextureView.getHeight(), cameraLens);
openCamera();
}
}
private final TextureView.SurfaceTextureListener mSurfaceTextureListener =
new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
mSurfaceTextureAvailable = true;
setupCameraIfPossible();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
//closeCamera();
mSurfaceTextureAvailable = false;
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.camera_activity);
openBackgroundThread();
// Make sure the boolean flag is set. Will be true for lower SDK
mPermissionsGranted = hasAllPermissions(this, PERMISSIONS);
if (!mPermissionsGranted) {
ActivityCompat.requestPermissions(this, PERMISSIONS, PERMISSION_ALL);
}
mTextureView = (TextureView) findViewById(R.id.texture);
}
In onResume() you don't need to check for permissions: if they are revoked while you are in the background, your Activity gets killed and you go through onCreate() again.
Remove the code in onResume() and onPause()!
// @Override
// public void onResume() {
// super.onResume();
// if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M){
// if(!hasAllPermissions(this, PERMISSIONS)){
// ActivityCompat.requestPermissions(this, PERMISSIONS, PERMISSION_ALL);
// }
// }
// openBackgroundThread();
// if (mTextureView.isAvailable()) {
// if (!TextUtils.isEmpty(BleUtils.getCameraLens(AndroidCamera.this)))
// setupCamera(mTextureView.getWidth(), mTextureView.getHeight(),BleUtils.getCameraLens(AndroidCamera.this));
// else
// setupCamera(mTextureView.getWidth(), mTextureView.getHeight(),"1");
// closeCamera();
// openCamera();
// } else {
// mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
// }
// }
// public void onPause() {
// Log.d(TAG,"onPause");
// closeCamera();
//
// closeBackgroundThread();
// super.onPause();
// }
Add this to onStart()
@Override
public void onStart() {
super.onStart();
setupCameraIfPossible();
}
Move closing the camera to onStop():
@Override
public void onStop() {
super.onStop();
closeCamera();
}
private void closeCamera() {
mCameraOpened = false; // set a field indicating it is closed
...
}
private void openCamera() {
...
mCameraOpened = true; // If successful, set a field indicating it is opened
}
Now another thing I discovered is that you must actually re-check the permissions in onRequestPermissionsResult() instead of relying on the grantResults flags:
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
switch (requestCode){
case REQUEST_CAMERA_RESULT:
mPermissionsGranted = hasAllPermissions(this, PERMISSIONS);
setupCameraIfPossible();
break;
default:
break;
}
}

How to show the camera preview on a SurfaceView?

I am trying to open the camera hardware on a SurfaceView. In the layout, I created a SurfaceView and I open the camera as shown in the code below. When I run the code, the toast in the CameraAvailableCB shows up and says "onCameraAvailable" but nothing appears on the SurfaceView.
How do I show the camera display on the SurfaceView?
Code
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
this.mBtnCapture = (Button) findViewById(R.id.actMain_btn_capture);
this.mSurfaceView = (SurfaceView) findViewById(R.id.actMain_surfaceView);
this.mSurfaceHolder = this.mSurfaceView.getHolder();
this.mSurfaceHolder.addCallback(this);
this.mCameraManager = (CameraManager) this.getSystemService(Context.CAMERA_SERVICE);
try {
cameraIDsList = this.mCameraManager.getCameraIdList();
for (String id : cameraIDsList) {
Log.v(TAG, "CameraID: " + id);
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
cameraStateCB = new CameraDevice.StateCallback() {
#Override
public void onOpened(CameraDevice camera) {
Toast.makeText(getApplicationContext(), "onOpened", Toast.LENGTH_SHORT).show();
//requesting permission
int permissionCheck = ContextCompat.checkSelfPermission(activity, Manifest.permission.CAMERA);
if (permissionCheck != PackageManager.PERMISSION_GRANTED) {
if (ActivityCompat.shouldShowRequestPermissionRationale(activity, Manifest.permission.CAMERA)) {
} else {
ActivityCompat.requestPermissions(activity, new String[]{Manifest.permission.CAMERA}, MY_PERMISSIONS_REQUEST_CAMERA);
Toast.makeText(getApplicationContext(), "request permission", Toast.LENGTH_SHORT).show();
}
} else {
Toast.makeText(getApplicationContext(), "PERMISSION_ALREADY_GRANTED", Toast.LENGTH_SHORT).show();
}
//opening the camera
try {
mCameraManager.openCamera(cameraIDsList[1], cameraStateCB, new Handler());
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
#Override
public void onDisconnected(CameraDevice camera) {
Toast.makeText(getApplicationContext(), "onDisconnected", Toast.LENGTH_SHORT).show();
}
#Override
public void onError(CameraDevice camera, int error) {
Toast.makeText(getApplicationContext(), "onError", Toast.LENGTH_SHORT).show();
}
};
CameraManager.AvailabilityCallback cameraAvailableCB = new CameraManager.AvailabilityCallback() {
#Override
public void onCameraAvailable(String cameraId) {
super.onCameraAvailable(cameraId);
Toast.makeText(getApplicationContext(), "onCameraAvailable", Toast.LENGTH_SHORT).show();
}
#Override
public void onCameraUnavailable(String cameraId) {
super.onCameraUnavailable(cameraId);
Toast.makeText(getApplicationContext(), "onCameraUnavailable", Toast.LENGTH_SHORT).show();
}
};
this.mCameraManager.registerAvailabilityCallback(cameraAvailableCB, new Handler());
}
#Override
public void onRequestPermissionsResult(int requestCode, #NonNull String[] permissions, #NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
switch (requestCode) {
case MY_PERMISSIONS_REQUEST_CAMERA:
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED)
// Open Camera
break;
}
}
#Override
public void surfaceCreated(SurfaceHolder holder) {
Log.w(TAG, "surfaceCreated");
}
#Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Log.w(TAG, "surfaceChanged");
}
#Override
public void surfaceDestroyed(SurfaceHolder holder) {
Log.w(TAG, "surfaceDestroyed");
}
To show a preview from the camera using the Camera2 API you should do the following steps:
1. Get permission to use the camera device.
2. Using CameraManager, open a connection to a camera.
3. Have a Surface prepared for the preview.
4. Using the opened camera device and the desired surfaces (they can include more than just the preview surface), create a CaptureSession.
5. Once the CaptureSession is created, create and configure a CaptureRequest and submit it to the CaptureSession.
Note that preparing the surfaces and opening the connection to the camera are independent processes, so you need to be sure that both have completed before creating the CaptureSession.
Here is an example of an activity that displays the camera preview on the screen:
public class MainActivity extends AppCompatActivity implements SurfaceHolder.Callback, Handler.Callback {
static final String TAG = "CamTest";
static final int MY_PERMISSIONS_REQUEST_CAMERA = 1242;
private static final int MSG_CAMERA_OPENED = 1;
private static final int MSG_SURFACE_READY = 2;
private final Handler mHandler = new Handler(this);
SurfaceView mSurfaceView;
SurfaceHolder mSurfaceHolder;
CameraManager mCameraManager;
String[] mCameraIDsList;
CameraDevice.StateCallback mCameraStateCB;
CameraDevice mCameraDevice;
CameraCaptureSession mCaptureSession;
boolean mSurfaceCreated = false; // set to true in surfaceChanged() once the surface is ready
boolean mIsCameraConfigured = false;
private Surface mCameraSurface = null;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
this.mSurfaceView = (SurfaceView) findViewById(R.id.SurfaceViewPreview);
this.mSurfaceHolder = this.mSurfaceView.getHolder();
this.mSurfaceHolder.addCallback(this);
this.mCameraManager = (CameraManager) this.getSystemService(Context.CAMERA_SERVICE);
try {
mCameraIDsList = this.mCameraManager.getCameraIdList();
for (String id : mCameraIDsList) {
Log.v(TAG, "CameraID: " + id);
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
mCameraStateCB = new CameraDevice.StateCallback() {
#Override
public void onOpened(CameraDevice camera) {
Toast.makeText(getApplicationContext(), "onOpened", Toast.LENGTH_SHORT).show();
mCameraDevice = camera;
mHandler.sendEmptyMessage(MSG_CAMERA_OPENED);
}
#Override
public void onDisconnected(CameraDevice camera) {
Toast.makeText(getApplicationContext(), "onDisconnected", Toast.LENGTH_SHORT).show();
}
#Override
public void onError(CameraDevice camera, int error) {
Toast.makeText(getApplicationContext(), "onError", Toast.LENGTH_SHORT).show();
}
};
}
#Override
protected void onStart() {
super.onStart();
//requesting permission
int permissionCheck = ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
if (permissionCheck != PackageManager.PERMISSION_GRANTED) {
if (ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.CAMERA)) {
} else {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, MY_PERMISSIONS_REQUEST_CAMERA);
Toast.makeText(getApplicationContext(), "request permission", Toast.LENGTH_SHORT).show();
}
} else {
Toast.makeText(getApplicationContext(), "PERMISSION_ALREADY_GRANTED", Toast.LENGTH_SHORT).show();
try {
mCameraManager.openCamera(mCameraIDsList[1], mCameraStateCB, new Handler());
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
}
#Override
protected void onStop() {
super.onStop();
try {
if (mCaptureSession != null) {
mCaptureSession.stopRepeating();
mCaptureSession.close();
mCaptureSession = null;
}
mIsCameraConfigured = false;
} catch (final CameraAccessException e) {
// Doesn't matter, closing device anyway
e.printStackTrace();
} catch (final IllegalStateException e2) {
// Doesn't matter, closing device anyway
e2.printStackTrace();
} finally {
if (mCameraDevice != null) {
mCameraDevice.close();
mCameraDevice = null;
mCaptureSession = null;
}
}
}
#Override
public boolean handleMessage(Message msg) {
switch (msg.what) {
case MSG_CAMERA_OPENED:
case MSG_SURFACE_READY:
// if both the surface has been created and the camera device has been opened,
// we are ready to set up the preview and other things
if (mSurfaceCreated && (mCameraDevice != null)
&& !mIsCameraConfigured) {
configureCamera();
}
break;
}
return true;
}
private void configureCamera() {
// prepare list of surfaces to be used in capture requests
List<Surface> sfl = new ArrayList<Surface>();
sfl.add(mCameraSurface); // surface for viewfinder preview
// configure the camera with all the surfaces that will ever be used
try {
mCameraDevice.createCaptureSession(sfl,
new CaptureSessionListener(), null);
mIsCameraConfigured = true; // only mark as configured if the call did not throw
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
#Override
public void onRequestPermissionsResult(int requestCode, #NonNull String[] permissions, #NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
switch (requestCode) {
case MY_PERMISSIONS_REQUEST_CAMERA:
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED)
try {
mCameraManager.openCamera(mCameraIDsList[1], mCameraStateCB, new Handler());
} catch (CameraAccessException e) {
e.printStackTrace();
}
break;
}
}
#Override
public void surfaceCreated(SurfaceHolder holder) {
mCameraSurface = holder.getSurface();
}
#Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
mCameraSurface = holder.getSurface();
mSurfaceCreated = true;
mHandler.sendEmptyMessage(MSG_SURFACE_READY);
}
#Override
public void surfaceDestroyed(SurfaceHolder holder) {
mSurfaceCreated = false;
}
private class CaptureSessionListener extends
CameraCaptureSession.StateCallback {
#Override
public void onConfigureFailed(final CameraCaptureSession session) {
Log.d(TAG, "CaptureSessionConfigure failed");
}
#Override
public void onConfigured(final CameraCaptureSession session) {
Log.d(TAG, "CaptureSessionConfigure onConfigured");
mCaptureSession = session;
try {
CaptureRequest.Builder previewRequestBuilder = mCameraDevice
.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
previewRequestBuilder.addTarget(mCameraSurface);
mCaptureSession.setRepeatingRequest(previewRequestBuilder.build(),
null, null);
} catch (CameraAccessException e) {
Log.d(TAG, "setting up preview failed");
e.printStackTrace();
}
}
}
}
With CameraX it's easy now.
// Create a preview use case instance
val preview = Preview.Builder().build()
// Bind the preview use case and other needed user cases to a lifecycle
val camera = cameraProvider.bindToLifecycle(lifecycleOwner, cameraSelector, preview, imageAnalysis, imageCapture)
// Create a surfaceProvider using the bound camera's cameraInfo
val surfaceProvider = previewView.createSurfaceProvider(camera.cameraInfo)
// Attach the surfaceProvider to the preview use case to start preview
preview.setSurfaceProvider(surfaceProvider)
<androidx.camera.view.PreviewView
android:layout_width="match_parent"
android:layout_height="match_parent"
app:scaleType="fitEnd" />
https://medium.com/androiddevelopers/display-a-camera-preview-with-previewview-86562433d86c
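The snippet above assumes that cameraProvider, cameraSelector, previewView and the extra use cases already exist, and it uses the older createSurfaceProvider(cameraInfo) API. As a rough, non-authoritative sketch in Java, wiring up just the preview with the current PreviewView.getSurfaceProvider() API might look like the following (the previewView id and the dependency setup are assumptions, and it is meant to run from a ComponentActivity's onCreate() so that this is a valid LifecycleOwner):
// Sketch only: obtain the ProcessCameraProvider asynchronously and bind the preview use case.
// Assumes the camera-core, camera-camera2, camera-lifecycle and camera-view artifacts are on the classpath.
final ListenableFuture<ProcessCameraProvider> providerFuture =
        ProcessCameraProvider.getInstance(this);
providerFuture.addListener(() -> {
    try {
        ProcessCameraProvider cameraProvider = providerFuture.get();
        Preview preview = new Preview.Builder().build();
        PreviewView previewView = findViewById(R.id.previewView); // id is hypothetical
        preview.setSurfaceProvider(previewView.getSurfaceProvider());
        cameraProvider.unbindAll();
        cameraProvider.bindToLifecycle(
                this, CameraSelector.DEFAULT_BACK_CAMERA, preview);
    } catch (ExecutionException | InterruptedException e) {
        e.printStackTrace();
    }
}, ContextCompat.getMainExecutor(this));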
