Android camera2 QR code reader

I have a fragment that scans for a QR code using the camera2 API and ZXing (Google's "zebra crossing") library. It works fine on all the phones I've tried; the problem is with one specific tablet, the NVIDIA SHIELD. It simply will not find the QR code in the image. I have tried a lot:
Smaller and bigger screen/image dimensions.
All the different camera settings, such as ISO, contrast, lighter, darker.
All the image formats, such as JPEG, YUV_422_888, ...
Using planes 0, 1, 2, ...
Sorry for such a big snippet of code; ask questions and I will respond ASAP.
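In short, the conversion from each camera Image to a ZXing BinaryBitmap boils down to the sketch below (a simplified restatement of the logic in the full fragment that follows; the helper name is just for illustration, and the row-stride comment marks an assumption the code makes, not something camera2 guarantees):
// Hypothetical helper, simplified from the fragment below.
// Assumption: plane 0 (the Y / luminance plane) is tightly packed, i.e. rowStride == width,
// which is what PlanarYUVLuminanceSource expects.
private Result tryDecode(Image img, QRCodeReader reader) throws ReaderException {
    ByteBuffer buffer = img.getPlanes()[0].getBuffer();
    byte[] data = new byte[buffer.remaining()];
    buffer.get(data);
    int width = img.getWidth();
    int height = img.getHeight();
    // If getPlanes()[0].getRowStride() != width on a given device, each row carries
    // padding bytes and the luminance source below receives misaligned data.
    PlanarYUVLuminanceSource source =
            new PlanarYUVLuminanceSource(data, width, height, 0, 0, width, height, false);
    return reader.decode(new BinaryBitmap(new HybridBinarizer(source)));
}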
Here is my code:
public class FragmentDecoder extends Fragment
implements FragmentCompat.OnRequestPermissionsResultCallback {
private static boolean accessGranted = true;
private static boolean showingText = false;
private ImageView camera_guide_image;
private TextView decoderText;
private ImageButton flashButton;
private static final int MAX_PREVIEW_WIDTH = 1920;
private static final int MAX_PREVIEW_HEIGHT = 1080;
private int mSensorOrientation;
private boolean mFlashSupported;
private static final String TAG = FragmentDecoder.class.getName();
private final CameraCaptureSession.CaptureCallback mCaptureCallback =
new CameraCaptureSession.CaptureCallback() {
private void process(CaptureResult result) {
}
@Override
public void onCaptureProgressed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request,
@NonNull CaptureResult partialResult) {
process(partialResult);
}
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
process(result);
}
};
private final TextureView.SurfaceTextureListener mSurfaceTextureListener =
new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
openCamera(width, height);
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {
configureTransform(width, height);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) {
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture texture) {
}
};
private QRCodeReader mQrReader;
private String mCameraId;
private AutoFitTextureView mTextureView;
private final ImageReader.OnImageAvailableListener mOnImageAvailableListener =
new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image img = null;
Result rawResult = null;
try {
img = reader.acquireLatestImage();
if (img == null) throw new NullPointerException("cannot be null");
ByteBuffer buffer = img.getPlanes()[0].getBuffer();
byte[] data = new byte[buffer.remaining()];
buffer.get(data);
int width = img.getWidth();
int height = img.getHeight();
PlanarYUVLuminanceSource source = new PlanarYUVLuminanceSource(data, width, height, 0, 0, width, height, false);
BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
rawResult = mQrReader.decode(bitmap);
if (rawResult == null) {
throw new NullPointerException("no QR code");
}
String decoded = rawResult.getText();
if (accessGranted) {
Log.e(TAG, "entered the accessGranted area!");
accessGranted = false;
boolean isPrivKey = BRWalletManager.getInstance(getActivity()).confirmSweep(getActivity(), decoded);
if (isPrivKey) {
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
BRAnimator.hideDecoderFragment();
}
});
return;
}
String validationString = validateResult(decoded);
if (Objects.equals(validationString, BRConstants.TEXT_EMPTY)) {
onQRCodeRead((MainActivity) getActivity(), rawResult.getText());
} else {
if (!showingText) {
showingText = true;
setCameraGuide(BRConstants.CAMERA_GUIDE_RED);
setGuideText(validationString);
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
showingText = false;
}
}, 1000);
}
accessGranted = true;
}
}
} catch (ReaderException | NullPointerException | IllegalStateException ignored) {
if (!showingText)
setCameraGuide(BRConstants.CAMERA_GUIDE);
// Log.e(TAG, "Reader shows an exception! ", ignored);
/* Ignored */
} finally {
mQrReader.reset();
if (img != null)
img.close();
}
if (rawResult == null) {
if (!showingText)
setCameraGuide(BRConstants.CAMERA_GUIDE);
}
}
};
private CameraCaptureSession mCaptureSession;
private CameraDevice mCameraDevice;
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice cameraDevice) {
// This method is called when the camera is opened. We start camera preview here.
mCameraOpenCloseLock.release();
mCameraDevice = cameraDevice;
createCameraPreviewSession();
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int error) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
getActivity().onBackPressed();
}
};
private Size mPreviewSize;
private HandlerThread mBackgroundThread;
private Handler mBackgroundHandler;
private ImageReader mImageReader;
private CaptureRequest.Builder mPreviewRequestBuilder;
private CaptureRequest mPreviewRequest;
private final Semaphore mCameraOpenCloseLock = new Semaphore(1);
private int flashButtonCount;
private static Size chooseOptimalSize(Size[] choices, int textureViewWidth,
int textureViewHeight, int maxWidth, int maxHeight, Size aspectRatio) {
List<Size> bigEnough = new ArrayList<>();
List<Size> notBigEnough = new ArrayList<>();
int w = aspectRatio.getWidth();
int h = aspectRatio.getHeight();
for (Size option : choices) {
if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight &&
option.getHeight() == option.getWidth() * h / w) {
if (option.getWidth() >= textureViewWidth &&
option.getHeight() >= textureViewHeight) {
bigEnough.add(option);
} else {
notBigEnough.add(option);
}
}
}
// Pick the smallest of those big enough. If there is none big enough, pick the largest of those not big enough.
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizesByArea());
} else if (notBigEnough.size() > 0) {
return Collections.max(notBigEnough, new CompareSizesByArea());
} else {
Log.e(TAG, "Couldn't find any suitable preview size");
return choices[0];
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_decoder, container, false);
mQrReader = new QRCodeReader();
mTextureView = new AutoFitTextureView(getActivity());
RelativeLayout layout = (RelativeLayout) rootView.findViewById(R.id.fragment_decoder_layout);
camera_guide_image = (ImageView) rootView.findViewById(R.id.decoder_camera_guide_image);
decoderText = (TextView) rootView.findViewById(R.id.decoder_text);
flashButton = (ImageButton) rootView.findViewById(R.id.button_flash);
flashButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
try {
if (mPreviewRequestBuilder == null || mCaptureSession == null)
return;
if (++flashButtonCount % 2 != 0) {
mPreviewRequestBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_TORCH);
mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), null, null);
flashButton.setBackgroundResource(R.drawable.flash_on);
SpringAnimator.showAnimation(flashButton);
} else {
mPreviewRequestBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_OFF);
mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), null, null);
flashButton.setBackgroundResource(R.drawable.flash_off);
SpringAnimator.showAnimation(flashButton);
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
});
layout.addView(mTextureView, 0);
startBackgroundThread();
return rootView;
}
@Override
public void onResume() {
super.onResume();
accessGranted = true;
showingText = false;
new Handler().post(new Runnable() {
@Override
public void run() {
if (camera_guide_image != null)
SpringAnimator.showExpandCameraGuide(camera_guide_image);
}
});
((BreadWalletApp) getActivity().getApplication()).hideKeyboard(getActivity());
// When the screen is turned off and turned back on, the SurfaceTexture is already
// available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
// a camera and start preview from here (otherwise, we wait until the surface is ready in
// the SurfaceTextureListener).
if (mTextureView.isAvailable()) {
openCamera(mTextureView.getWidth(), mTextureView.getHeight());
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
@Override
public void onPause() {
closeCamera();
stopBackgroundThread();
super.onPause();
}
private void setUpCameraOutputs(int width, int height) {
Activity activity = getActivity();
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
for (String cameraId : manager.getCameraIdList()) {
CameraCharacteristics characteristics
= manager.getCameraCharacteristics(cameraId);
// We don't use a front facing camera in this sample.
Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
continue;
}
StreamConfigurationMap map = characteristics.get(
CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map == null) {
continue;
}
// For still image captures, we use the largest available size.
Size largest = Collections.max(
Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)),
new CompareSizesByArea());
mImageReader = ImageReader.newInstance(largest.getWidth()/2, largest.getHeight()/2,
ImageFormat.YUV_420_888, /*maxImages*/2);
mImageReader.setOnImageAvailableListener(
mOnImageAvailableListener, mBackgroundHandler);
// Find out if we need to swap dimension to get the preview size relative to sensor
// coordinate.
int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
//noinspection ConstantConditions
mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
boolean swappedDimensions = false;
switch (displayRotation) {
case Surface.ROTATION_0:
case Surface.ROTATION_180:
if (mSensorOrientation == 90 || mSensorOrientation == 270) {
swappedDimensions = true;
}
break;
case Surface.ROTATION_90:
case Surface.ROTATION_270:
if (mSensorOrientation == 0 || mSensorOrientation == 180) {
swappedDimensions = true;
}
break;
default:
Log.e(TAG, "Display rotation is invalid: " + displayRotation);
}
Point displaySize = new Point();
activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
int rotatedPreviewWidth = width;
int rotatedPreviewHeight = height;
int maxPreviewWidth = displaySize.x;
int maxPreviewHeight = displaySize.y;
if (swappedDimensions) {
rotatedPreviewWidth = height;
rotatedPreviewHeight = width;
maxPreviewWidth = displaySize.y;
maxPreviewHeight = displaySize.x;
}
if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
maxPreviewWidth = MAX_PREVIEW_WIDTH;
}
if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
maxPreviewHeight = MAX_PREVIEW_HEIGHT;
}
// Danger, W.R.! Attempting to use too large a preview size could exceed the camera
// bus' bandwidth limitation, resulting in gorgeous previews but the storage of
// garbage capture data.
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth,
maxPreviewHeight, largest);
// We fit the aspect ratio of TextureView to the size of preview we picked.
int orientation = getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
mTextureView.setAspectRatio(
mPreviewSize.getWidth(), mPreviewSize.getHeight());
} else {
mTextureView.setAspectRatio(
mPreviewSize.getHeight(), mPreviewSize.getWidth());
}
// Check if the flash is supported.
Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
mFlashSupported = available == null ? false : available;
mCameraId = cameraId;
return;
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void openCamera(int width, int height) {
setUpCameraOutputs(width, height);
configureTransform(width, height);
CameraManager manager = (CameraManager) getActivity().getSystemService(Context.CAMERA_SERVICE);
try {
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
if (ActivityCompat.checkSelfPermission(getActivity(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
return;
}
if (mCameraId != null)
manager.openCamera(mCameraId, mStateCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
}
}
private void closeCamera() {
try {
mCameraOpenCloseLock.acquire();
if (mCaptureSession != null) {
mCaptureSession.close();
mCaptureSession = null;
}
if (mCameraDevice != null) {
mCameraDevice.close();
mCameraDevice = null;
}
if (mImageReader != null) {
mImageReader.close();
mImageReader = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
} finally {
mCameraOpenCloseLock.release();
}
}
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
private void stopBackgroundThread() {
try {
mBackgroundThread.quitSafely();
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException | NullPointerException e) {
e.printStackTrace();
mBackgroundThread = null;
mBackgroundHandler = null;
}
}
private void createCameraPreviewSession() {
try {
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
// We configure the size of default buffer to be the size of camera preview we want.
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface surface = new Surface(texture);
Surface mImageSurface = mImageReader.getSurface();
mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.addTarget(mImageSurface);
mPreviewRequestBuilder.addTarget(surface);
// Here, we create a CameraCaptureSession for camera preview.
mCameraDevice.createCaptureSession(Arrays.asList(mImageSurface, surface),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
// The camera is already closed
// Log.e(TAG, "onConfigured");
if (mCameraDevice == null) return;
// When the session is ready, we start displaying the preview.
mCaptureSession = cameraCaptureSession;
try {
// Auto focus should be continuous for camera preview.
mPreviewRequestBuilder.set(CONTROL_AF_MODE, CONTROL_AF_MODE_CONTINUOUS_PICTURE);
mPreviewRequest = mPreviewRequestBuilder.build();
mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback,
mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
Toast.makeText(getActivity(), getActivity().getResources().getString(R.string.failed),
Toast.LENGTH_SHORT).show();
}
}, null
);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void configureTransform(int viewWidth, int viewHeight) {
if (mTextureView == null || mPreviewSize == null) return;
int rotation = getActivity().getWindowManager().getDefaultDisplay().getRotation();
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale = Math.max(
(float) viewHeight / mPreviewSize.getHeight(),
(float) viewWidth / mPreviewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
}
mTextureView.setTransform(matrix);
}
private static synchronized void onQRCodeRead(final MainActivity app, final String text) {
app.runOnUiThread(new Runnable() {
@Override
public void run() {
if (text != null) {
BRAnimator.hideDecoderFragment();
Log.e(TAG, "BEFORE processRequest");
RequestHandler.processRequest(app, text);
}
}
});
}
private static class CompareSizesByArea implements Comparator<Size> {
@Override
public int compare(Size lhs, Size rhs) {
// We cast here to ensure the multiplications won't overflow
return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
(long) rhs.getWidth() * rhs.getHeight());
}
}
private int getStatusBarHeight() {
Rect rectangle = new Rect();
Window window = getActivity().getWindow();
window.getDecorView().getWindowVisibleDisplayFrame(rectangle);
int statusBarHeight = rectangle.top;
int contentViewTop =
window.findViewById(Window.ID_ANDROID_CONTENT).getTop();
int titleBarHeight = contentViewTop - statusBarHeight;
return statusBarHeight + titleBarHeight;
}
private String validateResult(String str) {
RequestObject obj = null;
try {
obj = RequestHandler.getRequestFromString(str);
} catch (InvalidAlgorithmParameterException e) {
e.printStackTrace();
}
if (obj == null) {
return getActivity().getResources().getString(R.string.fragmentdecoder_not_a_bitcoin_qr_code);
}
if (obj.r != null) {
return BRConstants.TEXT_EMPTY;
}
if (obj.address != null) {
if (BRWalletManager.validateAddress(obj.address)) {
return BRConstants.TEXT_EMPTY;
} else {
return getActivity().getResources().getString(R.string.fragmentdecoder_not_valid_bitcoin_address);
}
}
return getActivity().getResources().getString(R.string.fragmentdecoder_not_a_bitcoin_qr_code);
}
private void setCameraGuide(final String str) {
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
if (str.equalsIgnoreCase(BRConstants.CAMERA_GUIDE)) {
camera_guide_image.setImageResource(R.drawable.cameraguide);
setGuideText(BRConstants.TEXT_EMPTY);
} else if (str.equalsIgnoreCase(BRConstants.CAMERA_GUIDE_RED)) {
camera_guide_image.setImageResource(R.drawable.cameraguide_red);
} else {
throw new IllegalArgumentException("CAMERA_GUIDE and CAMERA_GUIDE_RED only");
}
}
});
}
private void setGuideText(final String str) {
Activity activity = getActivity();
if (activity != null)
activity.runOnUiThread(new Runnable() {
@Override
public void run() {
decoderText.setText(str);
}
});
}
}

Related

Custom camera TextureView not adjusting to landscape mode

I am making a custom Android camera app. So far, the only problem I have is getting the TextureView to adjust when in landscape mode (turning the camera 90 degrees): the view comes out distorted and in the wrong orientation. Portrait mode works fine. I am not sure whether the problem is in the setupCamera() method, the sensorToDeviceRotation() method, or the configureTransform() method. I haven't implemented anything to actually take the photo yet. Any help is appreciated.
EDIT/UPDATE: I updated my code with the suggested improvements, but the original problem still persists. I am using a Pixel XL to test this code, and I am still not sure what the problem is.
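(One way to narrow down which of the three methods produces the wrong numbers is to dump the values they compute to logcat; a minimal sketch, with a hypothetical helper name and log tag:)
// Hypothetical debug helper: call it at the end of setupCamera() / configureTransform()
// to see which stage produces unexpected values on a given device.
private static void logCameraSetup(int displayRotation, int sensorOrientation,
                                   int totalRotation, boolean swapRotation, Size previewSize) {
    Log.d("CameraDebug", "displayRotation=" + displayRotation
            + " sensorOrientation=" + sensorOrientation
            + " totalRotation=" + totalRotation
            + " swapRotation=" + swapRotation
            + " previewSize=" + previewSize);
}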
homePage.java
public class homePage extends AppCompatActivity {
private static final int CAMERA_PERMISSION = 123;
private String mCameraId;
private Size mPreviewSize;
private CaptureRequest.Builder mCaptureRequestBuilder;
private HandlerThread mBackgroundHandlerThread;
private Handler mBackgroundHandler;
private static SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 0);
ORIENTATIONS.append(Surface.ROTATION_90, 90);
ORIENTATIONS.append(Surface.ROTATION_180, 180);
ORIENTATIONS.append(Surface.ROTATION_270, 270);
}
private CameraDevice mCameraDevice;
private CameraDevice.StateCallback mCameraDeviceCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
mCameraDevice = camera;
//Toast.makeText(getApplicationContext(), "Camera connected", Toast.LENGTH_SHORT).show();
startPreview();
}
@Override
public void onDisconnected(CameraDevice camera) {
camera.close();
mCameraDevice = null;
}
@Override
public void onError(CameraDevice camera, int error) {
camera.close();
mCameraDevice = null;
}
};
private TextureView mTextureView;
private TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
setupCamera(width, height);
connectCamera();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
configureTransform(width, height);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
};
@Override
public void onRequestPermissionsResult(int requestCode,
@NonNull String permissions[],
@NonNull int[] grantResults) {
switch(requestCode) {
case CAMERA_PERMISSION:
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
return;
}else {
Toast.makeText(this, "Certain permissions needed to continue", Toast.LENGTH_SHORT).show();
}
break;
}
}
private boolean hasCamera() {
if (getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_ANY)) {
return true;
}else {
AlertDialog camAlert = new AlertDialog.Builder(homePage.this).create();
camAlert.setTitle("Alert");
camAlert.setMessage("there is no usable camera");
camAlert.setButton(AlertDialog.BUTTON_NEUTRAL, "OK",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
}
});
camAlert.show();
return false;
}
}
private void setupCamera(int width, int height) {
CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
for (String cameraId : cameraManager.getCameraIdList()) {
CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
if(cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) ==
CameraCharacteristics.LENS_FACING_FRONT) {
continue;
}
StreamConfigurationMap map = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
int deviceOrientation = getWindowManager().getDefaultDisplay().getRotation();
int totalRotation = sensorToDeviceRotation(cameraCharacteristics, deviceOrientation);
boolean swapRotation = totalRotation == 90 || totalRotation == 270;
int rotatedWidth = width;
int rotatedHeight = height;
if(swapRotation) {
rotatedWidth = height;
rotatedHeight = width;
}
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedWidth, rotatedHeight);
mCameraId = cameraId;
return;
}
} catch (CameraAccessException e){
e.printStackTrace();
}
}
private void closeCamera() {
if(mCameraDevice != null) {
mCameraDevice.close();
mCameraDevice = null;
}
}
private void connectCamera() {
CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) ==
PackageManager.PERMISSION_GRANTED) {
cameraManager.openCamera(mCameraId, mCameraDeviceCallback, mBackgroundHandler);
//Toast.makeText(this, "Camera permission granted", Toast.LENGTH_SHORT).show();
} else {
if (shouldShowRequestPermissionRationale(Manifest.permission.CAMERA)) {
//Toast.makeText(this, "non-marshmellow device permission granted", Toast.LENGTH_SHORT).show();
}
requestPermissions(new String[] {Manifest.permission.CAMERA}, CAMERA_PERMISSION);
}
} else {
cameraManager.openCamera(mCameraId, mCameraDeviceCallback, mBackgroundHandler);
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void startPreview() {
SurfaceTexture surfaceTexture = mTextureView.getSurfaceTexture();
surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface previewSurface = new Surface(surfaceTexture);
try {
mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mCaptureRequestBuilder.addTarget(previewSurface);
mCameraDevice.createCaptureSession(Arrays.asList(previewSurface),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
session.setRepeatingRequest(mCaptureRequestBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
Toast.makeText(getApplicationContext(), "Unable to set up camera preview", Toast.LENGTH_SHORT).show();
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void startBackgroundThread() {
mBackgroundHandlerThread = new HandlerThread("cameraString");
mBackgroundHandlerThread.start();
mBackgroundHandler = new Handler(mBackgroundHandlerThread.getLooper());
}
private void stopBackgroundThread() {
mBackgroundHandlerThread.quitSafely();
try {
mBackgroundHandlerThread.join();
mBackgroundHandlerThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private static int sensorToDeviceRotation(CameraCharacteristics cameraCharacteristics,
int deviceOrientation) {
int sensorOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
deviceOrientation = ORIENTATIONS.get(deviceOrientation);
return (sensorOrientation + deviceOrientation + 360) % 360;
}
private void configureTransform (int width, int height) {
if (null == mTextureView || null == mPreviewSize) {
return;
}
final int rotation = getWindowManager().getDefaultDisplay().getRotation();
final Matrix matrix = new Matrix();
final RectF viewRect = new RectF(0, 0, width, height);
final RectF buffetRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
final float centerX = viewRect.centerX();
final float centerY = viewRect.centerY();
if(Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
buffetRect.offset(centerX - buffetRect.centerX(), centerY - buffetRect.centerY());
matrix.setRectToRect(viewRect, buffetRect, Matrix.ScaleToFit.FILL);
final float scale = Math.max(
(float) height / mPreviewSize.getHeight(),
(float) width / mPreviewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
}else if (Surface.ROTATION_180 == rotation) {
matrix.postRotate(180, centerX, centerY);
}
mTextureView.setTransform(matrix);
}
//=====added code from suggestions=======================
public interface CameraModule {
void onOrientationChanged(int orientation);
}
CameraModule mCurrentModule;
private MyOrientationEventListener mOrientationListener;
private int mLastRawOrientation = OrientationEventListener.ORIENTATION_UNKNOWN;
private class MyOrientationEventListener extends OrientationEventListener {
public MyOrientationEventListener(Context context){
super(context);
}
@Override
public void onOrientationChanged(int orientation) {
if(orientation == ORIENTATION_UNKNOWN)
return;
mLastRawOrientation = orientation;
mCurrentModule.onOrientationChanged(orientation);
}
}
//========================================================================
private static class CompareSizeByArea implements Comparator<Size> {
@Override
public int compare(Size lhs, Size rhs) {
return Long.signum((long) lhs.getWidth() * lhs.getHeight() /
(long) rhs.getWidth() * rhs.getHeight());
}
}
private static Size chooseOptimalSize(Size[] choices, int width, int height) {
List<Size> bigEnough = new ArrayList<Size>();
for(Size option: choices) {
if (option.getHeight() == option.getWidth() * height / width &&
option.getWidth() >= width && option.getHeight() >= height) {
bigEnough.add(option);
}
}
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizeByArea());
} else {
return choices[0];
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_home_page);
hasCamera();
mTextureView = (TextureView) findViewById(R.id.textureView);
mOrientationListener = new MyOrientationEventListener(this);
}
@Override
protected void onResume() {
super.onResume();
startBackgroundThread();
if (mTextureView.isAvailable()) {
setupCamera(mTextureView.getWidth(), mTextureView.getHeight());
connectCamera();
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
@Override
protected void onPause() {
closeCamera();
stopBackgroundThread();
super.onPause();
}
}
You can try something like this (add this private class to your homePage.java). Notice that the onOrientationChanged() method can be applied to your XML layouts as well:
We keep the last known orientation, so if you first orient the camera and then point it at the floor or the sky, the TextureView and the camera still keep the correct orientation.
private MyOrientationEventListener mOrientationListener;
// The degrees of the device rotated clockwise from its natural orientation.
private int mLastRawOrientation= OrientationEventListener.ORIENTATION_UNKNOWN;
// Don't forget to add mOrientationListener = new MyOrientationEventListener(this); in your onCreate() method
private class MyOrientationEventListener
extends OrientationEventListener {
public MyOrientationEventListener(Context context) {
super(context);
}
@Override
public void onOrientationChanged(int orientation) {
if (orientation == ORIENTATION_UNKNOWN) return;
mLastRawOrientation = orientation;
mCurrentModule.onOrientationChanged(orientation);
}
}
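(The listener only delivers callbacks while it is enabled, so remember to hook it into your existing lifecycle methods; a minimal sketch, assuming the mOrientationListener field above:)
// In your existing onResume():
// canDetectOrientation() guards against devices that lack the required sensors.
if (mOrientationListener.canDetectOrientation()) {
    mOrientationListener.enable();
}
// In your existing onPause():
mOrientationListener.disable();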
For more information you can check out this link: Camera Example
I found the solution by eliminating the configureTransform() method, the CameraModule interface, and the MyOrientationEventListener class, and replacing them with the transformImage() method, which I then call in onSurfaceTextureAvailable() and in onResume().
The problem I may have been facing in the old code was that I was calling the configureTransform() method in the wrong places. I got the answer from looking at this post: Android Camera2 Preview is rotated 90deg while in Landscape
and this YouTube tutorial: https://www.youtube.com/watch?v=YvS3iGKhQ_g
These sources explain the topic far better than I can, so please look them over if you are facing a similar issue.
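(The only structural change is the pair of transformImage() calls, excerpted here from the full listing that follows:)
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
    setupCamera(width, height);
    connectCamera();
    transformImage(width, height); // new: apply the matrix as soon as the surface exists
}
@Override
protected void onResume() {
    super.onResume();
    startBackgroundThread();
    if (mTextureView.isAvailable()) {
        setupCamera(mTextureView.getWidth(), mTextureView.getHeight());
        connectCamera();
        transformImage(mTextureView.getWidth(), mTextureView.getHeight()); // new
    } else {
        mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
    }
}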
New working code:
homePage.java:
public class homePage extends AppCompatActivity {
private static final int CAMERA_PERMISSION = 123;
private String mCameraId;
private Size mPreviewSize;
private CaptureRequest.Builder mCaptureRequestBuilder;
private HandlerThread mBackgroundHandlerThread;
private Handler mBackgroundHandler;
private static SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 0);
ORIENTATIONS.append(Surface.ROTATION_90, 90);
ORIENTATIONS.append(Surface.ROTATION_180, 180);
ORIENTATIONS.append(Surface.ROTATION_270, 270);
}
private CameraDevice mCameraDevice;
private CameraDevice.StateCallback mCameraDeviceCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
mCameraDevice = camera;
//Toast.makeText(getApplicationContext(), "Camera connected", Toast.LENGTH_SHORT).show();
startPreview();
}
@Override
public void onDisconnected(CameraDevice camera) {
camera.close();
mCameraDevice = null;
}
@Override
public void onError(CameraDevice camera, int error) {
camera.close();
mCameraDevice = null;
}
};
private TextureView mTextureView;
private TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
setupCamera(width, height);
connectCamera();
transformImage(width, height);
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
};
@Override
public void onRequestPermissionsResult(int requestCode,
@NonNull String permissions[],
@NonNull int[] grantResults) {
switch(requestCode) {
case CAMERA_PERMISSION:
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
return;
}else {
Toast.makeText(this, "Certain permissions needed to continue", Toast.LENGTH_SHORT).show();
}
break;
}
}
private boolean hasCamera() {
if (getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_ANY)) {
return true;
}else {
AlertDialog camAlert = new AlertDialog.Builder(homePage.this).create();
camAlert.setTitle("Alert");
camAlert.setMessage("there is no usable camera");
camAlert.setButton(AlertDialog.BUTTON_NEUTRAL, "OK",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
}
});
camAlert.show();
return false;
}
}
private void setupCamera(int width, int height) {
CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
for (String cameraId : cameraManager.getCameraIdList()) {
CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
if(cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) ==
CameraCharacteristics.LENS_FACING_FRONT) {
continue;
}
StreamConfigurationMap map = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
int deviceOrientation = getWindowManager().getDefaultDisplay().getRotation();
int totalRotation = sensorToDeviceRotation(cameraCharacteristics, deviceOrientation);
boolean swapRotation = totalRotation == 90 || totalRotation == 270;
int rotatedWidth = width;
int rotatedHeight = height;
if(swapRotation) {
rotatedWidth = height;
rotatedHeight = width;
}
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedWidth, rotatedHeight);
mCameraId = cameraId;
return;
}
} catch (CameraAccessException e){
e.printStackTrace();
}
}
private void closeCamera() {
if(mCameraDevice != null) {
mCameraDevice.close();
mCameraDevice = null;
}
}
private void connectCamera() {
CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) ==
PackageManager.PERMISSION_GRANTED) {
cameraManager.openCamera(mCameraId, mCameraDeviceCallback, mBackgroundHandler);
//Toast.makeText(this, "Camera permission granted", Toast.LENGTH_SHORT).show();
} else {
if (shouldShowRequestPermissionRationale(Manifest.permission.CAMERA)) {
//Toast.makeText(this, "non-marshmellow device permission granted", Toast.LENGTH_SHORT).show();
}
requestPermissions(new String[] {Manifest.permission.CAMERA}, CAMERA_PERMISSION);
}
} else {
cameraManager.openCamera(mCameraId, mCameraDeviceCallback, mBackgroundHandler);
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void startPreview() {
SurfaceTexture surfaceTexture = mTextureView.getSurfaceTexture();
surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface previewSurface = new Surface(surfaceTexture);
try {
mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mCaptureRequestBuilder.addTarget(previewSurface);
mCameraDevice.createCaptureSession(Arrays.asList(previewSurface),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
session.setRepeatingRequest(mCaptureRequestBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
Toast.makeText(getApplicationContext(), "Unable to set up camera preview", Toast.LENGTH_SHORT).show();
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void startBackgroundThread() {
mBackgroundHandlerThread = new HandlerThread("cameraString");
mBackgroundHandlerThread.start();
mBackgroundHandler = new Handler(mBackgroundHandlerThread.getLooper());
}
private void stopBackgroundThread() {
mBackgroundHandlerThread.quitSafely();
try {
mBackgroundHandlerThread.join();
mBackgroundHandlerThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private static int sensorToDeviceRotation(CameraCharacteristics cameraCharacteristics,
int deviceOrientation) {
int sensorOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
deviceOrientation = ORIENTATIONS.get(deviceOrientation);
return (sensorOrientation + deviceOrientation + 360) % 360;
}
//================new method====================================
private void transformImage(int width, int height) {
if(mPreviewSize == null || mTextureView == null) {
return;
}
Matrix matrix = new Matrix();
int rotation = getWindowManager().getDefaultDisplay().getRotation();
RectF textureRectF = new RectF(0, 0, width, height);
RectF previewRectF = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
float centerX = textureRectF.centerX();
float centerY = textureRectF.centerY();
if(rotation == Surface.ROTATION_90 || rotation == Surface.ROTATION_270) {
previewRectF.offset(centerX - previewRectF.centerX(),
centerY - previewRectF.centerY());
matrix.setRectToRect(textureRectF, previewRectF, Matrix.ScaleToFit.FILL);
float scale = Math.max((float)width / mPreviewSize.getWidth(),
(float)height / mPreviewSize.getHeight());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
}
mTextureView.setTransform(matrix);
}
//===============================================================
private static class CompareSizeByArea implements Comparator<Size> {
@Override
public int compare(Size lhs, Size rhs) {
return Long.signum((long) lhs.getWidth() * lhs.getHeight() /
(long) rhs.getWidth() * rhs.getHeight());
}
}
private static Size chooseOptimalSize(Size[] choices, int width, int height) {
List<Size> bigEnough = new ArrayList<Size>();
for(Size option: choices) {
if (option.getHeight() == option.getWidth() * height / width &&
option.getWidth() >= width && option.getHeight() >= height) {
bigEnough.add(option);
}
}
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizeByArea());
} else {
return choices[0];
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_home_page);
hasCamera();
mTextureView = (TextureView) findViewById(R.id.textureView);
}
@Override
protected void onResume() {
super.onResume();
startBackgroundThread();
if (mTextureView.isAvailable()) {
setupCamera(mTextureView.getWidth(), mTextureView.getHeight());
connectCamera();
transformImage(mTextureView.getWidth(), mTextureView.getHeight());
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
@Override
protected void onPause() {
closeCamera();
stopBackgroundThread();
super.onPause();
}
}

How to implement video recording in Android with the smallest possible file size?

I followed this documentation on how to implement video recording (in my case, I implemented it in a Fragment)
static final int REQUEST_VIDEO_CAPTURE = 1;
private void dispatchTakeVideoIntent() {
Intent takeVideoIntent = new Intent(MediaStore.ACTION_VIDEO_CAPTURE);
if (takeVideoIntent.resolveActivity(getPackageManager()) != null) {
startActivityForResult(takeVideoIntent, REQUEST_VIDEO_CAPTURE);
}
}
https://developer.android.com/training/camera/videobasics.html
However, I noticed that 30 seconds of recording is around 40 MB. I tried another app with video recording, and its 30-second clip is only around 7 MB; I noticed that its resolution is 720x480.
I would like to ask how to implement something similar.
Thanks for your advice.
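Before replacing the capture intent entirely, note that MediaStore.ACTION_VIDEO_CAPTURE accepts a few hints that can shrink the output, although how (and whether) they are honored varies with the device's camera app; a minimal sketch with illustrative values:
static final int REQUEST_VIDEO_CAPTURE = 1;
private void dispatchTakeVideoIntent() {
    Intent takeVideoIntent = new Intent(MediaStore.ACTION_VIDEO_CAPTURE);
    // 0 = low quality, 1 = high quality; support depends on the camera app.
    takeVideoIntent.putExtra(MediaStore.EXTRA_VIDEO_QUALITY, 0);
    // Optional hints: limit the recording length (seconds) and approximate file size (bytes).
    takeVideoIntent.putExtra(MediaStore.EXTRA_DURATION_LIMIT, 30);
    takeVideoIntent.putExtra(MediaStore.EXTRA_SIZE_LIMIT, 7L * 1024 * 1024);
    if (takeVideoIntent.resolveActivity(getActivity().getPackageManager()) != null) {
        startActivityForResult(takeVideoIntent, REQUEST_VIDEO_CAPTURE);
    }
}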
You need to implement your own video recorder so that you can tailor it to your requirements.
Set the video resolution:
mVideoRecorder.setVideoSize(720, 480);
Set the max duration:
mVideoRecorder.setMaxDuration(30000);
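(File size is driven mostly by the encoding bit rate, so these are the MediaRecorder settings worth tuning together; the helper name and the concrete numbers below are only illustrative:)
// Hypothetical helper collecting the size-relevant settings; call it while configuring
// the recorder (after setOutputFormat(), before prepare()), as in the example below.
private static void applyLowBitrateProfile(MediaRecorder recorder) {
    recorder.setVideoSize(720, 480);            // fewer pixels per frame
    recorder.setVideoFrameRate(20);             // fewer frames per second
    recorder.setVideoEncodingBitRate(1000000);  // ~1 Mbps; the biggest single lever on file size
    recorder.setMaxDuration(30000);             // stop after 30 seconds
    recorder.setMaxFileSize(7L * 1024 * 1024);  // optional hard cap on output bytes
}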
Try this:
videorecord.xml
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:orientation="vertical">
<VideoView
android:id="@+id/videoRecorder"
android:layout_width="match_parent"
android:layout_height="match_parent" />
</LinearLayout>
VideoRecorderExample.java
import android.app.Activity;
import android.hardware.Camera;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
import android.widget.VideoView;
public class VideoRecorderExample extends Activity implements
SurfaceHolder.Callback {
public static final String IS_FRONT_CAMERA = "FRONT_CAMERA";
private static final String TAG = VideoRecorderExample.class.getSimpleName();
private Boolean mRecording = false;
private Boolean isFrontCamera = false;
private VideoView mVideoView = null;
private MediaRecorder mVideoRecorder = null;
private Camera mCamera;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.videorecord);
mVideoView = (VideoView) findViewById(R.id.videoRecorder);
isFrontCamera = getIntent().getExtras().getBoolean(IS_FRONT_CAMERA, true);
final SurfaceHolder holder = mVideoView.getHolder();
holder.addCallback(this);
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
startRecording(holder);
} catch (Exception e) {
Log.e(TAG, e.toString());
e.printStackTrace();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
Log.e(TAG, "W x H = " + width + "x" + height);
}
private void stopRecording() throws Exception {
mRecording = false;
if (mVideoRecorder != null) {
mVideoRecorder.stop();
mVideoRecorder.release();
mVideoRecorder = null;
}
if (mCamera != null) {
mCamera.reconnect();
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
}
@Override
protected void onDestroy() {
try {
stopRecording();
} catch (Exception e) {
Log.e(TAG, e.toString());
e.printStackTrace();
}
super.onDestroy();
}
private void startRecording(SurfaceHolder holder) throws Exception {
if (mVideoRecorder != null) {
mVideoRecorder.stop();
mVideoRecorder.release();
mVideoRecorder = null;
}
if (mCamera != null) {
mCamera.reconnect();
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
String uniqueOutFile = Environment.getExternalStorageDirectory().toString() + "/" + System.currentTimeMillis() + ".3gp";
try {
if (isFrontCamera) {
mCamera = Camera.open(1);
} else {
mCamera = Camera.open();
}
mCamera.setPreviewDisplay(holder);
Camera.Parameters parameters = mCamera.getParameters();
parameters.setPreviewSize(640, 480);
mCamera.setParameters(parameters);
mCamera.startPreview();
mCamera.unlock();
mVideoRecorder = new MediaRecorder();
mVideoRecorder.setCamera(mCamera);
mVideoRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
mVideoRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mVideoRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
mVideoRecorder.setVideoSize(720, 480);
mVideoRecorder.setVideoFrameRate(20);
mVideoRecorder.setVideoEncodingBitRate(3000000);
mVideoRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mVideoRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_WB);
mVideoRecorder.setAudioSamplingRate(16000);
mVideoRecorder.setMaxDuration(30000);
mVideoRecorder.setPreviewDisplay(holder.getSurface());
mVideoRecorder.setOutputFile(uniqueOutFile);
mVideoRecorder.prepare();
mVideoRecorder.start();
mRecording = true;
} catch (Exception e) {
Log.e(TAG, e.toString());
e.printStackTrace();
}
}
}
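(For completeness, a minimal way to launch this recorder, assuming the activity is declared in your manifest; it reads the IS_FRONT_CAMERA extra in onCreate():)
// Launch the recorder; pass true to use the front camera instead of the back one.
private void launchRecorder(Context context, boolean useFrontCamera) {
    Intent intent = new Intent(context, VideoRecorderExample.class);
    intent.putExtra(VideoRecorderExample.IS_FRONT_CAMERA, useFrontCamera);
    context.startActivity(intent);
}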
For camera2:
Try this code; it works with the camera2 API.
public class Camera2VideoFragment extends Fragment
implements View.OnClickListener, FragmentCompat.OnRequestPermissionsResultCallback {
private static final int SENSOR_ORIENTATION_DEFAULT_DEGREES = 90;
private static final int SENSOR_ORIENTATION_INVERSE_DEGREES = 270;
private static final SparseIntArray DEFAULT_ORIENTATIONS = new SparseIntArray();
private static final SparseIntArray INVERSE_ORIENTATIONS = new SparseIntArray();
private static final String TAG = "Camera2VideoFragment";
private static final int REQUEST_VIDEO_PERMISSIONS = 1;
private static final String FRAGMENT_DIALOG = "dialog";
private static final String[] VIDEO_PERMISSIONS = {
Manifest.permission.CAMERA,
Manifest.permission.RECORD_AUDIO,
};
static {
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_0, 90);
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_90, 0);
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_180, 270);
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
static {
INVERSE_ORIENTATIONS.append(Surface.ROTATION_0, 270);
INVERSE_ORIENTATIONS.append(Surface.ROTATION_90, 180);
INVERSE_ORIENTATIONS.append(Surface.ROTATION_180, 90);
INVERSE_ORIENTATIONS.append(Surface.ROTATION_270, 0);
}
/**
* An {@link AutoFitTextureView} for camera preview.
*/
private AutoFitTextureView mTextureView;
private Button mButtonVideo;
private CameraDevice mCameraDevice;
private CameraCaptureSession mPreviewSession;
private TextureView.SurfaceTextureListener mSurfaceTextureListener
= new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture,
int width, int height) {
openCamera(width, height);
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture,
int width, int height) {
configureTransform(width, height);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
}
};
private Size mPreviewSize;
private Size mVideoSize;
private MediaRecorder mMediaRecorder;
private boolean mIsRecordingVideo;
private HandlerThread mBackgroundThread;
private Handler mBackgroundHandler;
/**
* A {@link Semaphore} to prevent the app from exiting before closing the camera.
*/
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
/**
* {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its status.
*/
private CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice cameraDevice) {
mCameraDevice = cameraDevice;
startPreview();
mCameraOpenCloseLock.release();
if (null != mTextureView) {
configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
}
}
@Override
public void onDisconnected(CameraDevice cameraDevice) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
}
@Override
public void onError(CameraDevice cameraDevice, int error) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
Activity activity = getActivity();
if (null != activity) {
activity.finish();
}
}
};
private Integer mSensorOrientation;
private String mNextVideoAbsolutePath;
private CaptureRequest.Builder mPreviewBuilder;
private Surface mRecorderSurface;
public static Camera2VideoFragment newInstance() {
return new Camera2VideoFragment();
}
/**
* In this sample, we choose a video size with 3x4 aspect ratio. Also, we don't use sizes
* larger than 1080p, since MediaRecorder cannot handle such a high-resolution video.
*
* @param choices The list of available sizes
* @return The video size
*/
private static Size chooseVideoSize(Size[] choices) {
for (Size size : choices) {
if (size.getWidth() == size.getHeight() * 4 / 3 && size.getWidth() <= 1080) {
return size;
}
}
Log.e(TAG, "Couldn't find any suitable video size");
return choices[choices.length - 1];
}
/**
* Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
* width and height are at least as large as the respective requested values, and whose aspect
* ratio matches with the specified value.
*
* @param choices The list of sizes that the camera supports for the intended output class
* @param width The minimum desired width
* @param height The minimum desired height
* @param aspectRatio The aspect ratio
* @return The optimal {@code Size}, or an arbitrary one if none were big enough
*/
private static Size chooseOptimalSize(Size[] choices, int width, int height, Size aspectRatio) {
// Collect the supported resolutions that are at least as big as the preview Surface
List<Size> bigEnough = new ArrayList<Size>();
int w = aspectRatio.getWidth();
int h = aspectRatio.getHeight();
for (Size option : choices) {
if (option.getHeight() == option.getWidth() * h / w &&
option.getWidth() >= width && option.getHeight() >= height) {
bigEnough.add(option);
}
}
// Pick the smallest of those, assuming we found any
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizesByArea());
} else {
Log.e(TAG, "Couldn't find any suitable preview size");
return choices[0];
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
return inflater.inflate(R.layout.fragment_camera2_video, container, false);
}
@Override
public void onViewCreated(final View view, Bundle savedInstanceState) {
mTextureView = (AutoFitTextureView) view.findViewById(R.id.texture);
mButtonVideo = (Button) view.findViewById(R.id.video);
mButtonVideo.setOnClickListener(this);
view.findViewById(R.id.info).setOnClickListener(this);
}
@Override
public void onResume() {
super.onResume();
startBackgroundThread();
if (mTextureView.isAvailable()) {
openCamera(mTextureView.getWidth(), mTextureView.getHeight());
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
@Override
public void onPause() {
closeCamera();
stopBackgroundThread();
super.onPause();
}
@Override
public void onClick(View view) {
switch (view.getId()) {
case R.id.video: {
if (mIsRecordingVideo) {
stopRecordingVideo();
} else {
startRecordingVideo();
}
break;
}
case R.id.info: {
Activity activity = getActivity();
if (null != activity) {
new AlertDialog.Builder(activity)
.setMessage(R.string.intro_message)
.setPositiveButton(android.R.string.ok, null)
.show();
}
break;
}
}
}
/**
* Starts a background thread and its {@link Handler}.
*/
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
/**
* Stops the background thread and its {@link Handler}.
*/
private void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
/**
* Gets whether you should show UI with rationale for requesting permissions.
*
* @param permissions The permissions your app wants to request.
* @return Whether you can show permission rationale UI.
*/
private boolean shouldShowRequestPermissionRationale(String[] permissions) {
for (String permission : permissions) {
if (FragmentCompat.shouldShowRequestPermissionRationale(this, permission)) {
return true;
}
}
return false;
}
/**
* Requests permissions needed for recording video.
*/
private void requestVideoPermissions() {
if (shouldShowRequestPermissionRationale(VIDEO_PERMISSIONS)) {
new ConfirmationDialog().show(getChildFragmentManager(), FRAGMENT_DIALOG);
} else {
FragmentCompat.requestPermissions(this, VIDEO_PERMISSIONS, REQUEST_VIDEO_PERMISSIONS);
}
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
@NonNull int[] grantResults) {
Log.d(TAG, "onRequestPermissionsResult");
if (requestCode == REQUEST_VIDEO_PERMISSIONS) {
if (grantResults.length == VIDEO_PERMISSIONS.length) {
for (int result : grantResults) {
if (result != PackageManager.PERMISSION_GRANTED) {
ErrorDialog.newInstance(getString(R.string.permission_request))
.show(getChildFragmentManager(), FRAGMENT_DIALOG);
break;
}
}
} else {
ErrorDialog.newInstance(getString(R.string.permission_request))
.show(getChildFragmentManager(), FRAGMENT_DIALOG);
}
} else {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
}
}
private boolean hasPermissionsGranted(String[] permissions) {
for (String permission : permissions) {
if (ActivityCompat.checkSelfPermission(getActivity(), permission)
!= PackageManager.PERMISSION_GRANTED) {
return false;
}
}
return true;
}
/**
* Tries to open a {@link CameraDevice}. The result is listened by `mStateCallback`.
*/
private void openCamera(int width, int height) {
if (!hasPermissionsGranted(VIDEO_PERMISSIONS)) {
requestVideoPermissions();
return;
}
final Activity activity = getActivity();
if (null == activity || activity.isFinishing()) {
return;
}
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
Log.d(TAG, "tryAcquire");
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
String cameraId = manager.getCameraIdList()[0];
// Choose the sizes for camera preview and video recording
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics
.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
mVideoSize = chooseVideoSize(map.getOutputSizes(MediaRecorder.class));
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
width, height, mVideoSize);
int orientation = getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
} else {
mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
}
configureTransform(width, height);
mMediaRecorder = new MediaRecorder();
manager.openCamera(cameraId, mStateCallback, null);
} catch (CameraAccessException e) {
Toast.makeText(activity, "Cannot access the camera.", Toast.LENGTH_SHORT).show();
activity.finish();
} catch (NullPointerException e) {
// Currently an NPE is thrown when the Camera2API is used but not supported on the
// device this code runs.
ErrorDialog.newInstance(getString(R.string.camera_error))
.show(getChildFragmentManager(), FRAGMENT_DIALOG);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera opening.");
}
}
private void closeCamera() {
try {
mCameraOpenCloseLock.acquire();
closePreviewSession();
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
if (null != mMediaRecorder) {
mMediaRecorder.release();
mMediaRecorder = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.");
} finally {
mCameraOpenCloseLock.release();
}
}
/**
* Start the camera preview.
*/
private void startPreview() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
return;
}
try {
closePreviewSession();
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
Surface previewSurface = new Surface(texture);
mPreviewBuilder.addTarget(previewSurface);
mCameraDevice.createCaptureSession(Arrays.asList(previewSurface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
mPreviewSession = cameraCaptureSession;
updatePreview();
}
@Override
public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
Activity activity = getActivity();
if (null != activity) {
Toast.makeText(activity, "Failed", Toast.LENGTH_SHORT).show();
}
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
/**
* Update the camera preview. {@link #startPreview()} needs to be called in advance.
*/
private void updatePreview() {
if (null == mCameraDevice) {
return;
}
try {
setUpCaptureRequestBuilder(mPreviewBuilder);
HandlerThread thread = new HandlerThread("CameraPreview");
thread.start();
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void setUpCaptureRequestBuilder(CaptureRequest.Builder builder) {
builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
}
/**
* Configures the necessary {#link android.graphics.Matrix} transformation to `mTextureView`.
* This method should not be called until the camera preview size is determined in
* openCamera, or until the size of `mTextureView` is fixed.
*
* #param viewWidth The width of `mTextureView`
* #param viewHeight The height of `mTextureView`
*/
private void configureTransform(int viewWidth, int viewHeight) {
Activity activity = getActivity();
if (null == mTextureView || null == mPreviewSize || null == activity) {
return;
}
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale = Math.max(
(float) viewHeight / mPreviewSize.getHeight(),
(float) viewWidth / mPreviewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
}
mTextureView.setTransform(matrix);
}
private void setUpMediaRecorder() throws IOException {
final Activity activity = getActivity();
if (null == activity) {
return;
}
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
if (mNextVideoAbsolutePath == null || mNextVideoAbsolutePath.isEmpty()) {
mNextVideoAbsolutePath = getVideoFilePath(getActivity());
}
mMediaRecorder.setOutputFile(mNextVideoAbsolutePath);
mMediaRecorder.setVideoEncodingBitRate(10000000);
mMediaRecorder.setVideoFrameRate(30);
mMediaRecorder.setVideoSize(mVideoSize.getWidth(), mVideoSize.getHeight());
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
switch (mSensorOrientation) {
case SENSOR_ORIENTATION_DEFAULT_DEGREES:
mMediaRecorder.setOrientationHint(DEFAULT_ORIENTATIONS.get(rotation));
break;
case SENSOR_ORIENTATION_INVERSE_DEGREES:
mMediaRecorder.setOrientationHint(INVERSE_ORIENTATIONS.get(rotation));
break;
}
mMediaRecorder.prepare();
}
private String getVideoFilePath(Context context) {
return context.getExternalFilesDir(null).getAbsolutePath() + "/"
+ System.currentTimeMillis() + ".mp4";
}
private void startRecordingVideo() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
return;
}
try {
closePreviewSession();
setUpMediaRecorder();
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
List<Surface> surfaces = new ArrayList<>();
// Set up Surface for the camera preview
Surface previewSurface = new Surface(texture);
surfaces.add(previewSurface);
mPreviewBuilder.addTarget(previewSurface);
// Set up Surface for the MediaRecorder
mRecorderSurface = mMediaRecorder.getSurface();
surfaces.add(mRecorderSurface);
mPreviewBuilder.addTarget(mRecorderSurface);
// Start a capture session
// Once the session starts, we can update the UI and start recording
mCameraDevice.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(#NonNull CameraCaptureSession cameraCaptureSession) {
mPreviewSession = cameraCaptureSession;
updatePreview();
getActivity().runOnUiThread(new Runnable() {
#Override
public void run() {
// UI
mButtonVideo.setText(R.string.stop);
mIsRecordingVideo = true;
// Start recording
mMediaRecorder.start();
}
});
}
#Override
public void onConfigureFailed(#NonNull CameraCaptureSession cameraCaptureSession) {
Activity activity = getActivity();
if (null != activity) {
Toast.makeText(activity, "Failed", Toast.LENGTH_SHORT).show();
}
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
private void closePreviewSession() {
if (mPreviewSession != null) {
mPreviewSession.close();
mPreviewSession = null;
}
}
private void stopRecordingVideo() {
// UI
mIsRecordingVideo = false;
mButtonVideo.setText(R.string.record);
// Stop recording
mMediaRecorder.stop();
mMediaRecorder.reset();
Activity activity = getActivity();
if (null != activity) {
Toast.makeText(activity, "Video saved: " + mNextVideoAbsolutePath,
Toast.LENGTH_SHORT).show();
Log.d(TAG, "Video saved: " + mNextVideoAbsolutePath);
}
mNextVideoAbsolutePath = null;
startPreview();
}
/**
* Compares two {#code Size}s based on their areas.
*/
static class CompareSizesByArea implements Comparator<Size> {
#Override
public int compare(Size lhs, Size rhs) {
// We cast here to ensure the multiplications won't overflow
return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
(long) rhs.getWidth() * rhs.getHeight());
}
}
public static class ErrorDialog extends DialogFragment {
private static final String ARG_MESSAGE = "message";
public static ErrorDialog newInstance(String message) {
ErrorDialog dialog = new ErrorDialog();
Bundle args = new Bundle();
args.putString(ARG_MESSAGE, message);
dialog.setArguments(args);
return dialog;
}
#Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
final Activity activity = getActivity();
return new AlertDialog.Builder(activity)
.setMessage(getArguments().getString(ARG_MESSAGE))
.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
#Override
public void onClick(DialogInterface dialogInterface, int i) {
activity.finish();
}
})
.create();
}
}
public static class ConfirmationDialog extends DialogFragment {
#Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
final Fragment parent = getParentFragment();
return new AlertDialog.Builder(getActivity())
.setMessage(R.string.permission_request)
.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
#Override
public void onClick(DialogInterface dialog, int which) {
FragmentCompat.requestPermissions(parent, VIDEO_PERMISSIONS,
REQUEST_VIDEO_PERMISSIONS);
}
})
.setNegativeButton(android.R.string.cancel,
new DialogInterface.OnClickListener() {
#Override
public void onClick(DialogInterface dialog, int which) {
parent.getActivity().finish();
}
})
.create();
}
}
}
Reference: Google's Camera2Video sample.
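Note that the snippet above uses a few fields that are defined earlier in the Camera2Video sample but do not appear in this excerpt, in particular the orientation constants consumed by setUpMediaRecorder(). A minimal sketch of those definitions, with values taken from the sample (treat them as assumptions and double-check against your copy):
// Sensor mounting angles handled by setUpMediaRecorder().
private static final int SENSOR_ORIENTATION_DEFAULT_DEGREES = 90;
private static final int SENSOR_ORIENTATION_INVERSE_DEGREES = 270;
// Screen-rotation -> orientation-hint tables for the two sensor mountings.
private static final SparseIntArray DEFAULT_ORIENTATIONS = new SparseIntArray();
private static final SparseIntArray INVERSE_ORIENTATIONS = new SparseIntArray();
static {
    DEFAULT_ORIENTATIONS.append(Surface.ROTATION_0, 90);
    DEFAULT_ORIENTATIONS.append(Surface.ROTATION_90, 0);
    DEFAULT_ORIENTATIONS.append(Surface.ROTATION_180, 270);
    DEFAULT_ORIENTATIONS.append(Surface.ROTATION_270, 180);
    INVERSE_ORIENTATIONS.append(Surface.ROTATION_0, 270);
    INVERSE_ORIENTATIONS.append(Surface.ROTATION_90, 180);
    INVERSE_ORIENTATIONS.append(Surface.ROTATION_180, 90);
    INVERSE_ORIENTATIONS.append(Surface.ROTATION_270, 0);
}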

How to capture Image without a button click using Camera2 API in Android

Does anybody have experience using the Camera2 API? I want to capture an image without user interaction (no button click), from a background Service. I have used Google's Camera2 API sample project. Right now I can only capture images from an Activity, on a button click.
Below is my code:
public class Camera2BasicFragment extends Fragment
implements View.OnClickListener, FragmentCompat.OnRequestPermissionsResultCallback {
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
private static final int REQUEST_CAMERA_PERMISSION = 1;
private static final String FRAGMENT_DIALOG = "dialog";
int mPictureCounter = 0;
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
private static final String TAG = "Camera2BasicFragment";
private static final int STATE_PREVIEW = 0;
private static final int STATE_WAITING_LOCK = 1;
private static final int STATE_WAITING_PRECAPTURE = 2;
private static final int STATE_WAITING_NON_PRECAPTURE = 3;
private static final int STATE_PICTURE_TAKEN = 4;
private static final int MAX_PREVIEW_WIDTH = 1920;
private static final int MAX_PREVIEW_HEIGHT = 1080;
private final TextureView.SurfaceTextureListener mSurfaceTextureListener
= new TextureView.SurfaceTextureListener() {
#Override
public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
openCamera(width, height);
}
#Override
public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {
configureTransform(width, height);
}
#Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) {
return true;
}
#Override
public void onSurfaceTextureUpdated(SurfaceTexture texture) {
}
};
private String mCameraId;
private AutoFitTextureView mTextureView;
private CameraCaptureSession mCaptureSession;
private CameraDevice mCameraDevice;
private Size mPreviewSize;
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
#Override
public void onOpened(#NonNull CameraDevice cameraDevice) {
mCameraOpenCloseLock.release();
mCameraDevice = cameraDevice;
createCameraPreviewSession();
}
#Override
public void onDisconnected(#NonNull CameraDevice cameraDevice) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
}
#Override
public void onError(#NonNull CameraDevice cameraDevice, int error) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
Activity activity = getActivity();
if (null != activity) {
activity.finish();
}
}
};
private HandlerThread mBackgroundThread;
private Handler mBackgroundHandler;
private ImageReader mImageReader;
private File mFile;
private final ImageReader.OnImageAvailableListener mOnImageAvailableListener
= new ImageReader.OnImageAvailableListener() {
#Override
public void onImageAvailable(ImageReader reader) {
Calendar c = Calendar.getInstance();
mFile = new File(getActivity().getExternalFilesDir(null), "pic" + c.get(Calendar.MILLISECOND) + "" + c.get(Calendar.SECOND) + "" + c.get(Calendar.MINUTE) + "" + c.get(Calendar.HOUR_OF_DAY) + "" + c.get(Calendar.DAY_OF_MONTH) + "" + c.get(Calendar.MONTH) + "" + c.get(Calendar.YEAR) + ".jpg");
Log.e("TestingProject", "\n\n\n--------------------------------**********************************\n" +
"Files Absolute Path : " + mFile.getAbsolutePath()
+ "\n**********************************--------------------------------\n\n\n");
mBackgroundHandler.post(new ImageSaver(reader.acquireNextImage(), mFile));
}
};
private CaptureRequest.Builder mPreviewRequestBuilder;
private CaptureRequest mPreviewRequest;
private int mState = STATE_PREVIEW;
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
private boolean mFlashSupported;
private int mSensorOrientation;
private CameraCaptureSession.CaptureCallback mCaptureCallback
= new CameraCaptureSession.CaptureCallback() {
private void process(CaptureResult result) {
switch (mState) {
case STATE_PREVIEW: {
break;
}
case STATE_WAITING_LOCK: {
Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
if (afState == null) {
captureStillPicture();
} else if (CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED == afState ||
CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED == afState) {
Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
if (aeState == null ||
aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) {
mState = STATE_PICTURE_TAKEN;
captureStillPicture();
} else {
runPrecaptureSequence();
}
}
break;
}
case STATE_WAITING_PRECAPTURE: {
Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
if (aeState == null ||
aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE ||
aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED) {
mState = STATE_WAITING_NON_PRECAPTURE;
}
break;
}
case STATE_WAITING_NON_PRECAPTURE: {
Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
mState = STATE_PICTURE_TAKEN;
captureStillPicture();
}
break;
}
}
}
#Override
public void onCaptureProgressed(#NonNull CameraCaptureSession session,
#NonNull CaptureRequest request,
#NonNull CaptureResult partialResult) {
process(partialResult);
}
#Override
public void onCaptureCompleted(#NonNull CameraCaptureSession session,
#NonNull CaptureRequest request,
#NonNull TotalCaptureResult result) {
process(result);
}
};
private void showToast(final String text) {
final Activity activity = getActivity();
if (activity != null) {
activity.runOnUiThread(new Runnable() {
#Override
public void run() {
Toast.makeText(activity, text, Toast.LENGTH_SHORT).show();
}
});
}
}
private static Size chooseOptimalSize(Size[] choices, int textureViewWidth,
int textureViewHeight, int maxWidth, int maxHeight, Size aspectRatio) {
List<Size> bigEnough = new ArrayList<>();
List<Size> notBigEnough = new ArrayList<>();
int w = aspectRatio.getWidth();
int h = aspectRatio.getHeight();
for (Size option : choices) {
if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight &&
option.getHeight() == option.getWidth() * h / w) {
if (option.getWidth() >= textureViewWidth &&
option.getHeight() >= textureViewHeight) {
bigEnough.add(option);
} else {
notBigEnough.add(option);
}
}
}
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizesByArea());
} else if (notBigEnough.size() > 0) {
return Collections.max(notBigEnough, new CompareSizesByArea());
} else {
Log.e(TAG, "Couldn't find any suitable preview size");
return choices[0];
}
}
public static Camera2BasicFragment newInstance() {
return new Camera2BasicFragment();
}
#Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
return inflater.inflate(R.layout.fragment_camera2_basic, container, false);
}
#Override
public void onViewCreated(final View view, Bundle savedInstanceState) {
view.findViewById(R.id.picture).setOnClickListener(this);
view.findViewById(R.id.info).setOnClickListener(this);
mTextureView = (AutoFitTextureView) view.findViewById(R.id.texture);
}
#Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
Calendar c = Calendar.getInstance();
mFile = new File(getActivity().getExternalFilesDir(null), "proof_" + c.get(Calendar.MILLISECOND) + c.get(Calendar.SECOND) + c.get(Calendar.MINUTE) + c.get(Calendar.HOUR_OF_DAY) + c.get(Calendar.DAY_OF_MONTH) + c.get(Calendar.MONTH) + c.get(Calendar.YEAR) + ".jpg");
Log.e("TestingProject", "\n\n\n--------------------------------**********************************\n" +
"File Path : " + mFile.getAbsolutePath()
+ "\n**********************************--------------------------------\n\n\n");
}
#Override
public void onResume() {
super.onResume();
startBackgroundThread();
if (mTextureView.isAvailable()) {
openCamera(mTextureView.getWidth(), mTextureView.getHeight());
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
#Override
public void onPause() {
closeCamera();
stopBackgroundThread();
super.onPause();
}
private void requestCameraPermission() {
if (FragmentCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.CAMERA)) {
new ConfirmationDialog().show(getChildFragmentManager(), FRAGMENT_DIALOG);
} else {
FragmentCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA},
REQUEST_CAMERA_PERMISSION);
}
}
#Override
public void onRequestPermissionsResult(int requestCode, #NonNull String[] permissions,
#NonNull int[] grantResults) {
if (requestCode == REQUEST_CAMERA_PERMISSION) {
if (grantResults.length != 1 || grantResults[0] != PackageManager.PERMISSION_GRANTED) {
ErrorDialog.newInstance(getString(R.string.request_permission))
.show(getChildFragmentManager(), FRAGMENT_DIALOG);
}
} else {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
}
}
private void setUpCameraOutputs(int width, int height) {
Activity activity = getActivity();
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
for (String cameraId : manager.getCameraIdList()) {
CameraCharacteristics characteristics
= manager.getCameraCharacteristics(cameraId);
Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
continue;
}
StreamConfigurationMap map = characteristics.get(
CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map == null) {
continue;
}
Size largest = Collections.max(
Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
new CompareSizesByArea());
mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
ImageFormat.JPEG, /*maxImages*/2);
mImageReader.setOnImageAvailableListener(
mOnImageAvailableListener, mBackgroundHandler);
int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
boolean swappedDimensions = false;
switch (displayRotation) {
case Surface.ROTATION_0:
case Surface.ROTATION_180:
if (mSensorOrientation == 90 || mSensorOrientation == 270) {
swappedDimensions = true;
}
break;
case Surface.ROTATION_90:
case Surface.ROTATION_270:
if (mSensorOrientation == 0 || mSensorOrientation == 180) {
swappedDimensions = true;
}
break;
default:
Log.e(TAG, "Display rotation is invalid: " + displayRotation);
}
Point displaySize = new Point();
activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
int rotatedPreviewWidth = width;
int rotatedPreviewHeight = height;
int maxPreviewWidth = displaySize.x;
int maxPreviewHeight = displaySize.y;
if (swappedDimensions) {
rotatedPreviewWidth = height;
rotatedPreviewHeight = width;
maxPreviewWidth = displaySize.y;
maxPreviewHeight = displaySize.x;
}
if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
maxPreviewWidth = MAX_PREVIEW_WIDTH;
}
if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
maxPreviewHeight = MAX_PREVIEW_HEIGHT;
}
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth,
maxPreviewHeight, largest);
int orientation = getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
mTextureView.setAspectRatio(
mPreviewSize.getWidth(), mPreviewSize.getHeight());
} else {
mTextureView.setAspectRatio(
mPreviewSize.getHeight(), mPreviewSize.getWidth());
}
Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
mFlashSupported = available == null ? false : available;
mCameraId = cameraId;
return;
}
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (NullPointerException e) {
ErrorDialog.newInstance(getString(R.string.camera_error))
.show(getChildFragmentManager(), FRAGMENT_DIALOG);
}
}
private void openCamera(int width, int height) {
if (ContextCompat.checkSelfPermission(getActivity(), Manifest.permission.CAMERA)
!= PackageManager.PERMISSION_GRANTED) {
requestCameraPermission();
return;
}
setUpCameraOutputs(width, height);
configureTransform(width, height);
Activity activity = getActivity();
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
manager.openCamera(mCameraId, mStateCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
}
}
private void closeCamera() {
try {
mCameraOpenCloseLock.acquire();
if (null != mCaptureSession) {
mCaptureSession.close();
mCaptureSession = null;
}
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
if (null != mImageReader) {
mImageReader.close();
mImageReader = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
} finally {
mCameraOpenCloseLock.release();
}
}
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
private void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private void createCameraPreviewSession() {
try {
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface surface = new Surface(texture);
mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.addTarget(surface);
mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()),
new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(#NonNull CameraCaptureSession cameraCaptureSession) {
if (null == mCameraDevice) {
return;
}
mCaptureSession = cameraCaptureSession;
try {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
setAutoFlash(mPreviewRequestBuilder);
mPreviewRequest = mPreviewRequestBuilder.build();
mCaptureSession.setRepeatingRequest(mPreviewRequest,
mCaptureCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
#Override
public void onConfigureFailed(
#NonNull CameraCaptureSession cameraCaptureSession) {
showToast("Failed");
}
}, null
);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void configureTransform(int viewWidth, int viewHeight) {
Activity activity = getActivity();
if (null == mTextureView || null == mPreviewSize || null == activity) {
return;
}
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale = Math.max(
(float) viewHeight / mPreviewSize.getHeight(),
(float) viewWidth / mPreviewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
} else if (Surface.ROTATION_180 == rotation) {
matrix.postRotate(180, centerX, centerY);
}
mTextureView.setTransform(matrix);
}
private void takePicture() {
lockFocus();
}
private void lockFocus() {
try {
List<CaptureRequest> captureList = new ArrayList<CaptureRequest>();
mPreviewRequestBuilder.addTarget(mImageReader.getSurface());
for (int i = 0; i < 10; i++) {
captureList.add(mPreviewRequestBuilder.build());
}
mCaptureSession.stopRepeating();
mCaptureSession.captureBurst(captureList, cameraCaptureCallback, null);
mPreviewRequestBuilder.removeTarget(mImageReader.getSurface());
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
CameraCaptureSession.CaptureCallback cameraCaptureCallback = new CameraCaptureSession.CaptureCallback() {
#Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,TotalCaptureResult result) {
Log.e("TestingProject", "pic # " + (mPictureCounter + 1));
mPictureCounter++;
if (mPictureCounter >= 10)
unlockFocus();
}
};
private void runPrecaptureSequence() {
try {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
mState = STATE_WAITING_PRECAPTURE;
mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback,
mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void captureStillPicture() {
try {
final Activity activity = getActivity();
if (null == activity || null == mCameraDevice) {return;}
final CaptureRequest.Builder captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(mImageReader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
setAutoFlash(captureBuilder);
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getOrientation(rotation));
CameraCaptureSession.CaptureCallback CaptureCallback
= new CameraCaptureSession.CaptureCallback() {
#Override
public void onCaptureCompleted(#NonNull CameraCaptureSession session,
#NonNull CaptureRequest request,
#NonNull TotalCaptureResult result) {
showToast("Saved: " + mFile);
Log.d(TAG, mFile.toString());
unlockFocus();
}
};
mCaptureSession.stopRepeating();
mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private int getOrientation(int rotation) {
return (ORIENTATIONS.get(rotation) + mSensorOrientation + 270) % 360;
}
private void unlockFocus() {
try {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
setAutoFlash(mPreviewRequestBuilder);
mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback,
mBackgroundHandler);
mState = STATE_PREVIEW;
mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback,
mBackgroundHandler);
} catch (CameraAccessException e) {
Log.e("TestingProject", "BHAND... "+e.getMessage());
}
}
#Override
public void onClick(View view) {
switch (view.getId()) {
case R.id.picture: {
takePicture();
break;
}
case R.id.info: {
Activity activity = getActivity();
if (null != activity) {
new AlertDialog.Builder(activity)
.setMessage(R.string.intro_message)
.setPositiveButton(android.R.string.ok, null)
.show();
}
break;
}
}
}
private void setAutoFlash(CaptureRequest.Builder requestBuilder) {
if (mFlashSupported) {
requestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
}
}
private static class ImageSaver implements Runnable {
private final Image mImage;
private final File mFile;
public ImageSaver(Image image, File file) {
mImage = image;
mFile = file;
}
#Override
public void run() {
ByteBuffer buffer = mImage.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.remaining()];
buffer.get(bytes);
FileOutputStream output = null;
try {
output = new FileOutputStream(mFile);
output.write(bytes);
} catch (IOException e) {
e.printStackTrace();
} finally {
mImage.close();
if (null != output) {
try {
output.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}
static class CompareSizesByArea implements Comparator<Size> {
#Override
public int compare(Size lhs, Size rhs) {
return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
(long) rhs.getWidth() * rhs.getHeight());
}
}
public static class ErrorDialog extends DialogFragment {
private static final String ARG_MESSAGE = "message";
public static ErrorDialog newInstance(String message) {
ErrorDialog dialog = new ErrorDialog();
Bundle args = new Bundle();
args.putString(ARG_MESSAGE, message);
dialog.setArguments(args);
return dialog;
}
#Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
final Activity activity = getActivity();
return new AlertDialog.Builder(activity)
.setMessage(getArguments().getString(ARG_MESSAGE))
.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
#Override
public void onClick(DialogInterface dialogInterface, int i) {
activity.finish();
}
})
.create();
}
}
public static class ConfirmationDialog extends DialogFragment {
#Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
final Fragment parent = getParentFragment();
return new AlertDialog.Builder(getActivity())
.setMessage(R.string.request_permission)
.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
#Override
public void onClick(DialogInterface dialog, int which) {
FragmentCompat.requestPermissions(parent,
new String[]{Manifest.permission.CAMERA},
REQUEST_CAMERA_PERMISSION);
}
})
.setNegativeButton(android.R.string.cancel,
new DialogInterface.OnClickListener() {
#Override
public void onClick(DialogInterface dialog, int which) {
Activity activity = parent.getActivity();
if (activity != null) {
activity.finish();
}
}
})
.create();
}
}
}
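As a rough sketch of how the button dependency could be removed (an assumption, not part of the original post): takePicture() only needs the repeating preview to be running, so it can be invoked from code instead of from the click listener, for example after a short delay so AF/AE have time to settle. The helper name and the 2-second delay below are made up for illustration; running this from a background Service would additionally need an output Surface that does not depend on an Activity (for instance an ImageReader-only session).
// Hypothetical helper: trigger a capture ~2 s after the preview is up,
// e.g. call this from onConfigured() in createCameraPreviewSession()
// instead of waiting for the R.id.picture click.
private void scheduleAutoCapture() {
    new Handler(Looper.getMainLooper()).postDelayed(new Runnable() {
        @Override
        public void run() {
            if (mCameraDevice != null && mCaptureSession != null) {
                takePicture(); // same path the button click takes
            }
        }
    }, 2000);
}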

Camera2 is capturing image before auto-focus is locked and flash is fired

I have implemented the Google Camera2 API sample as it is; the fragment below is how I access camera2.
The problem: the camera captures the image first, and focus is locked and the flash fired only afterwards.
public class Camera2BasicFragment extends Fragment
implements View.OnClickListener, FragmentCompat.OnRequestPermissionsResultCallback {
/**
* Conversion from screen rotation to JPEG orientation.
*/
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
private static final int REQUEST_CAMERA_PERMISSION = 1;
private static final String FRAGMENT_DIALOG = "dialog";
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
private static final String TAG = "Camera2BasicFragment";
private static final int STATE_PREVIEW = 0;
private static final int STATE_WAITING_LOCK = 1;
private static final int STATE_WAITING_PRECAPTURE = 2;
private static final int STATE_WAITING_NON_PRECAPTURE = 3;
private static final int STATE_PICTURE_TAKEN = 4;
private static final int MAX_PREVIEW_WIDTH = 1920;
private static final int MAX_PREVIEW_HEIGHT = 1080;
private final TextureView.SurfaceTextureListener mSurfaceTextureListener
= new TextureView.SurfaceTextureListener() {
#Override
public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
openCamera(width, height);
}
#Override
public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {
configureTransform(width, height);
}
#Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) {
return true;
}
#Override
public void onSurfaceTextureUpdated(SurfaceTexture texture) {
}
};
private String mCameraId;
private AutoFitTextureView mTextureView;
private CameraCaptureSession mCaptureSession;
private CameraDevice mCameraDevice;
private Size mPreviewSize;
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
#Override
public void onOpened(#NonNull CameraDevice cameraDevice) {
// This method is called when the camera is opened. We start camera preview here.
mCameraOpenCloseLock.release();
mCameraDevice = cameraDevice;
createCameraPreviewSession();
}
#Override
public void onDisconnected(#NonNull CameraDevice cameraDevice) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
}
#Override
public void onError(#NonNull CameraDevice cameraDevice, int error) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
Activity activity = getActivity();
if (null != activity) {
activity.finish();
}
}
};
private HandlerThread mBackgroundThread;
private Handler mBackgroundHandler;
private ImageReader mImageReader;
private File mFile;
private final ImageReader.OnImageAvailableListener mOnImageAvailableListener
= new ImageReader.OnImageAvailableListener() {
#Override
public void onImageAvailable(ImageReader reader) {
mBackgroundHandler.post(new ImageSaver(reader.acquireNextImage(), mFile));
}
};
private CaptureRequest.Builder mPreviewRequestBuilder;
private CaptureRequest mPreviewRequest;
private int mState = STATE_PREVIEW;
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
private boolean mFlashSupported;
private int mSensorOrientation;
private CameraCaptureSession.CaptureCallback mCaptureCallback
= new CameraCaptureSession.CaptureCallback() {
private void process(CaptureResult result) {
switch (mState) {
case STATE_PREVIEW: {
// We have nothing to do when the camera preview is working normally.
break;
}
case STATE_WAITING_LOCK: {
Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
if (afState == null) {
captureStillPicture();
} else if (CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED == afState ||
CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED == afState) {
// CONTROL_AE_STATE can be null on some devices
Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
if (aeState == null ||
aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) {
mState = STATE_PICTURE_TAKEN;
captureStillPicture();
} else {
runPrecaptureSequence();
}
}
break;
}
case STATE_WAITING_PRECAPTURE: {
// CONTROL_AE_STATE can be null on some devices
Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
if (aeState == null ||
aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE ||
aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED) {
mState = STATE_WAITING_NON_PRECAPTURE;
}
break;
}
case STATE_WAITING_NON_PRECAPTURE: {
// CONTROL_AE_STATE can be null on some devices
Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
mState = STATE_PICTURE_TAKEN;
captureStillPicture();
}
break;
}
}
}
#Override
public void onCaptureProgressed(#NonNull CameraCaptureSession session,
#NonNull CaptureRequest request,
#NonNull CaptureResult partialResult) {
process(partialResult);
}
#Override
public void onCaptureCompleted(#NonNull CameraCaptureSession session,
#NonNull CaptureRequest request,
#NonNull TotalCaptureResult result) {
process(result);
}
};
private void showToast(final String text) {
final Activity activity = getActivity();
if (activity != null) {
activity.runOnUiThread(new Runnable() {
#Override
public void run() {
Toast.makeText(activity, text, Toast.LENGTH_SHORT).show();
}
});
}
}
private static Size chooseOptimalSize(Size[] choices, int textureViewWidth,
int textureViewHeight, int maxWidth, int maxHeight, Size aspectRatio) {
// Collect the supported resolutions that are at least as big as the preview Surface
List<Size> bigEnough = new ArrayList<>();
// Collect the supported resolutions that are smaller than the preview Surface
List<Size> notBigEnough = new ArrayList<>();
int w = aspectRatio.getWidth();
int h = aspectRatio.getHeight();
for (Size option : choices) {
if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight &&
option.getHeight() == option.getWidth() * h / w) {
if (option.getWidth() >= textureViewWidth &&
option.getHeight() >= textureViewHeight) {
bigEnough.add(option);
} else {
notBigEnough.add(option);
}
}
}
// Pick the smallest of those big enough. If there is no one big enough, pick the
// largest of those not big enough.
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizesByArea());
} else if (notBigEnough.size() > 0) {
return Collections.max(notBigEnough, new CompareSizesByArea());
} else {
Log.e(TAG, "Couldn't find any suitable preview size");
return choices[0];
}
}
public static Camera2BasicFragment newInstance() {
return new Camera2BasicFragment();
}
#Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
return inflater.inflate(R.layout.fragment_camera2_basic, container, false);
}
#Override
public void onViewCreated(final View view, Bundle savedInstanceState) {
view.findViewById(R.id.picture).setOnClickListener(this);
view.findViewById(R.id.info).setOnClickListener(this);
mTextureView = (AutoFitTextureView) view.findViewById(R.id.texture);
}
#Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
mFile = new File(getActivity().getExternalFilesDir(null), "pic.jpg");
}
#Override
public void onResume() {
super.onResume();
startBackgroundThread();
if (mTextureView.isAvailable()) {
openCamera(mTextureView.getWidth(), mTextureView.getHeight());
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
#Override
public void onPause() {
closeCamera();
stopBackgroundThread();
super.onPause();
}
private void requestCameraPermission() {
if (FragmentCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.CAMERA)) {
new ConfirmationDialog().show(getChildFragmentManager(), FRAGMENT_DIALOG);
} else {
FragmentCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA},
REQUEST_CAMERA_PERMISSION);
}
}
#Override
public void onRequestPermissionsResult(int requestCode, #NonNull String[] permissions,
#NonNull int[] grantResults) {
if (requestCode == REQUEST_CAMERA_PERMISSION) {
if (grantResults.length != 1 || grantResults[0] != PackageManager.PERMISSION_GRANTED) {
ErrorDialog.newInstance(getString(R.string.request_permission))
.show(getChildFragmentManager(), FRAGMENT_DIALOG);
}
} else {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
}
}
private void setUpCameraOutputs(int width, int height) {
Activity activity = getActivity();
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
for (String cameraId : manager.getCameraIdList()) {
CameraCharacteristics characteristics
= manager.getCameraCharacteristics(cameraId);
Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
continue;
}
StreamConfigurationMap map = characteristics.get(
CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map == null) {
continue;
}
// For still image captures, we use the largest available size.
Size largest = Collections.max(
Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
new CompareSizesByArea());
mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
ImageFormat.JPEG, /*maxImages*/2);
mImageReader.setOnImageAvailableListener(
mOnImageAvailableListener, mBackgroundHandler);
// Find out if we need to swap dimension to get the preview size relative to sensor
// coordinate.
int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
//noinspection ConstantConditions
mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
boolean swappedDimensions = false;
switch (displayRotation) {
case Surface.ROTATION_0:
case Surface.ROTATION_180:
if (mSensorOrientation == 90 || mSensorOrientation == 270) {
swappedDimensions = true;
}
break;
case Surface.ROTATION_90:
case Surface.ROTATION_270:
if (mSensorOrientation == 0 || mSensorOrientation == 180) {
swappedDimensions = true;
}
break;
default:
Log.e(TAG, "Display rotation is invalid: " + displayRotation);
}
Point displaySize = new Point();
activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
int rotatedPreviewWidth = width;
int rotatedPreviewHeight = height;
int maxPreviewWidth = displaySize.x;
int maxPreviewHeight = displaySize.y;
if (swappedDimensions) {
rotatedPreviewWidth = height;
rotatedPreviewHeight = width;
maxPreviewWidth = displaySize.y;
maxPreviewHeight = displaySize.x;
}
if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
maxPreviewWidth = MAX_PREVIEW_WIDTH;
}
if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
maxPreviewHeight = MAX_PREVIEW_HEIGHT;
}
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth,
maxPreviewHeight, largest);
// We fit the aspect ratio of TextureView to the size of preview we picked.
int orientation = getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
mTextureView.setAspectRatio(
mPreviewSize.getWidth(), mPreviewSize.getHeight());
} else {
mTextureView.setAspectRatio(
mPreviewSize.getHeight(), mPreviewSize.getWidth());
}
// Check if the flash is supported.
Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
mFlashSupported = available == null ? false : available;
mCameraId = cameraId;
return;
}
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (NullPointerException e) {
// Currently an NPE is thrown when the Camera2API is used but not supported on the
// device this code runs.
ErrorDialog.newInstance(getString(R.string.camera_error))
.show(getChildFragmentManager(), FRAGMENT_DIALOG);
}
}
private void openCamera(int width, int height) {
if (ContextCompat.checkSelfPermission(getActivity(), Manifest.permission.CAMERA)
!= PackageManager.PERMISSION_GRANTED) {
requestCameraPermission();
return;
}
setUpCameraOutputs(width, height);
configureTransform(width, height);
Activity activity = getActivity();
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
manager.openCamera(mCameraId, mStateCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
}
}
private void closeCamera() {
try {
mCameraOpenCloseLock.acquire();
if (null != mCaptureSession) {
mCaptureSession.close();
mCaptureSession = null;
}
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
if (null != mImageReader) {
mImageReader.close();
mImageReader = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
} finally {
mCameraOpenCloseLock.release();
}
}
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
private void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private void createCameraPreviewSession() {
try {
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface surface = new Surface(texture);
mPreviewRequestBuilder
= mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.addTarget(surface);
mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()),
new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(#NonNull CameraCaptureSession cameraCaptureSession) {
// The camera is already closed
if (null == mCameraDevice) {
return;
}
mCaptureSession = cameraCaptureSession;
try {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
setAutoFlash(mPreviewRequestBuilder);
mPreviewRequest = mPreviewRequestBuilder.build();
mCaptureSession.setRepeatingRequest(mPreviewRequest,
mCaptureCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
#Override
public void onConfigureFailed(
#NonNull CameraCaptureSession cameraCaptureSession) {
showToast("Failed");
}
}, null
);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void takePicture() {
lockFocus();
}
private void lockFocus() {
try {
// This is how to tell the camera to lock focus.
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
CameraMetadata.CONTROL_AF_TRIGGER_START);
// Tell #mCaptureCallback to wait for the lock.
mState = STATE_WAITING_LOCK;
mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback,
mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void runPrecaptureSequence() {
try {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
mState = STATE_WAITING_PRECAPTURE;
mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback,
mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void captureStillPicture() {
try {
final Activity activity = getActivity();
if (null == activity || null == mCameraDevice) {
return;
}
// This is the CaptureRequest.Builder that we use to take a picture.
final CaptureRequest.Builder captureBuilder =
mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(mImageReader.getSurface());
// Use the same AE and AF modes as the preview.
captureBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
setAutoFlash(captureBuilder);
// Orientation
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getOrientation(rotation));
CameraCaptureSession.CaptureCallback CaptureCallback
= new CameraCaptureSession.CaptureCallback() {
#Override
public void onCaptureCompleted(#NonNull CameraCaptureSession session,
#NonNull CaptureRequest request,
#NonNull TotalCaptureResult result) {
showToast("Saved: " + mFile);
Log.d(TAG, mFile.toString());
unlockFocus();
}
};
mCaptureSession.stopRepeating();
mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private int getOrientation(int rotation) {
return (ORIENTATIONS.get(rotation) + mSensorOrientation + 270) % 360;
}
private void unlockFocus() {
try {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
setAutoFlash(mPreviewRequestBuilder);
mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback,
mBackgroundHandler);
mState = STATE_PREVIEW;
mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback,
mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
#Override
public void onClick(View view) {
switch (view.getId()) {
case R.id.picture: {
takePicture();
break;
}
case R.id.info: {
Activity activity = getActivity();
if (null != activity) {
new AlertDialog.Builder(activity)
.setMessage(R.string.intro_message)
.setPositiveButton(android.R.string.ok, null)
.show();
}
break;
}
}
}
private void setAutoFlash(CaptureRequest.Builder requestBuilder) {
if (mFlashSupported) {
requestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
}
}
private static class ImageSaver implements Runnable {
private final Image mImage;
private final File mFile;
public ImageSaver(Image image, File file) {
mImage = image;
mFile = file;
}
#Override
public void run() {
ByteBuffer buffer = mImage.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.remaining()];
buffer.get(bytes);
FileOutputStream output = null;
try {
output = new FileOutputStream(mFile);
output.write(bytes);
} catch (IOException e) {
e.printStackTrace();
} finally {
mImage.close();
if (null != output) {
try {
output.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}
static class CompareSizesByArea implements Comparator<Size> {
#Override
public int compare(Size lhs, Size rhs) {
// We cast here to ensure the multiplications won't overflow
return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
(long) rhs.getWidth() * rhs.getHeight());
}
}
}
How can I lock focus and fire the flash first, and only then capture the image?
Try it like this: send the AF trigger with a single capture() call and set mState to STATE_WAITING_LOCK, then reset the trigger to IDLE and restore the repeating request so the trigger is not re-fired on every preview frame while mCaptureCallback waits for the focus lock and runs the precapture/flash sequence before captureStillPicture():
private void lockFocus() {
try {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
// Tell #mCaptureCallback to wait for the lock.
mState = STATE_WAITING_LOCK;
mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, mBackgroundHandler);
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), mCaptureCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}

Android Camera2 Wrong Rotation

I'm trying to implement android.hardware.camera2, but I'm a little confused by it.
The preview rotates with the phone: before taking the picture, if I rotate the phone the preview rotates instead of keeping the same orientation.
Example image here.
I don't know why this happens; I don't have two layouts for the holder.
Camera XML:
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:fab="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".Camera" >
<TextureView
android:id="#+id/texture"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_alignParentTop="true" />
<com.getbase.floatingactionbutton.FloatingActionButton
android:id="#+id/btn_takepicture"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
fab:fab_icon="#drawable/ic_fab_foto"
fab:fab_colorNormal="#FFFF56B9"
fab:fab_colorPressed="#FFD5379B"
android:layout_alignParentBottom="true"
android:layout_centerHorizontal="true"
android:layout_marginBottom="16dp" />
<com.getbase.floatingactionbutton.FloatingActionButton
android:id="#+id/btn_switchcam"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
fab:fab_icon="#drawable/ic_fab_switch"
fab:fab_colorNormal="#267300"
fab:fab_colorPressed="#1e5b00"
fab:fab_size="mini"
android:layout_alignParentTop="true"
android:layout_alignParentRight="true"
android:layout_marginTop="16dp"
android:layout_marginRight="16dp" />
</RelativeLayout>
Camera Activity:
public class AppCamera extends AppCompatActivity {
private Size mPreviewSize;
private TextureView mTextureView;
private CameraDevice mCameraDevice;
private CaptureRequest.Builder mPreviewBuilder;
private CameraCaptureSession mPreviewSession;
private static int cam = 0;
private FloatingActionButton mBtnShot;
private FloatingActionButton mBtnSwitch;
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.camera);
mTextureView = (TextureView)findViewById(R.id.texture);
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
mBtnShot = (FloatingActionButton)findViewById(R.id.btn_takepicture);
mBtnSwitch = (FloatingActionButton)findViewById(R.id.btn_switchcam);
mBtnShot.setOnClickListener(new OnClickListener(){
#Override
public void onClick(View v) {
takePicture();
}
});
mBtnSwitch.setOnClickListener(new OnClickListener(){
#Override
public void onClick(View v) {
if (cam == 0) {
cam = 1;
} else {
cam = 0;
}
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
openCamera();
}
});
}
protected void takePicture() {
if(null == mCameraDevice) {
return;
}
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraDevice.getId());
Size[] jpegSizes = null;
if (characteristics != null) {
jpegSizes = characteristics
.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)
.getOutputSizes(ImageFormat.JPEG);
}
int width = 640;
int height = 480;
if (jpegSizes != null && 0 < jpegSizes.length) {
width = jpegSizes[0].getWidth();
height = jpegSizes[0].getHeight();
}
ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
List<Surface> outputSurfaces = new ArrayList<Surface>(2);
outputSurfaces.add(reader.getSurface());
outputSurfaces.add(new Surface(mTextureView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
// Orientation
int rotation = getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
final File file = new File(Environment.getExternalStorageDirectory()+"/DCIM", "teste.jpg");
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
#Override
public void onImageAvailable(ImageReader reader) {
Image image = null;
try {
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
save(bytes);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
if (image != null) {
image.close();
}
}
}
private void save(byte[] bytes) throws IOException {
OutputStream output = null;
try {
output = new FileOutputStream(file);
output.write(bytes);
} finally {
if (null != output) {
output.close();
}
}
}
};
HandlerThread thread = new HandlerThread("CameraPicture");
thread.start();
final Handler backgroudHandler = new Handler(thread.getLooper());
reader.setOnImageAvailableListener(readerListener, backgroudHandler);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
#Override
public void onCaptureCompleted(CameraCaptureSession session,
CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
Toast.makeText(AppCamera.this, "Saved:"+file, Toast.LENGTH_SHORT).show();
startPreview();
}
};
mCameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(CameraCaptureSession session) {
try {
session.capture(captureBuilder.build(), captureListener, backgroudHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
#Override
public void onConfigureFailed(CameraCaptureSession session) {
}
}, backgroudHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
#Override
protected void onResume() {
super.onResume();
}
private void openCamera() {
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
String cameraId = manager.getCameraIdList()[cam];
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
mPreviewSize = map.getOutputSizes(SurfaceTexture.class)[0];
manager.openCamera(cameraId, mStateCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener(){
#Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
openCamera();
}
#Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface,
int width, int height) {
}
#Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return false;
}
#Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
//Log.e(TAG, "onSurfaceTextureUpdated");
}
};
private CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
#Override
public void onOpened(CameraDevice camera) {
mCameraDevice = camera;
startPreview();
}
#Override
public void onDisconnected(CameraDevice camera) {
}
#Override
public void onError(CameraDevice camera, int error) {
}
};
#Override
protected void onPause() {
super.onPause();
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
}
protected void startPreview() {
if(null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
return;
}
SurfaceTexture texture = mTextureView.getSurfaceTexture();
if(null == texture) {
return;
}
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface surface = new Surface(texture);
try {
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
} catch (CameraAccessException e) {
e.printStackTrace();
}
mPreviewBuilder.addTarget(surface);
try {
mCameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(CameraCaptureSession session) {
mPreviewSession = session;
updatePreview();
}
#Override
public void onConfigureFailed(CameraCaptureSession session) {
Toast.makeText(AppCamera.this, "onConfigureFailed", Toast.LENGTH_LONG).show();
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
protected void updatePreview() {
if (null == mCameraDevice) {
return;
}
mPreviewBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
HandlerThread thread = new HandlerThread("CameraPreview");
thread.start();
Handler backgroundHandler = new Handler(thread.getLooper());
try {
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, backgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
}
If the "onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height)" is fired, call the following method:
/**
* Configures the necessary {#link android.graphics.Matrix} transformation to `mTextureView`.
* This method should be called after the camera preview size is determined in
* setUpCameraOutputs and also the size of `mTextureView` is fixed.
*
* #param viewWidth The width of `mTextureView`
* #param viewHeight The height of `mTextureView`
*/
private void configureTransform(int viewWidth, int viewHeight) {
Activity activity = getActivity();
if (null == mTextureView || null == mPreviewSize || null == activity) {
return;
}
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale = Math.max(
(float) viewHeight / mPreviewSize.getHeight(),
(float) viewWidth / mPreviewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
} else if (Surface.ROTATION_180 == rotation) {
matrix.postRotate(180, centerX, centerY);
}
mTextureView.setTransform(matrix);
}
For more info, have a look at the following file:
https://github.com/googlesamples/android-Camera2Basic/blob/master/Application/src/main/java/com/example/android/camera2basic/Camera2BasicFragment.java
Also, replace the ORIENTATIONS mapping with:
static {
ORIENTATIONS.append(Surface.ROTATION_0, 0);
ORIENTATIONS.append(Surface.ROTATION_90, 90);
ORIENTATIONS.append(Surface.ROTATION_180, 180);
ORIENTATIONS.append(Surface.ROTATION_270, 270);
}
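If the sensor is not mounted at the typical 90°, a fixed table alone may still be off; the Camera2Basic fragment quoted earlier on this page folds SENSOR_ORIENTATION into the JPEG orientation instead. A small sketch of that approach (the helper name is made up; the table is Camera2Basic's own mapping):
// Camera2Basic-style mapping from display rotation to a base JPEG rotation.
private static final SparseIntArray JPEG_ORIENTATIONS = new SparseIntArray();
static {
    JPEG_ORIENTATIONS.append(Surface.ROTATION_0, 90);
    JPEG_ORIENTATIONS.append(Surface.ROTATION_90, 0);
    JPEG_ORIENTATIONS.append(Surface.ROTATION_180, 270);
    JPEG_ORIENTATIONS.append(Surface.ROTATION_270, 180);
}

// sensorOrientation comes from CameraCharacteristics.SENSOR_ORIENTATION.
private int getJpegOrientation(int displayRotation, int sensorOrientation) {
    // Same formula as Camera2Basic's getOrientation(): combine the display
    // rotation with the sensor mounting angle.
    return (JPEG_ORIENTATIONS.get(displayRotation) + sensorOrientation + 270) % 360;
}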
