Camera2 pinch to zoom - android

I'm struggling to get pinch-to-zoom working under Camera2; the API is not very helpful, it has to be said, and other libraries, like CameraKit, don't do what I want.
I've looked at various pieces of code on this and other sites and have come up with the following, but I can't get anything working - not even a feed from the camera on the TextureView. (I'm not interested in saving the image from the camera, just displaying it and pinching to zoom.) This isn't a permissions problem, as I am asked whether I will allow camera access; it's just that nothing appears.
The relevant section of the layout xml file is as follows:
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:id="#+id/frlayout"
android:layout_weight="0.80"
android:layout_width="match_parent"
android:layout_height="0dp">
<TextureView
android:id="#+id/cameraView"
android:background="#android:color/transparent"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:adjustViewBounds="true" />
</FrameLayout>
The salient portions of the MainActivity are here:
public class MainActivity extends AppCompatActivity {
public float finger_spacing = 0;
public int zoom_level = 1;
String camerId;
CameraCharacteristics characteristics;
int PERMISSION_ALL = 1;
private Size previewsize;
private Size jpegSizes[]=null;
private TextureView textureView;
private CameraDevice cameraDevice;
private CaptureRequest.Builder previewBuilder;
private CameraCaptureSession previewSession;
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
if(ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED)
{
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, PERMISSION_ALL);
}
textureView=(TextureView)findViewById(R.id.cameraView);
textureView.setSurfaceTextureListener(surfaceTextureListener);
}
public void openCamera()
{
CameraManager manager=(CameraManager)getSystemService(Context.CAMERA_SERVICE);
try
{
camerId=manager.getCameraIdList()[0];
characteristics=manager.getCameraCharacteristics(camerId);
StreamConfigurationMap map=characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
previewsize=map.getOutputSizes(SurfaceTexture.class)[0];
if(ActivityCompat.checkSelfPermission(this, android.Manifest.permission.CAMERA)
== PackageManager.PERMISSION_GRANTED) {
manager.openCamera(camerId, stateCallback, null);
}
}catch (Exception e)
{
}
}
private TextureView.SurfaceTextureListener surfaceTextureListener=new TextureView.SurfaceTextureListener() {
#Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
openCamera();
}
#Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
}
#Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return false;
}
#Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
};
private CameraDevice.StateCallback stateCallback=new CameraDevice.StateCallback() {
#Override
public void onOpened(CameraDevice camera) {
cameraDevice=camera;
startCamera();
// With previewBuilder set up, we can now call the fn to initialise ontouch
setuptexturetouch();
}
#Override
public void onDisconnected(CameraDevice camera) {
}
#Override
public void onError(CameraDevice camera, int error) {
}
};
#Override
protected void onPause() {
super.onPause();
if(cameraDevice!=null)
{
cameraDevice.close();
}
}
void startCamera()
{
if(cameraDevice==null||!textureView.isAvailable()|| previewsize==null)
{
return;
}
SurfaceTexture texture=textureView.getSurfaceTexture();
if(texture==null)
{
return;
}
texture.setDefaultBufferSize(previewsize.getWidth(),previewsize.getHeight());
Surface surface=new Surface(texture);
//mPreviewRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
//mPreviewRequestBuilder.addTarget(surface);
try
{
previewBuilder=cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
}catch (Exception e)
{
}
previewBuilder.addTarget(surface);
try
{
cameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(CameraCaptureSession session) {
previewSession=session;
getChangedPreview();
}
#Override
public void onConfigureFailed(CameraCaptureSession session) {
}
},null);
}catch (Exception e)
{
}
}
void getChangedPreview()
{
if(cameraDevice==null)
{
return;
}
previewBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
HandlerThread thread=new HandlerThread("changed Preview");
thread.start();
Handler handler=new Handler(thread.getLooper());
try
{
previewSession.setRepeatingRequest(previewBuilder.build(), null, handler);
}catch (Exception e){}
}
void setuptexturetouch()
{
textureView.setOnTouchListener( new View.OnTouchListener()
{
#Override
public boolean onTouch( View v, MotionEvent event )
{
try {
Activity activity = MainActivity.this;
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(camerId);
}
catch(Exception e)
{
}
float maxzoom = (characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM))*10;
Rect m = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
int action = event.getAction();
float current_finger_spacing;
if (event.getPointerCount() > 1)
{
// Multi touch logic
current_finger_spacing = getFingerSpacing(event);
if(finger_spacing != 0){
if(current_finger_spacing > finger_spacing && maxzoom > zoom_level){
zoom_level++;
} else if (current_finger_spacing < finger_spacing && zoom_level > 1){
zoom_level--;
}
int minW = (int) (m.width() / maxzoom);
int minH = (int) (m.height() / maxzoom);
int difW = m.width() - minW;
int difH = m.height() - minH;
int cropW = difW /100 *(int)zoom_level;
int cropH = difH /100 *(int)zoom_level;
cropW -= cropW & 3;
cropH -= cropH & 3;
Rect zoom = new Rect(cropW, cropH, m.width() - cropW, m.height() - cropH);
previewBuilder.set(CaptureRequest.SCALER_CROP_REGION, zoom);
}
finger_spacing = current_finger_spacing;
}
else
{
if (action == MotionEvent.ACTION_UP)
{
//single touch logic
}
}
try
{
previewSession.setRepeatingRequest(previewBuilder.build(), mCaptureCallback, null);
}
catch (CameraAccessException e)
{
//e.printStackTrace();
}
catch (NullPointerException ex)
{
//ex.printStackTrace();
}
}
catch (Exception e)
{
//throw new RuntimeException("can not access camera.", e);
}
return true;
}
});
}
private float getFingerSpacing(MotionEvent event) {
float x = event.getX(0) - event.getX(1);
float y = event.getY(0) - event.getY(1);
return (float) Math.sqrt(x * x + y * y);
}
private CameraCaptureSession.CaptureCallback mCaptureCallback = new CameraCaptureSession.CaptureCallback()
{
private void process(CaptureResult result)
{
// Just a placeholder at present - the original code
// had switch statements to see if an image had been saved
// but I only want to view, not save.
}
#Override
public void onCaptureProgressed(CameraCaptureSession session,
CaptureRequest request,
CaptureResult partialResult) {
process(partialResult);
}
#Override
public void onCaptureCompleted(CameraCaptureSession session,
CaptureRequest request,
TotalCaptureResult result) {
process(result);
}
};
}
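For reference, the crop-region arithmetic that the onTouch handler above is built around can be written more directly. The following is only a minimal sketch, assuming the characteristics, previewBuilder and previewSession fields are initialised as in the code above; it maps a continuous zoom factor onto SCALER_CROP_REGION rather than stepping an integer zoom_level:
// Hypothetical helper: apply a digital zoom factor (1.0f = no zoom) by shrinking
// SCALER_CROP_REGION around the centre of the active sensor array.
private void applyZoom(float zoomFactor) throws CameraAccessException {
    float maxZoom = characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
    Rect active = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
    float zoom = Math.max(1f, Math.min(zoomFactor, maxZoom));
    int cropW = (int) (active.width() / zoom);
    int cropH = (int) (active.height() / zoom);
    int left = (active.width() - cropW) / 2;
    int top = (active.height() - cropH) / 2;
    previewBuilder.set(CaptureRequest.SCALER_CROP_REGION,
            new Rect(left, top, left + cropW, top + cropH));
    previewSession.setRepeatingRequest(previewBuilder.build(), null, null);
}
A pinch gesture can then drive zoomFactor by multiplying it by the ratio of the current finger spacing to the previous one, instead of incrementing zoom_level by whole steps.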

Related

Images from Camera2Api in Nexus 5 appear blurry

This is my first attempt at accessing the camera and capturing images using the Camera2 API.
I'm using a Nexus 5 running API 23.
I can get the camera preview, but I cannot understand why the frame gets locked after the capture request, or why the images that are generated are blurry/totally unclear.
I've been stuck on this issue for a while now. Any advice would be appreciated.
Attached is my MainActivity code:
public class MainActivity extends AppCompatActivity implements
View.OnClickListener{
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0,90);
ORIENTATIONS.append(Surface.ROTATION_90,0);
ORIENTATIONS.append(Surface.ROTATION_180,270);
ORIENTATIONS.append(Surface.ROTATION_270,180);
}
private static final String TAG = "MainActivity";
private static final int STATE_PREVIEW=0;
private static final int STATE_WAIT_AF_LOCK=1;
private static final int STATE_PICTURE_CAPTURED=2;
private static final int REQUEST_CAMERA_PERMISSION_RESULT = 0;
private static final int REQUEST_WRITE_EXTERNAL_STORAGE_PERMISSION_RESULT = 1;
private static final int REQUEST_READ_EXTERNAL_STORAGE_REQUEST = 2;
private int mState;
private static LruCache<String, Bitmap> mMemoryCache;
private HandlerThread mBackgroundThread;
private final Handler mUIHandler = new Handler(Looper.getMainLooper()){
#Override
public void handleMessage(Message msg){
swapImageAdapter();
}
};
private static File mImageFile;
private Handler mBackgroundHandler;
private TextureView mTextureView;
private Button mCaptureButton;
private File mImageFolder;
private static String mImageFileName;
private Size mPreviewSize; //mImageSize
private RecyclerView mRecycleView;
private String mCameraId;
private CameraDevice mCameraDevice;
private CaptureRequest mPreviewCaptureRequest;
private CaptureRequest.Builder mPreviewCaptureRequestBuilder;
private CameraCaptureSession mCameraCaptureSession;
private ImageReader mImageReader;
private ImageReader.OnImageAvailableListener mOnImageAvailableListener =
new ImageReader.OnImageAvailableListener(){
#Override
public void onImageAvailable(ImageReader reader){
Log.i(TAG,"OnImageAvailableListener!!!");
mBackgroundHandler.post(new
ImageSaver(reader.acquireNextImage(),mUIHandler));
}
};
private static class ImageSaver implements Runnable{
private final Image mImage;
private final Handler mHandler;
ImageSaver(Image image){
Log.i(TAG,"imgSaver const");
mImage=image;
mHandler=null;
}
ImageSaver(Image image,Handler handler){
Log.i(TAG,"imgSaver const");
mImage =image;
mHandler =handler;
}
#Override
public void run(){
Log.i(TAG,"In ImageSaver run()");
ByteBuffer byteBuffer=mImage.getPlanes()[0].getBuffer();
byteBuffer.rewind();
byte[] bytes = new byte[byteBuffer.capacity()];
byteBuffer.get(bytes);
FileOutputStream fileOutputStream = null;
try{
fileOutputStream =new FileOutputStream(mImageFileName);
fileOutputStream.write(bytes);
}catch(IOException e){
e.printStackTrace();
}
finally{
Log.i(TAG,"Closing Image!!");
mImage.close();
if(fileOutputStream!=null){
try {
fileOutputStream.close();
}catch(IOException e){
e.printStackTrace();
}
}
}
Message message = mHandler.obtainMessage();
Log.i(TAG,"sending Message from Image Saver run()");
message.sendToTarget();
}
}
private CameraCaptureSession.CaptureCallback mSessionCaptureCallback
= new CameraCaptureSession.CaptureCallback() {
#Override
public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
super.onCaptureStarted(session, request, timestamp, frameNumber);
Log.i(TAG,"mSessionCaptureCallback+ onCaptureStarted");
}
#Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result){
super.onCaptureCompleted(session,request,result);
Log.i(TAG,"mSessionCaptureCallback+ onCaptureCompleted");
process(result); //process the result once capture session request is completed.
Log.i(TAG,"mSessionCaptureCallback+ onCaptureCompleted ENDS!");
}
#Override
public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure){
super.onCaptureFailed(session, request, failure);
Toast.makeText(getApplicationContext(),"Focus Lock UnSucessfull!", Toast.LENGTH_SHORT).show();
}
private void process(CaptureResult result){
Log.i(TAG,"mSTATE_PICTURE_CAPTURED"+mState);
switch (mState){
case STATE_PREVIEW:
break;
case STATE_WAIT_AF_LOCK:
Log.i(TAG,"process_ wait_AF_LOCK");
Integer afState=result.get(CaptureResult.CONTROL_AF_STATE);
if(afState==CaptureRequest.CONTROL_AF_STATE_FOCUSED_LOCKED || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED){
Log.i(TAG,"Lock Sucessfull!");
Toast.makeText(getApplicationContext(),"Lock Sucessfull!", Toast.LENGTH_SHORT).show();
mState=STATE_PICTURE_CAPTURED;
captureStillImage();
}
Log.i(TAG,"Out of process_WAIT_AF_LOCK");
break;
case STATE_PICTURE_CAPTURED:
break;
}
}
};
private CameraDevice.StateCallback mCameraDeviceStateCallback
= new CameraDevice.StateCallback() {
#Override
public void onOpened(#NonNull CameraDevice cameraDevice) {
Log.i(TAG,"CameraDeviceStateCallback+ onOpened");
mCameraDevice = cameraDevice;
Toast.makeText(getApplicationContext(), "Cam Opened!", Toast.LENGTH_SHORT).show();
createCameraPreviewSession();
}
#Override
public void onClosed(CameraDevice dev){
mCameraDevice=null;
}
#Override
public void onDisconnected(#NonNull CameraDevice cameraDevice) {
Log.i(TAG,"CameraDeviceStateCallback+ onDisconnected");
cameraDevice.close();
mCameraDevice = null;
}
#Override
public void onError(#NonNull CameraDevice cameraDevice, int error) {
cameraDevice.close();
mCameraDevice = null;
Log.i(TAG, "Error Number:" + error + "While opening camera!!!");
}
};
private TextureView.SurfaceTextureListener mSurfaceTexture = new
TextureView.SurfaceTextureListener() {
#Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
Log.i(TAG, "SurfaceTexture Available");
setupCamera(width, height);
Log.i(TAG, "in onSurfaceTextureAvailable ");
transformImage(width, height);
Log.i(TAG, "in onSurfaceTextureAvailable ");
openCamera();
Log.i(TAG, "in onSurfaceTextureAvailable ");
}
#Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
Log.i(TAG, "in onSurfaceTextureSizeChanged ");
}
#Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
Log.i(TAG, "in onSurfaceTextureTextureUpdated");
}
#Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
Log.i(TAG, "SurfaceTexture Destroyed");
return false;
}
};
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
createImageFolder();
setContentView(R.layout.activity_main);
if(ContextCompat.checkSelfPermission(this,Manifest.permission.READ_EXTERNAL_STORAGE)==PackageManager.PERMISSION_GRANTED) {
mRecycleView = (RecyclerView) findViewById(R.id.galleryRecyclerView);
GridLayoutManager gridLayoutManager = new GridLayoutManager(this, 1);
mRecycleView.setLayoutManager(gridLayoutManager);
RecyclerView.Adapter imageAdapter = new ImageAdapter(mImageFolder);
Log.i(TAG, "RecyclerViewSetup!");
mTextureView = (TextureView) findViewById(R.id.textureView);
Log.i(TAG, "TextureViewSetup!");
mCaptureButton = (Button) findViewById(R.id.captureButton);
mCaptureButton.setOnClickListener(this);
}
else{
if(shouldShowRequestPermissionRationale(Manifest.permission.READ_EXTERNAL_STORAGE)) {
requestPermissions(new String[]{Manifest.permission.CAMERA}, REQUEST_READ_EXTERNAL_STORAGE_REQUEST);
}
}
Log.i(TAG,"onCreate Completed");
}
#Override
public void onResume() {
super.onResume();
openBackgroundThread();
if (mTextureView.isAvailable()) {
setupCamera(mTextureView.getWidth(), mTextureView.getHeight());
transformImage(mTextureView.getWidth(),mTextureView.getHeight());
if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
== PackageManager.PERMISSION_GRANTED) {
openCamera();
}
else {
if(shouldShowRequestPermissionRationale(Manifest.permission.CAMERA)) {
Toast.makeText(this, "App needs to be able to save Images", Toast.LENGTH_SHORT).show();
}
requestPermissions(new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSION_RESULT);
}
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTexture);
}
}
#Override
public void onPause() {
closeCamera();
closeBackgroundThread();
super.onPause();
}
public void setupCamera(int width, int height) {
Log.i(TAG,"setupcam");
CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
for (String cameraId : cameraManager.getCameraIdList()) {
CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
if (cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT) {
continue;
}
StreamConfigurationMap map = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size largestImageSize = Collections.max(
Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
new Comparator<Size>() {
#Override
public int compare(Size size, Size t1) {
return Long.signum(size.getWidth()*size.getHeight()- t1.getWidth()*t1.getHeight());
}
}
);
mImageReader = ImageReader.newInstance(largestImageSize.getWidth(),largestImageSize.getHeight(),ImageFormat.JPEG,1);
mImageReader.setOnImageAvailableListener(mOnImageAvailableListener,mBackgroundHandler);
mPreviewSize = getPrefferedPreviewSize(map.getOutputSizes(SurfaceTexture.class), width, height);
mCameraId = cameraId;
Log.i(TAG,"Setup Camera Ends!!");
return;
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private Size getPrefferedPreviewSize(Size[] mapSizes, int width, int height) {
List<Size> collectorSizes = new ArrayList<>();
for (Size size : mapSizes) {
if (width > height) {
if (size.getWidth() > width && size.getHeight() > height) {
collectorSizes.add(size);
} else {
if (size.getWidth() > height && size.getHeight() > width) {
collectorSizes.add(size);
}
}
}
}
if (collectorSizes.size() > 0) {
return Collections.min(collectorSizes, new Comparator<Size>() {
#Override
public int compare(Size lhs, Size rhs) {
return Long.signum(lhs.getWidth() * lhs.getHeight() - rhs.getHeight() * rhs.getWidth());
}
});
}
return mapSizes[0];
}
public void openCamera() {
Log.i(TAG,"In OpenCamera");
CameraManager cameraManager = (CameraManager) getSystemService(CAMERA_SERVICE);
try {
Log.i(TAG,"CamaeraManager.OpenCamera");
cameraManager.openCamera(mCameraId, mCameraDeviceStateCallback, mBackgroundHandler);
Log.i(TAG,"CamaeraManager.OpenCameraEnds");
}
catch(SecurityException s){
s.printStackTrace();
}
catch(CameraAccessException e){
e.printStackTrace();
}
Log.i(TAG,"OpenCamera Ends");
}
public void closeCamera(){
if(mImageReader!=null){
mImageReader.close();
mImageReader=null;
}
if(mCameraCaptureSession!=null) {
mCameraCaptureSession.close();
mCameraCaptureSession = null;
}
if(mCameraDevice!=null){
mCameraDevice.close();
mCameraDevice = null;
}
}
public void createCameraPreviewSession(){
try{
Log.i(TAG,"CREATE_PREVIEW_CAMERA_SESSION");
SurfaceTexture surfaceTexture=mTextureView.getSurfaceTexture();
surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(),mPreviewSize.getHeight());
Surface previewSurface = new Surface(surfaceTexture);
mPreviewCaptureRequestBuilder=mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewCaptureRequestBuilder.addTarget(previewSurface);
mCameraDevice.createCaptureSession(
Arrays.asList(previewSurface,mImageReader.getSurface()),
new CameraCaptureSession.StateCallback(){
#Override
public void onConfigured(CameraCaptureSession session){
Log.i(TAG,"mCameraDevice.careateCaptireSession onConfigured");
if(null == mCameraDevice){
return;
}
try{
mPreviewCaptureRequest=mPreviewCaptureRequestBuilder.build();
mCameraCaptureSession = session;
mCameraCaptureSession.setRepeatingRequest(mPreviewCaptureRequest,mSessionCaptureCallback,mBackgroundHandler);
}
catch(CameraAccessException e){
e.printStackTrace();
}
Log.i(TAG,"mCameraDevice.careateCaptireSession onConfigured setRepeatingRequests");
}
#Override
public void onConfigureFailed(CameraCaptureSession session){
Log.i(TAG,"mCameraDevice.careateCaptireSession onConfigurationFailed");
Toast.makeText(getApplicationContext(),"CreateCameraSession FAILED!!",Toast.LENGTH_SHORT).show();
}
}
,null);
}catch(CameraAccessException e){
e.printStackTrace();
}
}
private void clickPic(){
Log.i(TAG,"in ClickPic");
lockFocus();
Log.i(TAG,"exiting ClickPic");
}
private void lockFocus(){
try {
Log.i(TAG,"lockFocus");
mState = STATE_WAIT_AF_LOCK;
mPreviewCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,CaptureRequest.CONTROL_AF_TRIGGER_START);
mCameraCaptureSession.capture(mPreviewCaptureRequestBuilder.build(), mSessionCaptureCallback, mBackgroundHandler);
}catch(CameraAccessException e){
e.printStackTrace();
}
Log.i(TAG,"exiting lockFocus");
}
private void unlockFocus(){
try {
Log.i(TAG,"unlockFocus");
mPreviewCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,CaptureRequest.CONTROL_AF_TRIGGER_CANCEL);
mCameraCaptureSession.capture(mPreviewCaptureRequestBuilder.build(),mSessionCaptureCallback,mBackgroundHandler);
mState = STATE_PREVIEW;
mCameraCaptureSession.setRepeatingRequest(mPreviewCaptureRequestBuilder.build(), mSessionCaptureCallback, mBackgroundHandler);
}catch(CameraAccessException e){
e.printStackTrace();
}
Log.i(TAG,"exiting lockFocus");
}
private void openBackgroundThread(){
mBackgroundThread=new HandlerThread("Camera Background Thread");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper()) {
public void handleMessage(Message msg) {
Log.i(TAG,"Inside Background Handler");
}
};
}
private void closeBackgroundThread(){
Log.i(TAG,"closing Background Thread!");
mBackgroundThread.quitSafely();
try{
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
}catch(InterruptedException e){
e.printStackTrace();
}
}
private void captureStillImage(){
try {
Log.i(TAG,"captureStillImage");
mPreviewCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
mPreviewCaptureRequestBuilder.addTarget(mImageReader.getSurface());
int rotation = getWindowManager().getDefaultDisplay().getRotation();
mPreviewCaptureRequestBuilder.set(CaptureRequest.JPEG_ORIENTATION,ORIENTATIONS.get(rotation));
CameraCaptureSession.CaptureCallback captureCallback =
new CameraCaptureSession.CaptureCallback(){
#Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result){
Log.i(TAG,"captureStillImage+ onCaptureCompleted"+mImageFileName);
Toast.makeText(getApplicationContext(),"Image Captured!",Toast.LENGTH_SHORT).show();
unlockFocus();
}
public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
super.onCaptureStarted(session, request, timestamp, frameNumber);
Log.i(TAG,"captureStillImage+ onCapturestarted");
try {
mImageFile=createImageFileName();
} catch (IOException e) {
e.printStackTrace();
}
}
};
mCameraCaptureSession.stopRepeating();
mCameraCaptureSession.capture(mPreviewCaptureRequestBuilder.build(),captureCallback,null);
}catch(CameraAccessException e){
e.printStackTrace();
}
Log.i(TAG,"captureStillImage Ends!");
}
private void transformImage(int width,int height){
Matrix matrix = new Matrix();
if(mPreviewSize==null||mTextureView==null) {
return;
}
int rotation = getWindowManager().getDefaultDisplay().getRotation();
RectF textureRectF = new RectF(0,0,width,height);
RectF previewRectF = new RectF(0,0,mPreviewSize.getHeight(),mPreviewSize.getWidth());
float centerX = textureRectF.centerX();
float centerY = textureRectF.centerY();
if(rotation==Surface.ROTATION_90||rotation==Surface.ROTATION_270){
previewRectF.offset(centerX - previewRectF.centerX(),centerY-previewRectF.centerY());
matrix.setRectToRect(textureRectF,previewRectF,Matrix.ScaleToFit.FILL);
float scale = Math.max((float)width/mPreviewSize.getWidth(),
(float)height/mPreviewSize.getHeight());
matrix.postScale(scale,scale,centerX,centerY);
matrix.postRotate(90*(rotation-2),centerX,centerY);
}
mTextureView.setTransform(matrix);
}
#Override
public void onClick(View view){
switch(view.getId()){
case R.id.captureButton:
clickPic();
break;
}
}
private void checkPermission() {
if (ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE)
== PackageManager.PERMISSION_GRANTED) {
try {
createImageFileName();
} catch (IOException e) {
e.printStackTrace();
}
}
else {
if(shouldShowRequestPermissionRationale(Manifest.permission.WRITE_EXTERNAL_STORAGE)) {
Toast.makeText(this, "App needs to be able to save Images", Toast.LENGTH_SHORT).show();
}
requestPermissions(new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, REQUEST_WRITE_EXTERNAL_STORAGE_PERMISSION_RESULT);
}
}
private void createImageFolder() {
Log.i(TAG,"createImageFolder");
File imageFile = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES);
mImageFolder = new File(imageFile, "camera2Api");
if(!mImageFolder.exists()) {
mImageFolder.mkdirs();
}
}
private File createImageFileName() throws IOException {
Log.i(TAG,"createImageFileName");
String timestamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
String prepend = "IMG_" + timestamp + "_";
File imageFile = File.createTempFile(prepend, ".jpg", mImageFolder);
mImageFileName = imageFile.getAbsolutePath();
Log.i(TAG,"Image FILE NAME="+mImageFileName);
return imageFile;
}
#Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if(requestCode == REQUEST_CAMERA_PERMISSION_RESULT) {
if (grantResults[0] != PackageManager.PERMISSION_GRANTED) {
Toast.makeText(getApplicationContext(),
"Application will not run without camera services", Toast.LENGTH_SHORT).show();
}
}
if(requestCode == REQUEST_WRITE_EXTERNAL_STORAGE_PERMISSION_RESULT) {
if (grantResults[0] == PackageManager.PERMISSION_GRANTED) {
Toast.makeText(this,
"Permission successfully granted!", Toast.LENGTH_SHORT).show();
}
}
if(requestCode==REQUEST_READ_EXTERNAL_STORAGE_REQUEST){
if(grantResults[0]==PackageManager.PERMISSION_GRANTED){
Toast.makeText(this,
"Permission successfully granted!", Toast.LENGTH_SHORT).show();
}
}
}
private void swapImageAdapter(){
Log.i(TAG,"swapImageAdapter"+mImageFolder.toString()+"mGalleryFolder from MainActivity!!");
RecyclerView.Adapter newImageAdapter=new ImageAdapter(mImageFolder);
mRecycleView.swapAdapter(newImageAdapter,false);
}
private File[] sortFilesToLatest(File fileImagesDir) {
File[] files = fileImagesDir.listFiles();
Arrays.sort(files, new Comparator<File>() {
#Override
public int compare(File lhs, File rhs) {
return Long.valueOf(rhs.lastModified()).compareTo(lhs.lastModified());
}
});
return files;
}
}
And this is the ImageAdapter class used to inflate the ImageViews in the RecyclerView:
public class ImageAdapter extends RecyclerView.Adapter {
private static final String TAG="ImageAdapter";
private File imagesFile;
public ImageAdapter(File folderFile) {
Log.i(TAG,folderFile.toString()+"printing Folder File FROM IMAGE_ADAPTER!!!!");
imagesFile = folderFile;
}
#Override
public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
Log.i(TAG,"onCreateViewHolder");
View view = LayoutInflater.from(parent.getContext())
.inflate(R.layout.activity_image, parent, false);
return new ViewHolder(view);
}
#Override
public void onBindViewHolder(ViewHolder holder, int position) {
Log.i(TAG,"onBindViewHolder");
BitmapFactory.Options mBitOptions = new BitmapFactory.Options();
mBitOptions.inScaled=true;
mBitOptions.inSampleSize=2;
int size = imagesFile.listFiles().length;
Log.i(TAG,imagesFile.toString()+" its SIZE="+size+" position="+position);
Log.i(TAG,"CONSIDERING FILE:"+imagesFile.listFiles()[position].getPath());
File imageFile = imagesFile.listFiles()[position];
Bitmap imageBitmap = BitmapFactory.decodeFile(imageFile.getAbsolutePath(),mBitOptions);
holder.getImageView().setImageBitmap(imageBitmap);
}
#Override
public int getItemCount() {
Log.i(TAG,"getItemCount");
return imagesFile.listFiles().length;
}
public static class ViewHolder extends RecyclerView.ViewHolder {
private ImageView imageView;
public ViewHolder(View view) {
super(view);
imageView = (ImageView) view.findViewById(R.id.imageGalleryView);
}
public ImageView getImageView() {
return imageView;
}
}
}
Activity_main.xml
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
<android.support.v7.widget.RecyclerView
android:id="#+id/galleryRecyclerView"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignEnd="#+id/captureButton"
android:layout_alignParentStart="true"
android:layout_alignParentTop="true"
android:layout_marginStart="25dp"
android:layout_marginTop="28dp"
android:layout_toStartOf="#+id/captureButton" />
<Button
android:id="#+id/captureButton"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentBottom="true"
android:layout_centerHorizontal="true"
android:layout_marginBottom="24dp"
android:text="#string/CaptureButtonString" />
<TextureView
android:id="#+id/textureView"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentTop="true"
android:layout_alignParentStart="true" />
</RelativeLayout>
Activity_image.xml contains an ImageView in a RelativeLayout. I could not provide code for it due to character restrictions.
Please advise what I am doing wrong.
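For comparison, Google's Camera2Basic sample treats a null CONTROL_AF_STATE as "ready to capture" and also checks that auto-exposure has converged before firing the still capture; on some devices the AF state never reaches FOCUSED_LOCKED, which can leave the capture waiting indefinitely. A minimal, hypothetical variant of the process() method along those lines (the state constants and captureStillImage() are the ones from the code above):
// Hypothetical, more defensive version of process() for the STATE_WAIT_AF_LOCK case.
// A null AF state is treated as good enough, so the capture is not skipped while
// waiting for a FOCUSED_LOCKED report that may never arrive.
private void process(CaptureResult result) {
    if (mState != STATE_WAIT_AF_LOCK) {
        return;
    }
    Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
    Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
    boolean afReady = afState == null
            || afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED
            || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
    boolean aeReady = aeState == null
            || aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED;
    if (afReady && aeReady) {
        mState = STATE_PICTURE_CAPTURED;
        captureStillImage();
    }
}
Camera2Basic additionally runs an AE precapture sequence when exposure has not yet converged; that step is left out of this sketch for brevity.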

custom camera textureView not adjusting to landscape mode

I am making a custom Android camera app. So far, the only problem I have is getting the TextureView to adjust when in landscape mode (turning the camera 90 degrees): the view comes out distorted and not in the right orientation. Portrait mode works fine. I am not sure whether the problem is in the setupCamera() method, the sensorToDeviceRotation() method, or the configureTransform() method. I haven't implemented anything to actually take the photo yet. Any help is appreciated.
EDIT/UPDATE: I updated my code to add the suggested improvements, but the original problem persists. I am using a Pixel XL to test this code. I am still not sure what the problem is.
homePage.java
public class homePage extends AppCompatActivity {
private static final int CAMERA_PERMISSION = 123;
private String mCameraId;
private Size mPreviewSize;
private CaptureRequest.Builder mCaptureRequestBuilder;
private HandlerThread mBackgroundHandlerThread;
private Handler mBackgroundHandler;
private static SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 0);
ORIENTATIONS.append(Surface.ROTATION_90, 90);
ORIENTATIONS.append(Surface.ROTATION_180, 180);
ORIENTATIONS.append(Surface.ROTATION_270, 270);
}
private CameraDevice mCameraDevice;
private CameraDevice.StateCallback mCameraDeviceCallback = new CameraDevice.StateCallback() {
#Override
public void onOpened(CameraDevice camera) {
mCameraDevice = camera;
//Toast.makeText(getApplicationContext(), "Camera connected", Toast.LENGTH_SHORT).show();
startPreview();
}
#Override
public void onDisconnected(CameraDevice camera) {
camera.close();
mCameraDevice = null;
}
#Override
public void onError(CameraDevice camera, int error) {
camera.close();
mCameraDevice = null;
}
};
private TextureView mTextureView;
private TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() {
#Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
setupCamera(width, height);
connectCamera();
}
#Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
configureTransform(width, height);
}
#Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return false;
}
#Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
};
#Override
public void onRequestPermissionsResult(int requestCode,
#NonNull String permissions[],
#NonNull int[] grantResults) {
switch(requestCode) {
case CAMERA_PERMISSION:
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
return;
}else {
Toast.makeText(this, "Certain permissions needed to continue", Toast.LENGTH_SHORT).show();
}
break;
}
}
private boolean hasCamera() {
if (getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_ANY)) {
return true;
}else {
AlertDialog camAlert = new AlertDialog.Builder(homePage.this).create();
camAlert.setTitle("Alert");
camAlert.setMessage("there is no usable camera");
camAlert.setButton(AlertDialog.BUTTON_NEUTRAL, "OK",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
}
});
camAlert.show();
return false;
}
}
private void setupCamera(int width, int height) {
CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
for (String cameraId : cameraManager.getCameraIdList()) {
CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
if(cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) ==
CameraCharacteristics.LENS_FACING_FRONT) {
continue;
}
StreamConfigurationMap map = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
int deviceOrientation = getWindowManager().getDefaultDisplay().getRotation();
int totalRotation = sensorToDeviceRotation(cameraCharacteristics, deviceOrientation);
boolean swapRotation = totalRotation == 90 || totalRotation == 270;
int rotatedWidth = width;
int rotatedHeight = height;
if(swapRotation) {
rotatedWidth = height;
rotatedHeight = width;
}
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedWidth, rotatedHeight);
mCameraId = cameraId;
return;
}
} catch (CameraAccessException e){
e.printStackTrace();
}
}
private void closeCamera() {
if(mCameraDevice != null) {
mCameraDevice.close();
mCameraDevice = null;
}
}
private void connectCamera() {
CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) ==
PackageManager.PERMISSION_GRANTED) {
cameraManager.openCamera(mCameraId, mCameraDeviceCallback, mBackgroundHandler);
//Toast.makeText(this, "Camera permission granted", Toast.LENGTH_SHORT).show();
} else {
if (shouldShowRequestPermissionRationale(Manifest.permission.CAMERA)) {
//Toast.makeText(this, "non-marshmellow device permission granted", Toast.LENGTH_SHORT).show();
}
requestPermissions(new String[] {Manifest.permission.CAMERA}, CAMERA_PERMISSION);
}
} else {
cameraManager.openCamera(mCameraId, mCameraDeviceCallback, mBackgroundHandler);
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void startPreview() {
SurfaceTexture surfaceTexture = mTextureView.getSurfaceTexture();
surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface previewSurface = new Surface(surfaceTexture);
try {
mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mCaptureRequestBuilder.addTarget(previewSurface);
mCameraDevice.createCaptureSession(Arrays.asList(previewSurface),
new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(CameraCaptureSession session) {
try {
session.setRepeatingRequest(mCaptureRequestBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
#Override
public void onConfigureFailed(CameraCaptureSession session) {
Toast.makeText(getApplicationContext(), "Unable to set up camera preview", Toast.LENGTH_SHORT).show();
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void startBackgroundThread() {
mBackgroundHandlerThread = new HandlerThread("cameraString");
mBackgroundHandlerThread.start();
mBackgroundHandler = new Handler(mBackgroundHandlerThread.getLooper());
}
private void stopBackgroundThread() {
mBackgroundHandlerThread.quitSafely();
try {
mBackgroundHandlerThread.join();
mBackgroundHandlerThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private static int sensorToDeviceRotation(CameraCharacteristics cameraCharacteristics,
int deviceOrientation) {
int sensorOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
deviceOrientation = ORIENTATIONS.get(deviceOrientation);
return (sensorOrientation + deviceOrientation + 360) % 360;
}
private void configureTransform (int width, int height) {
if (null == mTextureView || null == mPreviewSize) {
return;
}
final int rotation = getWindowManager().getDefaultDisplay().getRotation();
final Matrix matrix = new Matrix();
final RectF viewRect = new RectF(0, 0, width, height);
final RectF buffetRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
final float centerX = viewRect.centerX();
final float centerY = viewRect.centerY();
if(Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
buffetRect.offset(centerX - buffetRect.centerX(), centerY - buffetRect.centerY());
matrix.setRectToRect(viewRect, buffetRect, Matrix.ScaleToFit.FILL);
final float scale = Math.max(
(float) height / mPreviewSize.getHeight(),
(float) width / mPreviewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
}else if (Surface.ROTATION_180 == rotation) {
matrix.postRotate(180, centerX, centerY);
}
mTextureView.setTransform(matrix);
}
//=====added code from suggestions=======================
public interface CameraModule {
void onOrientationChanged(int orientation);
}
CameraModule mCurrentModule;
private MyOrientationEventListener mOrientationListener;
private int mLastRawOrientation = OrientationEventListener.ORIENTATION_UNKNOWN;
private class MyOrientationEventListener extends OrientationEventListener {
public MyOrientationEventListener(Context context){
super(context);
}
#Override
public void onOrientationChanged(int orientation) {
if(orientation == ORIENTATION_UNKNOWN)
return;
mLastRawOrientation = orientation;
mCurrentModule.onOrientationChanged(orientation);
}
}
//========================================================================
private static class CompareSizeByArea implements Comparator<Size> {
#Override
public int compare(Size lhs, Size rhs) {
return Long.signum((long) lhs.getWidth() * lhs.getHeight() /
(long) rhs.getWidth() * rhs.getHeight());
}
}
private static Size chooseOptimalSize(Size[] choices, int width, int height) {
List<Size> bigEnough = new ArrayList<Size>();
for(Size option: choices) {
if (option.getHeight() == option.getWidth() * height / width &&
option.getWidth() >= width && option.getHeight() >= height) {
bigEnough.add(option);
}
}
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizeByArea());
} else {
return choices[0];
}
}
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_home_page);
hasCamera();
mTextureView = (TextureView) findViewById(R.id.textureView);
mOrientationListener = new MyOrientationEventListener(this);
}
#Override
protected void onResume() {
super.onResume();
startBackgroundThread();
if (mTextureView.isAvailable()) {
setupCamera(mTextureView.getWidth(), mTextureView.getHeight());
connectCamera();
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
#Override
protected void onPause() {
closeCamera();
stopBackgroundThread();
super.onPause();
}
}
You can try something like this (add this private class to your homePage.java). Notice that the onOrientationChanged() method can also be applied to XML layouts:
We keep the last known orientation, so if you first orient the camera and then point it at the floor or sky, the TextureView and camera still keep the correct orientation.
private MyOrientationEventListener mOrientationListener;
// The degrees of the device rotated clockwise from its natural orientation.
private int mLastRawOrientation= OrientationEventListener.ORIENTATION_UNKNOWN;
// Don't forget to add mOrientationListener = new MyOrientationEventListener(this); in your onCreate() method
private class MyOrientationEventListener
extends OrientationEventListener {
public MyOrientationEventListener(Context context) {
super(context);
}
#Override
public void onOrientationChanged(int orientation) {
if (orientation == ORIENTATION_UNKNOWN) return;
mLastRawOrientation = orientation;
mCurrentModule.onOrientationChanged(orientation);
}
}
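An OrientationEventListener only delivers callbacks while it is enabled, so it is normally switched on and off with the activity lifecycle. A minimal sketch using the standard enable()/disable() API (exactly where you hook these in is up to you):
// In onResume(), after the existing setup:
if (mOrientationListener.canDetectOrientation()) {
    mOrientationListener.enable();
}
// In onPause(), before releasing the camera:
mOrientationListener.disable();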
For more information you can check out this link: Camera Example
I found the solution by eliminating the configureTransform() method, the CameraModule interface, and the MyOrientationEventListener class, and replacing them with the transformImage() method, which I then call in onSurfaceTextureAvailable() and in onResume().
The problem with the old code may simply have been that I was calling the configureTransform() method in the wrong places. I got the answer from looking at this post: Android Camera2 Preview is rotated 90deg while in Landscape
and this YouTube tutorial: https://www.youtube.com/watch?v=YvS3iGKhQ_g
These sources explain the topic far better than I can, so please look them over if you are facing a similar issue.
The new, working code:
homePage.java:
public class homePage extends AppCompatActivity {
private static final int CAMERA_PERMISSION = 123;
private String mCameraId;
private Size mPreviewSize;
private CaptureRequest.Builder mCaptureRequestBuilder;
private HandlerThread mBackgroundHandlerThread;
private Handler mBackgroundHandler;
private static SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 0);
ORIENTATIONS.append(Surface.ROTATION_90, 90);
ORIENTATIONS.append(Surface.ROTATION_180, 180);
ORIENTATIONS.append(Surface.ROTATION_270, 270);
}
private CameraDevice mCameraDevice;
private CameraDevice.StateCallback mCameraDeviceCallback = new CameraDevice.StateCallback() {
#Override
public void onOpened(CameraDevice camera) {
mCameraDevice = camera;
//Toast.makeText(getApplicationContext(), "Camera connected", Toast.LENGTH_SHORT).show();
startPreview();
}
#Override
public void onDisconnected(CameraDevice camera) {
camera.close();
mCameraDevice = null;
}
#Override
public void onError(CameraDevice camera, int error) {
camera.close();
mCameraDevice = null;
}
};
private TextureView mTextureView;
private TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() {
#Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
setupCamera(width, height);
connectCamera();
transformImage(width, height);
}
#Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
}
#Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return false;
}
#Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
};
#Override
public void onRequestPermissionsResult(int requestCode,
#NonNull String permissions[],
#NonNull int[] grantResults) {
switch(requestCode) {
case CAMERA_PERMISSION:
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
return;
}else {
Toast.makeText(this, "Certain permissions needed to continue", Toast.LENGTH_SHORT).show();
}
break;
}
}
private boolean hasCamera() {
if (getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_ANY)) {
return true;
}else {
AlertDialog camAlert = new AlertDialog.Builder(homePage.this).create();
camAlert.setTitle("Alert");
camAlert.setMessage("there is no usable camera");
camAlert.setButton(AlertDialog.BUTTON_NEUTRAL, "OK",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
}
});
camAlert.show();
return false;
}
}
private void setupCamera(int width, int height) {
CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
for (String cameraId : cameraManager.getCameraIdList()) {
CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
if(cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) ==
CameraCharacteristics.LENS_FACING_FRONT) {
continue;
}
StreamConfigurationMap map = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
int deviceOrientation = getWindowManager().getDefaultDisplay().getRotation();
int totalRotation = sensorToDeviceRotation(cameraCharacteristics, deviceOrientation);
boolean swapRotation = totalRotation == 90 || totalRotation == 270;
int rotatedWidth = width;
int rotatedHeight = height;
if(swapRotation) {
rotatedWidth = height;
rotatedHeight = width;
}
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedWidth, rotatedHeight);
mCameraId = cameraId;
return;
}
} catch (CameraAccessException e){
e.printStackTrace();
}
}
private void closeCamera() {
if(mCameraDevice != null) {
mCameraDevice.close();
mCameraDevice = null;
}
}
private void connectCamera() {
CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) ==
PackageManager.PERMISSION_GRANTED) {
cameraManager.openCamera(mCameraId, mCameraDeviceCallback, mBackgroundHandler);
//Toast.makeText(this, "Camera permission granted", Toast.LENGTH_SHORT).show();
} else {
if (shouldShowRequestPermissionRationale(Manifest.permission.CAMERA)) {
//Toast.makeText(this, "non-marshmellow device permission granted", Toast.LENGTH_SHORT).show();
}
requestPermissions(new String[] {Manifest.permission.CAMERA}, CAMERA_PERMISSION);
}
} else {
cameraManager.openCamera(mCameraId, mCameraDeviceCallback, mBackgroundHandler);
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void startPreview() {
SurfaceTexture surfaceTexture = mTextureView.getSurfaceTexture();
surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface previewSurface = new Surface(surfaceTexture);
try {
mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mCaptureRequestBuilder.addTarget(previewSurface);
mCameraDevice.createCaptureSession(Arrays.asList(previewSurface),
new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(CameraCaptureSession session) {
try {
session.setRepeatingRequest(mCaptureRequestBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
#Override
public void onConfigureFailed(CameraCaptureSession session) {
Toast.makeText(getApplicationContext(), "Unable to set up camera preview", Toast.LENGTH_SHORT).show();
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void startBackgroundThread() {
mBackgroundHandlerThread = new HandlerThread("cameraString");
mBackgroundHandlerThread.start();
mBackgroundHandler = new Handler(mBackgroundHandlerThread.getLooper());
}
private void stopBackgroundThread() {
mBackgroundHandlerThread.quitSafely();
try {
mBackgroundHandlerThread.join();
mBackgroundHandlerThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private static int sensorToDeviceRotation(CameraCharacteristics cameraCharacteristics,
int deviceOrientation) {
int sensorOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
deviceOrientation = ORIENTATIONS.get(deviceOrientation);
return (sensorOrientation + deviceOrientation + 360) % 360;
}
//================new method====================================
private void transformImage(int width, int height) {
if(mPreviewSize == null || mTextureView == null) {
return;
}
Matrix matrix = new Matrix();
int rotation = getWindowManager().getDefaultDisplay().getRotation();
RectF textureRectF = new RectF(0, 0, width, height);
RectF previewRectF = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
float centerX = textureRectF.centerX();
float centerY = textureRectF.centerY();
if(rotation == Surface.ROTATION_90 || rotation == Surface.ROTATION_270) {
previewRectF.offset(centerX - previewRectF.centerX(),
centerY - previewRectF.centerY());
matrix.setRectToRect(textureRectF, previewRectF, Matrix.ScaleToFit.FILL);
float scale = Math.max((float)width / mPreviewSize.getWidth(),
(float)height / mPreviewSize.getHeight());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
}
mTextureView.setTransform(matrix);
}
//===============================================================
private static class CompareSizeByArea implements Comparator<Size> {
#Override
public int compare(Size lhs, Size rhs) {
return Long.signum((long) lhs.getWidth() * lhs.getHeight() /
(long) rhs.getWidth() * rhs.getHeight());
}
}
private static Size chooseOptimalSize(Size[] choices, int width, int height) {
List<Size> bigEnough = new ArrayList<Size>();
for(Size option: choices) {
if (option.getHeight() == option.getWidth() * height / width &&
option.getWidth() >= width && option.getHeight() >= height) {
bigEnough.add(option);
}
}
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizeByArea());
} else {
return choices[0];
}
}
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_home_page);
hasCamera();
mTextureView = (TextureView) findViewById(R.id.textureView);
}
#Override
protected void onResume() {
super.onResume();
startBackgroundThread();
if (mTextureView.isAvailable()) {
setupCamera(mTextureView.getWidth(), mTextureView.getHeight());
connectCamera();
transformImage(mTextureView.getWidth(), mTextureView.getHeight());
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
#Override
protected void onPause() {
closeCamera();
stopBackgroundThread();
super.onPause();
}
}
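One small difference from the original code is worth noting: the old configureTransform() had an extra branch that applied matrix.postRotate(180, centerX, centerY) for Surface.ROTATION_180, while transformImage() only handles the 90/270 cases, so that branch may need to be carried over if the app can run in reverse portrait.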

Surface View Preview is stretched vertically

I am working on a live-streaming application using RTMP. I am at an early stage; I have successfully got a lot working, but I am now stuck in one place. I am using a surface view and my video is slightly stretched vertically. The code is below.
This is my SrsCameraView:
public class SrsCameraView extends GLSurfaceView implements GLSurfaceView.Renderer {
private GPUImageFilter magicFilter;
private SurfaceTexture surfaceTexture;
private int mOESTextureId = OpenGLUtils.NO_TEXTURE;
private int mSurfaceWidth;
private int mSurfaceHeight;
private int mPreviewWidth;
private int mPreviewHeight;
private boolean mIsEncoding;
private boolean mIsTorchOn = false;
private float mInputAspectRatio;
private float mOutputAspectRatio;
private float[] mProjectionMatrix = new float[16];
private float[] mSurfaceMatrix = new float[16];
private float[] mTransformMatrix = new float[16];
private Camera mCamera;
private ByteBuffer mGLPreviewBuffer;
private final static int PIXEL_FORMAT = ImageFormat.NV21;
private int mCamId = -1;
private int mPreviewRotation = 90;
private int mPreviewOrientation = Configuration.ORIENTATION_PORTRAIT;
private Thread worker;
private final Object writeLock = new Object();
private ConcurrentLinkedQueue<IntBuffer> mGLIntBufferCache = new
ConcurrentLinkedQueue<>();
private PreviewCallback mPrevCb;
public SrsCameraView(Context context) {
this(context, null);
}
public SrsCameraView(Context context, AttributeSet attrs) {
super(context, attrs);
setEGLContextClientVersion(2);
setRenderer(this);
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
#Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
GLES20.glDisable(GL10.GL_DITHER);
GLES20.glClearColor(0, 0, 0, 0);
magicFilter = new GPUImageFilter(MagicFilterType.NONE);
magicFilter.init(getContext().getApplicationContext());
magicFilter.onInputSizeChanged(mPreviewWidth, mPreviewHeight);
mOESTextureId = OpenGLUtils.getExternalOESTextureID();
surfaceTexture = new SurfaceTexture(mOESTextureId);
surfaceTexture.setOnFrameAvailableListener(new
SurfaceTexture.OnFrameAvailableListener() {
#Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
requestRender();
}
});
// For camera preview on activity creation
if (mCamera != null) {
try {
mCamera.setPreviewTexture(surfaceTexture);
} catch (IOException ioe) {
ioe.printStackTrace();
}
}
}
#Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
GLES20.glViewport(0, 0, width, height);
mSurfaceWidth = width;
mSurfaceHeight = height;
magicFilter.onDisplaySizeChanged(width, height);
mOutputAspectRatio = width > height ? (float) width / height : (float)
height / width;
float aspectRatio = mOutputAspectRatio / mInputAspectRatio;
if (width > height) {
Matrix.orthoM(mProjectionMatrix, 0, -1.0f, 1.0f, -aspectRatio,
aspectRatio, -1.0f, 1.0f);
} else {
Matrix.orthoM(mProjectionMatrix, 0, -aspectRatio, aspectRatio, -1.0f, 1.0f, -1.0f, 1.0f);
}
}
#Override
public void onDrawFrame(GL10 gl) {
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
surfaceTexture.updateTexImage();
surfaceTexture.getTransformMatrix(mSurfaceMatrix);
Matrix.multiplyMM(mTransformMatrix, 0, mSurfaceMatrix, 0,
mProjectionMatrix, 0);
magicFilter.setTextureTransformMatrix(mTransformMatrix);
magicFilter.onDrawFrame(mOESTextureId);
if (mIsEncoding) {
mGLIntBufferCache.add(magicFilter.getGLFboBuffer());
synchronized (writeLock) {
writeLock.notifyAll();
}
}
}
public void setPreviewCallback(PreviewCallback cb) {
mPrevCb = cb;
}
public int[] setPreviewResolution(int width, int height) {
getHolder().setFixedSize(width, height);
mCamera = openCamera();
mPreviewWidth = width;
mPreviewHeight = height;
Camera.Size rs = adaptPreviewResolution(mCamera.new Size(width,
height));
if (rs != null) {
mPreviewWidth = rs.width;
mPreviewHeight = rs.height;
}
mCamera.getParameters().setPreviewSize(mPreviewWidth, mPreviewHeight);
mGLPreviewBuffer = ByteBuffer.allocateDirect(mPreviewWidth *
mPreviewHeight * 4);
mInputAspectRatio = mPreviewWidth > mPreviewHeight ?
(float) mPreviewWidth / mPreviewHeight : (float) mPreviewHeight /
mPreviewWidth;
return new int[] { mPreviewWidth, mPreviewHeight };
}
public boolean setFilter(final MagicFilterType type) {
if (mCamera == null) {
return false;
}
queueEvent(new Runnable() {
#Override
public void run() {
if (magicFilter != null) {
magicFilter.destroy();
}
magicFilter = MagicFilterFactory.initFilters(type);
if (magicFilter != null) {
magicFilter.init(getContext().getApplicationContext());
magicFilter.onInputSizeChanged(mPreviewWidth,
mPreviewHeight);
magicFilter.onDisplaySizeChanged(mSurfaceWidth,
mSurfaceHeight);
}
}
});
requestRender();
return true;
}
private void deleteTextures() {
if (mOESTextureId != OpenGLUtils.NO_TEXTURE) {
queueEvent(new Runnable() {
#Override
public void run() {
GLES20.glDeleteTextures(1, new int[]{ mOESTextureId }, 0);
mOESTextureId = OpenGLUtils.NO_TEXTURE;
}
});
}
}
public void setCameraId(int id) {
mCamId = id;
setPreviewOrientation(mPreviewOrientation);
}
public void setPreviewOrientation(int orientation) {
mPreviewOrientation = orientation;
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(mCamId, info);
if (orientation == Configuration.ORIENTATION_PORTRAIT) {
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
mPreviewRotation = info.orientation % 360;
mPreviewRotation = (360 - mPreviewRotation) % 360; // compensate the mirror
} else {
mPreviewRotation = (info.orientation + 360) % 360;
}
} else if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
mPreviewRotation = (info.orientation + 90) % 360;
mPreviewRotation = (360 - mPreviewRotation) % 360; // compensate the mirror
} else {
mPreviewRotation = (info.orientation + 270) % 360;
}
}
}
public int getCameraId() {
return mCamId;
}
public void enableEncoding() {
worker = new Thread(new Runnable() {
#Override
public void run() {
while (!Thread.interrupted()) {
while (!mGLIntBufferCache.isEmpty()) {
IntBuffer picture = mGLIntBufferCache.poll();
mGLPreviewBuffer.asIntBuffer().put(picture.array());
mPrevCb.onGetRgbaFrame(mGLPreviewBuffer.array(),
mPreviewWidth, mPreviewHeight);
}
// Waiting for next frame
synchronized (writeLock) {
try {
// isEmpty() may take some time, so we set timeout to detect next frame
writeLock.wait(500);
} catch (InterruptedException ie) {
worker.interrupt();
}
}
}
}
});
worker.start();
mIsEncoding = true;
}
public void disableEncoding() {
mIsEncoding = false;
mGLIntBufferCache.clear();
if (worker != null) {
worker.interrupt();
try {
worker.join();
} catch (InterruptedException e) {
e.printStackTrace();
worker.interrupt();
}
worker = null;
}
}
public boolean startCamera() {
if (mCamera == null) {
mCamera = openCamera();
if (mCamera == null) {
return false;
}
}
Camera.Parameters params = mCamera.getParameters();
params.setPictureSize(mPreviewWidth, mPreviewHeight);
params.setPreviewSize(mPreviewWidth, mPreviewHeight);
int[] range = adaptFpsRange(SrsEncoder.VFPS,
params.getSupportedPreviewFpsRange());
params.setPreviewFpsRange(range[0], range[1]);
params.setPreviewFormat(ImageFormat.NV21);
params.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
params.setWhiteBalance(Camera.Parameters.WHITE_BALANCE_AUTO);
params.setSceneMode(Camera.Parameters.SCENE_MODE_AUTO);
List<String> supportedFocusModes = params.getSupportedFocusModes();
if (supportedFocusModes != null && !supportedFocusModes.isEmpty()) {
if (supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
} else if (supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
mCamera.autoFocus(null);
} else {
params.setFocusMode(supportedFocusModes.get(0));
}
}
List<String> supportedFlashModes = params.getSupportedFlashModes();
if (supportedFlashModes != null && !supportedFlashModes.isEmpty()) {
if (supportedFlashModes.contains(Camera.Parameters.FLASH_MODE_TORCH)) {
if (mIsTorchOn) {
params.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH);
}
} else {
params.setFlashMode(supportedFlashModes.get(0));
}
}
mCamera.setParameters(params);
mCamera.setDisplayOrientation(mPreviewRotation);
try {
mCamera.setPreviewTexture(surfaceTexture);
} catch (IOException e) {
e.printStackTrace();
}
mCamera.startPreview();
return true;
}
public void stopCamera() {
disableEncoding();
if (mCamera != null) {
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
}
private Camera openCamera() {
Camera camera;
if (mCamId < 0) {
Camera.CameraInfo info = new Camera.CameraInfo();
int numCameras = Camera.getNumberOfCameras();
int frontCamId = -1;
int backCamId = -1;
for (int i = 0; i < numCameras; i++) {
Camera.getCameraInfo(i, info);
if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
backCamId = i;
} else if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT)
{
frontCamId = i;
break;
}
}
if (frontCamId != -1) {
mCamId = frontCamId;
} else if (backCamId != -1) {
mCamId = backCamId;
} else {
mCamId = 0;
}
}
camera = Camera.open(mCamId);
return camera;
}
private Camera.Size adaptPreviewResolution(Camera.Size resolution) {
float diff = 100f;
float xdy = (float) resolution.width / (float) resolution.height;
Camera.Size best = null;
for (Camera.Size size :
mCamera.getParameters().getSupportedPreviewSizes()) {
if (size.equals(resolution)) {
return size;
}
float tmp = Math.abs(((float) size.width / (float) size.height) -
xdy);
if (tmp < diff) {
diff = tmp;
best = size;
}
}
return best;
}
private int[] adaptFpsRange(int expectedFps, List<int[]> fpsRanges) {
expectedFps *= 1000;
int[] closestRange = fpsRanges.get(0);
int measure = Math.abs(closestRange[0] - expectedFps) +
Math.abs(closestRange[1] - expectedFps);
for (int[] range : fpsRanges) {
if (range[0] <= expectedFps && range[1] >= expectedFps) {
int curMeasure = Math.abs(range[0] - expectedFps) +
Math.abs(range[1] - expectedFps);
if (curMeasure < measure) {
closestRange = range;
measure = curMeasure;
}
}
}
return closestRange;
}
public interface PreviewCallback {
void onGetRgbaFrame(byte[] data, int width, int height);
}
}
This is my MainActivity:
public class MainActivity extends AppCompatActivity implements RtmpHandler.RtmpListener,
SrsRecordHandler.SrsRecordListener, SrsEncodeHandler.SrsEncodeListener {
private static final String TAG = "Yasea";
Button btnPublish = null;
Button btnSwitchCamera = null;
Button btnRecord = null;
Button btnSwitchEncoder = null;
private SharedPreferences sp;
private String rtmpUrl = "rtmp://00.00.000.00/live/" + getRandomAlphaString(3) + '/' + getRandomAlphaDigitString(5);
private String recPath = Environment.getExternalStorageDirectory().getPath() + "/test.mp4";
private SrsPublisher mPublisher;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.activity_main);
// response screen rotation event
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_FULL_SENSOR);
// restore data.
sp = getSharedPreferences("Yasea", MODE_PRIVATE);
rtmpUrl = sp.getString("rtmpUrl", rtmpUrl);
// initialize url.
final EditText efu = (EditText) findViewById(R.id.url);
efu.setText(rtmpUrl);
btnPublish = (Button) findViewById(R.id.publish);
btnSwitchCamera = (Button) findViewById(R.id.swCam);
btnRecord = (Button) findViewById(R.id.record);
btnSwitchEncoder = (Button) findViewById(R.id.swEnc);
mPublisher = new SrsPublisher((SrsCameraView) findViewById(R.id.glsurfaceview_camera));
mPublisher.setEncodeHandler(new SrsEncodeHandler(this));
mPublisher.setRtmpHandler(new RtmpHandler(this));
mPublisher.setRecordHandler(new SrsRecordHandler(this));
mPublisher.setPreviewResolution(640, 480);
mPublisher.setOutputResolution(720, 1280);
mPublisher.setVideoHDMode();
mPublisher.startCamera();
btnPublish.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View v) {
if (btnPublish.getText().toString().contentEquals("publish")) {
rtmpUrl = efu.getText().toString();
SharedPreferences.Editor editor = sp.edit();
editor.putString("rtmpUrl", rtmpUrl);
editor.apply();
mPublisher.startPublish(rtmpUrl);
mPublisher.startCamera();
if (btnSwitchEncoder.getText().toString().contentEquals("soft encoder")) {
Toast.makeText(getApplicationContext(), "Use hard encoder", Toast.LENGTH_SHORT).show();
} else {
Toast.makeText(getApplicationContext(), "Use soft encoder", Toast.LENGTH_SHORT).show();
}
btnPublish.setText("stop");
btnSwitchEncoder.setEnabled(false);
} else if (btnPublish.getText().toString().contentEquals("stop")) {
mPublisher.stopPublish();
mPublisher.stopRecord();
btnPublish.setText("publish");
btnRecord.setText("record");
btnSwitchEncoder.setEnabled(true);
}
}
});
btnSwitchCamera.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View v) {
mPublisher.switchCameraFace((mPublisher.getCamraId() + 1) % Camera.getNumberOfCameras());
}
});
btnRecord.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View v) {
if (btnRecord.getText().toString().contentEquals("record")) {
if (mPublisher.startRecord(recPath)) {
btnRecord.setText("pause");
}
} else if (btnRecord.getText().toString().contentEquals("pause")) {
mPublisher.pauseRecord();
btnRecord.setText("resume");
} else if (btnRecord.getText().toString().contentEquals("resume")) {
mPublisher.resumeRecord();
btnRecord.setText("pause");
}
}
});
btnSwitchEncoder.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View v) {
if (btnSwitchEncoder.getText().toString().contentEquals("soft encoder")) {
mPublisher.switchToSoftEncoder();
btnSwitchEncoder.setText("hard encoder");
} else if (btnSwitchEncoder.getText().toString().contentEquals("hard encoder")) {
mPublisher.switchToHardEncoder();
btnSwitchEncoder.setText("soft encoder");
}
}
});
}
#Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
#Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
return true;
} else {
switch (id) {
case R.id.cool_filter:
mPublisher.switchCameraFilter(MagicFilterType.COOL);
break;
case R.id.beauty_filter:
mPublisher.switchCameraFilter(MagicFilterType.BEAUTY);
break;
case R.id.early_bird_filter:
mPublisher.switchCameraFilter(MagicFilterType.EARLYBIRD);
break;
case R.id.evergreen_filter:
mPublisher.switchCameraFilter(MagicFilterType.EVERGREEN);
break;
case R.id.n1977_filter:
mPublisher.switchCameraFilter(MagicFilterType.N1977);
break;
case R.id.nostalgia_filter:
mPublisher.switchCameraFilter(MagicFilterType.NOSTALGIA);
break;
case R.id.romance_filter:
mPublisher.switchCameraFilter(MagicFilterType.ROMANCE);
break;
case R.id.sunrise_filter:
mPublisher.switchCameraFilter(MagicFilterType.SUNRISE);
break;
case R.id.sunset_filter:
mPublisher.switchCameraFilter(MagicFilterType.SUNSET);
break;
case R.id.tender_filter:
mPublisher.switchCameraFilter(MagicFilterType.TENDER);
break;
case R.id.toast_filter:
mPublisher.switchCameraFilter(MagicFilterType.TOASTER2);
break;
case R.id.valencia_filter:
mPublisher.switchCameraFilter(MagicFilterType.VALENCIA);
break;
case R.id.walden_filter:
mPublisher.switchCameraFilter(MagicFilterType.WALDEN);
break;
case R.id.warm_filter:
mPublisher.switchCameraFilter(MagicFilterType.WARM);
break;
case R.id.original_filter:
default:
mPublisher.switchCameraFilter(MagicFilterType.NONE);
break;
}
}
setTitle(item.getTitle());
return super.onOptionsItemSelected(item);
}
#Override
protected void onResume() {
super.onResume();
final Button btn = (Button) findViewById(R.id.publish);
btn.setEnabled(true);
mPublisher.resumeRecord();
}
#Override
protected void onPause() {
super.onPause();
mPublisher.pauseRecord();
}
#Override
protected void onDestroy() {
super.onDestroy();
mPublisher.stopPublish();
mPublisher.stopRecord();
}
#Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
mPublisher.stopEncode();
mPublisher.stopRecord();
btnRecord.setText("record");
mPublisher.setScreenOrientation(newConfig.orientation);
if (btnPublish.getText().toString().contentEquals("stop")) {
mPublisher.startEncode();
}
mPublisher.startCamera();
}
private static String getRandomAlphaString(int length) {
String base = "abcdefghijklmnopqrstuvwxyz";
Random random = new Random();
StringBuilder sb = new StringBuilder();
for (int i = 0; i < length; i++) {
int number = random.nextInt(base.length());
sb.append(base.charAt(number));
}
return sb.toString();
}
private static String getRandomAlphaDigitString(int length) {
String base = "abcdefghijklmnopqrstuvwxyz0123456789";
Random random = new Random();
StringBuilder sb = new StringBuilder();
for (int i = 0; i < length; i++) {
int number = random.nextInt(base.length());
sb.append(base.charAt(number));
}
return sb.toString();
}
private void handleException(Exception e) {
try {
Toast.makeText(getApplicationContext(), e.getMessage(), Toast.LENGTH_SHORT).show();
mPublisher.stopPublish();
mPublisher.stopRecord();
btnPublish.setText("publish");
btnRecord.setText("record");
btnSwitchEncoder.setEnabled(true);
} catch (Exception e1) {
//
}
}
// Implementation of SrsRtmpListener.
#Override
public void onRtmpConnecting(String msg) {
Toast.makeText(getApplicationContext(), msg, Toast.LENGTH_SHORT).show();
}
#Override
public void onRtmpConnected(String msg) {
Toast.makeText(getApplicationContext(), msg, Toast.LENGTH_SHORT).show();
}
#Override
public void onRtmpVideoStreaming() {
}
#Override
public void onRtmpAudioStreaming() {
}
#Override
public void onRtmpStopped() {
Toast.makeText(getApplicationContext(), "Stopped", Toast.LENGTH_SHORT).show();
}
#Override
public void onRtmpDisconnected() {
Toast.makeText(getApplicationContext(), "Disconnected", Toast.LENGTH_SHORT).show();
}
#Override
public void onRtmpVideoFpsChanged(double fps) {
Log.i(TAG, String.format("Output Fps: %f", fps));
}
#Override
public void onRtmpVideoBitrateChanged(double bitrate) {
int rate = (int) bitrate;
if (rate / 1000 > 0) {
Log.i(TAG, String.format("Video bitrate: %f kbps", bitrate / 1000));
} else {
Log.i(TAG, String.format("Video bitrate: %d bps", rate));
}
}
#Override
public void onRtmpAudioBitrateChanged(double bitrate) {
int rate = (int) bitrate;
if (rate / 1000 > 0) {
Log.i(TAG, String.format("Audio bitrate: %f kbps", bitrate / 1000));
} else {
Log.i(TAG, String.format("Audio bitrate: %d bps", rate));
}
}
#Override
public void onRtmpSocketException(SocketException e) {
handleException(e);
}
#Override
public void onRtmpIOException(IOException e) {
handleException(e);
}
#Override
public void onRtmpIllegalArgumentException(IllegalArgumentException e) {
handleException(e);
}
#Override
public void onRtmpIllegalStateException(IllegalStateException e) {
handleException(e);
}
// Implementation of SrsRecordHandler.
#Override
public void onRecordPause() {
Toast.makeText(getApplicationContext(), "Record paused", Toast.LENGTH_SHORT).show();
}
#Override
public void onRecordResume() {
Toast.makeText(getApplicationContext(), "Record resumed", Toast.LENGTH_SHORT).show();
}
#Override
public void onRecordStarted(String msg) {
Toast.makeText(getApplicationContext(), "Recording file: " + msg, Toast.LENGTH_SHORT).show();
}
#Override
public void onRecordFinished(String msg) {
Toast.makeText(getApplicationContext(), "MP4 file saved: " + msg, Toast.LENGTH_SHORT).show();
}
#Override
public void onRecordIOException(IOException e) {
handleException(e);
}
#Override
public void onRecordIllegalArgumentException(IllegalArgumentException e) {
handleException(e);
}
// Implementation of SrsEncodeHandler.
#Override
public void onNetworkWeak() {
Toast.makeText(getApplicationContext(), "Network weak", Toast.LENGTH_SHORT).show();
}
#Override
public void onNetworkResume() {
Toast.makeText(getApplicationContext(), "Network resume", Toast.LENGTH_SHORT).show();
}
#Override
public void onEncodeIllegalArgumentException(IllegalArgumentException e) {
handleException(e);
}
}
I am not sure that this is the issue (a screenshot could have helped), but there might be a precision issue here (and possibly in other places in the code):
mOutputAspectRatio = width > height ? (float) width / height : (float)
height / width;
Both width and height are integers. The result of their division is also an integer, so it loses precision. I suggest you cast them both to float before the division:
mOutputAspectRatio = width > height ? ((float) width) / ((float)height) : ((float)
height) / ((float) width);
And the same for the computation of mInputAspectRatio.
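To see the effect in isolation, here is a standalone sketch (not part of the project code; the class and variable names are mine):
public class AspectRatioDemo {
    public static void main(String[] args) {
        int width = 720;
        int height = 1280;
        // Integer division truncates before the result is widened: 1280 / 720 == 1
        float truncated = height / width;                   // 1.0
        // Casting first keeps the fractional part
        float precise = ((float) height) / ((float) width); // ~1.7778
        System.out.println("truncated = " + truncated + ", precise = " + precise);
    }
}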

How to implement video recording in Android with the smallest possible file size?

I followed this documentation on how to implement video recording (in my case, I implemented it in a Fragment)
static final int REQUEST_VIDEO_CAPTURE = 1;
private void dispatchTakeVideoIntent() {
Intent takeVideoIntent = new Intent(MediaStore.ACTION_VIDEO_CAPTURE);
if (takeVideoIntent.resolveActivity(getPackageManager()) != null) {
startActivityForResult(takeVideoIntent, REQUEST_VIDEO_CAPTURE);
}
}
https://developer.android.com/training/camera/videobasics.html
However, I noticed that 30 seconds of recording comes out at around 40 MB. I tried another app with video recording and its 30-second clip is only around 7 MB; I noticed that its width and height are 720x480.
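(Rough arithmetic: 40 MB over 30 seconds is about 10.7 Mbit/s of encoded data, while 7 MB over 30 seconds is roughly 1.9 Mbit/s, so the difference must come down mostly to resolution and encoding bitrate.)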
I would like to ask how to implement something similar.
Thanks for your advice.
You need to implement your own video recorder so that you can adjust the output to your requirements.
Set the video resolution:
mVideoRecorder.setVideoSize(720, 480);
Set the max duration:
mVideoRecorder.setMaxDuration(30000);
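The setting with the biggest impact on file size is the video encoding bitrate. A minimal sketch of just the size-related calls (assuming a MediaRecorder named mVideoRecorder whose sources and output format have already been configured, as in the full example below):
// Lower values give smaller files at the cost of quality.
mVideoRecorder.setVideoSize(720, 480);            // resolution
mVideoRecorder.setVideoFrameRate(20);             // frames per second
mVideoRecorder.setVideoEncodingBitRate(1500000);  // ~1.5 Mbit/s of video
mVideoRecorder.setMaxDuration(30000);             // stop after 30 seconds
At roughly 1.5 Mbit/s, 30 seconds of video is on the order of 5-6 MB before audio, which is in line with the ~7 MB clips you observed.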
Try this complete example:
videorecord.xml
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:orientation="vertical">
<VideoView
android:id="#+id/videoRecorder"
android:layout_width="match_parent"
android:layout_height="match_parent" />
</LinearLayout>
VideoRecorderExample.java
import android.app.Activity;
import android.hardware.Camera;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
import android.widget.VideoView;
public class VideoRecorderExample extends Activity implements
SurfaceHolder.Callback {
public static final String IS_FRONT_CAMERA = "FRONT_CAMERA";
private static final String TAG = VideoRecorderExample.class.getSimpleName();
private Boolean mRecording = false;
private Boolean isFrontCamera = false;
private VideoView mVideoView = null;
private MediaRecorder mVideoRecorder = null;
private Camera mCamera;
#Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.videorecord);
mVideoView = (VideoView) findViewById(R.id.videoRecorder);
isFrontCamera = getIntent().getExtras().getBoolean(IS_FRONT_CAMERA, true);
final SurfaceHolder holder = mVideoView.getHolder();
holder.addCallback(this);
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
#Override
public void surfaceCreated(SurfaceHolder holder) {
try {
startRecording(holder);
} catch (Exception e) {
Log.e(TAG, e.toString());
e.printStackTrace();
}
}
#Override
public void surfaceDestroyed(SurfaceHolder holder) {
}
#Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
Log.e(TAG, "W x H = " + width + "x" + height);
}
private void stopRecording() throws Exception {
mRecording = false;
if (mVideoRecorder != null) {
mVideoRecorder.stop();
mVideoRecorder.release();
mVideoRecorder = null;
}
if (mCamera != null) {
mCamera.reconnect();
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
}
#Override
protected void onDestroy() {
try {
stopRecording();
} catch (Exception e) {
Log.e(TAG, e.toString());
e.printStackTrace();
}
super.onDestroy();
}
private void startRecording(SurfaceHolder holder) throws Exception {
if (mVideoRecorder != null) {
mVideoRecorder.stop();
mVideoRecorder.release();
mVideoRecorder = null;
}
if (mCamera != null) {
mCamera.reconnect();
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
String uniqueOutFile = Environment.getExternalStorageDirectory().toString() + "/" + System.currentTimeMillis() + ".3gp";
try {
if (isFrontCamera) {
mCamera = Camera.open(1);
} else {
mCamera = Camera.open();
}
mCamera.setPreviewDisplay(holder);
Camera.Parameters parameters = mCamera.getParameters();
parameters.setPreviewSize(640, 480);
mCamera.setParameters(parameters);
mCamera.startPreview();
mCamera.unlock();
mVideoRecorder = new MediaRecorder();
mVideoRecorder.setCamera(mCamera);
mVideoRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
mVideoRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mVideoRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
mVideoRecorder.setVideoSize(720, 480);
mVideoRecorder.setVideoFrameRate(20);
mVideoRecorder.setVideoEncodingBitRate(3000000);
mVideoRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mVideoRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_WB);
mVideoRecorder.setAudioSamplingRate(16000);
mVideoRecorder.setMaxDuration(30000);
mVideoRecorder.setPreviewDisplay(holder.getSurface());
mVideoRecorder.setOutputFile(uniqueOutFile);
mVideoRecorder.prepare();
mVideoRecorder.start();
mRecording = true;
} catch (Exception e) {
Log.e(TAG, e.toString());
e.printStackTrace();
}
}
}
For camera2, try this code; it works with the camera2 API.
public class Camera2VideoFragment extends Fragment
implements View.OnClickListener, FragmentCompat.OnRequestPermissionsResultCallback {
private static final int SENSOR_ORIENTATION_DEFAULT_DEGREES = 90;
private static final int SENSOR_ORIENTATION_INVERSE_DEGREES = 270;
private static final SparseIntArray DEFAULT_ORIENTATIONS = new SparseIntArray();
private static final SparseIntArray INVERSE_ORIENTATIONS = new SparseIntArray();
private static final String TAG = "Camera2VideoFragment";
private static final int REQUEST_VIDEO_PERMISSIONS = 1;
private static final String FRAGMENT_DIALOG = "dialog";
private static final String[] VIDEO_PERMISSIONS = {
Manifest.permission.CAMERA,
Manifest.permission.RECORD_AUDIO,
};
static {
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_0, 90);
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_90, 0);
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_180, 270);
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
static {
INVERSE_ORIENTATIONS.append(Surface.ROTATION_0, 270);
INVERSE_ORIENTATIONS.append(Surface.ROTATION_90, 180);
INVERSE_ORIENTATIONS.append(Surface.ROTATION_180, 90);
INVERSE_ORIENTATIONS.append(Surface.ROTATION_270, 0);
}
/**
* An {#link AutoFitTextureView} for camera preview.
*/
private AutoFitTextureView mTextureView;
private Button mButtonVideo;
private CameraDevice mCameraDevice;
private CameraCaptureSession mPreviewSession;
private TextureView.SurfaceTextureListener mSurfaceTextureListener
= new TextureView.SurfaceTextureListener() {
#Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture,
int width, int height) {
openCamera(width, height);
}
#Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture,
int width, int height) {
configureTransform(width, height);
}
#Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
return true;
}
#Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
}
};
private Size mPreviewSize;
private Size mVideoSize;
private MediaRecorder mMediaRecorder;
private boolean mIsRecordingVideo;
private HandlerThread mBackgroundThread;
private Handler mBackgroundHandler;
/**
* A {#link Semaphore} to prevent the app from exiting before closing the camera.
*/
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
/**
* {#link CameraDevice.StateCallback} is called when {#link CameraDevice} changes its status.
*/
private CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
#Override
public void onOpened(CameraDevice cameraDevice) {
mCameraDevice = cameraDevice;
startPreview();
mCameraOpenCloseLock.release();
if (null != mTextureView) {
configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
}
}
#Override
public void onDisconnected(CameraDevice cameraDevice) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
}
#Override
public void onError(CameraDevice cameraDevice, int error) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
Activity activity = getActivity();
if (null != activity) {
activity.finish();
}
}
};
private Integer mSensorOrientation;
private String mNextVideoAbsolutePath;
private CaptureRequest.Builder mPreviewBuilder;
private Surface mRecorderSurface;
public static Camera2VideoFragment newInstance() {
return new Camera2VideoFragment();
}
/**
* In this sample, we choose a video size with 3x4 aspect ratio. Also, we don't use sizes
* larger than 1080p, since MediaRecorder cannot handle such a high-resolution video.
*
* #param choices The list of available sizes
* #return The video size
*/
private static Size chooseVideoSize(Size[] choices) {
for (Size size : choices) {
if (size.getWidth() == size.getHeight() * 4 / 3 && size.getWidth() <= 1080) {
return size;
}
}
Log.e(TAG, "Couldn't find any suitable video size");
return choices[choices.length - 1];
}
/**
* Given {#code choices} of {#code Size}s supported by a camera, chooses the smallest one whose
* width and height are at least as large as the respective requested values, and whose aspect
* ratio matches with the specified value.
*
* #param choices The list of sizes that the camera supports for the intended output class
* #param width The minimum desired width
* #param height The minimum desired height
* #param aspectRatio The aspect ratio
* #return The optimal {#code Size}, or an arbitrary one if none were big enough
*/
private static Size chooseOptimalSize(Size[] choices, int width, int height, Size aspectRatio) {
// Collect the supported resolutions that are at least as big as the preview Surface
List<Size> bigEnough = new ArrayList<Size>();
int w = aspectRatio.getWidth();
int h = aspectRatio.getHeight();
for (Size option : choices) {
if (option.getHeight() == option.getWidth() * h / w &&
option.getWidth() >= width && option.getHeight() >= height) {
bigEnough.add(option);
}
}
// Pick the smallest of those, assuming we found any
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizesByArea());
} else {
Log.e(TAG, "Couldn't find any suitable preview size");
return choices[0];
}
}
#Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
return inflater.inflate(R.layout.fragment_camera2_video, container, false);
}
#Override
public void onViewCreated(final View view, Bundle savedInstanceState) {
mTextureView = (AutoFitTextureView) view.findViewById(R.id.texture);
mButtonVideo = (Button) view.findViewById(R.id.video);
mButtonVideo.setOnClickListener(this);
view.findViewById(R.id.info).setOnClickListener(this);
}
#Override
public void onResume() {
super.onResume();
startBackgroundThread();
if (mTextureView.isAvailable()) {
openCamera(mTextureView.getWidth(), mTextureView.getHeight());
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
#Override
public void onPause() {
closeCamera();
stopBackgroundThread();
super.onPause();
}
#Override
public void onClick(View view) {
switch (view.getId()) {
case R.id.video: {
if (mIsRecordingVideo) {
stopRecordingVideo();
} else {
startRecordingVideo();
}
break;
}
case R.id.info: {
Activity activity = getActivity();
if (null != activity) {
new AlertDialog.Builder(activity)
.setMessage(R.string.intro_message)
.setPositiveButton(android.R.string.ok, null)
.show();
}
break;
}
}
}
/**
* Starts a background thread and its {#link Handler}.
*/
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
/**
* Stops the background thread and its {#link Handler}.
*/
private void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
/**
* Gets whether you should show UI with rationale for requesting permissions.
*
* #param permissions The permissions your app wants to request.
* #return Whether you can show permission rationale UI.
*/
private boolean shouldShowRequestPermissionRationale(String[] permissions) {
for (String permission : permissions) {
if (FragmentCompat.shouldShowRequestPermissionRationale(this, permission)) {
return true;
}
}
return false;
}
/**
* Requests permissions needed for recording video.
*/
private void requestVideoPermissions() {
if (shouldShowRequestPermissionRationale(VIDEO_PERMISSIONS)) {
new ConfirmationDialog().show(getChildFragmentManager(), FRAGMENT_DIALOG);
} else {
FragmentCompat.requestPermissions(this, VIDEO_PERMISSIONS, REQUEST_VIDEO_PERMISSIONS);
}
}
#Override
public void onRequestPermissionsResult(int requestCode, #NonNull String[] permissions,
#NonNull int[] grantResults) {
Log.d(TAG, "onRequestPermissionsResult");
if (requestCode == REQUEST_VIDEO_PERMISSIONS) {
if (grantResults.length == VIDEO_PERMISSIONS.length) {
for (int result : grantResults) {
if (result != PackageManager.PERMISSION_GRANTED) {
ErrorDialog.newInstance(getString(R.string.permission_request))
.show(getChildFragmentManager(), FRAGMENT_DIALOG);
break;
}
}
} else {
ErrorDialog.newInstance(getString(R.string.permission_request))
.show(getChildFragmentManager(), FRAGMENT_DIALOG);
}
} else {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
}
}
private boolean hasPermissionsGranted(String[] permissions) {
for (String permission : permissions) {
if (ActivityCompat.checkSelfPermission(getActivity(), permission)
!= PackageManager.PERMISSION_GRANTED) {
return false;
}
}
return true;
}
/**
* Tries to open a {#link CameraDevice}. The result is listened by `mStateCallback`.
*/
private void openCamera(int width, int height) {
if (!hasPermissionsGranted(VIDEO_PERMISSIONS)) {
requestVideoPermissions();
return;
}
final Activity activity = getActivity();
if (null == activity || activity.isFinishing()) {
return;
}
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
Log.d(TAG, "tryAcquire");
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
String cameraId = manager.getCameraIdList()[0];
// Choose the sizes for camera preview and video recording
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics
.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
mVideoSize = chooseVideoSize(map.getOutputSizes(MediaRecorder.class));
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
width, height, mVideoSize);
int orientation = getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
} else {
mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
}
configureTransform(width, height);
mMediaRecorder = new MediaRecorder();
manager.openCamera(cameraId, mStateCallback, null);
} catch (CameraAccessException e) {
Toast.makeText(activity, "Cannot access the camera.", Toast.LENGTH_SHORT).show();
activity.finish();
} catch (NullPointerException e) {
// Currently an NPE is thrown when the Camera2API is used but not supported on the
// device this code runs.
ErrorDialog.newInstance(getString(R.string.camera_error))
.show(getChildFragmentManager(), FRAGMENT_DIALOG);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera opening.");
}
}
private void closeCamera() {
try {
mCameraOpenCloseLock.acquire();
closePreviewSession();
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
if (null != mMediaRecorder) {
mMediaRecorder.release();
mMediaRecorder = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.");
} finally {
mCameraOpenCloseLock.release();
}
}
/**
* Start the camera preview.
*/
private void startPreview() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
return;
}
try {
closePreviewSession();
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
Surface previewSurface = new Surface(texture);
mPreviewBuilder.addTarget(previewSurface);
mCameraDevice.createCaptureSession(Arrays.asList(previewSurface), new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
mPreviewSession = cameraCaptureSession;
updatePreview();
}
#Override
public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
Activity activity = getActivity();
if (null != activity) {
Toast.makeText(activity, "Failed", Toast.LENGTH_SHORT).show();
}
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
/**
* Update the camera preview. {#link #startPreview()} needs to be called in advance.
*/
private void updatePreview() {
if (null == mCameraDevice) {
return;
}
try {
setUpCaptureRequestBuilder(mPreviewBuilder);
HandlerThread thread = new HandlerThread("CameraPreview");
thread.start();
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void setUpCaptureRequestBuilder(CaptureRequest.Builder builder) {
builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
}
/**
* Configures the necessary {#link android.graphics.Matrix} transformation to `mTextureView`.
* This method should not to be called until the camera preview size is determined in
* openCamera, or until the size of `mTextureView` is fixed.
*
* #param viewWidth The width of `mTextureView`
* #param viewHeight The height of `mTextureView`
*/
private void configureTransform(int viewWidth, int viewHeight) {
Activity activity = getActivity();
if (null == mTextureView || null == mPreviewSize || null == activity) {
return;
}
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale = Math.max(
(float) viewHeight / mPreviewSize.getHeight(),
(float) viewWidth / mPreviewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
}
mTextureView.setTransform(matrix);
}
private void setUpMediaRecorder() throws IOException {
final Activity activity = getActivity();
if (null == activity) {
return;
}
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
if (mNextVideoAbsolutePath == null || mNextVideoAbsolutePath.isEmpty()) {
mNextVideoAbsolutePath = getVideoFilePath(getActivity());
}
mMediaRecorder.setOutputFile(mNextVideoAbsolutePath);
mMediaRecorder.setVideoEncodingBitRate(10000000);
mMediaRecorder.setVideoFrameRate(30);
mMediaRecorder.setVideoSize(mVideoSize.getWidth(), mVideoSize.getHeight());
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
switch (mSensorOrientation) {
case SENSOR_ORIENTATION_DEFAULT_DEGREES:
mMediaRecorder.setOrientationHint(DEFAULT_ORIENTATIONS.get(rotation));
break;
case SENSOR_ORIENTATION_INVERSE_DEGREES:
mMediaRecorder.setOrientationHint(INVERSE_ORIENTATIONS.get(rotation));
break;
}
mMediaRecorder.prepare();
}
private String getVideoFilePath(Context context) {
return context.getExternalFilesDir(null).getAbsolutePath() + "/"
+ System.currentTimeMillis() + ".mp4";
}
private void startRecordingVideo() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
return;
}
try {
closePreviewSession();
setUpMediaRecorder();
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
List<Surface> surfaces = new ArrayList<>();
// Set up Surface for the camera preview
Surface previewSurface = new Surface(texture);
surfaces.add(previewSurface);
mPreviewBuilder.addTarget(previewSurface);
// Set up Surface for the MediaRecorder
mRecorderSurface = mMediaRecorder.getSurface();
surfaces.add(mRecorderSurface);
mPreviewBuilder.addTarget(mRecorderSurface);
// Start a capture session
// Once the session starts, we can update the UI and start recording
mCameraDevice.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(#NonNull CameraCaptureSession cameraCaptureSession) {
mPreviewSession = cameraCaptureSession;
updatePreview();
getActivity().runOnUiThread(new Runnable() {
#Override
public void run() {
// UI
mButtonVideo.setText(R.string.stop);
mIsRecordingVideo = true;
// Start recording
mMediaRecorder.start();
}
});
}
#Override
public void onConfigureFailed(#NonNull CameraCaptureSession cameraCaptureSession) {
Activity activity = getActivity();
if (null != activity) {
Toast.makeText(activity, "Failed", Toast.LENGTH_SHORT).show();
}
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
private void closePreviewSession() {
if (mPreviewSession != null) {
mPreviewSession.close();
mPreviewSession = null;
}
}
private void stopRecordingVideo() {
// UI
mIsRecordingVideo = false;
mButtonVideo.setText(R.string.record);
// Stop recording
mMediaRecorder.stop();
mMediaRecorder.reset();
Activity activity = getActivity();
if (null != activity) {
Toast.makeText(activity, "Video saved: " + mNextVideoAbsolutePath,
Toast.LENGTH_SHORT).show();
Log.d(TAG, "Video saved: " + mNextVideoAbsolutePath);
}
mNextVideoAbsolutePath = null;
startPreview();
}
/**
* Compares two {#code Size}s based on their areas.
*/
static class CompareSizesByArea implements Comparator<Size> {
#Override
public int compare(Size lhs, Size rhs) {
// We cast here to ensure the multiplications won't overflow
return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
(long) rhs.getWidth() * rhs.getHeight());
}
}
public static class ErrorDialog extends DialogFragment {
private static final String ARG_MESSAGE = "message";
public static ErrorDialog newInstance(String message) {
ErrorDialog dialog = new ErrorDialog();
Bundle args = new Bundle();
args.putString(ARG_MESSAGE, message);
dialog.setArguments(args);
return dialog;
}
#Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
final Activity activity = getActivity();
return new AlertDialog.Builder(activity)
.setMessage(getArguments().getString(ARG_MESSAGE))
.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
#Override
public void onClick(DialogInterface dialogInterface, int i) {
activity.finish();
}
})
.create();
}
}
public static class ConfirmationDialog extends DialogFragment {
#Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
final Fragment parent = getParentFragment();
return new AlertDialog.Builder(getActivity())
.setMessage(R.string.permission_request)
.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
#Override
public void onClick(DialogInterface dialog, int which) {
FragmentCompat.requestPermissions(parent, VIDEO_PERMISSIONS,
REQUEST_VIDEO_PERMISSIONS);
}
})
.setNegativeButton(android.R.string.cancel,
new DialogInterface.OnClickListener() {
#Override
public void onClick(DialogInterface dialog, int which) {
parent.getActivity().finish();
}
})
.create();
}
}
}
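To use the fragment it has to be attached to a hosting activity; a minimal sketch of such an activity (the layout name and container id here are assumptions, not part of the sample):
import android.app.Activity;
import android.os.Bundle;
public class CameraVideoActivity extends Activity {
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Assumed layout that contains an empty FrameLayout with id "container"
        setContentView(R.layout.activity_camera_video);
        if (savedInstanceState == null) {
            getFragmentManager().beginTransaction()
                    .replace(R.id.container, Camera2VideoFragment.newInstance())
                    .commit();
        }
    }
}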
Reference from: Camera2 Video

Android camera2 qr code reader

I have a fragment which scans for a QR code using the camera2 API and the ZXing (Google's "zebra crossing") API. It works fine on all the phones I've tried; the problem is with one specific tablet, the NVIDIA SHIELD. It just does not find the QR code in the image. I have tried a lot:
Smaller and bigger screen/image dimensions.
Different camera settings like ISO, contrast, lighter, darker.
Different image formats like JPEG, YUV_422_888, ...
Using planes 0, 1, 2...
Sorry for such a big snippet of code; ask questions and I will respond ASAP.
Here is my code:
public class FragmentDecoder extends Fragment
implements FragmentCompat.OnRequestPermissionsResultCallback {
private static boolean accessGranted = true;
private static boolean showingText = false;
private ImageView camera_guide_image;
private TextView decoderText;
private ImageButton flashButton;
private static final int MAX_PREVIEW_WIDTH = 1920;
private static final int MAX_PREVIEW_HEIGHT = 1080;
private int mSensorOrientation;
private boolean mFlashSupported;
private static final String TAG = FragmentDecoder.class.getName();
private final CameraCaptureSession.CaptureCallback mCaptureCallback =
new CameraCaptureSession.CaptureCallback() {
private void process(CaptureResult result) {
}
#Override
public void onCaptureProgressed(#NonNull CameraCaptureSession session, #NonNull CaptureRequest request,
#NonNull CaptureResult partialResult) {
process(partialResult);
}
#Override
public void onCaptureCompleted(#NonNull CameraCaptureSession session, #NonNull CaptureRequest request,
#NonNull TotalCaptureResult result) {
process(result);
}
};
private final TextureView.SurfaceTextureListener mSurfaceTextureListener =
new TextureView.SurfaceTextureListener() {
#Override
public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
openCamera(width, height);
}
#Override
public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {
configureTransform(width, height);
}
#Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) {
return true;
}
#Override
public void onSurfaceTextureUpdated(SurfaceTexture texture) {
}
};
private QRCodeReader mQrReader;
private String mCameraId;
private AutoFitTextureView mTextureView;
private final ImageReader.OnImageAvailableListener mOnImageAvailableListener =
new ImageReader.OnImageAvailableListener() {
#Override
public void onImageAvailable(ImageReader reader) {
Image img = null;
Result rawResult = null;
try {
img = reader.acquireLatestImage();
if (img == null) throw new NullPointerException("cannot be null");
ByteBuffer buffer = img.getPlanes()[0].getBuffer();
byte[] data = new byte[buffer.remaining()];
buffer.get(data);
int width = img.getWidth();
int height = img.getHeight();
PlanarYUVLuminanceSource source = new PlanarYUVLuminanceSource(data, width, height, 0, 0, width, height, false);
BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
rawResult = mQrReader.decode(bitmap);
if (rawResult == null) {
throw new NullPointerException("no QR code");
}
String decoded = rawResult.getText();
if (accessGranted) {
Log.e(TAG, "entered the accessGranted area!");
accessGranted = false;
boolean isPrivKey = BRWalletManager.getInstance(getActivity()).confirmSweep(getActivity(), decoded);
if (isPrivKey) {
getActivity().runOnUiThread(new Runnable() {
#Override
public void run() {
BRAnimator.hideDecoderFragment();
}
});
return;
}
String validationString = validateResult(decoded);
if (Objects.equals(validationString, BRConstants.TEXT_EMPTY)) {
onQRCodeRead((MainActivity) getActivity(), rawResult.getText());
} else {
if (!showingText) {
showingText = true;
setCameraGuide(BRConstants.CAMERA_GUIDE_RED);
setGuideText(validationString);
new Handler().postDelayed(new Runnable() {
#Override
public void run() {
showingText = false;
}
}, 1000);
}
accessGranted = true;
}
}
} catch (ReaderException | NullPointerException | IllegalStateException ignored) {
if (!showingText)
setCameraGuide(BRConstants.CAMERA_GUIDE);
// Log.e(TAG, "Reader shows an exception! ", ignored);
/* Ignored */
} finally {
mQrReader.reset();
if (img != null)
img.close();
}
if (rawResult == null) {
if (!showingText)
setCameraGuide(BRConstants.CAMERA_GUIDE);
}
}
};
private CameraCaptureSession mCaptureSession;
private CameraDevice mCameraDevice;
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
#Override
public void onOpened(#NonNull CameraDevice cameraDevice) {
// This method is called when the camera is opened. We start camera preview here.
mCameraOpenCloseLock.release();
mCameraDevice = cameraDevice;
createCameraPreviewSession();
}
#Override
public void onDisconnected(#NonNull CameraDevice cameraDevice) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
}
#Override
public void onError(#NonNull CameraDevice cameraDevice, int error) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
getActivity().onBackPressed();
}
};
private Size mPreviewSize;
private HandlerThread mBackgroundThread;
private Handler mBackgroundHandler;
private ImageReader mImageReader;
private CaptureRequest.Builder mPreviewRequestBuilder;
private CaptureRequest mPreviewRequest;
private final Semaphore mCameraOpenCloseLock = new Semaphore(1);
private int flashButtonCount;
private static Size chooseOptimalSize(Size[] choices, int textureViewWidth,
int textureViewHeight, int maxWidth, int maxHeight, Size aspectRatio) {
List<Size> bigEnough = new ArrayList<>();
List<Size> notBigEnough = new ArrayList<>();
int w = aspectRatio.getWidth();
int h = aspectRatio.getHeight();
for (Size option : choices) {
if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight &&
option.getHeight() == option.getWidth() * h / w) {
if (option.getWidth() >= textureViewWidth &&
option.getHeight() >= textureViewHeight) {
bigEnough.add(option);
} else {
notBigEnough.add(option);
}
}
}
// Pick the smallest of those big enough. If there is no one big enough, pick the
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizesByArea());
} else if (notBigEnough.size() > 0) {
return Collections.max(notBigEnough, new CompareSizesByArea());
} else {
Log.e(TAG, "Couldn't find any suitable preview size");
return choices[0];
}
}
#Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_decoder, container, false);
mQrReader = new QRCodeReader();
mTextureView = new AutoFitTextureView(getActivity());
RelativeLayout layout = (RelativeLayout) rootView.findViewById(R.id.fragment_decoder_layout);
camera_guide_image = (ImageView) rootView.findViewById(R.id.decoder_camera_guide_image);
decoderText = (TextView) rootView.findViewById(R.id.decoder_text);
flashButton = (ImageButton) rootView.findViewById(R.id.button_flash);
flashButton.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View v) {
try {
if (mPreviewRequestBuilder == null || mCaptureSession == null)
return;
if (++flashButtonCount % 2 != 0) {
mPreviewRequestBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_TORCH);
mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), null, null);
flashButton.setBackgroundResource(R.drawable.flash_on);
SpringAnimator.showAnimation(flashButton);
} else {
mPreviewRequestBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_OFF);
mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), null, null);
flashButton.setBackgroundResource(R.drawable.flash_off);
SpringAnimator.showAnimation(flashButton);
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
});
layout.addView(mTextureView, 0);
startBackgroundThread();
return rootView;
}
#Override
public void onResume() {
super.onResume();
accessGranted = true;
showingText = false;
new Handler().post(new Runnable() {
#Override
public void run() {
if (camera_guide_image != null)
SpringAnimator.showExpandCameraGuide(camera_guide_image);
}
});
((BreadWalletApp) getActivity().getApplication()).hideKeyboard(getActivity());
// When the screen is turned off and turned back on, the SurfaceTexture is already
// available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
// a camera and start preview from here (otherwise, we wait until the surface is ready in
// the SurfaceTextureListener).
if (mTextureView.isAvailable()) {
openCamera(mTextureView.getWidth(), mTextureView.getHeight());
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
#Override
public void onPause() {
closeCamera();
stopBackgroundThread();
super.onPause();
}
private void setUpCameraOutputs(int width, int height) {
Activity activity = getActivity();
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
for (String cameraId : manager.getCameraIdList()) {
CameraCharacteristics characteristics
= manager.getCameraCharacteristics(cameraId);
// We don't use a front facing camera in this sample.
Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
continue;
}
StreamConfigurationMap map = characteristics.get(
CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map == null) {
continue;
}
// For still image captures, we use the largest available size.
Size largest = Collections.max(
Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)),
new CompareSizesByArea());
mImageReader = ImageReader.newInstance(largest.getWidth()/2, largest.getHeight()/2,
ImageFormat.YUV_420_888, /*maxImages*/2);
mImageReader.setOnImageAvailableListener(
mOnImageAvailableListener, mBackgroundHandler);
// Find out if we need to swap dimension to get the preview size relative to sensor
// coordinate.
int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
//noinspection ConstantConditions
mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
boolean swappedDimensions = false;
switch (displayRotation) {
case Surface.ROTATION_0:
case Surface.ROTATION_180:
if (mSensorOrientation == 90 || mSensorOrientation == 270) {
swappedDimensions = true;
}
break;
case Surface.ROTATION_90:
case Surface.ROTATION_270:
if (mSensorOrientation == 0 || mSensorOrientation == 180) {
swappedDimensions = true;
}
break;
default:
Log.e(TAG, "Display rotation is invalid: " + displayRotation);
}
Point displaySize = new Point();
activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
int rotatedPreviewWidth = width;
int rotatedPreviewHeight = height;
int maxPreviewWidth = displaySize.x;
int maxPreviewHeight = displaySize.y;
if (swappedDimensions) {
rotatedPreviewWidth = height;
rotatedPreviewHeight = width;
maxPreviewWidth = displaySize.y;
maxPreviewHeight = displaySize.x;
}
if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
maxPreviewWidth = MAX_PREVIEW_WIDTH;
}
if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
maxPreviewHeight = MAX_PREVIEW_HEIGHT;
}
// Danger, W.R.! Attempting to use too large a preview size could exceed the camera
// bus' bandwidth limitation, resulting in gorgeous previews but the storage of
// garbage capture data.
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth,
maxPreviewHeight, largest);
// We fit the aspect ratio of TextureView to the size of preview we picked.
int orientation = getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
mTextureView.setAspectRatio(
mPreviewSize.getWidth(), mPreviewSize.getHeight());
} else {
mTextureView.setAspectRatio(
mPreviewSize.getHeight(), mPreviewSize.getWidth());
}
// Check if the flash is supported.
Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
mFlashSupported = available == null ? false : available;
mCameraId = cameraId;
return;
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void openCamera(int width, int height) {
setUpCameraOutputs(width, height);
configureTransform(width, height);
CameraManager manager = (CameraManager) getActivity().getSystemService(Context.CAMERA_SERVICE);
try {
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
if (ActivityCompat.checkSelfPermission(getActivity(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
return;
}
if (mCameraId != null)
manager.openCamera(mCameraId, mStateCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
}
}
private void closeCamera() {
try {
mCameraOpenCloseLock.acquire();
if (mCaptureSession != null) {
mCaptureSession.close();
mCaptureSession = null;
}
if (mCameraDevice != null) {
mCameraDevice.close();
mCameraDevice = null;
}
if (mImageReader != null) {
mImageReader.close();
mImageReader = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
} finally {
mCameraOpenCloseLock.release();
}
}
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
private void stopBackgroundThread() {
try {
mBackgroundThread.quitSafely();
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException | NullPointerException e) {
e.printStackTrace();
mBackgroundThread = null;
mBackgroundHandler = null;
}
}
private void createCameraPreviewSession() {
try {
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
// We configure the size of default buffer to be the size of camera preview we want.
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface surface = new Surface(texture);
Surface mImageSurface = mImageReader.getSurface();
mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.addTarget(mImageSurface);
mPreviewRequestBuilder.addTarget(surface);
// Here, we create a CameraCaptureSession for camera preview.
mCameraDevice.createCaptureSession(Arrays.asList(mImageSurface, surface),
new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(#NonNull CameraCaptureSession cameraCaptureSession) {
// The camera is already closed
// Log.e(TAG, "onConfigured");
if (mCameraDevice == null) return;
// When the session is ready, we start displaying the preview.
mCaptureSession = cameraCaptureSession;
try {
// Auto focus should be continuous for camera preview.
mPreviewRequestBuilder.set(CONTROL_AF_MODE, CONTROL_AF_MODE_CONTINUOUS_PICTURE);
mPreviewRequest = mPreviewRequestBuilder.build();
mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback,
mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
#Override
public void onConfigureFailed(#NonNull CameraCaptureSession cameraCaptureSession) {
Toast.makeText(getActivity(), getActivity().getResources().getString(R.string.failed),
Toast.LENGTH_SHORT).show();
}
}, null
);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void configureTransform(int viewWidth, int viewHeight) {
if (mTextureView == null || mPreviewSize == null) return;
int rotation = getActivity().getWindowManager().getDefaultDisplay().getRotation();
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale = Math.max(
(float) viewHeight / mPreviewSize.getHeight(),
(float) viewWidth / mPreviewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
}
mTextureView.setTransform(matrix);
}
private static synchronized void onQRCodeRead(final MainActivity app, final String text) {
app.runOnUiThread(new Runnable() {
#Override
public void run() {
if (text != null) {
BRAnimator.hideDecoderFragment();
Log.e(TAG, "BEFORE processRequest");
RequestHandler.processRequest(app, text);
}
}
});
}
private static class CompareSizesByArea implements Comparator<Size> {
#Override
public int compare(Size lhs, Size rhs) {
// We cast here to ensure the multiplications won't overflow
return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
(long) rhs.getWidth() * rhs.getHeight());
}
}
private int getStatusBarHeight() {
Rect rectangle = new Rect();
Window window = getActivity().getWindow();
window.getDecorView().getWindowVisibleDisplayFrame(rectangle);
int statusBarHeight = rectangle.top;
int contentViewTop =
window.findViewById(Window.ID_ANDROID_CONTENT).getTop();
int titleBarHeight = contentViewTop - statusBarHeight;
return statusBarHeight + titleBarHeight;
}
private String validateResult(String str) {
RequestObject obj = null;
try {
obj = RequestHandler.getRequestFromString(str);
} catch (InvalidAlgorithmParameterException e) {
e.printStackTrace();
}
if (obj == null) {
return getActivity().getResources().getString(R.string.fragmentdecoder_not_a_bitcoin_qr_code);
}
if (obj.r != null) {
return BRConstants.TEXT_EMPTY;
}
if (obj.address != null) {
if (BRWalletManager.validateAddress(obj.address)) {
return BRConstants.TEXT_EMPTY;
} else {
return getActivity().getResources().getString(R.string.fragmentdecoder_not_valid_bitcoin_address);
}
}
return getActivity().getResources().getString(R.string.fragmentdecoder_not_a_bitcoin_qr_code);
}
private void setCameraGuide(final String str) {
getActivity().runOnUiThread(new Runnable() {
#Override
public void run() {
if (str.equalsIgnoreCase(BRConstants.CAMERA_GUIDE)) {
camera_guide_image.setImageResource(R.drawable.cameraguide);
setGuideText(BRConstants.TEXT_EMPTY);
} else if (str.equalsIgnoreCase(BRConstants.CAMERA_GUIDE_RED)) {
camera_guide_image.setImageResource(R.drawable.cameraguide_red);
} else {
throw new IllegalArgumentException("CAMERA_GUIDE and CAMERA_GUIDE_RED only");
}
}
});
}
private void setGuideText(final String str) {
Activity activity = getActivity();
if (activity != null)
activity.runOnUiThread(new Runnable() {
#Override
public void run() {
decoderText.setText(str);
}
});
}
}
