I'm trying to run the Android camera2 API in a background service and then process each frame in the ImageReader.OnImageAvailableListener callback. I already use the suggested raw format YUV_420_888 to get the maximum fps, but I only get around 7 fps at a resolution of 640x480. This is even slower than what I get with the old Camera interface (I want to upgrade to Camera2 to get a higher fps) or with the OpenCV JavaCameraView (which I can't use because I need to run the processing in a background service).
Below is my service class. What am I missing?
My phone is Redmi Note 3 running Android 5.0.2
public class Camera2ServiceYUV extends Service {
protected static final String TAG = "VideoProcessing";
protected static final int CAMERACHOICE = CameraCharacteristics.LENS_FACING_BACK;
protected CameraDevice cameraDevice;
protected CameraCaptureSession captureSession;
protected ImageReader imageReader;
// A semaphore to prevent the app from exiting before closing the camera.
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
public static final String RESULT_RECEIVER = "resultReceiver";
private static final int JPEG_COMPRESSION = 90;
public static final int RESULT_OK = 0;
public static final int RESULT_DEVICE_NO_CAMERA= 1;
public static final int RESULT_GET_CAMERA_FAILED = 2;
public static final int RESULT_ALREADY_RUNNING = 3;
public static final int RESULT_NOT_RUNNING = 4;
private static final String START_SERVICE_COMMAND = "startServiceCommands";
private static final int COMMAND_NONE = -1;
private static final int COMMAND_START = 0;
private static final int COMMAND_STOP = 1;
private boolean mRunning = false;
public Camera2ServiceYUV() {
}
public static void startToStart(Context context, ResultReceiver resultReceiver) {
Intent intent = new Intent(context, Camera2ServiceYUV.class);
intent.putExtra(START_SERVICE_COMMAND, COMMAND_START);
intent.putExtra(RESULT_RECEIVER, resultReceiver);
context.startService(intent);
}
public static void startToStop(Context context, ResultReceiver resultReceiver) {
Intent intent = new Intent(context, Camera2ServiceYUV.class);
intent.putExtra(START_SERVICE_COMMAND, COMMAND_STOP);
intent.putExtra(RESULT_RECEIVER, resultReceiver);
context.startService(intent);
}
// SERVICE INTERFACE
#Override
public int onStartCommand(Intent intent, int flags, int startId) {
switch (intent.getIntExtra(START_SERVICE_COMMAND, COMMAND_NONE)) {
case COMMAND_START:
startCamera(intent);
break;
case COMMAND_STOP:
stopCamera(intent);
break;
default:
throw new UnsupportedOperationException("Cannot start the camera service with an illegal command.");
}
return START_STICKY;
}
#Override
public void onDestroy() {
try {
captureSession.abortCaptures();
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
}
captureSession.close();
}
#Override
public IBinder onBind(Intent intent) {
return null;
}
// CAMERA2 INTERFACE
/**
* 1. The android CameraManager class is used to manage all the camera devices in our android device
* Each camera device has a range of properties and settings that describe the device.
* It can be obtained through the camera characteristics.
*/
public void startCamera(Intent intent) {
final ResultReceiver resultReceiver = intent.getParcelableExtra(RESULT_RECEIVER);
if (mRunning) {
resultReceiver.send(RESULT_ALREADY_RUNNING, null);
return;
}
mRunning = true;
CameraManager manager = (CameraManager) getSystemService(CAMERA_SERVICE);
try {
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
String pickedCamera = getCamera(manager);
Log.e(TAG,"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA " + pickedCamera);
manager.openCamera(pickedCamera, cameraStateCallback, null);
CameraCharacteristics characteristics = manager.getCameraCharacteristics(pickedCamera);
Size[] jpegSizes = null;
if (characteristics != null) {
jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.YUV_420_888);
}
int width = 640;
int height = 480;
// if (jpegSizes != null && 0 < jpegSizes.length) {
// width = jpegSizes[jpegSizes.length -1].getWidth();
// height = jpegSizes[jpegSizes.length - 1].getHeight();
// }
// for(Size s : jpegSizes)
// {
// Log.e(TAG,"Size = " + s.toString());
// }
// DEBUG
StreamConfigurationMap map = characteristics.get(
CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map == null) {
return;
}
Log.e(TAG,"Width = " + width + ", Height = " + height);
Log.e(TAG,"output stall duration = " + map.getOutputStallDuration(ImageFormat.YUV_420_888, new Size(width,height)) );
Log.e(TAG,"Min output stall duration = " + map.getOutputMinFrameDuration(ImageFormat.YUV_420_888, new Size(width,height)) );
// Size[] sizeList = map.getInputSizes(ImageFormat.YUV_420_888);
// for(Size s : sizeList)
// {
// Log.e(TAG,"Size = " + s.toString());
// }
imageReader = ImageReader.newInstance(width, height, ImageFormat.YUV_420_888, 2 /* images buffered */);
imageReader.setOnImageAvailableListener(onImageAvailableListener, null);
Log.i(TAG, "imageReader created");
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
resultReceiver.send(RESULT_DEVICE_NO_CAMERA, null);
}catch (InterruptedException e) {
resultReceiver.send(RESULT_GET_CAMERA_FAILED, null);
throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
}
catch(SecurityException se)
{
resultReceiver.send(RESULT_GET_CAMERA_FAILED, null);
throw new RuntimeException("Security permission exception while trying to open the camera.", se);
}
resultReceiver.send(RESULT_OK, null);
}
// We can pick the camera being used, i.e. rear camera in this case.
private String getCamera(CameraManager manager) {
try {
for (String cameraId : manager.getCameraIdList()) {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
int cOrientation = characteristics.get(CameraCharacteristics.LENS_FACING);
if (cOrientation == CAMERACHOICE) {
return cameraId;
}
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
return null;
}
/**
* 1.1 Callbacks when the camera changes its state - opened, disconnected, or error.
*/
protected CameraDevice.StateCallback cameraStateCallback = new CameraDevice.StateCallback() {
#Override
public void onOpened(#NonNull CameraDevice camera) {
Log.i(TAG, "CameraDevice.StateCallback onOpened");
mCameraOpenCloseLock.release();
cameraDevice = camera;
createCaptureSession();
}
#Override
public void onDisconnected(#NonNull CameraDevice camera) {
Log.w(TAG, "CameraDevice.StateCallback onDisconnected");
mCameraOpenCloseLock.release();
camera.close();
cameraDevice = null;
}
#Override
public void onError(#NonNull CameraDevice camera, int error) {
Log.e(TAG, "CameraDevice.StateCallback onError " + error);
mCameraOpenCloseLock.release();
camera.close();
cameraDevice = null;
}
};
/**
* 2. To capture or stream images from a camera device, the application must first create
* a camera capture captureSession.
* The camera capture needs a surface to output what has been captured, in this case
* we use ImageReader in order to access the frame data.
*/
public void createCaptureSession() {
try {
cameraDevice.createCaptureSession(Arrays.asList(imageReader.getSurface()), sessionStateCallback, null);
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
}
}
protected CameraCaptureSession.StateCallback sessionStateCallback = new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(#NonNull CameraCaptureSession session) {
Log.i(TAG, "CameraCaptureSession.StateCallback onConfigured");
// The camera is already closed
if (null == cameraDevice) {
return;
}
// When the captureSession is ready, we start to grab the frame.
Camera2ServiceYUV.this.captureSession = session;
try {
session.setRepeatingRequest(createCaptureRequest(), null, null);
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
}
}
#Override
public void onConfigureFailed(#NonNull CameraCaptureSession session) {
Log.e(TAG, "CameraCaptureSession.StateCallback onConfigureFailed");
}
};
/**
* 3. The application then needs to construct a CaptureRequest, which defines all the capture parameters
* needed by a camera device to capture a single image.
*/
private CaptureRequest createCaptureRequest() {
try {
/**
* Check other templates for further details.
* TEMPLATE_MANUAL = 6
* TEMPLATE_PREVIEW = 1
* TEMPLATE_RECORD = 3
* TEMPLATE_STILL_CAPTURE = 2
* TEMPLATE_VIDEO_SNAPSHOT = 4
* TEMPLATE_ZERO_SHUTTER_LAG = 5
*
* TODO: can set camera features like auto focus, auto flash here
* captureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
*/
CaptureRequest.Builder captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
// captureRequestBuilder.set(CaptureRequest.EDGE_MODE,
// CaptureRequest.EDGE_MODE_OFF);
// captureRequestBuilder.set(
// CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
// CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON);
// captureRequestBuilder.set(
// CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE,
// CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE_OFF);
// captureRequestBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE,
// CaptureRequest.NOISE_REDUCTION_MODE_OFF);
// captureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
// CaptureRequest.CONTROL_AF_TRIGGER_CANCEL);
//
// captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
// captureRequestBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);
captureRequestBuilder.addTarget(imageReader.getSurface());
return captureRequestBuilder.build();
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
return null;
}
}
/**
* ImageReader provides a surface for the camera to output what has been captured.
* Upon the image available, call processImage() to process the image as desired.
*/
private long frameTime = 0;
private ImageReader.OnImageAvailableListener onImageAvailableListener = new ImageReader.OnImageAvailableListener() {
#Override
public void onImageAvailable(ImageReader reader) {
Log.i(TAG, "called ImageReader.OnImageAvailable");
Image img = reader.acquireLatestImage();
if (img != null) {
if( frameTime != 0 )
{
Log.e(TAG, "fps = " + (float)(1000.0 / (float)(SystemClock.elapsedRealtime() - frameTime)) + " fps");
}
frameTime = SystemClock.elapsedRealtime();
img.close();
}
}
};
private void processImage(Image image) {
Mat outputImage = imageToMat(image);
Bitmap bmp = Bitmap.createBitmap(outputImage.cols(), outputImage.rows(), Bitmap.Config.ARGB_8888);
Utils.bitmapToMat(bmp, outputImage);
Point mid = new Point(0, 0);
Point inEnd = new Point(outputImage.cols(), outputImage.rows());
Imgproc.line(outputImage, mid, inEnd, new Scalar(255, 0, 0), 2, Core.LINE_AA, 0);
Utils.matToBitmap(outputImage, bmp);
Intent broadcast = new Intent();
broadcast.setAction("your_load_photo_action");
broadcast.putExtra("BitmapImage", bmp);
sendBroadcast(broadcast);
}
private Mat imageToMat(Image image) {
ByteBuffer buffer;
int rowStride;
int pixelStride;
int width = image.getWidth();
int height = image.getHeight();
int offset = 0;
Image.Plane[] planes = image.getPlanes();
byte[] data = new byte[image.getWidth() * image.getHeight() * ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888) / 8];
byte[] rowData = new byte[planes[0].getRowStride()];
for (int i = 0; i < planes.length; i++) {
buffer = planes[i].getBuffer();
rowStride = planes[i].getRowStride();
pixelStride = planes[i].getPixelStride();
int w = (i == 0) ? width : width / 2;
int h = (i == 0) ? height : height / 2;
for (int row = 0; row < h; row++) {
int bytesPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888) / 8;
if (pixelStride == bytesPerPixel) {
int length = w * bytesPerPixel;
buffer.get(data, offset, length);
// Advance buffer the remainder of the row stride, unless on the last row.
// Otherwise, this will throw an IllegalArgumentException because the buffer
// doesn't include the last padding.
if (h - row != 1) {
buffer.position(buffer.position() + rowStride - length);
}
offset += length;
} else {
// On the last row only read the width of the image minus the pixel stride
// plus one. Otherwise, this will throw a BufferUnderflowException because the
// buffer doesn't include the last padding.
if (h - row == 1) {
buffer.get(rowData, 0, width - pixelStride + 1);
} else {
buffer.get(rowData, 0, rowStride);
}
for (int col = 0; col < w; col++) {
data[offset++] = rowData[col * pixelStride];
}
}
}
}
// Finally, create the Mat.
Mat mat = new Mat(height + height / 2, width, CV_8UC1);
mat.put(0, 0, data);
return mat;
}
private void stopCamera(Intent intent) {
ResultReceiver resultReceiver = intent.getParcelableExtra(RESULT_RECEIVER);
if (!mRunning) {
resultReceiver.send(RESULT_NOT_RUNNING, null);
return;
}
closeCamera();
resultReceiver.send(RESULT_OK, null);
mRunning = false;
Log.d(TAG, "Service is finished.");
}
/**
* Closes the current {#link CameraDevice}.
*/
private void closeCamera() {
try {
mCameraOpenCloseLock.acquire();
if (null != captureSession) {
captureSession.close();
captureSession = null;
}
if (null != cameraDevice) {
cameraDevice.close();
cameraDevice = null;
}
if (null != imageReader) {
imageReader.close();
imageReader = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
} finally {
mCameraOpenCloseLock.release();
}
}
}
I bumped into this problem recently when I tried to upgrade my AR app from the camera1 to the camera2 API. I used a mid-range device for testing (Meizu S6), which has an Exynos 7872 CPU and a Mali-G71 GPU. What I want to achieve is a steady 30 fps AR experience.
But through the migration I found that it's quite tricky to get a decent preview frame rate using the Camera2 API.
I configured my capture request using TEMPLATE_PREVIEW
mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
Then I Put 2 surfaces, one for preview which is a surfaceTexture at size 1280x720,
another ImageReader at size 1280x720 for image processing.
// ImageReader that receives a YUV_420_888 copy of every frame for processing
// (2 images buffered).
mImageReader = ImageReader.newInstance(
mVideoSize.getWidth(),
mVideoSize.getHeight(),
ImageFormat.YUV_420_888,
2);
// Two output targets: the on-screen preview SurfaceTexture and the ImageReader.
List<Surface> surfaces =new ArrayList<>();
Surface previewSurface = new Surface(mSurfaceTexture);
surfaces.add(previewSurface);
mPreviewBuilder.addTarget(previewSurface);
Surface frameCaptureSurface = mImageReader.getSurface();
surfaces.add(frameCaptureSurface);
mPreviewBuilder.addTarget(frameCaptureSurface);
// Continuous autofocus; start the repeating request on the background handler.
mPreviewBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), captureCallback, mBackgroundHandler);
Everything works as expected: my TextureView gets updated and the frame callback gets called too — except that the frame rate is about 10 fps, and I haven't even done any image processing yet.
I have experimented many Camera2 API settings include SENSOR_FRAME_DURATION and different ImageFormat and size combinations but none of them improve the frame rate. But if I just remove the ImageReader from output surfaces, then preview gets 30 fps easily!
So I guess the problem is that adding the ImageReader as a Camera2 output surface decreases the preview frame rate drastically — at least in my case. So what is the solution?
My solution is glReadPixel
I know glReadPixels is considered one of the evil calls, because it copies bytes from the GPU back to main memory and forces OpenGL to flush its draw commands, so for performance's sake we'd better avoid it. But surprisingly, glReadPixels is actually pretty fast and provides a much better frame rate than ImageReader's YUV_420_888 output.
In addition to reduce the memory overhead I make another draw call with smaller frame buffer like 360x640 instead of preview's 720p dedicated for feature detection.
Based on the implementation of camera2 by the openCV library.
I had the same problem, then I noticed this piece of code in the openCV code for the JavaCamera2View, you need to change the settings of the CaptureRequest.Builder that way:
// Capture request configured the way OpenCV's JavaCamera2View does it:
// explicitly setting the AF and AE control modes is what raised the frame
// rate from ~10 fps to ~28-30 fps.
CaptureRequest.Builder captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
It changed the fps from 10fps to around 28-30fps for me. Worked for me with two target surfaces, one surface of the preview textureview, the second of the ImageReader:
// Both the preview TextureView surface and the ImageReader surface are
// added as targets of the same capture request.
Surface readerSurface = imageReader.getSurface();
Surface surface = new Surface(surfaceTexture);
captureBuilder.addTarget(surface);
captureBuilder.addTarget(readerSurface);
Cannot post a comment (not enough reps). But running into the same issue with Redmi 6.
If using the TextureView for previewing the camera output I get around 30 fps, but after replacing it with an ImageReader it drops to 8-9 fps. All the camera configs are the same in either case.
Interesting enough, on trying out the CameraXBasic, it showed the same issue. The updates from Camera were sluggish. But the android-Camera2Basic (using TextureView) was running without any issues.
Update: 1
Tested out with lowering the preview size from 1280x720 to 640x480, and as expected saw a better performance.
This is what I know after tweaking with it a little: the problem lies in ImageReader's maxImages parameter. I changed it from 2 to 3 and then to 56, and it changed the fps quite a lot. My theory is that the ImageReader surface tends to block the camera from saving new frames into its buffer queue while the Image obtained in ImageReader.OnImageAvailableListener is still being processed and hasn't been released — in other words, the camera wants a buffer but doesn't have a free one. So by increasing ImageReader's maxImages we give camera2 enough room to keep saving images.
I created a custom camera app that saves the image in SD.
I have another activity that involves an ImageView.
I want to display the photo taken from camera in ImageView.
How should I do that?
public class CameraActivity extends Activity {
private static final String TAG = "CUSTOMCAM";
public static final int MEDIA_TYPE_IMAGE = 1;
public static final int MEDIA_TYPE_VIDEO = 2;
private Camera mCamera;
private CameraPreview mPreview;
private SensorManager sensorManager = null;
private int orientation;
private ExifInterface exif;
private int deviceHeight;
private Button ibRetake;
private Button ibUse;
private Button ibCapture;
private FrameLayout flBtnContainer;
private File sdRoot;
private String dir;
private String fileName;
private ImageView rotatingImage;
private int degrees = -1;
private Uri imageUri;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_camera);
// Setting all the path for the image
sdRoot = Environment.getExternalStorageDirectory();
dir = "/DCIM/Camera/";
// Getting all the needed elements from the layout
rotatingImage = (ImageView) findViewById(R.id.imageView1);
ibRetake = (Button) findViewById(R.id.ibRetake);
ibUse = (Button) findViewById(R.id.ibUse);
ibCapture = (Button) findViewById(R.id.ibCapture);
flBtnContainer = (FrameLayout) findViewById(R.id.flBtnContainer);
// Getting the sensor service.
sensorManager = (SensorManager) getSystemService(SENSOR_SERVICE);
// Selecting the resolution of the Android device so we can create a
// proportional preview
Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
deviceHeight = display.getHeight();
// Add a listener to the Capture button
ibCapture.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
mCamera.takePicture(null, null, mPicture);
}
});
// Add a listener to the Retake button
ibRetake.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
// Deleting the image from the SD card/
File discardedPhoto = new File(sdRoot, dir + fileName);
discardedPhoto.delete();
// Restart the camera preview.
mCamera.startPreview();
// Reorganize the buttons on the screen
flBtnContainer.setVisibility(LinearLayout.VISIBLE);
ibRetake.setVisibility(LinearLayout.GONE);
ibUse.setVisibility(LinearLayout.GONE);
}
});
// Add a listener to the Use button
ibUse.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
// Everything is saved so we can quit the app.
finish();
Intent intent = new Intent(CameraActivity.this, PicturePreview.class);
startActivity(intent);
}
});
}
private PictureCallback mPicture = new PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
// Replacing the button after a photho was taken.
flBtnContainer.setVisibility(View.GONE);
ibRetake.setVisibility(View.VISIBLE);
ibUse.setVisibility(View.VISIBLE);
// File name of the image that we just took.
fileName = "IMG_" + new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date()).toString() + ".jpg";
// Creating the directory where to save the image. Sadly in older
// version of Android we can not get the Media catalog name
File mkDir = new File(sdRoot, dir);
mkDir.mkdirs();
// Main file where to save the data that we recive from the camera
File pictureFile = new File(sdRoot, dir + fileName);
try {
FileOutputStream purge = new FileOutputStream(pictureFile);
purge.write(data);
purge.close();
} catch (FileNotFoundException e) {
Log.d("DG_DEBUG", "File not found: " + e.getMessage());
} catch (IOException e) {
Log.d("DG_DEBUG", "Error accessing file: " + e.getMessage());
}
// Adding Exif data for the orientation. For some strange reason the
// ExifInterface class takes a string instead of a file.
try {
exif = new ExifInterface("/sdcard/" + dir + fileName);
exif.setAttribute(ExifInterface.TAG_ORIENTATION, "" + orientation);
exif.saveAttributes();
} catch (IOException e) {
e.printStackTrace();
}
//SendBroadcasts let's us instantly update the SD card with our image
sendBroadcast(new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE, Uri.parse("file://"+Environment.getExternalStorageDirectory())));
}
};
private void createCamera() {
// Create an instance of Camera
mCamera = getCameraInstance();
Setting the right parameters in the camera
Camera.Parameters params = mCamera.getParameters();
mCamera.setParameters(params);
// Create our Preview view and set it as the content of our activity.
mPreview = new CameraPreview(this, mCamera);
FrameLayout preview = (FrameLayout) findViewById(R.id.camera_preview);
// Calculating the width of the preview so it is proportional.
float widthFloat = (float) (deviceHeight) * 4 / 3;
int width = Math.round(widthFloat);
// Resizing the LinearLayout so we can make a proportional preview. This
// approach is not 100% perfect because on devices with a really small
// screen the the image will still be distorted - there is place for
// improvment.
LinearLayout.LayoutParams layoutParams = new LinearLayout.LayoutParams(width, deviceHeight);
preview.setLayoutParams(layoutParams);
// Adding the camera preview after the FrameLayout and before the button
// as a separated element.
preview.addView(mPreview, 0);
}
#Override
protected void onResume() {
super.onResume();
// Test if there is a camera on the device and if the SD card is
// mounted.
if (!checkCameraHardware(this)) {
Intent i = new Intent(this, NoCamera.class);
startActivity(i);
finish();
} else if (!checkSDCard()) {
Intent i = new Intent(this, NoSDCard.class);
startActivity(i);
finish();
}
// Creating the camera
createCamera();
// Register this class as a listener for the accelerometer sensor
////sensorManager.registerListener(this, sensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER), SensorManager.SENSOR_DELAY_NORMAL);
}
#Override
protected void onPause() {
super.onPause();
// release the camera immediately on pause event
releaseCamera();
// removing the inserted view - so when we come back to the app we
// won't have the views on top of each other.
FrameLayout preview = (FrameLayout) findViewById(R.id.camera_preview);
preview.removeViewAt(0);
}
private void releaseCamera() {
if (mCamera != null) {
mCamera.release(); // release the camera for other applications
mCamera = null;
}
}
/** Check if this device has a camera */
private boolean checkCameraHardware(Context context) {
if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA)) {
// this device has a camera
return true;
} else {
// no camera on this device
return false;
}
}
private boolean checkSDCard() {
boolean state = false;
String sd = Environment.getExternalStorageState();
if (Environment.MEDIA_MOUNTED.equals(sd)) {
state = true;
}
return state;
}
/**
* A safe way to get an instance of the Camera object.
*/
public static Camera getCameraInstance() {
Camera c = null;
try {
// attempt to get a Camera instance
c = Camera.open();
} catch (Exception e) {
// Camera is not available (in use or does not exist)
}
// returns null if camera is unavailable
return c;
}
/**
* Putting in place a listener so we can get the sensor data only when
* something changes.
*/
public void onSensorChanged(SensorEvent event) {
synchronized (this) {
if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
RotateAnimation animation = null;
if (event.values[0] < 4 && event.values[0] > -4) {
if (event.values[1] > 0 && orientation != ExifInterface.ORIENTATION_ROTATE_90) {
// UP
orientation = ExifInterface.ORIENTATION_ROTATE_90;
animation = getRotateAnimation(270);
degrees = 270;
} else if (event.values[1] < 0 && orientation != ExifInterface.ORIENTATION_ROTATE_270) {
// UP SIDE DOWN
orientation = ExifInterface.ORIENTATION_ROTATE_270;
animation = getRotateAnimation(90);
degrees = 90;
}
} else if (event.values[1] < 4 && event.values[1] > -4) {
if (event.values[0] > 0 && orientation != ExifInterface.ORIENTATION_NORMAL) {
// LEFT
orientation = ExifInterface.ORIENTATION_NORMAL;
animation = getRotateAnimation(0);
degrees = 0;
} else if (event.values[0] < 0 && orientation != ExifInterface.ORIENTATION_ROTATE_180) {
// RIGHT
orientation = ExifInterface.ORIENTATION_ROTATE_180;
animation = getRotateAnimation(180);
degrees = 180;
}
}
if (animation != null) {
rotatingImage.startAnimation(animation);
}
}
}
}
/**
* Calculating the degrees needed to rotate the image imposed on the button
* so it is always facing the user in the right direction
*
* #param toDegrees
* #return
*/
private RotateAnimation getRotateAnimation(float toDegrees) {
float compensation = 0;
if (Math.abs(degrees - toDegrees) > 180) {
compensation = 360;
}
// When the device is being held on the left side (default position for
// a camera) we need to add, not subtract from the toDegrees.
if (toDegrees == 0) {
compensation = -compensation;
}
// Creating the animation and the RELATIVE_TO_SELF means that he image
// will rotate on it center instead of a corner.
RotateAnimation animation = new RotateAnimation(degrees, toDegrees - compensation, Animation.RELATIVE_TO_SELF, 0.5f, Animation.RELATIVE_TO_SELF, 0.5f);
// Adding the time needed to rotate the image
animation.setDuration(250);
// Set the animation to stop after reaching the desired position. With
// out this it would return to the original state.
animation.setFillAfter(true);
return animation;
}
/**
* STUFF THAT WE DON'T NEED BUT MUST BE HEAR FOR THE COMPILER TO BE HAPPY.
*/
public void onAccuracyChanged(Sensor sensor, int accuracy) {
}
#Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.custom_cam, menu);
return true;
}
#Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
}
this is the code
and then I have a second activity that I need to call the image taken
Keep the saved image path in string. Then pass it to other activity with Bundle.
eg.
In your camera activity
under your ibUse.setOnClickListener. Like this;
// Pass the saved image path to PicturePreview through Intent extras.
ibUse.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
// Everything is saved so we can quit the app.
finish();
Intent intent = new Intent(CameraActivity.this, PicturePreview.class);
// "path" must hold the absolute path of the image saved by the camera.
Bundle extras = new Bundle();
extras.putString("ImagePath", path);
intent.putExtras(extras);
startActivity(intent);
}
});
In your preview activity under onCreate method
// In the preview activity's onCreate: read the path extra, decode the file,
// and show it in the ImageView (only if the file actually exists).
Bundle bundle = getIntent().getExtras();
String path = bundle.getString("ImagePath");
ImageView image = (ImageView) findViewById(R.id.image);
File imgFile = new File(path);
if(imgFile.exists()){
Bitmap myBitmap = BitmapFactory.decodeFile(imgFile.getAbsolutePath());
image.setImageBitmap(myBitmap);
}
I am writing a Augmented reality demo for the Epson BT 200 glasses, I have sample which I have up and running that if you point it at a certain picture, it detects it and renders a cube on top of it.
Now obviously this is fine for a phone, but for see thru glasses I want it to work but not show the camera feed. I thought turning off the view would stop it, or setting the visibility to gone. But still does not work.
The only work around I have found so far is by not setting the OpenGL render to transparent pixels
// The OpenCV loader callback.
private BaseLoaderCallback mLoaderCallback =
new BaseLoaderCallback(this) {
#Override
public void onManagerConnected(final int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
Log.d(TAG, "OpenCV loaded successfully");
mCameraView.enableView();
mCameraView.enableFpsMeter();
mBgr = new Mat();
final ARFilter starryNight;
try {
// Define The Starry Night to be 1.0 units tall.
starryNight = new ImageDetectionFilter(
CameraActivity.this,
R.drawable.the_lab,
mCameraProjectionAdapter, 1.0);
} catch (IOException e) {
Log.e(TAG, "Failed to load drawable: " +
"starry_night");
e.printStackTrace();
break;
}
final ARFilter akbarHunting;
try {
// Define Akbar Hunting with Cheetahs to be 1.0
// units wide.
akbarHunting = new ImageDetectionFilter(
CameraActivity.this,
R.drawable.akbar_hunting_with_cheetahs,
mCameraProjectionAdapter, 1.0);
} catch (IOException e) {
Log.e(TAG, "Failed to load drawable: " +
"akbar_hunting_with_cheetahs");
e.printStackTrace();
break;
}
mImageDetectionFilters = new ARFilter[] {
//new NoneARFilter(),
starryNight,
akbarHunting
};
mARRenderer.filter = mImageDetectionFilters[
mImageDetectionFilterIndex];
break;
default:
super.onManagerConnected(status);
break;
}
}
};
// Suppress backward incompatibility errors because we provide
// backward-compatible fallbacks.
#SuppressLint("NewApi")
#Override
protected void onCreate(final Bundle savedInstanceState) {
goFullScreen();
super.onCreate(savedInstanceState);
if (savedInstanceState != null) {
mCameraIndex = savedInstanceState.getInt(
STATE_CAMERA_INDEX, 0);
mImageSizeIndex = savedInstanceState.getInt(
STATE_IMAGE_SIZE_INDEX, 0);
mImageDetectionFilterIndex = savedInstanceState.getInt(
STATE_IMAGE_DETECTION_FILTER_INDEX, 0);
} else {
mCameraIndex = 0;
mImageSizeIndex = cameraSize; // was 0
mImageDetectionFilterIndex = 0;
}
final FrameLayout layout = new FrameLayout(this);
layout.setLayoutParams(new FrameLayout.LayoutParams(
FrameLayout.LayoutParams.MATCH_PARENT,
FrameLayout.LayoutParams.MATCH_PARENT));
layout.setBackgroundColor(Color.RED);
setContentView(layout);
mCameraView = new JavaCameraView(this, mCameraIndex);
mCameraView.setLayoutParams(new FrameLayout.LayoutParams(
FrameLayout.LayoutParams.MATCH_PARENT,
FrameLayout.LayoutParams.MATCH_PARENT));
layout.addView(mCameraView);
//mCameraView.setVisibility(View.INVISIBLE);
GLSurfaceView glSurfaceView = new GLSurfaceView(this);
glSurfaceView.getHolder().setFormat(
PixelFormat.TRANSPARENT);
glSurfaceView.setEGLConfigChooser(8, 8, 8, 8, 0, 0);
glSurfaceView.setZOrderOnTop(true);
glSurfaceView.setLayoutParams(new FrameLayout.LayoutParams(
FrameLayout.LayoutParams.MATCH_PARENT,
FrameLayout.LayoutParams.MATCH_PARENT));
layout.addView(glSurfaceView);
mCameraProjectionAdapter = new CameraProjectionAdapter();
mARRenderer = new ARCubeRenderer();
mARRenderer.cameraProjectionAdapter =
mCameraProjectionAdapter;
// Earlier, we defined the printed image's size as 1.0
// unit.
// Define the cube to be half this size.
mARRenderer.scale = 0.5f;
glSurfaceView.setRenderer(mARRenderer);
final Camera camera;
if (Build.VERSION.SDK_INT >=
Build.VERSION_CODES.GINGERBREAD) {
CameraInfo cameraInfo = new CameraInfo();
Camera.getCameraInfo(mCameraIndex, cameraInfo);
mIsCameraFrontFacing =
(cameraInfo.facing ==
CameraInfo.CAMERA_FACING_FRONT);
mNumCameras = Camera.getNumberOfCameras();
camera = Camera.open(mCameraIndex);
} else { // pre-Gingerbread
// Assume there is only 1 camera and it is rear-facing.
mIsCameraFrontFacing = false;
mNumCameras = 1;
camera = Camera.open();
}
final Parameters parameters = camera.getParameters();
camera.release();
mSupportedImageSizes =
parameters.getSupportedPreviewSizes();
final Size size = mSupportedImageSizes.get(mImageSizeIndex);
mCameraProjectionAdapter.setCameraParameters(
parameters, size);
// Earlier, we defined the printed image's size as 1.0
// unit.
// Leave the near and far clip distances at their default
// values, which are 0.1 (one-tenth the image size) and
// 10.0 (ten times the image size).
mCameraView.setMaxFrameSize(size.width, size.height);
mCameraView.setCvCameraViewListener(this);
}
I need to increase the FPS rate in my app. Right now I get between 6 and 10 FPS, which seems very low to me given that I have a Nexus 4. So I decided to switch from setPreviewCallback to setPreviewCallbackWithBuffer, but I do not see ANY difference in the frame rate. In the log I can see that the buffer addresses are cycling as expected, but I still get only 6-10 FPS (sometimes 12, but rarely)...
Could you give me some advice? BTW, I have Android 4.4, and I tried OpenCV, but the result there is almost the same... My code is:
public class XPreview extends SurfaceView implements SurfaceHolder.Callback, Camera.PreviewCallback {
private SurfaceHolder mHolder;
private Camera mCamera;
long mTimer = 0;
public XPreview(Context context, Camera camera) {
super(context);
mCamera = camera;
mHolder = getHolder();
mHolder.addCallback(this);
}
public void surfaceCreated(SurfaceHolder holder) {
}
public void surfaceDestroyed(SurfaceHolder holder) {
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
if (mHolder.getSurface() == null){
// preview surface does not exist
return;
}
// stop preview before making changes
try {
mCamera.stopPreview();
} catch (Exception e){
// ignore: tried to stop a non-existent preview
}
try {
int formatt = mCamera.getParameters().getPreviewFormat();
int bytesPerPx = ImageFormat.getBitsPerPixel( formatt );
int width = mCamera.getParameters().getPreviewSize().width;
int height = mCamera.getParameters().getPreviewSize().height;
int size = (int)( ( width * height * bytesPerPx ) / 8.0);
Parameters params = mCamera.getParameters();
mCamera.setParameters(params);
Log.d(TAG, "Data: " + formatt + " " + bytesPerPx + " " + width + "x" + height + " " + size );
mCamera.setPreviewDisplay(mHolder);
mCamera.setPreviewCallbackWithBuffer(this);
mCamera.addCallbackBuffer( new byte[size] );
mCamera.addCallbackBuffer( new byte[size] );
mCamera.addCallbackBuffer( new byte[size] );
mCamera.addCallbackBuffer( new byte[size] );
mCamera.addCallbackBuffer( new byte[size] );
mCamera.startPreview();
} catch (Exception e){
Log.d(TAG, "Error starting camera preview: " + e.getMessage());
}
}
#Override
public void onPreviewFrame(byte[] data, Camera camera) {
long time = System.currentTimeMillis();
Log.d( TAG, "Time between frames: " + ( time - mTimer ) + "ms, FPS: " + ( 1000.0 / (time - mTimer )) + ", data " + data );
mTimer = time;
camera.addCallbackBuffer(data);
}
}
Activity:
public class RTP extends Activity {
private Camera mCamera;
private XPreview mPreview;
#Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
mCamera = Camera.open();
mPreview = new XPreview(this, mCamera);
FrameLayout preview = (FrameLayout) findViewById( R.id.frameLayout1 );
preview.addView(mPreview);
}
#Override
public void onPause(){
super.onPause();
if( mCamera != null ){
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
}
}
Edit:
Parameters params = mCamera.getParameters();
params.setRecordingHint(true);
mCamera.setParameters(params);
This decreased the delay between frames to about ~30 ms with the buffered version, and decreased it to about 60 ms with the version without buffers. Anyway, I'd be glad if someone could give me some more advice. Deeper testing shows that I now get between 24 and 31 FPS using the buffered version with two buffers, and between 15 and 22 using the default version.
I used the following code to capture an image. Everything works fine: when I capture the image it has a size of 2592x1944, and the image is captured in landscape mode. Now I want to capture the image with a size of 534x534. I changed the parameter values with params.setPictureSize(534, 534); but nothing changes. How can I do this? Thanks in advance.
DgCamActivity.java
public class DgCamActivity extends Activity implements SensorEventListener {
private Camera mCamera;
private CameraPreview mPreview;
private SensorManager sensorManager = null;
private int orientation;
private ExifInterface exif;
private int deviceHeight;
private Button ibRetake;
private Button ibUse;
private Button ibCapture;
// private FrameLayout flBtnContainer;
private File sdRoot;
private String dir;
private String fileName;
// private ImageView rotatingImage;
private int degrees = -1;
#Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.test);
// Setting all the path for the image
sdRoot = Environment.getExternalStorageDirectory();
dir = "/SimpleCamera/";
// Getting all the needed elements from the layout
// rotatingImage = (ImageView) findViewById(R.id.imageView1);
ibRetake = (Button) findViewById(R.id.ibRetake);
ibUse = (Button) findViewById(R.id.ibUse);
ibCapture = (Button) findViewById(R.id.ibCapture);
// flBtnContainer = (FrameLayout) findViewById(R.id.flBtnContainer);
// Getting the sensor service.
sensorManager = (SensorManager) getSystemService(SENSOR_SERVICE);
// Selecting the resolution of the Android device so we can create a
// proportional preview
Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE))
.getDefaultDisplay();
deviceHeight = display.getHeight();
// Add a listener to the Capture button
ibCapture.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
mCamera.takePicture(null, null, mPicture);
}
});
// Add a listener to the Retake button
ibRetake.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
// Deleting the image from the SD card/
File discardedPhoto = new File(sdRoot, dir + fileName);
discardedPhoto.delete();
// Restart the camera preview.
mCamera.startPreview();
// Reorganize the buttons on the screen
// flBtnContainer.setVisibility(LinearLayout.VISIBLE);
ibRetake.setVisibility(LinearLayout.GONE);
ibUse.setVisibility(LinearLayout.GONE);
}
});
// Add a listener to the Use button
ibUse.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
// Everything is saved so we can quit the app.
finish();
}
});
}
private void createCamera() {
// Create an instance of Camera
mCamera = getCameraInstance();
// Setting the right parameters in the camera
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPictureSizes();
Log.v("SUPORTED SIZE IS>>>>>.", params.getSupportedPictureSizes() + "");
Log.v("SUPORTED SIZE IS>>>>>.", sizes.size() + "");
params.setPictureSize(1600, 1200);
params.setPictureFormat(PixelFormat.JPEG);
params.setJpegQuality(100);
mCamera.setParameters(params);
// Create our Preview view and set it as the content of our activity.
mPreview = new CameraPreview(this, mCamera);
RelativeLayout preview = (RelativeLayout) findViewById(R.id.camera_preview);
// Calculating the width of the preview so it is proportional.
float widthFloat = (float) (deviceHeight) * 4 / 3;
int width = Math.round(widthFloat);
RelativeLayout.LayoutParams layoutParams = new RelativeLayout.LayoutParams(
LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT);
preview.setLayoutParams(layoutParams);
preview.addView(mPreview, 0);
}
#Override
protected void onResume() {
super.onResume();
createCamera();
sensorManager.registerListener(this,
sensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER),
SensorManager.SENSOR_DELAY_NORMAL);
}
#Override
protected void onPause() {
super.onPause();
releaseCamera();
RelativeLayout preview = (RelativeLayout) findViewById(R.id.camera_preview);
preview.removeViewAt(0);
}
private void releaseCamera() {
if (mCamera != null) {
mCamera.release(); // release the camera for other applications
mCamera = null;
}
}
public static Camera getCameraInstance() {
Camera c = null;
try {
// attempt to get a Camera instance
c = Camera.open();
} catch (Exception e) {
// Camera is not available (in use or does not exist)
}
// returns null if camera is unavailable
return c;
}
private PictureCallback mPicture = new PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
ibRetake.setVisibility(View.VISIBLE);
ibUse.setVisibility(View.VISIBLE);
// File name of the image that we just took.
fileName = "IMG_"
+ new SimpleDateFormat("yyyyMMdd_HHmmss")
.format(new Date()).toString() + ".jpg";
File mkDir = new File(sdRoot, dir);
mkDir.mkdirs();
// Main file where to save the data that we recive from the camera
File pictureFile = new File(sdRoot, dir + fileName);
try {
FileOutputStream purge = new FileOutputStream(pictureFile);
purge.write(data);
purge.close();
} catch (FileNotFoundException e) {
Log.d("DG_DEBUG", "File not found: " + e.getMessage());
} catch (IOException e) {
Log.d("DG_DEBUG", "Error accessing file: " + e.getMessage());
}
try {
exif = new ExifInterface("/sdcard/" + dir + fileName);
exif.setAttribute(ExifInterface.TAG_ORIENTATION, ""
+ orientation);
exif.saveAttributes();
} catch (IOException e) {
e.printStackTrace();
}
}
};
/**
* Putting in place a listener so we can get the sensor data only when
* something changes.
*/
public void onSensorChanged(SensorEvent event) {
synchronized (this) {
if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
RotateAnimation animation = null;
if (event.values[0] < 4 && event.values[0] > -4) {
if (event.values[1] > 0
&& orientation != ExifInterface.ORIENTATION_ROTATE_90) {
// UP
orientation = ExifInterface.ORIENTATION_ROTATE_90;
animation = getRotateAnimation(270);
degrees = 270;
} else if (event.values[1] < 0
&& orientation != ExifInterface.ORIENTATION_ROTATE_270) {
// UP SIDE DOWN
orientation = ExifInterface.ORIENTATION_ROTATE_270;
animation = getRotateAnimation(90);
degrees = 90;
}
} else if (event.values[1] < 4 && event.values[1] > -4) {
if (event.values[0] > 0
&& orientation != ExifInterface.ORIENTATION_NORMAL) {
// LEFT
orientation = ExifInterface.ORIENTATION_NORMAL;
animation = getRotateAnimation(0);
degrees = 0;
} else if (event.values[0] < 0
&& orientation != ExifInterface.ORIENTATION_ROTATE_180) {
// RIGHT
orientation = ExifInterface.ORIENTATION_ROTATE_180;
animation = getRotateAnimation(180);
degrees = 180;
}
}
if (animation != null) {
// rotatingImage.startAnimation(animation);
}
}
}
}
/**
* Calculating the degrees needed to rotate the image imposed on the button
* so it is always facing the user in the right direction
*
* #param toDegrees
* #return
*/
private RotateAnimation getRotateAnimation(float toDegrees) {
float compensation = 0;
if (Math.abs(degrees - toDegrees) > 180) {
compensation = 360;
}
// When the device is being held on the left side (default position for
// a camera) we need to add, not subtract from the toDegrees.
if (toDegrees == 0) {
compensation = -compensation;
}
// Creating the animation and the RELATIVE_TO_SELF means that he image
// will rotate on it center instead of a corner.
RotateAnimation animation = new RotateAnimation(degrees, toDegrees
- compensation, Animation.RELATIVE_TO_SELF, 0.5f,
Animation.RELATIVE_TO_SELF, 0.5f);
// Adding the time needed to rotate the image
animation.setDuration(250);
// Set the animation to stop after reaching the desired position. With
// out this it would return to the original state.
animation.setFillAfter(true);
return animation;
}
/**
* STUFF THAT WE DON'T NEED BUT MUST BE HEAR FOR THE COMPILER TO BE HAPPY.
*/
public void onAccuracyChanged(Sensor sensor, int accuracy) {
}
}
CameraPreview.java
/**
 * SurfaceView that hosts the live camera preview. The camera instance is
 * owned by the activity; this view only binds it to the surface lifecycle.
 */
public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback {
    private final SurfaceHolder mSurfaceHolder;
    private final Camera mPreviewCamera;

    public CameraPreview(Context context, Camera camera) {
        super(context);
        mPreviewCamera = camera;

        // Install a SurfaceHolder.Callback so we get notified when the
        // underlying surface is created and destroyed.
        mSurfaceHolder = getHolder();
        mSurfaceHolder.addCallback(this);
        // deprecated setting, but required on Android versions prior to 3.0
        mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        mSurfaceHolder.setSizeFromLayout();
        mSurfaceHolder.setFixedSize(100, 100);
    }

    /** Surface is ready: point the camera at it and begin streaming frames. */
    public void surfaceCreated(SurfaceHolder holder) {
        try {
            mPreviewCamera.setPreviewDisplay(holder);
            mPreviewCamera.startPreview();
        } catch (IOException e) {
            Log.d("DG_DEBUG", "Error setting camera preview: " + e.getMessage());
        }
    }

    /** Surface geometry changed: restart the preview against the new surface. */
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        // Nothing to rebind if the surface has already gone away.
        if (mSurfaceHolder.getSurface() == null) {
            return;
        }

        // The preview must be stopped before the surface can be reconfigured;
        // a failure here just means no preview was running yet.
        try {
            mPreviewCamera.stopPreview();
        } catch (Exception ignored) {
            // ignore: tried to stop a non-existent preview
        }

        // Any resize/rotate/reformat adjustments would go here, then the
        // preview is restarted with the (possibly new) settings.
        try {
            mPreviewCamera.setPreviewDisplay(mSurfaceHolder);
            mPreviewCamera.startPreview();
        } catch (Exception e) {
            Log.d("DG_DEBUG", "Error starting camera preview: " + e.getMessage());
        }
    }

    /** Intentionally empty: the owning activity releases the camera. */
    public void surfaceDestroyed(SurfaceHolder holder) {
    }
}
test.xml
<?xml version="1.0" encoding="utf-8"?>
<!-- Camera screen: full-bleed preview container with a bottom button bar
     (Capture / ReTake / Save). Resource references fixed from "#+id" to the
     valid "@+id" syntax. -->
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="fill_parent"
    android:layout_height="fill_parent" >

    <RelativeLayout
        android:id="@+id/camera_preview"
        android:layout_width="fill_parent"
        android:layout_height="fill_parent"
        android:layout_alignParentTop="true"
        android:layout_centerHorizontal="true" >
    </RelativeLayout>

    <RelativeLayout
        android:id="@+id/relativeLayout1"
        android:layout_width="fill_parent"
        android:layout_height="wrap_content"
        android:layout_alignParentBottom="true"
        android:layout_centerHorizontal="true"
        android:layout_marginBottom="41dp" >

        <Button
            android:id="@+id/ibCapture"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:layout_alignBottom="@+id/relativeLayout1"
            android:layout_alignLeft="@+id/camera_preview"
            android:text="Capture" />

        <Button
            android:id="@+id/ibRetake"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:layout_alignParentTop="true"
            android:layout_marginLeft="36dp"
            android:layout_toRightOf="@+id/ibCapture"
            android:text="ReTake" />

        <Button
            android:id="@+id/ibUse"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:layout_alignParentRight="true"
            android:layout_alignParentTop="true"
            android:layout_marginRight="38dp"
            android:text="Save" />
    </RelativeLayout>

</RelativeLayout>
I believe Android will not allow for arbitrary image sizes when taking a picture, you should use the parameters.getSupportedPictureSizes() method to query the supported image sizes.
I suspect you would have to choose a size big enough to cut your desired 534x534 patch from. You could do this by using BitmapFactory methods to decode the picture that was taken, and then use the bitmap.getPixels() method to extract the desired patch, or use the static Bitmap.createScaledBitmap() method to scale your picture to the desired size.
After you have your correctly sized bitmap, you could just use bitmap.compress() to save your image, if that's the final format you are going for.