I am developing an app that takes a photo of the person who tries to unlock a device with a wrong password or pattern.
The app detects failed attempts through DeviceAdminReceiver.onPasswordFailed(). Everything works fine on all versions of Android, except on a real Android O device (it works fine on the Android O emulator).
Here is my code:
public class AdminReceiver extends DeviceAdminReceiver {
@Override
public void onPasswordFailed(Context context, Intent intent) {
super.onPasswordFailed(context, intent);
Intent i = new Intent(context, CameraActivity.class);
i.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
context.startActivity(i);
}
}
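For reference, onPasswordFailed() is only delivered while the receiver is an active device admin whose policy file declares the watch-login policy. A minimal runtime sanity check (a sketch; run it from any Context, e.g. an activity):
DevicePolicyManager dpm = (DevicePolicyManager) context.getSystemService(Context.DEVICE_POLICY_SERVICE);
ComponentName admin = new ComponentName(context, AdminReceiver.class);
// Both checks must pass, otherwise onPasswordFailed() is never called.
boolean isActiveAdmin = dpm.isAdminActive(admin);
boolean watchesLogin = dpm.hasGrantedPolicy(admin, DeviceAdminInfo.USES_POLICY_WATCH_LOGIN);
Log.d("AdminCheck", "active=" + isActiveAdmin + " watch-login=" + watchesLogin);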
And the code for taking the photo is in a transparent activity:
public class CameraActivity extends AppCompatActivity {
private static final String TAG = CameraActivity.class.getSimpleName();
private Camera mCamera;
private boolean executed = false;
private CameraPreview.CamCallback camCallback = (data, camera) -> {
//used to prevent picture from being captured twice, because apparently
// Camera.stopPreview() takes some time to execute
if (!executed) {
executed = true;
Log.d(TAG, "CameraActivity camCallback executed");
Camera.Parameters params = camera.getParameters();
Camera.Size size = params.getPreviewSize();
camera.stopPreview();
File dir = new File(STORE_DIRECTORY);
if (!(dir.exists() || dir.mkdirs())) {
Log.e(TAG, "Failed to create file storage directory.");
return;
}
String imgname = new SimpleDateFormat("dd-MMM-yyyy hh:mm:ss",
Locale.getDefault()).format(new Date());
File file = new File(dir, imgname + ".jpg");
OutputStream fOut;
try {
if (file.exists() && !file.delete()) {
Log.e(TAG, "File already exists and could not be deleted!");
finish();
return;
}
if (!file.exists() && !file.createNewFile()) {
Log.e(TAG, "failed to create image file.");
finish();
return;
}
fOut = new FileOutputStream(file);
YuvImage yuvImage = new YuvImage(data, params.getPreviewFormat(),
size.width, size.height, null);
processImage(yuvImage, fOut);
fOut.flush();
fOut.close();
} catch (FileNotFoundException e) {
Log.e(TAG, "Output file not found!");
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
finish();
};
private void processImage(@NotNull YuvImage yuvImage, OutputStream fOut) {
ByteArrayOutputStream os = new ByteArrayOutputStream();
yuvImage.compressToJpeg(new Rect(0, 0, yuvImage.getWidth(), yuvImage.getHeight()),90, os);
byte[] bytes = os.toByteArray();
Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
Matrix matrix = new Matrix();
matrix.postRotate(bitmap.getWidth() > bitmap.getHeight() ? -90.0f : 0.0f);
Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true)
.compress(Bitmap.CompressFormat.JPEG, 100, fOut);
}
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
Log.i(TAG, "CameraActivity->onCreate");
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_camera);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_SHOW_WHEN_LOCKED
| WindowManager.LayoutParams.FLAG_DISMISS_KEYGUARD
| WindowManager.LayoutParams.FLAG_ALLOW_LOCK_WHILE_SCREEN_ON);
// Setup the camera and the preview object
int frontCamIndex = findFrontFacingCamera();
if (frontCamIndex == -1) {
Log.e(TAG, "findFrontFacingCamera: ",
new IOException("No facing Camera detected on this device"));
finish();
return;
}
mCamera = Camera.open(frontCamIndex);
CameraPreview camPreview = new CameraPreview(this, mCamera, camCallback);
camPreview.setSurfaceTextureListener(camPreview);
// Connect the preview object to a FrameLayout in your UI
// You'll have to create a FrameLayout object in your UI to place this preview in
FrameLayout preview = findViewById(R.id.cameraView);
preview.addView(camPreview);
// Attach a callback for preview
mCamera.setPreviewCallback(camCallback);
}
@Override
protected void onDestroy() {
super.onDestroy();
Logger.log("CameraActivity->onDestroy");
if (mCamera != null)
mCamera.release();
}
}
CameraActivity in the manifest:
<activity
android:name=".activities.CameraActivity"
android:showWhenLocked="true"
android:theme="@style/Theme.Transparent" />
The style for the CameraActivity:
<style name="Theme.Transparent" parent="android:Theme">
<item name="android:windowIsTranslucent">true</item>
<item name="android:windowBackground">#android:color/transparent</item>
<item name="android:windowContentOverlay">#null</item>
<item name="android:windowNoTitle">true</item>
<item name="android:windowIsFloating">true</item>
<item name="android:backgroundDimEnabled">false</item>
</style>
And the CameraPreview class with CamCallback interface:
public class CameraPreview extends TextureView implements TextureView.SurfaceTextureListener {
public interface CamCallback extends Camera.PreviewCallback {
void onPreviewFrame(byte[] data, Camera camera);
}
private Camera mCamera;
private CamCallback callback;
public CameraPreview(Context context) {
super(context);
}
public CameraPreview(Context context, Camera camera, CamCallback callback) {
super(context);
mCamera = camera;
this.callback = callback;
}
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
Camera.Parameters params = mCamera.getParameters();
try {
mCamera.setPreviewTexture(surface);
} catch (IOException t) {
t.printStackTrace();
}
Camera.Size size = getBiggestPictureSize();
setLayoutParams(new FrameLayout.LayoutParams(size.width, size.height, Gravity.CENTER));
params.setPreviewSize(size.width, size.height);
mCamera.setParameters(params);
mCamera.setPreviewCallback(callback);
try {
mCamera.setPreviewTexture(surface);
} catch (IOException e) {
e.printStackTrace();
}
mCamera.startPreview();
setVisibility(INVISIBLE); // Make the surface invisible as soon as it is created
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
// Put code here to handle texture size change if you want to
try {
mCamera.stopPreview();
mCamera.setPreviewCallback(callback);
try {
mCamera.setPreviewTexture(surface);
} catch (IOException e) {
e.printStackTrace();
}
mCamera.startPreview();
} catch (Exception ex) {
ex.printStackTrace();
}
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
// Update your view here!
}
private Camera.Size getBiggestPictureSize() {
Camera.Size result = null;
for (Camera.Size size : mCamera.getParameters().getSupportedPictureSizes()) {
if (result == null) {
result = size;
} else {
int resultArea = result.width * result.height;
int newArea = size.width * size.height;
if (newArea > resultArea) {
result = size;
}
}
}
return (result);
}
}
The CameraActivity is being created (I can see it in logcat), and logcat shows no errors at all.
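For what it's worth, FLAG_DISMISS_KEYGUARD is deprecated on API 26 and FLAG_SHOW_WHEN_LOCKED on API 27, so one thing worth trying on the Android O device is the newer equivalents in onCreate() (a hedged sketch; I am not certain this is the actual cause):
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O_MR1) {
    // API 27+ replacement for FLAG_SHOW_WHEN_LOCKED / FLAG_TURN_SCREEN_ON
    setShowWhenLocked(true);
    setTurnScreenOn(true);
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
    // API 26+ replacement for FLAG_DISMISS_KEYGUARD
    KeyguardManager km = (KeyguardManager) getSystemService(Context.KEYGUARD_SERVICE);
    km.requestDismissKeyguard(this, null);
}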
Related
I have this code, which gets the camera parameters the first time, but when I try to resume the activity or start it again using an intent, the app crashes.
I have tried to release the camera in different parts of my code, but it does not seem to work.
Here is the code for the surface callbacks:
private SurfaceHolder.Callback surfaceCallback = new SurfaceHolder.Callback() {
/**
* {@inheritDoc}
*/
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
mCamera.setPreviewDisplay(previewHolder);
mCamera.setPreviewCallback(previewCallback);
} catch (Throwable t) {
Log.e("MotionDetector", "Exception in setPreviewDisplay()", t);
}
// try {
// if(!mInitSuccesful)
// initRecorder(previewHolder.getSurface());
// } catch (IOException e) {
// e.printStackTrace();
// }
}
/**
* {@inheritDoc}
*/
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Camera.Parameters parameters = mCamera.getParameters();
List<String> focusModes = parameters.getSupportedFocusModes();
if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE))
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
Camera.Size size = getBestPreviewSize(width, height, parameters);
if (size != null) {
parameters.setPreviewSize(size.width, size.height);
Log.d("MotionDetector", "Using width=" + size.width + " height=" + size.height);
}
mCamera.setDisplayOrientation(90);
mCamera.setParameters(parameters);
mCamera.startPreview();
inPreview = true;
}
/**
* {@inheritDoc}
*/
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
// stop the preview and release the camera when the surface goes away
mCamera.stopPreview();
releaseCamera();
}
In surfaceDestroyed() I call stopPreview() and release the camera, but I keep getting the error mentioned in the title.
Note: it runs smoothly the first time, but if I press back and open the activity again, the error arises.
The error seems to be in this code:
@Override
public void run() {
while (isRunning.get()) {
long now = System.currentTimeMillis();
if (now - lastCheck > checkInterval) {
lastCheck = now;
if (nextData.get() != null) {
int[] img = ImageProcessing.decodeYUV420SPtoLuma(nextData.get(), nextWidth.get(), nextHeight.get());
// check if it is too dark
int lumaSum = 0;
for (int i : img) {
lumaSum += i;
}
if (lumaSum < minLuma) {
if (motionDetectorCallback != null) {
mHandler.post(new Runnable() {
@Override
public void run() {
motionDetectorCallback.onTooDark();
}
});
}
} else if (detector.detect(img, nextWidth.get(), nextHeight.get())) {
// check
if (motionDetectorCallback != null) {
mHandler.post(new Runnable() {
@Override
public void run() {
Camera.Parameters params = imgCam.getParameters();
int w = params.getPreviewSize().width;
int h = params.getPreviewSize().height;
int format = params.getPreviewFormat();
YuvImage image = new YuvImage(imgByte, format, w, h, null);
ByteArrayOutputStream out = new ByteArrayOutputStream();
Rect area = new Rect(0, 0, w, h);
image.compressToJpeg(area, 100, out);
Bitmap bm = BitmapFactory.decodeByteArray(out.toByteArray(), 0, out.size());
float degrees = 90;//rotation degree
Matrix matrix = new Matrix();
matrix.setRotate(degrees);
bOutput = Bitmap.createBitmap(bm, 0, 0, bm.getWidth(), bm.getHeight(), matrix, true);
// mMediaRecorder.start();
// try {
// Thread.sleep(10 * 1000); // This will recode for 10 seconds, if you don't want then just remove it.
// } catch (Exception e) {
// e.printStackTrace();
// }
// getOutputMediaFile(1);
motionDetectorCallback.onMotionDetected();
}
});
}
}
}
}
try {
Thread.sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
}
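One thing the snippet above suggests: the worker keeps calling imgCam.getParameters() even after surfaceDestroyed() has released the camera, which is a common cause of this kind of crash. A minimal guard (a sketch, reusing the isRunning flag from the loop above):
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
    isRunning.set(false);             // stop the detection loop first
    mCamera.setPreviewCallback(null); // no more frames into nextData
    mCamera.stopPreview();
    releaseCamera();                  // only now is it safe to release
}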
Use this to release the camera:
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
mCamera.stopPreview();
mCamera.release();
}
Use this to resume the camera preview in the main activity, after the onCreate() method:
@Override
public void onResume() {
super.onResume();
mCamera = Camera.open();
mCamera.startPreview();
}
There is nothing to do in surfaceChanged().
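A slightly fuller version of that resume path (a sketch; previewHolder and previewCallback are the fields from the question) also re-attaches the surface and the frame callback before starting the preview:
@Override
public void onResume() {
    super.onResume();
    if (mCamera == null) {
        mCamera = Camera.open();
        try {
            mCamera.setPreviewDisplay(previewHolder);    // re-attach the surface
            mCamera.setPreviewCallback(previewCallback); // re-attach the frame callback
        } catch (IOException e) {
            e.printStackTrace();
        }
        mCamera.startPreview();
    }
}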
I need to convert the camera stream to a Bitmap and display it in an ImageView. Here is the code for CameraPreviewActivity:
public class CameraPreviewActivity extends Activity {
private MyCamera myCamera;
private Camera camera;
private ImageView camView;
private Runnable r;
private Bitmap bm;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
myCamera = new MyCamera();
myCamera.startCamera();
camView = (ImageView) findViewById(R.id.camViewId);
setContentView(R.layout.camera_preview_layout);
getCameraFrameView();
}
private void getCameraFrameView() {
r = new Runnable() {
@Override
public void run() {
bm = myCamera.getCameraFrameBitmap();
if(bm != null) {
runOnUiThread(new Runnable() {
@Override
public void run() {
camView.setImageBitmap(bm);
}
});
}
}
};
r.run();
}
}
And here is the class that initializes and runs the camera preview:
public class MyCamera {
private Camera mCamera = null;
private SurfaceTexture st = new SurfaceTexture(1);
private Bitmap cameraFrameBitmap;
private Camera.Parameters parameters;
private Camera.Size previewSize;
public void startCamera() {
Log.d("MyCamera", "Start Camera Initialized");
mCamera = Camera.open();
setupCameraParams();
try{
mCamera.setPreviewTexture(st);
mCamera.startPreview();
Log.d("MyCamera", "Camera Started Preview");
} catch (Exception e) {
e.printStackTrace();
}
}
private void setupCameraParams() {
if(mCamera != null) {
Log.d("MyCamera", "Setup Camera Params Started");
parameters = mCamera.getParameters();
List<Camera.Size> cameraSize = parameters.getSupportedPreviewSizes();
previewSize = cameraSize.get(0);
for(Camera.Size s : cameraSize) {
if((s.width * s.height) > (previewSize.height * previewSize.width)) {
previewSize = s;
}
}
parameters.setPreviewSize(previewSize.width, previewSize.height);
mCamera.setParameters(parameters);
mCamera.setPreviewCallback(previewCallback);
Log.d("MyCamera", "Setup Camera Done");
} else {
return;
}
}
private Camera.PreviewCallback previewCallback = new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
if(parameters == null || previewSize == null) {
return;
}
try{
int format = parameters.getPreviewFormat();
YuvImage yuvImage = new YuvImage(data, format, previewSize.width, previewSize.height, null);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
Rect rect = new Rect(0, 0, previewSize.width, previewSize.height);
yuvImage.compressToJpeg(rect, 100, baos);
BitmapFactory.Options options = new BitmapFactory.Options();
options.inPurgeable = true;
options.inInputShareable = true;
cameraFrameBitmap = BitmapFactory.decodeByteArray(baos.toByteArray(), 0, baos.size(), options);
baos.flush();
baos.close();
} catch (IOException e) {
e.printStackTrace();
}
}
};
public Bitmap getCameraFrameBitmap() {
return cameraFrameBitmap;
}
}
Is this the right way to do it, or is there another way to stream the camera preview into an ImageView as a Bitmap? My app can't display the frame. I think something goes wrong when I run the Runnable; I am not used to Thread and Runnable.
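As posted, getCameraFrameView() runs the Runnable exactly once, synchronously, so at most one frame could ever reach the ImageView. One way to keep it updating (a sketch, assuming a fixed refresh of roughly 30 fps is acceptable) is to re-post the Runnable on a main-thread Handler:
private final Handler frameHandler = new Handler(Looper.getMainLooper());
private final Runnable frameUpdater = new Runnable() {
    @Override
    public void run() {
        Bitmap frame = myCamera.getCameraFrameBitmap();
        if (frame != null) {
            camView.setImageBitmap(frame);  // already on the main thread
        }
        frameHandler.postDelayed(this, 33); // roughly 30 fps
    }
};
// Start with frameHandler.post(frameUpdater) in onResume(),
// stop with frameHandler.removeCallbacks(frameUpdater) in onPause().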
My application crashes on devices with no SD card in them, but works fine on devices that have one. When I debugged it, I found that the app crashes with the above error on
mCamera.takePicture(null, null, jpegCallBack);
I googled a lot but didn't find any solution. I saw this link:
http://forums.androidcentral.com/motorola-droid-x/102987-camera-won-t-take-pictures-without-sd-card.html
So, is it possible to capture images in a background service on a device with no SD card in it?
Please give me some clues.
Here are some methods of my HiddenCamera class:
@SuppressWarnings("deprecation")
private void startCapturingCall() {
final Boolean isSDPresent = android.os.Environment
.getExternalStorageState().equals(
android.os.Environment.MEDIA_MOUNTED);
if (mCamera != null) {
parameters = mCamera.getParameters();
if (FLASH_MODE == null || FLASH_MODE.isEmpty()) {
FLASH_MODE = "auto";
}
parameters.setFlashMode(FLASH_MODE);
pictureSize = getBiggesttPictureSize(parameters);
if (pictureSize != null)
parameters
.setPictureSize(pictureSize.width, pictureSize.height);
// set camera parameters
mCamera.setParameters(parameters);
mCamera.startPreview();
new Handler().postDelayed(new Runnable() {
@SuppressWarnings("deprecation")
@Override
public void run() {
if (isSDPresent) {
mCamera.takePicture(null, null, jpegCallBack);
} else {
Toast.makeText(getApplicationContext(),
"Please Insert SD card", 1000).show();
}
}
}, 2000);
}
}
@SuppressWarnings("deprecation")
Camera.PictureCallback jpegCallBack = new Camera.PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
Boolean isSDPresent = android.os.Environment
.getExternalStorageState().equals(
android.os.Environment.MEDIA_MOUNTED);
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss",
Locale.getDefault()).format(new Date());
// checking for SD card
if (isSDPresent) {
mediaStorageDir = new File(Environment
.getExternalStorageDirectory().getAbsolutePath(),
IMAGE_DIRECTORY_NAME);
mediaFile = new File(mediaStorageDir.getPath() + File.separator
+ "IMG_" + timeStamp + ".jpg");
// Create the storage directory if it does not exist
if (!mediaStorageDir.exists()) {
if (!mediaStorageDir.mkdirs()) {
}
}
try {
Bitmap userImage = BitmapFactory.decodeByteArray(data, 0,
data.length);
// set file out stream
FileOutputStream out = new FileOutputStream(mediaFile);
// set compress format quality and stream
userImage.compress(Bitmap.CompressFormat.JPEG, 50, out);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
userImage.compress(Bitmap.CompressFormat.JPEG, 50, baos);
mByteArray = baos.toByteArray();
try {
out.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} else {
Toast.makeText(getApplicationContext(),
"Please insert SD card !", Toast.LENGTH_LONG).show();
}
if (mediaStorageDir.exists()) {
getPathOfCapturedImage();
}
HiddenCamera.this.finish();
CameraService.IS_ACTIVITY_FINISHED = true;
}
};
Also, isSDPresent always returns true for me.
Please give me your suggestions on this; I have been stuck at this point for the last 2-3 days.
This also seems to be a device issue: on a Samsung Grand my code works fine even without an SD card, but on a Moto E the application crashes. Camera settings play an important role here.
Thanks
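As a side note, if the goal is only to capture without removable storage, the JPEG bytes can also be written to app-internal storage, which needs no SD card at all (a minimal sketch inside onPictureTaken(); getFilesDir() is the app's private directory, and IMAGE_DIRECTORY_NAME, timeStamp and data are the names from the code above):
File dir = new File(getFilesDir(), IMAGE_DIRECTORY_NAME);
if (dir.exists() || dir.mkdirs()) {
    File photo = new File(dir, "IMG_" + timeStamp + ".jpg");
    try (FileOutputStream out = new FileOutputStream(photo)) {
        out.write(data); // raw JPEG bytes from the picture callback
    } catch (IOException e) {
        e.printStackTrace();
    }
}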
Finally I am done with this. I got busy with some other tasks, but today I have time to post my answer on this topic. As the topic is very general, I am posting this answer to help others who might want this functionality. I did it using SurfaceTexture, but it will only work for versions greater than 4; for versions less than 4 you need to use SurfaceView.
So here is my code:
public class SurfaceTextureActivity extends Activity implements
SurfaceTextureListener {
private Parameters mParameters;
private Camera.Size mPictureSize;
private static final String sIMAGE_DIRECTORY_NAME = "HiddenCapturedPics";
private byte[] mByteArray;
private Camera mCamera;
private TextureView mTextureView;
private File mMediaFile, mMediaStorageDir = null;
private String mEncodedImage, mImageName, mFinalResponse,
mFlashMode;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mTextureView = new TextureView(this);
setContentView(mTextureView);
if (checkCameraHardware(getApplicationContext())) {
mTextureView.setSurfaceTextureListener(this);
Bundle extras = getIntent().getExtras();
mFlashMode = extras.getString("FLASH");
} else {
Toast.makeText(getApplicationContext(),
"Your Device dosen't have a Camera !", Toast.LENGTH_LONG)
.show();
}
}
/** Check if this device has a camera */
private boolean checkCameraHardware(Context context) {
if (context.getPackageManager().hasSystemFeature(
PackageManager.FEATURE_CAMERA)) {
return true;
} else {
return false;
}
}
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width,
int height) {
mCamera = Camera.open();
mTextureView.setLayoutParams(new FrameLayout.LayoutParams(0, 0,
Gravity.CENTER));
try {
mCamera.setPreviewTexture(surface);
} catch (IOException t) {
}
mCamera.startPreview();
startCapturingCall();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width,
int height) {
// Ignored, the Camera does all the work for us
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
mCamera.stopPreview();
mCamera.release();
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
Toast.makeText(getApplicationContext(), "Dfg", Toast.LENGTH_SHORT)
.show();
// Update your view here!
}
Camera.PictureCallback jpegCallBack = new Camera.PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
Boolean isSDPresent = android.os.Environment
.getExternalStorageState().equals(
android.os.Environment.MEDIA_MOUNTED);
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss",
Locale.getDefault()).format(new Date());
// checking for SD card
if (isSDPresent) {
mMediaStorageDir = new File(Environment
.getExternalStorageDirectory().getAbsolutePath(),
sIMAGE_DIRECTORY_NAME);
mMediaFile = new File(mMediaStorageDir.getPath()
+ File.separator + "IMG_" + timeStamp + ".jpg");
if (!mMediaStorageDir.exists()) {
if (!mMediaStorageDir.mkdirs()) {
}
}
try {
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 8;
Bitmap userImage = BitmapFactory.decodeByteArray(data, 0,
data.length, options);
FileOutputStream out = new FileOutputStream(mMediaFile);
userImage.compress(Bitmap.CompressFormat.JPEG, 50, out);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
userImage.compress(Bitmap.CompressFormat.JPEG, 50, baos);
mByteArray = baos.toByteArray();
try {
out.close();
} catch (IOException e) {
e.printStackTrace();
}
} catch (FileNotFoundException e) {
e.printStackTrace();
}
} else {
Toast.makeText(getApplicationContext(),
"Please insert SD card !", Toast.LENGTH_LONG).show();
}
if (mMediaStorageDir.exists()) {
getPathOfCapturedImage();
}
SurfaceTextureActivity.this.finish();
CameraService.IS_ACTIVITY_FINISHED = true;
}
};
private void startCapturingCall() {
if (mCamera != null) {
mParameters = mCamera.getParameters();
if (mFlashMode == null || mFlashMode.isEmpty()) {
mFlashMode = "auto";
}
mParameters.setFlashMode(mFlashMode);
mPictureSize = getBiggesttPictureSize(mParameters);
if (mPictureSize != null)
mParameters.setPictureSize(mPictureSize.width,
mPictureSize.height);
mCamera.setParameters(mParameters);
mCamera.startPreview();
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
if (mCamera != null) {
mCamera.startPreview();
mCamera.takePicture(null, null, jpegCallBack);
} else {
mCamera = getCameraInstance();
mCamera.startPreview();
mCamera.takePicture(null, null, jpegCallBack);
}
}
}, 2000);
}
}
private Camera.Size getBiggesttPictureSize(Camera.Parameters parameters) {
Camera.Size result = null;
for (Camera.Size size : parameters.getSupportedPictureSizes()) {
if (result == null) {
result = size;
} else {
int resultArea = result.width * result.height;
int newArea = size.width * size.height;
if (newArea > resultArea) {
result = size;
}
}
}
return (result);
}
public static Camera getCameraInstance() {
Camera c = null;
try {
c = Camera.open(); // attempt to get a Camera instance
} catch (Exception e) {
// Camera is not available (in use or does not exist)
}
return c; // returns null if camera is unavailable
}
}
Hope this will help others.
Here are the links for reference:
Example of Camera preview using SurfaceTexture in Android
Camera.takePicture throws RunTimeException
Cheers!!!!!
Hi, I am working on an Android application to detect a page number using the camera preview.
After I receive a frame from onPreviewFrame, I do real-time image processing with OpenCV to find the page number position. Now my question is: how can I draw a rectangle on the SurfaceView?
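One common approach is to leave the SurfaceView alone and stack a transparent custom View on top of it (for example, in the same FrameLayout), then draw the rectangle there. A minimal overlay sketch (the Rect is assumed to come from the OpenCV step):
public class RectOverlayView extends View {
    private final Paint paint = new Paint();
    private Rect target; // page-number position from the OpenCV processing

    public RectOverlayView(Context context) {
        super(context);
        paint.setColor(Color.RED);
        paint.setStyle(Paint.Style.STROKE);
        paint.setStrokeWidth(4f);
    }

    public void setTarget(Rect rect) {
        target = rect;
        postInvalidate(); // safe to call from the onPreviewFrame worker thread
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        if (target != null) {
            canvas.drawRect(target, paint);
        }
    }
}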
Use this code
PreviewDemo.java
public class PreviewDemo extends Activity implements OnClickListener {
private SurfaceView preview = null;
private SurfaceHolder previewHolder = null;
private Camera camera = null;
private boolean inPreview = false;
ImageView image;
Bitmap bmp, itembmp;
static Bitmap mutableBitmap;
PointF start = new PointF();
PointF mid = new PointF();
float oldDist = 1f;
File imageFileName = null;
File imageFileFolder = null;
private MediaScannerConnection msConn;
Display d;
int screenhgt, screenwdh;
ProgressDialog dialog;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.preview);
image = (ImageView) findViewById(R.id.image);
preview = (SurfaceView) findViewById(R.id.surface);
previewHolder = preview.getHolder();
previewHolder.addCallback(surfaceCallback);
previewHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
previewHolder.setFixedSize(getWindow().getWindowManager()
.getDefaultDisplay().getWidth(), getWindow().getWindowManager()
.getDefaultDisplay().getHeight());
}
@Override
public void onResume() {
super.onResume();
camera = Camera.open();
}
@Override
public void onPause() {
if (inPreview) {
camera.stopPreview();
}
camera.release();
camera = null;
inPreview = false;
super.onPause();
}
private Camera.Size getBestPreviewSize(int width, int height, Camera.Parameters parameters) {
Camera.Size result = null;
for (Camera.Size size: parameters.getSupportedPreviewSizes()) {
if (size.width <= width && size.height <= height) {
if (result == null) {
result = size;
} else {
int resultArea = result.width * result.height;
int newArea = size.width * size.height;
if (newArea > resultArea) {
result = size;
}
}
}
}
return (result);
}
SurfaceHolder.Callback surfaceCallback = new SurfaceHolder.Callback() {
public void surfaceCreated(SurfaceHolder holder) {
try {
camera.setPreviewDisplay(previewHolder);
} catch (Throwable t) {
Log.e("PreviewDemo-surfaceCallback",
"Exception in setPreviewDisplay()", t);
Toast.makeText(PreviewDemo.this, t.getMessage(), Toast.LENGTH_LONG)
.show();
}
}
public void surfaceChanged(SurfaceHolder holder,
int format, int width,
int height) {
Camera.Parameters parameters = camera.getParameters();
Camera.Size size = getBestPreviewSize(width, height,
parameters);
if (size != null) {
parameters.setPreviewSize(size.width, size.height);
camera.setParameters(parameters);
camera.startPreview();
inPreview = true;
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// no-op
}
};
Camera.PictureCallback photoCallback = new Camera.PictureCallback() {
public void onPictureTaken(final byte[] data, final Camera camera) {
dialog = ProgressDialog.show(PreviewDemo.this, "", "Saving Photo");
new Thread() {
public void run() {
try {
Thread.sleep(1000);
} catch (Exception ex) {}
onPictureTake(data, camera);
}
}.start();
}
};
public void onPictureTake(byte[] data, Camera camera) {
bmp = BitmapFactory.decodeByteArray(data, 0, data.length);
mutableBitmap = bmp.copy(Bitmap.Config.ARGB_8888, true);
savePhoto(mutableBitmap);
dialog.dismiss();
}
class SavePhotoTask extends AsyncTask<byte[], String, String> {
@Override
protected String doInBackground(byte[]...jpeg) {
File photo = new File(Environment.getExternalStorageDirectory(), "photo.jpg");
if (photo.exists()) {
photo.delete();
}
try {
FileOutputStream fos = new FileOutputStream(photo.getPath());
fos.write(jpeg[0]);
fos.close();
} catch (java.io.IOException e) {
Log.e("PictureDemo", "Exception in photoCallback", e);
}
return (null);
}
}
public void savePhoto(Bitmap bmp) {
imageFileFolder = new File(Environment.getExternalStorageDirectory(), "Rotate");
imageFileFolder.mkdir();
FileOutputStream out = null;
Calendar c = Calendar.getInstance();
String date = fromInt(c.get(Calendar.MONTH)) + fromInt(c.get(Calendar.DAY_OF_MONTH)) + fromInt(c.get(Calendar.YEAR)) + fromInt(c.get(Calendar.HOUR_OF_DAY)) + fromInt(c.get(Calendar.MINUTE)) + fromInt(c.get(Calendar.SECOND));
imageFileName = new File(imageFileFolder, date.toString() + ".jpg");
try {
out = new FileOutputStream(imageFileName);
bmp.compress(Bitmap.CompressFormat.JPEG, 100, out);
out.flush();
out.close();
scanPhoto(imageFileName.toString());
out = null;
} catch (Exception e) {
e.printStackTrace();
}
}
public String fromInt(int val) {
return String.valueOf(val);
}
public void scanPhoto(final String imageFileName) {
msConn = new MediaScannerConnection(PreviewDemo.this, new MediaScannerConnectionClient() {
public void onMediaScannerConnected() {
msConn.scanFile(imageFileName, null);
Log.i("msClient obj in Photo Utility", "connection established");
}
public void onScanCompleted(String path, Uri uri) {
msConn.disconnect();
Log.i("msClient obj in Photo Utility", "scan completed");
}
});
msConn.connect();
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_MENU && event.getRepeatCount() == 0) {
onBack();
}
return super.onKeyDown(keyCode, event);
}
public void onBack() {
Log.e("onBack :", "yes");
camera.takePicture(null, null, photoCallback);
inPreview = false;
}
@Override
public void onClick(View v) {
}
}
Preview.xml
<RelativeLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent">
<android.view.SurfaceView
android:id="@+id/surface"
android:layout_width="fill_parent"
android:layout_height="fill_parent" />
</RelativeLayout>
Also add the required permissions to the manifest.
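Presumably these are the camera and storage permissions; a sketch of the manifest entries (the exact set depends on what else the app does):
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-feature android:name="android.hardware.camera" />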
I am developing a custom camera application and testing it on two Galaxy tablets, one 7-inch and one 10-inch. The 10-inch works great, but on the 7-inch, when I take a picture, the camera preview freezes and logcat stops at the "CAMERA Snap, CLICKED" log from the btn_snap_pic click listener in my CustomCamera class, with no error. The app doesn't crash, it just hangs, and if I back out of it and open the app again I get a "fail to connect to camera" error. I assume that happens because when my app froze and I backed out, the camera never got released. Anyway, below are both my CustomCamera class and my CamLayer class, which is my preview.
public class CustomCamera extends Activity{
String camFace ="back";
FrameLayout frame;
RelativeLayout rel;
CamLayer camPreview;
ImageView btn_snap_pic;
ImageView btn_switch_cam;
String TAG = "custom cam";
public void onCreate(Bundle savedInstanceState)
{
Bitmap btnSwitch = BitmapFactory.decodeResource(this.getResources(),
R.drawable.btn_switch_camera);
Bitmap btnSnap = BitmapFactory.decodeResource(this.getResources(),
R.drawable.btn_take_picture);
super.onCreate(savedInstanceState);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
//Set Screen Orientation
this.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
RelativeLayout.LayoutParams buttonS = new RelativeLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT,
ViewGroup.LayoutParams.WRAP_CONTENT);
RelativeLayout.LayoutParams buttonT = new RelativeLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT,
ViewGroup.LayoutParams.WRAP_CONTENT);
RelativeLayout.LayoutParams cam = new RelativeLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT,
ViewGroup.LayoutParams.WRAP_CONTENT);
cam.addRule(RelativeLayout.BELOW);
buttonS.addRule(RelativeLayout.ALIGN_PARENT_RIGHT);
buttonT.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM);
buttonT.addRule(RelativeLayout.ALIGN_PARENT_RIGHT);
try{
//Create Intance of Camera
camPreview = new CamLayer(this.getApplicationContext(),camFace);
//Relative view for everything
rel = new RelativeLayout(this);
// set as main view
setContentView(rel);
//FrameLayOut for camera
frame = new FrameLayout(this);
// add Camera to view
frame.setLayoutParams(cam);
frame.addView(camPreview);
rel.addView(frame);
btn_switch_cam = new ImageView (this);
btn_switch_cam.setImageBitmap(btnSwitch);
btn_switch_cam.setLayoutParams(buttonS);
buttonS.rightMargin = 25;
buttonS.topMargin = 25;
rel.addView(btn_switch_cam);
btn_switch_cam.setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
Log.v("CAMERA Switch", "CLICKED");
//frame.removeView(camPreview);
if(camFace.equals("front")){
camFace = "back";
}else{
camFace = "front";
}
//camPreview.stopCamera();
frame.removeView(camPreview);
restartCam();
//camPreview.switchCam(camFace);
}
});
btn_snap_pic = new ImageView(this);
btn_snap_pic.setImageBitmap(btnSnap);
btn_snap_pic.setLayoutParams(buttonT);
buttonT.rightMargin = 25;
buttonT.bottomMargin = 25;
rel.addView(btn_snap_pic);
btn_snap_pic.setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
Log.v("CAMERA Snap", "CLICKED");
camPreview.camera.takePicture(shutterCallback, rawCallback,
jpegCallback);
}
});
} catch(Exception e){}
}
public void restartCam(){
camPreview = new CamLayer(this.getApplicationContext(),camFace);
frame.addView(camPreview);
}
ShutterCallback shutterCallback = new ShutterCallback() {
public void onShutter() {
Log.d(TAG, "onShutter'd");
}
};
/** Handles data for raw picture */
PictureCallback rawCallback = new PictureCallback() {
public void onPictureTaken(byte[] data, android.hardware.Camera camera) {
Log.d(TAG, "onPictureTaken - raw");
}
};
/** Handles data for jpeg picture */
PictureCallback jpegCallback = new PictureCallback() {
public void onPictureTaken(byte[] data, android.hardware.Camera camera) {
FileOutputStream outStream = null;
try {
outStream = new FileOutputStream(String.format(
"/sdcard/LC/images/%d.jpg", System.currentTimeMillis()));
outStream.write(data);
outStream.close();
Log.d(TAG, "onPictureTaken - wrote bytes: " + data.length);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
}
Log.d(TAG, "onPictureTaken - jpeg");
}
};
}
And the CamLayer:
public class CamLayer extends SurfaceView implements SurfaceHolder.Callback {
Camera camera;
SurfaceHolder previewHolder;
String camID;
private static final String TAG = "Cam Preview";
public CamLayer(Context context, String facing)
{
super(context);
camID = facing;
previewHolder = this.getHolder();
previewHolder.addCallback(this);
previewHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder holder) {
startCamera();
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height)
{
Parameters params = camera.getParameters();
//params.setPreviewSize(width, height);
//params.setPictureFormat(PixelFormat.JPEG);
camera.setParameters(params);
camera.startPreview();
}
public void surfaceDestroyed(SurfaceHolder arg0)
{
//camera.stopPreview();
//camera.release();
stopCamera();
}
public void onResume() {
//camera.startPreview();
startCamera();
}
public void onPause() {
// TODO Auto-generated method stub
//camera.stopPreview();
stopCamera();
}
public void switchCam(String newCamId) {
/*camera.stopPreview();
//camera.release();
if(camID.equals("front")){
camera.open(Camera.CameraInfo.CAMERA_FACING_FRONT);
}else{
camera.open(Camera.CameraInfo.CAMERA_FACING_BACK);
}*/
//camera.startPreview();
//camera=Camera.open(Camera.CameraInfo.CAMERA_FACING_BACK);
stopCamera();
camID = newCamId;
startCamera();
}
public void stopCamera(){
System.out.println("stopCamera method");
if (camera != null){
camera.stopPreview();
camera.setPreviewCallback(null);
camera.release();
camera = null;
previewHolder.removeCallback(this);
previewHolder = null;
}
}
private void startCamera(){
if(camID.equals("front")){
camera=Camera.open(Camera.CameraInfo.CAMERA_FACING_FRONT);
}else{
camera=Camera.open(Camera.CameraInfo.CAMERA_FACING_BACK);
}
try {
camera.setPreviewDisplay(previewHolder);
camera.setPreviewCallback(new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera arg1) {
//FileOutputStream outStream = null;
/*try {
//outStream = new FileOutputStream(String.format(
//"/sdcard/%d.jpg", System.currentTimeMillis()));
//outStream.write(data);
//outStream.close();
Log.d(TAG, "onPreviewFrame - wrote bytes: "
+ data.length);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
}*/
//CamLayer.this.invalidate();
}
});
}
catch (Throwable e){ Log.w("TAG,", "failed create surface !?!?"); }
}
public void draw(Canvas canvas) {
super.draw(canvas);
Paint p = new Paint(Color.RED);
Log.d(TAG, "draw");
canvas.drawText("PREVIEW", canvas.getWidth() / 2,
canvas.getHeight() / 2, p);
}
}
This thread says raw is not supported
https://groups.google.com/forum/?fromgroups#!topic/android-developers/D43AdrbP9oE
A raw image would consume too much memory, is my guess. Other than that, I'm also disappointed it's not supported.
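Setting raw aside, the freeze followed by "fail to connect to camera" also matches two gaps in the posted code: the preview is never restarted after takePicture() (the driver stops it), and the camera is never released when the activity goes to the background. A hedged sketch against the classes above:
// In CustomCamera's jpegCallback, after writing the file:
camPreview.camera.startPreview(); // takePicture() stops the preview

// In CustomCamera:
@Override
protected void onPause() {
    super.onPause();
    if (camPreview != null) {
        camPreview.stopCamera(); // release so the next launch can open the camera
    }
}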