I want to save the image only if a face is detected; currently it saves even when no face is present. I want to capture only when a face is detected, and I am using the Android face API for face detection.
My code is below. I want to save the image only if there are faces, i.e. the else block is where I need to put my save code. But in my code, if I start the capture session inside the else block I can't access readerListener, so how do I do this?
final ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = null;
try {
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
save(bytes);
mBitmapToSave1 = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
mBitmapToSave = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
Bitmap scaled = Bitmap.createScaledBitmap(mBitmapToSave, width, height, true);
int w = scaled.getWidth();
int h = scaled.getHeight();
// Set post-rotation to -180 degrees
Matrix mtx = new Matrix();
mtx.postRotate(-180);
// Rotating Bitmap
mBitmapToSave = Bitmap.createBitmap(scaled, 0, 0, w, h, mtx, true);
// mBitmapToSave = Bitmap.createBitmap(width+rowPadding/pixelStride,height, Bitmap.Config.RGB_565);
// mBitmapToSave.copyPixelsToBuffer(buffer);
if (detector.isOperational() && mBitmapToSave != null) {
Frame frame = new Frame.Builder().setBitmap(mBitmapToSave).build();
SparseArray<Face> faces = detector.detect(frame);
for (index = 0; index < faces.size(); ++index) {
Face face = faces.valueAt(index);
}
if (faces.size() == 0) {
MediaPlayer mediaPlayer = MediaPlayer.create(getApplicationContext(), R.raw.not);
mediaPlayer.start();
//Toast.makeText(AndroidCamera2API.this, "Face Not detected Adjust Camera Properly", Toast.LENGTH_SHORT).show();
} else {
Toast.makeText(AndroidCamera2API.this, "Face Found " + "\n", Toast.LENGTH_SHORT).show();
}
}
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch(IOException e){
e.printStackTrace();
} finally{
if (image != null) {
image.close();
}
}
}
private void save(byte[] bytes) throws IOException {
OutputStream output = null;
try {
output = new FileOutputStream(file);
output.write(bytes);
} finally {
if (null != output) {
output.close();
}
}
}
};
reader.setOnImageAvailableListener(readerListener, mBackgroundHandler);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
Toast.makeText(AndroidCamera2API.this, "Saved:" + file, Toast.LENGTH_SHORT).show();
uploadMultipart();
createCameraPreview();
}
};
cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
session.capture(captureBuilder.build(), captureListener, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
}
}, mBackgroundHandler);
mBitmapToSave = null;
} catch(CameraAccessException e){
e.printStackTrace();
}
}
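One way to restructure this, as a minimal sketch (it assumes the same detector, file, and save() members as the code above, and omits the scaling/rotation step for brevity): keep a single readerListener, but only call save() once the detector has confirmed at least one face, instead of saving unconditionally before detection.
final ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader reader) {
        Image image = null;
        try {
            image = reader.acquireLatestImage();
            ByteBuffer buffer = image.getPlanes()[0].getBuffer();
            byte[] bytes = new byte[buffer.remaining()];
            buffer.get(bytes);
            Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
            if (detector.isOperational() && bitmap != null) {
                Frame frame = new Frame.Builder().setBitmap(bitmap).build();
                SparseArray<Face> faces = detector.detect(frame);
                if (faces.size() > 0) {
                    save(bytes); // write the JPEG only when a face was found
                } else {
                    // no face: skip saving (play the warning sound here instead)
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            if (image != null) {
                image.close();
            }
        }
    }
};
Note that if the sensor delivers the JPEG rotated, you may still need the rotation step before detect(), or the detector may not find the face.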
I was trying to use the Camera2 API of Android. The front camera is working fine, but when it comes to using the back/rear camera, this error occurs:
LegacyCameraDevice_nativeGetSurfaceId: Could not retrieve native Surface from surface.
This problem occurs after I click the button to take a picture. The capture callback is successful, but I get no image in onImageAvailable().
I followed this tutorial: https://web.archive.org/web/20161011160303/https://inducesmile.com/android/android-camera2-api-example-tutorial/ . I have no idea how to proceed with the error I am facing right now.
Here is the code used in capturing the image:
private void takePicture() {
if (mCameraDevice == null) {
return;
}
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
CameraManager mManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics mCharacteristics = mManager.getCameraCharacteristics(mCameraDevice.getId());
Size[] jpegSizes = null;
if (mCharacteristics != null) {
jpegSizes = mCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.JPEG);
}
int width = 640;
int height = 480;
for(int x = 0; x < jpegSizes.length; x++) {
Log.wtf("jpegSizes", String.valueOf(jpegSizes[x]));
}
if (jpegSizes != null && jpegSizes.length > 0) {
width = jpegSizes[4].getWidth();
height = jpegSizes[4].getHeight();
}
final ImageReader mReader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
List<Surface> mOutputSurface = new ArrayList<>(2);
mOutputSurface.add(mReader.getSurface());
mOutputSurface.add(new Surface(mTextureView.getSurfaceTexture()));
final CaptureRequest.Builder mCaptureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
mCaptureBuilder.addTarget(mReader.getSurface());
mCaptureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
int mRotation = getWindowManager().getDefaultDisplay().getRotation();
int jpegOrientation = (ORIENTATIONS.get(mRotation) + mSensorOrientation + 270) % 360;
if(cameraId.equals("0")) {
mCaptureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(mRotation));
} else {
if(extras.getString("orient").equals("landscape")) {
mCaptureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(mRotation));
} else {
mCaptureBuilder.set(CaptureRequest.JPEG_ORIENTATION, jpegOrientation);
}
}
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
if(mImage == null) {
Toast.makeText(StartCameraActivity.this, "Capturing Image Failed, Please Try Again", Toast.LENGTH_SHORT).show();
Log.wtf("onCaptureComplete", "Image not Available");
} else {
Log.wtf("onCaptureComplete", "Image Available");
}
//createCameraPreview();
}
@Override
public void onCaptureFailed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull CaptureFailure failure) {
super.onCaptureFailed(session, request, failure);
Log.wtf("FAILED", failure.toString());
}
};
mCameraDevice.createCaptureSession(mOutputSurface, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
Log.wtf("onConfigured", "succes");
try {
session.capture(mCaptureBuilder.build(), captureListener, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
Log.wtf("onConfigureFailed", "failed");
}
}, mBackgroundHandler);
mReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(final ImageReader reader) {
mImage = reader.acquireLatestImage();
Log.wtf("imageAvail", "OnImageAvailable");
StartCameraActivity.this.runOnUiThread(new Runnable() {
@Override
public void run() {
if (mImage == null) {
return;
}
final Image.Plane[] planes = mImage.getPlanes();
final ByteBuffer buffer = planes[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
mTextureView.setVisibility(View.INVISIBLE);
if(cameraId.equals("0")) {
screenshotHolder.setImageBitmap(bitmap);
} else {
screenshotHolder.setImageBitmap(flip(bitmap, mImage.getWidth(), mImage.getHeight()));
}
new RenderPicture(StartCameraActivity.this).execute();
if (mImage != null) {
mImage.close();
}
if(mReader != null) {
mReader.close();
}
}
});
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
}
Your ImageReader is a local variable in takePicture, and doesn't look like it's stored anywhere in the parent class. It's likely being garbage collected immediately or soon after takePicture exits, so when the camera tries to set itself up, the Surface reports as being abandoned.
A Surface is like a weak reference and won't keep the ImageReader alive by itself. Store it in the parent class like you do with the camera device.
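A minimal sketch of that fix (the method name is illustrative; the fields mirror the code above): promote the reader to a member so it survives until the capture completes.
// Field in the activity, instead of a local in takePicture():
private ImageReader mReader;

private void prepareStillReader(int width, int height) {
    mReader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
    // takePicture() then adds mReader.getSurface() to mOutputSurface and
    // registers the OnImageAvailableListener on mReader as before.
}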
I am new to the Camera2 API. I want to build an image-processing framework on my Android phone:
Step 1: use the Camera2 API to open a camera preview stream
Step 2: feed the preview frame data to OpenCV for processing
Step 3: display the processed result live on the screen
Currently, I have finished Step 1 using ImageReader and C++ OpenCV code. However, I don't know how to do Step 3.
How do I display the processed image on the screen? (I want to display the normal image, and overlay an icon if I detect a predefined object.)
Here is the key code:
protected void createCameraPreview() {
try {
SurfaceTexture texture = textureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(imageDimension.getWidth(), imageDimension.getHeight());
// Surface surface = new Surface(texture);
Surface mImageSurface = mImageReader.getSurface();
captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
// captureRequestBuilder.addTarget(surface);
captureRequestBuilder.addTarget(mImageSurface);
cameraDevice.createCaptureSession(Arrays.asList(mImageSurface), new CameraCaptureSession.StateCallback(){
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
//The camera is already closed
if (null == cameraDevice) {
return;
}
cameraCaptureSessions = cameraCaptureSession;
updatePreview();
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
Toast.makeText(MainActivity.this, "Configuration change", Toast.LENGTH_SHORT).show();
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
protected void updatePreview() {
if(null == cameraDevice) {
Log.e(TAG, "updatePreview error, return");
}
try {
cameraCaptureSessions.setRepeatingRequest(captureRequestBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private final ImageReader.OnImageAvailableListener mOnImageAvailableListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Log.e(TAG, "onImageAvailable: " + count++);
Image img = null;
img = reader.acquireNextImage();
try {
if (img == null) throw new NullPointerException("cannot be null");
ByteBuffer buffer = img.getPlanes()[0].getBuffer();
byte[] data = new byte[buffer.remaining()];
buffer.get(data);
int width = img.getWidth();
int height = img.getHeight();
// ****try to get captured img for display here (synchronized)
// ****try to process image for detecting the object here (asynchronized)
} catch (NullPointerException ex) {
ex.printStackTrace();
}finally {
Log.e(TAG, "in the finally! ------------");
if (img != null)
img.close();
}
}
};
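For Step 3, one common approach, sketched here under the assumption that processedOverlay is an ImageView stacked above the preview in the layout (this is not from the original post): wrap the processed RGBA buffer in a Bitmap and post it to the UI thread.
private void showProcessedFrame(final byte[] rgba, final int width, final int height) {
    // Assumes OpenCV produced an RGBA buffer of width * height * 4 bytes.
    final Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    bmp.copyPixelsFromBuffer(ByteBuffer.wrap(rgba));
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            processedOverlay.setImageBitmap(bmp);
        }
    });
}
The detection icon can then be a second small view toggled on top of the overlay when the object is found.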
I am trying to get JPEG images from both cameras in parallel on the Snapdragon 820 platform.
I am not getting the first camera's image callback; I only get the second camera's JPEG callback.
Here is my code:
protected void takePictureBack() {
Log.d(TAG, "takePictureBack() called");
if (null == cameraDeviceBack) {
Log.e(TAG, "cameraDeviceBack is null");
return;
}
try {
final File file_back = new File(Environment.getExternalStorageDirectory() + "/pic_back.jpg");
final CaptureRequest.Builder captureBuilderBack = cameraDeviceBack.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
List<Surface> outputSurfaces = new ArrayList<Surface>(3);
outputSurfaces.add(new Surface(mTextureViewBack.getSurfaceTexture()));
ImageReader reader = ImageReader.newInstance(640, 480, ImageFormat.JPEG, 1);
outputSurfaces.add(reader.getSurface());
captureBuilderBack.addTarget(reader.getSurface());
ImageReader.OnImageAvailableListener readerListenerBack = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Log.d(TAG, "onImageAvailable() called with: reader = [" + reader + "]");
if (reader.getImageFormat() == ImageFormat.JPEG) {
Log.d(TAG, "onImageAvailable() called with back: reader = JPEG");
Image image = null;
try {
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
save(bytes);
} catch (IOException e) {
e.printStackTrace();
} finally {
if (image != null) {
image.close();
}
}
}
}
private void save(byte[] bytes) throws IOException {
OutputStream output = null;
try {
output = new FileOutputStream(file_back);
output.write(bytes);
} finally {
if (null != output) {
output.close();
}
}
}
};
reader.setOnImageAvailableListener(readerListenerBack, mBackgroundHandlerBack);
captureBuilderBack.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
final CameraCaptureSession.CaptureCallback captureListenerBack = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
if (DEBUG) Log.d(TAG, "onCaptureCompleted: take picture back successfully");
//Toast.makeText(getActivity(), "Take picture successfully", Toast.LENGTH_SHORT).show();
createCameraPreviewBack();
mCaptureResultBack = result;
}
};
cameraDeviceBack.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
session.capture(captureBuilderBack.build(), captureListenerBack, mBackgroundHandlerBack);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
}
}, mBackgroundHandlerBack);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
The front camera capture code is the same.
Individual single-camera JPEG capture works fine.
Any idea why I am not getting both JPEG image callbacks?
I found the solution: we need to make the ImageReader reader a global (class-level) variable.
With this change I am able to get the two JPEGs from both cameras.
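A sketch of that change (the front-camera listener and handler names are assumed to mirror the back-camera ones): hold one reader per camera in class-level fields so neither is garbage collected while its session is configured.
private ImageReader mReaderBack;
private ImageReader mReaderFront;

private void initStillReaders() {
    mReaderBack = ImageReader.newInstance(640, 480, ImageFormat.JPEG, 1);
    mReaderBack.setOnImageAvailableListener(readerListenerBack, mBackgroundHandlerBack);
    mReaderFront = ImageReader.newInstance(640, 480, ImageFormat.JPEG, 1);
    mReaderFront.setOnImageAvailableListener(readerListenerFront, mBackgroundHandlerFront);
}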
I am developing a custom camera, but when trying to capture an image using the Camera2 API I get a black image. I am using the code below for capture:
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
String cameraId = "";
if (cameraFront) {
cameraId = "" + findFrontFacingCamera();
} else {
cameraId = "" + findBackFacingCamera();
}
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
// CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraDevice.getId());
Size[] jpegSizes = null;
if (characteristics != null) {
jpegSizes =
characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(
ImageFormat.JPEG);
}
int width = 720;
int height = 640;
if (jpegSizes != null && 0 < jpegSizes.length) {
width = jpegSizes[0].getWidth();
height = jpegSizes[0].getHeight();
}
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size largest =
Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
ImageReader reader =
ImageReader.newInstance(largest.getWidth(), largest.getHeight(), ImageFormat.JPEG, /* maxImages */1);
// ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
List<Surface> outputSurfaces = new ArrayList<Surface>(2);
outputSurfaces.add(reader.getSurface());
outputSurfaces.add(new Surface(mTextureView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder =
mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureBuilder.addTarget(reader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
// Orientation
int rotation = getWindowManager().getDefaultDisplay().getRotation();
if (cameraFront) {
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation) + 180);
} else {
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
}
final File file = getOutputMediaFile();
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener()
{
@Override
public void onImageAvailable(ImageReader reader)
{
Image image = null;
try {
image = reader.acquireLatestImage();
// ByteBuffer buffer = image.getPlanes()[0].getBuffer();
// final byte[] bytes = new byte[buffer.capacity()];
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
final byte[] bytes = new byte[buffer.remaining()];
buffer.get(bytes);
save(bytes);
buffer.clear();
runOnUiThread(new Runnable()
{
@Override
public void run()
{
mThumbnail.setVisibility(View.VISIBLE);
filePathLabel.setVisibility(View.VISIBLE);
filePathValue.setText(file.getAbsolutePath());
Bitmap bmp =
UtilityMethods.getScaledBitmap(CameraImageTestActivityLoliipop.this, bytes);
mThumbnail.setImageBitmap(bmp);
}
});
} catch (FileNotFoundException e) {
AppLogger.exception(myContext, getClass().getSimpleName(), e);
// e.printStackTrace();
} catch (IOException e) {
AppLogger.exception(myContext, getClass().getSimpleName(), e);
// e.printStackTrace();
} finally {
if (image != null) {
image.close();
}
}
}
private void save(byte[] bytes) throws IOException
{
OutputStream output = null;
try {
output = new FileOutputStream(file);
output.write(bytes);
} finally {
if (null != output) {
output.close();
}
}
}
};
HandlerThread thread = new HandlerThread("CameraPicture");
thread.start();
final Handler backgroudHandler = new Handler(thread.getLooper());
reader.setOnImageAvailableListener(readerListener, backgroudHandler);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback()
{
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
TotalCaptureResult result)
{
super.onCaptureCompleted(session, request, result);
startPreview();
}
};
mCameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback()
{
@Override
public void onConfigured(CameraCaptureSession session)
{
try {
session.capture(captureBuilder.build(), captureListener, backgroudHandler);
} catch (CameraAccessException e) {
AppLogger.exception(myContext, getClass().getSimpleName(), e);
// e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session)
{
}
}, backgroudHandler);
} catch (CameraAccessException e) {
AppLogger.exception(myContext, getClass().getSimpleName(), e);
// e.printStackTrace();
}
And below are the methods for the camera preview:
protected void startPreview()
{
try {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
Log.e(TAG, "startPreview fail, return");
return;
}
SurfaceTexture texture = mTextureView.getSurfaceTexture();
if (null == texture) {
Log.e(TAG, "texture is null, return");
return;
}
Log.e(TAG, "Width: " + mPreviewSize.getWidth() + " Height: " + mPreviewSize.getHeight());
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface surface = new Surface(texture);
try {
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
} catch (CameraAccessException e) {
AppLogger.exception(myContext, getClass().getSimpleName(), e);
// e.printStackTrace();
}
mPreviewBuilder.addTarget(surface);
mCameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback()
{
@Override
public void onConfigured(CameraCaptureSession session)
{
mPreviewSession = session;
updatePreview();
}
@Override
public void onConfigureFailed(CameraCaptureSession session)
{
}
}, null);
} catch (CameraAccessException e) {
AppLogger.exception(myContext, getClass().getSimpleName(), e);
// e.printStackTrace();
}
}
protected void updatePreview()
{
try {
if (null == mCameraDevice) {
Log.e(TAG, "updatePreview error, return");
}
mPreviewBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
// mPreviewBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
// Flash is automatically enabled when necessary.
mPreviewBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
HandlerThread thread = new HandlerThread("CameraPreview");
thread.start();
Handler backgroundHandler = new Handler(thread.getLooper());
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, backgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
It is working on all devices, but when I run it on a Micromax Q382 device I get a black image with the warnings below in logcat:
I/Choreographer: Skipped 37 frames! The application may be doing too much work on its main thread.
W/ImageReader_JNI: Unable to acquire a lockedBuffer, very likely client tries to lock more than maxImages buffers
I have no idea what is happening. Please help me.
You're passing a null argument to createCaptureSession inside startPreview. Make the backgroudHandler used earlier visible to that method (pass it as a parameter or initialize it as a class field) and use it there as well.
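A minimal sketch of that fix (names are illustrative): create the background handler once, keep it as a field, and pass it to every createCaptureSession call instead of null.
private HandlerThread mBackgroundThread;
private Handler mBackgroundHandler;

private void startBackgroundThread() {
    mBackgroundThread = new HandlerThread("CameraBackground");
    mBackgroundThread.start();
    mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}

// In startPreview(), replace the null argument:
// mCameraDevice.createCaptureSession(Arrays.asList(surface), stateCallback, mBackgroundHandler);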
You could try setting some delay before starting the capture (and after opening the camera). Something like:
new Handler().postDelayed(() -> {
//takePicture();
}, 500);
If you want, I've created a service that massively simplifies photo capture with the Android Camera2 API: https://github.com/hzitoun/android-camera2-secret-picture-taker . Usage is described in the readme file.
Hope that helped!
You're setting the AE mode to 'always flash', but then you don't run the precapture sequence to allow the camera device to meter for that flash; this will likely not work very well on any device, and on some devices you may end up with some default exposure value (bad).
If you want to fire the flash, you need to use a precapture sequence first (send a single request with AE_PRECAPTURE_TRIGGER set, wait for the PRECAPTURE AE_STATE to end, then issue the capture request), on non-LEGACY devices. If the device is LEGACY-level, then your current code should be OK for those.
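A simplified sketch of that sequence (it assumes the mPreviewBuilder, mPreviewSession, captureBuilder, captureListener, and backgroudHandler from the code above; production code also needs timeouts and a fuller AE state machine):
private boolean mWaitingForPrecapture = false;

private final CameraCaptureSession.CaptureCallback mPrecaptureWatcher =
        new CameraCaptureSession.CaptureCallback() {
    @Override
    public void onCaptureCompleted(CameraCaptureSession session,
            CaptureRequest request, TotalCaptureResult result) {
        Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
        // Wait until AE has left the PRECAPTURE state, then fire the real capture.
        if (mWaitingForPrecapture && aeState != null
                && aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
            mWaitingForPrecapture = false;
            try {
                session.capture(captureBuilder.build(), captureListener, backgroudHandler);
            } catch (CameraAccessException e) {
                e.printStackTrace();
            }
        }
    }
};

private void triggerPrecapture() throws CameraAccessException {
    mWaitingForPrecapture = true;
    // Send the trigger in a single request, then keep watching the repeating preview results.
    mPreviewBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
            CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
    mPreviewSession.capture(mPreviewBuilder.build(), mPrecaptureWatcher, backgroudHandler);
    mPreviewBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
            CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
    mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(),
            mPrecaptureWatcher, backgroudHandler);
}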
My application crashes on devices with no SD card, but works fine on devices that have one. When I debugged it, I found that the app crashes with the above error at:
mCamera.takePicture(null, null, jpegCallBack);
I googled a lot but didn't find any solution. I saw this link:
http://forums.androidcentral.com/motorola-droid-x/102987-camera-won-t-take-pictures-without-sd-card.html
So is it possible to capture images in a background service on a device with no SD card?
Please give me some clues.
Here are some methods of my hiddenCamera class:
@SuppressWarnings("deprecation")
private void startCapturingCall() {
final Boolean isSDPresent = android.os.Environment
.getExternalStorageState().equals(
android.os.Environment.MEDIA_MOUNTED);
if (mCamera != null) {
parameters = mCamera.getParameters();
if (FLASH_MODE == null || FLASH_MODE.isEmpty()) {
FLASH_MODE = "auto";
}
parameters.setFlashMode(FLASH_MODE);
pictureSize = getBiggesttPictureSize(parameters);
if (pictureSize != null)
parameters
.setPictureSize(pictureSize.width, pictureSize.height);
// set camera parameters
mCamera.setParameters(parameters);
mCamera.startPreview();
new Handler().postDelayed(new Runnable() {
@SuppressWarnings("deprecation")
@Override
public void run() {
if (isSDPresent) {
mCamera.takePicture(null, null, jpegCallBack);
} else {
Toast.makeText(getApplicationContext(),
"Please insert SD card", Toast.LENGTH_LONG).show();
}
}
}, 2000);
}
}
@SuppressWarnings("deprecation")
Camera.PictureCallback jpegCallBack = new Camera.PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
Boolean isSDPresent = android.os.Environment
.getExternalStorageState().equals(
android.os.Environment.MEDIA_MOUNTED);
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss",
Locale.getDefault()).format(new Date());
// checking for SD card
if (isSDPresent) {
mediaStorageDir = new File(Environment
.getExternalStorageDirectory().getAbsolutePath(),
IMAGE_DIRECTORY_NAME);
mediaFile = new File(mediaStorageDir.getPath() + File.separator
+ "IMG_" + timeStamp + ".jpg");
// Create the storage directory if it does not exist
if (!mediaStorageDir.exists()) {
if (!mediaStorageDir.mkdirs()) {
}
}
try {
Bitmap userImage = BitmapFactory.decodeByteArray(data, 0,
data.length);
// set file out stream
FileOutputStream out = new FileOutputStream(mediaFile);
// set compress format quality and stream
userImage.compress(Bitmap.CompressFormat.JPEG, 50, out);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
userImage.compress(Bitmap.CompressFormat.JPEG, 50, baos);
mByteArray = baos.toByteArray();
try {
out.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} else {
Toast.makeText(getApplicationContext(),
"Please insert SD card !", Toast.LENGTH_LONG).show();
}
if (mediaStorageDir.exists()) {
getPathOfCapturedImage();
}
HiddenCamera.this.finish();
CameraService.IS_ACTIVITY_FINISHED = true;
}
};
Also, isSDPresent always returns true for me.
Please give me your suggestions on this; I have been really stuck at this point for the last 2-3 days.
This is also a device issue: on a Samsung Grand my code works fine even without an SD card, but on a Moto E my application crashes. Camera settings play an important role in it.
Thanks
Finally I am done with this. Though I got busy with some other task, today I got time to post my answer on this topic. As this topic is very general, I am posting this answer in order to help others who might have thought of this functionality. I did this using SurfaceTexture, but it will only work for versions greater than 4; for versions less than 4 you need to use SurfaceView.
So here is my code:
public class SurfaceTextureActivity extends Activity implements
SurfaceTextureListener {
private Parameters mParameters;
private Camera.Size mPictureSize;
private static final String sIMAGE_DIRECTORY_NAME = "HiddenCapturedPics";
private byte[] mByteArray;
private Camera mCamera;
private TextureView mTextureView;
private File mMediaFile, mMediaStorageDir = null;
private String mEncodedImage, mImageName, mFinalResponse,
mFlashMode;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mTextureView = new TextureView(this);
setContentView(mTextureView);
if (checkCameraHardware(getApplicationContext())) {
mTextureView.setSurfaceTextureListener(this);
Bundle extras = getIntent().getExtras();
mFlashMode = extras.getString("FLASH");
} else {
Toast.makeText(getApplicationContext(),
"Your Device dosen't have a Camera !", Toast.LENGTH_LONG)
.show();
}
}
/** Check if this device has a camera */
private boolean checkCameraHardware(Context context) {
if (context.getPackageManager().hasSystemFeature(
PackageManager.FEATURE_CAMERA)) {
return true;
} else {
return false;
}
}
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width,
int height) {
mCamera = Camera.open();
mTextureView.setLayoutParams(new FrameLayout.LayoutParams(0, 0,
Gravity.CENTER));
try {
mCamera.setPreviewTexture(surface);
} catch (IOException t) {
}
mCamera.startPreview();
startCapturingCall();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width,
int height) {
// Ignored, the Camera does all the work for us
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
mCamera.stopPreview();
mCamera.release();
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
Toast.makeText(getApplicationContext(), "Dfg", Toast.LENGTH_SHORT)
.show();
// Update your view here!
}
Camera.PictureCallback jpegCallBack = new Camera.PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
Boolean isSDPresent = android.os.Environment
.getExternalStorageState().equals(
android.os.Environment.MEDIA_MOUNTED);
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss",
Locale.getDefault()).format(new Date());
// checking for SD card
if (isSDPresent) {
mMediaStorageDir = new File(Environment
.getExternalStorageDirectory().getAbsolutePath(),
sIMAGE_DIRECTORY_NAME);
mMediaFile = new File(mMediaStorageDir.getPath()
+ File.separator + "IMG_" + timeStamp + ".jpg");
if (!mMediaStorageDir.exists()) {
if (!mMediaStorageDir.mkdirs()) {
}
}
try {
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 8;
Bitmap userImage = BitmapFactory.decodeByteArray(data, 0,
data.length, options);
FileOutputStream out = new FileOutputStream(mMediaFile);
userImage.compress(Bitmap.CompressFormat.JPEG, 50, out);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
userImage.compress(Bitmap.CompressFormat.JPEG, 50, baos);
mByteArray = baos.toByteArray();
try {
out.close();
} catch (IOException e) {
e.printStackTrace();
}
} catch (FileNotFoundException e) {
e.printStackTrace();
}
} else {
Toast.makeText(getApplicationContext(),
"Please insert SD card !", Toast.LENGTH_LONG).show();
}
if (mMediaStorageDir.exists()) {
getPathOfCapturedImage();
}
SurfaceTextureActivity.this.finish();
CameraService.IS_ACTIVITY_FINISHED = true;
}
};
private void startCapturingCall() {
if (mCamera != null) {
mParameters = mCamera.getParameters();
if (mFlashMode == null || mFlashMode.isEmpty()) {
mFlashMode = "auto";
}
mParameters.setFlashMode(mFlashMode);
mPictureSize = getBiggesttPictureSize(mParameters);
if (mPictureSize != null)
mParameters.setPictureSize(mPictureSize.width,
mPictureSize.height);
mCamera.setParameters(mParameters);
mCamera.startPreview();
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
if (mCamera != null) {
mCamera.startPreview();
mCamera.takePicture(null, null, jpegCallBack);
} else {
mCamera = getCameraInstance();
mCamera.startPreview();
mCamera.takePicture(null, null, jpegCallBack);
}
}
}, 2000);
}
}
private Camera.Size getBiggesttPictureSize(Camera.Parameters parameters) {
Camera.Size result = null;
for (Camera.Size size : parameters.getSupportedPictureSizes()) {
if (result == null) {
result = size;
} else {
int resultArea = result.width * result.height;
int newArea = size.width * size.height;
if (newArea > resultArea) {
result = size;
}
}
}
return (result);
}
public static Camera getCameraInstance() {
Camera c = null;
try {
c = Camera.open(); // attempt to get a Camera instance
} catch (Exception e) {
// Camera is not available (in use or does not exist)
}
return c; // returns null if camera is unavailable
}
}
Hope this will help others.
Here are the links for reference:
Example of Camera preview using SurfaceTexture in Android
Camera.takePicture throws RunTimeException
Cheers!