I am very new to OpenCV and trying it for the first time. I ran Tutorial3CameraControl and the camera preview was fine: portrait and full screen. When I integrate the same code into my project, the camera somehow opens in landscape mode and is not full screen. I know this question has been asked quite a few times, but none of the solutions were helpful. I want my application to open org.opencv.JavaCameraView in full-screen portrait mode. Could someone please help me?
Below is the activity:
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
// OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
}
public void onCameraViewStopped() {
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
return inputFrame.rgba();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
List<String> effects = mOpenCvCameraView.getEffectList();
if (effects == null) {
Log.e(TAG, "Color effects are not supported by device!");
return true;
}
mColorEffectsMenu = menu.addSubMenu("Color Effect");
mEffectMenuItems = new MenuItem[effects.size()];
int idx = 0;
ListIterator<String> effectItr = effects.listIterator();
while(effectItr.hasNext()) {
String element = effectItr.next();
mEffectMenuItems[idx] = mColorEffectsMenu.add(1, idx, Menu.NONE, element);
idx++;
}
mResolutionMenu = menu.addSubMenu("Resolution");
mResolutionList = mOpenCvCameraView.getResolutionList();
mResolutionMenuItems = new MenuItem[mResolutionList.size()];
ListIterator<Size> resolutionItr = mResolutionList.listIterator();
idx = 0;
while(resolutionItr.hasNext()) {
Size element = resolutionItr.next();
mResolutionMenuItems[idx] = mResolutionMenu.add(2, idx, Menu.NONE,
Integer.valueOf(element.width).toString() + "x" + Integer.valueOf(element.height).toString());
idx++;
}
return true;
}
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
if (item.getGroupId() == 1)
{
mOpenCvCameraView.setEffect((String) item.getTitle());
Toast.makeText(this, mOpenCvCameraView.getEffect(), Toast.LENGTH_SHORT).show();
}
else if (item.getGroupId() == 2)
{
int id = item.getItemId();
Size resolution = mResolutionList.get(id);
mOpenCvCameraView.setResolution(resolution);
resolution = mOpenCvCameraView.getResolution();
String caption = Integer.valueOf(resolution.width).toString() + "x" + Integer.valueOf(resolution.height).toString();
Toast.makeText(this, caption, Toast.LENGTH_SHORT).show();
}
return true;
}
//
// @SuppressLint("SimpleDateFormat")
// @Override
// public boolean onTouch(View v, MotionEvent event) {
// Log.i(TAG,"onTouch event");
// SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss");
// String currentDateandTime = sdf.format(new Date());
// String fileName = Environment.getExternalStorageDirectory().getPath() +
// "/sample_picture_" + currentDateandTime + ".jpg";
// mOpenCvCameraView.takePicture(fileName);
// Toast.makeText(this, fileName + " saved", Toast.LENGTH_SHORT).show();
// return false;
// }
@Override
public void onClick(View v) {
Log.i(TAG,"onTouch event");
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss");
String currentDateandTime = sdf.format(new Date());
String fileName = Environment.getExternalStorageDirectory().getPath() +
"/sample_picture_" + currentDateandTime + ".jpg";
mOpenCvCameraView.takePicture(fileName);
Toast.makeText(this, fileName + " saved", Toast.LENGTH_SHORT).show();
}
}
This is the custom view that extends org.opencv.JavaCameraView.
public class Tutorial3View extends JavaCameraView implements PictureCallback {
private static final String TAG = "Sample::Tutorial3View";
private String mPictureFileName;
public Tutorial3View(Context context, AttributeSet attrs) {
super(context, attrs);
}
public List<String> getEffectList() {
return mCamera.getParameters().getSupportedColorEffects();
}
public boolean isEffectSupported() {
return (mCamera.getParameters().getColorEffect() != null);
}
public String getEffect() {
return mCamera.getParameters().getColorEffect();
}
public void setEffect(String effect) {
Camera.Parameters params = mCamera.getParameters();
params.setColorEffect(effect);
mCamera.setParameters(params);
}
public List<Size> getResolutionList() {
return mCamera.getParameters().getSupportedPreviewSizes();
}
public void setResolution(Size resolution) {
disconnectCamera();
mMaxHeight = resolution.height;
mMaxWidth = resolution.width;
connectCamera(getWidth(), getHeight());
}
public Size getResolution() {
return mCamera.getParameters().getPreviewSize();
}
public void takePicture(final String fileName) {
Log.i(TAG, "Taking picture");
this.mPictureFileName = fileName;
// Postview and jpeg are sent in the same buffers if the queue is not empty when performing a capture.
// Clear up buffers to avoid mCamera.takePicture to be stuck because of a memory issue
mCamera.setPreviewCallback(null);
// PictureCallback is implemented by the current class
mCamera.takePicture(null, null, this);
}
@Override
public void onPictureTaken(byte[] data, Camera camera) {
Log.i(TAG, "Saving a bitmap to file");
// The camera preview was automatically stopped. Start it again.
mCamera.startPreview();
mCamera.setPreviewCallback(this);
// Write the image in a file (in jpeg format)
try {
FileOutputStream fos = new FileOutputStream(mPictureFileName);
fos.write(data);
fos.close();
} catch (java.io.IOException e) {
Log.e("PictureDemo", "Exception in photoCallback", e);
}
}
}
Just check the AndroidManifest.xml in the sample: the activity's orientation is set with android:screenOrientation="landscape".
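If you want portrait, either change that attribute to android:screenOrientation="portrait" in your own manifest, or request it at runtime. A minimal sketch of the runtime approach, assuming the layout and view id names from the Tutorial3 sample (adjust them to your project); note that JavaCameraView may still deliver landscape-oriented frames, so the Mat may also need rotating in onCameraFrame:
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Request portrait and a full-screen window before inflating the layout.
    setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
    requestWindowFeature(Window.FEATURE_NO_TITLE);
    getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
            WindowManager.LayoutParams.FLAG_FULLSCREEN);
    setContentView(R.layout.tutorial3_surface_view); // assumed layout name from the sample
    mOpenCvCameraView = (Tutorial3View) findViewById(R.id.tutorial3_activity_java_surface_view); // assumed id
    mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
    mOpenCvCameraView.setCvCameraViewListener(this);
}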
Related
I want to use a service to take a photo secretly. Here is the code:
/** Takes a single photo on service start. */
public class PhotoTakingService extends Service {
static String TAG = "Khan";
private boolean isRunning = false;
@Override
public void onCreate() {
Log.d("shahjahan", "i am in photo taking service.");
super.onCreate();
Log.d(TAG, "the name of the for ground process is: " + getForgroundProcessName());
takePhoto(this);
}
@SuppressWarnings("deprecation")
private static void takePhoto(final Context context) {
final SurfaceView preview = new SurfaceView(context);
SurfaceHolder holder = preview.getHolder();
// deprecated setting, but required on Android versions prior to 3.0
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
holder.addCallback(new SurfaceHolder.Callback() {
@Override
//The preview must happen at or after this point or takePicture fails
public void surfaceCreated(SurfaceHolder holder) {
showMessage("Surface created");
Camera camera = null;
try {
camera = Camera.open();
showMessage("Opened camera");
try {
camera.setPreviewDisplay(holder);
} catch (IOException e) {
throw new RuntimeException(e);
}
camera.startPreview();
showMessage("Started preview");
camera.takePicture(null, null, new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
showMessage("Took picture");
File pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE);
if (pictureFile == null){
Log.d(TAG, "Error creating media file, check storage permissions: ");
return;
}
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(data);
fos.close();
} catch (FileNotFoundException e) {
Log.d(TAG, "File not found: " + e.getMessage());
} catch (IOException e) {
Log.d(TAG, "Error accessing file: " + e.getMessage());
}
camera.release();
}
});
} catch (Exception e) {
if (camera != null)
camera.release();
throw new RuntimeException(e);
}
}
@Override public void surfaceDestroyed(SurfaceHolder holder) {}
@Override public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {}
});
WindowManager wm = (WindowManager)context
.getSystemService(Context.WINDOW_SERVICE);
WindowManager.LayoutParams params = new WindowManager.LayoutParams(
1, 1, //Must be at least 1x1
WindowManager.LayoutParams.TYPE_SYSTEM_OVERLAY,
0,
//Don't know if this is a safe default
PixelFormat.UNKNOWN);
//Don't set the preview visibility to GONE or INVISIBLE
wm.addView(preview, params);
}
public static final int MEDIA_TYPE_IMAGE = 1;
public static final int MEDIA_TYPE_VIDEO = 2;
/** Create a file Uri for saving an image or video */
private static Uri getOutputMediaFileUri(int type){
return Uri.fromFile(getOutputMediaFile(type));
}
/** Create a File for saving an image or video */
private static File getOutputMediaFile(int type){
// To be safe, you should check that the SDCard is mounted
// using Environment.getExternalStorageState() before doing this.
File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES), "MyCameraApp");
// This location works best if you want the created images to be shared
// between applications and persist after your app has been uninstalled.
// Create the storage directory if it does not exist
if (! mediaStorageDir.exists()){
if (! mediaStorageDir.mkdirs()){
Log.d("MyCameraApp", "failed to create directory");
return null;
}
}
// Create a media file name
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
File mediaFile;
if (type == MEDIA_TYPE_IMAGE){
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"IMG_"+ timeStamp + ".jpg");
} else if(type == MEDIA_TYPE_VIDEO) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"VID_"+ timeStamp + ".mp4");
} else {
return null;
}
return mediaFile;
}
private static void showMessage(String message) {
Log.i("Camera", message);
}
#Override public IBinder onBind(Intent intent) { return null; }
public String getForgroundProcessName()
{
ActivityManager activityManager = (ActivityManager) this.getSystemService(Context.ACTIVITY_SERVICE);
List<ActivityManager.RunningAppProcessInfo> appProcesses = activityManager.getRunningAppProcesses();
for (ActivityManager.RunningAppProcessInfo appProcess : appProcesses) {
if (appProcess.importance == ActivityManager.RunningAppProcessInfo.IMPORTANCE_FOREGROUND) {
// Log.i("Foreground App", appProcess.processName);
// Toast.makeText(this, "the forground application is: " + appProcess.processName, Toast.LENGTH_LONG).show();
return appProcess.processName;
}
}
return "None";
}
}
However, I want to run takePhoto(this) at a regular interval in the background. I tried a Handler, but it is not working well:
Handler uiHandler = new Handler(Looper.getMainLooper());
Runnable runnable = new Runnable() {
@Override
public void run() {
// TODO Auto-generated method stub
int i = 0;
while(true)
{
Log.d(TAG,"I am in while.");
i++;
takePhoto(getApplicationContext());
try {
Thread.sleep(10000);
} catch (InterruptedException e) {
e.printStackTrace();
}
if ( i > 5 )
break;
}
}
};
uiHandler.post(runnable);
Using the above code freezes my app. Please guide me on how to implement this correctly.
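The freeze happens because the Runnable is posted to the main (UI) looper and then blocks it with a while loop plus Thread.sleep(), so the UI thread can never do anything else. A minimal sketch of a non-blocking alternative that reschedules itself with Handler.postDelayed (the 10-second interval and shot limit roughly mirror the loop above; takePhoto is the existing method from the service):
private final Handler uiHandler = new Handler(Looper.getMainLooper());
private int shotCount = 0;
private final Runnable photoRunnable = new Runnable() {
    @Override
    public void run() {
        Log.d(TAG, "taking scheduled photo #" + shotCount);
        takePhoto(getApplicationContext()); // existing method from the service above
        shotCount++;
        if (shotCount <= 5) {
            // Re-post instead of sleeping, so the UI thread is never blocked.
            uiHandler.postDelayed(this, 10000);
        }
    }
};
// Kick it off once, e.g. from onCreate() or onStartCommand():
// uiHandler.post(photoRunnable);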
I am implementing the android-gpuimage library in my code.
https://github.com/CyberAgent/android-gpuimage
I used the camera code from the sample project. Everything works fine, but whenever I switch the camera, the front camera opens in a small view to the side. I want the front camera to occupy the whole SurfaceView.
Here is my code:
public class CameraActivity extends Activity implements OnSeekBarChangeListener, OnClickListener {
private GPUImage mGPUImage;
private CameraHelper mCameraHelper;
private CameraLoader mCamera;
private GPUImageFilter mFilter;
private int cameraId;
private FilterAdjuster mFilterAdjuster;
static int camerastate=0;
@Override
public void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.camera);
((SeekBar) findViewById(R.id.seekBar)).setOnSeekBarChangeListener(this);
findViewById(R.id.filter).setOnClickListener(this);
findViewById(R.id.flipCamera).setOnClickListener(this);
findViewById(R.id.captureImage).setOnClickListener(this);
cameraId = CameraInfo.CAMERA_FACING_BACK;
mGPUImage = new GPUImage(this);
/* mGPUImage.setImage(mImage);
mGPUImage.setFilter(mGPUImageFilter);
mGPUImage.getBitmapWithFilterApplied(); */
mGPUImage.setGLSurfaceView((GLSurfaceView) findViewById(R.id.surfaceView));
mCameraHelper = new CameraHelper(this);
mCamera = new CameraLoader();
}
@Override
protected void onResume() {
super.onResume();
mCamera.onResume();
}
@Override
protected void onPause() {
mCamera.onPause();
super.onPause();
}
@Override
public void onClick(final View v) {
switch (v.getId()) {
case R.id.filter:
GPUImageFilterTools.showDialog(this, new OnGpuImageFilterChosenListener() {
@Override
public void onGpuImageFilterChosenListener(final GPUImageFilter filter) {
switchFilterTo(filter);
}
});
break;
case R.id.captureImage:
if (mCamera.mCameraInstance.getParameters().getFocusMode().equals(
Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
takePicture();
} else {
mCamera.mCameraInstance.autoFocus(new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(final boolean success, final Camera camera) {
takePicture();
}
});
}
break;
case R.id.flipCamera:
mCamera.switchCamera();
break;
}
}
private void takePicture() {
// TODO get a size that is about the size of the screen
Camera.Parameters params = mCamera.mCameraInstance.getParameters();
params.setRotation(90);
mCamera.mCameraInstance.setParameters(params);
for (Camera.Size size : params.getSupportedPictureSizes()) {
Log.i("ASDF", "Supported: " + size.width + "x" + size.height);
}
mCamera.mCameraInstance.takePicture(null, null,
new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, final Camera camera) {
final File pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE);
if (pictureFile == null) {
Log.d("ASDF",
"Error creating media file, check storage permissions");
return;
}
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(data);
fos.close();
} catch (FileNotFoundException e) {
Log.d("ASDF", "File not found: " + e.getMessage());
} catch (IOException e) {
Log.d("ASDF", "Error accessing file: " + e.getMessage());
}
data = null;
Bitmap bitmap = BitmapFactory.decodeFile(pictureFile.getAbsolutePath());
mGPUImage.setImage(pictureFile);
mGPUImage.setFilter(new GPUImageSepiaFilter());
final GLSurfaceView view = (GLSurfaceView) findViewById(R.id.surfaceView);
view.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
mGPUImage.saveToPictures(bitmap, "GPUImage",
System.currentTimeMillis() + ".jpg",
new OnPictureSavedListener() {
@Override
public void onPictureSaved(final Uri
uri) {
pictureFile.delete();
camera.startPreview();
view.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
}
});
Log.e("Activity", "GPUIMAGE " + mGPUImage.toString());
}
});
}
public static final int MEDIA_TYPE_IMAGE = 1;
public static final int MEDIA_TYPE_VIDEO = 2;
private static File getOutputMediaFile(final int type) {
// To be safe, you should check that the SDCard is mounted
// using Environment.getExternalStorageState() before doing this.
File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES), "MyCameraApp");
// This location works best if you want the created images to be shared
// between applications and persist after your app has been uninstalled.
// Create the storage directory if it does not exist
if (!mediaStorageDir.exists()) {
if (!mediaStorageDir.mkdirs()) {
Log.d("MyCameraApp", "failed to create directory");
return null;
}
}
// Create a media file name
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
File mediaFile;
if (type == MEDIA_TYPE_IMAGE) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"IMG_" + timeStamp + ".jpg");
} else if (type == MEDIA_TYPE_VIDEO) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"VID_" + timeStamp + ".mp4");
} else {
return null;
}
return mediaFile;
}
private void switchFilterTo(final GPUImageFilter filter) {
if (mFilter == null
|| (filter != null && !mFilter.getClass().equals(filter.getClass()))) {
mFilter = filter;
mGPUImage.setFilter(mFilter);
mFilterAdjuster = new FilterAdjuster(mFilter);
}
}
@Override
public void onProgressChanged(final SeekBar seekBar, final int progress,
final boolean fromUser) {
if (mFilterAdjuster != null) {
mFilterAdjuster.adjust(progress);
}
}
@Override
public void onStartTrackingTouch(final SeekBar seekBar) {
}
@Override
public void onStopTrackingTouch(final SeekBar seekBar) {
}
private class CameraLoader {
int mCurrentCameraId = 0;
Camera mCameraInstance;
public void onResume() {
setUpCamera(mCurrentCameraId);
}
public void onPause() {
releaseCamera();
}
public void switchCamera() {
releaseCamera();
mCurrentCameraId = (mCurrentCameraId + 1) % mCameraHelper.getNumberOfCameras();
setUpCamera(mCurrentCameraId);
}
private void setUpCamera(int id) {
Log.e("Activity", "ID1 " + id);
mCameraInstance = getCameraInstance(id);
Camera.Parameters parameters = mCameraInstance.getParameters();
// TODO adjust by getting supportedPreviewSizes and then choosing
// the best one for screen size (best fill screen)
if (parameters.getSupportedFocusModes().contains(
Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
parameters.setFocusMode(Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
}
parameters.setPictureSize(640,480);
mCameraInstance.setParameters(parameters);
int orientation = mCameraHelper.getCameraDisplayOrientation(
CameraActivity.this, mCurrentCameraId);
CameraInfo2 cameraInfo = new CameraInfo2();
mCameraHelper.getCameraInfo(mCurrentCameraId, cameraInfo);
boolean flipHorizontal = cameraInfo.facing == CameraInfo.CAMERA_FACING_FRONT;
mGPUImage.setUpCamera(mCameraInstance, orientation, flipHorizontal, false);
}
/** A safe way to get an instance of the Camera object. */
private Camera getCameraInstance(int id) {
Camera c = null;
Log.e("Activity","Camera Instance " + id);
try {
c = mCameraHelper.openCamera(id);
} catch (Exception e) {
e.printStackTrace();
}
return c;
}
private void releaseCamera() {
mCameraInstance.setPreviewCallback(null);
mCameraInstance.stopPreview();
mCameraInstance.release();
mCameraInstance= null;
}
}
}
EDIT:
Screenshot of my camera: when I press the switch-camera button, the back camera view stops and the front camera view starts in the corner.
I tested it on another device and the switch-camera feature works perfectly there, but it does not work correctly on some devices.
This will surely fix your issue:
**How to switch**
public void switchCamera() {
releaseCamera();
mCameraId = (mCameraId + 1) % Camera.getNumberOfCameras();
mGpuImageView.reInitLayout(); // the trick
prepareCamera();
}
**releaseCamera**
private void releaseCamera() {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
mCamera = null;
}
}
**prepareCamera**
your camera prepare method
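prepareCamera is whatever you already use to open and start the camera. A minimal sketch of what it might look like, reusing the mCamera and mCameraId fields from the snippet above and handing the camera to GPUImage the same way the question's setUpCamera does (the fixed 90-degree rotation is only an assumption; use your CameraHelper's value if you have one):
private void prepareCamera() {
    mCamera = Camera.open(mCameraId);
    Camera.Parameters parameters = mCamera.getParameters();
    if (parameters.getSupportedFocusModes()
            .contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
        parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
    }
    mCamera.setParameters(parameters);
    // Flip horizontally for the front camera so the preview is not mirrored the wrong way.
    Camera.CameraInfo info = new Camera.CameraInfo();
    Camera.getCameraInfo(mCameraId, info);
    boolean flipHorizontal = info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT;
    mGPUImage.setUpCamera(mCamera, 90, flipHorizontal, false);
}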
**mGpuImageView.reInitLayout**
//Put this inside your GPUImageView.java class
public void reInitLayout() {
mGLSurfaceView.requestLayout();
mGPUImage.deleteImage();
}
Let me know if you face any further issues.
android:layout_width="wrap_content"
android:layout_height="wrap_content"
Set these to fill_parent and check.
Could you check this and paste a screenshot?
<ImageView
android:id="#+id/img_switch_camera"
android:layout_width="300dp"
android:layout_height="300dp"
android:layout_gravity="center"
android:layout_weight="0.08"
android:src="#drawable/ic_switch_camera" />
It is supposed to change the size to 300x300.
My application crashes on devices with no SD card in them, but works fine on devices that have an SD card. When I debugged it, I found that the app crashes with the above error on
mCamera.takePicture(null, null, jpegCallBack);
I googled a lot but did not find any solution. I saw this link:
http://forums.androidcentral.com/motorola-droid-x/102987-camera-won-t-take-pictures-without-sd-card.html
So is it possible to capture images in a background service on a device with no SD card in it?
Please provide me some clues.
Here are some methods of my HiddenCamera class:
@SuppressWarnings("deprecation")
private void startCapturingCall() {
final Boolean isSDPresent = android.os.Environment
.getExternalStorageState().equals(
android.os.Environment.MEDIA_MOUNTED);
if (mCamera != null) {
parameters = mCamera.getParameters();
if (FLASH_MODE == null || FLASH_MODE.isEmpty()) {
FLASH_MODE = "auto";
}
parameters.setFlashMode(FLASH_MODE);
pictureSize = getBiggesttPictureSize(parameters);
if (pictureSize != null)
parameters
.setPictureSize(pictureSize.width, pictureSize.height);
// set camera parameters
mCamera.setParameters(parameters);
mCamera.startPreview();
new Handler().postDelayed(new Runnable() {
@SuppressWarnings("deprecation")
@Override
public void run() {
if (isSDPresent) {
mCamera.takePicture(null, null, jpegCallBack);
} else {
Toast.makeText(getApplicationContext(),
"Please Insert SD card", 1000).show();
}
}
}, 2000);
}
}
@SuppressWarnings("deprecation")
Camera.PictureCallback jpegCallBack = new Camera.PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
Boolean isSDPresent = android.os.Environment
.getExternalStorageState().equals(
android.os.Environment.MEDIA_MOUNTED);
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss",
Locale.getDefault()).format(new Date());
// checking for SD card
if (isSDPresent) {
mediaStorageDir = new File(Environment
.getExternalStorageDirectory().getAbsolutePath(),
IMAGE_DIRECTORY_NAME);
mediaFile = new File(mediaStorageDir.getPath() + File.separator
+ "IMG_" + timeStamp + ".jpg");
// Create the storage directory if it does not exist
if (!mediaStorageDir.exists()) {
if (!mediaStorageDir.mkdirs()) {
}
}
try {
Bitmap userImage = BitmapFactory.decodeByteArray(data, 0,
data.length);
// set file out stream
FileOutputStream out = new FileOutputStream(mediaFile);
// set compress format quality and stream
userImage.compress(Bitmap.CompressFormat.JPEG, 50, out);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
userImage.compress(Bitmap.CompressFormat.JPEG, 50, baos);
mByteArray = baos.toByteArray();
try {
out.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} else {
Toast.makeText(getApplicationContext(),
"Please insert SD card !", Toast.LENGTH_LONG).show();
}
if (mediaStorageDir.exists()) {
getPathOfCapturedImage();
}
HiddenCamera.this.finish();
CameraService.IS_ACTIVITY_FINISHED = true;
}
};
Also, isSDPresent always returns true.
Please give me your suggestions on this. I have been really stuck at this point for the last 2-3 days.
This is a device issue too: on a Samsung Grand my code works fine even without an SD card in it, but on a Moto E my application crashes. The camera settings play an important role in it.
Thanks
Finally, I am done with this. I got busy with some other tasks, but today I have time to post my answer on this topic. As this topic is quite general, I am posting this answer in order to help others who might want this functionality. I did it using SurfaceTexture, but that only works on versions greater than 4; for versions less than 4 you need to use SurfaceView.
So here is my code:
public class SurfaceTextureActivity extends Activity implements
SurfaceTextureListener {
private Parameters mParameters;
private Camera.Size mPictureSize;
private static final String sIMAGE_DIRECTORY_NAME = "HiddenCapturedPics";
private byte[] mByteArray;
private Camera mCamera;
private TextureView mTextureView;
private File mMediaFile, mMediaStorageDir = null;
private String mEncodedImage, mImageName, mFinalResponse,
mFlashMode;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mTextureView = new TextureView(this);
setContentView(mTextureView);
if (checkCameraHardware(getApplicationContext())) {
mTextureView.setSurfaceTextureListener(this);
Bundle extras = getIntent().getExtras();
mFlashMode = extras.getString("FLASH");
} else {
Toast.makeText(getApplicationContext(),
"Your Device dosen't have a Camera !", Toast.LENGTH_LONG)
.show();
}
}
/** Check if this device has a camera */
private boolean checkCameraHardware(Context context) {
if (context.getPackageManager().hasSystemFeature(
PackageManager.FEATURE_CAMERA)) {
return true;
} else {
return false;
}
}
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width,
int height) {
mCamera = Camera.open();
mTextureView.setLayoutParams(new FrameLayout.LayoutParams(0, 0,
Gravity.CENTER));
try {
mCamera.setPreviewTexture(surface);
} catch (IOException t) {
}
mCamera.startPreview();
startCapturingCall();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width,
int height) {
// Ignored, the Camera does all the work for us
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
mCamera.stopPreview();
mCamera.release();
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
Toast.makeText(getApplicationContext(), "Dfg", Toast.LENGTH_SHORT)
.show();
// Update your view here!
}
Camera.PictureCallback jpegCallBack = new Camera.PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
Boolean isSDPresent = android.os.Environment
.getExternalStorageState().equals(
android.os.Environment.MEDIA_MOUNTED);
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss",
Locale.getDefault()).format(new Date());
// checking for SD card
if (isSDPresent) {
mMediaStorageDir = new File(Environment
.getExternalStorageDirectory().getAbsolutePath(),
sIMAGE_DIRECTORY_NAME);
mMediaFile = new File(mMediaStorageDir.getPath()
+ File.separator + "IMG_" + timeStamp + ".jpg");
if (!mMediaStorageDir.exists()) {
if (!mMediaStorageDir.mkdirs()) {
}
}
try {
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 8;
Bitmap userImage = BitmapFactory.decodeByteArray(data, 0,
data.length, options);
FileOutputStream out = new FileOutputStream(mMediaFile);
userImage.compress(Bitmap.CompressFormat.JPEG, 50, out);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
userImage.compress(Bitmap.CompressFormat.JPEG, 50, baos);
mByteArray = baos.toByteArray();
try {
out.close();
} catch (IOException e) {
e.printStackTrace();
}
} catch (FileNotFoundException e) {
e.printStackTrace();
}
} else {
Toast.makeText(getApplicationContext(),
"Please insert SD card !", Toast.LENGTH_LONG).show();
}
if (mMediaStorageDir.exists()) {
getPathOfCapturedImage();
}
SurfaceTextureActivity.this.finish();
CameraService.IS_ACTIVITY_FINISHED = true;
}
};
private void startCapturingCall() {
if (mCamera != null) {
mParameters = mCamera.getParameters();
if (mFlashMode == null || mFlashMode.isEmpty()) {
mFlashMode = "auto";
}
mParameters.setFlashMode(mFlashMode);
mPictureSize = getBiggesttPictureSize(mParameters);
if (mPictureSize != null)
mParameters.setPictureSize(mPictureSize.width,
mPictureSize.height);
mCamera.setParameters(mParameters);
mCamera.startPreview();
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
if (mCamera != null) {
mCamera.startPreview();
mCamera.takePicture(null, null, jpegCallBack);
} else {
mCamera = getCameraInstance();
mCamera.startPreview();
mCamera.takePicture(null, null, jpegCallBack);
}
}
}, 2000);
}
}
private Camera.Size getBiggesttPictureSize(Camera.Parameters parameters) {
Camera.Size result = null;
for (Camera.Size size : parameters.getSupportedPictureSizes()) {
if (result == null) {
result = size;
} else {
int resultArea = result.width * result.height;
int newArea = size.width * size.height;
if (newArea > resultArea) {
result = size;
}
}
}
return (result);
}
public static Camera getCameraInstance() {
Camera c = null;
try {
c = Camera.open(); // attempt to get a Camera instance
} catch (Exception e) {
// Camera is not available (in use or does not exist)
}
return c; // returns null if camera is unavailable
}
}
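Note that the code above still writes to external storage, which is exactly what fails on devices without an SD card. A minimal sketch of saving the JPEG to the app's private internal storage instead (getFilesDir() is always available and needs no SD card; the directory and file names are just examples):
private File saveToInternalStorage(Context context, byte[] jpegData) {
    // Internal storage works even on devices without an SD card.
    File dir = new File(context.getFilesDir(), "HiddenCapturedPics");
    if (!dir.exists() && !dir.mkdirs()) {
        Log.e("HiddenCamera", "failed to create internal directory");
        return null;
    }
    String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss", Locale.getDefault()).format(new Date());
    File file = new File(dir, "IMG_" + timeStamp + ".jpg");
    try {
        FileOutputStream fos = new FileOutputStream(file);
        fos.write(jpegData);
        fos.close();
        return file;
    } catch (IOException e) {
        Log.e("HiddenCamera", "could not write picture", e);
        return null;
    }
}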
Hope this will help others.
Here are the links for reference:
Example of Camera preview using SurfaceTexture in Android
Camera.takePicture throws RunTimeException
Cheers!
Hi, I am working on an Android application to detect page numbers using the camera preview.
After I receive a frame from onPreviewFrame, I do real-time image processing with OpenCV to find the page number's position. Now my question is: how can I draw a rectangle on the SurfaceView?
Use this code
PreviewDemo.java
public class PreviewDemo extends Activity implements OnClickListener {
private SurfaceView preview = null;
private SurfaceHolder previewHolder = null;
private Camera camera = null;
private boolean inPreview = false;
ImageView image;
Bitmap bmp, itembmp;
static Bitmap mutableBitmap;
PointF start = new PointF();
PointF mid = new PointF();
float oldDist = 1f;
File imageFileName = null;
File imageFileFolder = null;
private MediaScannerConnection msConn;
Display d;
int screenhgt, screenwdh;
ProgressDialog dialog;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.preview);
image = (ImageView) findViewById(R.id.image);
preview = (SurfaceView) findViewById(R.id.surface);
previewHolder = preview.getHolder();
previewHolder.addCallback(surfaceCallback);
previewHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
previewHolder.setFixedSize(getWindow().getWindowManager()
.getDefaultDisplay().getWidth(), getWindow().getWindowManager()
.getDefaultDisplay().getHeight());
}
@Override
public void onResume() {
super.onResume();
camera = Camera.open();
}
@Override
public void onPause() {
if (inPreview) {
camera.stopPreview();
}
camera.release();
camera = null;
inPreview = false;
super.onPause();
}
private Camera.Size getBestPreviewSize(int width, int height, Camera.Parameters parameters) {
Camera.Size result = null;
for (Camera.Size size: parameters.getSupportedPreviewSizes()) {
if (size.width <= width && size.height <= height) {
if (result == null) {
result = size;
} else {
int resultArea = result.width * result.height;
int newArea = size.width * size.height;
if (newArea > resultArea) {
result = size;
}
}
}
}
return (result);
}
SurfaceHolder.Callback surfaceCallback = new SurfaceHolder.Callback() {
public void surfaceCreated(SurfaceHolder holder) {
try {
camera.setPreviewDisplay(previewHolder);
} catch (Throwable t) {
Log.e("PreviewDemo-surfaceCallback",
"Exception in setPreviewDisplay()", t);
Toast.makeText(PreviewDemo.this, t.getMessage(), Toast.LENGTH_LONG)
.show();
}
}
public void surfaceChanged(SurfaceHolder holder,
int format, int width,
int height) {
Camera.Parameters parameters = camera.getParameters();
Camera.Size size = getBestPreviewSize(width, height,
parameters);
if (size != null) {
parameters.setPreviewSize(size.width, size.height);
camera.setParameters(parameters);
camera.startPreview();
inPreview = true;
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// no-op
}
};
Camera.PictureCallback photoCallback = new Camera.PictureCallback() {
public void onPictureTaken(final byte[] data, final Camera camera) {
dialog = ProgressDialog.show(PreviewDemo.this, "", "Saving Photo");
new Thread() {
public void run() {
try {
Thread.sleep(1000);
} catch (Exception ex) {}
onPictureTake(data, camera);
}
}.start();
}
};
public void onPictureTake(byte[] data, Camera camera) {
bmp = BitmapFactory.decodeByteArray(data, 0, data.length);
mutableBitmap = bmp.copy(Bitmap.Config.ARGB_8888, true);
savePhoto(mutableBitmap);
dialog.dismiss();
}
class SavePhotoTask extends AsyncTask<byte[], String, String> {
@Override
protected String doInBackground(byte[]...jpeg) {
File photo = new File(Environment.getExternalStorageDirectory(), "photo.jpg");
if (photo.exists()) {
photo.delete();
}
try {
FileOutputStream fos = new FileOutputStream(photo.getPath());
fos.write(jpeg[0]);
fos.close();
} catch (java.io.IOException e) {
Log.e("PictureDemo", "Exception in photoCallback", e);
}
return (null);
}
}
public void savePhoto(Bitmap bmp) {
imageFileFolder = new File(Environment.getExternalStorageDirectory(), "Rotate");
imageFileFolder.mkdir();
FileOutputStream out = null;
Calendar c = Calendar.getInstance();
String date = fromInt(c.get(Calendar.MONTH)) + fromInt(c.get(Calendar.DAY_OF_MONTH)) + fromInt(c.get(Calendar.YEAR)) + fromInt(c.get(Calendar.HOUR_OF_DAY)) + fromInt(c.get(Calendar.MINUTE)) + fromInt(c.get(Calendar.SECOND));
imageFileName = new File(imageFileFolder, date.toString() + ".jpg");
try {
out = new FileOutputStream(imageFileName);
bmp.compress(Bitmap.CompressFormat.JPEG, 100, out);
out.flush();
out.close();
scanPhoto(imageFileName.toString());
out = null;
} catch (Exception e) {
e.printStackTrace();
}
}
public String fromInt(int val) {
return String.valueOf(val);
}
public void scanPhoto(final String imageFileName) {
msConn = new MediaScannerConnection(PreviewDemo.this, new MediaScannerConnectionClient() {
public void onMediaScannerConnected() {
msConn.scanFile(imageFileName, null);
Log.i("msClient obj in Photo Utility", "connection established");
}
public void onScanCompleted(String path, Uri uri) {
msConn.disconnect();
Log.i("msClient obj in Photo Utility", "scan completed");
}
});
msConn.connect();
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_MENU && event.getRepeatCount() == 0) {
onBack();
}
return super.onKeyDown(keyCode, event);
}
public void onBack() {
Log.e("onBack :", "yes");
camera.takePicture(null, null, photoCallback);
inPreview = false;
}
@Override
public void onClick(View v) {
}
}
Preview.xml
<RelativeLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent">
<android.view.SurfaceView
android:id="#+id/surface"
android:layout_width="fill_parent"
android:layout_height="fill_parent" />
</RelativeLayout>
Also add the required permissions (CAMERA and WRITE_EXTERNAL_STORAGE) to the manifest.
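Note that PreviewDemo above only sets up a full-screen preview and capture; to actually draw a rectangle over the preview (the original question), a common approach is to stack a transparent custom View on top of the SurfaceView inside the same RelativeLayout and draw on its Canvas. A minimal sketch (the class name and setRect method are made up for illustration; feed it the rectangle computed from the OpenCV result in onPreviewFrame, mapped to view coordinates):
public class RectOverlayView extends View {
    private final Paint paint = new Paint();
    private Rect rect; // detected page-number position, in view coordinates
    public RectOverlayView(Context context, AttributeSet attrs) {
        super(context, attrs);
        paint.setColor(Color.GREEN);
        paint.setStyle(Paint.Style.STROKE);
        paint.setStrokeWidth(4f);
    }
    /** Call from the UI thread whenever detection produces a new rectangle. */
    public void setRect(Rect newRect) {
        rect = newRect;
        invalidate(); // triggers onDraw
    }
    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        if (rect != null) {
            canvas.drawRect(rect, paint);
        }
    }
}
Declare it after the SurfaceView in Preview.xml so it is drawn on top of the preview.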
Following the documentation from developer.android.com,
I wrote a short Activity to take a picture from within the app.
I sometimes encounter this error:
02-23 17:37:06.323: E/IMemory(5003): binder=0x3c010388 transaction failed fd=-2147483647, size=0, err=-2147483646 (Unknown error: 2147483646)
02-23 17:37:06.328: E/IMemory(5003): cannot dup fd=-2147483647, size=0, err=-2147483646 (Bad file number)
02-23 17:37:06.328: E/IMemory(5003): cannot map BpMemoryHeap (binder=0x3c010388), size=0, fd=-1 (Bad file number)
Can you help me understand it?
Does setting the picture size to the smallest value retrieved from getSupportedPictureSizes() require less memory at capture time, or is it applied later?
For information, here is my activity (just a concatenation of the code examples from this page). Before that code, I used a very simplistic code with a preview and takePicture.
public class CatchImage extends Activity {
private boolean fgDebugLocal = true;
private String tagLocal = "CatchImage ";
private Button btTake, btRetour;
private Intent intent;
private String refPhoto;
private String strPath = "";
private String strFileName = "";
private Context context;
private CameraPreview mPreview;
private FrameLayout preview;
private Camera cameraCatchImage;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState){
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
context = getApplicationContext();
/**
* Set full screen
* Used in Landscape
*/
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
Baseline.tepvLogger.stat(tagLocal, "init ");
setContentView(R.layout.camera);
/**
* get the id of picture
*/
intent = getIntent();
refPhoto = intent.getStringExtra("refPhoto");
strPath = intent.getStringExtra("strSdInternalPath");
StringBuilder sb = new StringBuilder();
sb.append(strPath);
sb.append(File.separatorChar);
sb.append(Params.PATH_REP_PHOTO);
sb.append(File.separatorChar);
sb.append(Params.PHOTO_FILE_NAME);
sb.append(refPhoto);
sb.append(Params.PHOTO_FILE_EXT_JPG);
strFileName = sb.toString();
if (Params.tagFgDebug && fgDebugLocal){Log.i(Params.tagGen, tagLocal + "strFileName = " + strFileName);};
btTake = (Button) findViewById(R.id.btStatCatchPictureTake);
btRetour = (Button) findViewById(R.id.btStatCatchPictureRetour);
btTake.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (Params.tagFgDebug && fgDebugLocal){Log.i(Params.tagGen, tagLocal + "btTake");};
mPreview.mCamera.takePicture(shutterCallback, rawCallback, jpegCallback);
}
});
btRetour.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (Params.tagFgDebug && fgDebugLocal){Log.i(Params.tagGen, tagLocal + "btRetour ");};
Intent intent = new Intent(CatchImage.this, StationnementPhoto.class);
startActivity(intent);
finish();
}
});
}
@Override
public void onResume() {
super.onResume();
if (!checkCameraHardware(context)) {
afficheAlertDlg(getResources().getString(R.string.dlg_titre_alert), getResources().getString(R.string.catch_image_no_camera), getResources().getDrawable(R.drawable.ic_alert));
} else {
if (Camera.getNumberOfCameras()>1) {
new TePVException(this.getClass().getName(), "checkCameraHardware", "More than one camera detected");
}
if (safeCameraOpen(0)) {
if (Params.tagFgDebug && fgDebugLocal){Log.i(Params.tagGen, tagLocal + "safeCameraOpen(0) true : creating preview");};
btTake.setEnabled(true);
btTake.setBackgroundResource(R.drawable.bt_border_selector);
mPreview = new CameraPreview(context, cameraCatchImage);
preview = (FrameLayout) findViewById(R.id.flStatCatchPreview);
preview.addView(mPreview);
} else {
btTake.setEnabled(false);
btTake.setBackgroundResource(R.drawable.bt_border_disable);
}
}
}
@Override
public void onPause() {
super.onPause();
releaseCameraAndPreview(); // release the camera immediately on pause event
}
private boolean safeCameraOpen(int id) {
boolean qOpened = false;
try {
releaseCameraAndPreview();
cameraCatchImage = Camera.open(id);
qOpened = (cameraCatchImage != null);
} catch (Exception e) {
if (Params.tagFgDebug && fgDebugLocal){Log.i(Params.tagGen, tagLocal + "failed to open Camera");};
e.printStackTrace();
}
return qOpened;
}
private void releaseCameraAndPreview() {
if (mPreview!=null) mPreview.setCamera(null);
if (cameraCatchImage != null) {
cameraCatchImage.release();
cameraCatchImage = null;
}
}
/** Check if this device has a camera */
private boolean checkCameraHardware(Context context) {
if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA)){
// this device has a camera
return true;
} else {
// no camera on this device
return false;
}
}
ShutterCallback shutterCallback = new ShutterCallback() {
public void onShutter() {
if (Params.tagFgDebug && fgDebugLocal){Log.i(Params.tagGen, tagLocal + "shutterCallback ");};
}
};
/** Handles data for raw picture */
PictureCallback rawCallback = new PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
if (Params.tagFgDebug && fgDebugLocal){Log.i(Params.tagGen, tagLocal + "rawCallback ");};
}
};
private PictureCallback jpegCallback = new PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
if (Params.tagFgDebug && fgDebugLocal){Log.i(Params.tagGen, tagLocal + "jpegCallback ");};
FileOutputStream outStream = null;
try {
//String strFileNameComplet = strPath + File.separatorChar + strFileName + refPhoto + Params.PHOTO_FILE_EXT_JPG;
if (Params.tagFgDebug && fgDebugLocal){Log.i(Params.tagGen, tagLocal + "strFileName = " + strFileName);};
outStream = new FileOutputStream(strFileName);
outStream.write(data);
outStream.close();
File fileVerif = new File(strFileName);
if (fileVerif.exists()){
if (Params.tagFgDebug && fgDebugLocal){Log.i(Params.tagGen, tagLocal + "Photo stockée ");};
Intent intent = new Intent(CatchImage.this, StationnementPhoto.class);
startActivity(intent);
finish();
}else {
new TePVException("CatchImage", "PictureCallback", "Pb stockage image = " + strFileName);
Toast.makeText(context, "Erreur enregistrement de la photo, essayez à nouveau", Toast.LENGTH_LONG).show();
}
} catch (FileNotFoundException e) {
new TePVException("CatchImage", "PictureCallback", "FileNotFoundException = " + e.getMessage());
} catch (IOException e) {
new TePVException("CatchImage", "PictureCallback", "IOException = " + e.getMessage());
} finally {
}
}
};
/** A basic Camera preview class */
public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback {
private SurfaceHolder mHolder;
private Camera mCamera;
public CameraPreview(Context context, Camera camera) {
super(context);
mCamera = camera;
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = getHolder();
mHolder.addCallback(this);
// deprecated setting, but required on Android versions prior to 3.0
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void setCamera(Camera object) {
mCamera = object;
}
public Camera getCamera() {
return mCamera;
}
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, now tell the camera where to draw the preview.
try {
mCamera.setPreviewDisplay(holder);
mCamera.startPreview();
} catch (IOException e) {
if (Params.tagFgDebug && fgDebugLocal){Log.d(tagLocal, "Error setting camera preview: " + e.getMessage());}
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// empty. Take care of releasing the Camera preview in your activity.
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
// If your preview can change or rotate, take care of those events here.
// Make sure to stop the preview before resizing or reformatting it.
if (mHolder.getSurface() == null){
// preview surface does not exist
return;
}
// stop preview before making changes
try {
mCamera.stopPreview();
} catch (Exception e){
// ignore: tried to stop a non-existent preview
}
// set preview size and make any resize, rotate or
// reformatting changes here
// start preview with new settings
try {
mCamera.setPreviewDisplay(mHolder);
mCamera.startPreview();
} catch (Exception e){
if (Params.tagFgDebug && fgDebugLocal){Log.d(tagLocal, "Error starting camera preview: " + e.getMessage());}
}
}
}
Edit: I saw this question, but I have already saved the image as a file.
I found a solution:
We stop using camera.takePicture() and only use the preview data. The idea is based on @CommonsWare's code for displaying the photo and on the zebra crossing (ZXing) project's astute solution.
The idea:
btTake.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (Params.tagFgDebug && fgDebugLocal){Log.i(Params.tagGen, tagLocal + "btTake");};
if (inPreview) {
camera.setOneShotPreviewCallback(previewCallback);
inPreview=false;
}
}
});
The preview callback code:
final class PreviewCallback implements Camera.PreviewCallback {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
Log.i( tagLocal , "onPreviewFrame ");
File photo= new File(strFileName);
if (photo.exists()) {
photo.delete();
}
try {
Camera.Parameters parameters = camera.getParameters();
Size size = parameters.getPreviewSize();
YuvImage image = new YuvImage(data, parameters.getPreviewFormat(),
size.width, size.height, null);
FileOutputStream filecon = new FileOutputStream(photo);
image.compressToJpeg(new Rect(0, 0, image.getWidth(), image.getHeight()), 90, filecon);
filecon.close();
} catch (IOException e) {
Toast.makeText(getBaseContext(), e.getMessage(), Toast.LENGTH_SHORT).show();
}
/**
* Restart :
*/
File fileVerif = new File(strFileName);
if (fileVerif.exists() && fileVerif.length()!=0) {
if (Params.tagFgDebug && fgDebugLocal){Log.i(tagLocal , "Photo stockée file lenght = "+ fileVerif.length());};
Intent intent = new Intent(CatchImageCommonsWare.this, Photo.class);
startActivity(intent);
finish();
} else {
new Exception("CatchImageCommonsWare", "PreviewCallback", "Pb stockage image = " + strFileName);
Toast.makeText(context, "Erreur enregistrement de la photo, essayez à nouveau", Toast.LENGTH_LONG).show();
}
}
}
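As for the original question about picture sizes: the size passed to setPictureSize is applied at capture time, so a smaller size does mean a smaller JPEG buffer coming back through the callback, which generally lowers the memory needed for the capture. A minimal sketch of picking the smallest supported size before calling takePicture (camera is your open Camera instance; the helper name is made up):
private void useSmallestPictureSize(Camera camera) {
    Camera.Parameters parameters = camera.getParameters();
    Camera.Size smallest = null;
    for (Camera.Size size : parameters.getSupportedPictureSizes()) {
        if (smallest == null || size.width * size.height < smallest.width * smallest.height) {
            smallest = size;
        }
    }
    if (smallest != null) {
        // Takes effect for the next takePicture() call.
        parameters.setPictureSize(smallest.width, smallest.height);
        camera.setParameters(parameters);
    }
}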