I am working on a QR Codes project and have used the ZXing library to generate QR codes. Now I want to scan a QR code in my app, but I am using my own custom camera. With my camera I capture an image and create a Bitmap from the captured data. Is it possible to use that Bitmap for scanning QR codes, by calling the decode functions of the ZXing library and passing the Bitmap or byte[] to them? Any help is appreciated. Here is my implementation:
public void onCreate(Bundle savedInstanceState) {
// TODO OnCreate Method
super.onCreate(savedInstanceState);
setContentView(R.layout.camera_layout);
cameraId = Camera.CameraInfo.CAMERA_FACING_BACK;
activity = this;
filepath = Environment.getExternalStorageDirectory();
if (checkCameraHardware(this)) {
// Create an instance of Camera
mCamera = getCameraInstance();
setCameraDisplayOrientation(this, cameraId, mCamera);
try {
// Get Camera Parameters
Camera.Parameters params = mCamera.getParameters();
// Set the Focus Mode
params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
mCamera.setParameters(params);
Toast.makeText(getApplicationContext(), "Camera Available",
Toast.LENGTH_LONG).show();
mPreview = new CameraPreview(this, mCamera);
FrameLayout preview = (FrameLayout) findViewById(R.id.cameraPreview);
preview.addView(mPreview);
} catch (Exception e) {
Toast.makeText(getApplicationContext(),
"Error: " + e.getMessage(), Toast.LENGTH_LONG).show();
}
} else {
Toast.makeText(getApplicationContext(), "Camera Not Available",
Toast.LENGTH_LONG).show();
}
Button captureButton = (Button) findViewById(R.id.button_capture);
captureButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// get an image from the camera
mCamera.takePicture(null, null, mPicture);
}
});
}
@Override
protected void onPause() {
// TODO OnPause Method
super.onPause();
releaseCamera();
}
// TODO Detecting Camera Hardware
private boolean checkCameraHardware(Context context) {
if (context.getPackageManager().hasSystemFeature(
PackageManager.FEATURE_CAMERA)) {
// This device has camera
return true;
} else {
// No Camera on this Device
return false;
}
}
// TODO Accessing Camera
public static Camera getCameraInstance() {
Camera c = null;
try {
c = Camera.open();
} catch (Exception e) {
// Camera is not available (in use or does not exist)
}
return c; // returns null if camera is unavailable
}
private PictureCallback mPicture = new PictureCallback() {
@SuppressLint("InlinedApi")
@Override
public void onPictureTaken(byte[] data, Camera camera) {
// TODO Takes the picture and write to file
File pictureFile = getOutputMediaFile(FileColumns.MEDIA_TYPE_IMAGE);
if (pictureFile == null) {
Log.d("PICFILE",
"Error creating media file, check storage permissions");
return;
}
try {
Bitmap bmp = BitmapFactory
.decodeByteArray(data, 0, data.length);
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(data);
fos.close();
updateGallery();
// Bitmap bitmap = BitmapFactory.decodeByteArray(data, 0,
// data.length);
Intent i = new Intent(getApplicationContext(),
TestActivity.class);
i.putExtra("Image", data);
startActivity(i);
} catch (Exception e) {
Log.d("IOEXCEPTION", "Error accessing file: " + e.getMessage());
}
}
};
Yes, you can use the decode method. Check the code below for an implementation:
String detectBarCode(Bitmap bitmap) {
int[] intArray = new int[bitmap.getWidth() * bitmap.getHeight()];
bitmap.getPixels(intArray, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
LuminanceSource source = new RGBLuminanceSource(bitmap.getWidth(), bitmap.getHeight(), intArray);
Reader reader = new QRCodeReader();
try {
Result result = reader.decode(new BinaryBitmap(new HybridBinarizer(source)));
return result.getText();
} catch (NotFoundException e) {
e.printStackTrace();
return null;
} catch (ChecksumException e) {
e.printStackTrace();
return null;
} catch (FormatException e) {
e.printStackTrace();
return null;
}
}
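For example, in your onPictureTaken callback you could decode the captured JPEG and pass the resulting bitmap to this method instead of (or before) starting another activity. This is only a sketch based on your posted code; the inSampleSize value is an assumption to keep memory usage down:
// Sketch: decode the captured bytes and scan them with detectBarCode()
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 4; // assumption: downscale the full-size picture before decoding
Bitmap captured = BitmapFactory.decodeByteArray(data, 0, data.length, options);
String qrText = detectBarCode(captured);
if (qrText != null) {
    Toast.makeText(getApplicationContext(), "QR: " + qrText, Toast.LENGTH_LONG).show();
} else {
    Toast.makeText(getApplicationContext(), "No QR code found", Toast.LENGTH_LONG).show();
}
Note that very aggressive downsampling can make small QR codes unreadable, so you may need to tune that value.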
I am using the GPUImage Android library to apply filters to the camera preview and to save the image with the filters applied after taking a picture. The problem is that when I take the picture, I can't get the image with the filters.
I am using the following code:
@Override
public void onPictureTaken(byte[] data, Camera camera) {
Bitmap bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
mGPUImage.setImage(bitmap);
bitmap = mGPUImage.getBitmapWithFilterApplied();
saveImage(bitmap);
}
The sample code in GPUImage's library page (https://github.com/CyberAgent/android-gpuimage/#sample-code) says:
With preview:
@Override
public void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity);
Uri imageUri = ...;
mGPUImage = new GPUImage(this);
mGPUImage.setGLSurfaceView((GLSurfaceView) findViewById(R.id.surfaceView));
mGPUImage.setImage(imageUri); // this loads image on the current thread, should be run in a thread ?? (can't understand this line)
mGPUImage.setFilter(new GPUImageSepiaFilter());
// Later when image should be saved saved:
mGPUImage.saveToPictures("GPUImage", "ImageWithFilter.jpg", null);
}
Even with their sample I can't save the image with the filter.
Could somebody explain this to me?
Use this code. Call this method when you are ready to save the image; I am calling it on the click of a save button:
private void takePhoto() {
releaseCamera();
new AsyncTask<Void, Void, Void>() {
Bitmap bitmap;
@Override
protected void onPreExecute() {
super.onPreExecute();
try {
bitmap = gpuImageView.capture();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
@Override
protected Void doInBackground(Void... params) {
File dir = Util.getCameraDirectory(); // directory where you want to save image
if (!dir.exists()) {
dir.mkdirs();
}
String filename = getString(R.string.app_name) + System.currentTimeMillis() + ".jpg";
File file = new File(dir, filename);
try {
FileOutputStream fileOutputStream = new FileOutputStream(file);
bitmap.compress(Bitmap.CompressFormat.JPEG, 90, fileOutputStream);
fileOutputStream.flush();
fileOutputStream.close();
Intent intent =
new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
intent.setData(Uri.fromFile(file));
sendBroadcast(intent);
} catch (Exception exception) {
exception.printStackTrace();
}
return null;
}
@Override
protected void onPostExecute(Void aVoid) {
super.onPostExecute(aVoid);
prepareCamera();
Toast.makeText(MyActivity.this, R.string.msg_after_save, Toast.LENGTH_SHORT).show();
}
}.execute();
}
private void prepareCamera() {
camera = Camera.open(cameraId);
Camera.Parameters parameters = camera.getParameters();
Camera.Size size = getOptimalPreviewSize(camera.getParameters().getSupportedPreviewSizes(), getWindowManager().getDefaultDisplay().getWidth(), getWindowManager().getDefaultDisplay().getHeight());
parameters.setPreviewSize(size.width, size.height);
if (parameters.getSupportedFocusModes().contains(
Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
parameters
.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
}
parameters.setPreviewFormat(ImageFormat.NV21);
camera.setParameters(parameters);
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(cameraId, info);
int orientation = getCameraDisplayOrientation(info);
boolean flipHorizontal = info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT;
gpuImageView.getGPUImage().setUpCamera(camera, orientation,
flipHorizontal, false);
}
private void releaseCamera() {
if (camera != null) {
camera.stopPreview();
camera.setPreviewCallback(null);
camera.release();
camera = null;
}
}
Let me know if you face any further issues.
Also, double-check that the write permission is declared in your manifest.
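For reference, this is the standard permission entry in AndroidManifest.xml (nothing project-specific, just the usual line):
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />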
My application crashes on devices without an SD card, but works fine on devices that have one. When I debugged it, I found that the app crashes with the above error on
mCamera.takePicture(null, null, jpegCallBack);
I googled a lot but didn't find any solution. I saw this link:
http://forums.androidcentral.com/motorola-droid-x/102987-camera-won-t-take-pictures-without-sd-card.html
So, is it possible to capture images from a background service on a device with no SD card?
Please give me some clues.
Here are some methods of my HiddenCamera class:
@SuppressWarnings("deprecation")
private void startCapturingCall() {
final Boolean isSDPresent = android.os.Environment
.getExternalStorageState().equals(
android.os.Environment.MEDIA_MOUNTED);
if (mCamera != null) {
parameters = mCamera.getParameters();
if (FLASH_MODE == null || FLASH_MODE.isEmpty()) {
FLASH_MODE = "auto";
}
parameters.setFlashMode(FLASH_MODE);
pictureSize = getBiggesttPictureSize(parameters);
if (pictureSize != null)
parameters
.setPictureSize(pictureSize.width, pictureSize.height);
// set camera parameters
mCamera.setParameters(parameters);
mCamera.startPreview();
new Handler().postDelayed(new Runnable() {
@SuppressWarnings("deprecation")
@Override
public void run() {
if (isSDPresent) {
mCamera.takePicture(null, null, jpegCallBack);
} else {
Toast.makeText(getApplicationContext(),
"Please insert SD card", Toast.LENGTH_LONG).show();
}
}
}, 2000);
}
}
@SuppressWarnings("deprecation")
Camera.PictureCallback jpegCallBack = new Camera.PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
Boolean isSDPresent = android.os.Environment
.getExternalStorageState().equals(
android.os.Environment.MEDIA_MOUNTED);
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss",
Locale.getDefault()).format(new Date());
// checking for SD card
if (isSDPresent) {
mediaStorageDir = new File(Environment
.getExternalStorageDirectory().getAbsolutePath(),
IMAGE_DIRECTORY_NAME);
mediaFile = new File(mediaStorageDir.getPath() + File.separator
+ "IMG_" + timeStamp + ".jpg");
// Create the storage directory if it does not exist
if (!mediaStorageDir.exists()) {
if (!mediaStorageDir.mkdirs()) {
}
}
try {
Bitmap userImage = BitmapFactory.decodeByteArray(data, 0,
data.length);
// set file out stream
FileOutputStream out = new FileOutputStream(mediaFile);
// set compress format quality and stream
userImage.compress(Bitmap.CompressFormat.JPEG, 50, out);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
userImage.compress(Bitmap.CompressFormat.JPEG, 50, baos);
mByteArray = baos.toByteArray();
try {
out.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} else {
Toast.makeText(getApplicationContext(),
"Please insert SD card !", Toast.LENGTH_LONG).show();
}
if (mediaStorageDir.exists()) {
getPathOfCapturedImage();
}
HiddenCamera.this.finish();
CameraService.IS_ACTIVITY_FINISHED = true;
}
};
Also, isSDPresent always returns true for me.
Please give me your suggestions on this; I have been stuck on this point for the last 2-3 days.
It also seems to be device-specific: on a Samsung Grand my code works fine even without an SD card, but on a Moto E the application crashes. The camera settings play an important role here.
Thanks
I am finally done with this. I got busy with other tasks, but today I have time to post my answer on this topic. Since the topic is quite general, I am posting this answer to help others who may want the same functionality. I did it using a SurfaceTexture, but that only works on Android 4.0 and above; for older versions you need to use a SurfaceView.
So here is my code:
public class SurfaceTextureActivity extends Activity implements
SurfaceTextureListener {
private Parameters mParameters;
private Camera.Size mPictureSize;
private static final String sIMAGE_DIRECTORY_NAME = "HiddenCapturedPics";
private byte[] mByteArray;
private Camera mCamera;
private TextureView mTextureView;
private File mMediaFile, mMediaStorageDir = null;
private String mEncodedImage, mImageName, mFinalResponse,
mFlashMode;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mTextureView = new TextureView(this);
setContentView(mTextureView);
if (checkCameraHardware(getApplicationContext())) {
mTextureView.setSurfaceTextureListener(this);
Bundle extras = getIntent().getExtras();
mFlashMode = extras.getString("FLASH");
} else {
Toast.makeText(getApplicationContext(),
"Your Device dosen't have a Camera !", Toast.LENGTH_LONG)
.show();
}
}
/** Check if this device has a camera */
private boolean checkCameraHardware(Context context) {
if (context.getPackageManager().hasSystemFeature(
PackageManager.FEATURE_CAMERA)) {
return true;
} else {
return false;
}
}
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width,
int height) {
mCamera = Camera.open();
mTextureView.setLayoutParams(new FrameLayout.LayoutParams(0, 0,
Gravity.CENTER));
try {
mCamera.setPreviewTexture(surface);
} catch (IOException t) {
}
mCamera.startPreview();
startCapturingCall();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width,
int height) {
// Ignored, the Camera does all the work for us
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
mCamera.stopPreview();
mCamera.release();
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
Toast.makeText(getApplicationContext(), "Dfg", Toast.LENGTH_SHORT)
.show();
// Update your view here!
}
Camera.PictureCallback jpegCallBack = new Camera.PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
Boolean isSDPresent = android.os.Environment
.getExternalStorageState().equals(
android.os.Environment.MEDIA_MOUNTED);
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss",
Locale.getDefault()).format(new Date());
// checking for SD card
if (isSDPresent) {
mMediaStorageDir = new File(Environment
.getExternalStorageDirectory().getAbsolutePath(),
sIMAGE_DIRECTORY_NAME);
mMediaFile = new File(mMediaStorageDir.getPath()
+ File.separator + "IMG_" + timeStamp + ".jpg");
if (!mMediaStorageDir.exists()) {
if (!mMediaStorageDir.mkdirs()) {
}
}
try {
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 8;
Bitmap userImage = BitmapFactory.decodeByteArray(data, 0,
data.length, options);
FileOutputStream out = new FileOutputStream(mMediaFile);
userImage.compress(Bitmap.CompressFormat.JPEG, 50, out);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
userImage.compress(Bitmap.CompressFormat.JPEG, 50, baos);
mByteArray = baos.toByteArray();
try {
out.close();
} catch (IOException e) {
e.printStackTrace();
}
} catch (FileNotFoundException e) {
e.printStackTrace();
}
} else {
Toast.makeText(getApplicationContext(),
"Please insert SD card !", Toast.LENGTH_LONG).show();
}
if (mMediaStorageDir.exists()) {
getPathOfCapturedImage();
}
SurfaceTextureActivity.this.finish();
CameraService.IS_ACTIVITY_FINISHED = true;
}
};
private void startCapturingCall() {
if (mCamera != null) {
mParameters = mCamera.getParameters();
if (mFlashMode == null || mFlashMode.isEmpty()) {
mFlashMode = "auto";
}
mParameters.setFlashMode(mFlashMode);
mPictureSize = getBiggesttPictureSize(mParameters);
if (mPictureSize != null)
mParameters.setPictureSize(mPictureSize.width,
mPictureSize.height);
mCamera.setParameters(mParameters);
mCamera.startPreview();
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
if (mCamera != null) {
mCamera.startPreview();
mCamera.takePicture(null, null, jpegCallBack);
} else {
mCamera = getCameraInstance();
mCamera.startPreview();
mCamera.takePicture(null, null, jpegCallBack);
}
}
}, 2000);
}
}
private Camera.Size getBiggesttPictureSize(Camera.Parameters parameters) {
Camera.Size result = null;
for (Camera.Size size : parameters.getSupportedPictureSizes()) {
if (result == null) {
result = size;
} else {
int resultArea = result.width * result.height;
int newArea = size.width * size.height;
if (newArea > resultArea) {
result = size;
}
}
}
return (result);
}
public static Camera getCameraInstance() {
Camera c = null;
try {
c = Camera.open(); // attempt to get a Camera instance
} catch (Exception e) {
// Camera is not available (in use or does not exist)
}
return c; // returns null if camera is unavailable
}
}
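For anyone wiring this into the background service it mentions: the activity above expects a "FLASH" string extra in onCreate, so the launch code would look roughly like this (the CameraService name comes from the snippet above; treat the details as assumptions):
// Assumed launch code inside CameraService (names taken from the code above, not verified)
Intent captureIntent = new Intent(getApplicationContext(), SurfaceTextureActivity.class);
captureIntent.putExtra("FLASH", "auto"); // read via getIntent().getExtras() in onCreate()
captureIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); // needed when starting an Activity from a Service
startActivity(captureIntent);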
Hope this will help others.
Here are the links for reference:
Example of Camera preview using SurfaceTexture in Android
Camera.takePicture throws RunTimeException
Cheers!
I’m a beginner and I need help. How do I take photos with the camera and save them or send them to the next activity?
I've tried a couple of options, i.e. takePicture with a PictureCallback and a SurfaceView, and taking the picture with an intent. However, neither works properly on Android 2.3.3. Could someone figure out the issues with my code below?
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.sf_foto);
mCamera = getCameraInstance();
mCameraPreview = new SF_CameraPreview(this, mCamera);
FrameLayout preview = (FrameLayout) findViewById(R.id.camera_preview);
preview.addView(mCameraPreview);
ImageButton captureButton = (ImageButton) findViewById(R.id.button_capture);
captureButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mCamera.takePicture(myShutterCallback, mPicture_RAW, mPicture);
}
});
}
private Camera getCameraInstance() {
Camera camera = null;
try {
camera = Camera.open(0);
camera.setDisplayOrientation(90);
} catch (Exception e) {
// cannot get camera or does not exist
}
return camera;
}
ShutterCallback myShutterCallback = new ShutterCallback(){
@Override
public void onShutter() {}
};
PictureCallback mPicture_RAW = new PictureCallback(){
@Override
public void onPictureTaken(byte[] arg0, Camera arg1) {}
};
PictureCallback mPicture = new PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
File pictureFile = getOutputMediaFile();
if (pictureFile == null) {
return;
}
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(data);
fos.flush();
fos.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
Intent i = new Intent(StyloveFoto.this, Filter.class);
startActivity(i);
}
};
protected File getOutputMediaFile() {
File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM),"KWAlbum");
if (!mediaStorageDir.exists()) {
if (!mediaStorageDir.mkdirs()) {
Log.d("KWAlbum", "failed to create directory");
return null;
}
}
// Create a media file name
File mediaFile = new File(mediaStorageDir.getPath() + File.separator + "KW" + ".jpg");
return mediaFile;
}
my surface view:
public class SF_CameraPreview extends SurfaceView implements SurfaceHolder.Callback{
private SurfaceHolder mSurfaceHolder;
private Camera mCamera;
public SF_CameraPreview(Context context, Camera camera) {
super(context);
this.mCamera = camera;
this.mSurfaceHolder = this.getHolder();
this.mSurfaceHolder.addCallback(this);
this.mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
try {
mCamera.setPreviewDisplay(surfaceHolder);
mCamera.startPreview();
} catch (IOException e) {
// left blank for now
}
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
//mCamera.stopPreview();
mCamera.release();
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int format,
int width, int height) {
// start preview with new settings
try {
mCamera.setPreviewDisplay(surfaceHolder);
Camera.Parameters parameters = mCamera.getParameters();
parameters.set("orientation", "portrait");
mCamera.setParameters(parameters);
mCamera.startPreview();
} catch (Exception e) {
// intentionally left blank for a test
}
}
}
Instead of this line: fos.write(data);
write: fos.write(data, 0, data.length);
if you want to pass it to the next activity:
Intent i = new Intent(StyloveFoto.this, Filter.class);
i.putExtra("myImage",data);
startActivity(i);
and then in the Filter class, in the onCreate method:
byte[] myImage = getIntent().getByteArrayExtra("myImage");
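From there you can turn the bytes back into a bitmap; a minimal sketch (the ImageView is hypothetical):
Bitmap bitmap = BitmapFactory.decodeByteArray(myImage, 0, myImage.length);
imageView.setImageBitmap(bitmap); // hypothetical ImageView in Filter's layout
Keep in mind that a full-resolution JPEG byte[] can exceed the Intent extra size limit, which is one reason the answer below passes a file path instead.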
I had the same problem and just solved it.
The problem is that mPicture is an object of PictureCallback, so you can't create the intent for Filter.class with StyloveFoto.this from inside the PictureCallback. Try this one:
Intent i = new Intent(getBaseContext() , Filter.class);
startActivity(i);
Such a big trap in Java... hope it helps :)
Either you save the picture like this and pass the path to another activity
final File file = new File(Environment.getExternalStorageDirectory() + "/" + System.currentTimeMillis() + "_pic.jpg");
OutputStream output = null;
try {
output = new FileOutputStream(file);
output.write(data);
} catch (IOException e) {
e.printStackTrace();
} finally {
if (null != output) {
try {
output.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
OR
Pass data from camera activity :
Intent intent = new Intent(getApplicationContext(), your_class.class);
intent.putExtra("path", path);
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
startActivity(intent);
Receiving activity:
String path = getIntent().getStringExtra("path");
Bitmap bitmap = BitmapFactory.decodeFile(path);
image.setImageBitmap(scaleDownBitmapImage(bitmap, 300, 200));
This is my code to take pictures in Android, but it always returns a blank image. What might be the error? I saw a few suggestions about setting the flash, exposure and focus modes, which I do set in my code, but the camera still returns a blank image even though the photo gets taken (at least I heard the shutter sound).
Camera.Parameters p = camera.getParameters();
List<Size> sizes = p.getSupportedPictureSizes();
// Choose any one you want among sizes
Size size = sizes.get(0);
p.setPictureSize(size.width, size.height);
p.set("flash-mode","off");
p.set("focus-mode","auto");
p.setExposureCompensation(100);
p.setFocusMode("auto");
camera.setParameters(p);
camera.startPreview();
camera.takePicture(shutterCallback, rawCallback,
jpegCallback);
ShutterCallback shutterCallback = new ShutterCallback() {
public void onShutter() {
Log.d(TAG, "onShutter'd");
}
};
/** Handles data for raw picture */
PictureCallback rawCallback = new PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
Log.d(TAG, "onPictureTaken - raw");
}
};
/** Handles data for jpeg picture */
PictureCallback jpegCallback = new PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
FileOutputStream outStream = null;
try {
// write to local sandbox file system
// outStream =
// CameraDemo.this.openFileOutput(String.format("%d.jpg",
// System.currentTimeMillis()), 0);
// Or write to sdcard
outStream = new FileOutputStream(String.format(
"/sdcard/%d.jpg", System.currentTimeMillis()));
outStream.write(data);
outStream.close();
Log.d(TAG, "onPictureTaken - wrote bytes: " + data.length);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
}
Log.d(TAG, "onPictureTaken - jpeg");
}
};
@Override
public void draw(Canvas canvas) {
super.draw(canvas);
Paint p = new Paint(Color.RED);
Log.d(TAG, "draw");
canvas.drawText("PREVIEW", canvas.getWidth() / 2,
canvas.getHeight() / 2, p);
}
Camera.java
public class Camera extends Activity
{
private static final int CAMERA_REQUEST = 1888;
private String selectedImagePath;
private FileOutputStream fo;
private String uri;
@Override
public void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
Intent cameraIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
startActivityForResult(cameraIntent, CAMERA_REQUEST);
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data)
{
if (resultCode == RESULT_OK) {
if (requestCode == CAMERA_REQUEST)
{
Bitmap photo = (Bitmap) data.getExtras().get("data");
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
photo.compress(Bitmap.CompressFormat.JPEG, 40, bytes);
Random randomGenerator = new Random();randomGenerator.nextInt();
String newimagename=randomGenerator.toString()+".jpg";
File f = new File(Environment.getExternalStorageDirectory()
+ File.separator + newimagename);
try {
f.createNewFile();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
//write the bytes in file
try {
fo = new FileOutputStream(f.getAbsoluteFile());
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
fo.write(bytes.toByteArray());
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
uri=f.getAbsolutePath();
//this is the url that where you are saved the image
}
}
}
}
I have developed an Android application that uses the front-facing camera. It works fine, but I need it to capture automatically, i.e. take the photo without the user pressing a shutter button.
My camera activity code is:
private Camera openFrontFacingCameraGingerbread() {
int cameraCount = 0;
Camera cam = null;
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
cameraCount = Camera.getNumberOfCameras();
for ( int camIdx = 0; camIdx < cameraCount; camIdx++ ) {
Camera.getCameraInfo( camIdx, cameraInfo );
if ( cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT ) {
try {
cam = Camera.open( camIdx );
} catch (RuntimeException e) {
Log.i("Camera failed to open: ",e.getLocalizedMessage());
}
}
}
return cam;
}
Thanks in advance.
Follow the steps outlined in the Android Developer reference pages. There's no requirement to have a 'shutter button'. You can create a dummy SurfaceHolder if you don't want to show the image on the screen, e.g.
SurfaceView surface = new SurfaceView(context);
cam.setPreviewDisplay(surface.getHolder());
public int intPicTaken;
// setPreviewCallback on the camera, wait until intPicTaken increments to 10, then take the picture
cam.setPreviewCallback(prevCallBack);
public Camera.PreviewCallback prevCallBack = new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
intPicTaken++;
try {
if(intPicTaken == 10) {
doTakePicture();
}
} catch (Exception e) {
System.out.println("onPreviewFrame: " + e.toString());
}
}
};
public Camera.PictureCallback mPicture = new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
System.out.println("PictureCallback onPictureTaken");
try {
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 1;
Bitmap picture = BitmapFactory.decodeByteArray(data, 0, data.length, options);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
picture.compress(Bitmap.CompressFormat.JPEG, 100, baos);
baos.close();
System.out.println("PictureCallback onPictureTaken done");
cam.release();
saveFile(picture);
} catch (Exception e) {
System.out.println("onPictureTaken: " + e.toString());
}
}
};
// take the picture
public void doTakePicture() {
try {
cam.stopPreview();
cam.takePicture(null, null, mPicture, mPicture);
} catch(Exception e){
System.out.println("doTakePicture: " + e.toString());
}
}
// saving the file to gallery
public void saveFile(Bitmap bitmap) {
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
File mediaStorageDir = Environment.getExternalStorageDirectory();
if (! mediaStorageDir.exists()){
if (! mediaStorageDir.mkdirs()){
System.out.println("saveFile: failed to create directory");
return;
}
}
try {
String saved = MediaStore.Images.Media.insertImage(this.getContentResolver(), bitmap, "title", "description");
Uri sdCardUri = Uri.parse("file://" + Environment.getExternalStorageDirectory());
sendBroadcast(new Intent(Intent.ACTION_MEDIA_MOUNTED, sdCardUri));
System.out.println("file saved");
} catch (Exception e) {
System.out.println("saveFile: " + e.toString());
e.printStackTrace();
}
}
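For context, the callbacks above assume a setup roughly like this in onCreate; this is only a sketch that combines the question's openFrontFacingCameraGingerbread() with the fields from this answer:
// Sketch of the assumed wiring (not part of the original answer)
cam = openFrontFacingCameraGingerbread();  // front camera, from the question
SurfaceView dummy = new SurfaceView(this); // off-screen preview target, as suggested above
try {
    cam.setPreviewDisplay(dummy.getHolder());
} catch (IOException e) {
    e.printStackTrace();
}
cam.setPreviewCallback(prevCallBack); // counts preview frames, then calls doTakePicture()
cam.startPreview();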
Write this code in onCreate to auto-capture an image:
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_custom__camera_activity);
mCamera = getCameraInstance();
mCameraPreview = new CameraPreview(this, mCamera);
FrameLayout preview = (FrameLayout) findViewById(R.id.camera_preview);
preview.addView(mCameraPreview);
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
mCamera.takePicture(null, null, mPicture);
}
}, 5500);
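Note that mPicture is not defined in this answer; it would be an ordinary PictureCallback. A minimal sketch reusing the file-saving pattern from the earlier answers (the output path is an assumption):
private Camera.PictureCallback mPicture = new Camera.PictureCallback() {
    @Override
    public void onPictureTaken(byte[] data, Camera camera) {
        // assumption: write the JPEG bytes straight to external storage
        File pictureFile = new File(Environment.getExternalStorageDirectory(),
                "IMG_" + System.currentTimeMillis() + ".jpg");
        try {
            FileOutputStream fos = new FileOutputStream(pictureFile);
            fos.write(data);
            fos.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
};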
}