Android Camera and shader effects

I am working on an Android camera app that uses a vertex shader to apply various distortion effects to the camera preview. Although the camera preview shows the desired effect, the pictures I take do not. How do I get the saved picture/image to show the same effect as the camera preview? I hope my question is clear. Many thanks in anticipation.
Code extract is as follows:
Camera Activity:
public class CameraActivity extends Activity implements SurfaceTexture.OnFrameAvailableListener{
@SuppressWarnings("deprecation")
private Camera mCamera;
private PreviewCallback mPreviewCallback;
private MyGLSurfaceView glSurfaceView;
private SurfaceTexture surface;
MyGL20Renderer renderer;
public boolean mFrameAvailable = false;
public static final int MEDIA_TYPE_IMAGE = 1;
public static final int MEDIA_TYPE_VIDEO = 2;
@Override
public void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.screen);
FrameLayout surface = (FrameLayout)findViewById(R.id.camera_preview);
glSurfaceView = new MyGLSurfaceView(this);
renderer = glSurfaceView.getRenderer();
// setContentView(glSurfaceView);
surface.addView(glSurfaceView);
Button captureButton = (Button) findViewById(R.id.button_capture);
captureButton.setOnClickListener(
new View.OnClickListener() {
@SuppressWarnings("deprecation")
@Override
public void onClick(View v) {
// get an image from the camera
// mCamera.takePicture(null, null, mPicture);
// mCamera.setOneShotPreviewCallback(mPreviewCallback);
takeSnapPhoto();
}
}
);
}
@SuppressWarnings("deprecation")
public void startCamera(int texture)
{
surface = new SurfaceTexture(texture);
surface.setOnFrameAvailableListener(this);
renderer.setSurface(surface);
mCamera = Camera.open(CameraInfo.CAMERA_FACING_BACK);
try
{
mCamera.setPreviewTexture(surface);
mCamera.startPreview();
}
catch (IOException ioe)
{
Log.w("MainActivity","CAM LAUNCH FAILED");
}
Camera.Parameters param = mCamera.getParameters();
param.setPictureSize(640, 480);
// param.setColorEffect(Camera.Parameters.EFFECT_SEPIA);
mCamera.setParameters(param);
}
public void onFrameAvailable(SurfaceTexture surfaceTexture)
{
mFrameAvailable = true;
glSurfaceView.requestRender();
}
@SuppressWarnings("deprecation")
@Override
public void onPause()
{
mFrameAvailable = false;
mCamera.stopPreview();
mCamera.release();
System.exit(0);
}
@SuppressWarnings("deprecation")
private PictureCallback mPicture = new PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
// Bitmap bitmap = BitmapFactory.decodeByteArray(data , 0, data .length);
File pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE);
if (pictureFile == null){
Log.d("CrazyMirror", "Error creating media file, check storage permissions: ");
return;
}
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
// bitmap.compress(Bitmap.CompressFormat.PNG,100, fos);
fos.write(data);
fos.close();
} catch (FileNotFoundException e) {
Log.d("CrazyMirror", "File not found: " + e.getMessage());
} catch (IOException e) {
Log.d("CrazyMirror", "Error accessing file: " + e.getMessage());
}
refreshPreview();
}
};
/** Create a File for saving an image or video */
@SuppressLint("SimpleDateFormat")
private static File getOutputMediaFile(int type){
// To be safe, you should check that the SDCard is mounted
// using Environment.getExternalStorageState() before doing this.
File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES), "CrazyMirror");
// This location works best if you want the created images to be shared
// between applications and persist after your app has been uninstalled.
// Create the storage directory if it does not exist
if (! mediaStorageDir.exists()){
if (! mediaStorageDir.mkdirs()){
Log.d("CrazyMirror", "failed to create directory");
return null;
}
}
// Create a media file name
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
File mediaFile;
if (type == MEDIA_TYPE_IMAGE){
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"IMG_"+ timeStamp + ".jpg");
}
else if(type == MEDIA_TYPE_VIDEO) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"VID_"+ timeStamp + ".mp4");
}
else {
return null;
}
return mediaFile;
}
@SuppressWarnings("deprecation")
public void refreshPreview() {
try {
mCamera.stopPreview();
} catch (Exception e) {}
try {
mCamera.startPreview();
} catch (Exception e) {}
}
public void takeSnapPhoto() {
mCamera.setOneShotPreviewCallback(new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
Camera.Parameters parameters = camera.getParameters();
int format = parameters.getPreviewFormat();
//YUV formats require more conversion
if (format == ImageFormat.NV21 || format == ImageFormat.YUY2 || format == ImageFormat.NV16) {
int w = parameters.getPreviewSize().width;
int h = parameters.getPreviewSize().height;
// Get the YUV image
YuvImage yuv_image = new YuvImage(data, format, w, h, null);
// Convert YUV to JPEG
Rect rect = new Rect(0, 0, w, h);
ByteArrayOutputStream output_stream = new ByteArrayOutputStream();
yuv_image.compressToJpeg(rect, 100, output_stream);
byte[] byt = output_stream.toByteArray();
FileOutputStream outStream = null;
File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES), "CrazyMirror");
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
File mediaFile;
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"IMG_"+ timeStamp + ".jpg");
try {
// Write to SD Card
// File file = createFileInSDCard(FOLDER_PATH, "Image_"+System.currentTimeMillis()+".jpg");
//Uri uriSavedImage = Uri.fromFile(file);
outStream = new FileOutputStream(mediaFile);
outStream.write(byt);
outStream.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
}
}
}
});
}
}
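The distortion only exists in the OpenGL rendering: both takePicture() and the one-shot preview callback return raw camera frames that never pass through the vertex shader, which is why the saved JPEG looks undistorted. One way to save exactly what the user sees is to read the rendered frame back from the GL surface with glReadPixels and compress that bitmap instead. The sketch below is an assumption about how this could be wired into MyGL20Renderer; requestCapture(), readSurfacePixels(), and the width/height values are invented names, not part of the original code.
// Sketch only: runs on the GL thread inside MyGL20Renderer, right after the shader has drawn the
// frame. Needs android.opengl.GLES20, android.graphics.Bitmap, android.graphics.Matrix,
// java.nio.ByteBuffer, java.nio.ByteOrder and javax.microedition.khronos.opengles.GL10.
private volatile boolean mCaptureRequested = false;

public void requestCapture() {
    mCaptureRequested = true;   // call this from the capture button instead of takeSnapPhoto()
}

@Override
public void onDrawFrame(GL10 unused) {
    // ... existing code that updates the SurfaceTexture and draws the distorted preview ...
    if (mCaptureRequested) {
        mCaptureRequested = false;
        Bitmap shot = readSurfacePixels(surfaceWidth, surfaceHeight); // size saved in onSurfaceChanged
        // hand 'shot' back to the Activity and compress it to JPEG off the GL thread
    }
}

private Bitmap readSurfacePixels(int width, int height) {
    ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4);
    buf.order(ByteOrder.nativeOrder());
    // Read back exactly what was rendered to the current surface
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
    buf.rewind();
    Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    bmp.copyPixelsFromBuffer(buf);
    // OpenGL's origin is bottom-left, so flip vertically before saving
    Matrix flip = new Matrix();
    flip.postScale(1f, -1f);
    return Bitmap.createBitmap(bmp, 0, 0, width, height, flip, true);
}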

Related

Camera Preview Android

I'm developing an Android App that has a Camera Preview Activity. It calls takePicture() every 2 second using a timer and does some processing on the captured image in PictureCallback. From the Android documentation, I learnt that PictureCallback happens in the same thread as Camera.open().
Also, it's recommended to call takePicture() in a separate thread. What's the best way to call StartPreview() after an image is captured?
I would want the processing on each capture to happen on separate threads and the camera preview should continue in the main UI thread. What's the best way to implement this using AsyncTask()?
public class CameraActivity extends AppCompatActivity{
public static final int MEDIA_TYPE_IMAGE = 1;
public static final int MEDIA_TYPE_VIDEO = 2;
public static String TAG = "Exception";
int viewWidth = 0;
int viewHeight = 0;
private Camera mCamera;
private CameraPreview mPreview;
private ImageView iv;
private RelativeLayout rl;
private Camera.PictureCallback mPicture;
private MRZ_OCR mrz = null;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_camera);
rl = (RelativeLayout) findViewById(R.id.rel_camera);
iv = (ImageView) findViewById(R.id.black_above);
viewWidth = iv.getWidth();
viewHeight = rl.getHeight() - 2 * iv.getHeight();
// Create an instance of Camera
mCamera = getCameraInstance();
mPreview = new CameraPreview(this, mCamera);
FrameLayout preview = (FrameLayout) findViewById(R.id.camera_preview);
preview.addView(mPreview);
new Timer().schedule(new TimerTask() {
@Override
public void run() {
mCamera.startPreview();
mrz = new MRZ_OCR();
mrz.execute();
}
}, 4000, 4000);
mPicture = new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
// Crop to get only MRZ
Bitmap bm = BitmapFactory.decodeByteArray(data, 0, data.length);
bm = Bitmap.createBitmap(bm, 0, pxFromDp(CameraActivity.this, 120), viewWidth, viewHeight);
//Verify if it has MRZ
bm = MRZ.getMRZ(bm);
if (bm != null) {
ByteArrayOutputStream stream = new ByteArrayOutputStream();
bm.compress(Bitmap.CompressFormat.PNG, 100, stream);
byte[] byteArray = stream.toByteArray();
createImageFile(byteArray);
Toast.makeText(getApplicationContext(), "Pic Saved", Toast.LENGTH_LONG).show();
}
}
};
}
@Override
protected void onPause() {
super.onPause();
releaseCamera(); // release the camera immediately on pause event
}
private void releaseCamera() {
if (mCamera != null) {
mCamera.release(); // release the camera for other applications
mCamera = null;
}
}
private class MRZ_OCR extends AsyncTask<Void, Void, Void>
{
private byte[] data;
@Override
protected Void doInBackground(Void... params) {
mCamera.takePicture(null, null, mPicture);
// Sleep for however long, you could store this in a variable and
// have it updated by a menu item which the user selects.
try {
Thread.sleep(3000); // 3 second preview
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return null;
}
@Override
protected void onPostExecute(Void result) {
// This returns the preview back to the live camera feed
mCamera.startPreview();
}
}
public static int pxFromDp(final Context context, final float dp) {
return (int) (dp * context.getResources().getDisplayMetrics().density);
}
/**
* A safe way to get an instance of the Camera object.
*/
public static Camera getCameraInstance() {
Camera c = null;
try {
c = Camera.open(); // attempt to get a Camera instance
} catch (Exception e) {
// Camera is not available (in use or does not exist)
}
return c; // returns null if camera is unavailable
}
private static File getOutputMediaFile(int type)
{
File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES), "MyCameraApp");
// Create the storage directory if it does not exist
if (!mediaStorageDir.exists()) {
if (!mediaStorageDir.mkdirs()) {
Log.d("MyCameraApp", "failed to create directory");
return null;
}
}
// Create a media file name
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
File mediaFile;
if (type == MEDIA_TYPE_IMAGE) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"IMG_" + timeStamp + ".jpg");
} else if (type == MEDIA_TYPE_VIDEO) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"VID_" + timeStamp + ".mp4");
} else {
return null;
}
return mediaFile;
}
private static void createImageFile(byte[] byteArray) {
//create empty image type file
File pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE);
if (pictureFile == null) {
Log.d(TAG, "Error creating media file, check storage permissions: ");
return;
}
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(byteArray);
fos.close();
} catch (FileNotFoundException e) {
Log.d(TAG, "File not found: " + e.getMessage());
} catch (IOException e) {
Log.d(TAG, "Error accessing file: " + e.getMessage());
}
}
}
I don't know much about the takePicture() API, but I think what you need to do is put this code in a separate thread:
Bitmap bm = BitmapFactory.decodeByteArray(data, 0, data.length);
bm = Bitmap.createBitmap(bm, 0, pxFromDp(CameraActivity.this, 120), viewWidth, viewHeight);
//Verify if it has MRZ
bm = MRZ.getMRZ(bm);
if (bm != null) {
ByteArrayOutputStream stream = new ByteArrayOutputStream();
bm.compress(Bitmap.CompressFormat.PNG, 100, stream);
byte[] byteArray = stream.toByteArray();
createImageFile(byteArray);
}
Decoding the bitmap is a time-consuming operation, especially in your app where it runs every 2 seconds; it will block the main thread. I think that is also the reason it's recommended to call takePicture() on a separate thread.
You already answered your question. Pass byte[] data to an AsyncTask:
private class PictureConverter extends AsyncTask<Void, Void, Void> {
private byte[] data;
private Camera camera;
public PictureConverter(byte[] _data, Camera _camera) {
data = _data;
camera = _camera;
}
protected Void doInBackground(Void... params) {
Camera.Parameters parameters = camera.getParameters();
ByteArrayOutputStream out = new ByteArrayOutputStream();
YuvImage yuvImage = new YuvImage(data, parameters.getPreviewFormat(), parameters.getPreviewSize().width, parameters.getPreviewSize().height, null);
yuvImage.compressToJpeg(new Rect(0, 0, parameters.getPreviewSize().width, parameters.getPreviewSize().height), 90, out);
byte[] imageBytes = out.toByteArray();
Bitmap bitmap = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
out.flush();
out.close();
//TODO save the image
return null;
}
protected void onProgressUpdate(Void... values) {
}
protected void onPostExecute(Void result) {
//TODO report that the image got saved
}
}
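For completeness, here is a sketch of how the converter might be invoked. Because it runs the YuvImage conversion, it expects a raw preview frame (for example from a one-shot preview callback) rather than the JPEG bytes delivered by takePicture(); this wiring is my assumption, not part of the original answer.
// Assumed wiring (not from the original answer): hand one preview frame to the AsyncTask
mCamera.setOneShotPreviewCallback(new Camera.PreviewCallback() {
    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        // Heavy YUV-to-JPEG work moves off the UI thread
        new PictureConverter(data, camera).execute();
    }
});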

Getting a black screen after saving a photo - Android Camera

I'm developing an application for Android that captures max fps and saves to the SD card.
The problem is that the saved photo is a black screen, and I can't understand why.
Can anyone tell me where the problem is?
The code where I do this:
public class PhotoFragment extends Fragment {
private Camera cam;
private CameraPreview camPreview;
private boolean recording = false;
private ArrayList<byte[]> fotos;
private ArrayList<String> tempos;
private Thread thread;
public PhotoFragment() {
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_photofinish, container, false);
cam = getCameraInstance();
if(cam != null) {
cam.setDisplayOrientation(90);
// set Camera parameters
Camera.Parameters cameraParameters = cam.getParameters();
//set color efects to none
cameraParameters.setColorEffect(Camera.Parameters.EFFECT_NONE);
//set antibanding to none
if (cameraParameters.getAntibanding() != null) {
cameraParameters.setAntibanding(Camera.Parameters.ANTIBANDING_OFF);
}
// set white ballance
if (cameraParameters.getWhiteBalance() != null) {
cameraParameters.setWhiteBalance(Camera.Parameters.WHITE_BALANCE_CLOUDY_DAYLIGHT);
}
//set flash
if (cameraParameters.getFlashMode() != null) {
cameraParameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
}
//set zoom
if (cameraParameters.isZoomSupported()) {
cameraParameters.setZoom(0);
}
//set focus mode
cameraParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_INFINITY);
List<Size> sizes = cameraParameters.getSupportedPictureSizes();
Camera.Size size = sizes.get(0);
cameraParameters.setPictureSize(size.width, size.height);
cam.setParameters(cameraParameters);
fotos = new ArrayList<byte[]>();
tempos = new ArrayList<String>();
camPreview = new CameraPreview(this.getActivity(), cam);
FrameLayout preview = (FrameLayout) rootView.findViewById(R.id.camera_preview);
preview.addView(camPreview);
TextView startRecording = (TextView) rootView.findViewById(R.id.start_record_button);
startRecording.setOnClickListener( new View.OnClickListener() {
public void onClick(View v) {
if(!recording)
{
recording = true;
Size previewSize = cam.getParameters().getPreviewSize();
int dataBufferSize=(int)(previewSize.height*previewSize.width*(ImageFormat.getBitsPerPixel(cam.getParameters().getPreviewFormat())/8.0));
thread.start();
cam.addCallbackBuffer(new byte[dataBufferSize]);
cam.addCallbackBuffer(new byte[dataBufferSize]);
cam.addCallbackBuffer(new byte[dataBufferSize]);
cam.setPreviewCallbackWithBuffer(new PreviewCallback() {
public void onPreviewFrame(byte[] imageData, Camera arg1) {
try {
fotos.add(imageData);
tempos.add(new SimpleDateFormat("HH_mm_ss_SSS", Locale.getDefault()).format(new Date()));
} catch(Exception e) {
System.out.println("ERRO: " + e);
}
}
});
}
else
{
recording = false;
try {
thread.join();
} catch (Exception e) {
}
}
}
});
thread = new Thread(new Runnable() {
public void run() {
while(recording) {
if(fotos.size()>0 && tempos.size()>0)
{
File pictureFile = getOutputMediaFile(1, tempos.get(0));
if (pictureFile == null){
System.out.println("Error creating media file, check storage permissions: ");
return;
}
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(fotos.get(0));
fos.close();
pictureFile = null;
cam.addCallbackBuffer(fotos.get(0));
fotos.remove(0);
tempos.remove(0);
} catch (FileNotFoundException e) {
System.out.println("ERRO FILE NOT FOUND! : " + e);
} catch (IOException e) {
System.out.println("ERRO IOException!");
}
}
}
}
});
}
else
{
Toast.makeText(getActivity(), "Camera not available", Toast.LENGTH_LONG).show();
}
return rootView;
}
public static Camera getCameraInstance(){
Camera c = null;
try {
c = Camera.open();
}
catch (Exception e){
}
return c;
}
@Override
public void onDestroyView() {
super.onDestroyView();
cam.release();
}
private static File getOutputMediaFile(int type, String timeStamp){
File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES), "L_P");
if (! mediaStorageDir.exists()){
if (! mediaStorageDir.mkdirs()){
System.out.println("L_P failed to create directory");
return null;
}
}
String timeStampDay = new SimpleDateFormat("ddMMyyyy", Locale.getDefault()).format(new Date());
new File(mediaStorageDir.getPath() + File.separator + timeStampDay).mkdirs();
File mediaFile;
if (type == 1){
mediaFile = new File(mediaStorageDir.getPath() + File.separator + timeStampDay + File.separator + "IMG_"+ timeStamp + ".jpg");
} else if(type == 2) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator + "VID_"+ timeStamp + ".mp4");
} else {
return null;
}
return mediaFile;
}
}
You are claiming that the file is a JPEG. However, you have done nothing to convert the image to be a JPEG. Preview frames, by default, are not JPEG, but are in NV21 format. Use getSupportedPreviewFormats() to see if JPEG previews are possible, then use setPreviewFormat() to request JPEG previews.
And, as I noted in your previous question, do NOT have two threads working with an ArrayList. Also, do not have your background thread busy-wait looking constantly for an image to show up on an ArrayList. Use a LinkedBlockingQueue or something else that is thread-safe and allows the background thread to block while waiting for an image.
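A minimal sketch of that producer/consumer pattern, assuming NV21 preview frames and the getOutputMediaFile() helper from the question; the queue, thread, and variable names are invented for illustration.
// Producer/consumer replacement for the two ArrayLists (needs java.util.concurrent imports).
private final BlockingQueue<byte[]> frameQueue = new LinkedBlockingQueue<byte[]>();

// Capture the preview geometry once, on the thread that owns the camera.
final Camera.Size previewSize = cam.getParameters().getPreviewSize();
final int previewFormat = cam.getParameters().getPreviewFormat(); // NV21 by default

// Producer: the preview callback only enqueues the buffer and returns immediately.
cam.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() {
    public void onPreviewFrame(byte[] imageData, Camera camera) {
        frameQueue.offer(imageData);
    }
});

// Consumer: blocks on the queue instead of busy-waiting, converts NV21 to JPEG before writing.
Thread saver = new Thread(new Runnable() {
    public void run() {
        try {
            while (recording || !frameQueue.isEmpty()) {
                byte[] frame = frameQueue.poll(500, TimeUnit.MILLISECONDS);
                if (frame == null) continue;
                YuvImage yuv = new YuvImage(frame, previewFormat, previewSize.width, previewSize.height, null);
                ByteArrayOutputStream jpeg = new ByteArrayOutputStream();
                yuv.compressToJpeg(new Rect(0, 0, previewSize.width, previewSize.height), 90, jpeg);
                File out = getOutputMediaFile(1, new SimpleDateFormat("HH_mm_ss_SSS", Locale.getDefault()).format(new Date()));
                if (out != null) {
                    FileOutputStream fos = new FileOutputStream(out);
                    fos.write(jpeg.toByteArray());
                    fos.close();
                }
                cam.addCallbackBuffer(frame); // hand the buffer back to the camera
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        } catch (IOException e) {
            System.out.println("ERRO IOException: " + e);
        }
    }
});
saver.start();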

Not able to save Camera Images in Android

I am trying to save an image from the camera, but it's not working.
The permission is granted.
The code is pasted below. I'm not sure why it is not saving the image; the code seems fine and was taken from the developer.android.com site. Please help!
public class CameraActivity extends Activity {
private Camera mCamera;
private CameraPreview mPreview;
private Display display;
private int PreviewSizeWidth = 640;
private int PreviewSizeHeight= 480;
public static final int MEDIA_TYPE_IMAGE = 1;
public static final int MEDIA_TYPE_VIDEO = 2;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_camera);
// Create an instance of Camera
mCamera = getCameraInstance();
// Create our Preview view and set it as the content of our activity.
mPreview = new CameraPreview(this, mCamera,getWindowManager().getDefaultDisplay());
FrameLayout preview = (FrameLayout) findViewById(R.id.camera_preview);
preview.addView(mPreview);
Button captureButton = (Button) findViewById(R.id.button1);
captureButton.setOnClickListener(
new View.OnClickListener() {
@Override
public void onClick(View v) {
// get an image from the camera
Log.d("Take","Picture");
mCamera.takePicture(null, null, mPicture);
// mCamera.stopPreview();
// mCamera.startPreview();
}
}
);
}
private PictureCallback mPicture = new PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
Log.d("TAG","Callabaclk start");
File pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE);
if (pictureFile == null){
Log.d("TAG", "Error creating media file, check storage permissions: ");
return;
}
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
Log.d("Ok",pictureFile.getAbsolutePath());
Bitmap bitmap = BitmapFactory.decodeByteArray(data, 0,data.length);
bitmap.compress(Bitmap.CompressFormat.JPEG, 90, fos);
//fos.write(data);
fos.flush();
fos.close();
Log.d("TAG","DONE");
} catch (FileNotFoundException e) {
Log.d("Test", "File not found: " + e.getMessage());
} catch (IOException e) {
Log.d("Test", "Error accessing file: " + e.getMessage());
}
}
};
public static Camera getCameraInstance(){
Camera c = null;
try {
Log.d("Test",Camera.getNumberOfCameras()+"");
c = Camera.open(); // attempt to get a Camera instance
}
catch (Exception e){
Log.d("test", e.toString());
// Camera is not available (in use or does not exist)
}
return c; // returns null if camera is unavailable
}
public static boolean checkCameraHardware(Context context) {
if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA)){
// this device has a camera
return true;
} else {
// no camera on this device
return false;
}
}
private static File getOutputMediaFile(int type){
// To be safe, you should check that the SDCard is mounted
// using Environment.getExternalStorageState() before doing this.
// File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
// Environment.DIRECTORY_PICTURES), "MyCameraApp");
File sampleDir = Environment.getExternalStorageDirectory();
File mediaStorageDir = new File(sampleDir.getPath()+File.separator+"Path");
// This location works best if you want the created images to be shared
// between applications and persist after your app has been uninstalled.
// Create the storage directory if it does not exist
if (! mediaStorageDir.exists()){
mediaStorageDir.mkdirs();
}
// Create a media file name
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
File mediaFile;
if (type == MEDIA_TYPE_IMAGE){
// mediaFile = new File(mediaStorageDir.getPath() + File.separator +"Path_"+ timeStamp + ".jpg");
mediaFile = new File(mediaStorageDir.getPath() ,"Path_"+ timeStamp + ".jpg");
} else if(type == MEDIA_TYPE_VIDEO) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"VID_"+ timeStamp + ".mp4");
} else {
return null;
}
return mediaFile;
}
}
When I do getPath() I get: 08-14 02:31:52.153: D/Ok(4279): /storage/emulated/0/Path/Path_20130814_023152.jpg
After a heck of a lot of research, I found I needed to add these lines after fos.close():
File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES), "Path");
sendBroadcast(new Intent(Intent.ACTION_MEDIA_MOUNTED,
Uri.parse("file://"+ mediaStorageDir)));
Thanks
Njoy
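As a note, non-system apps can no longer send the ACTION_MEDIA_MOUNTED broadcast on Android 4.4 and later, so a safer alternative (my suggestion, not part of the original answer) is to ask the media scanner to index just the saved file:
// Alternative to the broadcast: scan only the file that was written (call after fos.close()).
MediaScannerConnection.scanFile(
        this,                                           // a Context
        new String[]{ pictureFile.getAbsolutePath() },  // path(s) to index
        new String[]{ "image/jpeg" },                   // MIME type(s)
        new MediaScannerConnection.OnScanCompletedListener() {
            @Override
            public void onScanCompleted(String path, Uri uri) {
                Log.d("TAG", "Scanned " + path + " into " + uri);
            }
        });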
Check the permission:
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
Try to create a simple file:
String path = Environment.getExternalStorageDirectory() + "/" + "test.jpg";
File file = new File(path);
if (!file.exists()) {
try {
file.createNewFile();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
Then verify whether you can save your picture.
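Also, on Android 6.0 and later the manifest entry alone is not enough; WRITE_EXTERNAL_STORAGE must also be granted at runtime (this check is my addition, not from the original answer):
// Runtime permission check (uses the support/AndroidX ContextCompat and ActivityCompat helpers).
if (ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE)
        != PackageManager.PERMISSION_GRANTED) {
    ActivityCompat.requestPermissions(this,
            new String[]{ Manifest.permission.WRITE_EXTERNAL_STORAGE },
            1); // arbitrary request code; the result arrives in onRequestPermissionsResult()
}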

Retrieving pixel array from image taken with a camera

I am working on an app that needs to access an array of pixels from a picture after it is taken. The main Activity is below. I have a good amount of Java experience but extremely limited experience with images beyond displaying them on the screen. I see the byte array being passed to the picture callback method, but I do not know how it is formatted. How can I get a pixel array that includes the RGB components from the captured image? I tried to find this through the Stack Overflow forums; however, I got a few hundred pages of results and only searched through the first 10 or so, so I am sorry if this has already been asked and I just did not see it.
public class ConverterActivity extends Activity
{
private Camera mCamera;
private CameraPreview mPreview;
private PictureCallback mPicture = new PictureCallback() {
private String TAG;
@Override
public void onPictureTaken(byte[] data, Camera camera) {
File pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE);
if (pictureFile == null){
// Log.d(TAG, "Error creating media file, check storage permissions: " +
// e.getMessage());
return;
}
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(data);
fos.close();
} catch (FileNotFoundException e) {
Log.d(TAG, "File not found: " + e.getMessage());
} catch (IOException e) {
Log.d(TAG, "Error accessing file: " + e.getMessage());
}
}
};
@Override
public void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
// Add a listener to the Capture button
Button captureButton = (Button) findViewById(R.id.button_capture);
captureButton.setOnClickListener(
new View.OnClickListener() {
#Override
public void onClick(View v) {
// get an image from the camera
mCamera.takePicture(null, null, mPicture);
}
}
);
// Create an instance of Camera
mCamera = Camera.open(this.getBackCamera());
// Create our Preview view and set it as the content of our activity.
mPreview = new CameraPreview(this, mCamera);
FrameLayout preview = (FrameLayout) findViewById(R.id.camera_preview);
preview.addView(mPreview);
}
@Override
protected void onPause()
{
super.onPause();
releaseCamera(); // release the camera immediately on pause event
}
private void releaseCamera(){
if (mCamera != null){
mCamera.release(); // release the camera for other applications
mCamera = null;
}
}
public static final int MEDIA_TYPE_IMAGE = 1;
public static final int MEDIA_TYPE_VIDEO = 2;
/** Create a file Uri for saving an image or video */
private static Uri getOutputMediaFileUri(int type){
return Uri.fromFile(getOutputMediaFile(type));
}
/** Create a File for saving an image or video */
private static File getOutputMediaFile(int type){
// To be safe, you should check that the SDCard is mounted
// using Environment.getExternalStorageState() before doing this.
File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES), "MyCameraApp");
// This location works best if you want the created images to be shared
// between applications and persist after your app has been uninstalled.
// Create the storage directory if it does not exist
if (! mediaStorageDir.exists()){
if (! mediaStorageDir.mkdirs()){
Log.d("MyCameraApp", "failed to create directory");
return null;
}
}
// Create a media file name
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
File mediaFile;
if (type == MEDIA_TYPE_IMAGE){
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"IMG_"+ timeStamp + ".jpg");
} else if(type == MEDIA_TYPE_VIDEO) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"VID_"+ timeStamp + ".mp4");
} else {
return null;
}
return mediaFile;
}
public int getBackCamera()
{
int numCameras = Camera.getNumberOfCameras();
CameraInfo cInfo = new CameraInfo();
for (int i = 0; i < numCameras; i++)
{
Camera.getCameraInfo(i, cInfo);
if (cInfo.facing == CameraInfo.CAMERA_FACING_BACK)
{
return i;
}
}
return -1;
}
}
If you take the picture using code like this:
imgFile = new File(Environment.getExternalStorageDirectory () + "/somefolder/" + name + ".jpg");
String fileName = imgFile.getAbsolutePath();
Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
intent.putExtra(android.provider.MediaStore.EXTRA_OUTPUT, Uri.fromFile(new File(fileName)));
startActivityForResult(intent, REQUEST_FROM_CAMERA);
Then, when you get the result back from this intent, you should be able to use code like this to access the bitmap:
if (imgFile.exists()) {
String fileName = imgFile.getAbsolutePath();
BitmapFactory.Options opts = new BitmapFactory.Options();
Bitmap bm;
opts.inJustDecodeBounds = false;
bm = BitmapFactory.decodeFile(fileName, opts);
return bm;
}
else return null;
Then you can use Bitmap tools, such as compressing to a stream and then converting to a byte[]:
ByteArrayOutputStream stream = new ByteArrayOutputStream();
bm.compress(Bitmap.CompressFormat.JPEG, 100, stream); // 100 = max quality
byte[] byteArray = stream.toByteArray();
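To get at the actual RGB components the question asks for, the decoded Bitmap can be read into an int array with getPixels(); the snippet below is a small sketch added here, using the bm variable from the answer above.
// Pull the packed ARGB pixels out of the bitmap and split one of them into channels.
int width = bm.getWidth();
int height = bm.getHeight();
int[] pixels = new int[width * height];
bm.getPixels(pixels, 0, width, 0, 0, width, height);   // row-major: index = y * width + x

int p = pixels[0];
int r = Color.red(p);     // 0..255
int g = Color.green(p);
int b = Color.blue(p);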

How to capture and save an image using custom camera in Android?

How do I capture an image with a custom camera and then save that image in Android?
Please see the answer below.
Custom_CameraActivity.java
public class Custom_CameraActivity extends Activity {
private Camera mCamera;
private CameraPreview mCameraPreview;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
mCamera = getCameraInstance();
mCameraPreview = new CameraPreview(this, mCamera);
FrameLayout preview = (FrameLayout) findViewById(R.id.camera_preview);
preview.addView(mCameraPreview);
Button captureButton = (Button) findViewById(R.id.button_capture);
captureButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mCamera.takePicture(null, null, mPicture);
}
});
}
/**
* Helper method to access the camera returns null if it cannot get the
* camera or does not exist
*
* @return
*/
private Camera getCameraInstance() {
Camera camera = null;
try {
camera = Camera.open();
} catch (Exception e) {
// cannot get camera or does not exist
}
return camera;
}
PictureCallback mPicture = new PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
File pictureFile = getOutputMediaFile();
if (pictureFile == null) {
return;
}
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(data);
fos.close();
} catch (FileNotFoundException e) {
} catch (IOException e) {
}
}
};
private static File getOutputMediaFile() {
File mediaStorageDir = new File(
Environment
.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES),
"MyCameraApp");
if (!mediaStorageDir.exists()) {
if (!mediaStorageDir.mkdirs()) {
Log.d("MyCameraApp", "failed to create directory");
return null;
}
}
// Create a media file name
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss")
.format(new Date());
File mediaFile;
mediaFile = new File(mediaStorageDir.getPath() + File.separator
+ "IMG_" + timeStamp + ".jpg");
return mediaFile;
}
}
CameraPreview.java
public class CameraPreview extends SurfaceView implements
SurfaceHolder.Callback {
private SurfaceHolder mSurfaceHolder;
private Camera mCamera;
// Constructor that obtains context and camera
@SuppressWarnings("deprecation")
public CameraPreview(Context context, Camera camera) {
super(context);
this.mCamera = camera;
this.mSurfaceHolder = this.getHolder();
this.mSurfaceHolder.addCallback(this);
this.mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
try {
mCamera.setPreviewDisplay(surfaceHolder);
mCamera.startPreview();
} catch (IOException e) {
// left blank for now
}
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
mCamera.stopPreview();
mCamera.release();
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int format,
int width, int height) {
// start preview with new settings
try {
mCamera.setPreviewDisplay(surfaceHolder);
mCamera.startPreview();
} catch (Exception e) {
// intentionally left blank for a test
}
}
}
main.xml
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:orientation="horizontal" >
<FrameLayout
android:id="#+id/camera_preview"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:layout_weight="1" />
<Button
android:id="#+id/button_capture"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:text="Capture" />
</LinearLayout>
Add the lines below to your AndroidManifest.xml file:
<uses-feature android:name="android.hardware.camera" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
showbookimage.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// create intent with ACTION_IMAGE_CAPTURE action
Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
/**
Here TAKE_PICTURE is the unique integer request code; you can pass it any integer
**/
// start camera activity
startActivityForResult(intent, TAKE_PICTURE);
}
}
);
Then you can give the image a file name as follows, convert it into a bitmap, and later write it to a file:
private void createImageFile(Bitmap bitmap) throws IOException {
// Create an image file name
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
bitmap.compress(Bitmap.CompressFormat.JPEG, 40, bytes);
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
String imageFileName = "JPEG_" + timeStamp + "_";
File storageDir = Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES);
File image = File.createTempFile(
imageFileName, /* prefix */
".jpg", /* suffix */
storageDir /* directory */
);
FileOutputStream stream = new FileOutputStream(image);
stream.write(bytes.toByteArray());
stream.close();
// Save a file: path for use with ACTION_VIEW intents
mCurrentPhotoPath = "file:" + image.getAbsolutePath();
fileUri = image.getAbsolutePath();
Picasso.with(getActivity()).load(image).into(showbookimage);
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent intent) {
if (requestCode == TAKE_PICTURE && resultCode== Activity.RESULT_OK && intent != null){
// get bundle
Bundle extras = intent.getExtras();
// get
bitMap = (Bitmap) extras.get("data");
// showbookimage.setImageBitmap(bitMap);
try {
createImageFile(bitMap);
} catch (IOException e) {
e.printStackTrace();
}
}
}
Use Picasso to display the images; it is rather fast.
