How can I enable my camera to take a picture programmatically in Android?

I do not want my app to call the native Camera app. All I want is for my app to take pictures programmatically when it starts, without any user interaction or involvement.
Here is my code:
public class CameraActivity extends Activity implements SurfaceHolder.Callback{
Camera camera;
PictureCallback rawCallback;
ShutterCallback shutterCallback;
PictureCallback jpegCallback;
ImageView view;
boolean inPreview = false;
#SuppressLint("NewApi")
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
view = (ImageView)findViewById(R.id.pic);
Log.e("Camera Status: ",String.valueOf(checkCameraHardware(getApplicationContext())));
Log.e("Number Of Cameras: ",String.valueOf(Camera.getNumberOfCameras()));
start_camera();
captureImage();
rawCallback = new PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
Log.d("Log", "onPictureTaken - raw");
}
};
/** Handles data for jpeg picture */
shutterCallback = new ShutterCallback() {
public void onShutter() {
Log.i("Log", "onShutter'd");
}
};
jpegCallback = new PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
//Set the Image to ImageView
Bitmap bitmapPicture
= BitmapFactory.decodeByteArray(data, 0, data.length);
//view.setImageBitmap(bitmapPicture);
//Save the Image to SD Card
FileOutputStream outStream = null;
try {
outStream = new FileOutputStream(String.format(
"/sdcard/%d.jpg", System.currentTimeMillis()));
outStream.write(data);
outStream.close();
Log.d("Log", "onPictureTaken - wrote bytes: " + data.length);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
}
Log.d("Log", "onPictureTaken - jpeg");
/**
* The two lines below are used to refresh the SurfaceView.
* It works more quickly than the default refresh.
**/
stop_camera();
start_camera();
}
};
//start_camera();
}
/** Check if this device has a camera */
private boolean checkCameraHardware(Context context) {
if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA)){
// this device has a camera
return true;
} else {
// no camera on this device
return false;
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
public static Camera getCameraInstance(){
Camera c = null;
try {
c = Camera.open(); // attempt to get a Camera instance
}
catch (Exception e){
// Camera is not available (in use or does not exist)
}
return c; // returns null if camera is unavailable
}
private void start_camera()
{
try{
camera = Camera.open();
Log.e("Camera Open Status: ", "Camera opened successfully");
}catch(RuntimeException e){
Log.e("Camera Initialization:", "init_camera: " + e);
return;
}
Camera.Parameters param;
param = camera.getParameters();
//modify parameter
param.setPreviewFrameRate(20);
param.setPreviewSize(176, 144);
camera.setParameters(param);
try {
//camera.setPreviewDisplay(surfaceHolder);
camera.startPreview();
//camera.takePicture(shutter, raw, jpeg)
} catch (Exception e) {
Log.e("Camera Initialization:", "init_camera: " + e);
return;
}
inPreview=true;
}
private void captureImage() {
// TODO Auto-generated method stub
AudioManager mgr = (AudioManager)getSystemService(Context.AUDIO_SERVICE);
mgr.setStreamMute(AudioManager.STREAM_SYSTEM, true);
camera.takePicture(shutterCallback, rawCallback, jpegCallback);
Log.e("Picture Status: ","picture taken successfully");
final Handler handler = new Handler();
Timer t = new Timer();
t.schedule(new TimerTask() {
public void run() {
handler.post(new Runnable() {
public void run() {
AudioManager mgr = (AudioManager)getSystemService(Context.AUDIO_SERVICE);
mgr.setStreamMute(AudioManager.STREAM_SYSTEM, false);
}
});
}
}, 1000);
}
private void stop_camera()
{
camera.stopPreview();
camera.release();
inPreview = false;
}
@Override
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) {
// TODO Auto-generated method stub
}
@Override
public void surfaceCreated(SurfaceHolder arg0) {
// TODO Auto-generated method stub
}
@Override
public void surfaceDestroyed(SurfaceHolder arg0) {
// TODO Auto-generated method stub
}
}

Download the project from here
Copy the CameraLibrary into your workspace
Add these to your manifest:
<uses-sdk
android:minSdkVersion="10"
android:targetSdkVersion="10" />
<uses-feature android:name="android.hardware.camera"/>
<uses-permission android:name="android.permission.CAMERA"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<activity android:name="com.girish.cameraLibrary.CameraClass"></activity>
Now, in your activity, implement the com.girish.cameraLibrary.OnPictureTaken interface and override the pictureTaken(Bitmap bitmap, File file) method, which provides you with the Bitmap and the File.
Now create an instance of CustomCamera:
private CustomCamera mCustomCamera;
mCustomCamera = new CustomCamera(MainActivity.this);
mCustomCamera.setPictureTakenListner(this);
//To start the custom back camera use this
mCustomCamera.startCamera();
//To send an intent to applications who are listening to Camera request
mCustomCamera.sendCameraIntent();
//To start the front camera use this
mCustomCamera.startCameraFront();
Now you can access the image in the pictureTaken(Bitmap bitmap, File file) method, so just use:
imageview.setImageBitmap(bitmap);
EDIT
Suppose your main class is called MainActivity, then write it like this:
public class MainActivity extends Activity implements OnPictureTaken {
.....
.....
.....
@Override
protected void onCreate(Bundle savedInstanceState) {
.....
.....
.....
//This is where you get the picture
@Override
public void pictureTaken(Bitmap bitmap, File file) {
imgCapture.setImageBitmap(bitmap);
txtPath.setText(file.getAbsolutePath());
}
Check the sample file as mentioned here
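For completeness, here is roughly how the pieces above fit together in one activity. This is only a sketch based on the snippets shown: the CameraLibrary API (CustomCamera, setPictureTakenListner, startCamera, the OnPictureTaken interface) is assumed to behave exactly as described above, and the view IDs imgCapture and txtPath are invented for illustration.
import java.io.File;

import android.app.Activity;
import android.graphics.Bitmap;
import android.os.Bundle;
import android.widget.ImageView;
import android.widget.TextView;

// CustomCamera is assumed to live in the same library package as OnPictureTaken.
import com.girish.cameraLibrary.CustomCamera;
import com.girish.cameraLibrary.OnPictureTaken;

public class MainActivity extends Activity implements OnPictureTaken {
    private CustomCamera mCustomCamera;
    private ImageView imgCapture; // assumed view ID in activity_main.xml
    private TextView txtPath;     // assumed view ID in activity_main.xml

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        imgCapture = (ImageView) findViewById(R.id.imgCapture);
        txtPath = (TextView) findViewById(R.id.txtPath);

        // Create the camera wrapper, register this activity as the listener,
        // and start the back camera right away (no user interaction needed).
        mCustomCamera = new CustomCamera(MainActivity.this);
        mCustomCamera.setPictureTakenListner(this);
        mCustomCamera.startCamera();
    }

    // Called by the library once the picture has been taken.
    @Override
    public void pictureTaken(Bitmap bitmap, File file) {
        imgCapture.setImageBitmap(bitmap);
        txtPath.setText(file.getAbsolutePath());
    }
}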

Related

Tried capturing multiple photos in Android but didn't work

I'm a newbie in Android and I've been trying to make a camera application that is supposed to capture 5 images when the camera button is clicked, but the code I'm working on doesn't seem to work. Can anyone help, please? So far, this is the code:
public class CameraDemo extends Activity {
private static final String TAG = "CameraDemo";
Camera camera;
Preview preview;
Button buttonClick;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
preview = new Preview(this);
((FrameLayout) findViewById(R.id.preview)).addView(preview);
buttonClick = (Button) findViewById(R.id.buttonClick);
buttonClick.setOnClickListener( new OnClickListener() {
public void onClick(View v) {
int count = 0;
while(count < 5){
preview.camera.takePicture(shutterCallback, rawCallback, jpegCallback);
count++;
try {
wait(1000);
} catch (InterruptedException exception) {
exception.printStackTrace();
}
}
}
});
Log.d(TAG, "onCreate'd");
}
ShutterCallback shutterCallback = new ShutterCallback() {
public void onShutter() {
Log.d(TAG, "onShutter'd");
}
};
/** Handles data for raw picture */
PictureCallback rawCallback = new PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
Log.d(TAG, "onPictureTaken - raw");
}
};
/** Handles data for jpeg picture */
PictureCallback jpegCallback = new PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
FileOutputStream outStream = null;
try {
// write to local sandbox file system
// outStream = CameraDemo.this.openFileOutput(String.format("%d.jpg", System.currentTimeMillis()), 0);
// Or write to sdcard
outStream = new FileOutputStream(String.format("/sdcard/%d.jpg", System.currentTimeMillis()));
outStream.write(data);
outStream.close();
Log.d(TAG, "onPictureTaken - wrote bytes: " + data.length);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
}
Log.d(TAG, "onPictureTaken - jpeg");
}
};
}
And this is the other class named Preview:
class Preview extends SurfaceView implements SurfaceHolder.Callback {
private static final String TAG = "Preview";
SurfaceHolder mHolder;
public Camera camera;
Preview(Context context) {
super(context);
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, acquire the camera and tell it where
// to draw.
camera = Camera.open();
try {
camera.setPreviewDisplay(holder);
camera.setPreviewCallback(new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera arg1) {
FileOutputStream outStream = null;
try {
outStream = new FileOutputStream(String.format("/sdcard/%d.jpg", System.currentTimeMillis()));
outStream.write(data);
outStream.close();
Log.d(TAG, "onPreviewFrame - wrote bytes: " + data.length);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
}
Preview.this.invalidate();
}
});
} catch (IOException e) {
e.printStackTrace();
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// Surface will be destroyed when we return, so stop the preview.
// Because the CameraDevice object is not a shared resource, it's very
// important to release it when the activity is paused.
camera.stopPreview();
camera = null;
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
// Now that the size is known, set up the camera parameters and begin
// the preview.
Camera.Parameters parameters = camera.getParameters();
parameters.setPreviewSize(w, h);
camera.setParameters(parameters);
camera.startPreview();
}
@Override
public void draw(Canvas canvas) {
super.draw(canvas);
Paint p= new Paint(Color.RED);
Log.d(TAG,"draw");
canvas.drawText("PREVIEW", canvas.getWidth()/2, canvas.getHeight()/2, p );
}
}
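Since no answer is included here, two notes that may help: wait(1000) inside onClick will throw an IllegalMonitorStateException (the listener's monitor is not held), and takePicture() stops the preview, so the camera is not ready for the next shot until the preview is restarted. One common pattern, sketched below under those assumptions, is to chain the captures from the JPEG callback instead of looping on the UI thread; the fragment plugs into the CameraDemo class above, and shotsTaken/SHOTS_WANTED/chainedJpegCallback are invented names.
// Sketch only: chain the five shots from the JPEG callback instead of a while
// loop with wait(). Reuses the preview, shutterCallback and rawCallback fields
// from the CameraDemo class above.
private static final int SHOTS_WANTED = 5;
private int shotsTaken = 0;

private void takeNextPicture() {
    preview.camera.takePicture(shutterCallback, rawCallback, chainedJpegCallback);
}

private final PictureCallback chainedJpegCallback = new PictureCallback() {
    public void onPictureTaken(byte[] data, Camera camera) {
        // ... write `data` to a file exactly as in jpegCallback above ...
        shotsTaken++;
        if (shotsTaken < SHOTS_WANTED) {
            // takePicture() stops the preview; restart it before the next shot.
            // Some devices may also need a short delay here before the next capture.
            camera.startPreview();
            takeNextPicture();
        }
    }
};
The button's onClick would then just reset shotsTaken to 0 and call takeNextPicture() once.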

Android camera byte array

I am trying to get the camera working in Android. I successfully created an app which can show camera images in a SurfaceView.
Now I want to receive a byte array from the camera. Is it possible to receive a byte array from the camera?
public class AndroidCamera extends Activity implements SurfaceHolder.Callback {
TextView testView;
Camera camera;
SurfaceView surfaceView;
SurfaceHolder surfaceHolder;
PictureCallback rawCallback;
ShutterCallback shutterCallback;
private final String tag = "VideoServer";
Button start, stop;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
start = (Button) findViewById(R.id.startcamerapreview);
start.setOnClickListener(new Button.OnClickListener() {
public void onClick(View arg0) {
start_camera();
}
});
stop = (Button) findViewById(R.id.stopcamerapreview);
stop.setOnClickListener(new Button.OnClickListener() {
public void onClick(View arg0) {
stop_camera();
}
});
surfaceView = (SurfaceView) findViewById(R.id.surfaceview);
surfaceHolder = surfaceView.getHolder();
surfaceHolder.addCallback(this);
surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
rawCallback = new PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
Log.d("Log", "onPictureTaken - raw");
}
};
/** Handles data for jpeg picture */
shutterCallback = new ShutterCallback() {
public void onShutter() {
Log.i("Log", "onShutter'd");
}
};
}
public void CheckByteArray() {
camera.takePicture(null, null, new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
try {
Bitmap bmp = BitmapFactory.decodeResource(getResources(),
R.drawable.ic_launcher);
ByteArrayOutputStream stream = new ByteArrayOutputStream();
bmp.compress(Bitmap.CompressFormat.PNG, 100, stream);
byte[] byteArray = stream.toByteArray();
if (Arrays.equals(data, byteArray)) {
}
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
Toast.makeText(getApplicationContext(), "ERROR:" + e, 1)
.show();
}
}
});
}
private void start_camera() {
try {
camera = Camera.open();
} catch (RuntimeException e) {
Log.e(tag, "init_camera: " + e);
return;
}
Camera.Parameters param;
param = camera.getParameters();
// modify parameter
param.setPreviewFrameRate(20);
param.setPreviewSize(176, 144);
camera.setParameters(param);
try {
camera.setPreviewDisplay(surfaceHolder);
camera.startPreview();
CheckByteArray();
} catch (Exception e) {
Log.e(tag, "init_camera: " + e);
return;
}
}
private void stop_camera() {
camera.stopPreview();
camera.release();
}
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) {
// TODO Auto-generated method stub
}
public void surfaceCreated(SurfaceHolder holder) {
// TODO Auto-generated method stub
}
public void surfaceDestroyed(SurfaceHolder holder) {
// TODO Auto-generated method stub
}
}
I used this code to receive the byte array from the image. I want to compare the camera's byte array to the image's byte array. If the camera's byte array contains my image's byte array, then a simple 'hello' message needs to be shown.
To get the byte array of the taken photo, you need to call the takePicture method and pass the third listener:
camera.takePicture(
null, null, new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
//data is what you need
}
});
To compare byte arrays you can use the java.util.Arrays.equals() method, but I think you will never get equal arrays...
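To illustrate that last point: the JPEG bytes returned by takePicture() will never byte-equal a PNG-compressed copy of a drawable, so a more realistic (though still naive) comparison works on the decoded bitmaps. A sketch, assuming API 12+ for Bitmap.sameAs(); detecting whether one image "contains" another actually needs image-matching techniques (e.g. template matching), which goes well beyond any byte comparison.
camera.takePicture(null, null, new Camera.PictureCallback() {
    @Override
    public void onPictureTaken(byte[] data, Camera camera) {
        Bitmap shot = BitmapFactory.decodeByteArray(data, 0, data.length);
        Bitmap reference = BitmapFactory.decodeResource(getResources(), R.drawable.ic_launcher);

        // Bring both images to the same size so the pixel grids line up.
        Bitmap scaled = Bitmap.createScaledBitmap(
                shot, reference.getWidth(), reference.getHeight(), false);

        // sameAs() compares dimensions, config and every pixel (API 12+).
        // Exact equality is still very unlikely for a real camera shot.
        if (scaled.sameAs(reference)) {
            Toast.makeText(getApplicationContext(), "hello", Toast.LENGTH_SHORT).show();
        }
    }
});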

Camera.takePicture() gives no callback

SOLVED, SEE COMMENT ---
I never get a callback from Camera.takePicture(); I can see that in logcat.
What is missing? How do I make takePicture()... take a picture?!
Most of this is taken directly from the Android developers' camera guide. I want to take pictures programmatically, without any preview or user action. Using the built-in camera app works fine. SDK 16.
And in the manifest I have added:
<uses-permission android:name="android.permission.CAMERA" />
<uses-feature android:name="android.hardware.camera"
android:required="true" />
The code:
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
Button bTake = (Button)findViewById(R.id.b_take);
boolean boo = safeCameraOpen(camId);
Camera.Parameters parameters = mCamera.getParameters();
mCamera.setParameters(parameters);
bTake.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
mCamera.takePicture(shutter, null, null, picture);
Log.e(getString(R.string.app_name), "After takePicture");
}
});
}// END onCreate
/* Camera operations */
private ShutterCallback shutter = new ShutterCallback() {
@Override
public void onShutter() {
Log.e(getString(R.string.app_name), "onShutter");
}
};
private PictureCallback picture = new PictureCallback() {
@Override
public void onPictureTaken(byte[] arg0, Camera arg1) {
Log.e(getString(R.string.app_name), "onPicTaken");
}
};
/* Starting up and closing down*/
private boolean safeCameraOpen(int id) {
boolean qOpened = false;
try {
releaseCamera();
mCamera = Camera.open(id);
qOpened = (mCamera != null);
} catch (Exception e) {
Log.e(getString(R.string.app_name), "failed to open Camera");
e.printStackTrace();
}
return qOpened;
}
private void releaseCamera() {
if (mCamera != null) {
((Camera) mCamera).release();
mCamera = null;
Log.e(getString(R.string.app_name), "cam released");
}
}}
Try:
mCamera.takePicture(shutter, picture, picture);
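Beyond the callback order, the usual reason takePicture() stays silent is that no preview was ever started; the Camera documentation requires startPreview() before takePicture(). When no visible preview is wanted, a dummy SurfaceTexture (API 11+) can act as the preview target. This is only a guess at the asker's "SOLVED" comment, sketched with the field names from the question:
mCamera = Camera.open(camId);
try {
    // Off-screen preview target, so no SurfaceView or layout is required.
    // Needs: import android.graphics.SurfaceTexture;
    mCamera.setPreviewTexture(new SurfaceTexture(0));
    mCamera.startPreview();
    mCamera.takePicture(shutter, null, picture); // callbacks now fire
} catch (IOException e) {
    e.printStackTrace();
}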

Not able to view the picture using the Camera API in Android - screen is blank

I'm using the Camera API to take a picture and save it to the SD card.
But I'm not able to view the picture; the screen is blank.
Here is my code:
public class CameraDemo extends Activity {
private static final String TAG = "CameraDemo";
Camera camera;
CameraPreview preview;
Button buttonClick;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
preview = new CameraPreview(this);
((FrameLayout) findViewById(R.id.preview)).addView(preview);
buttonClick = (Button) findViewById(R.id.buttonClick);
buttonClick.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
preview.camera.takePicture(shutterCallback, rawCallback,
jpegCallback);
}
});
Log.d(TAG, "onCreate'd");
}
ShutterCallback shutterCallback = new ShutterCallback() {
public void onShutter() {
Log.d(TAG, "onShutter'd");
}
};
/** Handles data for raw picture */
PictureCallback rawCallback = new PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
Log.d(TAG, "onPictureTaken - raw");
}
};
/** Handles data for jpeg picture */
PictureCallback jpegCallback = new PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
FileOutputStream outStream = null;
try {
// write to local sandbox file system
// outStream =
// CameraDemo.this.openFileOutput(String.format("%d.jpg",
// System.currentTimeMillis()), 0);
// Or write to sdcard
outStream = new FileOutputStream(String.format(
"/sdcard/%d.jpg", System.currentTimeMillis()));
outStream.write(data);
outStream.close();
Log.d(TAG, "onPictureTaken - wrote bytes: " + data.length);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
}
Log.d(TAG, "onPictureTaken - jpeg");
}
};
}
And the CameraPreview class
CameraPreview(Context context) {
super(context);
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, acquire the camera and tell it where
// to draw.
camera = Camera.open();
try {
camera.setPreviewDisplay(holder);
camera.setPreviewCallback(new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera arg1) {
FileOutputStream outStream = null;
try {
outStream = new FileOutputStream(String.format(
"/sdcard/%d.jpg", System.currentTimeMillis()));
outStream.write(data);
outStream.close();
Log.d(TAG, "onPreviewFrame - wrote bytes: "
+ data.length);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
}
CameraPreview.this.invalidate();
}
});
} catch (IOException e) {
e.printStackTrace();
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// Surface will be destroyed when we return, so stop the preview.
// Because the CameraDevice object is not a shared resource, it's very
// important to release it when the activity is paused.
camera.stopPreview();
camera = null;
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
// Now that the size is known, set up the camera parameters and begin
// the preview.
Camera.Parameters parameters = camera.getParameters();
parameters.setPreviewSize(w, h);
camera.setParameters(parameters);
camera.startPreview();
}
@Override
public void draw(Canvas canvas) {
super.draw(canvas);
Paint p = new Paint(Color.RED);
Log.d(TAG, "draw");
canvas.drawText("PREVIEW", canvas.getWidth() / 2,
canvas.getHeight() / 2, p);
}
}
Help me!
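One likely contributor, noted here as an assumption since no answer is included: the preview callback above writes raw preview frames straight into .jpg files, but preview frames are YUV (NV21 by default), not JPEG, so those files cannot be opened by an image viewer; only the data passed to jpegCallback is real JPEG. A preview frame can be converted with YuvImage, roughly like this inside CameraPreview:
// Sketch only: encode a raw NV21 preview frame as a real JPEG before saving it.
// Needs: import android.graphics.ImageFormat; android.graphics.Rect; android.graphics.YuvImage;
public void onPreviewFrame(byte[] data, Camera camera) {
    Camera.Size size = camera.getParameters().getPreviewSize();
    YuvImage yuv = new YuvImage(data, ImageFormat.NV21, size.width, size.height, null);
    try {
        FileOutputStream out = new FileOutputStream(
                String.format("/sdcard/%d.jpg", System.currentTimeMillis()));
        // compressToJpeg writes a viewable JPEG of the whole frame.
        yuv.compressToJpeg(new Rect(0, 0, size.width, size.height), 90, out);
        out.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}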

Camera freezes after picture taken on Galaxy 7-inch tab but not 10-inch

I am developing a custom camera application and testing on two Galaxy tablets, one 7-inch and one 10-inch. The 10-inch works great, but on the 7-inch, when I take a picture, the camera preview freezes and logcat stops at the "CAMERA Snap, CLICKED" log in my btn_snap_pic onClick in my CustomCamera class, with no error. The app doesn't crash, it just hangs, and if I back out of it and open the app again I get a "fail to connect to camera". I assume this error occurs because when my app froze and I backed out, the camera never got released. Anyway, below are both my CustomCamera class and my CamLayer, which is my preview.
public class CustomCamera extends Activity{
String camFace ="back";
FrameLayout frame;
RelativeLayout rel;
CamLayer camPreview;
ImageView btn_snap_pic;
ImageView btn_switch_cam;
String TAG = "custom cam";
public void onCreate(Bundle savedInstanceState)
{
Bitmap btnSwitch = BitmapFactory.decodeResource(this.getResources(),
R.drawable.btn_switch_camera);
Bitmap btnSnap = BitmapFactory.decodeResource(this.getResources(),
R.drawable.btn_take_picture);
super.onCreate(savedInstanceState);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
//Set Screen Orientation
this.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
RelativeLayout.LayoutParams buttonS = new RelativeLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT,
ViewGroup.LayoutParams.WRAP_CONTENT);
RelativeLayout.LayoutParams buttonT = new RelativeLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT,
ViewGroup.LayoutParams.WRAP_CONTENT);
RelativeLayout.LayoutParams cam = new RelativeLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT,
ViewGroup.LayoutParams.WRAP_CONTENT);
cam.addRule(RelativeLayout.BELOW);
buttonS.addRule(RelativeLayout.ALIGN_PARENT_RIGHT);
buttonT.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM);
buttonT.addRule(RelativeLayout.ALIGN_PARENT_RIGHT);
try{
//Create Intance of Camera
camPreview = new CamLayer(this.getApplicationContext(),camFace);
//Relative view for everything
rel = new RelativeLayout(this);
// set as main view
setContentView(rel);
//FrameLayOut for camera
frame = new FrameLayout(this);
// add Camera to view
frame.setLayoutParams(cam);
frame.addView(camPreview);
rel.addView(frame);
btn_switch_cam = new ImageView (this);
btn_switch_cam.setImageBitmap(btnSwitch);
btn_switch_cam.setLayoutParams(buttonS);
buttonS.rightMargin = 25;
buttonS.topMargin = 25;
rel.addView(btn_switch_cam);
btn_switch_cam.setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
Log.v("CAMERA Switch", "CLICKED");
//frame.removeView(camPreview);
if(camFace.equals("front")){
camFace = "back";
}else{
camFace = "front";
}
//camPreview.stopCamera();
frame.removeView(camPreview);
restartCam();
//camPreview.switchCam(camFace);
}
});
btn_snap_pic = new ImageView(this);
btn_snap_pic.setImageBitmap(btnSnap);
btn_snap_pic.setLayoutParams(buttonT);
buttonT.rightMargin = 25;
buttonT.bottomMargin = 25;
rel.addView(btn_snap_pic);
btn_snap_pic.setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
Log.v("CAMERA Snap", "CLICKED");
camPreview.camera.takePicture(shutterCallback, rawCallback,
jpegCallback);
}
});
} catch(Exception e){}
}
public void restartCam(){
camPreview = new CamLayer(this.getApplicationContext(),camFace);
frame.addView(camPreview);
}
ShutterCallback shutterCallback = new ShutterCallback() {
public void onShutter() {
Log.d(TAG, "onShutter'd");
}
};
/** Handles data for raw picture */
PictureCallback rawCallback = new PictureCallback() {
public void onPictureTaken(byte[] data, android.hardware.Camera camera) {
Log.d(TAG, "onPictureTaken - raw");
}
};
/** Handles data for jpeg picture */
PictureCallback jpegCallback = new PictureCallback() {
public void onPictureTaken(byte[] data, android.hardware.Camera camera) {
FileOutputStream outStream = null;
try {
outStream = new FileOutputStream(String.format(
"/sdcard/LC/images/%d.jpg", System.currentTimeMillis()));
outStream.write(data);
outStream.close();
Log.d(TAG, "onPictureTaken - wrote bytes: " + data.length);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
}
Log.d(TAG, "onPictureTaken - jpeg");
}
};
}
And the CamLayer:
public class CamLayer extends SurfaceView implements SurfaceHolder.Callback {
Camera camera;
SurfaceHolder previewHolder;
String camID;
private static final String TAG = "Cam Preview";
public CamLayer(Context context, String facing)
{
super(context);
camID = facing;
previewHolder = this.getHolder();
previewHolder.addCallback(this);
previewHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder holder) {
startCamera();
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height)
{
Parameters params = camera.getParameters();
//params.setPreviewSize(width, height);
//params.setPictureFormat(PixelFormat.JPEG);
camera.setParameters(params);
camera.startPreview();
}
public void surfaceDestroyed(SurfaceHolder arg0)
{
//camera.stopPreview();
//camera.release();
stopCamera();
}
public void onResume() {
//camera.startPreview();
startCamera();
}
public void onPause() {
// TODO Auto-generated method stub
//camera.stopPreview();
stopCamera();
}
public void switchCam(String newCamId) {
/*camera.stopPreview();
//camera.release();
if(camID.equals("front")){
camera.open(Camera.CameraInfo.CAMERA_FACING_FRONT);
}else{
camera.open(Camera.CameraInfo.CAMERA_FACING_BACK);
}*/
//camera.startPreview();
//camera=Camera.open(Camera.CameraInfo.CAMERA_FACING_BACK);
stopCamera();
camID = newCamId;
startCamera();
}
public void stopCamera(){
System.out.println("stopCamera method");
if (camera != null){
camera.stopPreview();
camera.setPreviewCallback(null);
camera.release();
camera = null;
previewHolder.removeCallback(this);
previewHolder = null;
}
}
private void startCamera(){
if(camID.equals("front")){
camera=Camera.open(Camera.CameraInfo.CAMERA_FACING_FRONT);
}else{
camera=Camera.open(Camera.CameraInfo.CAMERA_FACING_BACK);
}
try {
camera.setPreviewDisplay(previewHolder);
camera.setPreviewCallback(new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera arg1) {
//FileOutputStream outStream = null;
/*try {
//outStream = new FileOutputStream(String.format(
//"/sdcard/%d.jpg", System.currentTimeMillis()));
//outStream.write(data);
//outStream.close();
Log.d(TAG, "onPreviewFrame - wrote bytes: "
+ data.length);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
}*/
//CamLayer.this.invalidate();
}
});
}
catch (Throwable e){ Log.w("TAG,", "failed create surface !?!?"); }
}
public void draw(Canvas canvas) {
super.draw(canvas);
Paint p = new Paint(Color.RED);
Log.d(TAG, "draw");
canvas.drawText("PREVIEW", canvas.getWidth() / 2,
canvas.getHeight() / 2, p);
}
}
This thread says raw is not supported:
https://groups.google.com/forum/?fromgroups#!topic/android-developers/D43AdrbP9oE
My guess is that a raw image would consume too much memory. Other than that, I'm also disappointed it's not supported.
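As for the frozen preview itself, a common cause (an assumption, not from the linked thread) is that takePicture() stops the preview and it is never restarted, and the "fail to connect to camera" on relaunch fits the camera never being released. A minimal sketch of the change to jpegCallback in the CustomCamera class above:
PictureCallback jpegCallback = new PictureCallback() {
    public void onPictureTaken(byte[] data, android.hardware.Camera camera) {
        // ... write `data` to /sdcard/LC/images/ as in the original callback ...
        camera.startPreview(); // takePicture() stopped the preview; resume it
        Log.d(TAG, "onPictureTaken - jpeg, preview restarted");
    }
};
Wiring CamLayer's existing onPause()/stopCamera() into the activity's onPause() should also prevent the "fail to connect to camera" error after backing out.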
