I found some tutorials to use the camera. There are no errors on the emulator, but it crashes on the device, why? I'm just creating simple code to preview the picture.
The device I'm testing is the HTC Wildfire Android 2.2
Mainclass
private Camera camera;
private boolean isPreviewRunning = false;
private SimpleDateFormat timeStampFormat = new SimpleDateFormat("yyyyMMddHHmmssSS");
private SurfaceView surfaceView;
private SurfaceHolder surfaceHolder;
public void onCreate(Bundle icicle)
{
super.onCreate(icicle);
Log.e(getClass().getSimpleName(), "onCreate");
getWindow().setFormat(PixelFormat.TRANSLUCENT);
setContentView(R.layout.main);
surfaceView = (SurfaceView)findViewById(R.id.surface);
surfaceHolder = surfaceView.getHolder();
surfaceHolder.addCallback(this);
surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
#Override
protected void onRestoreInstanceState(Bundle savedInstanceState)
{
super.onRestoreInstanceState(savedInstanceState);
}
Camera.PictureCallback mPictureCallbackRaw = new Camera.PictureCallback()
{
public void onPictureTaken(byte[] data, Camera c)
{
Log.e(getClass().getSimpleName(), "PICTURE CALLBACK RAW: " + data);
camera.startPreview();
}
};
Camera.PictureCallback mPictureCallbackJpeg= new Camera.PictureCallback()
{
public void onPictureTaken(byte[] data, Camera c)
{
Log.e(getClass().getSimpleName(), "PICTURE CALLBACK JPEG: data.length = " + data);
}
};
Camera.ShutterCallback mShutterCallback = new Camera.ShutterCallback()
{
public void onShutter()
{
Log.e(getClass().getSimpleName(), "SHUTTER CALLBACK");
}
};
public boolean onKeyDown(int keyCode, KeyEvent event)
{
ImageCaptureCallback iccb = null;
if(keyCode == KeyEvent.KEYCODE_DPAD_CENTER)
{
try
{
String filename = timeStampFormat.format(new Date());
ContentValues values = new ContentValues();
values.put(Media.TITLE, filename);
values.put(Media.DESCRIPTION, "Image capture by camera");
Uri uri = getContentResolver().insert(Media.EXTERNAL_CONTENT_URI, values);
//String filename = timeStampFormat.format(new Date());
iccb = new ImageCaptureCallback( getContentResolver().openOutputStream(uri));
}
catch(Exception ex )
{
ex.printStackTrace();
Log.e(getClass().getSimpleName(), ex.getMessage(), ex);
}
}
if (keyCode == KeyEvent.KEYCODE_BACK)
{
return super.onKeyDown(keyCode, event);
}
if (keyCode == KeyEvent.KEYCODE_DPAD_CENTER)
{
camera.takePicture(mShutterCallback, mPictureCallbackRaw, iccb);
return true;
}
return false;
}
protected void onResume()
{
Log.e(getClass().getSimpleName(), "onResume");
super.onResume();
}
protected void onSaveInstanceState(Bundle outState)
{
super.onSaveInstanceState(outState);
}
protected void onStop()
{
Log.e(getClass().getSimpleName(), "onStop");
super.onStop();
}
public void surfaceCreated(SurfaceHolder holder)
{
Log.e(getClass().getSimpleName(), "surfaceCreated");
camera = Camera.open();
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h)
{
Log.e(getClass().getSimpleName(), "surfaceChanged");
if (isPreviewRunning)
{
camera.stopPreview();
}
Camera.Parameters p = camera.getParameters();
p.setPreviewSize(w, h);
camera.setParameters(p);
try
{
camera.setPreviewDisplay(holder);
}
catch (IOException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
camera.startPreview();
isPreviewRunning = true;
}
public void surfaceDestroyed(SurfaceHolder holder)
{
Log.e(getClass().getSimpleName(), "surfaceDestroyed");
camera.stopPreview();
isPreviewRunning = false;
camera.release();
}
}
ImageCaptureCallBack
private OutputStream filoutputStream;
public ImageCaptureCallback(OutputStream filoutputStream)
{
this.filoutputStream = filoutputStream;
}
#Override
public void onPictureTaken(byte[] data, Camera camera)
{
try
{
Log.v(getClass().getSimpleName(), "onPictureTaken=" + data + " length = " + data.length);
filoutputStream.write(data);
filoutputStream.flush();
filoutputStream.close();
}
catch(Exception ex)
{
ex.printStackTrace();
}
}
}
and the manifest
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.photo.test"
android:versionCode="1"
android:versionName="1.0" >
<uses-sdk android:minSdkVersion="8" />
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<application
android:icon="#drawable/ic_launcher"
android:label="#string/app_name" >
<activity
android:label="#string/app_name"
android:name=".Main" >
<intent-filter >
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
camera preview is working very differently on different devices. We had a lot of issues in mixare augmented reality engine.
First, it's better to use a compatibility class to let your code work on older devices. You can see the code here:
https://github.com/mixare/mixare/blob/master/src/org/mixare/Compatibility.java
Then, as you can see here, it's your responsibility to choose the correct one among the list of the previews provided by the system:
https://github.com/mixare/mixare/blob/master/src/org/mixare/MixView.java at line 871 and ongoing.
List<Camera.Size> supportedSizes = null;
//On older devices (<1.6) the following will fail
//the camera will work nevertheless
supportedSizes = Compatibility.getSupportedPreviewSizes(parameters);
//preview form factor
float ff = (float)w/h;
Log.d("Mixare", "Screen res: w:"+ w + " h:" + h + " aspect ratio:" + ff);
//holder for the best form factor and size
float bff = 0;
int bestw = 0;
int besth = 0;
Iterator<Camera.Size> itr = supportedSizes.iterator();
//we look for the best preview size, it has to be the closest to the
//screen form factor, and be less wide than the screen itself
//the latter requirement is because the HTC Hero with update 2.1 will
//report camera preview sizes larger than the screen, and it will fail
//to initialize the camera
//other devices could work with previews larger than the screen though
while(itr.hasNext()) {
Camera.Size element = itr.next();
//current form factor
float cff = (float)element.width/element.height;
//check if the current element is a candidate to replace the best match so far
//current form factor should be closer to the bff
//preview width should be less than screen width
//preview width should be more than current bestw
//this combination will ensure that the highest resolution will win
Log.d("Mixare", "Candidate camera element: w:"+ element.width + " h:" + element.height + " aspect ratio:" + cff);
if ((ff-cff <= ff-bff) && (element.width <= w) && (element.width >= bestw)) {
bff=cff;
bestw = element.width;
besth = element.height;
}
}
Log.d("Mixare", "Chosen camera element: w:"+ bestw + " h:" + besth + " aspect ratio:" + bff);
//Some Samsung phones will end up with bestw and besth = 0 because their minimum preview size is bigger then the screen size.
//In this case, we use the default values: 480x320
if ((bestw == 0) || (besth == 0)){
Log.d("Mixare", "Using default camera parameters!");
bestw = 480;
besth = 320;
}
parameters.setPreviewSize(bestw, besth);
HTH
Daniele
Related
I'm using FFmpegFrameRecorder for video broadcast.Problem is audio comes faster than video frame.I'm using following code but unable to produce complete video there is problem in audio video timestamp.
Java Code:
import static com.googlecode.javacv.cpp.opencv_core.IPL_DEPTH_8U;
import java.io.IOException;
import java.nio.ShortBuffer;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.PowerManager;
import android.util.Log;
import android.view.KeyEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup.LayoutParams;
import android.widget.Button;
import android.widget.LinearLayout;
import com.googlecode.javacv.FFmpegFrameRecorder;
import com.googlecode.javacv.cpp.opencv_core.IplImage;
public class MainActivity extends Activity implements OnClickListener {
private final static String LOG_TAG = "MainActivity";
private PowerManager.WakeLock mWakeLock;
private String ffmpeg_link = "";
private volatile FFmpegFrameRecorder recorder;
boolean recording = false;
long startTime = 0;
private int sampleAudioRateInHz = 16000;
private int imageWidth = 320;
private int imageHeight = 240;
private int frameRate = 24;
private Thread audioThread;
volatile boolean runAudioThread = true;
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private CameraView cameraView;
private IplImage yuvIplimage = null;
private Button recordButton;
private LinearLayout mainLayout;
#Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
setContentView(R.layout.activity_main);
initLayout();
initRecorder();
}
#Override
protected void onResume() {
super.onResume();
if (mWakeLock == null) {
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK,
LOG_TAG);
mWakeLock.acquire();
}
}
#Override
protected void onPause() {
super.onPause();
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
#Override
protected void onDestroy() {
super.onDestroy();
recording = false;
}
private void initLayout() {
mainLayout = (LinearLayout) this.findViewById(R.id.record_layout);
recordButton = (Button) findViewById(R.id.recorder_control);
recordButton.setText("Start");
recordButton.setOnClickListener(this);
cameraView = new CameraView(this);
LinearLayout.LayoutParams layoutParam = new LinearLayout.LayoutParams(
LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
mainLayout.addView(cameraView, layoutParam);
Log.v(LOG_TAG, "added cameraView to mainLayout");
}
private void initRecorder() {
Log.w(LOG_TAG, "initRecorder");
if (yuvIplimage == null) {
// Recreated after frame size is set in surface change method
yuvIplimage = IplImage.create(imageWidth, imageHeight,
IPL_DEPTH_8U, 2);
// yuvIplimage = IplImage.create(imageWidth, imageHeight,
// IPL_DEPTH_32S, 2);
Log.v(LOG_TAG, "IplImage.create");
}
recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth,
imageHeight, 1);
Log.v(LOG_TAG, "FFmpegFrameRecorder: " + ffmpeg_link + " imageWidth: "
+ imageWidth + " imageHeight " + imageHeight);
recorder.setFormat("flv");
Log.v(LOG_TAG, "recorder.setFormat(\"flv\")");
recorder.setSampleRate(sampleAudioRateInHz);
Log.v(LOG_TAG, "recorder.setSampleRate(sampleAudioRateInHz)");
// re-set in the surface changed method as well
recorder.setFrameRate(frameRate);
Log.v(LOG_TAG, "recorder.setFrameRate(frameRate)");
// Create audio recording thread
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
}
// Start the capture
public void startRecording() {
try {
recorder.start();
startTime = System.currentTimeMillis();
recording = true;
audioThread.start();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
// This should stop the audio thread from running
runAudioThread = false;
if (recorder != null && recording) {
recording = false;
Log.v(LOG_TAG,
"Finishing recording, calling stop and release on recorder");
try {
recorder.stop();
recorder.release();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}
#Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
// Quit when back button is pushed
if (keyCode == KeyEvent.KEYCODE_BACK) {
if (recording) {
stopRecording();
}
finish();
return true;
}
return super.onKeyDown(keyCode, event);
}
#Override
public void onClick(View v) {
if (!recording) {
startRecording();
Log.w(LOG_TAG, "Start Button Pushed");
recordButton.setText("Stop");
} else {
stopRecording();
Log.w(LOG_TAG, "Stop Button Pushed");
recordButton.setText("Start");
}
}
// ---------------------------------------------
// audio thread, gets and encodes audio data
// ---------------------------------------------
class AudioRecordRunnable implements Runnable {
#Override
public void run() {
// Set the thread priority
android.os.Process
.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
// Audio
int bufferSize;
short[] audioData;
int bufferReadResult;
bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
sampleAudioRateInHz,
AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT, bufferSize);
audioData = new short[bufferSize];
Log.d(LOG_TAG, "audioRecord.startRecording()");
audioRecord.startRecording();
// Audio Capture/Encoding Loop
while (runAudioThread) {
// Read from audioRecord
bufferReadResult = audioRecord.read(audioData, 0,
audioData.length);
if (bufferReadResult > 0) {
// Log.v(LOG_TAG,"audioRecord bufferReadResult: " +
// bufferReadResult);
// Changes in this variable may not be picked up despite it
// being "volatile"
if (recording) {
try {
// Write to FFmpegFrameRecorder
recorder.record(ShortBuffer.wrap(audioData, 0,
bufferReadResult));
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG, e.getMessage());
e.printStackTrace();
}
}
}
}
Log.v(LOG_TAG, "AudioThread Finished");
/* Capture/Encoding finished, release recorder */
if (audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
Log.v(LOG_TAG, "audioRecord released");
}
}
}
class CameraView extends SurfaceView implements SurfaceHolder.Callback,
PreviewCallback {
private boolean previewRunning = false;
private SurfaceHolder holder;
private Camera camera;
private byte[] previewBuffer;
long videoTimestamp = 0;
Bitmap bitmap;
Canvas canvas;
public CameraView(Context _context) {
super(_context);
holder = this.getHolder();
holder.addCallback(this);
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
#Override
public void surfaceCreated(SurfaceHolder holder) {
camera = Camera.open();
try {
camera.setPreviewDisplay(holder);
camera.setPreviewCallback(this);
Camera.Parameters currentParams = camera.getParameters();
Log.v(LOG_TAG,
"Preview Framerate: "
+ currentParams.getPreviewFrameRate());
Log.v(LOG_TAG,
"Preview imageWidth: "
+ currentParams.getPreviewSize().width
+ " imageHeight: "
+ currentParams.getPreviewSize().height);
// Use these values
imageWidth = currentParams.getPreviewSize().width;
imageHeight = currentParams.getPreviewSize().height;
frameRate = currentParams.getPreviewFrameRate();
bitmap = Bitmap.createBitmap(imageWidth, imageHeight,
Bitmap.Config.ALPHA_8);
/*
* Log.v(LOG_TAG,"Creating previewBuffer size: " + imageWidth *
* imageHeight *
* ImageFormat.getBitsPerPixel(currentParams.getPreviewFormat
* ())/8); previewBuffer = new byte[imageWidth * imageHeight *
* ImageFormat
* .getBitsPerPixel(currentParams.getPreviewFormat())/8];
* camera.addCallbackBuffer(previewBuffer);
* camera.setPreviewCallbackWithBuffer(this);
*/
camera.startPreview();
previewRunning = true;
} catch (IOException e) {
Log.v(LOG_TAG, e.getMessage());
e.printStackTrace();
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
Log.v(LOG_TAG, "Surface Changed: width " + width + " height: "
+ height);
// We would do this if we want to reset the camera parameters
/*
* if (!recording) { if (previewRunning){ camera.stopPreview(); }
*
* try { //Camera.Parameters cameraParameters =
* camera.getParameters(); //p.setPreviewSize(imageWidth,
* imageHeight); //p.setPreviewFrameRate(frameRate);
* //camera.setParameters(cameraParameters);
*
* camera.setPreviewDisplay(holder); camera.startPreview();
* previewRunning = true; } catch (IOException e) {
* Log.e(LOG_TAG,e.getMessage()); e.printStackTrace(); } }
*/
// Get the current parameters
Camera.Parameters currentParams = camera.getParameters();
Log.v(LOG_TAG,
"Preview Framerate: " + currentParams.getPreviewFrameRate());
Log.v(LOG_TAG,
"Preview imageWidth: "
+ currentParams.getPreviewSize().width
+ " imageHeight: "
+ currentParams.getPreviewSize().height);
// Use these values
imageWidth = currentParams.getPreviewSize().width;
imageHeight = currentParams.getPreviewSize().height;
frameRate = currentParams.getPreviewFrameRate();
// Create the yuvIplimage if needed
yuvIplimage = IplImage.create(imageWidth, imageHeight,
IPL_DEPTH_8U, 1);
// yuvIplimage = IplImage.create(imageWidth, imageHeight,
// IPL_DEPTH_32S, 2);
}
#Override
public void surfaceDestroyed(SurfaceHolder holder) {
try {
camera.setPreviewCallback(null);
previewRunning = false;
camera.release();
} catch (RuntimeException e) {
Log.v(LOG_TAG, e.getMessage());
e.printStackTrace();
}
}
#Override
public void onPreviewFrame(byte[] data, Camera camera) {
if (yuvIplimage != null && recording) {
videoTimestamp = 1000 * (System.currentTimeMillis() - startTime);
// Put the camera preview frame right into the yuvIplimage
// object
System.out.println("value of data=============" + data);
yuvIplimage.getByteBuffer().put(data);
// FAQ about IplImage:
// - For custom raw processing of data, getByteBuffer() returns
// an NIO direct
// buffer wrapped around the memory pointed by imageData, and
// under Android we can
// also use that Buffer with Bitmap.copyPixelsFromBuffer() and
// copyPixelsToBuffer().
// - To get a BufferedImage from an IplImage, we may call
// getBufferedImage().
// - The createFrom() factory method can construct an IplImage
// from a BufferedImage.
// - There are also a few copy*() methods for
// BufferedImage<->IplImage data transfers.
// Let's try it..
// This works but only on transparency
// Need to find the right Bitmap and IplImage matching types
/*
* bitmap.copyPixelsFromBuffer(yuvIplimage.getByteBuffer());
* //bitmap.setPixel(10,10,Color.MAGENTA);
*
* canvas = new Canvas(bitmap); Paint paint = new Paint();
* paint.setColor(Color.GREEN); float leftx = 20; float topy =
* 20; float rightx = 50; float bottomy = 100; RectF rectangle =
* new RectF(leftx,topy,rightx,bottomy);
* canvas.drawRect(rectangle, paint);
*
* bitmap.copyPixelsToBuffer(yuvIplimage.getByteBuffer());
*/
// Log.v(LOG_TAG,"Writing Frame");
try {
// Get the correct time
recorder.setTimestamp(videoTimestamp);
// Record the image into FFmpegFrameRecorder
recorder.record(yuvIplimage);
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG, e.getMessage());
e.printStackTrace();
}
}
}
}
}
Manifest
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.example.javacv.stream.test2"
android:versionCode="1"
android:versionName="1.0" >
<uses-sdk
android:minSdkVersion="8"
android:targetSdkVersion="15" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.WAKE_LOCK" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<application
android:icon="#drawable/ic_launcher"
android:label="#string/app_name"
android:theme="#style/AppTheme" >
<activity
android:name="com.example.javacv.stream.test2.MainActivity"
android:label="#string/title_activity_main" >
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>
This is a known issue we battled with on the iOS side as well.. Basically the packets for video get dropped while the audio keeps going, and it all goes to hell. Some hardware under low bandwidth just doesn't play nice and goes out of sync.. I don't believe there is a solid fix, we had to hack it by building our own buffer on top for the audio/video and re-synchronizing using timestamps, frame sizes, and packet counts.
Afraid I can't post that code (it's not mine to post) but if you know the protocol, shouldn't be hard to recreate..
I need to increase fps rate in my app. Now i have between 6 - 10 FPS which is very low imo because i have Nexus4. So i decided to switch from setPreviewCallback to setPreviewCallbackWithBuffer but i do not see ANY difference in framerate. In log i can see that addressess of buffers are changing circular but still i have only 6 - 10 fps (sometimes 12 but rarely)...
Could you try to give me some advices? BTW. I have Android 4.4 and i tried OpenCV but there result is almost the same... My code is:
public class XPreview extends SurfaceView implements SurfaceHolder.Callback, Camera.PreviewCallback {
private SurfaceHolder mHolder;
private Camera mCamera;
long mTimer = 0;
public XPreview(Context context, Camera camera) {
super(context);
mCamera = camera;
mHolder = getHolder();
mHolder.addCallback(this);
}
public void surfaceCreated(SurfaceHolder holder) {
}
public void surfaceDestroyed(SurfaceHolder holder) {
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
if (mHolder.getSurface() == null){
// preview surface does not exist
return;
}
// stop preview before making changes
try {
mCamera.stopPreview();
} catch (Exception e){
// ignore: tried to stop a non-existent preview
}
try {
int formatt = mCamera.getParameters().getPreviewFormat();
int bytesPerPx = ImageFormat.getBitsPerPixel( formatt );
int width = mCamera.getParameters().getPreviewSize().width;
int height = mCamera.getParameters().getPreviewSize().height;
int size = (int)( ( width * height * bytesPerPx ) / 8.0);
Parameters params = mCamera.getParameters();
mCamera.setParameters(params);
Log.d(TAG, "Data: " + formatt + " " + bytesPerPx + " " + width + "x" + height + " " + size );
mCamera.setPreviewDisplay(mHolder);
mCamera.setPreviewCallbackWithBuffer(this);
mCamera.addCallbackBuffer( new byte[size] );
mCamera.addCallbackBuffer( new byte[size] );
mCamera.addCallbackBuffer( new byte[size] );
mCamera.addCallbackBuffer( new byte[size] );
mCamera.addCallbackBuffer( new byte[size] );
mCamera.startPreview();
} catch (Exception e){
Log.d(TAG, "Error starting camera preview: " + e.getMessage());
}
}
#Override
public void onPreviewFrame(byte[] data, Camera camera) {
long time = System.currentTimeMillis();
Log.d( TAG, "Time between frames: " + ( time - mTimer ) + "ms, FPS: " + ( 1000.0 / (time - mTimer )) + ", data " + data );
mTimer = time;
camera.addCallbackBuffer(data);
}
}
Activity:
public class RTP extends Activity {
private Camera mCamera;
private XPreview mPreview;
#Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
mCamera = Camera.open();
mPreview = new XPreview(this, mCamera);
FrameLayout preview = (FrameLayout) findViewById( R.id.frameLayout1 );
preview.addView(mPreview);
}
#Override
public void onPause(){
super.onPause();
if( mCamera != null ){
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
}
}
Edit:
Parameters params = mCamera.getParameters();
params.setRecordingHint(true);
mCamera.setParameters(params);
This decreased delay between frames to about ~30ms with version with buffers and decresed delay to about 60 with version without buffer. Anyway i'd be glad if someone could give me some more advices. A bit deeper tests shows that now i have between 24 and 31 fps using version with buffering and two buffers and between 15 and 22 using default version.
Closed. This question does not meet Stack Overflow guidelines. It is not currently accepting answers.
Questions concerning problems with code you've written must describe the specific problem — and include valid code to reproduce it — in the question itself. See SSCCE.org for guidance.
Closed 9 years ago.
Improve this question
I don't know that I'm doing wrong, everyrthings seems to be ok, but pictures keep overwritting, how can I change the code in order pictures get tdiferents names each time, even if i don't close the application???
public class MirrorActivity extends Activity implements PictureCallback {
private final static String DEBUG_TAG = "MirrorActivity";
private Camera mCam;
private MirrorView mCamPreview;
private int mCameraId = 0;
private FrameLayout mPreviewLayout;
#Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
// do we have a camera?
if (!getPackageManager()
.hasSystemFeature(PackageManager.FEATURE_CAMERA)) {
Toast.makeText(this, "No camera feature on this device",
Toast.LENGTH_LONG).show();
} else {
mCameraId = findFirstFrontFacingCamera();
if (mCameraId >= 0) {
mPreviewLayout = (FrameLayout) findViewById(R.id.camPreview);
mPreviewLayout.removeAllViews();
startCameraInLayout(mPreviewLayout, mCameraId);
Button takePic = (Button) findViewById(R.id.capture);
takePic.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
mCam.takePicture(null, null, MirrorActivity.this);
}
});
} else {
Toast.makeText(this, "No front facing camera found.",
Toast.LENGTH_LONG).show();
}
}
}
#SuppressLint("NewApi")
private int findFirstFrontFacingCamera() {
int foundId = -1;
// find the first front facing camera
int numCams = Camera.getNumberOfCameras();
for (int camId = 0; camId < numCams; camId++) {
CameraInfo info = new CameraInfo();
Camera.getCameraInfo(camId, info);
if (info.facing == CameraInfo.CAMERA_FACING_BACK) {
Log.d(DEBUG_TAG, "Found back facing camera");
foundId = camId;
break;
}
}
return foundId;
}
private void startCameraInLayout(FrameLayout layout, int cameraId) {
// TODO pull this out of the UI thread.
mCam = Camera.open(cameraId);
if (mCam != null) {
mCamPreview = new MirrorView(this, mCam);
layout.addView(mCamPreview);
}
}
#Override
protected void onResume() {
super.onResume();
if (mCam == null && mPreviewLayout != null) {
mPreviewLayout.removeAllViews();
startCameraInLayout(mPreviewLayout, mCameraId);
}
}
#Override
protected void onPause() {
if (mCam != null) {
mCam.release();
mCam = null;
}
super.onPause();
}
public void onPictureTaken(byte[] data, Camera camera) {
File pictureFileDir = new File(
Environment
.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES),
"PhotoGalleryNobattery");
if (!pictureFileDir.exists() && !pictureFileDir.mkdirs()) {
Log.d(DEBUG_TAG, "Can't create directory to save image");
Toast.makeText(this, "Can't make path to save pic.",
Toast.LENGTH_LONG).show();
return;
}
String filename = pictureFileDir.getPath() + File.separator
+ "Picture.jpg";
File pictureFile = new File(filename);
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(data);
fos.close();
Toast.makeText(this, "Image saved as Picture.jpg",
Toast.LENGTH_LONG).show();
} catch (Exception error) {
Log.d(DEBUG_TAG, "File not saved: " + error.getMessage());
Toast.makeText(this, "Can't save image.", Toast.LENGTH_LONG).show();
Intent intent =new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
intent.setData(Uri.fromFile(pictureFile));
sendBroadcast(intent);
}
}
public class MirrorView extends SurfaceView implements
SurfaceHolder.Callback {
private SurfaceHolder mHolder;
private Camera mCamera;
public MirrorView(Context context, Camera camera) {
super(context);
mCamera = camera;
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder holder) {
try {
mCamera.setPreviewDisplay(holder);
mCamera.startPreview();
} catch (Exception error) {
Log.d(DEBUG_TAG,
"Error starting mPreviewLayout: " + error.getMessage());
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
}
public void surfaceChanged(SurfaceHolder holder, int format, int w,
int h) {
if (mHolder.getSurface() == null) {
return;
}
// can't make changes while mPreviewLayout is active
try {
mCamera.stopPreview();
} catch (Exception e) {
}
try {
// set rotation to match device orientation
setCameraDisplayOrientationAndSize();
// start up the mPreviewLayout
mCamera.setPreviewDisplay(mHolder);
mCamera.startPreview();
} catch (Exception error) {
Log.d(DEBUG_TAG,
"Error starting mPreviewLayout: " + error.getMessage());
}
}
public void setCameraDisplayOrientationAndSize() {
CameraInfo info = new CameraInfo();
Camera.getCameraInfo(mCameraId, info);
int rotation = getWindowManager().getDefaultDisplay().getRotation();
int degrees = rotation * 90;
/*
* // the above is just a shorter way of doing this, but could break
* if the values change switch (rotation) { case Surface.ROTATION_0:
* degrees = 0; break; case Surface.ROTATION_90: degrees = 90;
* break; case Surface.ROTATION_180: degrees = 180; break; case
* Surface.ROTATION_270: degrees = 270; break; }
*/
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (info.orientation + degrees) % 360;
result = (360 - result) % 360;
} else {
result = (info.orientation - degrees + 360) % 360;
}
mCamera.setDisplayOrientation(result);
Camera.Size previewSize = mCam.getParameters().getPreviewSize();
if (result == 90 || result == 270) {
// swap - the physical camera itself doesn't rotate in relation
// to the screen ;)
mHolder.setFixedSize(previewSize.height, previewSize.width);
} else {
mHolder.setFixedSize(previewSize.width, previewSize.height);
}
}
}
I believe its because you are always using the same filename. See following part of your code inside onPictureTaken:
String filename = pictureFileDir.getPath() + File.separator
+ "Picture.jpg";
In onPictureTaken pass an additional parameter filename that has a different value each time you call that function. It can be currentDate_CuentTimeInMilliSeconds.jpg.
I'm getting an error startPreview failed but not all devices.
In Motorola RAZR and Samsung Galaxy S3, it's working very well.
Some people told me they got the same problem in other devices (Galaxy SII Lite, Galaxy Ace Duos, Samsung Galaxy Y, etc)
I'm trying to test in a Samsung Galaxy Y and here's what I got in Logcat
java.lang.RuntimeException: startPreview failed
at android.hardware.Camera.startPreview(Native Method)
at br.com.timo.tubagram.CameraSurfaceView.surfaceCreated(CameraSurfaceView.java:47)
at android.view.SurfaceView.updateWindow(SurfaceView.java:601)
at android.view.SurfaceView.updateWindow(SurfaceView.java:413)
at android.view.SurfaceView.dispatchDraw(SurfaceView.java:358)
at android.view.View.draw(View.java:7083)
at android.view.SurfaceView.draw(SurfaceView.java:344)
at android.view.View.buildDrawingCache(View.java:6842)
at android.view.View.getDrawingCache(View.java:6628)
at android.view.ViewGroup.drawChild(ViewGroup.java:1571)
at android.view.ViewGroup.dispatchDraw(ViewGroup.java:1373)
at android.view.View.draw(View.java:7083)
at android.widget.FrameLayout.draw(FrameLayout.java:357)
at android.view.ViewGroup.drawChild(ViewGroup.java:1646)
at android.view.ViewGroup.dispatchDraw(ViewGroup.java:1373)
at android.view.ViewGroup.drawChild(ViewGroup.java:1644)
at android.view.ViewGroup.dispatchDraw(ViewGroup.java:1373)
at android.view.ViewGroup.drawChild(ViewGroup.java:1644)
at android.view.ViewGroup.dispatchDraw(ViewGroup.java:1373)
at android.view.View.draw(View.java:7083)
at android.widget.FrameLayout.draw(FrameLayout.java:357)
at com.android.internal.policy.impl.PhoneWindow$DecorView.draw(PhoneWindow.java:2108)
at android.view.ViewRoot.draw(ViewRoot.java:1540)
at android.view.ViewRoot.performTraversals(ViewRoot.java:1276)
at android.view.ViewRoot.handleMessage(ViewRoot.java:1878)
at android.os.Handler.dispatchMessage(Handler.java:99)
at android.os.Looper.loop(Looper.java:130)
at android.app.ActivityThread.main(ActivityThread.java:3770)
at java.lang.reflect.Method.invokeNative(Native Method)
at java.lang.reflect.Method.invoke(Method.java:507)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:912)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:670)
at dalvik.system.NativeStart.main(Native Method)
And here is my code
public class CameraSurfaceView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback{
private SurfaceHolder holder;
private Camera camera;
private Camera.Parameters parameters;
boolean front = false;
public CameraSurfaceView(Context context) {
super(context);
this.holder = this.getHolder();
this.holder.addCallback(this);
}
#Override
public void surfaceCreated(SurfaceHolder holder) {
try {
this.holder = holder;
camera = Camera.open();
camera.setPreviewDisplay(holder);
parameters = parametrosCamera();
camera.setParameters(parameters);
camera.setDisplayOrientation(90);
camera.startPreview();
} catch (IOException ioe) {
}
}
#Override
public void surfaceDestroyed(SurfaceHolder holder) {
camera.stopPreview();
camera.release();
camera = null;
}
private Parameters parametrosCamera(){
Parameters parameters = camera.getParameters();
List<Camera.Size> sizes = parameters.getSupportedPreviewSizes();
if (sizes != null){
Size min = sizes.get(0);
for (Size size : sizes){
if (size.width < min.width){
min = size;
}else{
parameters.setPreviewSize(min.width, min.height);
parameters.setPictureSize(min.width, min.height);
}
}
parameters.set("orientation", "portrait");
parameters.setRotation(90);
}
if (parameters.getFlashMode() != null){
parameters.setFlashMode(Parameters.FLASH_MODE_AUTO);
}
return parameters;
}
}
And my PrincipalActivity
public class PrincipalActivity extends Activity{
Camera camera;
File sdImageMainDirectory;
CameraSurfaceView cameraSurfaceView;
Button principalActivity_bt_TirarFoto;
Button principalActivity_bt_VirarFoto;
Button principalActivity_bt_Aceitar;
Button principalActivity_bt_Cancelar;
FrameLayout preview;
ImageView principalActivity_iv_UltimaFoto;
HandlePictureStorage hps;
int wid = 0;
boolean imageSelected = false;
boolean front = false;
String caminhoImagens;
String url;
// File storagePath = new File(Environment.getExternalStorageDirectory() + "/Tubagram/");
#SuppressWarnings("deprecation")
#SuppressLint("NewApi")
public void onCreate(Bundle savedInstanceState) {
Log.i("PrincipalActivity","onCreate");
super.onCreate(savedInstanceState);
setContentView(R.layout.principal_activity);
principalActivity_bt_TirarFoto = (Button) findViewById(R.id.principalActivity_bt_TirarFoto);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
cameraSurfaceView = new CameraSurfaceView(principalActivity_bt_TirarFoto.getContext());
cameraSurfaceView.setSoundEffectsEnabled(true);
cameraSurfaceView.setDrawingCacheEnabled(true);
preview = (FrameLayout) findViewById(R.id.principalActivity_fl_Camera);
preview.addView(cameraSurfaceView);
principalActivity_bt_TirarFoto.setSoundEffectsEnabled(true);
principalActivity_bt_TirarFoto.setOnClickListener(new OnClickListener() {
#SuppressLint("NewApi")
#Override
public void onClick(View v) {
Log.i("PrincipalActivity","onClick - TirarFoto");
camera = cameraSurfaceView.getCamera();
camera.takePicture(new ShutterCallback() {
#Override
public void onShutter() {
AudioManager mgr = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
mgr.playSoundEffect(AudioManager.FLAG_PLAY_SOUND);
}
}, null, hps = new HandlePictureStorage());
imageSelected = false;
mostrarBotoesConfirma();
}
});
principalActivity_bt_Aceitar = (Button) findViewById(R.id.principalActivity_bt_Aceitar);
principalActivity_bt_Aceitar.setOnClickListener(new OnClickListener() {
#Override
public void onClick(View v) {
Log.i("PrincipalActivity","onClick - Aceitar");
if (verificaInstagram()){
ByteArrayOutputStream stream = new ByteArrayOutputStream();
if (imageSelected){
Bitmap bitmap = ((BitmapDrawable)cameraSurfaceView.getBackground()).getBitmap();
bitmap.compress(Bitmap.CompressFormat.PNG, 100, stream);
}else{
hps.getBitmap().compress(Bitmap.CompressFormat.PNG, 100, stream);
}
salvarImagemSelecionada(stream.toByteArray());
Intent shareIntent = new Intent(android.content.Intent.ACTION_SEND);
shareIntent.setType("image/*");
caminhoImagens = getRealPathFromURI(Uri.parse(url));
Log.i("Caminho imagem: ", caminhoImagens);
shareIntent.putExtra(Intent.EXTRA_STREAM, Uri.parse("file://" + caminhoImagens));
shareIntent.setPackage("com.instagram.android");
startActivity(shareIntent);
// }
}else{
Toast.makeText(v.getContext(), "Você não possui o Instagram no seu smartphone!", Toast.LENGTH_SHORT).show();
}
}
});
principalActivity_bt_Cancelar = (Button) findViewById(R.id.principalActivity_bt_Cancelar);
principalActivity_bt_Cancelar.setOnClickListener(new OnClickListener() {
#Override
public void onClick(View v) {
Log.i("PrincipalActivity","onClick - Cancelar");
esconderBotoesConfirma();
cameraSurfaceView.setBackgroundColor(Color.TRANSPARENT);
cameraSurfaceView.voltarCamera();
}
});
int qtdCameras = Camera.getNumberOfCameras();
principalActivity_bt_VirarFoto = (Button) findViewById(R.id.principalActivity_bt_VirarFoto);
if(qtdCameras > 1){
principalActivity_bt_VirarFoto.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
Log.i("PrincipalActivity","onClick - VirarCamera");
Thread t = new Thread(){
#Override
public void run() {
cameraSurfaceView.flipit();
front = cameraSurfaceView.getFront();
}
};
t.start();
}
});
}else{
principalActivity_bt_VirarFoto.setVisibility(View.INVISIBLE);
}
LinearLayout principalActivity_ll_Molduras = (LinearLayout) findViewById(R.id.principalActivity_ll_Molduras);
principalActivity_ll_Molduras.setVisibility(View.VISIBLE);
List<Integer> listaMoldurasMenores = new ArrayList<Integer>();
final List<Integer> listaMoldurasMaiores = new ArrayList<Integer>();
// listaMoldurasMenores.add(R.drawable.moldura_menor0);
listaMoldurasMenores.add(R.drawable.moldura_menor1);
listaMoldurasMenores.add(R.drawable.moldura_menor2);
listaMoldurasMenores.add(R.drawable.moldura_menor3);
listaMoldurasMenores.add(R.drawable.moldura_menor4);
listaMoldurasMenores.add(R.drawable.moldura_menor5);
listaMoldurasMenores.add(R.drawable.moldura_menor6);
listaMoldurasMaiores.add(R.drawable.moldura_maior1);
listaMoldurasMaiores.add(R.drawable.moldura_maior2);
listaMoldurasMaiores.add(R.drawable.moldura_maior3);
listaMoldurasMaiores.add(R.drawable.moldura_maior4);
listaMoldurasMaiores.add(R.drawable.moldura_maior5);
listaMoldurasMaiores.add(R.drawable.moldura_maior6);
for (int i = 0; i < listaMoldurasMenores.size(); i++) {
final ImageView imagem_moldura = new ImageView(this);
imagem_moldura.setScaleType(ScaleType.FIT_XY);
imagem_moldura.setId(i);
imagem_moldura.setImageResource(listaMoldurasMenores.get(i));
imagem_moldura.setOnClickListener(new OnClickListener() {
#Override
public void onClick(View v) {
Log.i("PrincipalActivity","onClick - Molduras");
ImageView imagem_moldura_maior = new ImageView(v.getContext());
imagem_moldura_maior.setImageResource(listaMoldurasMaiores.get(imagem_moldura.getId()));
preview.setForeground(imagem_moldura_maior.getDrawable());
}
});
principalActivity_ll_Molduras.addView(imagem_moldura,i);
}
principalActivity_iv_UltimaFoto = (ImageView) findViewById(R.id.principalActivity_iv_UltimaFoto);
principalActivity_iv_UltimaFoto.setAdjustViewBounds(false);
Uri uri = buscaUltimaFotoAlbum();
Drawable backgroundGaleria;
if (uri != null){
String caminhosImagensGaleria = getRealPathFromURI(uri);
backgroundGaleria = Drawable.createFromPath(caminhosImagensGaleria);
principalActivity_iv_UltimaFoto.setBackgroundDrawable(backgroundGaleria);
}else{
principalActivity_iv_UltimaFoto.setBackgroundResource(R.drawable.box_imagem_album);
}
principalActivity_iv_UltimaFoto.setOnClickListener(new OnClickListener() {
#Override
public void onClick(View v) {
Intent intent = new Intent(Intent.ACTION_PICK,android.provider.MediaStore.Images.Media.INTERNAL_CONTENT_URI);
final int ACTION_SELECT_IMAGE = 1;
intent.putExtra("crop", "true");
intent.putExtra("aspectX", 10);
intent.putExtra("aspectY", 10);
intent.putExtra("outputX", 256);
intent.putExtra("outputY", 256);
intent.putExtra("scale", true);
intent.putExtra("return-data", true);
startActivityForResult(intent,ACTION_SELECT_IMAGE);
}
});
}
EDIT 1 - 06/20/2013
I change a little my code and now runs in my Galaxy Y but my FrameLayout with the camera isn't working. I getting a black screen on FrameLayout.
Anyone knows why?
What I change:
I added this line in my constructor
this.holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
I changed my surfaceCreated and surfaceChanged to this
#Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
try {
camera.stopPreview();
parameters = parametrosCamera();
camera.setParameters(parameters);
camera.setDisplayOrientation(90);
camera.setPreviewDisplay(holder);
camera.startPreview();
}
catch (IOException e) {
}
catch (Exception e){
}
}
#SuppressLint("NewApi")
#Override
public void surfaceCreated(SurfaceHolder holder) {
try {
camera = Camera.open(0);
camera.setPreviewDisplay(this.holder);
camera.startPreview();
} catch (IOException ioe) {
} catch (Exception e){
}
}
P.S.
I added some Log's to get the size my setPreviewSize is getting with my method
private Parameters parametersCamera(){
Parameters parameters = camera.getParameters();
List<Camera.Size> sizes = parameters.getSupportedPreviewSizes();
if (sizes != null){
Size min = sizes.get(0);
for (Size size : sizes){
if (size.width < min.width){
min = size;
}else{
parameters.setPreviewSize(min.width, min.height);
parameters.setPictureSize(min.width, min.height);
Log.i("Ultima Camera Width: " + min.width, "Ultima Camera Height: " + min.height);
}
}
parameters.set("orientation", "portrait");
parameters.setRotation(90);
}
if (parameters.getFlashMode() != null){
parameters.setFlashMode(Parameters.FLASH_MODE_AUTO);
}
return parameters;
}
and I added a Log in surfaceChanged to get the width and height (I don't use this width and height) and see if is the same and I get this result's
Motorola RAZR HD (works verywell)
surfaceChanged: 540 x 573
method parametrosCamera: 640 x 480
Samsung Galaxy Y (don't show my preview)
surfaceChanged: 240 x 162 (W x H)
method parametrosCamera: 320 x 240
and in my method parametersCamera I use these two lines
parameters.set("orientation", "portrait");
parameters.setRotation(90);
I think the answer to my question is in this part of my code.
You seem to be looking for the minimal supported preview size. But if this size is not supported as picture size, your code will initialize the camera into an unsupported state. Some devices may choose to fail to open preview in such state.
I am not sure you really need picture size to be equal to preview frame size. If you don't, you can simply iterate separately on the values from parameters.getSupportedPictureSizes().
Another common mistake (not necessarily applicable to your use case) is rooted in that the sizes for rear-facing camera do not match those of the front-facing camera. Therefore, the size must always be recalculated when you switch the camera.
Question isnt using camera manager related library, so it isnt useful to question poster (its ancient I assume he is already fixed or dont care anymore to issue), but this answer may help or fix on user that facing issue startpreview error only on some various devices.
Causes
After deep debug, I found that camera manager library try to use largest suitable resolution if no exact size match.
As in CameraConfiguration.findBestPreviewSizeValue() :
// If no exact match, use largest preview size. This was not a great
// idea on older devices because
// of the additional computation needed. We're likely to get here on
// newer Android 4+ devices, where
// the CPU is much more powerful.
if (!supportedPreviewSizes.isEmpty()) {
Camera.Size largestPreview = supportedPreviewSizes.get(0);
Point largestSize = new Point(largestPreview.width, largestPreview.height);
Log.i(TAG, "Using largest suitable preview size: " + largestSize);
return largestSize;
}
This is good on the device's manufacture is properly set on the supported preview sizes.
But thing sometime happens! Device may report ridiculous high resolution which overwhelming the device on startPreview(), which an internal driver error occur 'preview timeout', only visible on catlog.
Lets fix it!
Use closest resolution to the screen instead!
Modifiying CameraConfiguration.java or maybe inside CameraConfigurationUtils.java:
... Skip lines until you find next pattern (take note that your source may be a slightly different!)
+ Add lines
- Remove lines
...
...
public static Point findBestPreviewSizeValue(Camera.Parameters parameters, Point screenResolution) {
...
...
// Find a suitable size, with max resolution
int maxResolution = 0;
Camera.Size maxResPreviewSize = null;
+ double closestResolutionDrift = 1024*1024;
+ Camera.Size closestResolution = null;
for (Camera.Size size : rawSupportedSizes) {
...
...
Log.i(TAG, "Found preview size exactly matching screen size: " + exactPoint);
return exactPoint;
}
+
+ double drift = (maybeFlippedWidth * maybeFlippedHeight)-(screenResolution.x * screenResolution.y);
+ if (drift < closestResolutionDrift && resolution>(screenResolution.x * screenResolution.y)) {
+ closestResolutionDrift = drift;
+ closestResolution = size;
+ }
...
...
//If no exact match, use largest preview size. This was not a great idea on older devices because
//of the additional computation needed. We're likely to get here on newer Android 4+ devices, where
//the CPU is much more powerful.
- if (maxResPreviewSize != null) {
- Point largestSize = new Point(maxResPreviewSize.width, maxResPreviewSize.height);
- Log.i(TAG, "Using largest suitable preview size: " + largestSize);
- return largestSize;
- }
+ //if (maxResPreviewSize != null) {
+ // Point largestSize = new Point(maxResPreviewSize.width, maxResPreviewSize.height);
+ // Log.i(TAG, "Using largest suitable preview size: " + largestSize);
+ // return largestSize;
+ //}
+
+ // Dont! Some low end device still report ridiculous high resolution which overwhelming startPreview!
+ if(closestResolution!=null){
+ Point closestSize = new Point(closestResolution.width, closestResolution.height);
+ Log.i(TAG, "Using closest suitable preview size: " + closestSize);
+ return closestSize;
+ }
I used the following code to capture image. Everything works fine, when i capture the image it has 2592x1944 size and the image is captured in landscape mode. Now i want to capture the image with the size of 534x534. I changed this parameter values params.setPictureSize(534, 534); Nothing will change. How can i do this. Thanks in advance.
DgCamActivity.java
public class DgCamActivity extends Activity implements SensorEventListener {
private Camera mCamera;
private CameraPreview mPreview;
private SensorManager sensorManager = null;
private int orientation;
private ExifInterface exif;
private int deviceHeight;
private Button ibRetake;
private Button ibUse;
private Button ibCapture;
// private FrameLayout flBtnContainer;
private File sdRoot;
private String dir;
private String fileName;
// private ImageView rotatingImage;
private int degrees = -1;
#Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.test);
// Setting all the path for the image
sdRoot = Environment.getExternalStorageDirectory();
dir = "/SimpleCamera/";
// Getting all the needed elements from the layout
// rotatingImage = (ImageView) findViewById(R.id.imageView1);
ibRetake = (Button) findViewById(R.id.ibRetake);
ibUse = (Button) findViewById(R.id.ibUse);
ibCapture = (Button) findViewById(R.id.ibCapture);
// flBtnContainer = (FrameLayout) findViewById(R.id.flBtnContainer);
// Getting the sensor service.
sensorManager = (SensorManager) getSystemService(SENSOR_SERVICE);
// Selecting the resolution of the Android device so we can create a
// proportional preview
Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE))
.getDefaultDisplay();
deviceHeight = display.getHeight();
// Add a listener to the Capture button
ibCapture.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
mCamera.takePicture(null, null, mPicture);
}
});
// Add a listener to the Retake button
ibRetake.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
// Deleting the image from the SD card/
File discardedPhoto = new File(sdRoot, dir + fileName);
discardedPhoto.delete();
// Restart the camera preview.
mCamera.startPreview();
// Reorganize the buttons on the screen
// flBtnContainer.setVisibility(LinearLayout.VISIBLE);
ibRetake.setVisibility(LinearLayout.GONE);
ibUse.setVisibility(LinearLayout.GONE);
}
});
// Add a listener to the Use button
ibUse.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
// Everything is saved so we can quit the app.
finish();
}
});
}
private void createCamera() {
// Create an instance of Camera
mCamera = getCameraInstance();
// Setting the right parameters in the camera
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPictureSizes();
Log.v("SUPORTED SIZE IS>>>>>.", params.getSupportedPictureSizes() + "");
Log.v("SUPORTED SIZE IS>>>>>.", sizes.size() + "");
params.setPictureSize(1600, 1200);
params.setPictureFormat(PixelFormat.JPEG);
params.setJpegQuality(100);
mCamera.setParameters(params);
// Create our Preview view and set it as the content of our activity.
mPreview = new CameraPreview(this, mCamera);
RelativeLayout preview = (RelativeLayout) findViewById(R.id.camera_preview);
// Calculating the width of the preview so it is proportional.
float widthFloat = (float) (deviceHeight) * 4 / 3;
int width = Math.round(widthFloat);
RelativeLayout.LayoutParams layoutParams = new RelativeLayout.LayoutParams(
LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT);
preview.setLayoutParams(layoutParams);
preview.addView(mPreview, 0);
}
#Override
protected void onResume() {
super.onResume();
createCamera();
sensorManager.registerListener(this,
sensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER),
SensorManager.SENSOR_DELAY_NORMAL);
}
#Override
protected void onPause() {
super.onPause();
releaseCamera();
RelativeLayout preview = (RelativeLayout) findViewById(R.id.camera_preview);
preview.removeViewAt(0);
}
private void releaseCamera() {
if (mCamera != null) {
mCamera.release(); // release the camera for other applications
mCamera = null;
}
}
public static Camera getCameraInstance() {
Camera c = null;
try {
// attempt to get a Camera instance
c = Camera.open();
} catch (Exception e) {
// Camera is not available (in use or does not exist)
}
// returns null if camera is unavailable
return c;
}
private PictureCallback mPicture = new PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
ibRetake.setVisibility(View.VISIBLE);
ibUse.setVisibility(View.VISIBLE);
// File name of the image that we just took.
fileName = "IMG_"
+ new SimpleDateFormat("yyyyMMdd_HHmmss")
.format(new Date()).toString() + ".jpg";
File mkDir = new File(sdRoot, dir);
mkDir.mkdirs();
// Main file where to save the data that we recive from the camera
File pictureFile = new File(sdRoot, dir + fileName);
try {
FileOutputStream purge = new FileOutputStream(pictureFile);
purge.write(data);
purge.close();
} catch (FileNotFoundException e) {
Log.d("DG_DEBUG", "File not found: " + e.getMessage());
} catch (IOException e) {
Log.d("DG_DEBUG", "Error accessing file: " + e.getMessage());
}
try {
exif = new ExifInterface("/sdcard/" + dir + fileName);
exif.setAttribute(ExifInterface.TAG_ORIENTATION, ""
+ orientation);
exif.saveAttributes();
} catch (IOException e) {
e.printStackTrace();
}
}
};
/**
* Putting in place a listener so we can get the sensor data only when
* something changes.
*/
public void onSensorChanged(SensorEvent event) {
synchronized (this) {
if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
RotateAnimation animation = null;
if (event.values[0] < 4 && event.values[0] > -4) {
if (event.values[1] > 0
&& orientation != ExifInterface.ORIENTATION_ROTATE_90) {
// UP
orientation = ExifInterface.ORIENTATION_ROTATE_90;
animation = getRotateAnimation(270);
degrees = 270;
} else if (event.values[1] < 0
&& orientation != ExifInterface.ORIENTATION_ROTATE_270) {
// UP SIDE DOWN
orientation = ExifInterface.ORIENTATION_ROTATE_270;
animation = getRotateAnimation(90);
degrees = 90;
}
} else if (event.values[1] < 4 && event.values[1] > -4) {
if (event.values[0] > 0
&& orientation != ExifInterface.ORIENTATION_NORMAL) {
// LEFT
orientation = ExifInterface.ORIENTATION_NORMAL;
animation = getRotateAnimation(0);
degrees = 0;
} else if (event.values[0] < 0
&& orientation != ExifInterface.ORIENTATION_ROTATE_180) {
// RIGHT
orientation = ExifInterface.ORIENTATION_ROTATE_180;
animation = getRotateAnimation(180);
degrees = 180;
}
}
if (animation != null) {
// rotatingImage.startAnimation(animation);
}
}
}
}
/**
* Calculating the degrees needed to rotate the image imposed on the button
* so it is always facing the user in the right direction
*
* #param toDegrees
* #return
*/
private RotateAnimation getRotateAnimation(float toDegrees) {
float compensation = 0;
if (Math.abs(degrees - toDegrees) > 180) {
compensation = 360;
}
// When the device is being held on the left side (default position for
// a camera) we need to add, not subtract from the toDegrees.
if (toDegrees == 0) {
compensation = -compensation;
}
// Creating the animation and the RELATIVE_TO_SELF means that he image
// will rotate on it center instead of a corner.
RotateAnimation animation = new RotateAnimation(degrees, toDegrees
- compensation, Animation.RELATIVE_TO_SELF, 0.5f,
Animation.RELATIVE_TO_SELF, 0.5f);
// Adding the time needed to rotate the image
animation.setDuration(250);
// Set the animation to stop after reaching the desired position. With
// out this it would return to the original state.
animation.setFillAfter(true);
return animation;
}
/**
* STUFF THAT WE DON'T NEED BUT MUST BE HEAR FOR THE COMPILER TO BE HAPPY.
*/
public void onAccuracyChanged(Sensor sensor, int accuracy) {
}
}
CameraPreview.java
public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback {
private SurfaceHolder mHolder;
private Camera mCamera;
public CameraPreview(Context context, Camera camera) {
super(context);
mCamera = camera;
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = getHolder();
mHolder.addCallback(this);
// deprecated setting, but required on Android versions prior to 3.0
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mHolder.setSizeFromLayout();
mHolder.setFixedSize(100, 100);
}
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, now tell the camera where to draw the
// preview.
try {
mCamera.setPreviewDisplay(holder);
mCamera.startPreview();
} catch (IOException e) {
Log.d("DG_DEBUG", "Error setting camera preview: " + e.getMessage());
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
// If your preview can change or rotate, take care of those events here.
// Make sure to stop the preview before resizing or reformatting it.
if (mHolder.getSurface() == null) {
// preview surface does not exist
return;
}
// stop preview before making changes
try {
mCamera.stopPreview();
} catch (Exception e) {
// ignore: tried to stop a non-existent preview
}
// make any resize, rotate or reformatting changes here
// start preview with new settings
try {
mCamera.setPreviewDisplay(mHolder);
mCamera.startPreview();
} catch (Exception e) {
Log.d("DG_DEBUG", "Error starting camera preview: " + e.getMessage());
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// empty. Take care of releasing the Camera preview in your activity.
}
}
test.xml
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent" >
<RelativeLayout
android:id="#+id/camera_preview"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:layout_alignParentTop="true"
android:layout_centerHorizontal="true" >
</RelativeLayout>
<RelativeLayout
android:id="#+id/relativeLayout1"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:layout_alignParentBottom="true"
android:layout_centerHorizontal="true"
android:layout_marginBottom="41dp" >
<Button
android:id="#+id/ibCapture"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignBottom="#+id/relativeLayout1"
android:layout_alignLeft="#+id/camera_preview"
android:text="Capture" />
<Button
android:id="#+id/ibRetake"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentTop="true"
android:layout_marginLeft="36dp"
android:layout_toRightOf="#+id/ibCapture"
android:text="ReTake" />
<Button
android:id="#+id/ibUse"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentRight="true"
android:layout_alignParentTop="true"
android:layout_marginRight="38dp"
android:text="Save" />
</RelativeLayout>
</RelativeLayout>
I believe Android will not allow for arbitrary image sizes when taking a picture, you should use the parameters.getSupportedPictureSizes() method to query the supported image sizes.
I suspect you would have to choose a big enough size to cut your desired 534x534 patch from. You could do this by using BitmapFactory methods to decode the picture that was taken and then use bitmap.getPixels() method to extract the desired patch size, or something like bitmap.createScaledBitmap() to scale your picture to the desired size.
After you have your correctly sized bitmap, you could just use bitmap.compress() to save your image, if that's the final format you are going for.