I am developing an application that does some real-time image processing. In my camera view I do the processing inside onPreviewFrame, where I loop through each pixel and compute the Y-sum of each frame. I then save this to a CSV file and everything works perfectly, except that I also want to store the time elapsed between each frame in the CSV along with each Y-sum.
ImageProcessing
public abstract class ImageProcessing {
public static int YUV420SPtoYSum(byte[] yuv420sp, int width, int height){
if(yuv420sp == null)
return 0;
int sum = 0;
final int ii = 0;
final int ij = 0;
final int di = +1;
final int dj = +1;
int y = 0;
for (int i = 0, ci = ii; i < height; ++i, ci += di) {
for (int j = 0, cj = ij; j < width; ++j, cj += dj) {
y = (0xff & ((int) yuv420sp[ci * width + cj]));
//y = y < 16 ? 16 : y;
sum += y;
}
}
return sum;
}
}
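Note: the YUV420SPtoUSum and YUV420SPtoVSum helpers called later in CameraView are not shown in the question. A minimal sketch of what they might look like, assuming the standard NV21 layout (a full-resolution Y plane followed by an interleaved V/U plane at quarter resolution), is:
public static int YUV420SPtoUSum(byte[] yuv420sp, int width, int height) {
    if (yuv420sp == null)
        return 0;
    final int frameSize = width * height;
    int sum = 0;
    // NV21 stores one V byte followed by one U byte for every 2x2 block of pixels
    for (int j = 0; j < height / 2; j++) {
        for (int i = 0; i < width / 2; i++) {
            sum += 0xff & yuv420sp[frameSize + j * width + 2 * i + 1]; // U byte
        }
    }
    return sum;
}
public static int YUV420SPtoVSum(byte[] yuv420sp, int width, int height) {
    if (yuv420sp == null)
        return 0;
    final int frameSize = width * height;
    int sum = 0;
    for (int j = 0; j < height / 2; j++) {
        for (int i = 0; i < width / 2; i++) {
            sum += 0xff & yuv420sp[frameSize + j * width + 2 * i]; // V byte
        }
    }
    return sum;
}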
CameraView Class
public class CameraView extends SurfaceView implements SurfaceHolder.Callback, Camera.PreviewCallback {
private static final String TAG = "CameraView";
Camera.Size mPreviewSize;
List<Camera.Size> mSupportedPreviewSizes;
private SurfaceHolder mHolder;
private Camera mCamera;
int img_Y_Avg, img_U_Avg, img_V_Avg;
public interface PreviewReadyCallback {
void onPreviewFrame(int yAverage, int uAverage, int vAverage); // Any value you want to get
}
PreviewReadyCallback mPreviewReadyCallback = null;
public void setOnPreviewReady(PreviewReadyCallback cb) {
mPreviewReadyCallback = cb;
}
public CameraView(Context context, Camera camera){
super(context);
mCamera = camera;
//mCamera.setDisplayOrientation(90);
mSupportedPreviewSizes = mCamera.getParameters().getSupportedPreviewSizes();
for(Camera.Size str: mSupportedPreviewSizes)
Log.e(TAG, str.width + "/" + str.height);
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
}
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder){
try{
mCamera.setPreviewDisplay(surfaceHolder);
mCamera.startPreview();
}catch(Exception e){
Log.d("ERROR","Camera error on SurfaceCreated" + e.getMessage());
}
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i2, int i3) {
if(mHolder.getSurface() == null)
return;
try{
mCamera.stopPreview();
}catch(Exception e) {
Log.d("ERROR","Camera error on SurfaceChanged" + e.getMessage());
}
try {
Camera.Parameters parameters = mCamera.getParameters();
parameters.setPreviewSize(176, 144);
mCamera.cancelAutoFocus();
//parameters.setAutoExposureLock(false);
mCamera.setDisplayOrientation(90);
//set fps
parameters.setPreviewFpsRange(16000, 16000);
//on flash
parameters.setFlashMode(parameters.FLASH_MODE_AUTO);
//parameters.setAutoWhiteBalanceLock(true);
parameters.setPreviewFormat(ImageFormat.NV21);
/*if (parameters.getMaxNumMeteringAreas() > 0){ // check that metering areas are supported
List<Camera.Area> meteringAreas = new ArrayList<Camera.Area>();
Rect areaRect1 = new Rect(-50, -50, 50, 50); // specify an area in center of image
meteringAreas.add(new Camera.Area(areaRect1, 1000)); // set weight to 60%
parameters.setMeteringAreas(meteringAreas);
}*/
//mCamera.setDisplayOrientation(90);
mCamera.setParameters(parameters);
mCamera.setPreviewDisplay(mHolder);
mCamera.setPreviewCallback(this);
mCamera.startPreview();
} catch (IOException e) {
Log.d("ERROR","Camera error on SurfaceChanged" + e.getMessage());
}
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
if (mCamera != null){
//mCamera.stopPreview();
//mCamera.release();
}
}
@Override
public void onPreviewFrame(byte[] data, Camera camera){
//check if data is null
if (data == null)
throw new NullPointerException();
Camera.Size size = camera.getParameters().getPreviewSize();
//check if size is null
if(size == null)
throw new NullPointerException();
//set resolution of camera view to optimal setting
int width = size.width;
int height = size.height;
Log.d("Resolution ", " "+String.valueOf(width)+" "+String.valueOf(height));
//call ImageProcess on the data to decode YUV420SP to RGB
img_Y_Avg = ImageProcessing.YUV420SPtoYSum(data, width, height);
img_U_Avg = ImageProcessing.YUV420SPtoUSum(data, width, height);
img_V_Avg = ImageProcessing.YUV420SPtoVSum(data, width, height);
mPreviewReadyCallback.onPreviewFrame(img_Y_Avg, img_U_Avg, img_V_Avg);
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec){
final int width = resolveSize(getSuggestedMinimumWidth(),widthMeasureSpec);
final int height = resolveSize(getSuggestedMinimumHeight(), heightMeasureSpec);
setMeasuredDimension(width, height);
if(mSupportedPreviewSizes != null){
mPreviewSize = getOptimalPreviewSize(mSupportedPreviewSizes, width, height);
Log.d("Resolution ", " "+mPreviewSize);
}
}
private Camera.Size getOptimalPreviewSize(List<Camera.Size> sizes, int w, int h){
final double ASPECT_TOLERANCE = 0.1;
double targetRatio = (double) h / w;
if (sizes == null) return null;
Camera.Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
int targetHeight = h;
for (Camera.Size size : sizes){
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
if (Math.abs(size.height - targetHeight) < minDiff){
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
if (optimalSize == null){
minDiff = Double.MAX_VALUE; // reset for the fallback pass (MIN_VALUE here would never match, leaving optimalSize null)
for (Camera.Size size : sizes){
if (Math.abs(size.height - targetHeight) < minDiff){
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
}
return optimalSize;
}
}
MainActivity
public class MainActivity extends AppCompatActivity implements CameraView.PreviewReadyCallback {
private static Camera camera = null;
private CameraView image = null;
private LineChart bp_graph;
private static int img_Y_Avg = 0, img_U_Avg = 0, img_V_Avg = 0;
double valueY, valueU, valueV;
Handler handler;
private int readingRemaining = 600;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
bp_graph = (LineChart)findViewById(R.id.graph);
graph_features();
//open camera
try {
camera = Camera.open();
handler = new Handler();
final Runnable runnable = new Runnable() {
@Override
public void run() {
camera.stopPreview();
camera.release();
}
};
handler.postDelayed(runnable, 30000);
} catch (Exception e) {
Log.d("ERROR", "Failed to get camera: " + e.getMessage());
}
if (camera != null) {
image = new CameraView(this, camera);
FrameLayout camera_view = (FrameLayout) findViewById(R.id.camera_view);
camera_view.addView(image);
image.setOnPreviewReady(this);
}
//close camera button
ImageButton imgClose = (ImageButton) findViewById(R.id.imgClose);
imgClose.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
System.exit(0);
}
});
}
@Override
protected void onResume(){
super.onResume();
}
@Override
protected void onPause() {
super.onPause();
}
private void graph_features(){
bp_graph.getDescription().setEnabled(false);
//enable touch gesture
bp_graph.setTouchEnabled(true);
//enable scaling
bp_graph.setDragEnabled(true);
//scale and drag
bp_graph.setScaleEnabled(true);
bp_graph.setDrawGridBackground(false);
//enable pinch zoom in
bp_graph.setPinchZoom(true);
//alternative background color
bp_graph.setBackgroundColor(Color.LTGRAY);
//work on data
LineData lineData = new LineData();
lineData.setValueTextColor(Color.WHITE);
//add data to line chart
bp_graph.setData(lineData);
//animate
bp_graph.animateX(600);
Legend legend = bp_graph.getLegend();
//custom legend
legend.setForm(Legend.LegendForm.LINE);
legend.setTextColor(Color.WHITE);
XAxis x1 = bp_graph.getXAxis();
x1.setTextColor(Color.WHITE);
x1.setDrawGridLines(false);
x1.setAvoidFirstLastClipping(true);
x1.setPosition(XAxis.XAxisPosition.BOTTOM);
YAxis y1 = bp_graph.getAxisLeft();
y1.setTextColor(Color.WHITE);
y1.setAxisMaximum(5000000);
y1.setAxisMinimum(100000);
y1.setDrawGridLines(true);
//y1.setInverted(true);
YAxis y2 = bp_graph.getAxisRight();
y2.setEnabled(false);
}
//method to create set
private ILineDataSet createSet() {
LineDataSet set = new LineDataSet(null, "PPG");
set.setLineWidth(1.0f);
set.setCircleRadius(1.0f);
set.setColor(Color.rgb(240, 99, 99));
set.setCircleColor(Color.rgb(240, 99, 99));
set.setHighLightColor(Color.rgb(190, 190, 190));
set.setAxisDependency(YAxis.AxisDependency.LEFT);
set.setValueTextSize(1.0f);
return set;
}
@Override
public void onPreviewFrame(int ySum, int uSum, int vSum) {
img_Y_Avg = ySum;
img_U_Avg = uSum;
img_V_Avg = vSum;
//set value of Y on the text view
TextView valueOfY = (TextView)findViewById(R.id.valueY);
valueY = img_Y_Avg;
valueOfY.setText(Double.toString(img_Y_Avg));
//set value of U on the text view
TextView valueOfU = (TextView)findViewById(R.id.valueU);
valueU = img_U_Avg;
valueOfU.setText(Double.toString(img_U_Avg));
//set value of V on the text view
TextView valueOfV = (TextView)findViewById(R.id.valueV);
valueV = img_V_Avg;
valueOfV.setText(Double.toString(img_V_Avg));
//store value to array list
ArrayList<Integer> yAverage = new ArrayList<Integer>();
yAverage.add(img_Y_Avg);
//Log.d("MyEntryData", String.valueOf(yAverage));
//store u values to array
ArrayList<Integer> uAverage = new ArrayList<Integer>();
uAverage.add(img_U_Avg);
//Log.d("MyEntryData", String.valueOf(uAverage));
//store u values to array
ArrayList<Integer> vAverage = new ArrayList<Integer>();
vAverage.add(img_V_Avg);
//Log.d("MyEntryData", String.valueOf(vAverage));
float start = System.nanoTime();
int diff = (int) ((System.currentTimeMillis()/1000) - start);
ArrayList<Integer> difference = new ArrayList<Integer>();
difference.add(diff);
Log.d("time", String.valueOf(start));
ArrayList<Integer> getValues = new ArrayList<Integer>();
for(int i = 0; i < uAverage.size(); i++) {
//getValues.add(difference.get(i));
getValues.add(yAverage.get(i));
getValues.add(uAverage.get(i));
getValues.add(vAverage.get(i));
}
String filename = new SimpleDateFormat("yyyyMMddHHmm'.csv'").format(new Date());
File directoryDownload = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS);
File logDir = new File (directoryDownload, "bpReader"); //Creates a new folder in DOWNLOAD directory
logDir.mkdirs();
File file = new File(logDir, filename);
FileOutputStream outputStream = null;
try {
outputStream = new FileOutputStream(file, true);
//outputStream = openFileOutput(filename, Context.MODE_PRIVATE);
for (int i = 0; i < uAverage.size(); i += 3) {
//outputStream.write((getValues.get(i) + ",").getBytes());
outputStream.write((getValues.get(i) + ",").getBytes());
outputStream.write((getValues.get(i + 1) + ",").getBytes());
outputStream.write((getValues.get(i + 2) + "\n").getBytes());
}
outputStream.close();
} catch (Exception e) {
e.printStackTrace();
}
Log.d("MyEntryData", String.valueOf(getValues));
handler = new Handler();
final Runnable runnable = new Runnable() {
@Override
public void run() {
readingRemaining = readingRemaining -1;
if (readingRemaining > 0){
plotGraph(img_Y_Avg);
//plotGraph(img_U_Avg);
//plotGraph(img_V_Avg);
}
}
};
handler.postDelayed(runnable, 100);
//Log.d("MyEntryData", String.valueOf(img_Y_Avg +" "+ img_U_Avg+" "+img_V_Avg));
}
private void plotGraph(double graph_data){
LineData data = bp_graph.getData();
if (data != null){
ILineDataSet set = data.getDataSetByIndex(0);
if (set == null){
set = createSet();
data.addDataSet(set);
}
//add a new value
int randomDataSetIndex = (int) (Math.random() * data.getDataSetCount());
float yValue = (float) graph_data;
data.addEntry(new Entry(data.getDataSetByIndex(randomDataSetIndex).getEntryCount(), yValue), randomDataSetIndex);
//notify chart data have changed
bp_graph.notifyDataSetChanged();
bp_graph.setVisibleXRangeMaximum(100);
//scroll to last entry
bp_graph.moveViewTo(data.getEntryCount() - 7, 50f, YAxis.AxisDependency.RIGHT);
}
}}
I am doing the image processing inside the CameraView class and then, through an interface, sending the values to MainActivity, where I generate the .csv file and the graph.
How do I get the time difference between each frame (or two consecutive Y-sums) that is generated?
Like this:
long startTime = System.currentTimeMillis();
img_Y_Avg = ImageProcessing.YUV420SPtoYSum(data, width, height);
img_U_Avg = ImageProcessing.YUV420SPtoUSum(data, width, height);
img_V_Avg = ImageProcessing.YUV420SPtoVSum(data, width, height);
long finishTime = System.currentTimeMillis();
mPreviewReadyCallback.onPreviewFrame(img_Y_Avg, img_U_Avg, img_V_Avg,finishTime-startTime);
And add this parameter to the interface:
public interface PreviewReadyCallback {
void onPreviewFrame(int yAverage, int uAverage, int vAverage, long time);
}
Then save it to the CSV like the other values.
Update:
That was your calculation time. If you want the time between frames, declare a field in the class:
long oldTime=System.currentTimeMillis();
and then, in onPreviewFrame:
img_Y_Avg = ImageProcessing.YUV420SPtoYSum(data, width, height);
img_U_Avg = ImageProcessing.YUV420SPtoUSum(data, width, height);
img_V_Avg = ImageProcessing.YUV420SPtoVSum(data, width, height);
long newTime = System.currentTimeMillis();
mPreviewReadyCallback.onPreviewFrame(img_Y_Avg, img_U_Avg, img_V_Avg,newTime-oldTime);
oldTime=newTime;
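On the receiving side the extra parameter is just one more CSV column. A minimal sketch of the MainActivity side, assuming you keep the same Downloads/bpReader file and one row per frame as above:
@Override
public void onPreviewFrame(int ySum, int uSum, int vSum, long frameTime) {
    // frameTime is newTime - oldTime from CameraView, i.e. milliseconds between frames
    String row = frameTime + "," + ySum + "," + uSum + "," + vSum + "\n";
    File logDir = new File(Environment.getExternalStoragePublicDirectory(
            Environment.DIRECTORY_DOWNLOADS), "bpReader");
    logDir.mkdirs();
    File file = new File(logDir, filename); // same filename scheme as in the question
    try {
        FileOutputStream outputStream = new FileOutputStream(file, true);
        outputStream.write(row.getBytes());
        outputStream.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
}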
Related
Currently I am using a GitHub library to make a demo app for an AR location-based project: AR-Location Based Android. The library works fine in portrait mode, but in landscape mode the AR points come up from the bottom of the screen (like bubbles rising) and disappear at the top while rotating the camera. I raised an issue on GitHub as well, but the owner is not responding. I tried to fix it but was unable to. Can anyone guide me on how to solve the problem, or where to make the change?
I think these classes are responsible for plotting in portrait and landscape mode. If not, please tell me where to change it.
@SuppressLint("ViewConstructor")
@SuppressWarnings("deprecation")
@TargetApi(Build.VERSION_CODES.KITKAT)
public class ARCamera extends ViewGroup implements SurfaceHolder.Callback {
private final String TAG = "ARCamera";
SurfaceView surfaceView;
SurfaceHolder surfaceHolder;
Camera.Size previewSize;
List<Camera.Size> supportedPreviewSizes;
Camera camera;
Camera.Parameters parameters;
Activity activity;
float[] projectionMatrix = new float[16];
int cameraWidth;
int cameraHeight;
private final static float Z_NEAR = 0.5f; //Increase or decrease the distance between points
private final static float Z_FAR = 2000;
public ARCamera(Context context, SurfaceView surfaceView) {
super(context);
this.surfaceView = surfaceView;
this.activity = (Activity) context;
surfaceHolder = this.surfaceView.getHolder();
surfaceHolder.addCallback(this);
surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void setCamera(Camera camera) {
this.camera = camera;
if (this.camera != null) {
supportedPreviewSizes = this.camera.getParameters().getSupportedPreviewSizes();
requestLayout();
Camera.Parameters params = this.camera.getParameters();
List<String> focusModes = params.getSupportedFocusModes();
if (focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
this.camera.setParameters(params);
}
}
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
final int width = resolveSize(getSuggestedMinimumWidth(), widthMeasureSpec);
final int height = resolveSize(getSuggestedMinimumHeight(), heightMeasureSpec);
setMeasuredDimension(width, height);
if (supportedPreviewSizes != null) {
previewSize = getOptimalPreviewSize(supportedPreviewSizes, width, height);
}
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
if (changed && getChildCount() > 0) {
final View child = getChildAt(0);
final int width = right - left;
final int height = bottom - top;
int previewWidth = width;
int previewHeight = height;
if (previewSize != null) {
previewWidth = previewSize.width;
previewHeight = previewSize.height;
}
if (width * previewHeight > height * previewWidth) {
final int scaledChildWidth = previewWidth * height / previewHeight;
child.layout((width - scaledChildWidth) / 2, 0,
(width + scaledChildWidth) / 2, height);
} else {
final int scaledChildHeight = previewHeight * width / previewWidth;
child.layout(0, (height - scaledChildHeight) / 2,
width, (height + scaledChildHeight) / 2);
}
}
}
public void surfaceCreated(SurfaceHolder holder) {
try {
if (camera != null) {
parameters = camera.getParameters();
int orientation = getCameraOrientation();
camera.setDisplayOrientation(orientation);
camera.getParameters().setRotation(orientation);
camera.setPreviewDisplay(holder);
}
} catch (IOException exception) {
Log.e(TAG, "IOException caused by setPreviewDisplay()", exception);
}
}
private int getCameraOrientation() {
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(Camera.CameraInfo.CAMERA_FACING_FRONT, info);
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
Toast.makeText(activity, "Rotation = " + rotation, Toast.LENGTH_SHORT).show();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0:
degrees = 0;
Toast.makeText(activity, "Degrees = " + degrees, Toast.LENGTH_SHORT).show();
break;
case Surface.ROTATION_90:
degrees = 90;
Toast.makeText(activity, "Degrees = " + degrees, Toast.LENGTH_SHORT).show();
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
}
int orientation;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
orientation = (info.orientation + degrees) % 360;
orientation = (360 - orientation) % 360;
} else {
orientation = (info.orientation - degrees + 360) % 360;
}
return orientation;
}
public void surfaceDestroyed(SurfaceHolder holder) {
if (camera != null) {
camera.setPreviewCallback(null);
camera.stopPreview();
camera.release();
camera = null;
}
}
private Camera.Size getOptimalPreviewSize(List<Camera.Size> sizes, int width, int height) {
final double ASPECT_TOLERANCE = 0.1;
double targetRatio = (double) width / height;
if (sizes == null) return null;
Camera.Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
int targetHeight = height;
for (Camera.Size size : sizes) {
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) {
continue;
}
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
if (optimalSize == null) {
minDiff = Double.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
}
if (optimalSize == null) {
optimalSize = sizes.get(0);
}
return optimalSize;
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
if (camera != null) {
this.cameraWidth = width;
this.cameraHeight = height;
Camera.Parameters params = camera.getParameters();
params.setPreviewSize(previewSize.width, previewSize.height);
requestLayout();
try {
camera.setParameters(params);
camera.startPreview();
} catch (Exception e) {
e.printStackTrace();
}
generateProjectionMatrix();
}
}
private void generateProjectionMatrix() {
float ratio = (float) this.cameraWidth / this.cameraHeight;
final int OFFSET = 0;
final float LEFT = -ratio;
final float RIGHT = ratio;
final float BOTTOM = -1; //increases the speed of points going
final float TOP = 1; //increases the speed of points coming
Matrix.frustumM(projectionMatrix, OFFSET, LEFT, RIGHT, BOTTOM, TOP, Z_NEAR, Z_FAR);
}
public float[] getProjectionMatrix() {
return projectionMatrix;
}
}
Activity Class
public class ARActivity extends AppCompatActivity implements SensorEventListener, LocationListener {
final static String TAG = "ARActivity";
private SurfaceView surfaceView;
private FrameLayout cameraContainerLayout;
private AROverlayView arOverlayView;
private Camera camera;
private ARCamera arCamera;
private TextView tvCurrentLocation;
private SensorManager sensorManager;
private final static int REQUEST_CAMERA_PERMISSIONS_CODE = 11;
public static final int REQUEST_LOCATION_PERMISSIONS_CODE = 0;
private static final long MIN_DISTANCE_CHANGE_FOR_UPDATES = 0; // 10 meters
private static final long MIN_TIME_BW_UPDATES = 0;//1000 * 60 * 1; // 1 minute
private LocationManager locationManager;
public Location location;
boolean isGPSEnabled;
boolean isNetworkEnabled;
boolean locationServiceAvailable;
private DisplayMetrics metrics;
private RelativeLayout infoLayout;
private ImageView appLogo;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_ar);
Utility.getFullScreenView(this);
sensorManager = (SensorManager) this.getSystemService(SENSOR_SERVICE);
cameraContainerLayout = (FrameLayout) findViewById(R.id.camera_container_layout);
surfaceView = (SurfaceView) findViewById(R.id.surface_view);
tvCurrentLocation = (TextView) findViewById(R.id.tv_current_location);
infoLayout = (RelativeLayout) findViewById(R.id.info_layout);
appLogo = (ImageView) findViewById(R.id.logoApp);
arOverlayView = new AROverlayView(this);
metrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metrics);
infoLayout.getLayoutParams().width = metrics.widthPixels/4;
appLogo.getLayoutParams().width = metrics.widthPixels/5;
appLogo.getLayoutParams().height = metrics.heightPixels/4;
}
@Override
public void onResume() {
super.onResume();
requestLocationPermission();
requestCameraPermission();
registerSensors();
initAROverlayView();
}
@Override
public void onPause() {
releaseCamera();
super.onPause();
}
public void requestCameraPermission() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M &&
this.checkSelfPermission(Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
this.requestPermissions(new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSIONS_CODE);
} else {
initARCameraView();
}
}
public void requestLocationPermission() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M &&
this.checkSelfPermission(Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED) {
this.requestPermissions(new String[]{Manifest.permission.ACCESS_FINE_LOCATION}, REQUEST_LOCATION_PERMISSIONS_CODE);
} else {
initLocationService();
}
}
public void initAROverlayView() {
if (arOverlayView.getParent() != null) {
((ViewGroup) arOverlayView.getParent()).removeView(arOverlayView);
}
cameraContainerLayout.addView(arOverlayView);
}
public void initARCameraView() {
reloadSurfaceView();
if (arCamera == null) {
arCamera = new ARCamera(this, surfaceView);
}
if (arCamera.getParent() != null) {
((ViewGroup) arCamera.getParent()).removeView(arCamera);
}
cameraContainerLayout.addView(arCamera);
arCamera.setKeepScreenOn(true);
initCamera();
}
private void initCamera() {
int numCams = Camera.getNumberOfCameras();
if(numCams > 0){
try{
camera = Camera.open();
camera.startPreview();
arCamera.setCamera(camera);
} catch (RuntimeException ex){
Toast.makeText(this, "Camera not found", Toast.LENGTH_LONG).show();
}
}
}
private void reloadSurfaceView() {
if (surfaceView.getParent() != null) {
((ViewGroup) surfaceView.getParent()).removeView(surfaceView);
}
cameraContainerLayout.addView(surfaceView);
}
private void releaseCamera() {
if(camera != null) {
camera.setPreviewCallback(null);
camera.stopPreview();
arCamera.setCamera(null);
camera.release();
camera = null;
}
}
private void registerSensors() {
sensorManager.registerListener(this,
sensorManager.getDefaultSensor(Sensor.TYPE_ROTATION_VECTOR),
SensorManager.SENSOR_DELAY_FASTEST);
}
@Override
public void onSensorChanged(SensorEvent sensorEvent) {
if (sensorEvent.sensor.getType() == Sensor.TYPE_ROTATION_VECTOR) {
float[] rotationMatrixFromVector = new float[16];
float[] projectionMatrix = new float[16];
float[] rotatedProjectionMatrix = new float[16];
SensorManager.getRotationMatrixFromVector(rotationMatrixFromVector, sensorEvent.values);
if (arCamera != null) {
projectionMatrix = arCamera.getProjectionMatrix();
}
Matrix.multiplyMM(rotatedProjectionMatrix, 0, projectionMatrix, 0, rotationMatrixFromVector, 0);
this.arOverlayView.updateRotatedProjectionMatrix(rotatedProjectionMatrix);
}
}
@Override
public void onAccuracyChanged(Sensor sensor, int i) {
//do nothing
}
private void initLocationService() {
if ( Build.VERSION.SDK_INT >= 23 &&
ContextCompat.checkSelfPermission( this, Manifest.permission.ACCESS_FINE_LOCATION ) != PackageManager.PERMISSION_GRANTED) {
return ;
}
try {
this.locationManager = (LocationManager) this.getSystemService(this.LOCATION_SERVICE);
// Get GPS and network status
this.isGPSEnabled = locationManager.isProviderEnabled(LocationManager.GPS_PROVIDER);
this.isNetworkEnabled = locationManager.isProviderEnabled(LocationManager.NETWORK_PROVIDER);
if (!isNetworkEnabled && !isGPSEnabled) {
// cannot get location
this.locationServiceAvailable = false;
}
this.locationServiceAvailable = true;
if (isNetworkEnabled) {
locationManager.requestLocationUpdates(LocationManager.NETWORK_PROVIDER,
MIN_TIME_BW_UPDATES,
MIN_DISTANCE_CHANGE_FOR_UPDATES, this);
if (locationManager != null) {
location = locationManager.getLastKnownLocation(LocationManager.NETWORK_PROVIDER);
updateLatestLocation();
}
}
if (isGPSEnabled) {
locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER,
MIN_TIME_BW_UPDATES,
MIN_DISTANCE_CHANGE_FOR_UPDATES, this);
if (locationManager != null) {
location = locationManager.getLastKnownLocation(LocationManager.GPS_PROVIDER);
updateLatestLocation();
}
}
} catch (Exception ex) {
Log.e(TAG, ex.getMessage());
}
}
private void updateLatestLocation() {
if (arOverlayView !=null && location != null) {
arOverlayView.updateCurrentLocation(location);
tvCurrentLocation.setText(String.format("lat: %s \nlon: %s \naltitude: %s \n",
location.getLatitude(), location.getLongitude(), location.getAltitude()));
}
}
@Override
public void onLocationChanged(Location location) {
updateLatestLocation();
}
@Override
public void onStatusChanged(String s, int i, Bundle bundle) {
}
@Override
public void onProviderEnabled(String s) {
}
@Override
public void onProviderDisabled(String s) {
}
public void showHideInfo(View view) {
infoLayout.setVisibility(View.VISIBLE);
infoLayout.startAnimation(AnimationUtils.loadAnimation(this,R.anim.slide_in));
}
public void closeInfo(View view) {
infoLayout.setVisibility(View.GONE);
infoLayout.startAnimation(AnimationUtils.loadAnimation(this,android.R.anim.slide_out_right));
}
}
LocationHelper Class
public class LocationHelper {
private final static double WGS84_A = 6378137.0; // WGS 84 semi-major axis constant in meters
private final static double WGS84_E2 = 0.00669437999014; // square of WGS 84 eccentricity
public static float[] WSG84toECEF(Location location) {
double radLat = Math.toRadians(location.getLatitude());
double radLon = Math.toRadians(location.getLongitude());
float clat = (float) Math.cos(radLat);
float slat = (float) Math.sin(radLat);
float clon = (float) Math.cos(radLon);
float slon = (float) Math.sin(radLon);
float N = (float) (WGS84_A / Math.sqrt(1.0 - WGS84_E2 * slat * slat));
float x = (float) ((N + location.getAltitude()) * clat * clon);
float y = (float) ((N + location.getAltitude()) * clat * slon);
float z = (float) ((N * (1.0 - WGS84_E2) + location.getAltitude()) * slat);
return new float[] {x , y, z};
}
public static float[] ECEFtoENU(Location currentLocation, float[] ecefCurrentLocation, float[] ecefPOI) {
double radLat = Math.toRadians(currentLocation.getLatitude());
double radLon = Math.toRadians(currentLocation.getLongitude());
float clat = (float)Math.cos(radLat);
float slat = (float)Math.sin(radLat);
float clon = (float)Math.cos(radLon);
float slon = (float)Math.sin(radLon);
float dx = ecefCurrentLocation[0] - ecefPOI[0];
float dy = ecefCurrentLocation[1] - ecefPOI[1];
float dz = ecefCurrentLocation[2] - ecefPOI[2];
float east = -slon*dx + clon*dy;
float north = -slat*clon*dx - slat*slon*dy + clat*dz;
float up = clat*clon*dx + clat*slon*dy + slat*dz;
return new float[] {east , north, up, 1};
}
}
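For reference, these two helpers are typically combined like this (a sketch; currentLocation and poiLocation are illustrative names for the device location and the point of interest):
float[] currentInECEF = LocationHelper.WSG84toECEF(currentLocation);
float[] poiInECEF = LocationHelper.WSG84toECEF(poiLocation);
// east/north/up offset of the POI relative to the current location
float[] poiInENU = LocationHelper.ECEFtoENU(currentLocation, currentInECEF, poiInECEF);
// poiInENU ({east, north, up, 1}) can then be multiplied with the rotated
// projection matrix produced in onSensorChanged() to obtain screen coordinates.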
I think the first two classes are responsible for it, but I am not able to fix it. Please guide me.
I am new to Android development. I'm making a simple app with one Activity. In this Activity I'm trying to get frames from the camera and process them in real time, but I'm having a camera orientation issue, i.e. the received image is rotated 90 degrees. There are many solutions available for this problem, but I found none for the "JavaCameraView". So please help me solve the orientation issue specifically for the "JavaCameraView".
This is my code:
public class MainActivity extends AppCompatActivity implements CameraBridgeViewBase.CvCameraViewListener2 {
private static final String TAG = "MainActivity";
JavaCameraView javaCameraView;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS: {
javaCameraView.enableView();
}
break;
default: {
super.onManagerConnected(status);
}
break;
}
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
//if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)!= PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, CAMERA_PERMISSION_REQUEST_CODE);
//}
javaCameraView = (JavaCameraView) findViewById(R.id.java_camera_view);
javaCameraView.setVisibility(View.VISIBLE);
javaCameraView.setCvCameraViewListener(this);
}
@Override
protected void onPause() {
super.onPause();
if (javaCameraView != null)
javaCameraView.disableView();
}
@Override
protected void onDestroy() {
super.onDestroy();
if (javaCameraView != null)
javaCameraView.disableView();
}
@Override
protected void onResume() {
super.onResume();
if (OpenCVLoader.initDebug()) {
Log.i(TAG, "OpenCV loaded successfully.");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
} else {
Log.i(TAG, "OpenCV not loaded.");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_1_0, this, mLoaderCallback);
}
}
@Override
public void onCameraViewStarted(int width, int height) {
frame = new Mat(height, width, CV_8UC4);
}
@Override
public void onCameraViewStopped() {
frame.release();
}
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
frame = inputFrame.rgba();
//frame=processFrame();
//Imgcodecs.imwrite("/storage/emulated/0/aaaaa+.jpg", frame);
return frame;
}
}
I have solved this issue:
Use the class below instead of JavaCameraView:
public class PortraitCameraView extends CameraBridgeViewBase implements Camera.PreviewCallback {
private static final int MAGIC_TEXTURE_ID = 10;
private static final String TAG = "JavaCameraView";
private byte mBuffer[];
private Mat[] mFrameChain;
private int mChainIdx = 0;
private Thread mThread;
private boolean mStopThread;
public Camera mCamera;
protected JavaCameraFrame[] mCameraFrame;
private SurfaceTexture mSurfaceTexture;
private int mCameraId;
Handler handler;
boolean callBuffer = false;
Camera.Size bestSize = null;
Camera.Size pictureSize = null;
private LayoutMode mLayoutMode;
private int mCenterPosX = -1;
private int mCenterPosY;
public static enum LayoutMode {
FitToParent, // Scale to the size that no side is larger than the parent
NoBlank // Scale to the size that no side is smaller than the parent
}
public static class JavaCameraSizeAccessor implements ListItemAccessor {
public int getWidth(Object obj) {
Camera.Size size = (Camera.Size) obj;
return size.width;
}
public int getHeight(Object obj) {
Camera.Size size = (Camera.Size) obj;
return size.height;
}
}
public PortraitCameraView(Context context, int cameraId) {
super(context, cameraId);
}
public PortraitCameraView(Context context, AttributeSet attrs) {
super(context, attrs);
}
protected boolean initializeCamera(int width, int height) {
handler = new Handler();
Log.d(TAG, "Initialize java camera");
boolean result = true;
synchronized (this) {
mCamera = null;
boolean connected = false;
int numberOfCameras = android.hardware.Camera.getNumberOfCameras();
android.hardware.Camera.CameraInfo cameraInfo = new android.hardware.Camera.CameraInfo();
for (int i = 0; i < numberOfCameras; i++) {
android.hardware.Camera.getCameraInfo(i, cameraInfo);
if (cameraInfo.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) {
try {
mCamera = Camera.open(i);
mCameraId = i;
connected = true;
} catch (RuntimeException e) {
Log.e(TAG, "Camera #" + i + "failed to open: " + e.getMessage());
}
if (connected) break;
}
}
if (mCamera == null) return false;
/* Now set camera parameters */
try {
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
List<Camera.Size> Picturesizes = params.getSupportedPictureSizes();
pictureSize = Picturesizes.get(0);
List<Camera.Size> sizeList = sizes;
bestSize = sizeList.get(0);
Log.d(TAG, "getSupportedPreviewSizes() " + bestSize.width + " " + bestSize.height);
Log.d(TAG, "Picturesizes() " + pictureSize.width + " " + pictureSize.height);
// bestSize.width = GlobalArea.display_width;
//// bestSize.height = GlobalArea.display_height;
for (int i = 1; i < sizeList.size(); i++) {
if ((sizeList.get(i).width * sizeList.get(i).height) > (bestSize.width * bestSize.height)) {
Log.d(TAG, "getSupportedPreviewSizes() " + sizeList.get(i).width + " " + sizeList.get(i).height);
bestSize = sizeList.get(i);
}
}
if (sizes != null) {
/* Select the size that fits surface considering maximum size allowed */
Size frameSize = calculateCameraFrameSize(sizes, new JavaCameraSizeAccessor(), height, width); //use turn around values here to get the correct prev size for portrait mode
params.setPreviewFormat(ImageFormat.NV21);
Log.e(TAG, "Set preview size to " + Integer.valueOf((int) bestSize.width) + " x " + Integer.valueOf((int) bestSize.height));
Log.e(TAG, "Set preview size to " + width + " x " + height);
params.setPreviewSize((int) bestSize.width, (int) bestSize.height);
params.setPictureSize((int) pictureSize.width, (int) pictureSize.height);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH)
params.setRecordingHint(true);
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes != null && FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
boolean hasFlash = SevenBitsDemo.getInstance().getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FLASH);
if (hasFlash) {
// mOpenCvCameraView.flashOn();
params.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH);
}
List<int[]> ints = params.getSupportedPreviewFpsRange();
for (int i = 0; i < ints.size(); i++) {
Log.e("privew size", String.valueOf(ints.get(i).length));
}
// params.setPreviewFpsRange(10000,10000);
mCamera.setParameters(params);
// boolean mSurfaceConfiguring = adjustSurfaceLayoutSize(bestSize, true, width, height);
params = mCamera.getParameters();
GlobalArea.preview_size = params.getPreviewSize();
mFrameWidth = params.getPreviewSize().height; //the frame width and height of the super class are used to generate the cached bitmap and they need to be the size of the resulting frame
mFrameHeight = params.getPreviewSize().width;
int realWidth = mFrameHeight; //the real width and height are the width and height of the frame received in onPreviewFrame ...
int realHeight = mFrameWidth;
if ((getLayoutParams().width == LinearLayout.LayoutParams.MATCH_PARENT) && (getLayoutParams().height == LinearLayout.LayoutParams.MATCH_PARENT))
mScale = Math.min(((float) height) / mFrameHeight, ((float) width) / mFrameWidth);
else
mScale = 0;
if (mFpsMeter != null) {
mFpsMeter.setResolution((int) pictureSize.width, (int) pictureSize.height);
}
int size = mFrameWidth * mFrameHeight;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
mCamera.addCallbackBuffer(mBuffer);
mCamera.setPreviewCallbackWithBuffer(this);
mFrameChain = new Mat[2];
mFrameChain[0] = new Mat(realHeight + (realHeight / 2), realWidth, CvType.CV_8UC1); //the frame chain is still in landscape
mFrameChain[1] = new Mat(realHeight + (realHeight / 2), realWidth, CvType.CV_8UC1);
AllocateCache();
mCameraFrame = new JavaCameraFrame[2];
mCameraFrame[0] = new JavaCameraFrame(mFrameChain[0], mFrameWidth, mFrameHeight); //the camera frame is in portrait
mCameraFrame[1] = new JavaCameraFrame(mFrameChain[1], mFrameWidth, mFrameHeight);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
mSurfaceTexture = new SurfaceTexture(MAGIC_TEXTURE_ID);
mCamera.setPreviewTexture(mSurfaceTexture);
} else
mCamera.setPreviewDisplay(null);
/* Finally we are ready to start the preview */
Log.d(TAG, "startPreview");
mCamera.startPreview();
} else
result = false;
} catch (Exception e) {
result = false;
e.printStackTrace();
}
}
return result;
}
protected void releaseCamera() {
synchronized (this) {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
}
mCamera = null;
if (mFrameChain != null) {
mFrameChain[0].release();
mFrameChain[1].release();
}
if (mCameraFrame != null) {
mCameraFrame[0].release();
mCameraFrame[1].release();
}
}
}
@Override
protected boolean connectCamera(int width, int height) {
/* 1. We need to instantiate camera
* 2. We need to start thread which will be getting frames
*/
/* First step - initialize camera connection */
Log.d(TAG, "Connecting to camera");
if (!initializeCamera(width, height))
return false;
/* now we can start update thread */
Log.d(TAG, "Starting processing thread");
mStopThread = false;
mThread = new Thread(new CameraWorker());
mThread.start();
return true;
}
protected void disconnectCamera() {
/* 1. We need to stop thread which updating the frames
* 2. Stop camera and release it
*/
Log.d(TAG, "Disconnecting from camera");
try {
mStopThread = true;
Log.d(TAG, "Notify thread");
synchronized (this) {
this.notify();
}
Log.d(TAG, "Wating for thread");
if (mThread != null)
mThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
} finally {
mThread = null;
}
/* Now release camera */
releaseCamera();
}
public void onPreviewFrame(byte[] frame, Camera arg1) {
synchronized (this) {
mFrameChain[1 - mChainIdx].put(0, 0, frame);
this.notify();
}
if (mCamera != null)
mCamera.addCallbackBuffer(mBuffer);
}
private class JavaCameraFrame implements CvCameraViewFrame {
private Mat mYuvFrameData;
private Mat mRgba;
private int mWidth;
private int mHeight;
private Mat mRotated;
public Mat gray() {
if (mRotated != null) mRotated.release();
mRotated = mYuvFrameData.submat(0, mWidth, 0, mHeight);
//submat with reversed width and height because it's done on the landscape frame
mRotated = mRotated.t();
Core.flip(mRotated, mRotated, 1);
return mRotated;
}
public Mat rgba() {
Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2BGR_NV12, 4);
if (mRotated != null) mRotated.release();
mRotated = mRgba.t();
Core.flip(mRotated, mRotated, 1);
return mRotated;
}
public JavaCameraFrame(Mat Yuv420sp, int width, int height) {
super();
mWidth = width;
mHeight = height;
mYuvFrameData = Yuv420sp;
mRgba = new Mat();
}
public void release() {
mRgba.release();
if (mRotated != null) mRotated.release();
}
}
private class CameraWorker implements Runnable {
public void run() {
do {
synchronized (PortraitCameraView.this) {
try {
PortraitCameraView.this.wait();
} catch (InterruptedException e) {
Log.e(TAG, "CameraWorker interrupted", e);
}
}
if (!mStopThread) {
if (!mFrameChain[mChainIdx].empty())
deliverAndDrawFrame(mCameraFrame[mChainIdx]);
mChainIdx = 1 - mChainIdx;
}
} while (!mStopThread);
Log.d(TAG, "Finish processing thread");
}
}
}
So now use PortraitCameraView in your XML and Java files, because I have converted JavaCameraView to portrait mode in this class.
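For example (a sketch, not from the original answer): replace the org.opencv.android.JavaCameraView element in your layout XML with the fully-qualified class name of PortraitCameraView, keep the same android:id, and the Activity code from the question stays essentially the same:
PortraitCameraView javaCameraView = (PortraitCameraView) findViewById(R.id.java_camera_view);
javaCameraView.setVisibility(View.VISIBLE);
javaCameraView.setCvCameraViewListener(this);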
You can use the setMaxFrameSize function:
javaCameraView.setMaxFrameSize(480, 640);
480 is the width and 640 is the height. Now JavaCameraView is in portrait.
On some devices the image becomes stretched. I'm using a SurfaceView for my camera preview.
public class vp02ImageCapture extends SuperVP {
private static final String TAG = "vp02ImageCapture";
private Camera camera;
@Bind(R.id.TransparentView) SurfaceView transparentView;
@Bind(R.id.sv_camera) SurfaceView sv;
@Bind(R.id.relative) RelativeLayout rl;
@Bind(R.id.mask) FrameLayout mask;
private ProgressDialog dp;
private boolean front = true;
private SurfaceHolder sh,holderTransparent;
int orient = 1;
private Handler h = new Handler();
View v;
//private UserInfo info;
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
v = inflater.inflate(R.layout.vp02imagecapture, null);
ButterKnife.bind(this, v);
sh = sv.getHolder();
holderTransparent = transparentView.getHolder();
holderTransparent.setFormat(PixelFormat.TRANSPARENT);
transparentView.setZOrderMediaOverlay(true);
return v;
}
public int getscrOrientation()
{
Display getOrient = getActivity().getWindowManager().getDefaultDisplay();
int orientation = getOrient.getOrientation();
// Sometimes you may get undefined orientation Value is 0
// simple logic solves the problem compare the screen
// X,Y Co-ordinates and determine the Orientation in such cases
if(orientation==Configuration.ORIENTATION_UNDEFINED){
Configuration config = getResources().getConfiguration();
orientation = config.orientation;
if(orientation== Configuration.ORIENTATION_UNDEFINED){
//if height and widht of screen are equal then
// it is square orientation
if(getOrient.getWidth()==getOrient.getHeight()){
orientation = Configuration.ORIENTATION_SQUARE;
}else{ //if widht is less than height than it is portrait
if(getOrient.getWidth() < getOrient.getHeight()){
orientation = Configuration.ORIENTATION_PORTRAIT;
}else{ // if it is not any of the above it will defineitly be landscape
orientation = Configuration.ORIENTATION_LANDSCAPE;
}
}
}
}
return orientation; // return value 1 is portrait and 2 is Landscape Mode
}
openCamera() method
private void openCamera() {
int cameraId;
if (front) {
cameraId = Camera.CameraInfo.CAMERA_FACING_FRONT;
camera = Camera.open(Camera.CameraInfo.CAMERA_FACING_FRONT);
} else{
cameraId = Camera.CameraInfo.CAMERA_FACING_BACK;
camera = Camera.open(Camera.CameraInfo.CAMERA_FACING_BACK);
}
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(cameraId, info);
int rotation = getActivity().getWindowManager().getDefaultDisplay()
.getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0: degrees = 0; break;
case Surface.ROTATION_90: degrees = 90; break;
case Surface.ROTATION_180: degrees = 180; break;
case Surface.ROTATION_270: degrees = 270; break;
}
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (info.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate the mirror
} else { // back-facing
result = (info.orientation - degrees + 360) % 360;
}
camera.setDisplayOrientation(result);
final Camera.Parameters parameters = camera.getParameters();
List<Camera.Size> sizes = parameters.getSupportedPreviewSizes();
Camera.Size optimalSize = getOptimalPreviewSize(sizes, getResources().getDisplayMetrics().widthPixels, getResources().getDisplayMetrics().heightPixels);
parameters.setPreviewSize(optimalSize.width, optimalSize.height);
transparentView.requestLayout();
sv.requestLayout();
h.postDelayed(new Runnable() {
@Override
public void run() {
try {
Draw();
// camera.setParameters(parameters);
camera.setPreviewDisplay(sh);
camera.startPreview();
} catch (IOException e) {
e.printStackTrace();
}
}
}, 200);
}
@Override
public void releaseUI() {
rl.setVisibility(View.GONE);
// hideProgressDialog();
camera.release();
}
@Override
public void updateUI() {
super.updateUI();
Log.e("sample","asd " + TAG);
if (TAG.equals("vp02ImageCapture")){
parent.btnNext.setVisibility(View.GONE);
parent.btnBack.setVisibility(View.GONE);
}else{
parent.btnNext.setVisibility(View.VISIBLE);
parent.btnBack.setVisibility(View.VISIBLE);
}
getActivity().setTitle("Image Capture");
rl.setVisibility(View.VISIBLE);
openCamera();
}
@Override
public void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
outState.putBoolean("isfront", front);
}
@OnClick(R.id.btnSwitch) void switchCamera(){
camera.stopPreview();
camera.release();
front = front ? false : true;
openCamera();
}
@OnClick(R.id.btnCapture) void capture(){
// showProgressDialog("Loading...");
camera.takePicture(new Camera.ShutterCallback() {
@Override
public void onShutter() {
}
}, new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
}
}, new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
// rotate capture image
Display display = getActivity().getWindowManager().getDefaultDisplay();
int rotation = 0;
switch (display.getRotation()) {
case Surface.ROTATION_0: // This is display orientation
rotation = 90;
break;
case Surface.ROTATION_90:
rotation = 0;
break;
case Surface.ROTATION_180:
rotation = 270;
break;
case Surface.ROTATION_270:
rotation = 180;
break;
}
//Crop image size
Bitmap bitmap = ImageFactory.byteArrayToBitmap(data);
if (front) {
bitmap = rotate(bitmap, rotation);
}
int l = sv.getWidth() / 8;
int t = (sv.getHeight() / 2) - (l*4);
// 2.3 Size of rotated bitmap
int bitWidth = bitmap.getWidth();
int bitHeight = bitmap.getHeight();
Log.e("SIZES", "" + sv.getHeight() + "----" + bitHeight + "-----");
// 3. Size of camera preview on screen
int preWidth = sv.getWidth();
int preHeight = sv.getHeight();
// 4. Scale it.
// Assume you draw Rect as "canvas.drawRect(60, 50, 210, 297, paint);" command
// canvas.drawRect(l,t,l*7,t + l*8,paint1);
// int startx = (l * bitWidth / preWidth);
// int starty = (t * bitHeight / preHeight);
// int endx = ((l * 6) * bitWidth / preWidth);
// int endy = (((t * 4) * bitHeight / preHeight));
int startx = (l * bitWidth / preWidth);
int starty = (t * bitHeight / preHeight);
int endx = ((l * 6) * bitWidth / preWidth);
int endy = (((t + (l*7)) * bitHeight / preHeight));
Log.e("ELIBOY!", "" + l + "----" + t + "------" + l * 7 + "----" + t * 5);
Log.e("ELI!!!!", "" + startx + "----" + starty + "------" + endx + "----" + endy);
ByteArrayOutputStream stream = new ByteArrayOutputStream();
bitmap.compress(Bitmap.CompressFormat.PNG, 100, stream);
byte[] byteArray = stream.toByteArray();
setImageByteArray(byteArray);
setStartX(startx);
setStartY(starty);
setEndX(endx);
setEndY(endy);
viewPager.setCurrentItem(viewPager.getCurrentItem() + 1); //set next page to display ; current page + 1
/*Fragment f = new vp03ImagePreview();
f.setArguments(b);
getParentFragment().getChildFragmentManager().beginTransaction().replace(R.id.imagefrag_container,f).commit();*/
//ScreenManager.getInstance().replaceScreen(Screens.Enroll02b,b);
}
});
}
boolean fromPause = false;
@Override
public void onResume() {
super.onResume();
if(fromPause)
if(((ScreenTransaction)getParentFragment()).viewPager.getCurrentItem()==1)
openCamera();
fromPause = false;
}
@Override
public void onPause() {
super.onPause();
try{
camera.release();
}catch (NullPointerException e){
}
h.removeCallbacksAndMessages(null);
fromPause = true;
//viewPager.removeOnPageChangeListener(viewpagerChangeListener);
}
private Camera.Size getOptimalPreviewSize(List<Camera.Size> sizes, int w, int h) {
final double ASPECT_TOLERANCE = 0.5;
double targetRatio=(double)h / w;
if (sizes == null) return null;
Camera.Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
int targetHeight = h;
for (Camera.Size size : sizes) {
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
if (optimalSize == null) {
minDiff = Double.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
}
return optimalSize;
}
@OnClick(R.id.mask) void autofocus(){
camera.autoFocus(new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success, Camera camera) {
}
});
}
public void showProgressDialog(String message){
dp = new ProgressDialog(getActivity());
dp.setProgressStyle(ProgressDialog.STYLE_SPINNER);
dp.setCancelable(false);
dp.setMessage(message);
dp.show();
}
public void hideProgressDialog(){
dp.hide();
}
}
I want to support all screens.
How can I solve this?
On some screens it works, but on others it does not, especially when the device's default camera is not full screen.
I have made this code and it is working very well.
public class CameraActivity extends Activity implements Constants.Parameters {
FrameLayout previewFrameLayout;
ImageButton captureButton, switchCamera, flashImageButton, backImageButton;
boolean isFlashEnabled = false, isFlashAvailable = false, isFrontCamera = false;
Handler handler;
private int mCameraId;
private Camera mCamera;
private CameraPreview mCameraPreview;
public final int BACK_CAMERA = 0;
public final int FRONT_CAMERA = 1;
String className = "";
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_camera);
handler = new Handler();
isFlashAvailable = getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FLASH);
isFrontCamera = getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FRONT);
previewFrameLayout = (FrameLayout) findViewById(R.id.camera_preview);
captureButton = (ImageButton) findViewById(R.id.button_capture);
captureButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mCamera.takePicture(null, null, mPicture);
}
});
switchCamera = (ImageButton) findViewById(R.id.switchCamera);
switchCamera.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Utils.scaleView(switchCamera);
handler.postDelayed(new Runnable() {
@Override
public void run() {
previewFrameLayout.removeAllViews();
if (mCameraId == BACK_CAMERA) {
flashImageButton.setVisibility(View.INVISIBLE);
mCameraId = FRONT_CAMERA;
mCameraPreview = new CameraPreview(CameraNewActivity.this, FRONT_CAMERA);
previewFrameLayout.addView(mCameraPreview);
} else {
flashImageButton.setVisibility(View.VISIBLE);
mCameraId = BACK_CAMERA;
mCameraPreview = new CameraPreview(CameraNewActivity.this, BACK_CAMERA);
previewFrameLayout.addView(mCameraPreview);
}
}
}, 250);
}
});
flashImageButton = (ImageButton) findViewById(R.id.flash);
flashImageButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Utils.scaleView(flashImageButton);
try {
Camera.Parameters parameters = mCamera.getParameters();
if (!isFlashEnabled) {
isFlashEnabled = true;
parameters.setFlashMode(Camera.Parameters.FLASH_MODE_ON);
flashImageButton.setSelected(true);
} else {
isFlashEnabled = false;
parameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
flashImageButton.setSelected(false);
}
mCamera.setParameters(parameters);
} catch (Exception e) {
e.printStackTrace();
}
}
});
if (!isFlashAvailable) {
flashImageButton.setVisibility(View.GONE);
}
if (!isFrontCamera) {
switchCamera.setVisibility(View.GONE);
}
backImageButton = (ImageButton) findViewById(R.id.back);
backImageButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
finish();
}
});
}
private void hideIcons() {
switchCamera.setVisibility(View.GONE);
captureButton.setVisibility(View.GONE);
flashImageButton.setVisibility(View.GONE);
backImageButton.setVisibility(View.GONE);
}
private void showIcons() {
switchCamera.setVisibility(View.VISIBLE);
captureButton.setVisibility(View.VISIBLE);
flashImageButton.setVisibility(View.VISIBLE);
backImageButton.setVisibility(View.VISIBLE);
}
@Override
protected void onResume() {
super.onResume();
mCameraPreview = new CameraPreview(this, mCameraId);
previewFrameLayout.addView(mCameraPreview);
}
@Override
protected void onPause() {
super.onPause();
mCameraPreview.stop();
previewFrameLayout.removeView(mCameraPreview); // This is necessary.
mCameraPreview = null;
}
Camera.PictureCallback mPicture = new Camera.PictureCallback() {
@Override
public void onPictureTaken(final byte[] data, Camera camera) {
final File pictureFile = new File(Utils.profilePicPath + String.valueOf(System.currentTimeMillis()) + ".jpg");
pictureFile.getParentFile().mkdirs();
Utils.deleteCapture();
if (pictureFile == null) {
return;
}
Thread thread = new Thread() {
@Override
public void run() {
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(data);
fos.close();
BitmapFactory.Options bmOptions = new BitmapFactory.Options();
bmOptions.inJustDecodeBounds = true;
BitmapFactory.decodeFile(pictureFile.getPath(), bmOptions);
final int height = bmOptions.outHeight;
final int width = bmOptions.outWidth;
if (height > width) {
if (height / width < 1.5) {
Bitmap bitmap = Picasso.with(CameraNewActivity.this)
.load(pictureFile).resize((int) (height / 1.65), height).centerCrop().get();
if (mCameraId == FRONT_CAMERA) {
bitmap = flip(bitmap);
}
FileOutputStream out = new FileOutputStream(pictureFile.getPath());
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, out);
out.flush();
out.close();
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
};
thread.start();
}
};
public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback {
private SurfaceHolder mHolder;
protected List<Camera.Size> mPreviewSizeList;
protected List<Camera.Size> mPictureSizeList;
protected Camera.Size mPreviewSize;
protected Camera.Size mPictureSize;
private int mSurfaceChangedCallDepth = 0;
private int mCenterPosX = -1;
private int mCenterPosY = 0;
protected boolean mSurfaceChanged = false;
public CameraPreview(Activity activity, int cameraId) {
super(activity);
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
if (Camera.getNumberOfCameras() > cameraId) {
mCameraId = cameraId;
} else {
mCameraId = 0;
}
mCamera = Camera.open(mCameraId);
Camera.Parameters cameraParams = mCamera.getParameters();
mPreviewSizeList = cameraParams.getSupportedPreviewSizes();
mPictureSizeList = cameraParams.getSupportedPictureSizes();
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
mCamera.setPreviewDisplay(mHolder);
} catch (IOException e) {
mCamera.release();
mCamera = null;
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
mSurfaceChangedCallDepth++;
mCamera.stopPreview();
Camera.Parameters cameraParams = mCamera.getParameters();
if (!mSurfaceChanged) {
Camera.Size previewSize = determinePreviewSize(width, height);
Camera.Size pictureSize = determinePictureSize(previewSize);
mPreviewSize = previewSize;
mPictureSize = pictureSize;
mSurfaceChanged = adjustSurfaceLayoutSize(previewSize, width, height);
if (mSurfaceChanged && (mSurfaceChangedCallDepth <= 1)) {
return;
}
}
if (mCameraId == BACK_CAMERA) {
cameraParams.setRotation(90);
} else {
cameraParams.setRotation(270);
}
mCamera.setDisplayOrientation(90);
cameraParams.set("orientation", "portrait");
cameraParams.setPreviewSize(mPreviewSize.width, mPreviewSize.height);
cameraParams.setPictureSize(mPictureSize.width, mPictureSize.height);
mCamera.setParameters(cameraParams);
mSurfaceChanged = false;
try {
mCamera.startPreview();
} catch (Exception e) {
// Remove failed size
mPreviewSizeList.remove(mPreviewSize);
mPreviewSize = null;
// Reconfigure
if (mPreviewSizeList.size() > 0) { // prevent infinite loop
surfaceChanged(null, 0, width, height);
} else {
Utils.showToast(CameraNewActivity.this, "Can't start preview", Toast.LENGTH_LONG);
}
}
mSurfaceChangedCallDepth--;
}
protected Camera.Size determinePreviewSize(int reqWidth, int reqHeight) {
int reqPreviewWidth = reqHeight; // requested width in terms of camera hardware
int reqPreviewHeight = reqWidth; // requested height in terms of camera hardware
// Adjust surface size with the closest aspect-ratio
float reqRatio = ((float) reqPreviewWidth) / reqPreviewHeight;
float curRatio, deltaRatio;
float deltaRatioMin = Float.MAX_VALUE;
Camera.Size retSize = null;
for (Camera.Size size : mPreviewSizeList) {
curRatio = ((float) size.width) / size.height;
deltaRatio = Math.abs(reqRatio - curRatio);
if (deltaRatio < deltaRatioMin) {
deltaRatioMin = deltaRatio;
retSize = size;
}
}
return retSize;
}
protected Camera.Size determinePictureSize(Camera.Size previewSize) {
Camera.Size retSize = null;
for (Camera.Size size : mPictureSizeList) {
if (size.equals(previewSize)) {
return size;
}
}
// if the preview size is not supported as a picture size
float reqRatio = ((float) previewSize.width) / previewSize.height;
float curRatio, deltaRatio;
float deltaRatioMin = Float.MAX_VALUE;
for (Camera.Size size : mPictureSizeList) {
curRatio = ((float) size.width) / size.height;
deltaRatio = Math.abs(reqRatio - curRatio);
if (deltaRatio < deltaRatioMin) {
deltaRatioMin = deltaRatio;
retSize = size;
}
}
return retSize;
}
protected boolean adjustSurfaceLayoutSize(Camera.Size previewSize,
int availableWidth, int availableHeight) {
float tmpLayoutHeight = previewSize.width;
float tmpLayoutWidth = previewSize.height;
float factH, factW, fact;
factH = availableHeight / tmpLayoutHeight;
factW = availableWidth / tmpLayoutWidth;
if (factH < factW) {
fact = factH;
} else {
fact = factW;
}
FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) this.getLayoutParams();
int layoutHeight = (int) (tmpLayoutHeight * fact);
int layoutWidth = (int) (tmpLayoutWidth * fact);
boolean layoutChanged;
if ((layoutWidth != this.getWidth()) || (layoutHeight != this.getHeight())) {
layoutParams.height = layoutHeight;
layoutParams.width = layoutWidth;
if (mCenterPosX >= 0) {
layoutParams.topMargin = mCenterPosY - (layoutHeight / 2);
layoutParams.leftMargin = mCenterPosX - (layoutWidth / 2);
}
this.setLayoutParams(layoutParams); // this will trigger another surfaceChanged invocation.
layoutChanged = true;
} else {
layoutChanged = false;
}
return layoutChanged;
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
stop();
}
public void stop() {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
}
}
public Bitmap flip(Bitmap bitmap) {
Matrix mtx = new Matrix();
mtx.preScale(-1, 1);
return Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), mtx, true);
}
}
I am just curious about how to correctly use the setOneShotPreviewCallback() callback and onPreviewFrame(byte[] data, Camera camera) to get an image exactly as it is displayed in the preview. I have implemented all the code and it runs, but the resulting image is of no use. Below is my code; please help me see what I am doing wrong.
public class MainActivity extends ActionBarActivity {
private static final String TAG = "CamTestActivity";
Preview preview;
Button buttonClick;
Camera camera;
String fileName;
Activity act;
Context ctx;
int frontCamera;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
ctx = this;
act = this;
//requestWindowFeature(Window.FEATURE_NO_TITLE);
//getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.activity_main);
preview = new Preview(this, (SurfaceView)findViewById(R.id.surfaceView));
preview.setLayoutParams(new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
((FrameLayout) findViewById(R.id.preview)).addView(preview);
preview.setKeepScreenOn(true);
frontCamera = findFrontFacingCamera();
buttonClick = (Button) findViewById(R.id.button_capture);
//camera.setOneShotPreviewCallback(cameraPreviewCallback);
buttonClick.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
//camera.setOneShotPreviewCallback(cameraPreviewCallback);
//camera.takePicture(shutterCallback, rawCallback, jpegCallback);
camera.setOneShotPreviewCallback(new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
// TODO Auto-generated method stub
Camera.Parameters parameters = camera.getParameters();
int format = parameters.getPreviewFormat();
//YUV formats require more conversion
if (format == ImageFormat.NV21 || format == ImageFormat.YUY2 || format == ImageFormat.NV16) {
int w = parameters.getPreviewSize().width;
int h = parameters.getPreviewSize().height;
Log.d("imageFormat",Integer.toString(format));
Log.d("imageNV21",Integer.toString(ImageFormat.NV21));
Log.d("imageYUY2",Integer.toString(ImageFormat.YUY2));
Log.d("imageNV16",Integer.toString(ImageFormat.NV16));
// Get the YUV image
YuvImage yuv_image = new YuvImage(data, format, w, h, null);
// Convert YUV to JPEG
Rect rect = new Rect(0, 0, w, h);
ByteArrayOutputStream output_stream = new ByteArrayOutputStream();
yuv_image.compressToJpeg(rect, 100, output_stream);
byte[] byt = output_stream.toByteArray();
FileOutputStream outStream = null;
try {
// Write to SD Card
File folder = new File(Environment.getExternalStorageDirectory().toString()+"/SilentCamera/Images");
if(folder.exists()){
Log.d("creating folder","Folder already exists");
//Save the path as a string value
String extStorageDirectory = folder.toString();
Log.d("DirPath",extStorageDirectory);
fileName = String.format(extStorageDirectory+"/%d.jpg", System.currentTimeMillis());
Log.d("ImagePath",fileName);
outStream = new FileOutputStream(fileName);
//Uri uriSavedImage = Uri.fromFile(file);
outStream.write(byt);
outStream.flush();
outStream.close();
Log.d("SilentCam","Frame Received");
}
else{
folder.mkdirs();
String extStorageDirectory = folder.toString();
Log.d("DirPath",extStorageDirectory);
fileName = String.format(extStorageDirectory+"/%d.jpg", System.currentTimeMillis());
Log.d("ImagePath",fileName);
outStream = new FileOutputStream(fileName);
outStream.write(byt);
outStream.close();
Log.d("PassMainCamera", "onPictureTaken - wrote bytes: " + data.length);
Log.d("ImageUrl","Image Url added to database");
}
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
}
}
}
});
//camera.startPreview();
}
});
}
@Override
protected void onResume() {
super.onResume();
camera = Camera.open(frontCamera);
camera.startPreview();
preview.setCamera(camera);
resetCam();
}
@Override
protected void onPause() {
if(camera != null) {
camera.stopPreview();
preview.setCamera(null);
camera.release();
camera = null;
}
super.onPause();
}
private void resetCam() {
camera.startPreview();
preview.setCamera(camera);
}
public static void setCameraDisplayOrientation(Context context,
int cameraId, android.hardware.Camera camera)
{
int result = MainActivity.getCameraDisplayOrientation(context, cameraId, camera);
if (android.os.Build.VERSION.SDK_INT <= 14)
{
camera.stopPreview();
camera.setDisplayOrientation(result);
camera.startPreview();
Log.i("Version<=14","inSettingOrirntation");
} else
{
camera.setDisplayOrientation(90);
Log.i("Version>14","inSettingOrirntation");
}
}// end setCameraDisplayOrientation
public static int getCameraDisplayOrientation(Context context,
int cameraId, android.hardware.Camera camera)
{
android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
android.hardware.Camera.getCameraInfo(cameraId, info);
int rotation = ((Activity) context).getWindowManager().getDefaultDisplay()
.getRotation();
int degrees = 0;
switch (rotation)
{
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
}
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT)
{
result = (info.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate the mirror
} else
{ // back-facing
result = (info.orientation - degrees + 360) % 360;
}
return result;
}
// finding the id of the front-facing camera
private int findFrontFacingCamera() {
int foundId = -1;
int numCams = Camera.getNumberOfCameras();
for (int camId = 0; camId < numCams; camId++) {
CameraInfo info = new CameraInfo();
Camera.getCameraInfo(camId, info);
if (info.facing == CameraInfo.CAMERA_FACING_FRONT) {
foundId = camId;
break;
}
}
return foundId;
}
}
And this is my SurfaceHolder.Callback implementation:
class Preview extends ViewGroup implements SurfaceHolder.Callback {
private final String TAG = "Preview";
SurfaceView mSurfaceView;
SurfaceHolder mHolder;
Size mPreviewSize;
List<Size> mSupportedPreviewSizes;
Camera mCamera;
private Activity activity;
Context context;
Preview(Context _context, SurfaceView sv) {
super(_context);
context = _context;
mSurfaceView = sv;
mHolder = mSurfaceView.getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void setCamera(Camera camera) {
mCamera = camera;
if (mCamera != null) {
mSupportedPreviewSizes = mCamera.getParameters().getSupportedPreviewSizes();
requestLayout();
Camera.Parameters params = mCamera.getParameters();
List<String> focusModes = params.getSupportedFocusModes();
if (focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
mCamera.setParameters(params);
}
}
}
#Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
final int width = resolveSize(getSuggestedMinimumWidth(), widthMeasureSpec);
final int height = resolveSize(getSuggestedMinimumHeight(), heightMeasureSpec);
setMeasuredDimension(width, height);
if (mSupportedPreviewSizes != null) {
mPreviewSize = getOptimalPreviewSize(mSupportedPreviewSizes, width, height);
}
}
#Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
if (changed && getChildCount() > 0) {
final View child = getChildAt(0);
final int width = r - l;
final int height = b - t;
int previewWidth = width;
int previewHeight = height;
if (mPreviewSize != null) {
previewWidth = mPreviewSize.width;
previewHeight = mPreviewSize.height;
}
if (width * previewHeight > height * previewWidth) {
final int scaledChildWidth = previewWidth * height / previewHeight;
child.layout((width - scaledChildWidth) / 2, 0,
(width + scaledChildWidth) / 2, height);
} else {
final int scaledChildHeight = previewHeight * width / previewWidth;
child.layout(0, (height - scaledChildHeight) / 2,
width, (height + scaledChildHeight) / 2);
}
}
}
public void surfaceCreated(SurfaceHolder holder) {
try {
if (mCamera != null) {
mCamera.setPreviewDisplay(holder);
// mCamera.startPreview();
}
} catch (IOException exception) {
Log.e(TAG, "IOException caused by setPreviewDisplay()", exception);
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
if (mCamera != null) {
// mCamera.stopPreview();
}
}
private Size getOptimalPreviewSize(List<Size> sizes, int w, int h) {
final double ASPECT_TOLERANCE = 0.1;
double targetRatio = (double) w / h;
if (sizes == null) return null;
Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
int targetHeight = h;
for (Size size : sizes) {
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
if (optimalSize == null) {
minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
}
return optimalSize;
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
if(mCamera != null) {
Camera.Parameters parameters = mCamera.getParameters();
String sceneMode = "SCENE_MODE_PORTRAIT";
parameters.setPreviewSize(mPreviewSize.width, mPreviewSize.height);
parameters.setPictureSize(mPreviewSize.width, mPreviewSize.height);
Log.d("Preview Width",Integer.toString(mPreviewSize.width));
Log.d("Preview Height",Integer.toString(mPreviewSize.height));
Log.d("Preview Zoom",Integer.toString(parameters.getZoom()));
//parameters.setSceneMode(sceneMode);
//parameters.se
requestLayout();
//MainActivity.setCameraDisplayOrientation(context,Camera.CameraInfo.CAMERA_FACING_FRONT, mCamera);
mCamera.setParameters(parameters);
//mCamera.startPreview();
//mCamera.setDisplayOrientation(90);
}
}
}
This is all my code. It has been three days and I am still stuck on this problem; everyone says it works, but not for me. I am testing on a Samsung Galaxy S running Android 2.3.6 (Gingerbread). Please help me, thanks.
This is because the data passed to the callback is not in JPEG format; it is the raw preview buffer. You need to convert it first:
Camera.Parameters parameters = camera.getParameters();
final int format = parameters.getPreviewFormat();
YuvImage im = new YuvImage( data, format, width, height, null );
ByteArrayOutputStream out = new ByteArrayOutputStream();
final Rect rect = new Rect( 0, 0, width, height );
im.compressToJpeg( rect, 100, out );
byte[] jpeg = out.toByteArray();
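A slightly fuller sketch of the same idea (my assumptions: this runs inside onPreviewFrame, the preview format is NV21, and width/height come from the camera's preview size) that also shows how the resulting JPEG can be decoded and rotated to match a portrait display:
// Sketch only: convert one NV21 preview frame to a portrait Bitmap.
Camera.Parameters parameters = camera.getParameters();
int width = parameters.getPreviewSize().width;   // preview buffers arrive in the
int height = parameters.getPreviewSize().height; // sensor's (usually landscape) orientation

YuvImage im = new YuvImage(data, parameters.getPreviewFormat(), width, height, null);
ByteArrayOutputStream out = new ByteArrayOutputStream();
im.compressToJpeg(new Rect(0, 0, width, height), 90, out);
byte[] jpeg = out.toByteArray();

// setDisplayOrientation(90) only rotates the on-screen preview, not the
// frame data, so rotate the decoded bitmap if a portrait image is needed.
Bitmap frame = BitmapFactory.decodeByteArray(jpeg, 0, jpeg.length);
Matrix matrix = new Matrix();
matrix.postRotate(90);
Bitmap portrait = Bitmap.createBitmap(frame, 0, 0,
        frame.getWidth(), frame.getHeight(), matrix, true);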
I have implemented my own camera preview layout and functionality. When I press the button to take a picture, the onPictureTaken method is called, but the Intent is never fired, even though the target Activity is declared correctly in the manifest file.
Here is my code; I hope someone can tell me what I need to change to get the Intent to fire.
public class TakePicture extends Activity {
private Preview mPreview;
private Camera mCamera;
private int numberOfCameras;
private int defaultCameraId;
private Context context;
AutoFocusCallback myAutoFocusCallback = new AutoFocusCallback() {
@Override
public void onAutoFocus(boolean arg0, Camera arg1) {
// buttonTakePicture.setEnabled(true);
}
};
PictureCallback myPictureCallback_JPG = new PictureCallback() {
@Override
public void onPictureTaken(byte[] arg0, Camera arg1) {
//savePicture(arg0);
Intent ia = new Intent(context, PreviewPhoto.class);
ia.putExtra("groupId", 1);
ia.putExtra("image", arg0);
startActivity(ia);
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_LEFT_ICON);
setFeatureDrawableResource(Window.FEATURE_LEFT_ICON,
R.drawable.zzz_logo);
context = this;
mPreview = new Preview(this);
setContentView(mPreview);
LayoutInflater controlInflater = LayoutInflater.from(getBaseContext());
View viewControl = controlInflater.inflate(R.layout.camera_control,
null);
LayoutParams layoutParamsControl = new LayoutParams(
LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT);
this.addContentView(viewControl, layoutParamsControl);
numberOfCameras = Camera.getNumberOfCameras();
CameraInfo cameraInfo = new CameraInfo();
for (int i = 0; i < numberOfCameras; i++) {
Camera.getCameraInfo(i, cameraInfo);
if (cameraInfo.facing == CameraInfo.CAMERA_FACING_BACK) {
defaultCameraId = i;
}
}
Button buttonTakePicture = (Button) findViewById(R.id.takepicture);
buttonTakePicture.setOnClickListener(new Button.OnClickListener() {
@Override
public void onClick(View arg0) {
mCamera.takePicture(null, null,
myPictureCallback_JPG);
}
});
}
@Override
protected void onResume() {
super.onResume();
mCamera = Camera.open(defaultCameraId);
mPreview.setCamera(mCamera);
}
@Override
protected void onPause() {
super.onPause();
if (mCamera != null) {
mPreview.setCamera(null);
mCamera.release();
mCamera = null;
}
}
public void determineDisplayOrientation() {
CameraInfo cameraInfo = new CameraInfo();
Camera.getCameraInfo(defaultCameraId, cameraInfo);
int rotation = this.getWindowManager().getDefaultDisplay().getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
}
int displayOrientation;
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
displayOrientation = (cameraInfo.orientation + degrees) % 360;
displayOrientation = (360 - displayOrientation) % 360;
} else {
displayOrientation = (cameraInfo.orientation - degrees + 360) % 360;
}
mCamera.setDisplayOrientation(displayOrientation);
}
private void savePicture(byte[] data) {
File pictureFileDir = getDir();
if (!pictureFileDir.exists() && !pictureFileDir.mkdirs()) {
Toast.makeText(context, "Can't create directory to save image.",
Toast.LENGTH_LONG).show();
return;
}
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss");
String date = dateFormat.format(new Date());
String photoFile = "Picture_" + date + ".jpg";
String filename = pictureFileDir.getPath() + File.separator + photoFile;
File pictureFile = new File(filename);
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(data);
fos.close();
Toast.makeText(context, "New Image saved:" + photoFile,
Toast.LENGTH_LONG).show();
} catch (Exception error) {
Toast.makeText(context, "Image could not be saved.",
Toast.LENGTH_LONG).show();
}
}
private File getDir() {
File sdDir = Environment
.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES);
return new File(sdDir, "Captoom");
}
class Preview extends ViewGroup implements SurfaceHolder.Callback {
private final String TAG = "Preview";
SurfaceView mSurfaceView;
SurfaceHolder mHolder;
Size mPreviewSize;
List<Size> mSupportedPreviewSizes;
Camera mCamera;
Preview(Context context) {
super(context);
mSurfaceView = new SurfaceView(context);
addView(mSurfaceView);
mHolder = mSurfaceView.getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mSurfaceView.setOnClickListener(new LinearLayout.OnClickListener() {
@Override
public void onClick(View arg0) {
//buttonTakePicture.setEnabled(false);
mCamera.autoFocus(myAutoFocusCallback);
}
});
}
public void setCamera(Camera camera) {
mCamera = camera;
if (mCamera != null) {
mSupportedPreviewSizes = mCamera.getParameters()
.getSupportedPreviewSizes();
requestLayout();
}
}
public void switchCamera(Camera camera) {
setCamera(camera);
//determineDisplayOrientation();
try {
camera.setPreviewDisplay(mHolder);
} catch (IOException exception) {
Log.e(TAG, "IOException caused by setPreviewDisplay()", exception);
}
Camera.Parameters parameters = camera.getParameters();
parameters.setPreviewSize(mPreviewSize.width, mPreviewSize.height);
requestLayout();
camera.setParameters(parameters);
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
final int width = resolveSize(getSuggestedMinimumWidth(),
widthMeasureSpec);
final int height = resolveSize(getSuggestedMinimumHeight(),
heightMeasureSpec);
setMeasuredDimension(width, height);
if (mSupportedPreviewSizes != null) {
mPreviewSize = getOptimalPreviewSize(mSupportedPreviewSizes, width,
height);
}
}
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
if (changed && getChildCount() > 0) {
final View child = getChildAt(0);
final int width = r - l;
final int height = b - t;
int previewWidth = width;
int previewHeight = height;
if (mPreviewSize != null) {
previewWidth = mPreviewSize.width;
previewHeight = mPreviewSize.height;
}
if (width * previewHeight > height * previewWidth) {
final int scaledChildWidth = previewWidth * height
/ previewHeight;
child.layout((width - scaledChildWidth) / 2, 0,
(width + scaledChildWidth) / 2, height);
} else {
final int scaledChildHeight = previewHeight * width
/ previewWidth;
child.layout(0, (height - scaledChildHeight) / 2, width,
(height + scaledChildHeight) / 2);
}
}
}
public void surfaceCreated(SurfaceHolder holder) {
try {
if (mCamera != null) {
mCamera.setPreviewDisplay(holder);
}
} catch (IOException exception) {
Log.e(TAG, "IOException caused by setPreviewDisplay()", exception);
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
if (mCamera != null) {
mCamera.stopPreview();
}
}
private Size getOptimalPreviewSize(List<Size> sizes, int w, int h) {
final double ASPECT_TOLERANCE = 0.1;
double targetRatio = (double) w / h;
if (sizes == null)
return null;
Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
int targetHeight = h;
for (Size size : sizes) {
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE)
continue;
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
if (optimalSize == null) {
minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
}
return optimalSize;
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
Camera.Parameters parameters = mCamera.getParameters();
parameters.setPreviewSize(mPreviewSize.width, mPreviewSize.height);
requestLayout();
mCamera.setParameters(parameters);
mCamera.startPreview();
}
}
}
The line ia.putExtra("image", arg0) was causing the "failed binder transaction" error: the raw JPEG byte[] is too large to pass as an Intent extra. I must pass the image path, not the byte[] of data, to the new Intent :)
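A minimal sketch of that fix (the "imagePath" extra key and the directory handling are just illustrative; it reuses getDir() and context from the code above):
PictureCallback myPictureCallback_JPG = new PictureCallback() {
    @Override
    public void onPictureTaken(byte[] data, Camera camera) {
        try {
            // Write the JPEG to disk first; a full-size byte[] extra
            // overflows the binder transaction buffer.
            File dir = getDir();
            if (!dir.exists()) dir.mkdirs();
            File pictureFile = new File(dir,
                    "Picture_" + System.currentTimeMillis() + ".jpg");
            FileOutputStream fos = new FileOutputStream(pictureFile);
            fos.write(data);
            fos.close();

            // Hand only the file path to the next Activity.
            Intent ia = new Intent(context, PreviewPhoto.class);
            ia.putExtra("groupId", 1);
            ia.putExtra("imagePath", pictureFile.getAbsolutePath());
            startActivity(ia);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
};

// In PreviewPhoto the image can then be loaded from that path, e.g.:
// Bitmap bitmap = BitmapFactory.decodeFile(getIntent().getStringExtra("imagePath"));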