Manual camera focus Android Studio - android

I have a SurfaceView showing a CameraSource and I want the camera to focus automatically. Also, when the user taps the SurfaceView, I want the camera to focus on the tapped area. The autofocus is already enabled, but the manual focus is not, and I don't know how to add that. Any tips?
My code so far:
public class MyActivity extends AppCompatActivity {
SurfaceView cameraPreview;
BarcodeDetector barcodeDetector;
CameraSource cameraSource;
final Integer requestCameraPermissionID = 1001;
#Override
protected void onCreate(#Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.my_activity_layout);
final int height = getSurfaceViewHeight();
final int width = getSurfaceViewWidth();
cameraPreview = findViewById(R.id.surfaceview_scanner);
barcodeDetector = new BarcodeDetector.Builder(AddScanningActivity.this)
.setBarcodeFormats(Barcode.ALL_FORMATS)
.build();
cameraSource = new CameraSource.Builder(AddScanningActivity.this, barcodeDetector)
.setAutoFocusEnabled(false)
.setRequestedPreviewSize(width, height)
.build();
barcodeDetector.setProcessor(new Detector.Processor<Barcode>() {
#Override
public void release() {
}
#Override
public void receiveDetections(Detector.Detections<Barcode> detections) {
// handle detections
}
});
cameraPreview.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View v) {
// handle manual focus here?
}
});
cameraPreview.getHolder().addCallback(new SurfaceHolder.Callback() {
#Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
final Handler handler = new Handler();
handler.postDelayed(new Runnable() {
#Override
public void run() {
if (ActivityCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(AddScanningActivity.this,
new String[]{Manifest.permission.CAMERA}, requestCameraPermissionID);
return;
}
try {
cameraSource.start(cameraPreview.getHolder());
} catch (IOException e) {
e.printStackTrace();
}
}
}, 100);
}
#Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i1, int i2) {
}
#Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
cameraSource.stop();
}
});
}
}
If it is not possible to have autofocus and manual focus in the same SurfaceView, I would rather have manual focus than autofocus.

As you are making a barcode app, I suggest you use FOCUS_MODE_FIXED,
or
FOCUS_MODE_MACRO

package com.example.yaumanualcamera;
import android.Manifest;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.Camera;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.os.Handler;
import android.os.HandlerThread;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityCompat;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Size;
import android.view.Surface;
import android.view.TextureView;
import java.util.Arrays;
public class MainActivity extends AppCompatActivity {
TextureView textureview;
CameraDevice cameraDevice;
String cameraId;
Size imageDimensions;
CaptureRequest.Builder captureRequestBuilder;
CameraCaptureSession cameraSession;
Handler backgroundHandler;
HandlerThread handleThread;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
textureview = (TextureView) findViewById(R.id.texture);
textureview.setSurfaceTextureListener(surfaceTextureListener);
}
TextureView.SurfaceTextureListener surfaceTextureListener = new TextureView.SurfaceTextureListener() {
#Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
try {
openCamera();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
#Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
}
#Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return false;
}
#Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
};
private void openCamera() throws CameraAccessException {
CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
cameraId = cameraManager.getCameraIdList()[0];
CameraCharacteristics cc = cameraManager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = cc.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
imageDimensions = map.getOutputSizes(SurfaceTexture.class)[0];
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
return;
}
cameraManager.openCamera(cameraId, stateCallback, null);
}
CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
#Override
public void onOpened(#NonNull CameraDevice camera) {
cameraDevice = camera;
try {
startCameraPreview();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
#Override
public void onDisconnected(#NonNull CameraDevice camera) {
cameraDevice.close();
}
#Override
public void onError(#NonNull CameraDevice camera, int error) {
cameraDevice.close();
cameraDevice = null;
}
};
private void startCameraPreview() throws CameraAccessException {
SurfaceTexture texture = textureview.getSurfaceTexture();
texture.setDefaultBufferSize(imageDimensions.getWidth(), imageDimensions.getHeight());
Surface surface = new Surface(texture);
captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureRequestBuilder.addTarget(surface);
System.out.println("Focus 1 ====== "+captureRequestBuilder.get(CaptureRequest.LENS_FOCUS_DISTANCE));
cameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(#NonNull CameraCaptureSession session) {
if (cameraDevice == null) {
return;
}
cameraSession = session;
try {
updatePreview();
} catch (CameraAccessException e) {
}
}
#Override
public void onConfigureFailed(#NonNull CameraCaptureSession session) {
}
}, null);
}
private void initPreview() {
float valueAF;
valueAF = 10.0f;
captureRequestBuilder.set(CaptureRequest.CONTROL_MODE,CameraMetadata.CONTROL_MODE_AUTO);
captureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_OFF);
captureRequestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, valueAF);
}
private void updatePreview() throws CameraAccessException {
if (cameraDevice == null) {
return;
}
initPreview();
System.out.println("Focus 2 ====== "+captureRequestBuilder.get(CaptureRequest.LENS_FOCUS_DISTANCE));
cameraSession.setRepeatingRequest(captureRequestBuilder.build(), null, backgroundHandler);
}
#Override
protected void onResume() {
super.onResume();
startBackgroundThread();
if (textureview.isAvailable()) {
try {
openCamera();
} catch (CameraAccessException e) {
e.printStackTrace();
}
} else {
textureview.setSurfaceTextureListener(surfaceTextureListener);
}
}
private void startBackgroundThread() {
handleThread = new HandlerThread("CAMERA BACKGROUND");
handleThread.start();
backgroundHandler = new Handler(handleThread.getLooper());
}
#Override
protected void onPause() {
try {
stopBackgroundThread();
} catch (InterruptedException e) {
e.printStackTrace();
}
super.onPause();
}
private void stopBackgroundThread() throws InterruptedException {
handleThread.quitSafely();
handleThread.join();
backgroundHandler = null;
handleThread = null;
}
}

Related

Android CameraX - preview black/not visible after fragment resume

I am using CameraX, preview, image and video capture. I have created a fragment class that uses binding to lifecycle to avoid all manual camera preview state management etc (as recommended by Google). Problem is that sometimes if I put the app into background and then foreground, the camera preview is black and does not show preview. There is no way to restore it apart from recreating the fragment. Any hints what I may be doing wrong?
import android.Manifest;
import android.content.Context;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.util.Size;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.camera.core.AspectRatio;
import androidx.camera.core.Camera;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.ImageCapture;
import androidx.camera.core.ImageCaptureException;
import androidx.camera.core.Preview;
import androidx.camera.core.UseCaseGroup;
import androidx.camera.core.VideoCapture;
import androidx.camera.lifecycle.ProcessCameraProvider;
import androidx.camera.view.PreviewView;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.mlkit.vision.face.Face;
// NOTE: an import statement was truncated in the original post here.
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
public class CameraViewFragment extends BaseFragment {
private static final int REQUEST_CAMERA_PERMISSIONS = 100;
private CameraViewFragmentListener listener;
private final Executor executor = Executors.newSingleThreadExecutor();
private Camera camera;
private Handler handler = new Handler();
private boolean isRecording;
private Preview preview;
private ImageCapture imageCapture;
private VideoCapture videoCapture;
private ImageAnalysis imageAnalysis;
private FaceDetector faceDetector;
public interface CameraViewFragmentListener {
void onCameraPictureTaken(byte[] buffer);
void onCameraPermissionsRejected();
void onCameraVideoRecorded(Uri file);
void onCameraVideoRecordError(Throwable ex);
void onCameraFacesDetect(List<Face> faces);
void onCameraFacesDetectError(Exception e);
enum CameraFeature {
PREVIEW,
IMAGE_CAPTURE,
VIDEO_CAPTURE,
FACE_DETECTION
}
Set<CameraFeature> cameraGetRequestedFeatures();
}
private FragmentCameraViewBinding binding;
#Nullable
#Override
public View onCreateView(#NonNull LayoutInflater inflater, #Nullable ViewGroup container, #Nullable Bundle savedInstanceState) {
binding = FragmentCameraViewBinding.inflate(inflater, container, false);
return binding.getRoot();
}
#Override
public void onViewCreated(View view, #Nullable Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
int rc = ActivityCompat.checkSelfPermission(requireContext(), Manifest.permission.CAMERA);
if (rc == PackageManager.PERMISSION_GRANTED) {
startCamera();
} else {
requestCameraPermission();
}
}
private void requestCameraPermission() {
requestPermissions(new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSIONS);
}
#Override
public void onRequestPermissionsResult(int requestCode, #NonNull String[] permissions, #NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == REQUEST_CAMERA_PERMISSIONS) {
if (permissions.length == 1 && permissions[0].equals(Manifest.permission.CAMERA) && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
Logger.log(Logger.info, "[VRM] Camera permission granted.");
startCamera();//createCameraSource();
} else {
Logger.log(Logger.error, "[VRM] Camera permission NOT granted.");
if (listener != null) {
listener.onCameraPermissionsRejected();
}
requestCameraPermission();
}
}
}
/**
* Restarts the camera.
*/
#Override
public void onResume() {
super.onResume();
//startCamera();
}
/**
* Stops the camera.
*/
#Override
public void onPause() {
super.onPause();
}
/**
* Releases the resources associated with the camera source, the associated detector, and the
* rest of the processing pipeline.
*/
#Override
public void onDestroy() {
super.onDestroy();
}
#Override
public void onAttach(Context context) {
super.onAttach(context);
if (context instanceof CameraViewFragmentListener) {
listener = (CameraViewFragmentListener) context;
} else if (getParentFragment() instanceof CameraViewFragmentListener) {
listener = (CameraViewFragmentListener) getParentFragment();
}
}
#Override
public void onDetach() {
super.onDetach();
listener = null;
}
private void startCamera() {
final ListenableFuture<ProcessCameraProvider> cameraProviderFuture = ProcessCameraProvider.getInstance(requireContext());
cameraProviderFuture.addListener(() -> {
try {
ProcessCameraProvider cameraProvider = cameraProviderFuture.get();
Set<CameraViewFragmentListener.CameraFeature> features = listener.cameraGetRequestedFeatures();
bindUseCases(cameraProvider, features);
} catch (ExecutionException | InterruptedException e) {
// No errors need to be handled for this Future.
// This should never be reached.
Logger.log(Logger.error, "Exception while initializing CameraX: {}", e);
}
}, ContextCompat.getMainExecutor(requireContext()));
}
void bindUseCases(#NonNull ProcessCameraProvider cameraProvider, Set<CameraViewFragmentListener.CameraFeature> features) {
cameraProvider.unbindAll();
final CameraSelector cameraSelector = new CameraSelector.Builder()
.requireLensFacing(CameraSelector.LENS_FACING_FRONT)
.build();
final WindowManager wm = (WindowManager) requireContext().getSystemService(Context.WINDOW_SERVICE);
final UseCaseGroup.Builder useCaseGroupBuilder = new UseCaseGroup.Builder();
if (features.contains(CameraViewFragmentListener.CameraFeature.PREVIEW)) {
preview = new Preview.Builder()
.setTargetAspectRatio(AspectRatio.RATIO_4_3)
//.setTargetResolution(new Size(480, 640))
.build();
binding.preview.setScaleType(PreviewView.ScaleType.FILL_CENTER);
preview.setSurfaceProvider(binding.preview.getSurfaceProvider());
useCaseGroupBuilder.addUseCase(preview);
} else {
preview = null;
}
if (features.contains(CameraViewFragmentListener.CameraFeature.IMAGE_CAPTURE)) {
imageCapture = new ImageCapture.Builder()
.setCameraSelector(cameraSelector)
.setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
//.setTargetAspectRatio(AspectRatio.RATIO_4_3)
.setTargetResolution(new Size(480, 640))
.setTargetRotation(wm.getDefaultDisplay().getRotation())
.build();
useCaseGroupBuilder.addUseCase(imageCapture);
} else {
imageCapture = null;
}
if (features.contains(CameraViewFragmentListener.CameraFeature.VIDEO_CAPTURE)) {
videoCapture = new VideoCapture.Builder()
.setCameraSelector(cameraSelector)
//.setTargetResolution(new Size(480, 640))
.setBitRate(1000)
.setTargetAspectRatio(AspectRatio.RATIO_4_3)
.setTargetRotation(wm.getDefaultDisplay().getRotation())
.build();
useCaseGroupBuilder.addUseCase(videoCapture);
} else {
videoCapture = null;
}
if (features.contains(CameraViewFragmentListener.CameraFeature.FACE_DETECTION)) {
imageAnalysis = new ImageAnalysis.Builder()
.setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
.build();
faceDetector = new FaceDetector(new FaceDetector.FaceDetectorListener() {
#Override
public void onFaceDetectSuccess(List<Face> faces) {
if (listener!=null)
listener.onCameraFacesDetect(faces);
}
#Override
public void onFaceDetectError(Exception e) {
if (listener!=null)
listener.onCameraFacesDetectError(e);
}
});
imageAnalysis.setAnalyzer(executor, faceDetector.createAnalyzer());
useCaseGroupBuilder.addUseCase(imageAnalysis);
} else {
imageAnalysis = null;
}
camera = cameraProvider.bindToLifecycle(this, cameraSelector, useCaseGroupBuilder.build());
}
public void requestTakePicture() {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
ImageCapture.OutputFileOptions outputFileOptions = new ImageCapture.OutputFileOptions.Builder(bos).build();
imageCapture.takePicture(outputFileOptions, executor, new ImageCapture.OnImageSavedCallback() {
#Override
public void onImageSaved(#NonNull ImageCapture.OutputFileResults outputFileResults) {
handler.post(() -> {
//Toast.makeText(MainActivity.this, "Image Saved successfully", Toast.LENGTH_SHORT).show();
if (listener != null) {
listener.onCameraPictureTaken(bos.toByteArray());
}
});
}
#Override
public void onError(#NonNull ImageCaptureException error) {
error.printStackTrace();
}
});
}
public void startVideoRecording(File file) {
VideoCapture.OutputFileOptions outputFileOptions = new VideoCapture.OutputFileOptions
.Builder(file)
.build();
videoCapture.startRecording(outputFileOptions, executor, new VideoCapture.OnVideoSavedCallback() {
#Override
public void onVideoSaved(#NonNull VideoCapture.OutputFileResults outputFileResults) {
handler.post(() -> {
isRecording = false;
//Toast.makeText(MainActivity.this, "Image Saved successfully", Toast.LENGTH_SHORT).show();
if (listener != null) {
listener.onCameraVideoRecorded(outputFileResults.getSavedUri());
}
});
}
#Override
public void onError(int videoCaptureError, #NonNull String message, #Nullable Throwable cause) {
isRecording = false;
if (listener != null) {
listener.onCameraVideoRecordError(cause);
}
}
});
isRecording = true;
}
public void stopVideoRecording() {
videoCapture.stopRecording();
isRecording = false;
}
public boolean isRecording() {
return isRecording;
}
}

How to get autofocus working with CameraSource from Android's Mobile Vision API?

I'm trying get the Camera to autofocus using the CameraSource class from Android Mobile Vision API.
I've activated autofocus as follows:
cameraSource = new CameraSource
.Builder(this, barcodeDetector)
.setRequestedPreviewSize(640, 480)
.setAutoFocusEnabled(true)
.setRequestedFps(24.0f)
.build();
But the SurfaceView rendering the camera is often blurred.
Here my full Activity's code:
import android.Manifest;
import android.app.Activity;
import android.content.pm.PackageManager;
import android.os.Build;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityCompat;
import android.util.Log;
import android.util.SparseArray;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.widget.TextView;
import com.google.android.gms.vision.CameraSource;
import com.google.android.gms.vision.Detector;
import com.google.android.gms.vision.barcode.Barcode;
import com.google.android.gms.vision.barcode.BarcodeDetector;
import java.io.IOException;
import java.util.Arrays;
public class MainActivity extends Activity {
private BarcodeDetector barcodeDetector;
private CameraSource cameraSource;
private SurfaceView cameraView;
private TextView barcodeInfo;
public static int REQUEST_PERMISSION_CAMERA = 1;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
cameraView = (SurfaceView) findViewById(R.id.camera_view);
barcodeInfo = (TextView) findViewById(R.id.code_info);
barcodeDetector =
new BarcodeDetector.Builder(this)
.setBarcodeFormats(Barcode.QR_CODE)
.build();
cameraSource = new CameraSource
.Builder(this, barcodeDetector)
.setRequestedPreviewSize(640, 480)
.setAutoFocusEnabled(true)
.setRequestedFps(24.0f)
.build();
cameraView.getHolder().addCallback(new SurfaceHolder.Callback() {
#Override
public void surfaceCreated(SurfaceHolder holder) {
try {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (ActivityCompat.checkSelfPermission(MainActivity.this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
requestPermissions(new String[]{Manifest.permission.CAMERA}, REQUEST_PERMISSION_CAMERA);
return;
} else {
cameraSource.start(cameraView.getHolder());
}
} else {
cameraSource.start(cameraView.getHolder());
}
} catch (IOException ie) {
Log.e("CAMERA SOURCE", ie.getMessage());
}
}
#Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
#Override
public void surfaceDestroyed(SurfaceHolder holder) {
cameraSource.stop();
}
});
barcodeDetector.setProcessor(new Detector.Processor<Barcode>() {
#Override
public void release() {
}
#Override
public void receiveDetections(Detector.Detections<Barcode> detections) {
final SparseArray<Barcode> barcodes = detections.getDetectedItems();
if (barcodes.size() != 0) {
barcodeInfo.post(new Runnable() { // Use the post method of the TextView
public void run() {
barcodeInfo.setText( // Update the TextView
barcodes.valueAt(0).displayValue
);
}
});
}
}
});
}
#Override
public void onRequestPermissionsResult(int requestCode, #NonNull String[] permissions, #NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
int[] checkValues = new int[1];
if (requestCode == REQUEST_PERMISSION_CAMERA) {
if (Arrays.equals(grantResults, checkValues)) {
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
return;
}
try {
cameraSource.start(cameraView.getHolder());
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
#Override
protected void onDestroy() {
super.onDestroy();
cameraSource.release();
barcodeDetector.release();
}
}

making text scanner but shows black screen instead of camera

I am trying to make a text scanner application with camera that recognizes text and shows in screen.But instead of showing camera it shows a black screen. How can I solve this.
My code is here
package com.myapp.game.easynepalirecharge;
import android.Manifest;
import android.app.ActionBar;
import android.content.pm.PackageManager;
import android.graphics.Camera;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityCompat;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.util.SparseArray;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.ViewGroup;
import android.widget.TextView;
import com.google.android.gms.vision.CameraSource;
import com.google.android.gms.vision.Detector;
import com.google.android.gms.vision.text.TextBlock;
import com.google.android.gms.vision.text.TextRecognizer;
import java.io.IOException;
public class MainActivity extends AppCompatActivity {
SurfaceView cameraView;
TextView textView;
CameraSource cameraSource;
final int REQUESTCAMERAPERMISSION = 105;
#Override
public void onRequestPermissionsResult(int requestCode, #NonNull String[] permissions, #NonNull final int[] grantResults) {
switch (requestCode) {
case REQUESTCAMERAPERMISSION:
if (grantResults[0] == PackageManager.PERMISSION_GRANTED) {
if (ActivityCompat.checkSelfPermission(getApplication(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
return;
}
try {
cameraSource.start(cameraView.getHolder());
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
cameraView = (SurfaceView) findViewById(R.id.surfaceView);
textView = (TextView) findViewById(R.id.textView);
TextRecognizer textRecognizer = new TextRecognizer.Builder(getApplicationContext()).build();
if (!textRecognizer.isOperational()) {
Log.v("haha", "error not operational");
} else {
cameraSource = new CameraSource.Builder(getApplicationContext(), textRecognizer).
setFacing(CameraSource.CAMERA_FACING_BACK)
.setRequestedPreviewSize(3840, 2160)
.setRequestedFps(2.0f)
.setAutoFocusEnabled(true)
.build();
cameraView.getHolder().addCallback(new SurfaceHolder.Callback() {
#Override
public void surfaceCreated(SurfaceHolder holder) {
try {
if (ActivityCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
// TODO: Consider calling
ActivityCompat.requestPermissions(MainActivity.this, new String[]{Manifest.permission.CAMERA}, REQUESTCAMERAPERMISSION);
cameraSource.start(cameraView.getHolder());
return;
}
} catch (Exception e) {
e.printStackTrace();
}
}
#Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
#Override
public void surfaceDestroyed(SurfaceHolder holder) {
cameraSource.stop();
}
});
textRecognizer.setProcessor(new Detector.Processor<TextBlock>() {
#Override
public void release() {
}
#Override
public void receiveDetections(Detector.Detections<TextBlock> detections) {
final SparseArray<TextBlock> items = detections.getDetectedItems();
if (items.size() != 0) {
textView.post(new Runnable() {
#Override
public void run() {
StringBuilder stringBuilder = new StringBuilder();
for (int i = 0; i <= items.size(); i++) {
TextBlock item = items.valueAt(i);
stringBuilder.append(item.getValue());
stringBuilder.append("\n");
}
textView.setText(stringBuilder.toString());
}
});
}
}
}
);
}
}
}
and another thing I wanna ask. Which one is better. This or the tess two library?
Have you stopped the camera properly? Try to run the camera functions on a background thread so that a single expensive task does not block the UI. I also suspect the problem is how you request the permission at run time.
try this
https://developer.android.com/training/permissions/requesting.html

Camera preview stops on activity restart

I made a camera preview app for android and everything works fine even when I press the power button to make device asleep, and waking it up again. But when my activity goes to background (like pressing home button) and then it comes to foreground again the program stops.
This is my activity code:
package com.example.campreview;
import com.example.campreview.CameraPreview;
import com.example.campreview.R;
import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.hardware.Camera.AutoFocusCallback;
import android.hardware.Camera.PreviewCallback;
import android.os.Bundle;
import android.os.Handler;
import android.app.Activity;
import android.content.pm.ActivityInfo;
import android.view.Menu;
import android.view.View;
public class ScanActivity extends Activity {
private CameraPreview CamPrev = null;
private FrameLayout PreviewFrm;
private Camera cam = null;
private Handler atfcs;
private ImageScanner scnr;
private boolean hascam = false;
private boolean prvng = true;
private boolean paused = false;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_scan);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
PreviewFrm = (FrameLayout)findViewById(R.id.PreviewFrm);
StartCamera();
if (cam != null) {
hascam = true;
atfcs = new Handler();
CamPrev = new CameraPreview(this, cam, PreviewCB, AutoFocusCB);
PreviewFrm.addView(CamPrev);
}
}
#Override
public void onPause() {
ReleaseCamera();
paused = true;
super.onPause();
}
#Override
public void onResume() {
super.onResume();
if (paused) StartPreview();
}
private boolean StartCamera() {
boolean r = true;
if (cam == null) {
try {
cam = Camera.open();
} catch (Exception e) {
cam = null;
r = false;
}
if (cam != null) {
try {
Camera.Parameters p = cam.getParameters();
if (p.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_AUTO))
p.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
if (p.getSupportedFlashModes().contains(Camera.Parameters.FLASH_MODE_OFF))
p.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
if (p.getSupportedPreviewFormats().contains(ImageFormat.NV21))
p.setPreviewFormat(ImageFormat.NV21);
Camera.Size s = null;
int a = 0, b;
for (Camera.Size z : p.getSupportedPreviewSizes()) {
b = z.width * z.height;
if (Math.abs(b - 307200) < Math.abs(a - 307200)) { //640x480 is the best
s = z;
a = b;
}
}
if (a != 0) p.setPreviewSize(s.width, s.height);
cam.setParameters(p);
cam.setDisplayOrientation(90);
if (CamPrev != null) cam.setPreviewDisplay(CamPrev.getHolder());
} catch (Exception e) {
r = false;
cam.release();
cam = null;
}
}
}
return r;
}
private void ReleaseCamera() {
if (cam != null) {
StopPreview();
cam.release();
cam = null;
}
}
public void StartPreview() {
if ((!prvng) & hascam) {
if (StartCamera()) {
cam.setPreviewCallback(PreviewCB);
cam.startPreview();
cam.autoFocus(AutoFocusCB);
prvng = true;
}
}
}
public void StopPreview() {
if (prvng) {
cam.stopPreview();
cam.setPreviewCallback(null);
prvng = false;
}
}
private Runnable DoAutoFocus = new Runnable() {
public void run() {
if (prvng) cam.autoFocus(AutoFocusCB);
}
};
AutoFocusCallback AutoFocusCB = new AutoFocusCallback() {
public void onAutoFocus(boolean success, Camera camera) {
atfcs.postDelayed(DoAutoFocus, 1000);
}
};
PreviewCallback PreviewCB = new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera camera) {
//
}
};
}
And this is the preview code:
package com.example.campreview;
import java.io.IOException;
import android.view.SurfaceView;
import android.view.SurfaceHolder;
import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.hardware.Camera.AutoFocusCallback;
// SurfaceView wrapper that owns the camera preview surface. The enclosing
// activity supplies the opened Camera plus its preview/autofocus callbacks;
// this class only attaches them to the surface lifecycle.
public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback {
private SurfaceHolder hldr;          // this view's holder, registered as callback target
private Camera cam;                  // camera opened and owned by the activity
private PreviewCallback pcb;         // frame callback forwarded on (re)start
private AutoFocusCallback afcb;      // autofocus callback forwarded on (re)start
public CameraPreview(Context context, Camera camera, PreviewCallback previewCb, AutoFocusCallback autoFocusCb) {
super(context);
cam = camera;
pcb = previewCb;
afcb = autoFocusCb;
hldr = getHolder();
hldr.addCallback(this);
//hldr.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder holder) {
// Attach the preview to the new surface; the preview itself is started
// later in surfaceChanged().
try {
cam.setPreviewDisplay(holder);
} catch (IOException e) {
// No Code — best-effort: preview simply stays blank on failure.
}
}
// Camera release is handled by the owning activity, not here.
public void surfaceDestroyed(SurfaceHolder holder) {}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
if (hldr.getSurface() == null) return;
// Stop any running preview before re-attaching: the surface may have
// changed size or been recreated.
try {
cam.stopPreview();
} catch (Exception e){
// No Code — stopPreview throws if the preview was not running; safe to ignore.
}
try {
cam.setPreviewDisplay(hldr);
cam.setPreviewCallback(pcb);
cam.startPreview();
cam.autoFocus(afcb);
} catch (Exception e) {
// No Code — best-effort restart; NOTE(review): consider logging the failure.
}
}
}
Is there any idea how can I solve this problem?
I found the problem. It seems the SurfaceView that is made in program destroys on activity stop. So I replaced it with a SurfaceView in my layout.
package com.example.campreview;
import com.example.campreview.R;
import java.io.IOException;
import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.hardware.Camera.AutoFocusCallback;
import android.hardware.Camera.PreviewCallback;
import android.os.Bundle;
import android.os.Handler;
import android.app.Activity;
import android.content.pm.ActivityInfo;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
public class ScanActivity extends Activity implements OnClickListener {
private SurfaceView PreviewSfc;
private Camera cam = null;
private Handler atfcs;
private boolean hascam = false;
private boolean validdisplay = false;
private boolean prvng = false;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_scan);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
PreviewSfc = (SurfaceView)findViewById(R.id.PreviewSfc);
PreviewSfc.getHolder().addCallback(SurfaceCB);
//PreviewSfc.getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
StartCamera();
if (cam != null) {
hascam = true;
atfcs = new Handler();
}
}
#Override
public void onPause() {
ReleaseCamera();
super.onPause();
}
#Override
public void onResume() {
super.onResume();
StartPreview();
}
private boolean StartCamera() {
boolean r = true;
if (cam == null) {
try {
cam = Camera.open();
} catch (Exception e) {
cam = null;
r = false;
}
if (cam != null) {
try {
Camera.Parameters p = cam.getParameters();
if (p.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_AUTO))
p.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
if (p.getSupportedFlashModes().contains(Camera.Parameters.FLASH_MODE_OFF))
p.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
if (p.getSupportedPreviewFormats().contains(ImageFormat.NV21))
p.setPreviewFormat(ImageFormat.NV21);
Camera.Size s = null;
int a = 0, b;
for (Camera.Size z : p.getSupportedPreviewSizes()) {
b = z.width * z.height;
if (Math.abs(b - 307200) < Math.abs(a - 307200)) { //640x480 is the best
s = z;
a = b;
}
}
if (a != 0) p.setPreviewSize(s.width, s.height);
cam.setParameters(p);
cam.setDisplayOrientation(90);
if (validdisplay) cam.setPreviewDisplay(PreviewSfc.getHolder());
} catch (Exception e) {
r = false;
cam.release();
cam = null;
}
}
}
//if (!r) Error message that failed to start camera
return r;
}
private void ReleaseCamera() {
if (cam != null) {
StopPreview();
cam.release();
cam = null;
}
}
public void StartPreview() {
if ((!prvng) & (hascam) & (validdisplay)) {
if (StartCamera()) {
cam.setPreviewCallback(PreviewCB);
cam.startPreview();
cam.autoFocus(AutoFocusCB);
prvng = true;
}
}
}
public void StopPreview() {
if (prvng) {
cam.stopPreview();
cam.setPreviewCallback(null);
prvng = false;
}
}
private Runnable DoAutoFocus = new Runnable() {
public void run() {
if (prvng) cam.autoFocus(AutoFocusCB);
}
};
AutoFocusCallback AutoFocusCB = new AutoFocusCallback() {
public void onAutoFocus(boolean success, Camera camera) {
atfcs.postDelayed(DoAutoFocus, 1000);
}
};
PreviewCallback PreviewCB = new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera camera) {
//
}
};
SurfaceHolder.Callback SurfaceCB = new SurfaceHolder.Callback() {
public void surfaceCreated(SurfaceHolder holder) {
if (cam != null) {
try {
cam.setPreviewDisplay(holder);
} catch (IOException e) {
// No Code
}
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
validdisplay = true;
StartPreview();
}
public void surfaceDestroyed(SurfaceHolder holder) {
validdisplay = false;
}
};
}

prepare failed: -1 media recorder in android

When I try to record video from the camera, it fails with the error "prepare failed: -1".
I have searched a lot and cannot find an acceptable answer anywhere. Here is my code below.
Is the problem caused by a parameter, or by not unlocking the camera? Any help would be greatly appreciated.
package com.camara.activity;
import java.io.IOException;
import android.content.Context;
import android.hardware.Camera;
import android.media.MediaRecorder;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
/**
 * SurfaceView that hosts a MediaRecorder camcorder preview.
 *
 * MediaRecorder is a strict state machine: sources -> output format ->
 * encoders/size/rate -> output file -> preview display -> prepare() -> start().
 * The constructor performs the configuration steps that do not need a surface;
 * the output file and preview display are applied in surfaceCreated(), right
 * before prepare().
 */
public class CamcorderView extends SurfaceView implements
        SurfaceHolder.Callback {
    MediaRecorder recorder;
    SurfaceHolder holder;
    String outputFile = "/sdcard/default.mp4"; // NOTE(review): hard-coded path; prefer Context#getExternalFilesDir

    public CamcorderView(Context context, AttributeSet attrs) {
        super(context, attrs);
        holder = getHolder();
        holder.addCallback(this);
        holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        recorder = new MediaRecorder();
        // Order matters: sources first, then format, then encoders.
        recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
        recorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
        recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
        recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
        recorder.setVideoEncoder(MediaRecorder.VideoEncoder.MPEG_4_SP);
        recorder.setVideoSize(480, 320); // NOTE(review): must be a size the device supports, else prepare() fails
        recorder.setVideoFrameRate(10);
        recorder.setMaxDuration(10000); // 10 s cap
    }

    public void surfaceCreated(SurfaceHolder holder) {
        // Check before dereferencing (the original checked after calling setOutputFile).
        if (recorder != null) {
            recorder.setOutputFile(outputFile);
            recorder.setPreviewDisplay(holder.getSurface());
            try {
                recorder.prepare();
            } catch (IllegalStateException e) {
                Log.e("IllegalStateException", e.toString());
            } catch (IOException e) {
                Log.e("IOException", e.toString());
            }
        }
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int width,
            int height) {
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
    }

    /**
     * Records the desired output path. Only the field is updated here; the
     * recorder itself is configured in surfaceCreated(), because calling
     * MediaRecorder#setOutputFile after prepare() throws IllegalStateException.
     */
    public void setOutputFile(String filename) {
        outputFile = filename;
    }

    public void startRecording() {
        if (recorder != null) {
            recorder.start();
        }
    }

    public void stopRecording() {
        if (recorder != null) {
            recorder.stop();
            recorder.release();
            recorder = null; // a released recorder must never be reused
        }
    }
}
following is the output from the logcat
01-10 17:33:00.450: I/MediaRecorderJNI(14046): prepare: surface=0x243398 (id=1)
01-10 17:33:00.500: E/MediaRecorder(14046): prepare failed: -1
01-10 17:33:00.500: E/IOException(14046): java.io.IOException: prepare failed.
thanks in advance.
OK, I actually found my mistake — because of course there was one. I wanted to show a preview and record at the same time, and I thought I had to use a Camera object for that. But the MediaRecorder handles it by itself via setPreviewDisplay(...) followed by MediaRecorder.prepare(). :)
I'm in a good mood, so here's all the class ;)
import java.io.File;
import java.io.IOException;
import android.content.Context;
import android.media.CamcorderProfile;
import android.media.MediaRecorder;
import android.media.MediaRecorder.OnInfoListener;
import android.os.Environment;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.widget.FrameLayout;
public class ModuleVideo implements SurfaceHolder.Callback, OnInfoListener
{
public static final int STATE_NONE = -1;
public static final int STATE_STOPPED = 0;
public static final int STATE_PREVIEW = 1;
public static final int STATE_CAPTURE = 2;
public static final int STATE_RECORDING = 3;
private SurfaceHolder mHolder;
private MediaRecorder mRecorder;
private SurfaceView mCameraView;
private Context mContext;
private FrameLayout mParent;
private int mState;
private boolean mRecording;
public ModuleVideo(Context context, FrameLayout parent)
{
//Initiate the Surface Holder properly
mParent = parent;
mContext = context;
mRecorder = null;
mState = STATE_NONE;
mRecording = false;
}
private void Init()
{
mRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
mRecorder.setVideoSource(MediaRecorder.VideoSource.DEFAULT);
CamcorderProfile camcorderProfile_HQ = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
mRecorder.setProfile(camcorderProfile_HQ);
mRecorder.setOutputFile("/sdcard/MY_VIDEO.mp4");
}
private void Prepare()
{
try
{
mRecorder.setPreviewDisplay(mHolder.getSurface());
mRecorder.prepare();
}
catch (IllegalStateException e)
{
e.printStackTrace();
}
catch (IOException e)
{
e.printStackTrace();
}
}
public void UI_StartPreview()
{
if(mState == STATE_STOPPED || mState == STATE_NONE)
{
mRecorder= new MediaRecorder();
Init();
mCameraView= new SurfaceView(mContext);
mParent.addView(mCameraView);
this.mHolder = mCameraView.getHolder();
this.mHolder.addCallback(this);
this.mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
}
public void UI_StopPreview()
{
if(mState == STATE_PREVIEW)
{
if(mRecording)
{
UI_StopRecord();
}
if(mRecorder != null)
{
mRecorder.release();
mRecorder= null;
}
mParent.removeView(mCameraView);
//mCameraView= null;
}
}
public boolean UI_StartRecord()
{
if(mState != STATE_PREVIEW )
{
return false;
}
String path= "/sdcard/PLUS_VIDEO.mp4";
String state = android.os.Environment.getExternalStorageState();
if(!state.equals(Environment.MEDIA_MOUNTED))
{
return false;
}
File directory = new File(path).getParentFile();
if(!directory.exists() && !directory.mkdirs())
{
return false;
}
mRecorder.start();
mRecording= true;
mState= STATE_RECORDING;
return true;
}
public void UI_StopRecord()
{
if(mRecorder != null)
{
mRecorder.stop();
mRecorder.reset();
Init();
Prepare();
mRecording= false;
mState= STATE_PREVIEW;
}
}
public boolean UI_IsRecording()
{
return mRecording;
}
#Override
public void onInfo(MediaRecorder mr, int what, int extra)
{
// TODO Auto-generated method stub
Log.i(null, "onInfo");
}
#Override
public void surfaceCreated(SurfaceHolder holder)
{
Prepare();
}
#Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height)
{
mState= STATE_PREVIEW;
}
#Override
public void surfaceDestroyed(SurfaceHolder holder)
{
mState= STATE_STOPPED;
}
}

Categories

Resources