I'm trying to create an app using the Camera2 API. I need to capture a burst at 30 fps, which I was able to do.
The problem is that both the preview images and the saved images come out interlaced (I'm photographing some blinking LEDs, so it's easy to see).
I tried to disable the auto exposure and set the sensitivity myself, but that didn't work.
private void captureStillPicture() {
try {
final Activity activity = getActivity();
mPictureCounter = 0;
if (null == activity || null == mCameraDevice) {
return;
}
List<CaptureRequest> captureList = new ArrayList<CaptureRequest>();
// This is the CaptureRequest.Builder that we use to take a picture.
final CaptureRequest.Builder captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
// Use the same AE and AF modes as the preview.
captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, 2000);
//Auto focus - should keep that
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, Consts.aeMode);
captureBuilder.addTarget(mImageReader.getSurface());
for(int i = 0; i < Consts.frameAmount; i++) {
captureList.add(captureBuilder.build());
}
CameraCaptureSession.CaptureCallback CaptureCallback = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
mPictureCounter++;
unlockFocus();
}
};
mCaptureSession.stopRepeating();
mCaptureSession.captureBurst(captureList, CaptureCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
Any thoughts?
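For reference, fully manual exposure in Camera2 means switching auto exposure off entirely and supplying exposure time, sensitivity, and frame duration yourself; setting SENSOR_SENSITIVITY alone is ignored while AE is active. A minimal sketch on the same captureBuilder, with placeholder values rather than a tested configuration:
// Sketch: turn AE off and set manual exposure parameters. Query the device's
// supported ranges (SENSOR_INFO_EXPOSURE_TIME_RANGE, SENSOR_INFO_SENSITIVITY_RANGE)
// before relying on specific values.
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 10_000_000L);  // 10 ms, in nanoseconds
captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, 800);            // ISO
captureBuilder.set(CaptureRequest.SENSOR_FRAME_DURATION, 33_333_333L); // ~30 fps, in nanoseconds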
I am using the Camera2 API (https://github.com/googlearchive/android-Camera2Basic) for my project.
I want to use both the front and rear cameras. It works properly on my device (Realme 5, Android 10).
But when I try to use the front camera on a OnePlus 6 (Android 10), a Samsung Galaxy J7 Nxt (Android 7.0), or a Redmi, I end up in the onConfigureFailed method and it redirects me to the first activity.
[Manifest and logcat screenshots omitted]
The whole code is in the googlearchive GitHub link given above.
Variable Initialisation
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
private static final int REQUEST_CAMERA_PERMISSION = 1;
private static final String FRAGMENT_DIALOG = "dialog";
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
public static int mSelectedFacing = 1;
Flip camera button click
if (mTextureView.isAvailable()) {
if(mSelectedFacing ==0){
mSelectedFacing = 1;
}else {
mSelectedFacing = 0;
}
closeCamera();
openCamera(mTextureView.getWidth(), mTextureView.getHeight(), mSelectedFacing);
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
Open camera
private void openCamera(int width, int height,int mSelectedFacing) {
if (ContextCompat.checkSelfPermission(getActivity(), Manifest.permission.CAMERA)
!= PackageManager.PERMISSION_GRANTED) {
requestCameraPermission();
return;
}
setUpCameraOutputs(width, height,mSelectedFacing);
configureTransform(width, height);
Activity activity = getActivity();
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
manager.openCamera(mCameraId, mStateCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
}
}
Close camera
private void closeCamera() {
try {
mCameraOpenCloseLock.acquire();
if (null != mCaptureSession) {
mCaptureSession.close();
mCaptureSession = null;
}
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
if (null != mImageReader) {
mImageReader.close();
mImageReader = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
} finally {
mCameraOpenCloseLock.release();
}
}
CameraPreviewSession
private void createCameraPreviewSession() {
try {
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
// We configure the size of default buffer to be the size of camera preview we want.
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
// This is the output Surface we need to start preview.
Surface surface = new Surface(texture);
// We set up a CaptureRequest.Builder with the output Surface.
mPreviewRequestBuilder
= mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.addTarget(surface);
// Here, we create a CameraCaptureSession for camera preview.
mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
// The camera is already closed
if (null == mCameraDevice) {
return;
}
// When the session is ready, we start displaying the preview.
mCaptureSession = cameraCaptureSession;
try {
// Auto focus should be continuous for camera preview.
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
// Flash is automatically enabled when necessary.
setAutoFlash(mPreviewRequestBuilder);
// Finally, we start displaying the camera preview.
mPreviewRequest = mPreviewRequestBuilder.build();
mCaptureSession.setRepeatingRequest(mPreviewRequest,
mCaptureCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(
@NonNull CameraCaptureSession cameraCaptureSession) {
showToast("Failed");
}
}, null
);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
Please help me …
Thanks in advance
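One thing worth checking when onConfigureFailed fires only on specific devices is whether the requested output sizes are actually supported by that camera. A minimal sketch of querying them, reusing the manager and mCameraId from openCamera() (it would go inside the existing try/catch there; the logging is illustrative):
// Sketch: list the output sizes the selected camera supports for a SurfaceTexture
// preview, so mPreviewSize can be checked before createCaptureSession() is called.
CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraId);
StreamConfigurationMap map =
        characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map != null) {
    for (Size size : map.getOutputSizes(SurfaceTexture.class)) {
        Log.d("CameraSizes", "Supported preview size: " + size);
    }
}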
I have done something similar: it captures an image in the background, without any preview screen, using the Camera2 API in a background service.
Visit https://codepalyers.blogspot.com/2020/12/capture-image-in-background-without.html
If you wish to use the rear camera, use
private final String frontCamera = "1";
private final String backCamera = "0";
as already defined in the code.
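Note that the "0"/"1" camera IDs are not guaranteed on every device; a more robust approach is to select the ID by its lens facing. A sketch (the helper name is mine):
// Sketch: find the ID of the front (or back) camera by LENS_FACING instead of
// assuming "0"/"1".
private String findCameraId(CameraManager manager, int facing) throws CameraAccessException {
    for (String id : manager.getCameraIdList()) {
        Integer lensFacing = manager.getCameraCharacteristics(id)
                .get(CameraCharacteristics.LENS_FACING);
        if (lensFacing != null && lensFacing == facing) {
            return id; // e.g. facing == CameraCharacteristics.LENS_FACING_FRONT
        }
    }
    return null;
}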
I am not able to get a constant frame rate with the Camera2 API using MediaRecorder on the Galaxy S9 front camera.
Essentially I am using the example project from https://github.com/googlesamples/android-Camera2Video but stripped it down to find the error. I removed the mTextureView and use only the MediaRecorder surface.
Here are the relevant code snippets:
@Override
protected void setUpMediaRecorder() throws IOException {
final Activity activity = getActivity();
if (null == activity) {
return;
}
// CamcorderProfile QUALITY_HIGH doesn't work
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mMediaRecorder.setVideoEncodingBitRate(30000000);
mMediaRecorder.setVideoFrameRate(30);
mMediaRecorder.setVideoSize(2560, 1440);
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
mMediaRecorder.prepare();
}
@Override
protected void setUpCaptureRequestBuilder(CaptureRequest.Builder builder) {
Range<Integer> range = getRange();
builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, range);
}
// get highest range
private Range<Integer> getRange() {
CameraManager mCameraManager = (CameraManager) getActivity().getSystemService(Context.CAMERA_SERVICE);
CameraCharacteristics chars = null;
try {
chars = mCameraManager.getCameraCharacteristics(mCameraManager.getCameraIdList()[1]);
} catch (CameraAccessException e) {
e.printStackTrace();
}
Range<Integer>[] ranges = chars.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
Range<Integer> result = null;
for (Range<Integer> range : ranges) {
if (result == null) {
result = range;
} else {
int upper = range.getUpper();
int lower = range.getLower();
if (upper >= result.getUpper() && lower >= result.getLower()) {
result = range;
}
}
}
return result;
}
protected void startRecordingVideo() {
if (null == mCameraDevice) {
return;
}
try {
closeCaptureSession();
setUpMediaRecorder();
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
List<Surface> surfaces = new ArrayList<>();
// Set up Surface for the MediaRecorder
Surface recorderSurface = mMediaRecorder.getSurface();
surfaces.add(recorderSurface);
mPreviewBuilder.addTarget(recorderSurface);
// Start a capture session
// Once the session starts, we can update the UI and start recording
mCameraDevice.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
mCaptureSession = cameraCaptureSession;
setUpCaptureRequestBuilder(mPreviewBuilder);
if (null == mCameraDevice) {
return;
}
try {
mCaptureSession.setRepeatingRequest(mPreviewBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
// UI
mButtonVideo.setText(R.string.stop);
mIsRecordingVideo = true;
// Start recording
mMediaRecorder.start();
}
});
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
Activity activity = getActivity();
if (null != activity) {
Toast.makeText(activity, "Failed", Toast.LENGTH_SHORT).show();
}
}
}, mBackgroundHandler);
} catch (CameraAccessException | IOException e) {
e.printStackTrace();
}
}
getRange() returns a Range of [30, 30], so it should record at 30 fps. If I move the camera to darker areas, the frame rate suddenly drops. If I lock the AE while pointing at a bright area, the frame rate stays stable, even in dark areas.
So it seems to have something to do with AE, but I can't figure out what. Any help?
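For reference, this is roughly what locking AE on the recording request looks like, applied to the builder in setUpCaptureRequestBuilder(); a sketch, and whether a locked exposure is acceptable for your lighting is a separate question:
// Sketch: pin the FPS range and lock auto-exposure so AE cannot lengthen the
// exposure time (and drop the frame rate) in dark scenes.
builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<>(30, 30));
builder.set(CaptureRequest.CONTROL_AE_LOCK, true);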
Hi, I'm using the Camera2Basic example to implement my Camera2 application. I can't find a good example of implementing touch-to-focus with the Camera2 API. Currently, the code I'm using for touch-to-focus is this:
private void setFocusArea(MotionEvent event) {
if (mCameraId == null) return;
CameraManager cm = (CameraManager)getActivity().getSystemService(Context.CAMERA_SERVICE);
CameraCharacteristics cc = null;
try {
cc = cm.getCameraCharacteristics(mCameraId);
} catch (CameraAccessException e) {
e.printStackTrace();
}
int myX = (int)event.getX();
int myY = (int)event.getY();
MeteringRectangle focusArea = new MeteringRectangle(myX-100,myY-100,200,200,MeteringRectangle.METERING_WEIGHT_DONT_CARE);
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
try {
mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback,
mBackgroundHandler);
// After this, the camera will go back to the normal state of preview.
mState = STATE_PREVIEW;
} catch (CameraAccessException e){
// log
}
if (isMeteringAreaAESupported(cc)) {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_REGIONS,
new MeteringRectangle[]{focusArea});
}
if (isMeteringAreaAFSupported(cc)) {
mPreviewRequestBuilder
.set(CaptureRequest.CONTROL_AF_REGIONS, new MeteringRectangle[]{focusArea});
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_AUTO);
}
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
CameraMetadata.CONTROL_AF_TRIGGER_START);
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);
try {
mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), mCaptureCallback,
mBackgroundHandler);
mManualFocusEngaged = true;
} catch (CameraAccessException e) {
// error handling
}
}
But the problem is that it shows strange behavior: with auto-flash on, it keeps repeating the auto-focus sequence indefinitely, and it does not seem to focus on the touched area. I tried changing
mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), mCaptureCallback, mBackgroundHandler);
to:
mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, mBackgroundHandler);
This stopped the repeating auto-focus sequence, but it still doesn't focus on the touched area, and the flash just blinks for less than a second instead of running a normal focus sequence. Please help me with this or point me to a working touch-to-focus example. Thanks.
Your problem is in how you set the AF region's control.
1. Calculate the region where you want to set focus.
2. Stop the current session: mPreviewSession.stopRepeating().
3. Start the AF trigger:
3.1. first safely set the AF trigger to IDLE,
3.2. then start the AF trigger:
mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
4. Capture once to apply your settings.
5. Check whether AF and AE regions are supported; if they are, apply the region:
if ( isMeteringAreaAESupported()) {
//System.out.println("AE regions are supported");
mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[]{focusArea});
}
if ( isMeteringAreaAFSupported()) {
//System.out.println("AF regions are supported");
mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_REGIONS, new MeteringRectangle[]{focusArea});
mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
}
6. Capture once again to set the focus:
mPreviewCaptureSession.capture(mCaptureRequestBuilder.build(), mCaptureCallback, mBackgroundHandler);
Inside mCaptureCallback you should cancel the AF trigger, but the documentation says the AF trigger can be null on some devices, so I did it like this:
mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, null);
The last thing is to resume the preview: mPreviewCaptureSession.setRepeatingRequest(mCaptureRequestBuilder.build(), null, mBackgroundHandler);
EDIT
Here is the working example
private void setFocusArea(int focus_point_x, int focus_point_y) throws CameraAccessException {
if (cameraId == null || mManualFocusEngaged) return;
if (mCameraManager == null){
mCameraManager = (CameraManager) mContext.getSystemService(Context.CAMERA_SERVICE);
}
MeteringRectangle focusArea = null;
if (mCameraManager != null) {
if (mCameraCharacteristics == null) {
mCameraCharacteristics = mCameraManager.getCameraCharacteristics(cameraId);
}
final Rect sensorArraySize = mCameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
int y = focus_point_x;
int x = focus_point_y;
if (sensorArraySize != null) {
y = (int)(((float)focus_point_x / currentWidth) * (float)sensorArraySize.height());
x = (int)(((float)focus_point_y / currentHeight) * (float)sensorArraySize.width());
}
final int halfTouchLength = 150;
focusArea = new MeteringRectangle(Math.max(x - halfTouchLength, 0),
Math.max(y - halfTouchLength, 0),
halfTouchLength * 2,
halfTouchLength * 2,
MeteringRectangle.METERING_WEIGHT_MAX - 1);
}
CameraCaptureSession.CaptureCallback mCaptureCallback = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
mManualFocusEngaged = false;
if (request.getTag().equals(FOCUS_TAG)) { // previously getTag == "Focus_tag"
//the focus trigger is complete -
//resume repeating (preview surface will get frames), clear AF trigger
mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, null);// As documentation says AF_trigger can be null in some device
try {
mCurrentCameraCaptureSession.setRepeatingRequest(mCaptureRequestBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
// error handling
}
}
}
@Override
public void onCaptureFailed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull CaptureFailure failure) {
super.onCaptureFailed(session, request, failure);
mManualFocusEngaged = false;
}
};
mCurrentCameraCaptureSession.stopRepeating(); // Destroy current session
mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
mCurrentCameraCaptureSession.capture(mCaptureRequestBuilder.build(), mCaptureCallback, mBackgroundHandler); //Set all settings for once
if ( isMeteringAreaAESupported()) {
mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[]{focusArea});
}
if ( isMeteringAreaAFSupported()) {
mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_REGIONS, new MeteringRectangle[]{focusArea});
mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
}
mCaptureRequestBuilder.setTag(FOCUS_TAG); //it will be checked inside mCaptureCallback
mCurrentCameraCaptureSession.capture(mCaptureRequestBuilder.build(), mCaptureCallback, mBackgroundHandler);
mManualFocusEngaged = true;
}
private boolean isMeteringAreaAFSupported() { // AF stands for AutoFocus
Integer afRegion = mCameraCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF);
return afRegion != null && afRegion >= 1;
}
private boolean isMeteringAreaAESupported() {//AE stands for AutoExposure
Integer aeState = mCameraCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE);
return aeState!=null && aeState >=1;
}
Hope it helps.
Enjoy coding
I'm building a camera app based on Camera2, but the picture I save does not match the last frame I saw on my surface view. The preview session works, but when I ask for a capture, the new request stops the preview and captures the image. The surface view freezes on the last frame, and that creates a gap between the moment I press the shutter button (preview running, capture requested) and onCaptureCompleted from the capture request.
Here is the preview session
private void createCameraPreviewSession() {
try {
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Log.d(TAG, "here is the width of texture" + mPreviewSize.getWidth());
Log.d(TAG, "here is the height of texture" +mPreviewSize.getHeight());
Surface surface = new Surface(texture);
mPreviewRequestBuilder
= mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.addTarget(surface);
mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
if (null == mCameraDevice) {
return;
}
mCaptureSession = cameraCaptureSession;
try {
mPreviewRequest = mPreviewRequestBuilder.build();
mCaptureSession.setRepeatingRequest(mPreviewRequest,
mCaptureCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(
@NonNull CameraCaptureSession cameraCaptureSession) {
showToast("Failed");
}
}, null
);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
The mCaptureCallback is defined as below:
private CameraCaptureSession.CaptureCallback mCaptureCallback
= new CameraCaptureSession.CaptureCallback() {
private void process(CaptureResult result) {
switch (mState) {
case STATE_PREVIEW:
break;
case STATE_CAPTURE:
mState = STATE_PREVIEW;
capturePicture();
break;
}
}
@Override
public void onCaptureProgressed(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull CaptureResult partialResult) {
process(partialResult);
}
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
TotalCaptureResult iResult = result;
Log.d(TAG, "Frame on Completed: "+result.getFrameNumber());
process(result);
}
};
What's happening is that I am repeating the preview and it works. The process() method is just used to keep it running, and nothing happens until mState is set to STATE_CAPTURE.
It's set to capture when we click the shutter button. When I click the button, I call:
private void takePicture(){
try {
mFile = ImageSaver.generateNewFileImage();
mState = STATE_CAPTURE;
mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback,
mBackgroundHandler);
} catch (CameraAccessException e) {
Log.e(TAG,"Camera exception",e);
}
}
capturePicture() is then called, since mState is STATE_CAPTURE, as handled in mCaptureCallback:
private void capturePicture() {
mTakePictureRunnable = new Runnable() {
@Override
public void run() {
takePictureNow();
}
};
mBackgroundHandler.post(mTakePictureRunnable);
}
takePictureNow() is defined as follows:
private void takePictureNow() {
Log.d(TAG, "Running captureStillPicture");
try {
if (null == mCameraDevice) {
return;
}
// This is the CaptureRequest.Builder that we use to take a picture.
final CaptureRequest.Builder captureBuilder =
mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(mImageReader.getSurface());
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Log.d(TAG, "here is the width of texture" + mPreviewSize.getWidth());
Log.d(TAG, "here is the height of texture" + mPreviewSize.getHeight());
Surface surface = new Surface(texture);
captureBuilder.addTarget(surface);
// Orientation
int rotation = getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
//Location if needed
boolean Location_Saved = CameraSettings.Instance().getBoolean(CameraSettings.SAVE_LOCATION,
getResources().getBoolean(R.bool.action_camera_settings_dflt_location));
if(Location_Saved == true) {
captureBuilder.set(CaptureRequest.JPEG_GPS_LOCATION, mLocationManager.getCurrentLocation());
} else {
captureBuilder.set(CaptureRequest.JPEG_GPS_LOCATION, null);
}
CameraCaptureSession.CaptureCallback CaptureCallback
= new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureStarted(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
long timestamp,
long frameNumber) {
playShutterSound();
showShutterAnimation();
}
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
Log.d(TAG, mFile.toString());
mState = STATE_PREVIEW;
}
};
mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null);
mCaptureSession.stopRepeating();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
This process works, but there is a lag between the button press and the image save, and if I move quickly the last frame I saw on screen from the preview is not exactly the one that gets saved.
It seems that the capture does not update the surface, and the surface only updates while we are in preview.
Any idea how to see what I'm actually saving?
Thanks
The surface will freeze when you take an image, and that frozen frame might not be the image that gets saved. If you still want to show the camera preview on screen, you just have to restart the preview by calling the same function you used to create it the first time.
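In the code above, that roughly means restarting the repeating preview request once the still capture completes; a sketch reusing the question's fields (mPreviewRequest, mCaptureCallback, mBackgroundHandler):
// Sketch: resume the preview once the still capture has finished.
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                               @NonNull CaptureRequest request,
                               @NonNull TotalCaptureResult result) {
    Log.d(TAG, mFile.toString());
    mState = STATE_PREVIEW;
    try {
        session.setRepeatingRequest(mPreviewRequest, mCaptureCallback, mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}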
I'm trying to make an app that broadcasts video over the internet. Currently I am using the deprecated Camera API, adding a Camera.PreviewCallback to the Camera object and then sending the byte array that arrives in the onPreviewFrame() method of Camera.PreviewCallback.
Now I want to try the new Camera2 API. I am looking at the Camera2Basic tutorial, and I think I need to create a CameraCaptureSession.CaptureCallback object to get the image byte array, something like the tutorial shows:
CameraCaptureSession.CaptureCallback CaptureCallback
= new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
showToast("Saved: " + mFile);
Log.d(TAG, mFile.toString());
unlockFocus();
}
};
And then add it to the CameraCaptureSession:
mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null);
The problem is that I don't know how to retrieve each image byte array from any of the parameters in onCaptureCompleted() from the CaptureCallback.
Any help?
You're kind of right: you can't get the image data from the onCaptureCompleted() method. That callback only returns metadata about the exposure, for your own bookkeeping. The actual image data gets sent to whatever Surface you indicated as a target in the exposure's CaptureRequest.
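In practice that means adding an ImageReader's surface as a target of the request and reading the bytes in its OnImageAvailableListener; a minimal JPEG sketch (the size and names are illustrative):
// Sketch: receive frame bytes through an ImageReader rather than the capture callback.
ImageReader reader = ImageReader.newInstance(640, 480, ImageFormat.JPEG, 2);
reader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader r) {
        Image image = r.acquireNextImage();
        if (image == null) return;
        ByteBuffer buffer = image.getPlanes()[0].getBuffer();
        byte[] bytes = new byte[buffer.remaining()];
        buffer.get(bytes);
        image.close();
        // 'bytes' now holds one JPEG frame; hand it to the network code.
    }
}, mBackgroundHandler);
// ...then add reader.getSurface() as a target of the CaptureRequest and to the
// output list passed to createCaptureSession().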
In the end I figured out how to do what I wanted. Starting from the Camera2Basic tutorial, I made the following changes to the Camera2BasicFragment class:
Modify the captureStillPicture() method to remove things I determined were unnecessary for my broadcasting needs, and don't let this method stop the repeating mode:
private void captureStillPicture() {
try {
final Activity activity = getActivity();
if (null == activity || null == mCameraDevice) {
return;
}
final CaptureRequest.Builder captureBuilder =
mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(mImageReader.getSurface());
CameraCaptureSession.CaptureCallback CaptureCallback
= new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
}
};
mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
In the createCameraPreviewSession() method, disable the automatic flash:
// When the session is ready, we start displaying the preview.
mCaptureSession = cameraCaptureSession;
try {
// Auto focus should be continuous for camera preview.
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
// Flash is automatically enabled when necessary.
// mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
// CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
// Finally, we start displaying the camera preview.
mPreviewRequest = mPreviewRequestBuilder.build();
mCaptureSession.setRepeatingRequest(mPreviewRequest,
mCaptureCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
I created a boolean variable to detect whether an image is currently being processed, so that not every frame the camera captures gets queued, and another boolean to track whether a frame is currently being sent over the internet:
private boolean mWorking = false;
private boolean mNetworkWorking = false;
Modify the CaptureCallback object to run the captureStillPicture() method on each frame (only if no frame is being processed at the moment):
case STATE_PREVIEW: {
if (!mWorking){
Log.d(TAG, "capturing..");
mWorking = true;
mBackgroundHandler.post(new Runnable() {
@Override
public void run() {
captureStillPicture();
}
});
} else {
Log.d(TAG, "thread working, doing nothing");
}
break;
Finally, read the frame and send it; I achieved this by modifying the OnImageAvailableListener object:
private final ImageReader.OnImageAvailableListener mOnImageAvailableListener
= new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(final ImageReader reader) {
// Process the image.
Image image = reader.acquireNextImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
final byte[] bytes = new byte[buffer.remaining()];
buffer.get(bytes);
image.close();
if (!mNetworkWorking){
Thread thread = new Thread(){
#Override
public void run(){
mNetworkWorking = true;
HttpResponse response = null;
HttpClient client = new DefaultHttpClient();
HttpPost post = new HttpPost(mBroadcastUrl);
post.setEntity(new ByteArrayEntity(bytes));
try {
response = client.execute(post);
} catch (ClientProtocolException e) {
if (BuildConfig.LOCAL_LOG)
Log.w(TAG, "ClientProtocolException: "+e.getMessage());
} catch (IOException e) {
if (BuildConfig.LOCAL_LOG)
Log.w(TAG, "IOException: "+e.getMessage());
}
mNetworkWorking = false;
}
};
thread.setName("networkThread");
thread.setPriority(Thread.MAX_PRIORITY);
thread.start();
}
mWorking = false;
}
};
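As a side note, DefaultHttpClient and HttpPost (Apache HttpClient) are deprecated and removed on newer Android versions; the same upload can be done with HttpURLConnection. A sketch, assuming the same mBroadcastUrl and bytes as above:
// Sketch: POST one frame with HttpURLConnection instead of DefaultHttpClient.
try {
    HttpURLConnection connection =
            (HttpURLConnection) new URL(mBroadcastUrl).openConnection();
    try {
        connection.setDoOutput(true);
        connection.setRequestMethod("POST");
        connection.setFixedLengthStreamingMode(bytes.length);
        OutputStream out = connection.getOutputStream();
        out.write(bytes);
        out.flush();
        Log.d(TAG, "Upload response: " + connection.getResponseCode());
    } finally {
        connection.disconnect();
    }
} catch (IOException e) {
    Log.w(TAG, "IOException: " + e.getMessage());
}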
That's all.