How to continue capturing video after screen is turned off? - android

In my app I capture video using MediaRecorder.
But when I turn off the screen while the app is recording, the resulting video is black for that period. Is there a way to prevent this and keep the camera recording? Here is my code:
CameraManager manager = (CameraManager) getSystemService(CAMERA_SERVICE);
mMediaRecorder = new MediaRecorder();
outputFile = new File(dir, "temp");
try {
String[] ids = manager.getCameraIdList();
CameraCharacteristics characteristics = manager.getCameraCharacteristics(ids[0]);
StreamConfigurationMap configs = characteristics.get(
CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size[] sizes = configs.getOutputSizes(MediaCodec.class);
final Size sizeHigh = sizes[0];
manager.openCamera(ids[0], new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
mCamera = camera;
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.MPEG_4_SP);
mMediaRecorder.setVideoSize(sizeHigh.getWidth(), sizeHigh.getHeight());
mMediaRecorder.setMaxFileSize(0);
try {
mMediaRecorder.setOutputFile(outputFile.getAbsolutePath());
mMediaRecorder.prepare();
List<Surface> list = new ArrayList<>();
list.add(mMediaRecorder.getSurface());
final CaptureRequest.Builder captureRequest = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
captureRequest.addTarget(mMediaRecorder.getSurface());
mCaptureRequest = captureRequest.build();
mCamera.createCaptureSession(list, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
mSession = session;
mMediaRecorder.start();
try {
mSession.setRepeatingRequest(mCaptureRequest,
new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
super.onCaptureStarted(session, request, timestamp, frameNumber);
if(frameNumber > 2*60*25) {
mSession.stopRepeating();
mMediaRecorder.stop();
mSession=null;
mCamera.close();
}
}
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
mSession = session;
}
}, null);
} catch (Exception e) {
e.printStackTrace();
}

Related

Camera preview and camera capture using the Camera2 API, but 'Request targets Surface that is not part of current capture session' error occurs on Android

I want to show a camera preview and capture images using the Camera2 API.
This code works, but there is one problem:
when I click the capture button, the capture succeeds but the camera preview stops.
If I press the capture button again, the preview is still frozen.
If I never press the capture button after starting the app, the preview works fine.
Here is the error log when the capture button is pressed:
java.lang.IllegalArgumentException: submitRequestList:208: Request targets Surface that is not part of current capture session
at android.hardware.camera2.CameraManager.throwAsPublicException(CameraManager.java:650)
at android.hardware.camera2.impl.ICameraDeviceUserWrapper.submitRequestList(ICameraDeviceUserWrapper.java:86)
at android.hardware.camera2.impl.CameraDeviceImpl.submitCaptureRequest(CameraDeviceImpl.java:935)
at android.hardware.camera2.impl.CameraDeviceImpl.setRepeatingRequest(CameraDeviceImpl.java:974)
at android.hardware.camera2.impl.CameraCaptureSessionImpl.setRepeatingRequest(CameraCaptureSessionImpl.java:243)
at com.bilal.androidthingscameralib.CameraHelper$4.onCaptureCompleted(CameraHelper.java:273)
at java.lang.reflect.Method.invoke(Native Method)
at android.hardware.camera2.dispatch.InvokeDispatcher.dispatch(InvokeDispatcher.java:39)
at android.hardware.camera2.dispatch.HandlerDispatcher$1.run(HandlerDispatcher.java:65)
at android.os.Handler.handleCallback(Handler.java:790)
at android.os.Handler.dispatchMessage(Handler.java:99)
at android.os.Looper.loop(Looper.java:164)
at android.app.ActivityThread.main(ActivityThread.java:6494)
at java.lang.reflect.Method.invoke(Native Method)
at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:438)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:807)
Caused by: android.os.ServiceSpecificException: submitRequestList:208: Request targets Surface that is not part of current capture session (code 3)
at android.os.Parcel.readException(Parcel.java:2018)
at android.os.Parcel.readException(Parcel.java:1950)
at android.hardware.camera2.ICameraDeviceUser$Stub$Proxy.submitRequestList(ICameraDeviceUser.java:334)
at android.hardware.camera2.impl.ICameraDeviceUserWrapper.submitRequestList(ICameraDeviceUserWrapper.java:84)
at android.hardware.camera2.impl.CameraDeviceImpl.submitCaptureRequest(CameraDeviceImpl.java:935) 
at android.hardware.camera2.impl.CameraDeviceImpl.setRepeatingRequest(CameraDeviceImpl.java:974) 
at android.hardware.camera2.impl.CameraCaptureSessionImpl.setRepeatingRequest(CameraCaptureSessionImpl.java:243) 
at com.bilal.androidthingscameralib.CameraHelper$4.onCaptureCompleted(CameraHelper.java:273) 
at java.lang.reflect.Method.invoke(Native Method) 
at android.hardware.camera2.dispatch.InvokeDispatcher.dispatch(InvokeDispatcher.java:39) 
at android.hardware.camera2.dispatch.HandlerDispatcher$1.run(HandlerDispatcher.java:65) 
at android.os.Handler.handleCallback(Handler.java:790) 
at android.os.Handler.dispatchMessage(Handler.java:99) 
at android.os.Looper.loop(Looper.java:164) 
at android.app.ActivityThread.main(ActivityThread.java:6494) 
at java.lang.reflect.Method.invoke(Native Method) 
at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:438) 
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:807) 
And here is my source (MainActivity.class):
private InitializeCamera mInitializeCamera;
TextureView mTextureView;
ImageView imgCaptureImage;
@Override
public void onCreate(Bundle savedInstanceState) {
Button capBtn = (Button) findViewById(R.id.capBtn); //capture button
capBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mInitializeCamera.captureImage();
}
});
}
//camera open texture
private final TextureView.SurfaceTextureListener mSurfaceTextureListener
= new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
mInitializeCamera = new InitializeCamera(getApplicationContext(), mOnPictureAvailableListener, mTextureView, 640, 480, 1);
}
...
};
//picture taken
private OnPictureAvailableListener mOnPictureAvailableListener =
new OnPictureAvailableListener() {
@Override
public void onPictureAvailable(byte[] imageBytes) {
onPictureTaken(imageBytes);
}
};
//get picture
private void onPictureTaken(final byte[] imageBytes) {
if (imageBytes != null) {
Bitmap bitmap = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
imgCaptureImage.setImageBitmap(bitmap);
}
}
(InitializeCamera.class):
OnPictureAvailableListener mOnPictureAvailableListener;
private HandlerThread mCameraHandlerThread;
//Initialize camera library class
private CameraHelper mCameraHelper;
//Handler for running Camera Task in the background
private Handler mCameraHandler = new Handler();
public InitializeCamera(Context mContext, OnPictureAvailableListener mOnPictureAvailableListener, TextureView textureView, int imageHeight, int imageWidth, int maxSize) {
this.mOnPictureAvailableListener = mOnPictureAvailableListener;
//create new handler thread for camera operations.
mCameraHandlerThread = new HandlerThread("CameraBackground");
mCameraHandlerThread.start();
//Initialize Camera class.
mCameraHelper = CameraHelper.getInstance();
mCameraHelper.initializeCameraHelper(mContext, mCameraHandler, textureView, this, imageHeight, imageWidth, maxSize);
}
//capture image
public void captureImage() {
mCameraHelper.takePicture();
}
//get imagereader available
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireNextImage();
ByteBuffer imageBuf = image.getPlanes()[0].getBuffer();
final byte[] imageBytes = new byte[imageBuf.remaining()];
imageBuf.get(imageBytes); // copy the JPEG bytes out before closing the Image
image.close();
//post image bytes data to main UI Thread for displaying it in image view
mCameraHandler.post(new Runnable() {
@Override
public void run() {
mOnPictureAvailableListener.onPictureAvailable(imageBytes);
}
});
}
(CameraHelper.class):
TextureView textureView;
private CameraDevice mCameraDevice;
private CameraCaptureSession mCaptureSession;
private ImageReader mImageReader;
Size imageDimension;
private CaptureRequest.Builder captureRequestBuilder;
//Lazy-loaded singleton, so only one instance of the camera is created.
private CameraHelper() {
}
private static class InstanceHolder {
private static CameraHelper mCamera = new CameraHelper();
}
public static CameraHelper getInstance() {
return InstanceHolder.mCamera;
}
//Initialize the camera device.
public void initializeCameraHelper(Context context, Handler backgroundHandler, TextureView textureView, ImageReader.OnImageAvailableListener imageAvailableListener, int imageWidth, int imageHeight, int maxImages) {
this.textureView = textureView;
//discover the camera instance
CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
String[] camIds = {};
try {
camIds = manager.getCameraIdList();
if (camIds.length < 1) {
return;
}
String id = camIds[0];
CameraCharacteristics characteristics = manager.getCameraCharacteristics(id);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
imageDimension = map.getOutputSizes(SurfaceTexture.class)[2];
//Initialize the image processor
mImageReader = ImageReader.newInstance(imageWidth, imageHeight, ImageFormat.JPEG, maxImages);
mImageReader.setOnImageAvailableListener(
imageAvailableListener, backgroundHandler);
//Open camera resource
manager.openCamera(id, mStateCallback, backgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
/**
* Callback handling device state changes
*/
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice cameraDevice) {
Log.d(TAG, "Opened camera.");
mCameraDevice = cameraDevice;
createCameraPreview();
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
Log.d(TAG, "Camera disconnected, closing.");
closeCaptureSession();
cameraDevice.close();
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int i) {
Log.d(TAG, "Camera device error, closing.");
closeCaptureSession();
cameraDevice.close();
}
@Override
public void onClosed(@NonNull CameraDevice cameraDevice) {
Log.d(TAG, "Closed camera, releasing");
mCameraDevice = null;
}
};
private void createCameraPreview() {
Log.d(TAG, "createCameraPreview --" + textureView);
try {
SurfaceTexture texture = textureView.getSurfaceTexture();
texture.setDefaultBufferSize(imageDimension.getWidth(), imageDimension.getHeight());
Surface surface = new Surface(texture);
captureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureRequestBuilder.addTarget(surface);
mCameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
if (mCameraDevice == null) {
return;
}
mCaptureSession = session;
updatePreview();
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
//ready preview screen
private void updatePreview() {
if (mCameraDevice == null) {
}
captureRequestBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
try {
mCaptureSession.setRepeatingRequest(captureRequestBuilder.build(), null, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
/**
* Begin a still image capture
*/
public void takePicture() {
if (mCameraDevice == null) {
Log.w(TAG, "Cannot capture image. Camera not initialized.");
return;
}
// Here, we create a CameraCaptureSession for capturing still images.
try {
mCameraDevice.createCaptureSession(
Collections.singletonList(mImageReader.getSurface()),
mSessionCallback,
null);
} catch (CameraAccessException cae) {
Log.d(TAG, "access exception while preparing pic", cae);
}
}
/**
* Callback handling session state changes
*/
private CameraCaptureSession.StateCallback mSessionCallback =
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
// The camera is already closed
if (mCameraDevice == null) {
return;
}
// When the session is ready, we start capture.
mCaptureSession = cameraCaptureSession;
triggerImageCapture();
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
Log.w(TAG, "Failed to configure camera");
}
};
/**
* Execute a new capture request within the active session
*/
private void triggerImageCapture() {
try {
final CaptureRequest.Builder captureBuilder =
mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(mImageReader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
Log.d(TAG, "Capture request created.");
mCaptureSession.capture(captureBuilder.build(), mCaptureCallback, null);
} catch (CameraAccessException cae) {
Log.d(TAG, "camera capture exception");
}
}
/**
* Callback handling capture session events
*/
private final CameraCaptureSession.CaptureCallback mCaptureCallback =
new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureProgressed(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull CaptureResult partialResult) {
Log.d(TAG, "Partial result");
}
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
//session.close();
//mCaptureSession = null;
captureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
try {
mCaptureSession.setRepeatingRequest(captureRequestBuilder.build(), mCaptureCallback, null); //OCCUR ERROR LOG
} catch (CameraAccessException e) {
e.printStackTrace();
}
Log.d(TAG, "CaptureSession closed");
}
};
The error occurs in this part of CameraHelper.class:
Request targets Surface that is not part of current capture session
private final CameraCaptureSession.CaptureCallback mCaptureCallback =
new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
//session.close();
//mCaptureSession = null;
captureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
try {
mCaptureSession.setRepeatingRequest(captureRequestBuilder.build(), mCaptureCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
Log.d(TAG, "CaptureSession closed");
}
};
In this part, I think there is a problem with mCaptureCallback after the capture.
How can I fix this problem?
If you know, please advise me. Thanks.
You're creating a new capture session in takePicture with just the one ImageReader Surface; after that, you can no longer use the TextureView Surface as a capture request target. You have to create yet another session to resume preview.
It's better to just add the JPEG ImageReader Surface to the initial capture session, and not have to be continually tearing down sessions to take pictures (which is quite slow). For regular preview, don't target the ImageReader in the repeating request, and then for still capture, just issue a capture request that targets the ImageReader.
See the camera2basic sample for examples of this.
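As an illustration only, here is a minimal sketch of that approach; the names mCameraDevice, previewSurface, mImageReader, mCaptureSession and mBackgroundHandler are placeholders rather than the question's actual fields. The session is created once with both output Surfaces, the repeating request targets only the preview, and a still capture is a one-shot request that targets the ImageReader:

// Create one session that lists both outputs up front (sketch).
mCameraDevice.createCaptureSession(
        Arrays.asList(previewSurface, mImageReader.getSurface()),
        new CameraCaptureSession.StateCallback() {
            @Override
            public void onConfigured(CameraCaptureSession session) {
                mCaptureSession = session;
                try {
                    // The repeating preview request targets only the preview Surface.
                    CaptureRequest.Builder preview =
                            mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
                    preview.addTarget(previewSurface);
                    session.setRepeatingRequest(preview.build(), null, mBackgroundHandler);
                } catch (CameraAccessException e) {
                    e.printStackTrace();
                }
            }

            @Override
            public void onConfigureFailed(CameraCaptureSession session) { }
        }, mBackgroundHandler);

// Later, for a still image, issue a single capture that targets the ImageReader;
// the repeating preview request keeps running, so the preview does not freeze.
CaptureRequest.Builder still =
        mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
still.addTarget(mImageReader.getSurface());
mCaptureSession.capture(still.build(), null, mBackgroundHandler);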

Surface had no valid native Surface for Camera2

I've integrated camera2 with a TextureView. It works on all devices, but on a tablet, when we capture an image for the first time, it crashes and displays the following log:
Fatal Exception: java.lang.IllegalArgumentException: Surface had no valid native Surface.
at android.hardware.camera2.legacy.LegacyCameraDevice.nativeGetSurfaceId(LegacyCameraDevice.java)
at android.hardware.camera2.legacy.LegacyCameraDevice.getSurfaceId(LegacyCameraDevice.java:658)
at android.hardware.camera2.legacy.LegacyCameraDevice.containsSurfaceId(LegacyCameraDevice.java:678)
at android.hardware.camera2.legacy.RequestThreadManager$2.onPictureTaken(RequestThreadManager.java:220)
at android.hardware.Camera$EventHandler.handleMessage(Camera.java:1248)
at android.os.Handler.dispatchMessage(Handler.java:111)
at android.os.Looper.loop(Looper.java:207)
at android.hardware.camera2.legacy.CameraDeviceUserShim$CameraLooper.run(CameraDeviceUserShim.java:136)
at java.lang.Thread.run(Thread.java:818)
The following code is used to capture the image:
protected void takePicture() {
if (getContext() == null || cameraDevice == null) return;
lockFocus();
CameraManager manager = (CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraDevice.getId());
if (characteristics != null) {
sizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.JPEG);
}
ImageReader reader = getImageReader();
if (reader == null) return;
List<Surface> outputSurfaces = getSurfaces(reader);
final CaptureRequest.Builder captureBuilder = getCaptureBuilder(reader);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
}
};
cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
session.capture(captureBuilder.build(), captureListener, null);
} catch (Exception e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
}
}, null);
} catch (Exception e) {
e.printStackTrace();
}
}
Any help would be appreciated.
That can happen if the ImageReader gets garbage collected before the camera picture capture completes.
Does the getImageReader method store the image reader somewhere permanent (like as a class member)? If not, the Surface from the ImageReader is like a weak reference, and will not keep it from being removed.
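For example, a minimal sketch of keeping the reader strongly reachable for the whole capture; the field name mImageReader and the width, height, and image count here are only illustrative, not taken from the question's code:

// Held as a class member so the ImageReader (and its Surface) cannot be
// garbage collected while a capture is still in flight.
private ImageReader mImageReader;

private ImageReader getImageReader() {
    if (mImageReader == null) {
        // Width, height, and maxImages stand in for whatever the app already uses.
        mImageReader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 2);
    }
    return mImageReader;
}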

Poor camera2 video recording performance

I'm attempting to record video via a MediaRecorder and the camera2 API, and I'm having some difficulty getting good-quality recordings out of it.
I am also getting a few different errors in my logcat, like "App passed a NULL surface", when starting up the TextureView.
Other than that it seems to work, but the captured videos are zoomed in and have a very low frame rate of ~2 fps.
This is the code that I'm working with:
public class VideoTest extends AppCompatActivity {
private TextureView mTextureView;
private CameraDevice mCameraDevice;
private CaptureRequest.Builder mPreviewBuilder;
private CameraCaptureSession mPreviewSession;
private Size mPreviewSize;
private Handler backgroundHandler;
private HandlerThread thread;
private MediaRecorder mMediaRecorder;
private String mVideoPath;
private boolean mIsRecordingVideo;
private static final String TAG = "VideoTest";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_video_test);
mTextureView = (TextureView) findViewById(R.id.texture);
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
mMediaRecorder = new MediaRecorder();
}
@Override
public void onResume() {
super.onResume();
registerReceiver(buttonReceiver, new IntentFilter("ACTION_PRESSED"));
}
@Override
public void onPause() {
Log.d(TAG, "onPause");
super.onPause();
closeCamera();
stopBackgroundThread();
}
private BroadcastReceiver buttonReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
Log.d(TAG, "Got Button Press!");
try {
if (mIsRecordingVideo) {
i2cRequest(IndicatorControlReceiver.INDICATOR_OFF);
stopRecordingVideo();
} else {
i2cRequest(IndicatorControlReceiver.INDICATOR_ON);
startRecordingVideo();
}
} catch (Exception ex) {
Log.d(TAG, "ERROR BLAH CAMERA SUX");
}
}
};
private void openCamera() {
CameraManager camManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
Log.d(TAG, "Opening Camera");
try {
String camId = camManager.getCameraIdList()[0];
CameraCharacteristics cameraChars = camManager.getCameraCharacteristics(camId);
StreamConfigurationMap map = cameraChars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
assert map != null;
mPreviewSize = map.getOutputSizes(SurfaceTexture.class)[14];
camManager.openCamera(camId, cameraStateCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
Log.e(TAG, "onSurfaceTextureAvailable, width="+width+",height="+height);
openCamera();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
//Log.e(TAG, "onSurfaceTextureSizeChanged");
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
//Log.e(TAG, "onSurfaceTextureUpdated");
}
};
private CameraDevice.StateCallback cameraStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice camera) {
Log.d(TAG, "onOpened");
mCameraDevice = camera;
startPreview();
}
@Override
public void onDisconnected(@NonNull CameraDevice camera) {
Log.d(TAG, "onDisconnected");
}
@Override
public void onError(@NonNull CameraDevice camera, int error) {
Log.e(TAG, "onError code: " + error);
}
};
private void startPreview() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
Log.e(TAG, "Error Starting Preview. ABORTED!");
return;
}
SurfaceTexture texture = mTextureView.getSurfaceTexture();
if(null == texture) {
Log.e(TAG, "Cannot create texture. ABORTED!");
return;
}
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface surface = new Surface(texture);
try {
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
} catch (CameraAccessException e) {
e.printStackTrace();
}
mPreviewBuilder.addTarget(surface);
try {
mCameraDevice.createCaptureSession(Collections.singletonList(surface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
mPreviewSession = session;
updatePreview();
i2cRequest(I2CRequestReceiver.VIDEO_READY);
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
Log.e(TAG, "onConfigureFailed");
i2cRequest(I2CRequestReceiver.E_CAMERA_ERROR);
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void updatePreview() {
if(null == mCameraDevice) {
Log.e(TAG, "Camera Device is Null! ABORT!");
return;
}
/* mPreviewBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
mPreviewBuilder.set(CaptureRequest.CONTROL_AF_MODE,CameraMetadata.CONTROL_AF_MODE_OFF);*/
mPreviewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<>(30000,30000));
thread = new HandlerThread("CameraPreview");
thread.start();
backgroundHandler = new Handler(thread.getLooper());
try {
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, backgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
/**
* Stops the background thread and its {@link Handler}.
*/
private void stopBackgroundThread() {
thread.quitSafely();
try {
thread.join();
thread = null;
backgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private void closePreviewSession() {
if (null != mPreviewSession) {
mPreviewSession.close();
}
}
private void closeCamera(){
closePreviewSession();
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
}
private void setupMediaRecorder() throws IOException {
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
/*
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
*/
CamcorderProfile profile = CamcorderProfile.get(CamcorderProfile.QUALITY_720P);
profile.audioBitRate = 128000;
profile.audioCodec = MediaRecorder.AudioEncoder.AAC;
profile.fileFormat = MediaRecorder.OutputFormat.MPEG_4;
profile.videoCodec = MediaRecorder.VideoEncoder.H264;
profile.videoBitRate = 2048000;
profile.videoFrameRate = 30;
mMediaRecorder.setProfile(profile);
/* mMediaRecorder.setVideoEncodingBitRate(2048000);
mMediaRecorder.setAudioEncodingBitRate(128000);
mMediaRecorder.setVideoSize(1280, 720);
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);*/
if (mVideoPath == null || mVideoPath.isEmpty()) {
mVideoPath = getVideoFilePath();
}
mMediaRecorder.setOutputFile(mVideoPath);
mMediaRecorder.prepare();
}
@SuppressLint("SdCardPath")
private String getVideoFilePath() {
return "/sdcard/LIVE/video/" + System.currentTimeMillis() + ".mp4";
}
Surface recorderSurface;
private void startRecordingVideo() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
Log.e(TAG, "Cannot bind camera, textureView, or previewSize");
return;
}
try {
closePreviewSession();
setupMediaRecorder();
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(1280, 720);
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
List<Surface> surfaces = new ArrayList<>();
Surface previewSurface = new Surface(texture);
surfaces.add(previewSurface);
mPreviewBuilder.addTarget(previewSurface);
recorderSurface = mMediaRecorder.getSurface();
surfaces.add(recorderSurface);
mPreviewBuilder.addTarget(recorderSurface);
mCameraDevice.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
mPreviewSession = session;
updatePreview();
new Thread(new Runnable() {
@Override
public void run() {
i2cRequest(I2CRequestReceiver.VIDEO_RECORDING);
mIsRecordingVideo = true;
mMediaRecorder.start();
}
}).start();
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
Log.e(TAG, "Capture failed!");
runOnUiThread(new Runnable() {
@Override
public void run() {
i2cRequest(I2CRequestReceiver.E_CAMERA_ERROR);
mIsRecordingVideo = false;
}
});
}
},backgroundHandler);
} catch (IOException | CameraAccessException e) {
e.printStackTrace();
}
}
private void stopRecordingVideo() {
try {
mPreviewSession.abortCaptures();
} catch (CameraAccessException e) {
e.printStackTrace();
}
mIsRecordingVideo = false;
mMediaRecorder.stop();
mMediaRecorder.reset();
Log.d(TAG, "Video saved: " + mVideoPath);
}
private void i2cRequest(String request) {
Intent sendI2cRequest = new Intent();
sendI2cRequest.setAction(I2CRequestReceiver.NOWSPEAK_REQUEST_ACTION);
switch (request) {
case I2CRequestReceiver.VIDEO_READY:
sendI2cRequest.putExtra(I2CRequestReceiver.EXTRA_SPEAK, I2CRequestReceiver.VIDEO_READY);
Log.d(TAG, "VIDEO READY!!");
break;
case I2CRequestReceiver.E_CAMERA_ERROR:
sendI2cRequest.putExtra(I2CRequestReceiver.EXTRA_SPEAK, I2CRequestReceiver.E_CAMERA_ERROR);
Log.d(TAG, "VIDEO ERROR!!");
break;
case IndicatorControlReceiver.INDICATOR_ON:
sendI2cRequest.setAction(IndicatorControlReceiver.INDICATOR_CONTROL_ACTION);
sendI2cRequest.putExtra(IndicatorControlReceiver.EXTRA_INDICATOR, IndicatorControlReceiver.INDICATOR_ON);
break;
case IndicatorControlReceiver.INDICATOR_OFF:
sendI2cRequest.setAction(IndicatorControlReceiver.INDICATOR_CONTROL_ACTION);
sendI2cRequest.putExtra(IndicatorControlReceiver.EXTRA_INDICATOR, IndicatorControlReceiver.INDICATOR_OFF);
break;
}
LocalBroadcastManager.getInstance(this).sendBroadcast(sendI2cRequest);
}
}
CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE takes frame rates in units of FPS, not 1/1000 of FPS.
So try
mPreviewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<>(30,30));
or even better, pick from the list provided by CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES.
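For example, a rough sketch of picking a supported range; it reuses the cameraChars object from the question's openCamera() (you would need to keep that CameraCharacteristics around or re-query it) and simply prefers a fixed 30 fps range if the device advertises one:

Range<Integer>[] fpsRanges =
        cameraChars.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
Range<Integer> chosen = fpsRanges[0];
for (Range<Integer> r : fpsRanges) {
    // Prefer a fixed 30/30 range so the recorder gets a steady frame rate.
    if (r.getLower() == 30 && r.getUpper() == 30) {
        chosen = r;
        break;
    }
}
mPreviewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, chosen);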
After talking with the board manufacturer, it seems that there are some issues with their implementation and they are working on fixing them.
I would like to thank @CommonsWare and @EddyTalvala for their expertise in helping me find the issue here.
-Rob

Enable a preview during a capture session on Camera2

I'm building a camera app based on Camera2, but the picture I save does not match the latest frame I saw on my surface view. The preview session works, but when I ask for a capture, the new request stops the preview and captures the image. The surface view freezes on the last frame, and that creates a gap between the moment I press the shutter button (preview running, capture requested) and the onCaptureCompleted of the capture request.
Here is the preview session:
private void createCameraPreviewSession() {
try {
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Log.d(TAG, "here is the width of texture" + mPreviewSize.getWidth());
Log.d(TAG, "here is the height of texture" +mPreviewSize.getHeight());
Surface surface = new Surface(texture);
mPreviewRequestBuilder
= mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.addTarget(surface);
mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
if (null == mCameraDevice) {
return;
}
mCaptureSession = cameraCaptureSession;
try {
mPreviewRequest = mPreviewRequestBuilder.build();
mCaptureSession.setRepeatingRequest(mPreviewRequest,
mCaptureCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(
@NonNull CameraCaptureSession cameraCaptureSession) {
showToast("Failed");
}
}, null
);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
mCaptureCallback is defined as below:
private CameraCaptureSession.CaptureCallback mCaptureCallback
= new CameraCaptureSession.CaptureCallback() {
private void process(CaptureResult result) {
switch (mState) {
case STATE_PREVIEW:
break;
case STATE_CAPTURE:
mState = STATE_PREVIEW;
capturePicture();
break;
}
}
@Override
public void onCaptureProgressed(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull CaptureResult partialResult) {
process(partialResult);
}
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
TotalCaptureResult iResult = result;
Log.d(TAG, "Frame on Completed: "+result.getFrameNumber());
process(result);
}
};
What's happening is that I keep the preview repeating and it works. The process() method is just there to keep it running, and nothing happens until mState is set to CAPTURE.
It's set to CAPTURE when we click the shutter button. When I click the button, I call:
private void takePicture(){
try {
mFile = ImageSaver.generateNewFileImage();
mState = STATE_CAPTURE;
mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback,
mBackgroundHandler);
} catch (CameraAccessException e) {
Log.e(TAG,"Camera exception",e);
}
}
capturePicture() is then called because mState is CAPTURE, as handled in mCaptureCallback:
private void capturePicture() {
mTakePictureRunnable = new Runnable() {
#Override
public void run() {
takePictureNow();
}
};
mBackgroundHandler.post(mTakePictureRunnable);
}
takePictureNow() is defined as:
private void takePictureNow() {
Log.d(TAG, "Running captureStillPicture");
try {
if (null == mCameraDevice) {
return;
}
// This is the CaptureRequest.Builder that we use to take a picture.
final CaptureRequest.Builder captureBuilder =
mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(mImageReader.getSurface());
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Log.d(TAG, "here is the width of texture" + mPreviewSize.getWidth());
Log.d(TAG, "here is the height of texture" + mPreviewSize.getHeight());
Surface surface = new Surface(texture);
captureBuilder.addTarget(surface);
// Orientation
int rotation = getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
//Location if needed
boolean Location_Saved = CameraSettings.Instance().getBoolean(CameraSettings.SAVE_LOCATION,
getResources().getBoolean(R.bool.action_camera_settings_dflt_location));
if(Location_Saved == true) {
captureBuilder.set(CaptureRequest.JPEG_GPS_LOCATION, mLocationManager.getCurrentLocation());
} else {
captureBuilder.set(CaptureRequest.JPEG_GPS_LOCATION, null);
}
CameraCaptureSession.CaptureCallback CaptureCallback
= new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureStarted(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
long timestamp,
long framenumber) {
playShutterSound();
showShutterAnimation();
}
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
Log.d(TAG, mFile.toString());
mState = STATE_PREVIEW;
}
};
mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null);
mCaptureSession.stopRepeating();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
This process works, but I get a lag between the button press and the image save, and the latest frame I saw on the screen from the preview is not exactly the one that gets saved if I move quickly.
It seems that the capture does not update the surface, and the surface only updates while we are in preview.
Any idea how to see what I save?
Thanks
The surface will freeze when you take an image, and the frozen frame might not be the image that gets saved. If you still want to show the camera preview on the screen, you have to restart the preview, for example by calling the function you used to create the preview the first time.
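As a sketch of one way to do that with the names already used in the question: since the session created in createCameraPreviewSession() already contains both the preview Surface and the ImageReader Surface, it is usually enough to re-submit the repeating preview request once the still capture completes, instead of building a whole new session:

@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                               @NonNull CaptureRequest request,
                               @NonNull TotalCaptureResult result) {
    Log.d(TAG, mFile.toString());
    mState = STATE_PREVIEW;
    try {
        // stopRepeating() froze the TextureView, so resume the preview
        // by re-submitting the existing preview request on the same session.
        mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}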

How can I show an image taken from the camera?

I'm trying to write an Android app that works with the camera.
I show a preview of the camera in a TextureView; that part is working. If you press a button, the camera takes a picture and shows it in an ImageView on the second half of the screen.
But every time I press the button my app stops working, and it gives me the error
android.view.ViewRootImpl$CalledFromWrongThreadException: Only the original thread that created a view hierarchy can touch its views.
So I looked it up, and it seems I can't change the image of the ImageView the way I'm trying to. But I don't know how else to solve my problem. Can you give me some advice? Here is my code; the image is changed in the ImageReader.OnImageAvailableListener inside the method takePicture().
public class MainActivityOld extends Activity {
private TextureView mTextureView;
private ImageView mImageView;
private CameraDevice mCameraDevice;
private Size mPreviewSize;
private CaptureRequest.Builder mPreviewBuilder;
private CameraCaptureSession mPreviewSession;
private Button mBtnShot;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//no titlebar
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.activity_main);
mTextureView = (TextureView) findViewById(R.id.texture);
mTextureView.setSurfaceTextureListener(mSurfaceListener);
mImageView = (ImageView) findViewById(R.id.lastPicture);
mBtnShot = (Button) findViewById(R.id.btn_takepicture);
mBtnShot.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
takePicture();
}
});
}
private void takePicture() {
Log.e("MyTag", "Take picture");
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraDevice.getId());
Size[] jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.JPEG);
int width = jpegSizes[0].getWidth();
int height = jpegSizes[0].getHeight();
ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
List<Surface> outputSurfaces = new ArrayList<Surface>(2);
outputSurfaces.add(reader.getSurface());
outputSurfaces.add(new Surface(mTextureView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener(){
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
mImageView.setImageBitmap(bitmap);
}
};
HandlerThread thread = new HandlerThread("CameraPicture");
thread.start();
final Handler backgroundHandler = new Handler(thread.getLooper());
reader.setOnImageAvailableListener(readerListener,backgroundHandler);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
startPreview();
}
};
mCameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
session.capture(captureBuilder.build(), captureListener, backgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
}
}, backgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void openCamera() {
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
String cameraId = manager.getCameraIdList()[0];
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
mPreviewSize = map.getOutputSizes(SurfaceTexture.class)[0];
manager.openCamera(cameraId, mStateCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
Log.e("MyTag", "openCamera");
}
private TextureView.SurfaceTextureListener mSurfaceListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
Log.e("myLog", "onSurfaceTextureAvailable, width=" + width + ",height=" + height);
openCamera();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
};
private CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
Log.e("myTag", "onOpened");
mCameraDevice = camera;
startPreview();
}
@Override
public void onDisconnected(CameraDevice camera) {
Log.e("myTag", "onDisconnected");
}
@Override
public void onError(CameraDevice camera, int error) {
Log.e("myTag", "onDisconnected");
}
};
protected void startPreview() {
if (mCameraDevice == null || mTextureView.isAvailable() || mPreviewSize == null) {
Log.e("MyTag", "startPreview failed, still working, so just ignore it");
// return;
}
SurfaceTexture texture = mTextureView.getSurfaceTexture();
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface surface = new Surface(texture);
try {
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
} catch (CameraAccessException e) {
e.printStackTrace();
}
mPreviewBuilder.addTarget(surface);
try {
mCameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
mPreviewSession = session;
updatePreview();
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
Toast.makeText(MainActivityOld.this, "onConfigureFailed", Toast.LENGTH_LONG).show();
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
protected void updatePreview() {
mPreviewBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
HandlerThread thread = new HandlerThread("CameraPreview");
thread.start();
Handler backgroundHandler = new Handler(thread.getLooper());
try {
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, backgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
}
As the CalledFromWrongThreadException says, your readerListener is running on another thread, from which you cannot touch your views.
Change your code to:
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener(){
@Override
public void onImageAvailable(final ImageReader reader) {
MainActivityOld.this.runOnUiThread(new Runnable() {
@Override
public void run() {
Image image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
image.close(); // release the Image so the ImageReader can deliver new frames
Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
mImageView.setImageBitmap(bitmap);
}
});
}
};
I think you will be better off trying to stream to a SurfaceView; I'm not sure this can even be done with an ImageView.
Try this tutorial, it should get you going in the right direction,
or this for a more up-to-date example provided by Google.
