I transmit images frame by frame from the camera over a socket, one frame per click on the form. I do not understand how to make the transfer uninterrupted after a single click. I tried putting the capture in a loop, but then the image displayed on the form freezes. What is the right way to do this? Here is the code I am working with.
/**
* Called when the user clicks on our {@code SurfaceView}, which has ID {@code mainSurfaceView}
* as defined in the {@code mainactivity.xml} layout file. <p>Captures a full-resolution image
* and saves it to permanent storage.</p>
*/
public void onClickOnSurfaceView(View v) {
if (mCaptureSession != null) {
try {
CaptureRequest.Builder requester =
mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
requester.addTarget(mCaptureBuffer.getSurface());
try {
// This handler can be null because we aren't actually attaching any callback
mCaptureSession.capture(requester.build(), /*listener*/null, /*handler*/null);
} catch (CameraAccessException ex) {
Log.e(TAG, "Failed to file actual capture request", ex);
}
} catch (CameraAccessException ex) {
Log.e(TAG, "Failed to build actual capture request", ex);
}
} else {
Log.e(TAG, "User attempted to perform a capture outside our session");
}
// Control flow continues in mImageCaptureListener.onImageAvailable()
}
/**
* Callbacks invoked upon state changes in our {@code SurfaceView}.
*/
final SurfaceHolder.Callback mSurfaceHolderCallback = new SurfaceHolder.Callback() {
/** The camera device to use, or null if we haven't yet set a fixed surface size. */
private String mCameraId;
/** Whether we received a change callback after setting our fixed surface size. */
private boolean mGotSecondCallback;
@Override
public void surfaceCreated(SurfaceHolder holder) {
// This is called every time the surface returns to the foreground
Log.i(TAG, "Surface created");
mCameraId = null;
mGotSecondCallback = false;
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "Surface destroyed");
holder.removeCallback(this);
// We don't stop receiving callbacks forever because onResume() will reattach us
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
// On the first invocation, width and height were automatically set to the view's size
if (mCameraId == null) {
// Find the device's back-facing camera and set the destination buffer sizes
try {
for (String cameraId : mCameraManager.getCameraIdList()) {
CameraCharacteristics cameraCharacteristics =
mCameraManager.getCameraCharacteristics(cameraId);
if (cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) ==
CameraCharacteristics.LENS_FACING_BACK) {
Log.i(TAG, "Found a back-facing camera");
StreamConfigurationMap info = cameraCharacteristics
.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
// Bigger is better when it comes to saving our image
Size largestSize = Collections.max(
Arrays.asList(info.getOutputSizes(ImageFormat.JPEG)),
new CompareSizesByArea());
// Prepare an ImageReader in case the user wants to capture images
Log.i(TAG, "Capture size: " + largestSize);
mCaptureBuffer = ImageReader.newInstance(640,
480, ImageFormat.JPEG, /*maxImages*/2);
mCaptureBuffer.setOnImageAvailableListener(
mImageCaptureListener, mBackgroundHandler);
Log.i(TAG, "SurfaceView size: " +
mSurfaceView.getWidth() + 'x' + mSurfaceView.getHeight());
Size optimalSize = chooseBigEnoughSize(
info.getOutputSizes(SurfaceHolder.class), width, height);
// Set the SurfaceHolder to use the camera's largest supported size
Log.i(TAG, "Preview size: " + optimalSize);
SurfaceHolder surfaceHolder = mSurfaceView.getHolder();
surfaceHolder.setFixedSize(optimalSize.getWidth(),
optimalSize.getHeight());
mCameraId = cameraId;
return;
}
}
} catch (CameraAccessException ex) {
Log.e(TAG, "Unable to list cameras", ex);
}
Log.e(TAG, "Didn't find any back-facing cameras");
// This is the second time the method is being invoked: our size change is complete
} else if (!mGotSecondCallback) {
if (mCamera != null) {
Log.e(TAG, "Aborting camera open because it hadn't been closed");
return;
}
// Open the camera device
try {
mCameraManager.openCamera(mCameraId, mCameraStateCallback,
mBackgroundHandler);
} catch (CameraAccessException ex) {
Log.e(TAG, "Failed to configure output surface", ex);
}
mGotSecondCallback = true;
// Control flow continues in mCameraStateCallback.onOpened()
}
}
};
/**
* Callbacks invoked upon state changes in our {@code CameraDevice}. <p>These are run on
* {@code mBackgroundThread}.</p>
*/
final CameraDevice.StateCallback mCameraStateCallback =
new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
Log.i(TAG, "Successfully opened camera");
mCamera = camera;
try {
List<Surface> outputs = Arrays.asList(
mSurfaceView.getHolder().getSurface(), mCaptureBuffer.getSurface());
camera.createCaptureSession(outputs, mCaptureSessionListener,
mBackgroundHandler);
} catch (CameraAccessException ex) {
Log.e(TAG, "Failed to create a capture session", ex);
}
// Control flow continues in mCaptureSessionListener.onConfigured()
}
@Override
public void onDisconnected(CameraDevice camera) {
Log.e(TAG, "Camera was disconnected");
}
@Override
public void onError(CameraDevice camera, int error) {
Log.e(TAG, "State error on device '" + camera.getId() + "': code " + error);
}
};
/**
* Callbacks invoked upon state changes in our {@code CameraCaptureSession}. <p>These are run on
* {@code mBackgroundThread}.</p>
*/
final CameraCaptureSession.StateCallback mCaptureSessionListener =
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
Log.i(TAG, "Finished configuring camera outputs");
mCaptureSession = session;
SurfaceHolder holder = mSurfaceView.getHolder();
if (holder != null) {
try {
// Build a request for preview footage
CaptureRequest.Builder requestBuilder =
mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
requestBuilder.addTarget(holder.getSurface());
CaptureRequest previewRequest = requestBuilder.build();
// Start displaying preview images
try {
session.setRepeatingRequest(previewRequest, /*listener*/null,
/*handler*/null);
} catch (CameraAccessException ex) {
Log.e(TAG, "Failed to make repeating preview request", ex);
}
} catch (CameraAccessException ex) {
Log.e(TAG, "Failed to build preview request", ex);
}
} else {
Log.e(TAG, "Holder didn't exist when trying to formulate preview request");
}
}
@Override
public void onClosed(CameraCaptureSession session) {
mCaptureSession = null;
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
Log.e(TAG, "Configuration error on device '" + mCamera.getId());
}
};
/**
* Callback invoked when we've received a JPEG image from the camera.
*/
final ImageReader.OnImageAvailableListener mImageCaptureListener =
new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
// Save the image once we get a chance
mBackgroundHandler.post(new CapturedImageSaver(reader.acquireNextImage()));
}
};
static class CapturedImageSaver implements Runnable {
/**
* The image to save.
*/
private Image mCapture;
public CapturedImageSaver(Image capture) {
mCapture = capture;
}
@Override
public void run() {
try {
// Choose an unused filename under the Pictures/ directory
File file = File.createTempFile(CAPTURE_FILENAME_PREFIX, ".jpg",
Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES));
try (FileOutputStream ostream = new FileOutputStream(file)) {
Log.i(TAG, "Retrieved image is" +
(mCapture.getFormat() == ImageFormat.JPEG ? "" : "n't") + " a JPEG");
ByteBuffer buffer = mCapture.getPlanes()[0].getBuffer();
Log.i(TAG, "Captured image size: " +
mCapture.getWidth() + 'x' + mCapture.getHeight());
// Write the image out to the chosen file
byte[] jpeg = new byte[buffer.remaining()];
buffer.get(jpeg);
//ostream.write(jpeg);
//send image via socket
System.out.println(SERVER_IP);
InetAddress serverAddr = InetAddress.getByName(SERVER_IP);
System.out.println("Created serverAddr " + SERVER_IP);
Socket socket = new Socket(serverAddr, SERVER_PORT);
System.out.println("Socket created..");
System.out.println(mCapture.getWidth() + "^" + mCapture.getHeight());
try (DataOutputStream dOut = new DataOutputStream(socket.getOutputStream())) {
dOut.writeInt(jpeg.length);
dOut.write(jpeg);
dOut.flush();
}
} catch (FileNotFoundException ex) {
Log.e(TAG, "Unable to open output file for writing", ex);
} catch (IOException ex) {
Log.e(TAG, "Failed to write the image to the output file", ex);
}
} catch (IOException ex) {
Log.e(TAG, "Unable to create a new output file", ex);
} finally {
mCapture.close();
}
}
}
}
I reworked the code a bit: the function now requests a snapshot on a timer every 170 milliseconds. If I set the interval any lower, the interface hangs. Can someone tell me how to do this properly? Right now I get 5.88 frames per second; I would like to reach at least 15 frames per second.
public void onClickOnSurfaceView(View v) {
int i = 0;
Timer myTimer = new Timer(); // Create the timer
final Handler uiHandler = new Handler();
myTimer.schedule(new TimerTask() { // Define the task
@Override
public void run() {
uiHandler.post(new Runnable() {
@Override
public void run() {
if (mCaptureSession != null) {
//while(true){
try {
CaptureRequest.Builder requester =
mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
requester.addTarget(mCaptureBuffer.getSurface());
//while(i<20) {
try {
// This handler can be null because we aren't actually attaching any callback
mCaptureSession.capture(requester.build(), /*listener*/null, /*handler*/null);
} catch (CameraAccessException ex) {
Log.e(TAG, "Failed to file actual capture request", ex);
}
//}
} catch (CameraAccessException ex) {
Log.e(TAG, "Failed to build actual capture request", ex);
}
//}
} else {
Log.e(TAG, "User attempted to perform a capture outside our session");
}
}
});
}
}, 0L, 180L); // interval: 180 milliseconds; 0 milliseconds before the first run.
// Control flow continues in mImageCaptureListener.onImageAvailable()
}
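For reference, the usual Camera2 way to get an uninterrupted stream is a repeating request aimed at the ImageReader, instead of individual capture() calls on a timer. A minimal sketch reusing the fields from the code above (not part of the original code):
try {
    CaptureRequest.Builder builder =
            mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
    builder.addTarget(mSurfaceView.getHolder().getSurface()); // keep the on-screen preview alive
    builder.addTarget(mCaptureBuffer.getSurface()); // every frame also reaches the reader
    mCaptureSession.setRepeatingRequest(builder.build(), /*listener*/null,
            mBackgroundHandler);
} catch (CameraAccessException ex) {
    Log.e(TAG, "Failed to start repeating capture", ex);
}
Note that a JPEG ImageReader can throttle the whole pipeline; to get near 15 fps it usually helps to use a smaller size or a YUV_420_888 reader and compress off the camera thread, and the listener must close every Image promptly because the reader above was created with maxImages 2.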
I could not solve the problem with the timer approach, so I proceeded as follows: whenever I needed to transfer an image, I grabbed the current picture from the SurfaceView (a screenshot) and sent it through the socket.
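For illustration, on API 24+ such a screenshot can be taken with PixelCopy. A sketch under that assumption, reusing names from the code above (the socket-sending part is elided):
// Copy the SurfaceView contents into a Bitmap, compress to JPEG,
// and reuse the existing socket-sending code with the resulting bytes.
Bitmap bitmap = Bitmap.createBitmap(mSurfaceView.getWidth(),
        mSurfaceView.getHeight(), Bitmap.Config.ARGB_8888);
PixelCopy.request(mSurfaceView, bitmap, copyResult -> {
    if (copyResult == PixelCopy.SUCCESS) {
        ByteArrayOutputStream jpegStream = new ByteArrayOutputStream();
        bitmap.compress(Bitmap.CompressFormat.JPEG, 80, jpegStream);
        // send jpegStream.toByteArray() over the socket as before
    }
}, mBackgroundHandler);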
Related
I am using the camera2 API. I need to take a photo from a service, without a preview. It works, but the photos have bad exposure: the pictures come out very dark or sometimes very light. How can I fix my code so that the photos are of good quality? I am using the front camera.
public class Camera2Service extends Service
{
protected static final String TAG = "myLog";
protected static final int CAMERACHOICE = CameraCharacteristics.LENS_FACING_BACK;
protected CameraDevice cameraDevice;
protected CameraCaptureSession session;
protected ImageReader imageReader;
protected CameraDevice.StateCallback cameraStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice camera) {
Log.d(TAG, "CameraDevice.StateCallback onOpened");
cameraDevice = camera;
actOnReadyCameraDevice();
}
@Override
public void onDisconnected(@NonNull CameraDevice camera) {
Log.w(TAG, "CameraDevice.StateCallback onDisconnected");
}
@Override
public void onError(@NonNull CameraDevice camera, int error) {
Log.e(TAG, "CameraDevice.StateCallback onError " + error);
}
};
protected CameraCaptureSession.StateCallback sessionStateCallback = new CameraCaptureSession.StateCallback() {
@Override
public void onReady(CameraCaptureSession session) {
Camera2Service.this.session = session;
try {
session.setRepeatingRequest(createCaptureRequest(), null, null);
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
}
}
@Override
public void onConfigured(CameraCaptureSession session) {
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
}
};
protected ImageReader.OnImageAvailableListener onImageAvailableListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Log.d(TAG, "onImageAvailable");
Image img = reader.acquireLatestImage();
if (img != null) {
processImage(img);
img.close();
}
}
};
public void readyCamera() {
CameraManager manager = (CameraManager) getSystemService(CAMERA_SERVICE);
try {
String pickedCamera = getCamera(manager);
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
return;
}
manager.openCamera(pickedCamera, cameraStateCallback, null);
imageReader = ImageReader.newInstance(1920, 1088, ImageFormat.JPEG, 2 /* images buffered */);
imageReader.setOnImageAvailableListener(onImageAvailableListener, null);
Log.d(TAG, "imageReader created");
} catch (CameraAccessException e){
Log.e(TAG, e.getMessage());
}
}
public String getCamera(CameraManager manager){
try {
for (String cameraId : manager.getCameraIdList()) {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
int cOrientation = characteristics.get(CameraCharacteristics.LENS_FACING);
if (cOrientation != CAMERACHOICE) {
return cameraId;
}
}
} catch (CameraAccessException e){
e.printStackTrace();
}
return null;
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
Log.d(TAG, "onStartCommand flags " + flags + " startId " + startId);
readyCamera();
return super.onStartCommand(intent, flags, startId);
}
@Override
public void onCreate() {
Log.d(TAG,"onCreate service");
super.onCreate();
}
public void actOnReadyCameraDevice()
{
try {
cameraDevice.createCaptureSession(Arrays.asList(imageReader.getSurface()), sessionStateCallback, null);
} catch (CameraAccessException e){
Log.e(TAG, e.getMessage());
}
}
@Override
public void onDestroy() {
try {
session.abortCaptures();
} catch (CameraAccessException e){
Log.e(TAG, e.getMessage());
}
session.close();
}
private void processImage(Image image){
//Process image data
ByteBuffer buffer;
byte[] bytes;
boolean success = false;
File file = new File(Environment.getExternalStorageDirectory() + "/Pictures/image.jpg");
FileOutputStream output = null;
if(image.getFormat() == ImageFormat.JPEG) {
buffer = image.getPlanes()[0].getBuffer();
bytes = new byte[buffer.remaining()]; // makes byte array large enough to hold image
buffer.get(bytes); // copies image from buffer to byte array
try {
output = new FileOutputStream(file);
output.write(bytes); // write the byte array to file
success = true;
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
image.close(); // close this to free up buffer for other images
if (null != output) {
try {
output.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}
protected CaptureRequest createCaptureRequest() {
try {
CaptureRequest.Builder builder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
builder.addTarget(imageReader.getSurface());
return builder.build();
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
return null;
}
}
@Override
public IBinder onBind(Intent intent) {
return null;
}
}
Sergey, I copied your code and indeed I was able to reproduce the issue: I got totally black pictures out of a Google Pixel 2 (Android 8.1).
However, I have successfully resolved the black-pic issue as follows:
First, in case anyone is wondering, you actually do NOT need any Activity, or any preview UI element as many other threads about the Camera API claim! That used to be true for the deprecated Camera v1 API. Now, with the new Camera v2 API, all I needed was a foreground service.
To start the capturing process, I used this code:
CaptureRequest.Builder builder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_VIDEO_SNAPSHOT);
builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
builder.addTarget(imageReader.getSurface());
captureRequest = builder.build();
Then, in ImageReader.onImageAvailable, I skipped the first N pictures (meaning I did not save them). I let the session run and capture more pictures without saving them.
That gave the camera enough time to gradually adjust the exposure parameters on its own. Then, after the N ignored photos, I saved one, and it was normally exposed, not black at all.
The value of the N constant depends on the characteristics of your hardware, so you will need to determine the ideal value of N experimentally. You can also use histogram-based heuristic automation. At the beginning of your experiments, don't be afraid to start saving only after hundreds of milliseconds of calibration have passed.
Finally, a lot of similar threads suggest just waiting, e.g. 500 ms, after creating the session and only then taking a single picture. That does not help. One really has to let the camera run and take many pictures rapidly (at the fastest rate possible). For that, simply use the setRepeatingRequest method (as in your original code).
Hope this helps. :)
EDITED TO ADD: when skipping the initial N pictures, you still need to call ImageReader's acquireLatestImage method for each of those skipped pictures too. Otherwise, it won't work.
Full original code with my changes incorporated that resolved the issue, tested and confirmed as working on Google Pixel 2, Android 8.1:
public class Camera2Service extends Service
{
protected static final int CAMERA_CALIBRATION_DELAY = 500;
protected static final String TAG = "myLog";
protected static final int CAMERACHOICE = CameraCharacteristics.LENS_FACING_BACK;
protected static long cameraCaptureStartTime;
protected CameraDevice cameraDevice;
protected CameraCaptureSession session;
protected ImageReader imageReader;
protected CameraDevice.StateCallback cameraStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice camera) {
Log.d(TAG, "CameraDevice.StateCallback onOpened");
cameraDevice = camera;
actOnReadyCameraDevice();
}
@Override
public void onDisconnected(@NonNull CameraDevice camera) {
Log.w(TAG, "CameraDevice.StateCallback onDisconnected");
}
@Override
public void onError(@NonNull CameraDevice camera, int error) {
Log.e(TAG, "CameraDevice.StateCallback onError " + error);
}
};
protected CameraCaptureSession.StateCallback sessionStateCallback = new CameraCaptureSession.StateCallback() {
@Override
public void onReady(CameraCaptureSession session) {
Camera2Service.this.session = session;
try {
session.setRepeatingRequest(createCaptureRequest(), null, null);
cameraCaptureStartTime = System.currentTimeMillis ();
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
}
}
@Override
public void onConfigured(CameraCaptureSession session) {
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
}
};
protected ImageReader.OnImageAvailableListener onImageAvailableListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Log.d(TAG, "onImageAvailable");
Image img = reader.acquireLatestImage();
if (img != null) {
if (System.currentTimeMillis () > cameraCaptureStartTime + CAMERA_CALIBRATION_DELAY) {
processImage(img);
}
img.close();
}
}
};
public void readyCamera() {
CameraManager manager = (CameraManager) getSystemService(CAMERA_SERVICE);
try {
String pickedCamera = getCamera(manager);
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
return;
}
manager.openCamera(pickedCamera, cameraStateCallback, null);
imageReader = ImageReader.newInstance(1920, 1088, ImageFormat.JPEG, 2 /* images buffered */);
imageReader.setOnImageAvailableListener(onImageAvailableListener, null);
Log.d(TAG, "imageReader created");
} catch (CameraAccessException e){
Log.e(TAG, e.getMessage());
}
}
public String getCamera(CameraManager manager){
try {
for (String cameraId : manager.getCameraIdList()) {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
int cOrientation = characteristics.get(CameraCharacteristics.LENS_FACING);
if (cOrientation == CAMERACHOICE) {
return cameraId;
}
}
} catch (CameraAccessException e){
e.printStackTrace();
}
return null;
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
Log.d(TAG, "onStartCommand flags " + flags + " startId " + startId);
readyCamera();
return super.onStartCommand(intent, flags, startId);
}
@Override
public void onCreate() {
Log.d(TAG,"onCreate service");
super.onCreate();
}
public void actOnReadyCameraDevice()
{
try {
cameraDevice.createCaptureSession(Arrays.asList(imageReader.getSurface()), sessionStateCallback, null);
} catch (CameraAccessException e){
Log.e(TAG, e.getMessage());
}
}
@Override
public void onDestroy() {
try {
session.abortCaptures();
} catch (CameraAccessException e){
Log.e(TAG, e.getMessage());
}
session.close();
}
private void processImage(Image image){
//Process image data
ByteBuffer buffer;
byte[] bytes;
boolean success = false;
File file = new File(Environment.getExternalStorageDirectory() + "/Pictures/image.jpg");
FileOutputStream output = null;
if(image.getFormat() == ImageFormat.JPEG) {
buffer = image.getPlanes()[0].getBuffer();
bytes = new byte[buffer.remaining()]; // makes byte array large enough to hold image
buffer.get(bytes); // copies image from buffer to byte array
try {
output = new FileOutputStream(file);
output.write(bytes); // write the byte array to file
success = true;
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
image.close(); // close this to free up buffer for other images
if (null != output) {
try {
output.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}
protected CaptureRequest createCaptureRequest() {
try {
CaptureRequest.Builder builder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
builder.addTarget(imageReader.getSurface());
return builder.build();
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
return null;
}
}
@Override
public IBinder onBind(Intent intent) {
return null;
}
}
I am new to the camera2 API. I want to build an image-processing framework on my Android phone.
Step 1: use the Camera2 API to open a camera preview stream.
Step 2: feed the preview frame data to OpenCV for processing.
Step 3: display the processed result live on the screen.
Currently, I have finished Step 1 using an ImageReader and C++ OpenCV code. However, I don't know how to do Step 3.
How do I display the processed image on the screen? (I want to display the normal image and overlay an icon when I detect a predefined object.)
Here is the key code:
protected void createCameraPreview() {
try {
SurfaceTexture texture = textureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(imageDimension.getWidth(), imageDimension.getHeight());
// Surface surface = new Surface(texture);
Surface mImageSurface = mImageReader.getSurface();
captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
// captureRequestBuilder.addTarget(surface);
captureRequestBuilder.addTarget(mImageSurface);
cameraDevice.createCaptureSession(Arrays.asList(mImageSurface), new CameraCaptureSession.StateCallback(){
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
//The camera is already closed
if (null == cameraDevice) {
return;
}
cameraCaptureSessions = cameraCaptureSession;
updatePreview();
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
Toast.makeText(MainActivity.this, "Configuration change", Toast.LENGTH_SHORT).show();
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
protected void updatePreview() {
if(null == cameraDevice) {
Log.e(TAG, "updatePreview error, return");
}
try {
cameraCaptureSessions.setRepeatingRequest(captureRequestBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private final ImageReader.OnImageAvailableListener mOnImageAvailableListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Log.e(TAG, "onImageAvailable: " + count++);
Image img = null;
img = reader.acquireNextImage();
try {
if (img == null) throw new NullPointerException("cannot be null");
ByteBuffer buffer = img.getPlanes()[0].getBuffer();
byte[] data = new byte[buffer.remaining()];
buffer.get(data);
int width = img.getWidth();
int height = img.getHeight();
// ****try to get captured img for display here (synchronized)
// ****try to process image for detecting the object here (asynchronized)
} catch (NullPointerException ex) {
ex.printStackTrace();
}finally {
Log.e(TAG, "in the finally! ------------");
if (img != null)
img.close();
}
}
};
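For Step 3, one common approach is to turn the processed bytes back into a Bitmap and push it to a view on the UI thread. A minimal sketch, not from the post: processFrameNative is a hypothetical JNI call, and mOverlayView is an assumed ImageView stacked over the preview.
// Assumes the native OpenCV code returns an RGBA buffer of the same width/height.
byte[] processedRgba = processFrameNative(data, width, height); // hypothetical JNI call
Bitmap frame = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
frame.copyPixelsFromBuffer(ByteBuffer.wrap(processedRgba));
runOnUiThread(() -> mOverlayView.setImageBitmap(frame));
For the icon overlay, a simple option is a second small View above the preview whose visibility you toggle when the detector fires.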
I am trying to get JPEG images from both cameras in parallel on a Snapdragon 820 platform. I am not getting the first camera's image callback; I only get the second camera's JPEG callback.
Here is my code:
protected void takePictureBack() {
Log.d(TAG, "takePictureBack() called");
if (null == cameraDeviceBack) {
Log.e(TAG, "cameraDeviceBack is null");
return;
}
try {
final File file_back = new File(Environment.getExternalStorageDirectory() + "/pic_back.jpg");
final CaptureRequest.Builder captureBuilderBack = cameraDeviceBack.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
List<Surface> outputSurfaces = new ArrayList<Surface>(3);
outputSurfaces.add(new Surface(mTextureViewBack.getSurfaceTexture()));
ImageReader reader = ImageReader.newInstance(640, 480, ImageFormat.JPEG, 1);
outputSurfaces.add(reader.getSurface());
captureBuilderBack.addTarget(reader.getSurface());
ImageReader.OnImageAvailableListener readerListenerBack = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Log.d(TAG, "onImageAvailable() called with: reader = [" + reader + "]");
if (reader.getImageFormat() == ImageFormat.JPEG) {
Log.d(TAG, "onImageAvailable() called with back: reader = JPEG");
Image image = null;
try {
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
save(bytes);
} catch (IOException e) {
e.printStackTrace();
} finally {
if (image != null) {
image.close();
}
}
}
}
private void save(byte[] bytes) throws IOException {
OutputStream output = null;
try {
output = new FileOutputStream(file_back);
output.write(bytes);
} finally {
if (null != output) {
output.close();
}
}
}
};
reader.setOnImageAvailableListener(readerListenerBack, mBackgroundHandlerBack);
captureBuilderBack.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
final CameraCaptureSession.CaptureCallback captureListenerBack = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
if (DEBUG) Log.d(TAG, "onCaptureCompleted: take picture back successfully");
//Toast.makeText(getActivity(), "Take picture successfully", Toast.LENGTH_SHORT).show();
createCameraPreviewBack();
mCaptureResultBack = result;
}
};
cameraDeviceBack.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
session.capture(captureBuilderBack.build(), captureListenerBack, mBackgroundHandlerBack);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
}
}, mBackgroundHandlerBack);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
The front camera capture code is the same. Individual single-camera JPEG capture works fine.
Any idea why I am not getting both JPEG image callbacks?
I found the solution: the ImageReader reader needs to be a global variable (a field) rather than a local.
With this change I am able to get both JPEGs, one from each camera.
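For illustration, the fix amounts to promoting the reader from a local variable to a field, so it is not garbage-collected before its JPEG callback fires. A sketch of that change (field names assumed):
// Hold the readers as fields so they outlive takePictureBack()/takePictureFront().
private ImageReader mReaderBack;
private ImageReader mReaderFront;
// Inside takePictureBack(), instead of the local "ImageReader reader":
mReaderBack = ImageReader.newInstance(640, 480, ImageFormat.JPEG, 1);
outputSurfaces.add(mReaderBack.getSurface());
captureBuilderBack.addTarget(mReaderBack.getSurface());
mReaderBack.setOnImageAvailableListener(readerListenerBack, mBackgroundHandlerBack);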
I am developing a custom camera, but when I try to capture an image using the Camera2 API I get a black image. I am using the code below for the capture:
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
String cameraId = "";
if (cameraFront) {
cameraId = "" + findFrontFacingCamera();
} else {
cameraId = "" + findBackFacingCamera();
}
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
// CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraDevice.getId());
Size[] jpegSizes = null;
if (characteristics != null) {
jpegSizes =
characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(
ImageFormat.JPEG);
}
int width = 720;
int height = 640;
if (jpegSizes != null && 0 < jpegSizes.length) {
width = jpegSizes[0].getWidth();
height = jpegSizes[0].getHeight();
}
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size largest =
Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
ImageReader reader =
ImageReader.newInstance(largest.getWidth(), largest.getHeight(), ImageFormat.JPEG, /* maxImages */1);
// ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
List<Surface> outputSurfaces = new ArrayList<Surface>(2);
outputSurfaces.add(reader.getSurface());
outputSurfaces.add(new Surface(mTextureView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder =
mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureBuilder.addTarget(reader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
// Orientation
int rotation = getWindowManager().getDefaultDisplay().getRotation();
if (cameraFront) {
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation) + 180);
} else {
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
}
final File file = getOutputMediaFile();
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener()
{
@Override
public void onImageAvailable(ImageReader reader)
{
Image image = null;
try {
image = reader.acquireLatestImage();
// ByteBuffer buffer = image.getPlanes()[0].getBuffer();
// final byte[] bytes = new byte[buffer.capacity()];
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
final byte[] bytes = new byte[buffer.remaining()];
buffer.get(bytes);
save(bytes);
buffer.clear();
runOnUiThread(new Runnable()
{
@Override
public void run()
{
mThumbnail.setVisibility(View.VISIBLE);
filePathLabel.setVisibility(View.VISIBLE);
filePathValue.setText(file.getAbsolutePath());
Bitmap bmp =
UtilityMethods.getScaledBitmap(CameraImageTestActivityLoliipop.this, bytes);
mThumbnail.setImageBitmap(bmp);
}
});
} catch (FileNotFoundException e) {
AppLogger.exception(myContext, getClass().getSimpleName(), e);
// e.printStackTrace();
} catch (IOException e) {
AppLogger.exception(myContext, getClass().getSimpleName(), e);
// e.printStackTrace();
} finally {
if (image != null) {
image.close();
}
}
}
private void save(byte[] bytes) throws IOException
{
OutputStream output = null;
try {
output = new FileOutputStream(file);
output.write(bytes);
} finally {
if (null != output) {
output.close();
}
}
}
};
HandlerThread thread = new HandlerThread("CameraPicture");
thread.start();
final Handler backgroudHandler = new Handler(thread.getLooper());
reader.setOnImageAvailableListener(readerListener, backgroudHandler);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback()
{
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
TotalCaptureResult result)
{
super.onCaptureCompleted(session, request, result);
startPreview();
}
};
mCameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback()
{
@Override
public void onConfigured(CameraCaptureSession session)
{
try {
session.capture(captureBuilder.build(), captureListener, backgroudHandler);
} catch (CameraAccessException e) {
AppLogger.exception(myContext, getClass().getSimpleName(), e);
// e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session)
{
}
}, backgroudHandler);
} catch (CameraAccessException e) {
AppLogger.exception(myContext, getClass().getSimpleName(), e);
// e.printStackTrace();
}
And below are the methods for the camera preview:
protected void startPreview()
{
try {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
Log.e(TAG, "startPreview fail, return");
return;
}
SurfaceTexture texture = mTextureView.getSurfaceTexture();
if (null == texture) {
Log.e(TAG, "texture is null, return");
return;
}
Log.e(TAG, "Width: " + mPreviewSize.getWidth() + " Hieght : " + mPreviewSize.getHeight());
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface surface = new Surface(texture);
try {
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
} catch (CameraAccessException e) {
AppLogger.exception(myContext, getClass().getSimpleName(), e);
// e.printStackTrace();
}
mPreviewBuilder.addTarget(surface);
mCameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback()
{
@Override
public void onConfigured(CameraCaptureSession session)
{
mPreviewSession = session;
updatePreview();
}
@Override
public void onConfigureFailed(CameraCaptureSession session)
{
}
}, null);
} catch (CameraAccessException e) {
AppLogger.exception(myContext, getClass().getSimpleName(), e);
// e.printStackTrace();
}
}
protected void updatePreview()
{
try {
if (null == mCameraDevice) {
Log.e(TAG, "updatePreview error, return");
}
mPreviewBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
// mPreviewBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
// Flash is automatically enabled when necessary.
mPreviewBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
HandlerThread thread = new HandlerThread("CameraPreview");
thread.start();
Handler backgroundHandler = new Handler(thread.getLooper());
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, backgroundHandler);
} catch (CameraAccessException e) {
}
}
It works on all devices, but when I run it on a Micromax Q382 I get a black image, with the warnings below in logcat:
I/Choreographer: Skipped 37 frames! The application may be doing too much work on its main thread.
W/ImageReader_JNI: Unable to acquire a lockedBuffer, very likely client tries to lock more than maxImages buffers
I have no idea what is happening. Please help me.
You're passing a null argument to createCaptureSession inside startPreview. Make the backgroudHandler used earlier visible to that method (pass it as a parameter or make it a class field) and use it there as well.
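A sketch of that change (the field name is assumed):
// Create one background handler up front and reuse it everywhere.
private Handler mBackgroundHandler;
private void startBackgroundThread() {
    HandlerThread thread = new HandlerThread("CameraBackground");
    thread.start();
    mBackgroundHandler = new Handler(thread.getLooper());
}
// ...then pass mBackgroundHandler instead of null to createCaptureSession()
// in startPreview(), and use it for setRepeatingRequest() in updatePreview() too.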
You could try adding some delay before starting the capture (and after opening the camera), something like:
new Handler().postDelayed(() -> {
//takePicture();
}, 500);
If you want, I've created a service that greatly simplifies photo capture with the Android Camera 2 API: https://github.com/hzitoun/android-camera2-secret-picture-taker . Usage is described in the readme file.
Hope that helped!
You're setting the AE mode to 'always flash', but then you don't run the precapture sequence to allow the camera device to meter for that flash; this will likely not work very well on any device, and on some devices you may end up with some default exposure value (bad).
If you want to fire the flash, you need to use a precapture sequence first (send a single request with AE_PRECAPTURE_TRIGGER set, wait for the PRECAPTURE AE_STATE to end, then issue the capture request), on non-LEGACY devices. If the device is LEGACY-level, then your current code should be OK for those.
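A minimal sketch of that sequence, compressed from the Camera2Basic sample's state machine; mPreviewBuilder and mPreviewSession come from the code above, while mBackgroundHandler and captureStillPicture() are assumed names:
private boolean mAwaitingPrecapture;
void runPrecaptureThenCapture() throws CameraAccessException {
    // Watch the AE state on every preview frame.
    mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), mAeCallback,
            mBackgroundHandler);
    // Fire the precapture metering trigger exactly once.
    mPreviewBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
            CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);
    mAwaitingPrecapture = true;
    mPreviewSession.capture(mPreviewBuilder.build(), mAeCallback, mBackgroundHandler);
}
private final CameraCaptureSession.CaptureCallback mAeCallback =
        new CameraCaptureSession.CaptureCallback() {
    @Override
    public void onCaptureCompleted(CameraCaptureSession session,
            CaptureRequest request, TotalCaptureResult result) {
        Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
        // Once AE settles after the trigger, take the real shot.
        if (mAwaitingPrecapture && aeState != null
                && (aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED
                || aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED)) {
            mAwaitingPrecapture = false;
            captureStillPicture(); // the existing still-capture request
        }
    }
};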
I published my app on Google Play.
The activity records video with the front camera; if it is not available, it uses the back camera instead.
In the crash section I received this output:
java.lang.RuntimeException: stop failed.
at android.media.MediaRecorder.stop(Native Method)
at com.example.uploadvideo.MainActivity.onStopped(MainActivity.java:192)
at com.google.android.youtube.player.internal.s$4.c(Unknown Source)
at com.google.android.youtube.player.internal.f$a.onTransact(Unknown Source)
at android.os.Binder.transact(Binder.java:326)
at com.google.android.youtube.player.internal.n.c(SourceFile:144)
at com.google.android.youtube.api.jar.client.e.run(SourceFile:797)
at android.os.Handler.handleCallback(Handler.java:615)
at android.os.Handler.dispatchMessage(Handler.java:92)
at android.os.Looper.loop(Looper.java:137)
at android.app.ActivityThread.main(ActivityThread.java:4921)
at java.lang.reflect.Method.invokeNative(Native Method)
at java.lang.reflect.Method.invoke(Method.java:511)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1027)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:794)
at dalvik.system.NativeStart.main(Native Method)
I have no idea what caused this, and searching didn't help, but I believe it is caused by the surfaceCreated method. I think it comes from this section:
/** A basic Camera preview class */
public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback {
private String TAG = "CameraPreview";
private SurfaceHolder mHolder;
private Camera mCamera ;
public CameraPreview(Context context, Camera camera) {
super(context);
mCamera = camera;
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = getHolder();
mHolder.addCallback(this);
// deprecated setting, but required on Android versions prior to 3.0
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder holder){
// The Surface has been created, now tell the camera where to draw the
// preview.
Log.d(TAG, "surfaceCreated camera id" + mCamera);
try {
CamcorderProfile profile ;
int numCameras = Camera.getNumberOfCameras();
if (numCameras > 1) {
profile = (CamcorderProfile
.get(Camera.CameraInfo.CAMERA_FACING_FRONT,CamcorderProfile.QUALITY_HIGH));
}
else{
profile = (CamcorderProfile
.get(Camera.CameraInfo.CAMERA_FACING_BACK,CamcorderProfile.QUALITY_HIGH));
}
Camera.Parameters parameters = mCamera.getParameters();
parameters.setPreviewSize(profile.videoFrameWidth, profile.videoFrameHeight);
mCamera.setParameters(parameters);
mCamera.setPreviewDisplay(holder);
mCamera.startPreview();
}
catch (IOException e) {
Log.d(TAG, "Error setting camera preview: " + e.getMessage());
}}
public void surfaceDestroyed(SurfaceHolder holder) {
// empty. Take care of releasing the Camera preview in your activity.
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
// If your preview can change or rotate, take care of those events here.
// Make sure to stop the preview before resizing or reformatting it.
Log.d(TAG, "surfaceChanged to " + "," + w + "," + h);
if (mHolder.getSurface() == null){
// preview surface does not exist
return;
}
// stop preview before making changes
try {
mCamera.stopPreview();
} catch (Exception e){
// ignore: tried to stop a non-existent preview
}
// set preview size and make any resize, rotate or
// reformatting changes here
// start preview with new settings
try {
mCamera.setPreviewDisplay(mHolder);
mCamera.startPreview();
} catch (Exception e){
Log.d(TAG, "Error starting camera preview: " + e.getMessage());
}
}
}
private boolean prepareVideoRecorder() {
mMediaRecorder = new MediaRecorder();
// Step 1: Unlock and set camera to MediaRecorder
mCamera.unlock();
mMediaRecorder.setCamera(mCamera);
// Step 2: Set sources
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
// Step 3: Set a CamcorderProfile (requires API Level 8 or higher)
int numCameras = Camera.getNumberOfCameras();
if (numCameras > 1) {
mMediaRecorder.setProfile(CamcorderProfile
.get(Camera.CameraInfo.CAMERA_FACING_FRONT,CamcorderProfile.QUALITY_HIGH));
}
else{
mMediaRecorder.setProfile(CamcorderProfile
.get(Camera.CameraInfo.CAMERA_FACING_BACK,CamcorderProfile.QUALITY_HIGH));
}
// Step 4: Set output file
mMediaRecorder.setOutputFile(getOutputMediaFile(MEDIA_TYPE_VIDEO)
.toString());
outputFileName = getOutputMediaFile(MEDIA_TYPE_VIDEO).toString();
Log.d(TAG,"idan outputFileName" + outputFileName);
// Step 5: Set the preview output
// mMediaRecorder.setVideoSize(640, 480); //try
mMediaRecorder.setPreviewDisplay(mPreview.getHolder().getSurface());
// Step 6: Prepare configured MediaRecorder
try {
mMediaRecorder.prepare();
} catch (IllegalStateException e) {
Log.d(TAG,
"IllegalStateException preparing MediaRecorder: "
+ e.getMessage());
releaseMediaRecorder();
return false;
} catch (IOException e) {
Log.d(TAG, "IOException preparing MediaRecorder: " + e.getMessage());
releaseMediaRecorder();
return false;
}
return true;
}
/** Create a file Uri for saving an image or video */
private Uri getOutputMediaFileUri(int type) {
return Uri.fromFile(getOutputMediaFile(type));
}
You can use this code for your video recording; please check:
public class VideoCaptureActivity extends Activity {
private static final String TAG = "VideoCaptureActivity";
///////////////--------------------------
CountDownTimer ctimer;
//private static final int H264 = 0;
TextView timerText;
Camera camera;
ImageButton recordButton;
ImageButton stopButton;
FrameLayout cameraPreviewFrame;
CameraPreview cameraPreview;
MediaRecorder mediaRecorder;
File file;
public String holdVideoName = new String();
////////////////----------------------------
private String vpath;
@Override
public void onCreate(Bundle bundle) {
super.onCreate(bundle);
super.setContentView(R.layout.video_capture);
this.cameraPreviewFrame = (FrameLayout)super.findViewById(R.id.camera_preview);
this.recordButton = (ImageButton)super.findViewById(R.id.recordButton);
this.stopButton = (ImageButton)super.findViewById(R.id.stopButton);
this.timerText=(TextView)super.findViewById(R.id.textViewtimer);
this.toggleButtons(false);
//------------- we'll enable this button once the camera is ready
this.recordButton.setEnabled(false);
}
@Override
protected void onResume() {
super.onResume();
// initialize the camera in background, as this may take a while
new AsyncTask<Void, Void, Camera>() {
@Override
protected Camera doInBackground(Void... params) {
Camera camera=null;
try {
int numCameras = Camera.getNumberOfCameras();
System.out.println("number of cameara is"+numCameras );
if (numCameras ==1) {
System.out.println("camera 1");
camera = Camera.open(CameraInfo.CAMERA_FACING_BACK);
}
else{
camera = Camera.open(CameraInfo.CAMERA_FACING_FRONT);
}
return camera == null ? Camera.open() : camera;
} catch (RuntimeException e) {
return null;
}
}
@Override
protected void onPostExecute(Camera camera) {
if (camera == null) {
Toast.makeText(VideoCaptureActivity.this, R.string.cannot_record,
Toast.LENGTH_SHORT).show();
} else {
VideoCaptureActivity.this.initCamera(camera);
}
}
}.execute();
}
void initCamera(Camera camera) {
// we now have the camera
this.camera = camera;
// create a preview for our camera
this.cameraPreview = new CameraPreview(VideoCaptureActivity.this, this.camera);
// add the preview to our preview frame
this.cameraPreviewFrame.addView(this.cameraPreview, 0);
// enable just the record button
this.recordButton.setEnabled(true);
}
void releaseCamera() {
if (this.camera != null) {
this.camera.lock(); // unnecessary in API >= 14
this.camera.stopPreview();
this.camera.release();
this.camera = null;
this.cameraPreviewFrame.removeView(this.cameraPreview);
}
}
void releaseMediaRecorder() {
if (this.mediaRecorder != null) {
this.mediaRecorder.reset(); // clear configuration (optional here)
this.mediaRecorder.release();
this.mediaRecorder = null;
}
}
void releaseResources() {
this.releaseMediaRecorder();
this.releaseCamera();
}
@Override
public void onPause() {
super.onPause();
this.releaseResources();
}
// gets called by the button press
public void startRecording(View v) {
Log.d(TAG, "startRecording()");
// we need to unlock the camera so that mediaRecorder can use it
this.camera.unlock(); // unnecessary in API >= 14
// now we can initialize the media recorder and set it up with our
// camera
ctimer = new CountDownTimer(10000, 1000) {
@Override
public void onTick(long millisUntilFinished) {
// TODO Auto-generated method stub
timerText.setText("seconds remaining \n 00: " + millisUntilFinished / 1000);
}
@Override
public void onFinish() {
// TODO Auto-generated method stub
timerText.setText("done!");
stopRecordingg();
}
};
ctimer.start();
this.mediaRecorder = new MediaRecorder();
this.mediaRecorder.setCamera(this.camera);
this.mediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
this.mediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
this.mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.DEFAULT);
// this.mediaRecorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_480P));
this.mediaRecorder.setMaxDuration(10000);
this.mediaRecorder.setOutputFile(this.initFile().getAbsolutePath());
this.mediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.DEFAULT);
this.mediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
this.mediaRecorder.setVideoSize(640,480);
// this.mediaRecorder.setVideoFrameRate(12);
// this.mediaRecorder.setOnInfoListener(null);
// this.mediaRecorder.setOutputFile("/sdcard/videocapture_example.mp4");
try {
this.mediaRecorder.setPreviewDisplay(this.cameraPreview.getHolder().getSurface());
this.mediaRecorder.prepare();
// start the actual recording
// throws IllegalStateException if not prepared
this.mediaRecorder.start();
Toast.makeText(this, R.string.recording, Toast.LENGTH_SHORT).show();
// enable the stop button by indicating that we are recording
this.toggleButtons(true);
} catch (Exception e) {
Log.wtf(TAG, "Failed to prepare MediaRecorder", e);
Toast.makeText(this,"record nathi thatu...", Toast.LENGTH_SHORT).show();
this.releaseMediaRecorder();
}
}
protected void stopRecordingg() {
// TODO Auto-generated method stub
Log.d(TAG, "stopRecording()");
assert this.mediaRecorder != null;
try {
// this.mediaRecorder.stop();
Toast.makeText(this, R.string.saved, Toast.LENGTH_SHORT).show();
// we are no longer recording
this.toggleButtons(false);
} catch (RuntimeException e) {
// the recording did not succeed
Log.w(TAG, "Failed to record", e);
if (this.file != null && this.file.exists() && this.file.delete()) {
Log.d(TAG, "Deleted " + this.file.getAbsolutePath());
}
return;
} finally {
this.releaseMediaRecorder();
}
if (this.file == null || !this.file.exists()) {
Log.w(TAG, "File does not exist after stop: " + this.file.getAbsolutePath());
} else {
Log.d(TAG, "Going to display the video: " + this.file.getAbsolutePath());
Intent intent = new Intent(this, VideoPlayBack.class);
intent.setData(Uri.fromFile(file));
intent.putExtra("hold",getIntent().getExtras().getString("hold"));
intent.putExtra("cwi",getIntent().getExtras().getString("cwi"));
// intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
startActivity(intent);
this.finish();
}
}
// gets called by the button press
public void stopRecording(View v) {
Log.d(TAG, "stopRecording()");
ctimer.cancel();
assert this.mediaRecorder != null;
try {
this.mediaRecorder.stop();
Toast.makeText(this, R.string.saved, Toast.LENGTH_SHORT).show();
// we are no longer recording
this.toggleButtons(false);
} catch (RuntimeException e) {
// the recording did not succeed
Log.w(TAG, "Failed to record", e);
if (this.file != null && this.file.exists() && this.file.delete()) {
Log.d(TAG, "Deleted " + this.file.getAbsolutePath());
}
return;
} finally {
this.releaseMediaRecorder();
}
if (this.file == null || !this.file.exists()) {
Log.w(TAG, "File does not exist after stop: " + this.file.getAbsolutePath());
} else {
Log.d(TAG, "Going to display the video: " + this.file.getAbsolutePath());
Intent intent = new Intent(this, VideoPlayBack.class);
intent.setData(Uri.fromFile(file));
this.finish();
super.startActivity(intent);
}
}
private File initFile() {
// holdVideoName= getIntent().getExtras().getString("vpath","");
//System.out.println("vpath is:"+vpath);
// File dir = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MOVIES), "namefolder");
File dir = new File("/sdcard/test/");
if (!dir.exists() && !dir.mkdirs()) {
Log.wtf(TAG, "Failed to create storage directory: " + dir.getAbsolutePath());
this.file = null;
} else {
this.file = new File("/sdcard/test/"+getIntent().getExtras().getString("hold")+".mp4");
}
return this.file;
}
}