I'm using FFmpeg (through JavaCV's FFmpegFrameRecorder) to capture video for 30 seconds. This is my onPreviewFrame callback:
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
if (yuvIplimage != null && recording && rec)
{
new SaveFrame().execute(data);
}
}
}
The SaveFrame class is below:
private class SaveFrame extends AsyncTask<byte[], Void, File> {
long t;
protected File doInBackground(byte[]... arg) {
t = 1000 * (System.currentTimeMillis() - firstTime - pausedTime);
toSaveFrames++;
File pathCache = new File(Environment.getExternalStorageDirectory()+"/DCIM", (System.currentTimeMillis() / 1000L)+ "_" + toSaveFrames + ".tmp");
BufferedOutputStream bos;
try {
bos = new BufferedOutputStream(new FileOutputStream(pathCache));
bos.write(arg[0]);
bos.flush();
bos.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
pathCache = null;
toSaveFrames--;
} catch (IOException e) {
e.printStackTrace();
pathCache = null;
toSaveFrames--;
}
return pathCache;
}
@Override
protected void onPostExecute(File filename)
{
if(filename!=null)
{
savedFrames++;
tempList.add(new FileFrame(t,filename));
}
}
}
Finally, I add all the frames with crop and rotation:
private class AddFrame extends AsyncTask<Void, Integer, Void> {
private int serial = 0;
@Override
protected Void doInBackground(Void... params) {
for(int i=0; i<tempList.size(); i++)
{
byte[] bytes = new byte[(int) tempList.get(i).file.length()];
try {
BufferedInputStream buf = new BufferedInputStream(new FileInputStream(tempList.get(i).file));
buf.read(bytes, 0, bytes.length);
buf.close();
IplImage image = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2);
// final int startY = 640*(480-480)/2;
// final int lenY = 640*480;
// yuvIplimage.getByteBuffer().put(bytes, startY, lenY);
// final int startVU = 640*480+ 640*(480-480)/4;
// final int lenVU = 640* 480/2;
// yuvIplimage.getByteBuffer().put(bytes, startVU, lenVU);
if (tempList.get(i).time > recorder.getTimestamp()) {
recorder.setTimestamp(tempList.get(i).time);
}
image = cropImage(image);
image = rotate(image, 270);
// image = rotateImage(image);
recorder.record(image);
Log.i(LOG_TAG, "record " + i);
image = null;
serial++;
publishProgress(serial);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} catch (com.googlecode.javacv.FrameRecorder.Exception e) {
e.printStackTrace();
}
}
return null;
}
@Override
protected void onProgressUpdate(Integer... serial) {
int value = serial[0];
creatingProgress.setProgress(value);
}
@Override
protected void onPostExecute(Void v)
{
creatingProgress.dismiss();
if (recorder != null && recording) {
recording = false;
Log.v(LOG_TAG,"Finishing recording, calling stop and release on recorder");
try {
recorder.stop();
recorder.release();
finish();
startActivity(new Intent(RecordActivity.this,AnswerViewActivity.class));
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}
}
My crop and rotate methods are below:
private IplImage cropImage(IplImage src)
{
cvSetImageROI(src, r);
IplImage cropped = IplImage.create(imageHeight, imageHeight, IPL_DEPTH_8U, 2);
cvCopy(src, cropped);
return cropped;
}
public static IplImage rotate(IplImage image, double angle) {
IplImage copy = opencv_core.cvCloneImage(image);
IplImage rotatedImage = opencv_core.cvCreateImage(opencv_core.cvGetSize(copy), copy.depth(), copy.nChannels());
CvMat mapMatrix = opencv_core.cvCreateMat( 2, 3, opencv_core.CV_32FC1 );
//Define Mid Point
CvPoint2D32f centerPoint = new CvPoint2D32f();
centerPoint.x(copy.width()/2);
centerPoint.y(copy.height()/2);
//Get Rotational Matrix
opencv_imgproc.cv2DRotationMatrix(centerPoint, angle, 1.0, mapMatrix);
//Rotate the Image
opencv_imgproc.cvWarpAffine(copy, rotatedImage, mapMatrix, opencv_imgproc.CV_INTER_CUBIC + opencv_imgproc.CV_WARP_FILL_OUTLIERS, opencv_core.cvScalarAll(170));
opencv_core.cvReleaseImage(copy);
opencv_core.cvReleaseMat(mapMatrix);
return rotatedImage;
}
My final video is cropped and rotated, but green frames and discolored frames are mixed in with it.
How can I fix this? I'm not familiar with IplImage. Some blogs mention that the data is in YUV format, and that you first need to handle the Y plane and then the UV plane.
How do I solve this problem?
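For reference, this is roughly what "handle Y first, then UV" means for an NV21 preview frame: a minimal per-plane crop sketch with hypothetical names (cropNv21 and its arguments are illustrations, not my actual code), assuming even crop offsets and sizes.
static byte[] cropNv21(byte[] src, int width, int height, int cropX, int cropY, int cropW, int cropH) {
    byte[] dst = new byte[cropW * cropH * 3 / 2];
    // 1) Y plane: one byte per pixel, copied row by row.
    for (int row = 0; row < cropH; row++) {
        System.arraycopy(src, (cropY + row) * width + cropX, dst, row * cropW, cropW);
    }
    // 2) VU plane: interleaved V/U pairs, half the height, same row stride as Y.
    int srcUv = width * height;
    int dstUv = cropW * cropH;
    for (int row = 0; row < cropH / 2; row++) {
        System.arraycopy(src, srcUv + (cropY / 2 + row) * width + cropX, dst, dstUv + row * cropW, cropW);
    }
    return dst;
}
A rotation would have to treat the two planes separately in the same way.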
I have modified the onPreviewFrame method of this open-source Android Touch-To-Record library to transpose and resize a captured frame.
I defined "yuvIplImage" as following in my setCameraParams() method.
IplImage yuvIplImage = IplImage.create(mPreviewSize.height, mPreviewSize.width, opencv_core.IPL_DEPTH_8U, 2);
Also initialize your videoRecorder object as follows, giving the width as the height and vice versa.
//call initVideoRecorder() method like this to initialize videoRecorder object of FFmpegFrameRecorder class.
initVideoRecorder(strVideoPath, mPreview.getPreviewSize().height, mPreview.getPreviewSize().width, recorderParameters);
//method implementation
public void initVideoRecorder(String videoPath, int width, int height, RecorderParameters recorderParameters)
{
Log.e(TAG, "initVideoRecorder");
videoRecorder = new FFmpegFrameRecorder(videoPath, width, height, 1);
videoRecorder.setFormat(recorderParameters.getVideoOutputFormat());
videoRecorder.setSampleRate(recorderParameters.getAudioSamplingRate());
videoRecorder.setFrameRate(recorderParameters.getVideoFrameRate());
videoRecorder.setVideoCodec(recorderParameters.getVideoCodec());
videoRecorder.setVideoQuality(recorderParameters.getVideoQuality());
videoRecorder.setAudioQuality(recorderParameters.getVideoQuality());
videoRecorder.setAudioCodec(recorderParameters.getAudioCodec());
videoRecorder.setVideoBitrate(1000000);
videoRecorder.setAudioBitrate(64000);
}
This is my onPreviewFrame() method:
@Override
public void onPreviewFrame(byte[] data, Camera camera)
{
long frameTimeStamp = 0L;
if(FragmentCamera.mAudioTimestamp == 0L && FragmentCamera.firstTime > 0L)
{
frameTimeStamp = 1000L * (System.currentTimeMillis() - FragmentCamera.firstTime);
}
else if(FragmentCamera.mLastAudioTimestamp == FragmentCamera.mAudioTimestamp)
{
frameTimeStamp = FragmentCamera.mAudioTimestamp + FragmentCamera.frameTime;
}
else
{
long l2 = (System.nanoTime() - FragmentCamera.mAudioTimeRecorded) / 1000L;
frameTimeStamp = l2 + FragmentCamera.mAudioTimestamp;
FragmentCamera.mLastAudioTimestamp = FragmentCamera.mAudioTimestamp;
}
synchronized(FragmentCamera.mVideoRecordLock)
{
if(FragmentCamera.recording && FragmentCamera.rec && lastSavedframe != null && lastSavedframe.getFrameBytesData() != null && yuvIplImage != null)
{
FragmentCamera.mVideoTimestamp += FragmentCamera.frameTime;
if(lastSavedframe.getTimeStamp() > FragmentCamera.mVideoTimestamp)
{
FragmentCamera.mVideoTimestamp = lastSavedframe.getTimeStamp();
}
try
{
yuvIplImage.getByteBuffer().put(lastSavedframe.getFrameBytesData());
IplImage bgrImage = IplImage.create(mPreviewSize.width, mPreviewSize.height, opencv_core.IPL_DEPTH_8U, 4);// In my case, mPreviewSize.width = 1280 and mPreviewSize.height = 720
IplImage transposed = IplImage.create(mPreviewSize.height, mPreviewSize.width, yuvIplImage.depth(), 4);
IplImage squared = IplImage.create(mPreviewSize.height, mPreviewSize.height, yuvIplImage.depth(), 4);
int[] _temp = new int[mPreviewSize.width * mPreviewSize.height];
Util.YUV_NV21_TO_BGR(_temp, data, mPreviewSize.width, mPreviewSize.height);
bgrImage.getIntBuffer().put(_temp);
opencv_core.cvTranspose(bgrImage, transposed);
opencv_core.cvFlip(transposed, transposed, 1);
opencv_core.cvSetImageROI(transposed, opencv_core.cvRect(0, 0, mPreviewSize.height, mPreviewSize.height));
opencv_core.cvCopy(transposed, squared, null);
opencv_core.cvResetImageROI(transposed);
videoRecorder.setTimestamp(lastSavedframe.getTimeStamp());
videoRecorder.record(squared);
}
catch(com.googlecode.javacv.FrameRecorder.Exception e)
{
e.printStackTrace();
}
}
lastSavedframe = new SavedFrames(data, frameTimeStamp);
}
}
This code uses a method YUV_NV21_TO_BGR, which I found from this link.
Basically this method is used to resolve what I call "the Green Devil problem on Android", just like yours. I had the same issue and wasted almost 3-4 days. Before adding the YUV_NV21_TO_BGR method, when I just took the transpose of the YuvIplImage (or, more importantly, a combination of transpose and flip, with or without resizing), the resulting video had a greenish cast. This YUV_NV21_TO_BGR method saved the day. Thanks to @David Han from the Google Groups thread above.
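The helper isn't pasted in this answer, but a conversion of this kind is typically a fixed-point loop roughly like the following (a sketch; the exact constants and channel packing of the actual YUV_NV21_TO_BGR may differ, and the output int layout must match the 4-channel IplImage buffer):
public static void yuvNv21ToColor(int[] out, byte[] nv21, int width, int height) {
    final int frameSize = width * height;
    for (int j = 0, yp = 0; j < height; j++) {
        int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
        for (int i = 0; i < width; i++, yp++) {
            int y = (0xff & nv21[yp]) - 16;
            if (y < 0) y = 0;
            if ((i & 1) == 0) {                  // one V,U pair per two pixels
                v = (0xff & nv21[uvp++]) - 128;
                u = (0xff & nv21[uvp++]) - 128;
            }
            int y1192 = 1192 * y;
            int r = y1192 + 1634 * v;
            int g = y1192 - 833 * v - 400 * u;
            int b = y1192 + 2066 * u;
            r = Math.max(0, Math.min(262143, r));
            g = Math.max(0, Math.min(262143, g));
            b = Math.max(0, Math.min(262143, b));
            // Packs ARGB; swap the r and b terms if the target buffer expects BGR order.
            out[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
        }
    }
}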
Also, you should know that doing all this processing (transpose, flip and resize) in onPreviewFrame takes a lot of time, which causes a very serious hit to the frames-per-second (FPS) rate. When I used this code inside the onPreviewFrame method, the FPS of the recorded video dropped from 30 fps to 3 fps.
I would advise against this approach. Instead, you can do the processing (transpose, flip and resize) of your video file after recording, using JavaCV in an AsyncTask. Hope this helps.
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
//IplImage newImage = cvCreateImage(cvGetSize(yuvIplimage), IPL_DEPTH_8U, 1);
if (recording) {
videoTimestamp = 1000 * (System.currentTimeMillis() - startTime);
yuvimage = IplImage.create(imageWidth, imageHeight * 3 / 2, IPL_DEPTH_8U,1);
yuvimage.getByteBuffer().put(data);
rgbimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 3);
opencv_imgproc.cvCvtColor(yuvimage, rgbimage, opencv_imgproc.CV_YUV2BGR_NV21);
IplImage rotateimage=null;
try {
recorder.setTimestamp(videoTimestamp);
int rot=0;
switch (degrees) {
case 0:
rot =1;
rotateimage=rotate(rgbimage,rot);
break;
case 180:
rot = -1;
rotateimage=rotate(rgbimage,rot);
break;
default:
rotateimage=rgbimage;
}
recorder.record(rotateimage);
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
}
IplImage rotate(IplImage IplSrc,int angle) {
IplImage img= IplImage.create(IplSrc.height(), IplSrc.width(), IplSrc.depth(), IplSrc.nChannels());
cvTranspose(IplSrc, img);
cvFlip(img, img, angle);
return img;
}
}
After a lot of searching, this works for me.
Related
I'm trying to get the Android camera2 API running in a background service and then process the frames in the ImageReader.OnImageAvailableListener callback. I already use the suggested raw format YUV_420_888 to get the maximum fps, but I only get around 7 fps at a resolution of 640x480. This is even slower than what I get using the old Camera interface (I want to upgrade to camera2 to get a higher fps) or with the OpenCV JavaCameraView (which I can't use because I need to run the processing in a background service).
Below is my service class. What am I missing?
My phone is a Redmi Note 3 running Android 5.0.2.
public class Camera2ServiceYUV extends Service {
protected static final String TAG = "VideoProcessing";
protected static final int CAMERACHOICE = CameraCharacteristics.LENS_FACING_BACK;
protected CameraDevice cameraDevice;
protected CameraCaptureSession captureSession;
protected ImageReader imageReader;
// A semaphore to prevent the app from exiting before closing the camera.
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
public static final String RESULT_RECEIVER = "resultReceiver";
private static final int JPEG_COMPRESSION = 90;
public static final int RESULT_OK = 0;
public static final int RESULT_DEVICE_NO_CAMERA= 1;
public static final int RESULT_GET_CAMERA_FAILED = 2;
public static final int RESULT_ALREADY_RUNNING = 3;
public static final int RESULT_NOT_RUNNING = 4;
private static final String START_SERVICE_COMMAND = "startServiceCommands";
private static final int COMMAND_NONE = -1;
private static final int COMMAND_START = 0;
private static final int COMMAND_STOP = 1;
private boolean mRunning = false;
public Camera2ServiceYUV() {
}
public static void startToStart(Context context, ResultReceiver resultReceiver) {
Intent intent = new Intent(context, Camera2ServiceYUV.class);
intent.putExtra(START_SERVICE_COMMAND, COMMAND_START);
intent.putExtra(RESULT_RECEIVER, resultReceiver);
context.startService(intent);
}
public static void startToStop(Context context, ResultReceiver resultReceiver) {
Intent intent = new Intent(context, Camera2ServiceYUV.class);
intent.putExtra(START_SERVICE_COMMAND, COMMAND_STOP);
intent.putExtra(RESULT_RECEIVER, resultReceiver);
context.startService(intent);
}
// SERVICE INTERFACE
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
switch (intent.getIntExtra(START_SERVICE_COMMAND, COMMAND_NONE)) {
case COMMAND_START:
startCamera(intent);
break;
case COMMAND_STOP:
stopCamera(intent);
break;
default:
throw new UnsupportedOperationException("Cannot start the camera service with an illegal command.");
}
return START_STICKY;
}
@Override
public void onDestroy() {
try {
captureSession.abortCaptures();
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
}
captureSession.close();
}
@Override
public IBinder onBind(Intent intent) {
return null;
}
// CAMERA2 INTERFACE
/**
* 1. The android CameraManager class is used to manage all the camera devices in our android device
* Each camera device has a range of properties and settings that describe the device.
* It can be obtained through the camera characteristics.
*/
public void startCamera(Intent intent) {
final ResultReceiver resultReceiver = intent.getParcelableExtra(RESULT_RECEIVER);
if (mRunning) {
resultReceiver.send(RESULT_ALREADY_RUNNING, null);
return;
}
mRunning = true;
CameraManager manager = (CameraManager) getSystemService(CAMERA_SERVICE);
try {
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
String pickedCamera = getCamera(manager);
Log.e(TAG,"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA " + pickedCamera);
manager.openCamera(pickedCamera, cameraStateCallback, null);
CameraCharacteristics characteristics = manager.getCameraCharacteristics(pickedCamera);
Size[] jpegSizes = null;
if (characteristics != null) {
jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.YUV_420_888);
}
int width = 640;
int height = 480;
// if (jpegSizes != null && 0 < jpegSizes.length) {
// width = jpegSizes[jpegSizes.length -1].getWidth();
// height = jpegSizes[jpegSizes.length - 1].getHeight();
// }
// for(Size s : jpegSizes)
// {
// Log.e(TAG,"Size = " + s.toString());
// }
// DEBUG
StreamConfigurationMap map = characteristics.get(
CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map == null) {
return;
}
Log.e(TAG,"Width = " + width + ", Height = " + height);
Log.e(TAG,"output stall duration = " + map.getOutputStallDuration(ImageFormat.YUV_420_888, new Size(width,height)) );
Log.e(TAG,"Min output stall duration = " + map.getOutputMinFrameDuration(ImageFormat.YUV_420_888, new Size(width,height)) );
// Size[] sizeList = map.getInputSizes(ImageFormat.YUV_420_888);
// for(Size s : sizeList)
// {
// Log.e(TAG,"Size = " + s.toString());
// }
imageReader = ImageReader.newInstance(width, height, ImageFormat.YUV_420_888, 2 /* images buffered */);
imageReader.setOnImageAvailableListener(onImageAvailableListener, null);
Log.i(TAG, "imageReader created");
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
resultReceiver.send(RESULT_DEVICE_NO_CAMERA, null);
}catch (InterruptedException e) {
resultReceiver.send(RESULT_GET_CAMERA_FAILED, null);
throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
}
catch(SecurityException se)
{
resultReceiver.send(RESULT_GET_CAMERA_FAILED, null);
throw new RuntimeException("Security permission exception while trying to open the camera.", se);
}
resultReceiver.send(RESULT_OK, null);
}
// We can pick the camera being used, i.e. rear camera in this case.
private String getCamera(CameraManager manager) {
try {
for (String cameraId : manager.getCameraIdList()) {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
int cOrientation = characteristics.get(CameraCharacteristics.LENS_FACING);
if (cOrientation == CAMERACHOICE) {
return cameraId;
}
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
return null;
}
/**
* 1.1 Callbacks when the camera changes its state - opened, disconnected, or error.
*/
protected CameraDevice.StateCallback cameraStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice camera) {
Log.i(TAG, "CameraDevice.StateCallback onOpened");
mCameraOpenCloseLock.release();
cameraDevice = camera;
createCaptureSession();
}
@Override
public void onDisconnected(@NonNull CameraDevice camera) {
Log.w(TAG, "CameraDevice.StateCallback onDisconnected");
mCameraOpenCloseLock.release();
camera.close();
cameraDevice = null;
}
@Override
public void onError(@NonNull CameraDevice camera, int error) {
Log.e(TAG, "CameraDevice.StateCallback onError " + error);
mCameraOpenCloseLock.release();
camera.close();
cameraDevice = null;
}
};
/**
* 2. To capture or stream images from a camera device, the application must first create
* a camera capture captureSession.
* The camera capture needs a surface to output what has been captured, in this case
* we use ImageReader in order to access the frame data.
*/
public void createCaptureSession() {
try {
cameraDevice.createCaptureSession(Arrays.asList(imageReader.getSurface()), sessionStateCallback, null);
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
}
}
protected CameraCaptureSession.StateCallback sessionStateCallback = new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
Log.i(TAG, "CameraCaptureSession.StateCallback onConfigured");
// The camera is already closed
if (null == cameraDevice) {
return;
}
// When the captureSession is ready, we start to grab the frame.
Camera2ServiceYUV.this.captureSession = session;
try {
session.setRepeatingRequest(createCaptureRequest(), null, null);
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
Log.e(TAG, "CameraCaptureSession.StateCallback onConfigureFailed");
}
};
/**
* 3. The application then needs to construct a CaptureRequest, which defines all the capture parameters
* needed by a camera device to capture a single image.
*/
private CaptureRequest createCaptureRequest() {
try {
/**
* Check other templates for further details.
* TEMPLATE_MANUAL = 6
* TEMPLATE_PREVIEW = 1
* TEMPLATE_RECORD = 3
* TEMPLATE_STILL_CAPTURE = 2
* TEMPLATE_VIDEO_SNAPSHOT = 4
* TEMPLATE_ZERO_SHUTTER_LAG = 5
*
* TODO: can set camera features like auto focus, auto flash here
* captureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
*/
CaptureRequest.Builder captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
// captureRequestBuilder.set(CaptureRequest.EDGE_MODE,
// CaptureRequest.EDGE_MODE_OFF);
// captureRequestBuilder.set(
// CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
// CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON);
// captureRequestBuilder.set(
// CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE,
// CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE_OFF);
// captureRequestBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE,
// CaptureRequest.NOISE_REDUCTION_MODE_OFF);
// captureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
// CaptureRequest.CONTROL_AF_TRIGGER_CANCEL);
//
// captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
// captureRequestBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);
captureRequestBuilder.addTarget(imageReader.getSurface());
return captureRequestBuilder.build();
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
return null;
}
}
/**
* ImageReader provides a surface for the camera to output what has been captured.
* Upon the image available, call processImage() to process the image as desired.
*/
private long frameTime = 0;
private ImageReader.OnImageAvailableListener onImageAvailableListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Log.i(TAG, "called ImageReader.OnImageAvailable");
Image img = reader.acquireLatestImage();
if (img != null) {
if( frameTime != 0 )
{
Log.e(TAG, "fps = " + (float)(1000.0 / (float)(SystemClock.elapsedRealtime() - frameTime)) + " fps");
}
frameTime = SystemClock.elapsedRealtime();
img.close();
}
}
};
private void processImage(Image image) {
Mat outputImage = imageToMat(image);
Bitmap bmp = Bitmap.createBitmap(outputImage.cols(), outputImage.rows(), Bitmap.Config.ARGB_8888);
Utils.bitmapToMat(bmp, outputImage);
Point mid = new Point(0, 0);
Point inEnd = new Point(outputImage.cols(), outputImage.rows());
Imgproc.line(outputImage, mid, inEnd, new Scalar(255, 0, 0), 2, Core.LINE_AA, 0);
Utils.matToBitmap(outputImage, bmp);
Intent broadcast = new Intent();
broadcast.setAction("your_load_photo_action");
broadcast.putExtra("BitmapImage", bmp);
sendBroadcast(broadcast);
}
private Mat imageToMat(Image image) {
ByteBuffer buffer;
int rowStride;
int pixelStride;
int width = image.getWidth();
int height = image.getHeight();
int offset = 0;
Image.Plane[] planes = image.getPlanes();
byte[] data = new byte[image.getWidth() * image.getHeight() * ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888) / 8];
byte[] rowData = new byte[planes[0].getRowStride()];
for (int i = 0; i < planes.length; i++) {
buffer = planes[i].getBuffer();
rowStride = planes[i].getRowStride();
pixelStride = planes[i].getPixelStride();
int w = (i == 0) ? width : width / 2;
int h = (i == 0) ? height : height / 2;
for (int row = 0; row < h; row++) {
int bytesPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888) / 8;
if (pixelStride == bytesPerPixel) {
int length = w * bytesPerPixel;
buffer.get(data, offset, length);
// Advance buffer the remainder of the row stride, unless on the last row.
// Otherwise, this will throw an IllegalArgumentException because the buffer
// doesn't include the last padding.
if (h - row != 1) {
buffer.position(buffer.position() + rowStride - length);
}
offset += length;
} else {
// On the last row only read the width of the image minus the pixel stride
// plus one. Otherwise, this will throw a BufferUnderflowException because the
// buffer doesn't include the last padding.
if (h - row == 1) {
buffer.get(rowData, 0, width - pixelStride + 1);
} else {
buffer.get(rowData, 0, rowStride);
}
for (int col = 0; col < w; col++) {
data[offset++] = rowData[col * pixelStride];
}
}
}
}
// Finally, create the Mat.
Mat mat = new Mat(height + height / 2, width, CV_8UC1);
mat.put(0, 0, data);
return mat;
}
private void stopCamera(Intent intent) {
ResultReceiver resultReceiver = intent.getParcelableExtra(RESULT_RECEIVER);
if (!mRunning) {
resultReceiver.send(RESULT_NOT_RUNNING, null);
return;
}
closeCamera();
resultReceiver.send(RESULT_OK, null);
mRunning = false;
Log.d(TAG, "Service is finished.");
}
/**
* Closes the current {@link CameraDevice}.
*/
private void closeCamera() {
try {
mCameraOpenCloseLock.acquire();
if (null != captureSession) {
captureSession.close();
captureSession = null;
}
if (null != cameraDevice) {
cameraDevice.close();
cameraDevice = null;
}
if (null != imageReader) {
imageReader.close();
imageReader = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
} finally {
mCameraOpenCloseLock.release();
}
}
}
I bumped into this problem recently when I tried to upgrade my AR app from the camera1 to the camera2 API. I used a mid-range device for testing (a Meizu S6, which has an Exynos 7872 CPU and a Mali-G71 GPU), and what I want to achieve is a steady 30 fps AR experience.
But through the migration I found that it's quite tricky to get a decent preview frame rate using the camera2 API.
I configured my capture request using TEMPLATE_PREVIEW:
mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
Then I added two target surfaces: one for the preview, which is a SurfaceTexture at 1280x720,
and an ImageReader at 1280x720 for image processing.
mImageReader = ImageReader.newInstance(
mVideoSize.getWidth(),
mVideoSize.getHeight(),
ImageFormat.YUV_420_888,
2);
List<Surface> surfaces =new ArrayList<>();
Surface previewSurface = new Surface(mSurfaceTexture);
surfaces.add(previewSurface);
mPreviewBuilder.addTarget(previewSurface);
Surface frameCaptureSurface = mImageReader.getSurface();
surfaces.add(frameCaptureSurface);
mPreviewBuilder.addTarget(frameCaptureSurface);
mPreviewBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), captureCallback, mBackgroundHandler);
Everything works as expected: my TextureView gets updated and the frame callback gets called too. Except... the frame rate is about 10 fps, and I haven't even done any image processing yet.
I have experimented with many camera2 API settings, including SENSOR_FRAME_DURATION and different ImageFormat and size combinations, but none of them improved the frame rate. Yet if I just remove the ImageReader from the output surfaces, the preview gets 30 fps easily!
So I guess the problem is that adding the ImageReader as a camera2 output surface drastically decreases the preview frame rate, at least in my case. So what is the solution?
My solution is glReadPixels.
I know glReadPixels is considered one of the evil calls, because it copies bytes from the GPU back to main memory and also forces OpenGL to flush its draw commands, so for performance's sake we'd better avoid it. But surprisingly, glReadPixels is actually pretty fast here and gives a much better frame rate than the ImageReader's YUV_420_888 output.
In addition, to reduce the memory overhead, I make another draw call into a smaller framebuffer, such as 360x640 instead of the preview's 720p, dedicated to feature detection.
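As a rough illustration of the read-back (a sketch, assuming a current GL context and an already-rendered 360x640 framebuffer object referred to here as smallFbo):
ByteBuffer pixelBuf = ByteBuffer.allocateDirect(360 * 640 * 4).order(ByteOrder.nativeOrder());
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, smallFbo);
GLES20.glReadPixels(0, 0, 360, 640, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuf);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
// pixelBuf now holds the downscaled RGBA frame on the CPU, ready for feature detection.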
This is based on the camera2 implementation in the OpenCV library.
I had the same problem; then I noticed this piece of code in the OpenCV source for JavaCamera2View. You need to change the settings of the CaptureRequest.Builder this way:
CaptureRequest.Builder captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
It changed the fps from 10 to around 28-30 for me. It worked with two target surfaces: one for the preview TextureView, the second for the ImageReader:
Surface readerSurface = imageReader.getSurface();
Surface surface = new Surface(surfaceTexture);
captureBuilder.addTarget(surface);
captureBuilder.addTarget(readerSurface);
I cannot post a comment (not enough rep), but I'm running into the same issue with a Redmi 6.
If I use a TextureView for previewing the camera output I get around 30 fps, but replacing it with an ImageReader brings it down to 8-9 fps. All the camera configs are the same in either case.
Interestingly enough, trying out CameraXBasic showed the same issue: the updates from the camera were sluggish. But android-Camera2Basic (using a TextureView) ran without any issues.
Update 1:
I tested lowering the preview size from 1280x720 to 640x480 and, as expected, saw better performance.
This is what I know after tweaking with it a little: the problem lies in the ImageReader's maxImages parameter. I changed it from 2 to 3 to 56, and it changed the fps quite a lot. What I think happens is that the ImageReader surface we give to camera2 tends to block the camera from saving new frames to its buffers while an Image from ImageReader.OnImageAvailableListener is being processed or hasn't been released. In other words, the camera wants to write into a buffer but doesn't have enough of them, so when we increase the ImageReader's maxImages we give camera2 room to save the next image.
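A minimal sketch of that idea (assuming a background Handler called backgroundHandler; the maxImages value of 4 is only illustrative): give the camera more buffers and always close the Image promptly.
imageReader = ImageReader.newInstance(640, 480, ImageFormat.YUV_420_888, 4 /* maxImages */);
imageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader reader) {
        Image img = reader.acquireLatestImage(); // drop stale frames
        if (img == null) return;
        try {
            // ...copy out only the bytes you need, as cheaply as possible...
        } finally {
            img.close(); // return the buffer so the camera is never starved
        }
    }
}, backgroundHandler);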
I want to use the OpenCV4Android sample Tutorial1 to record video.
I used this solution, but my smartphone doesn't display anything on screen, just a black view. Can anyone help me?
Here is my code.
Tutorial1Activity
public class Tutorial1Activity extends Activity implements CvCameraViewListener2 {
private static final String TAG = "OCVSample::Activity";
private CameraBridgeViewBase mOpenCvCameraView;
private boolean mIsJavaCamera = true;
private MenuItem mItemSwitchCamera = null;
//*****************writetoSD***********************//
public FileWriter fw; // = new FileWriter(folder_path, false);
public BufferedWriter bw;// = new BufferedWriter(fw);
boolean first_in = true;
String showTimefile = null;
String showTime = null;
String folder_path = Environment.getExternalStorageDirectory().getAbsolutePath();
String folder_name = "Face Detection Signal";
String folder_pathforfile = null;
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd HH_mm_ss");
SimpleDateFormat sdf_fileintxt = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
//*****************writetoSD***********************//
//---------------MediaRecorder-------------------//
public MediaRecorder mediaRecorder;
Button bt_Record;
boolean isRecord = false;
Handler mThreadHandler;
HandlerThread mHandlerThread;
//---------------MediaRecorder-------------------//
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public Tutorial1Activity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.tutorial1_surface_view);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_java_surface_view);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
//---------------------------------------------------------//
bt_Record = (Button)findViewById(R.id.bt_recorder);
folder_pathforfile = folder_path + File.separator + folder_name
+ File.separator + "opencv" + "_";
CreateSDfolder();
ongetTime();
//---------------------------------------------------------//
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
}
public void onCameraViewStopped() {
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
return inputFrame.rgba();
}
private void CreateSDfolder() {
String filefolderpath = folder_path + File.separator + folder_name;
File dir = new File(filefolderpath);
if (!dir.exists()){
Log.e("folder", "not exist");
try{
//dir.createNewFile(true);
dir.mkdir();
Log.e("folder", "creat exist");
}catch(Exception e){
Log.e("folder", "creat not exist");
e.printStackTrace();
}
}
else{
Log.e("folder", "exist");
}
}
private void ongetTime() {
Date dt=new Date();
showTime=sdf_fileintxt.format(dt);
showTimefile =sdf.format(dt);
}
private void WritetoSD(String data) {
try {
fw = new FileWriter(folder_pathforfile + showTimefile+".txt", true);
bw = new BufferedWriter(fw);
if (first_in == true) {
first_in = false;
bw.append(showTime);
bw.newLine();
}
bw.append(data);
bw.newLine();
bw.flush();
bw.close();
} catch (IOException e) {
Log.e("WriteToSD", "Write To SD ERROR");
e.printStackTrace();
}
}
public void onRecordSignal (View v){
if(!isRecord){
isRecord = true;
Log.e(TAG, "button click " + isRecord);
bt_Record.setText("Stop");
//new MediaPrepareTask().execute(null, null, null);
if (prepareMediaRecorder()) {
// Camera is available and unlocked, MediaRecorder is prepared,
// now you can start recording
Log.e("debug_mediarecorder", "prepareMediaRecorder in if");
mOpenCvCameraView.setRecorder(mediaRecorder);
mediaRecorder.start();
} else {
// prepare didn't work, release the camera
Log.e("debug_mediarecorder", "prepareMediaRecorder in else");
// mediaRecorder.stop();
releaseMediaRecorder();
}
} else{
isRecord = false;
Log.e(TAG, "button click " + isRecord);
bt_Record.setText("Record");
try {
if(mediaRecorder != null)
mediaRecorder.stop(); // stop the recording
else
Log.e(TAG,"onRecordSignal mediaRecorder is null");
} catch (RuntimeException e) {
// RuntimeException is thrown when stop() is called immediately after start().
// In this case the output file is not properly constructed and should be deleted.
Log.d(TAG, "RuntimeException: stop() is called immediately after start()");
//noinspection ResultOfMethodCallIgnored
}
releaseMediaRecorder(); // release the MediaRecorder object
}
}
public void releaseMediaRecorder() {
Log.e("debug","releaseMediaRecorder");
if (mediaRecorder != null) {
mediaRecorder.reset(); // clear recorder configuration
mediaRecorder.release(); // release the recorder object
mediaRecorder = null;
JavaCameraView.mCamera.lock();
mOpenCvCameraView.releaseRecord();
}
}
private String recordfilepath() {
// TODO Auto-generated method stub
ongetTime();
File sddir = Environment.getExternalStorageDirectory();
File vrdir = new File(sddir, folder_name);
File file = new File(vrdir, showTimefile+"_.mp4");
String filepath = file.getAbsolutePath();
Log.e("debug mediarecorder", filepath);
return filepath;
}
public boolean prepareMediaRecorder() {
// TODO Auto-generated method stub
Log.e("debug mediarecorder", "in prepareMediaRecorder");
mediaRecorder = new MediaRecorder();
try {
JavaCameraView.mCamera.lock();
JavaCameraView.mCamera.unlock();
}catch (RuntimeException e){
Log.e("debug mediarecorder","JavaCameraView.mCamera.unlock() fail");
}
/*mediaRecorder.reset();
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mediaRecorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH));
//mediaRecorder.setPreviewDisplay(CameraBridgeViewBase.mSurfaceHolder.getSurface());
mediaRecorder.setOutputFile(recordfilepath());
//mediaRecorder.setOnInfoListener((MediaRecorder.OnInfoListener) this);
//mediaRecorder.setOnErrorListener((MediaRecorder.OnErrorListener) this);*/
mediaRecorder.reset();
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
CamcorderProfile cpHigh = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
mediaRecorder.setProfile(cpHigh);
//mediaRecorder.setOutputFile("out.mp4");
mediaRecorder.setOutputFile(recordfilepath());
mediaRecorder.setVideoSize(mOpenCvCameraView.mFrameWidth, mOpenCvCameraView.mFrameHeight);
//mediaRecorder.setOnInfoListener(this);
//mediaRecorder.setOnErrorListener(this);
try {
mediaRecorder.prepare();
} catch (IllegalStateException e) {
Log.e("debug mediarecorder", "not prepare");
releaseMediaRecorder();
return false;
} catch (IOException e) {
Log.e("debug mediarecorder", "not prepare IOException");
//releaseMediaRecorder();
}
return true;
}
}
CameraBridgeViewBase
public abstract class CameraBridgeViewBase extends SurfaceView implements SurfaceHolder.Callback {
private static final String TAG = "CameraBridge";
private static final int MAX_UNSPECIFIED = -1;
private static final int STOPPED = 0;
private static final int STARTED = 1;
private int mState = STOPPED;
private Bitmap mCacheBitmap;
private CvCameraViewListener2 mListener;
private boolean mSurfaceExist;
private Object mSyncObject = new Object();
public int mFrameWidth;
public int mFrameHeight;
protected int mMaxHeight;
protected int mMaxWidth;
protected float mScale = 0;
protected int mPreviewFormat = RGBA;
protected int mCameraIndex = CAMERA_ID_ANY;
protected boolean mEnabled;
protected FpsMeter mFpsMeter = null;
public static final int CAMERA_ID_ANY = -1;
public static final int CAMERA_ID_BACK = 99;
public static final int CAMERA_ID_FRONT = 98;
public static final int RGBA = 1;
public static final int GRAY = 2;
//-------------------------------------//
protected MediaRecorder mRecorder;
protected Surface mSurface = null;
public void setRecorder(MediaRecorder rec) {
mRecorder = rec;
//Log.e(TAG,mRecorder.toString());
if (mRecorder != null) {
mSurface = mRecorder.getSurface();
Log.e(TAG,"mRecorder is not null");
Log.e(TAG,"mSurface = "+mSurface.toString());
}
else{
Log.e(TAG,"mRecorder is null");
}
}
public void releaseRecord(){
mSurface.release();
}
//-------------------------------------//
public CameraBridgeViewBase(Context context, int cameraId) {
super(context);
mCameraIndex = cameraId;
getHolder().addCallback(this);
mMaxWidth = MAX_UNSPECIFIED;
mMaxHeight = MAX_UNSPECIFIED;
}
public CameraBridgeViewBase(Context context, AttributeSet attrs) {
super(context, attrs);
int count = attrs.getAttributeCount();
Log.d(TAG, "Attr count: " + Integer.valueOf(count));
TypedArray styledAttrs = getContext().obtainStyledAttributes(attrs, R.styleable.CameraBridgeViewBase);
if (styledAttrs.getBoolean(R.styleable.CameraBridgeViewBase_show_fps, false))
enableFpsMeter();
mCameraIndex = styledAttrs.getInt(R.styleable.CameraBridgeViewBase_camera_id, -1);
getHolder().addCallback(this);
mMaxWidth = MAX_UNSPECIFIED;
mMaxHeight = MAX_UNSPECIFIED;
styledAttrs.recycle();
}
/**
* Sets the camera index
* @param cameraIndex new camera index
*/
public void setCameraIndex(int cameraIndex) {
this.mCameraIndex = cameraIndex;
}
public interface CvCameraViewListener {
/**
* This method is invoked when camera preview has started. After this method is invoked
* the frames will start to be delivered to client via the onCameraFrame() callback.
* @param width - the width of the frames that will be delivered
* @param height - the height of the frames that will be delivered
*/
public void onCameraViewStarted(int width, int height);
/**
* This method is invoked when camera preview has been stopped for some reason.
* No frames will be delivered via onCameraFrame() callback after this method is called.
*/
public void onCameraViewStopped();
/**
* This method is invoked when delivery of the frame needs to be done.
* The returned values - is a modified frame which needs to be displayed on the screen.
* TODO: pass the parameters specifying the format of the frame (BPP, YUV or RGB and etc)
*/
public Mat onCameraFrame(Mat inputFrame);
}
public interface CvCameraViewListener2 {
/**
* This method is invoked when camera preview has started. After this method is invoked
* the frames will start to be delivered to client via the onCameraFrame() callback.
* @param width - the width of the frames that will be delivered
* @param height - the height of the frames that will be delivered
*/
public void onCameraViewStarted(int width, int height);
/**
* This method is invoked when camera preview has been stopped for some reason.
* No frames will be delivered via onCameraFrame() callback after this method is called.
*/
public void onCameraViewStopped();
/**
* This method is invoked when delivery of the frame needs to be done.
* The returned values - is a modified frame which needs to be displayed on the screen.
* TODO: pass the parameters specifying the format of the frame (BPP, YUV or RGB and etc)
*/
public Mat onCameraFrame(CvCameraViewFrame inputFrame);
};
protected class CvCameraViewListenerAdapter implements CvCameraViewListener2 {
public CvCameraViewListenerAdapter(CvCameraViewListener oldStypeListener) {
mOldStyleListener = oldStypeListener;
}
public void onCameraViewStarted(int width, int height) {
mOldStyleListener.onCameraViewStarted(width, height);
}
public void onCameraViewStopped() {
mOldStyleListener.onCameraViewStopped();
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
Mat result = null;
switch (mPreviewFormat) {
case RGBA:
result = mOldStyleListener.onCameraFrame(inputFrame.rgba());
break;
case GRAY:
result = mOldStyleListener.onCameraFrame(inputFrame.gray());
break;
default:
Log.e(TAG, "Invalid frame format! Only RGBA and Gray Scale are supported!");
};
return result;
}
public void setFrameFormat(int format) {
mPreviewFormat = format;
}
private int mPreviewFormat = RGBA;
private CvCameraViewListener mOldStyleListener;
};
/**
* This class interface is abstract representation of single frame from camera for onCameraFrame callback
* Attention: Do not use objects, that represents this interface out of onCameraFrame callback!
*/
public interface CvCameraViewFrame {
/**
* This method returns RGBA Mat with frame
*/
public Mat rgba();
/**
* This method returns single channel gray scale Mat with frame
*/
public Mat gray();
};
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) {
Log.d(TAG, "call surfaceChanged event");
synchronized(mSyncObject) {
if (!mSurfaceExist) {
mSurfaceExist = true;
checkCurrentState();
} else {
/** Surface changed. We need to stop camera and restart with new parameters */
/* Pretend that old surface has been destroyed */
mSurfaceExist = false;
checkCurrentState();
/* Now use new surface. Say we have it now */
mSurfaceExist = true;
checkCurrentState();
}
}
}
public void surfaceCreated(SurfaceHolder holder) {
/* Do nothing. Wait until surfaceChanged delivered */
}
public void surfaceDestroyed(SurfaceHolder holder) {
synchronized(mSyncObject) {
mSurfaceExist = false;
checkCurrentState();
}
}
/**
* This method is provided for clients, so they can enable the camera connection.
* The actual onCameraViewStarted callback will be delivered only after both this method is called and surface is available
*/
public void enableView() {
synchronized(mSyncObject) {
mEnabled = true;
checkCurrentState();
}
}
/**
* This method is provided for clients, so they can disable camera connection and stop
* the delivery of frames even though the surface view itself is not destroyed and still stays on the scren
*/
public void disableView() {
synchronized(mSyncObject) {
mEnabled = false;
checkCurrentState();
}
}
/**
* This method enables label with fps value on the screen
*/
public void enableFpsMeter() {
if (mFpsMeter == null) {
mFpsMeter = new FpsMeter();
mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
}
}
public void disableFpsMeter() {
mFpsMeter = null;
}
/**
*
* @param listener
*/
public void setCvCameraViewListener(CvCameraViewListener2 listener) {
mListener = listener;
}
public void setCvCameraViewListener(CvCameraViewListener listener) {
CvCameraViewListenerAdapter adapter = new CvCameraViewListenerAdapter(listener);
adapter.setFrameFormat(mPreviewFormat);
mListener = adapter;
}
/**
* This method sets the maximum size that camera frame is allowed to be. When selecting
* size - the biggest size which less or equal the size set will be selected.
* As an example - we set setMaxFrameSize(200,200) and we have 176x152 and 320x240 sizes. The
* preview frame will be selected with 176x152 size.
* This method is useful when need to restrict the size of preview frame for some reason (for example for video recording)
* @param maxWidth - the maximum width allowed for camera frame.
* @param maxHeight - the maximum height allowed for camera frame
*/
public void setMaxFrameSize(int maxWidth, int maxHeight) {
mMaxWidth = maxWidth;
mMaxHeight = maxHeight;
}
public void SetCaptureFormat(int format)
{
mPreviewFormat = format;
if (mListener instanceof CvCameraViewListenerAdapter) {
CvCameraViewListenerAdapter adapter = (CvCameraViewListenerAdapter) mListener;
adapter.setFrameFormat(mPreviewFormat);
}
}
/**
* Called when mSyncObject lock is held
*/
private void checkCurrentState() {
Log.d(TAG, "call checkCurrentState");
int targetState;
if (mEnabled && mSurfaceExist && getVisibility() == VISIBLE) {
targetState = STARTED;
} else {
targetState = STOPPED;
}
if (targetState != mState) {
/* The state change detected. Need to exit the current state and enter target state */
processExitState(mState);
mState = targetState;
processEnterState(mState);
}
}
private void processEnterState(int state) {
Log.d(TAG, "call processEnterState: " + state);
switch(state) {
case STARTED:
onEnterStartedState();
if (mListener != null) {
mListener.onCameraViewStarted(mFrameWidth, mFrameHeight);
}
break;
case STOPPED:
onEnterStoppedState();
if (mListener != null) {
mListener.onCameraViewStopped();
}
break;
};
}
private void processExitState(int state) {
Log.d(TAG, "call processExitState: " + state);
switch(state) {
case STARTED:
onExitStartedState();
break;
case STOPPED:
onExitStoppedState();
break;
};
}
private void onEnterStoppedState() {
/* nothing to do */
}
private void onExitStoppedState() {
/* nothing to do */
}
// NOTE: The order of bitmap constructor and camera connection is important for android 4.1.x
// Bitmap must be constructed before surface
private void onEnterStartedState() {
Log.d(TAG, "call onEnterStartedState");
/* Connect camera */
if (!connectCamera(getWidth(), getHeight())) {
AlertDialog ad = new AlertDialog.Builder(getContext()).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("It seems that you device does not support camera (or it is locked). Application will be closed.");
ad.setButton(DialogInterface.BUTTON_NEUTRAL, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
((Activity) getContext()).finish();
}
});
ad.show();
}
}
private void onExitStartedState() {
disconnectCamera();
if (mCacheBitmap != null) {
mCacheBitmap.recycle();
}
}
/**
* This method shall be called by the subclasses when they have valid
* object and want it to be delivered to external client (via callback) and
* then displayed on the screen.
* @param frame - the current frame to be delivered
*/
protected void deliverAndDrawFrame(CvCameraViewFrame frame) {
Mat modified;
if (mListener != null) {
modified = mListener.onCameraFrame(frame);
} else {
modified = frame.rgba();
}
boolean bmpValid = true;
if (modified != null) {
try {
Utils.matToBitmap(modified, mCacheBitmap);
} catch(Exception e) {
Log.e(TAG, "Mat type: " + modified);
Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmpValid = false;
}
}
if (bmpValid && mCacheBitmap != null) {
Canvas canvas;
if (mRecorder != null) {
canvas = mSurface.lockCanvas(null);
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
Log.d(TAG, "mStretch value: " + mScale);
if (mScale != 0) {
canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
new Rect((int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2),
(int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2),
(int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2 + mScale*mCacheBitmap.getWidth()),
(int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2 + mScale*mCacheBitmap.getHeight())), null);
} else {
canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
new Rect((canvas.getWidth() - mCacheBitmap.getWidth()) / 2,
(canvas.getHeight() - mCacheBitmap.getHeight()) / 2,
(canvas.getWidth() - mCacheBitmap.getWidth()) / 2 + mCacheBitmap.getWidth(),
(canvas.getHeight() - mCacheBitmap.getHeight()) / 2 + mCacheBitmap.getHeight()), null);
}
if (mFpsMeter != null) {
mFpsMeter.measure();
mFpsMeter.draw(canvas, 20, 30);
}
mSurface.unlockCanvasAndPost(canvas);
}
}
}
/**
* This method is invoked shall perform concrete operation to initialize the camera.
* CONTRACT: as a result of this method variables mFrameWidth and mFrameHeight MUST be
* initialized with the size of the Camera frames that will be delivered to external processor.
* @param width - the width of this SurfaceView
* @param height - the height of this SurfaceView
*/
protected abstract boolean connectCamera(int width, int height);
/**
* Disconnects and release the particular camera object being connected to this surface view.
* Called when syncObject lock is held
*/
protected abstract void disconnectCamera();
// NOTE: On Android 4.1.x the function must be called before SurfaceTextre constructor!
protected void AllocateCache()
{
mCacheBitmap = Bitmap.createBitmap(mFrameWidth, mFrameHeight, Bitmap.Config.ARGB_8888);
}
public interface ListItemAccessor {
public int getWidth(Object obj);
public int getHeight(Object obj);
};
/**
* This helper method can be called by subclasses to select camera preview size.
* It goes over the list of the supported preview sizes and selects the maximum one which
* fits both values set via setMaxFrameSize() and surface frame allocated for this view
* @param supportedSizes
* @param surfaceWidth
* @param surfaceHeight
* @return optimal frame size
*/
protected Size calculateCameraFrameSize(List<?> supportedSizes, ListItemAccessor accessor, int surfaceWidth, int surfaceHeight) {
int calcWidth = 0;
int calcHeight = 0;
int maxAllowedWidth = (mMaxWidth != MAX_UNSPECIFIED && mMaxWidth < surfaceWidth)? mMaxWidth : surfaceWidth;
int maxAllowedHeight = (mMaxHeight != MAX_UNSPECIFIED && mMaxHeight < surfaceHeight)? mMaxHeight : surfaceHeight;
for (Object size : supportedSizes) {
int width = accessor.getWidth(size);
int height = accessor.getHeight(size);
if (width <= maxAllowedWidth && height <= maxAllowedHeight) {
if (width >= calcWidth && height >= calcHeight) {
calcWidth = (int) width;
calcHeight = (int) height;
}
}
}
return new Size(calcWidth, calcHeight);
}
}
And I added these permissions in the manifest:
<uses-feature android:name="android.hardware.camera" android:required="false"/>
<uses-feature android:name="android.hardware.camera.autofocus" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front.autofocus" android:required="false"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.WAKE_LOCK" />
<uses-permission android:name="android.permission.RECORD_VIDEO" />
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
I want to create the image of a barcode/QR code, etc., in my app. I have searched a lot and found different libraries for this task, but since I am already using ZXing, I would like to do it with ZXing.
Following is the code that I have written.
This is my scanner activity class:
public void handleResult(Result rawResult) {
// Do something with the result here
Log.v(TAG, rawResult.getText()); // Prints scan results
Toast.makeText(SimpleScannerActivity.this, rawResult.toString() + " WOW scanned", Toast.LENGTH_LONG).show();
Toast.makeText(SimpleScannerActivity.this, rawResult.getBarcodeFormat().toString(), Toast.LENGTH_LONG).show();
Log.v(TAG, rawResult.getBarcodeFormat().toString()); // Prints the scan format (qrcode, pdf417 etc.)
//Intent scanScreenResult= new Intent("com.aaa.fyp.ScanResultScreen");
setFormat(rawResult);
Intent nextScreen = new Intent("com.aaa.fyp.ScanResultScreen");
nextScreen.putExtra("barcode",rawResult.toString());
nextScreen.putExtra("format", rawResult.getBarcodeFormat().toString());
finish();
startActivity(nextScreen);
}
public void setFormat(Result result){
r=result.getBarcodeFormat();
System.out.println("============================== setformat main"+ r);
}
public BarcodeFormat getFormat(){
System.out.println("============================== getformat main"+ r);
return r;
}
I use the results from the above activity in the ScanResultScreen activity:
public class ScanResultScreen extends SimpleScannerActivity {
ImageView scanned;
TextView bc;
TextView f;
String Barcode;
String format;
BarcodeFormat form;
@Override
public void onCreate(Bundle state) {
super.onCreate(state);
setContentView(R.layout.scan_screen_with_button);
ViewGroup layout = (ViewGroup) findViewById(R.id.scanScreenWithButton);
setContentView(layout);
Intent prevScreen = getIntent(); // gets the previously created intent
Barcode=prevScreen.getStringExtra("barcode");
bc= (TextView)findViewById(R.id.barcode_label);
bc.setText(Barcode);
format=prevScreen.getStringExtra("format");
f=(TextView) findViewById(R.id.format_label);
f.setText(prevScreen.getStringExtra("format").toString());
SimpleScannerActivity obj=new SimpleScannerActivity();
form=obj.getFormat();
d=(TextView)findViewById(R.id.date_label);
String formattedDate = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(Calendar.getInstance().getTime());
d.setText(formattedDate);
Bitmap bitmap = null;
ImageView iv = new ImageView(this);
try {
bitmap = encodeAsBitmap(Barcode, form, 600, 300);
iv.setImageBitmap(bitmap);
} catch (WriterException e) {
e.printStackTrace();
}
layout.addView(iv);
}
private static final int WHITE = 0xFFFFFFFF;
private static final int BLACK = 0xFF000000;
Bitmap encodeAsBitmap(String contents, BarcodeFormat format, int img_width, int img_height) throws WriterException {
String contentsToEncode = contents;
if (contentsToEncode == null) {
return null;
}
Map<EncodeHintType, Object> hints = null;
String encoding = guessAppropriateEncoding(contentsToEncode);
if (encoding != null) {
hints = new EnumMap<EncodeHintType, Object>(EncodeHintType.class);
hints.put(EncodeHintType.CHARACTER_SET, encoding);
}
MultiFormatWriter writer = new MultiFormatWriter();
BitMatrix result;
try {
result = writer.encode(contentsToEncode, format, img_width, img_height, hints);
} catch (IllegalArgumentException iae) {
// Unsupported format
return null;
}
int width = result.getWidth();
int height = result.getHeight();
int[] pixels = new int[width * height];
for (int y = 0; y < height; y++) {
int offset = y * width;
for (int x = 0; x < width; x++) {
pixels[offset + x] = result.get(x, y) ? BLACK : WHITE;
}
}
Bitmap bitmap = Bitmap.createBitmap(width, height,
Bitmap.Config.ARGB_8888);
bitmap.setPixels(pixels, 0, width, 0, 0, width, height);
return bitmap;
}
private static String guessAppropriateEncoding(CharSequence contents) {
// Very crude at the moment
for (int i = 0; i < contents.length(); i++) {
if (contents.charAt(i) > 0xFF) {
return "UTF-8";
}
}
return null;
}
Now I am getting a null value in the variable "form". I am able to get the barcode format in my second activity by passing it through the intent, but it arrives as a String, whereas the built-in methods I am using here require the BarcodeFormat type from ZXing.
Help!!
BarcodeFormat is an enum type. If you want to pass a String value, you have to convert it to BarcodeFormat.
For example, passing a barcode format "AZTEC":
BarcodeFormat format = Enum.valueOf(BarcodeFormat.class, "AZTEC");
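Applied to the code in the question, a hypothetical usage in ScanResultScreen would be (variable names taken from the question; BarcodeFormat.valueOf is equivalent to the Enum.valueOf call above):
String formatName = prevScreen.getStringExtra("format"); // e.g. "QR_CODE"
BarcodeFormat form = BarcodeFormat.valueOf(formatName);
Bitmap bitmap = encodeAsBitmap(Barcode, form, 600, 300);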
I'm developing a Java RTP streaming app for a company project, which should be capable of joining a multicast server and receiving RTP packets. I then use an H.264 depacketizer to recreate a complete frame from the NAL FUs (I keep appending the data until the End bit and the Marker bit are set).
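For reference, the reassembly rule described here is roughly the following FU-A handling (a sketch with placeholder names such as payload, rtpMarkerSet and feedToDecoder; this is not my production code):
private final java.io.ByteArrayOutputStream nalBuffer = new java.io.ByteArrayOutputStream();
void onFuAPayload(byte[] payload, boolean rtpMarkerSet) {
    byte fuIndicator = payload[0];            // F | NRI | type == 28 (FU-A)
    byte fuHeader = payload[1];               // S | E | R | original NAL type
    boolean start = (fuHeader & 0x80) != 0;   // first fragment of this NAL unit
    boolean end = (fuHeader & 0x40) != 0;     // last fragment of this NAL unit
    if (start) {
        nalBuffer.reset();
        // Annex-B start code plus the reconstructed NAL header.
        nalBuffer.write(0); nalBuffer.write(0); nalBuffer.write(0); nalBuffer.write(1);
        nalBuffer.write((fuIndicator & 0xE0) | (fuHeader & 0x1F));
    }
    nalBuffer.write(payload, 2, payload.length - 2);
    if (end && rtpMarkerSet) {
        feedToDecoder(nalBuffer.toByteArray()); // complete NAL unit -> MediaCodec input buffer
    }
}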
I want to decode and display a raw H.264 video byte stream on Android, so I'm currently using the MediaCodec classes with the hardware decoder configured.
The application is up and running on Jelly Bean (API 17). The various resolutions I need to decode are:
480P at 30/60 FPS
720P/I at 30/60 FPS
1080P/I at 30/60 FPS
Recently, due to a system upgrade, we are porting the app to Android L, version 5.0.2. My app is not capable of playing the high-resolution videos like 720p@60fps and 1080p@60fps.
For debugging purposes, I started feeding the elementary H.264 frames, with their sizes, from a dump file to MediaCodec and found that the video lags.
There are timestamps on the sample video I used, and it seems the actual time taken to advance by 1 second in the rendered video is longer.
Below are my sample code and links to the sample video:
h264 video https://www.dropbox.com/s/cocjhhovihm8q25/dump60fps.h264?dl=0
h264 framesize https://www.dropbox.com/s/r146d5zederrne1/dump60fps.size?dl=0
Also, as this is my question on Stack Overflow, please bear with me on the bad code formatting and direct references.
public class MainActivity extends Activity {
static final String TAG = "MainActivity";
private PlayerThread mPlayer = null;
private static final String MIME_TYPE = "video/avc";
private byte[] mSPSPPSFrame = new byte [3000];
private byte[] sps = new byte[37];
File videoFile = null;
File videoFile1 = null;
TextView tv ;
FileInputStream videoFileStream = null;
FileInputStream videoFileStream1 = null;
int[] tall = null ;
SpeedControlCallback mspeed = new SpeedControlCallback();
int mStreamLen = 0;
FrameLayout game;
RelativeLayout rl ;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//mVideoSurfaceView = (SurfaceView)findViewById(R.id.videoSurfaceView);
setContentView(R.layout.activity_main);
SurfaceView first = (SurfaceView) findViewById(R.id.firstSurface);
first.getHolder().addCallback(new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
Log.d(TAG, "First surface created!");
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i2, int i3) {
Log.d(TAG, "surfaceChanged()");
surfaceHolder.getSurface();
if (mPlayer == null) {
mPlayer = new PlayerThread(surfaceHolder.getSurface());
mPlayer.start();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
Log.d(TAG, "First surface destroyed!");
}
});
tv = (TextView) findViewById(R.id.textview);
videoFile = new File("/data/local/tmp/dump60fps.h264");
videoFile1 = new File("/data/local/tmp/dump60fps.size");
}
private class PlayerThread extends Thread {
private Surface surface;
public PlayerThread(Surface surface) {
this.surface = surface;
}
@Override
public void run() {
try {
decodeVideo(0, 1920,1080, 50, surface);
} catch (IOException e) {
e.printStackTrace();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (Throwable e) {
e.printStackTrace();
}
}
}
private void decodeVideo(int testinput, int width, int height,
int threshold, Surface surface) throws Throwable {
MediaCodec codec = null;
MediaFormat mFormat;
final long kTimeOutUs = 10000;
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
boolean sawInputEOS = false;
boolean sawOutputEOS = false;
MediaFormat oformat = null;
int errors = -1;
long presentationTimeUs = 0L;
boolean mVideoStart = false;
byte[] byteArray = new byte[65525*5*3];
int i;
int sizeInBytes = 0, index, sampleSize = 0;
try {
byte[] bytes = new byte[(int) videoFile1.length()];
FileInputStream fis = new FileInputStream(videoFile1);
fis.read(bytes);
fis.close();
String[] valueStr = new String(bytes).trim().split("\\s+");
tall = new int[valueStr.length];
mStreamLen = valueStr.length;
Log.e(TAG, "++++++ Total Frames ++++++"+mStreamLen);
for ( i = 0; i < valueStr.length; i++) {
tall[i] = Integer.parseInt(valueStr[i]);
}
} catch (IOException e1) {
e1.printStackTrace();
}
index =1;
try {
videoFileStream = new FileInputStream(videoFile);
} catch (FileNotFoundException e1) {
e1.printStackTrace();
}
System.currentTimeMillis();
if (mVideoStart == false) {
try {
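// The first 37 bytes of this particular dump are treated as the SPS/PPS (codec config) and are later passed as csd-0.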
sizeInBytes = videoFileStream.read(mSPSPPSFrame, 0,37);
Log.e(TAG, "VideoEngine configure ."+sizeInBytes);
//for (i = 0 ; i < sizeInBytes; i++){
// Log.e(TAG, "VideoEngine ."+mSPSPPSFrame[i]);}
} catch (IOException e1) {
e1.printStackTrace();
}
sampleSize = sizeInBytes;
index++;
index++;
mFormat = MediaFormat.createVideoFormat(MIME_TYPE, 1920,1080);
mFormat.setByteBuffer("csd-0", ByteBuffer.wrap( mSPSPPSFrame,0, sizeInBytes));
codec = MediaCodec.createDecoderByType(MIME_TYPE);
codec.configure(mFormat, surface /*surface*/ , null /* crypto */, 0 /* flags */);
codec.start();
codec.getInputBuffers();
codec.getOutputBuffers();
}
// index = 0;
while (!sawOutputEOS && errors < 0) {
if (!sawInputEOS) {
int inputBufIndex = codec.dequeueInputBuffer(kTimeOutUs);
//Log.d(TAG, String.format("Archana Dqing the input buffer with BufIndex #: %d",inputBufIndex));
if (inputBufIndex >= 0) {
ByteBuffer dstBuf = codec.getInputBuffers()[inputBufIndex];
/*
* Read data from file and copy to the input ByteBuffer
*/
try {
sizeInBytes = videoFileStream.read(byteArray, 0,
tall[index] /*+ 4*/);
sampleSize = tall[index]/*+ 4*/;
index++;
} catch (IOException e) {
e.printStackTrace();
}
if (sizeInBytes <= 0) {
codec.queueInputBuffer(
inputBufIndex,
0 /* offset */,
0,
presentationTimeUs,
MediaCodec.BUFFER_FLAG_END_OF_STREAM );
sawInputEOS = true;
}
else {
dstBuf.put(byteArray, 0, sizeInBytes);
if (mVideoStart == false) mVideoStart = true;
codec.queueInputBuffer(
inputBufIndex,
0 /* offset */,
sampleSize,
presentationTimeUs,
mVideoStart ? 0:MediaCodec.BUFFER_FLAG_CODEC_CONFIG );
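// Note: mVideoStart is already true at this point, so BUFFER_FLAG_CODEC_CONFIG is never set on this path,
// and presentationTimeUs stays at 0 for every frame.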
//Log.d(TAG, String.format(" After queueing the buffer to decoder with inputbufindex and samplesize #: %d ,%d ind %d",inputBufIndex,sampleSize,index));
}
}
}
int res = codec.dequeueOutputBuffer(info, kTimeOutUs);
//Log.d(TAG, String.format(" Getting the information about decoded output buffer flags,offset,PT,size #: %d %d %d %d",info.flags,info.offset,info.presentationTimeUs,info.size));
//Log.d(TAG, String.format(" Getting the output of decoder in res #: %d",res));
if (res >= 0) {
int outputBufIndex = res;
//Log.d(TAG, "Output PTS "+info.presentationTimeUs);
//mspeed.preRender(info.presentationTimeUs);
//mspeed.setFixedPlaybackRate(25);
codec.releaseOutputBuffer(outputBufIndex, true /* render */);
//Log.d(TAG, String.format(" releaseoutputbuffer index= #: %d",outputBufIndex));
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.d(TAG, "saw output EOS.");
sawOutputEOS = true;
}
} else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
codec.getOutputBuffers();
Log.d(TAG, "output buffers have changed.");
} else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
oformat = codec.getOutputFormat();
Log.d(TAG, "output format has changed to " + oformat);
}
}
codec.stop();
codec.release();
this.finish();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.activity_main, menu);
return true;
}
}
There are a couple of workarounds I tried for the problem with the above sample test.
Instead of feeding one full frame to the decoder input, I fed single NAL units at a time. The playback was still slow and could not match 60 FPS.
Google has changed the implementation of the Surface BufferQueue from asynchronous to synchronous. Hence, when we call MediaCodec.dequeueOutputBuffer to get decoded data, the server side (SurfaceTexture::dequeueBuffer) waits for a buffer to be queued, and the client side waits for that, so SurfaceTextureClient::dequeueBuffer does not return until a buffer has actually been queued on the server side. In asynchronous mode, a new GraphicBuffer is allocated instead.
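For comparison, a minimal sketch of the asynchronous MediaCodec callback mode introduced in API 21, which decouples feeding input from draining output (readNextFrame() and computePtsUs() are hypothetical helpers, not part of my code):
// Sketch only: MediaCodec asynchronous mode (API 21+). setCallback() must be called before configure().
codec.setCallback(new MediaCodec.Callback() {
    @Override
    public void onInputBufferAvailable(MediaCodec mc, int index) {
        ByteBuffer in = mc.getInputBuffer(index);
        int size = readNextFrame(in);  // hypothetical helper: fills 'in', returns byte count or -1 at end of stream
        if (size < 0) {
            mc.queueInputBuffer(index, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
        } else {
            mc.queueInputBuffer(index, 0, size, computePtsUs(), 0);  // computePtsUs() is also hypothetical
        }
    }
    @Override
    public void onOutputBufferAvailable(MediaCodec mc, int index, MediaCodec.BufferInfo info) {
        mc.releaseOutputBuffer(index, true /* render to the Surface */);
    }
    @Override
    public void onOutputFormatChanged(MediaCodec mc, MediaFormat format) {
        Log.d(TAG, "output format changed to " + format);
    }
    @Override
    public void onError(MediaCodec mc, MediaCodec.CodecException e) {
        Log.e(TAG, "codec error", e);
    }
});
codec.configure(mFormat, surface, null, 0);
codec.start();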
I am new to Android SurfaceView implementation. I am developing an RDP client application on Android 3.0. I get the image stream from a socket and draw the images onto the surface using a SurfaceView and a Thread.
The sample SurfaceView code:
class mySurfaceView extends SurfaceView implements SurfaceHolder.Callback
{
private TutorialThread _thread;
Canvas c=null;
public mySurfaceView(Context context)
{
super(context);
getHolder().addCallback(this);
_thread = new TutorialThread(getHolder(), this);
matrix= new Matrix();
m = new float[9];
paint = new Paint();
}
public void surfaceCreated(SurfaceHolder arg0) {
//setWillNotDraw(false) ;
Log.e("surfaceCreated","surfaceCreated");
if(_thread==null ){
Log.e("_thread.created","thread created");
_thread = new TutorialThread(getHolder(),this);
_thread.setRunning(true);
_thread.start();
// <-- added fix
}else {
Log.e("_thread.getState()",_thread.getState()+"");
_thread.setRunning(true); //original code
_thread.start(); //original code
}
}
public void surfaceDestroyed(SurfaceHolder arg0) {
Log.e("surfaceDestroyed","surfaceDestroyed");
boolean retry = true;
_thread.setRunning(false);
while (retry) {
try {
_thread.join();
retry = false;
} catch (InterruptedException e) {
}
}
}
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2,
int arg3) {
}
class TutorialThread extends Thread
{
private SurfaceHolder _surfaceHolder;
private mySurfaceView _panel;
private boolean _run = false;
private InputStream is;
private Socket socket;
Bitmap resizeBmp;
public TutorialThread(SurfaceHolder surfaceHolder,mySurfaceView panel)
{
Log.e("TutorialThread","TutorialThread-->Constructor");
_surfaceHolder = surfaceHolder;
_panel = panel;
}
public void setRunning(boolean run) {
_run = run;
}
@Override
public void run()
{
Log.e("TutorialThread","TutorialThread-->run()");
try
{
socket = new Socket("172.19.1.144", 4444);
is = socket.getInputStream();
DataInputStream inputputStream = new DataInputStream(is);
//long time = System.currentTimeMillis();
int i=0;
while (socket.isConnected() )
{
Log.e("tutorial thread","While running");
c = null;
i++;
if(i==10){
System.gc();
i=0;
}
Log.e("BEFORE","BEFORE");
synchronized (_surfaceHolder)
{
Log.e("AFTER","AFTER");
ByteBuffer inputHeaderBuffer = ByteBuffer.allocate(20);
inputHeaderBuffer.order(ByteOrder.LITTLE_ENDIAN);
inputputStream.readFully(inputHeaderBuffer.array());
SurfaceViewPinchZoom.serverWidth=inputHeaderBuffer.getInt();
SurfaceViewPinchZoom.serverHeight=inputHeaderBuffer.getInt();
//Log.e("serverWidth","serverWidth "+ SurfaceViewPinchZoom.serverWidth+"serverHeight===="+SurfaceViewPinchZoom.serverHeight);
SurfaceViewPinchZoom.left=inputHeaderBuffer.getInt();
SurfaceViewPinchZoom.top=inputHeaderBuffer.getInt();
int dataLength = inputHeaderBuffer.getInt();
ByteBuffer imageDataCompress = ByteBuffer.allocate(dataLength);
imageDataCompress.order(ByteOrder.LITTLE_ENDIAN);
inputputStream.readFully(imageDataCompress.array());
byte[] imagedata = new byte[imageDataCompress.remaining()];
imageDataCompress.get(imagedata);
//Decompress the image
//Log.e("imagedata.length::::::::::",imagedata.length+"");
// Create the decompressor and give it the data to compress
Inflater decompressor = new Inflater();
decompressor.setInput(imagedata);
// Create an expandable byte array to hold the decompressed data
ByteArrayOutputStream bos = new ByteArrayOutputStream(imagedata.length);
// Decompress the data
byte[] buf = new byte[1024];
while (!decompressor.finished()) {
try {
int count = decompressor.inflate(buf);
bos.write(buf, 0, count);
} catch (DataFormatException e) {
}
}
try {
bos.close();
} catch (IOException e) {
}
// Get the decompressed data
byte[] decompressedData = bos.toByteArray();
// First decode with inJustDecodeBounds=true to check dimensions
BitmapFactory.Options options=new BitmapFactory.Options();
options.inJustDecodeBounds=true;
// Log.e("decompressedData.length::::::::::",decompressedData.length+"");
/*SurfaceViewPinchZoom.*/bmp = BitmapFactory.decodeByteArray(decompressedData, 0,decompressedData.length,options);
options.inDither=true;
/*scaleX=(float)screen_width/bmp.getWidth();
scaleY=(float)screen_height/bmp.getHeight();
matrix.setScale(scaleX, scaleY);*/
// Calculate inSampleSize
options.inSampleSize = calculateInSampleSize(options, screen_width, screen_height);
// Decode bitmap with inSampleSize set
options.inJustDecodeBounds = false;
/*SurfaceViewPinchZoom.*/bmp= BitmapFactory.decodeByteArray(decompressedData, 0,decompressedData.length,options);
bmp=BitmapFactory.decodeByteArray(decompressedData, 0,decompressedData.length,options);
c = _surfaceHolder.lockCanvas();
c.drawBitmap(bmp, matrix, paint);
//draw(c);
//postInvalidate();
onDraw(c);
inputHeaderBuffer.clear();
imageDataCompress.clear();
inputHeaderBuffer = null;
imageDataCompress = null;
imagedata = null;
}
if (c != null)
{
_surfaceHolder.unlockCanvasAndPost(c);
}
}
}
catch (ArrayIndexOutOfBoundsException ae)
{
ae.printStackTrace();
}
catch (Exception e)
{
e.printStackTrace();
}
finally {
if (c != null) {
_surfaceHolder.unlockCanvasAndPost(c);
}
}
}
private int calculateInSampleSize(Options options, int screen_width,
int screen_height) {
// Raw height and width of image
final int height = options.outHeight;
final int width = options.outWidth;
int inSampleSize = 1;
if (height > screen_height || width > screen_width) {
if (width > height) {
inSampleSize = Math.round((float)height / (float)screen_height);
} else {
inSampleSize = Math.round((float)width / (float)screen_width);
}
}
return inSampleSize;
}
}
}
The problems are
1) If I press the home button, surfaceDestroyed() is called and the thread is terminated.
But I need the thread to continue and keep updating the images (the input stream from the socket) after the application is reopened following the home button press.
2) If I open the activity as a child activity on a double-tap event, surfaceDestroyed() is called and the thread is terminated.
There I need the image display to continue after returning from the child activity.
In both cases I get the exception java.lang.IllegalThreadStateException: Thread already started.
Could you please help me run the same thread without it terminating when the home button is pressed or another child activity is called?
Thanks & Regards
Yamini
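For context on the exception: java.lang.Thread.start() can only be called once per Thread object, which is exactly what IllegalThreadStateException reports. A minimal sketch of one common pattern (an illustration, not code from the post above): create a fresh thread each time the surface is created and stop it cleanly in surfaceDestroyed().
@Override
public void surfaceCreated(SurfaceHolder holder) {
    // Always create a new Thread object; a Thread that has finished can never be start()ed again.
    _thread = new TutorialThread(getHolder(), this);
    _thread.setRunning(true);
    _thread.start();
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
    boolean retry = true;
    _thread.setRunning(false);   // ask run() to leave its loop
    while (retry) {
        try {
            _thread.join();
            retry = false;
        } catch (InterruptedException e) {
            // keep waiting for the thread to finish
        }
    }
    _thread = null;
}
To keep the socket reading alive across the home button or a child activity, the network I/O would have to live outside this drawing thread (for example in a Service or a long-lived reader thread), with only the canvas-drawing loop tied to the surface lifecycle.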