I want to use the OpenCV4Android sample Tutorial1 to record video.
And I used this solution, but my smartphone does not display anything on the screen.
Just a black view. Can anyone help me?
Here is my code:
Tutorial1Activity
public class Tutorial1Activity extends Activity implements CvCameraViewListener2 {
private static final String TAG = "OCVSample::Activity";
private CameraBridgeViewBase mOpenCvCameraView;
private boolean mIsJavaCamera = true;
private MenuItem mItemSwitchCamera = null;
//*****************writetoSD***********************//
public FileWriter fw; // = new FileWriter(folder_path, false);
public BufferedWriter bw;// = new BufferedWriter(fw);
boolean first_in = true;
String showTimefile = null;
String showTime = null;
String folder_path = Environment.getExternalStorageDirectory().getAbsolutePath();
String folder_name = "Face Detection Signal";
String folder_pathforfile = null;
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd HH_mm_ss");
SimpleDateFormat sdf_fileintxt = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
//*****************writetoSD***********************//
//---------------MediaRecorder-------------------//
public MediaRecorder mediaRecorder;
Button bt_Record;
boolean isRecord = false;
Handler mThreadHandler;
HandlerThread mHandlerThread;
//---------------MediaRecorder-------------------//
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public Tutorial1Activity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.tutorial1_surface_view);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_java_surface_view);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
//---------------------------------------------------------//
bt_Record = (Button)findViewById(R.id.bt_recorder);
folder_pathforfile = folder_path + File.separator + folder_name
+ File.separator + "opencv" + "_";
CreateSDfolder();
ongetTime();
//---------------------------------------------------------//
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
}
public void onCameraViewStopped() {
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
return inputFrame.rgba();
}
private void CreateSDfolder() {
String filefolderpath = folder_path + File.separator + folder_name;
File dir = new File(filefolderpath);
if (!dir.exists()){
Log.e("folder", "not exist");
try{
//dir.createNewFile(true);
dir.mkdir();
Log.e("folder", "creat exist");
}catch(Exception e){
Log.e("folder", "creat not exist");
e.printStackTrace();
}
}
else{
Log.e("folder", "exist");
}
}
private void ongetTime() {
Date dt=new Date();
showTime=sdf_fileintxt.format(dt);
showTimefile =sdf.format(dt);
}
private void WritetoSD(String data) {
try {
fw = new FileWriter(folder_pathforfile + showTimefile+".txt", true);
bw = new BufferedWriter(fw);
if (first_in) {
first_in = false;
bw.append(showTime);
bw.newLine();
}
bw.append(data);
bw.newLine();
bw.flush();
bw.close();
} catch (IOException e) {
Log.e("WriteToSD", "Write To SD ERROR");
e.printStackTrace();
}
}
public void onRecordSignal (View v){
if(!isRecord){
isRecord = true;
Log.e(TAG, "button click " + isRecord);
bt_Record.setText("Stop");
//new MediaPrepareTask().execute(null, null, null);
if (prepareMediaRecorder()) {
// Camera is available and unlocked, MediaRecorder is prepared,
// now you can start recording
Log.e("debug_mediarecorder", "prepareMediaRecorder in if");
mOpenCvCameraView.setRecorder(mediaRecorder);
mediaRecorder.start();
} else {
// prepare didn't work, release the camera
Log.e("debug_mediarecorder", "prepareMediaRecorder in else");
// mediaRecorder.stop();
releaseMediaRecorder();
}
} else{
isRecord = false;
Log.e(TAG, "button click " + isRecord);
bt_Record.setText("Record");
try {
if(mediaRecorder != null)
mediaRecorder.stop(); // stop the recording
else
Log.e(TAG,"onRecordSignal mediaRecorder is null");
} catch (RuntimeException e) {
// RuntimeException is thrown when stop() is called immediately after start().
// In this case the output file is not properly constructed and should be deleted.
Log.d(TAG, "RuntimeException: stop() is called immediately after start()");
//noinspection ResultOfMethodCallIgnored
}
releaseMediaRecorder(); // release the MediaRecorder object
}
}
public void releaseMediaRecorder() {
Log.e("debug","releaseMediaRecorder");
if (mediaRecorder != null) {
mediaRecorder.reset(); // clear recorder configuration
mediaRecorder.release(); // release the recorder object
mediaRecorder = null;
JavaCameraView.mCamera.lock();
mOpenCvCameraView.releaseRecord();
}
}
private String recordfilepath() {
// TODO Auto-generated method stub
ongetTime();
File sddir = Environment.getExternalStorageDirectory();
File vrdir = new File(sddir, folder_name);
File file = new File(vrdir, showTimefile+"_.mp4");
String filepath = file.getAbsolutePath();
Log.e("debug mediarecorder", filepath);
return filepath;
}
public boolean prepareMediaRecorder() {
// TODO Auto-generated method stub
Log.e("debug mediarecorder", "in prepareMediaRecorder");
mediaRecorder = new MediaRecorder();
try {
JavaCameraView.mCamera.lock();
JavaCameraView.mCamera.unlock();
}catch (RuntimeException e){
Log.e("debug mediarecorder","JavaCameraView.mCamera.unlock() fail");
}
/*mediaRecorder.reset();
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mediaRecorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH));
//mediaRecorder.setPreviewDisplay(CameraBridgeViewBase.mSurfaceHolder.getSurface());
mediaRecorder.setOutputFile(recordfilepath());
//mediaRecorder.setOnInfoListener((MediaRecorder.OnInfoListener) this);
//mediaRecorder.setOnErrorListener((MediaRecorder.OnErrorListener) this);*/
mediaRecorder.reset();
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
CamcorderProfile cpHigh = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
mediaRecorder.setProfile(cpHigh);
//mediaRecorder.setOutputFile("out.mp4");
mediaRecorder.setOutputFile(recordfilepath());
mediaRecorder.setVideoSize(mOpenCvCameraView.mFrameWidth, mOpenCvCameraView.mFrameHeight);
//mediaRecorder.setOnInfoListener(this);
//mediaRecorder.setOnErrorListener(this);
try {
mediaRecorder.prepare();
} catch (IllegalStateException e) {
Log.e("debug mediarecorder", "not prepare");
releaseMediaRecorder();
return false;
} catch (IOException e) {
Log.e("debug mediarecorder", "not prepare IOException");
//releaseMediaRecorder();
}
return true;
}
}
CameraBridgeViewBase
public abstract class CameraBridgeViewBase extends SurfaceView implements SurfaceHolder.Callback {
private static final String TAG = "CameraBridge";
private static final int MAX_UNSPECIFIED = -1;
private static final int STOPPED = 0;
private static final int STARTED = 1;
private int mState = STOPPED;
private Bitmap mCacheBitmap;
private CvCameraViewListener2 mListener;
private boolean mSurfaceExist;
private Object mSyncObject = new Object();
public int mFrameWidth;
public int mFrameHeight;
protected int mMaxHeight;
protected int mMaxWidth;
protected float mScale = 0;
protected int mPreviewFormat = RGBA;
protected int mCameraIndex = CAMERA_ID_ANY;
protected boolean mEnabled;
protected FpsMeter mFpsMeter = null;
public static final int CAMERA_ID_ANY = -1;
public static final int CAMERA_ID_BACK = 99;
public static final int CAMERA_ID_FRONT = 98;
public static final int RGBA = 1;
public static final int GRAY = 2;
//-------------------------------------//
protected MediaRecorder mRecorder;
protected Surface mSurface = null;
public void setRecorder(MediaRecorder rec) {
mRecorder = rec;
//Log.e(TAG,mRecorder.toString());
if (mRecorder != null) {
mSurface = mRecorder.getSurface();
Log.e(TAG,"mRecorder is not null");
Log.e(TAG,"mSurface = "+mSurface.toString());
}
else{
Log.e(TAG,"mRecorder is null");
}
}
public void releaseRecord(){
mSurface.release();
}
//-------------------------------------//
public CameraBridgeViewBase(Context context, int cameraId) {
super(context);
mCameraIndex = cameraId;
getHolder().addCallback(this);
mMaxWidth = MAX_UNSPECIFIED;
mMaxHeight = MAX_UNSPECIFIED;
}
public CameraBridgeViewBase(Context context, AttributeSet attrs) {
super(context, attrs);
int count = attrs.getAttributeCount();
Log.d(TAG, "Attr count: " + Integer.valueOf(count));
TypedArray styledAttrs = getContext().obtainStyledAttributes(attrs, R.styleable.CameraBridgeViewBase);
if (styledAttrs.getBoolean(R.styleable.CameraBridgeViewBase_show_fps, false))
enableFpsMeter();
mCameraIndex = styledAttrs.getInt(R.styleable.CameraBridgeViewBase_camera_id, -1);
getHolder().addCallback(this);
mMaxWidth = MAX_UNSPECIFIED;
mMaxHeight = MAX_UNSPECIFIED;
styledAttrs.recycle();
}
/**
* Sets the camera index
* @param cameraIndex new camera index
*/
public void setCameraIndex(int cameraIndex) {
this.mCameraIndex = cameraIndex;
}
public interface CvCameraViewListener {
/**
* This method is invoked when camera preview has started. After this method is invoked
* the frames will start to be delivered to client via the onCameraFrame() callback.
* @param width - the width of the frames that will be delivered
* @param height - the height of the frames that will be delivered
*/
public void onCameraViewStarted(int width, int height);
/**
* This method is invoked when camera preview has been stopped for some reason.
* No frames will be delivered via onCameraFrame() callback after this method is called.
*/
public void onCameraViewStopped();
/**
* This method is invoked when delivery of the frame needs to be done.
* The returned value is a modified frame which needs to be displayed on the screen.
* TODO: pass the parameters specifying the format of the frame (BPP, YUV or RGB and etc)
*/
public Mat onCameraFrame(Mat inputFrame);
}
public interface CvCameraViewListener2 {
/**
* This method is invoked when camera preview has started. After this method is invoked
* the frames will start to be delivered to client via the onCameraFrame() callback.
* @param width - the width of the frames that will be delivered
* @param height - the height of the frames that will be delivered
*/
public void onCameraViewStarted(int width, int height);
/**
* This method is invoked when camera preview has been stopped for some reason.
* No frames will be delivered via onCameraFrame() callback after this method is called.
*/
public void onCameraViewStopped();
/**
* This method is invoked when delivery of the frame needs to be done.
* The returned value is a modified frame which needs to be displayed on the screen.
* TODO: pass the parameters specifying the format of the frame (BPP, YUV or RGB and etc)
*/
public Mat onCameraFrame(CvCameraViewFrame inputFrame);
};
protected class CvCameraViewListenerAdapter implements CvCameraViewListener2 {
public CvCameraViewListenerAdapter(CvCameraViewListener oldStypeListener) {
mOldStyleListener = oldStypeListener;
}
public void onCameraViewStarted(int width, int height) {
mOldStyleListener.onCameraViewStarted(width, height);
}
public void onCameraViewStopped() {
mOldStyleListener.onCameraViewStopped();
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
Mat result = null;
switch (mPreviewFormat) {
case RGBA:
result = mOldStyleListener.onCameraFrame(inputFrame.rgba());
break;
case GRAY:
result = mOldStyleListener.onCameraFrame(inputFrame.gray());
break;
default:
Log.e(TAG, "Invalid frame format! Only RGBA and Gray Scale are supported!");
};
return result;
}
public void setFrameFormat(int format) {
mPreviewFormat = format;
}
private int mPreviewFormat = RGBA;
private CvCameraViewListener mOldStyleListener;
};
/**
* This class interface is abstract representation of single frame from camera for onCameraFrame callback
* Attention: Do not use objects that represent this interface outside of the onCameraFrame callback!
*/
public interface CvCameraViewFrame {
/**
* This method returns RGBA Mat with frame
*/
public Mat rgba();
/**
* This method returns single channel gray scale Mat with frame
*/
public Mat gray();
};
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) {
Log.d(TAG, "call surfaceChanged event");
synchronized(mSyncObject) {
if (!mSurfaceExist) {
mSurfaceExist = true;
checkCurrentState();
} else {
/** Surface changed. We need to stop camera and restart with new parameters */
/* Pretend that old surface has been destroyed */
mSurfaceExist = false;
checkCurrentState();
/* Now use new surface. Say we have it now */
mSurfaceExist = true;
checkCurrentState();
}
}
}
public void surfaceCreated(SurfaceHolder holder) {
/* Do nothing. Wait until surfaceChanged delivered */
}
public void surfaceDestroyed(SurfaceHolder holder) {
synchronized(mSyncObject) {
mSurfaceExist = false;
checkCurrentState();
}
}
/**
* This method is provided for clients, so they can enable the camera connection.
* The actual onCameraViewStarted callback will be delivered only after both this method is called and surface is available
*/
public void enableView() {
synchronized(mSyncObject) {
mEnabled = true;
checkCurrentState();
}
}
/**
* This method is provided for clients, so they can disable camera connection and stop
* the delivery of frames even though the surface view itself is not destroyed and still stays on the screen
*/
public void disableView() {
synchronized(mSyncObject) {
mEnabled = false;
checkCurrentState();
}
}
/**
* This method enables label with fps value on the screen
*/
public void enableFpsMeter() {
if (mFpsMeter == null) {
mFpsMeter = new FpsMeter();
mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
}
}
public void disableFpsMeter() {
mFpsMeter = null;
}
/**
*
* @param listener
*/
public void setCvCameraViewListener(CvCameraViewListener2 listener) {
mListener = listener;
}
public void setCvCameraViewListener(CvCameraViewListener listener) {
CvCameraViewListenerAdapter adapter = new CvCameraViewListenerAdapter(listener);
adapter.setFrameFormat(mPreviewFormat);
mListener = adapter;
}
/**
* This method sets the maximum size that camera frame is allowed to be. When selecting
* size - the biggest size which is less than or equal to the set size will be selected.
* As an example - if we call setMaxFrameSize(200,200) and we have 176x152 and 320x240 sizes, the
* preview frame will be selected with 176x152 size.
* This method is useful when you need to restrict the size of the preview frame for some reason (for example for video recording)
* @param maxWidth - the maximum width allowed for camera frame.
* @param maxHeight - the maximum height allowed for camera frame
*/
public void setMaxFrameSize(int maxWidth, int maxHeight) {
mMaxWidth = maxWidth;
mMaxHeight = maxHeight;
}
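// Usage sketch (illustrative, not part of the original class): a client that
// records video could cap the preview before enabling the view, e.g.
//   mOpenCvCameraView.setMaxFrameSize(1280, 720);
//   mOpenCvCameraView.enableView();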
public void SetCaptureFormat(int format)
{
mPreviewFormat = format;
if (mListener instanceof CvCameraViewListenerAdapter) {
CvCameraViewListenerAdapter adapter = (CvCameraViewListenerAdapter) mListener;
adapter.setFrameFormat(mPreviewFormat);
}
}
/**
* Called when mSyncObject lock is held
*/
private void checkCurrentState() {
Log.d(TAG, "call checkCurrentState");
int targetState;
if (mEnabled && mSurfaceExist && getVisibility() == VISIBLE) {
targetState = STARTED;
} else {
targetState = STOPPED;
}
if (targetState != mState) {
/* The state change detected. Need to exit the current state and enter target state */
processExitState(mState);
mState = targetState;
processEnterState(mState);
}
}
private void processEnterState(int state) {
Log.d(TAG, "call processEnterState: " + state);
switch(state) {
case STARTED:
onEnterStartedState();
if (mListener != null) {
mListener.onCameraViewStarted(mFrameWidth, mFrameHeight);
}
break;
case STOPPED:
onEnterStoppedState();
if (mListener != null) {
mListener.onCameraViewStopped();
}
break;
};
}
private void processExitState(int state) {
Log.d(TAG, "call processExitState: " + state);
switch(state) {
case STARTED:
onExitStartedState();
break;
case STOPPED:
onExitStoppedState();
break;
};
}
private void onEnterStoppedState() {
/* nothing to do */
}
private void onExitStoppedState() {
/* nothing to do */
}
// NOTE: The order of bitmap constructor and camera connection is important for android 4.1.x
// Bitmap must be constructed before surface
private void onEnterStartedState() {
Log.d(TAG, "call onEnterStartedState");
/* Connect camera */
if (!connectCamera(getWidth(), getHeight())) {
AlertDialog ad = new AlertDialog.Builder(getContext()).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("It seems that you device does not support camera (or it is locked). Application will be closed.");
ad.setButton(DialogInterface.BUTTON_NEUTRAL, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
((Activity) getContext()).finish();
}
});
ad.show();
}
}
private void onExitStartedState() {
disconnectCamera();
if (mCacheBitmap != null) {
mCacheBitmap.recycle();
}
}
/**
* This method shall be called by the subclasses when they have a valid
* frame and want it to be delivered to the external client (via callback) and
* then displayed on the screen.
* @param frame - the current frame to be delivered
*/
protected void deliverAndDrawFrame(CvCameraViewFrame frame) {
Mat modified;
if (mListener != null) {
modified = mListener.onCameraFrame(frame);
} else {
modified = frame.rgba();
}
boolean bmpValid = true;
if (modified != null) {
try {
Utils.matToBitmap(modified, mCacheBitmap);
} catch(Exception e) {
Log.e(TAG, "Mat type: " + modified);
Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmpValid = false;
}
}
if (bmpValid && mCacheBitmap != null) {
Canvas canvas;
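// Note: in this modified version the frame is drawn only to the recorder's
// input surface (the branch below); when mRecorder is null, nothing is
// drawn to the SurfaceView itself.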
if (mRecorder != null) {
canvas = mSurface.lockCanvas(null);
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
Log.d(TAG, "mStretch value: " + mScale);
if (mScale != 0) {
canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
new Rect((int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2),
(int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2),
(int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2 + mScale*mCacheBitmap.getWidth()),
(int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2 + mScale*mCacheBitmap.getHeight())), null);
} else {
canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
new Rect((canvas.getWidth() - mCacheBitmap.getWidth()) / 2,
(canvas.getHeight() - mCacheBitmap.getHeight()) / 2,
(canvas.getWidth() - mCacheBitmap.getWidth()) / 2 + mCacheBitmap.getWidth(),
(canvas.getHeight() - mCacheBitmap.getHeight()) / 2 + mCacheBitmap.getHeight()), null);
}
if (mFpsMeter != null) {
mFpsMeter.measure();
mFpsMeter.draw(canvas, 20, 30);
}
mSurface.unlockCanvasAndPost(canvas);
}
}
}
/**
* This method shall perform the concrete operation to initialize the camera.
* CONTRACT: as a result of this method variables mFrameWidth and mFrameHeight MUST be
* initialized with the size of the Camera frames that will be delivered to external processor.
* @param width - the width of this SurfaceView
* @param height - the height of this SurfaceView
*/
protected abstract boolean connectCamera(int width, int height);
/**
* Disconnects and releases the particular camera object being connected to this surface view.
* Called when syncObject lock is held
*/
protected abstract void disconnectCamera();
// NOTE: On Android 4.1.x the function must be called before SurfaceTexture constructor!
protected void AllocateCache()
{
mCacheBitmap = Bitmap.createBitmap(mFrameWidth, mFrameHeight, Bitmap.Config.ARGB_8888);
}
public interface ListItemAccessor {
public int getWidth(Object obj);
public int getHeight(Object obj);
};
/**
* This helper method can be called by subclasses to select camera preview size.
* It goes over the list of the supported preview sizes and selects the maximum one which
* fits both values set via setMaxFrameSize() and surface frame allocated for this view
* @param supportedSizes
* @param surfaceWidth
* @param surfaceHeight
* @return optimal frame size
*/
protected Size calculateCameraFrameSize(List<?> supportedSizes, ListItemAccessor accessor, int surfaceWidth, int surfaceHeight) {
int calcWidth = 0;
int calcHeight = 0;
int maxAllowedWidth = (mMaxWidth != MAX_UNSPECIFIED && mMaxWidth < surfaceWidth)? mMaxWidth : surfaceWidth;
int maxAllowedHeight = (mMaxHeight != MAX_UNSPECIFIED && mMaxHeight < surfaceHeight)? mMaxHeight : surfaceHeight;
for (Object size : supportedSizes) {
int width = accessor.getWidth(size);
int height = accessor.getHeight(size);
if (width <= maxAllowedWidth && height <= maxAllowedHeight) {
if (width >= calcWidth && height >= calcHeight) {
calcWidth = (int) width;
calcHeight = (int) height;
}
}
}
return new Size(calcWidth, calcHeight);
}
}
And I added these permissions in the Manifest:
<uses-feature android:name="android.hardware.camera" android:required="false"/>
<uses-feature android:name="android.hardware.camera.autofocus" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front.autofocus" android:required="false"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.WAKE_LOCK" />
<uses-permission android:name="android.permission.RECORD_VIDEO" />
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
Related
I'm developing an image capture app in which, after capturing an image, if you do not want to save the image, you press the back button [in onBackPressed I launch a new activity, i.e. BottomNavigationActivity] and it should switch to the home activity (like a normal camera app does).
@Override
public void onBackPressed()
{
startActivity(new Intent(getApplicationContext(), BottomNavigationActivity.class));
}
But when I press the back button, a TransactionTooLargeException occurs and then the app crashes. I don't know why this happens, since I am not transferring any huge amount of data like an ArrayList etc. Please help me solve this problem.
Thanks in advance.
CameraFragment.java [here I capture the image]
public class CameraFragment extends Fragment implements SurfaceHolder.Callback, Camera.PictureCallback
{
public static final String TAG = CameraFragment.class.getSimpleName();
public static final String CAMERA_ID_KEY = "camera_id";
public static final String CAMERA_FLASH_KEY = "flash_mode";
public static final String IMAGE_INFO = "image_info";
private static final int PICTURE_SIZE_MAX_WIDTH = 1280;
private static final int PREVIEW_SIZE_MAX_WIDTH = 640;
private int mCameraID;
private String mFlashMode;
private Camera mCamera;
private SquareCameraPreview mPreviewView;
private SurfaceHolder mSurfaceHolder;
private boolean mIsSafeToTakePhoto = false;
private ImageParameters mImageParameters;
private CameraOrientationListener mOrientationListener;
ImageView iv_camera_close;
public static Fragment newInstance() {
return new CameraFragment();
}
public CameraFragment() {}
@Override
public void onAttach(Context context) {
super.onAttach(context);
mOrientationListener = new CameraOrientationListener(context);
}
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Restore your state here because a double rotation with this fragment
// in the backstack will cause improper state restoration
// onCreate() -> onSavedInstanceState() instead of going through onCreateView()
if (savedInstanceState == null) {
mCameraID = getBackCameraID();
mFlashMode = CameraSettingPreferences.getCameraFlashMode(getActivity());
mImageParameters = new ImageParameters();
}
else
{
mCameraID = savedInstanceState.getInt(CAMERA_ID_KEY);
mFlashMode = savedInstanceState.getString(CAMERA_FLASH_KEY);
mImageParameters = savedInstanceState.getParcelable(IMAGE_INFO);
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState)
{
return inflater.inflate(R.layout.squarecamera__fragment_camera, container, false);
}
@Override
public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
mOrientationListener.enable();
mPreviewView = (SquareCameraPreview) view.findViewById(R.id.camera_preview_view);
mPreviewView.getHolder().addCallback(CameraFragment.this);
final View topCoverView = view.findViewById(R.id.cover_top_view);
final View btnCoverView = view.findViewById(R.id.cover_bottom_view);
iv_camera_close = view.findViewById(R.id.iv_camera_close);
iv_camera_close.setOnClickListener(new View.OnClickListener()
{
@Override
public void onClick(View v)
{
startActivity(new Intent(getContext(), BottomNavigationActivity.class));
}
});
mImageParameters.mIsPortrait =
getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT;
if (savedInstanceState == null) {
ViewTreeObserver observer = mPreviewView.getViewTreeObserver();
observer.addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
@Override
public void onGlobalLayout() {
mImageParameters.mPreviewWidth = mPreviewView.getWidth();
mImageParameters.mPreviewHeight = mPreviewView.getHeight();
mImageParameters.mCoverWidth = mImageParameters.mCoverHeight
= mImageParameters.calculateCoverWidthHeight();
// Log.d(TAG, "parameters: " + mImageParameters.getStringValues());
// Log.d(TAG, "cover height " + topCoverView.getHeight());
resizeTopAndBtmCover(topCoverView, btnCoverView);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
mPreviewView.getViewTreeObserver().removeOnGlobalLayoutListener(this);
} else {
mPreviewView.getViewTreeObserver().removeGlobalOnLayoutListener(this);
}
}
});
} else {
if (mImageParameters.isPortrait()) {
topCoverView.getLayoutParams().height = mImageParameters.mCoverHeight;
btnCoverView.getLayoutParams().height = mImageParameters.mCoverHeight;
} else {
topCoverView.getLayoutParams().width = mImageParameters.mCoverWidth;
btnCoverView.getLayoutParams().width = mImageParameters.mCoverWidth;
}
}
final ImageView swapCameraBtn = (ImageView) view.findViewById(R.id.change_camera);
swapCameraBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (mCameraID == CameraInfo.CAMERA_FACING_FRONT) {
mCameraID = getBackCameraID();
} else {
mCameraID = getFrontCameraID();
}
restartPreview();
}
});
final View changeCameraFlashModeBtn = view.findViewById(R.id.flash);
changeCameraFlashModeBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (mFlashMode.equalsIgnoreCase(Camera.Parameters.FLASH_MODE_AUTO)) {
mFlashMode = Camera.Parameters.FLASH_MODE_ON;
} else if (mFlashMode.equalsIgnoreCase(Camera.Parameters.FLASH_MODE_ON)) {
mFlashMode = Camera.Parameters.FLASH_MODE_OFF;
} else if (mFlashMode.equalsIgnoreCase(Camera.Parameters.FLASH_MODE_OFF)) {
mFlashMode = Camera.Parameters.FLASH_MODE_AUTO;
}
setupFlashMode();
setupCamera();
}
});
setupFlashMode();
final ImageView takePhotoBtn = (ImageView) view.findViewById(R.id.capture_image_button);
takePhotoBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
takePicture();
}
});
}
private void setupFlashMode() {
View view = getView();
if (view == null) return;
final TextView autoFlashIcon = (TextView) view.findViewById(R.id.auto_flash_icon);
if (Camera.Parameters.FLASH_MODE_AUTO.equalsIgnoreCase(mFlashMode)) {
autoFlashIcon.setText("Auto");
} else if (Camera.Parameters.FLASH_MODE_ON.equalsIgnoreCase(mFlashMode)) {
autoFlashIcon.setText("On");
} else if (Camera.Parameters.FLASH_MODE_OFF.equalsIgnoreCase(mFlashMode)) {
autoFlashIcon.setText("Off");
}
}
@Override
public void onSaveInstanceState(Bundle outState) {
// Log.d(TAG, "onSaveInstanceState");
outState.putInt(CAMERA_ID_KEY, mCameraID);
outState.putString(CAMERA_FLASH_KEY, mFlashMode);
outState.putParcelable(IMAGE_INFO, mImageParameters);
super.onSaveInstanceState(outState);
}
private void resizeTopAndBtmCover( final View topCover, final View bottomCover) {
ResizeAnimation resizeTopAnimation
= new ResizeAnimation(topCover, mImageParameters);
resizeTopAnimation.setDuration(800);
resizeTopAnimation.setInterpolator(new AccelerateDecelerateInterpolator());
topCover.startAnimation(resizeTopAnimation);
ResizeAnimation resizeBtmAnimation
= new ResizeAnimation(bottomCover, mImageParameters);
resizeBtmAnimation.setDuration(800);
resizeBtmAnimation.setInterpolator(new AccelerateDecelerateInterpolator());
bottomCover.startAnimation(resizeBtmAnimation);
}
private void getCamera(int cameraID) {
try {
mCamera = Camera.open(cameraID);
mPreviewView.setCamera(mCamera);
} catch (Exception e) {
Log.d(TAG, "Can't open camera with id " + cameraID);
e.printStackTrace();
}
}
/**
* Restart the camera preview
*/
private void restartPreview() {
if (mCamera != null) {
stopCameraPreview();
mCamera.release();
mCamera = null;
}
getCamera(mCameraID);
startCameraPreview();
}
/**
* Start the camera preview
*/
private void startCameraPreview() {
determineDisplayOrientation();
setupCamera();
try {
mCamera.setPreviewDisplay(mSurfaceHolder);
mCamera.startPreview();
setSafeToTakePhoto(true);
setCameraFocusReady(true);
} catch (IOException e) {
Log.d(TAG, "Can't start camera preview due to IOException " + e);
e.printStackTrace();
}
}
/**
* Stop the camera preview
*/
private void stopCameraPreview() {
setSafeToTakePhoto(false);
setCameraFocusReady(false);
// Nulls out callbacks, stops face detection
mCamera.stopPreview();
mPreviewView.setCamera(null);
}
private void setSafeToTakePhoto(final boolean isSafeToTakePhoto) {
mIsSafeToTakePhoto = isSafeToTakePhoto;
}
private void setCameraFocusReady(final boolean isFocusReady) {
if (this.mPreviewView != null) {
mPreviewView.setIsFocusReady(isFocusReady);
}
}
private void determineDisplayOrientation() {
CameraInfo cameraInfo = new CameraInfo();
Camera.getCameraInfo(mCameraID, cameraInfo);
// Clockwise rotation needed to align the window display to the natural position
int rotation = getActivity().getWindowManager().getDefaultDisplay().getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0: {
degrees = 0;
break;
}
case Surface.ROTATION_90: {
degrees = 90;
break;
}
case Surface.ROTATION_180: {
degrees = 180;
break;
}
case Surface.ROTATION_270: {
degrees = 270;
break;
}
}
int displayOrientation;
if (cameraInfo.facing == CameraInfo.CAMERA_FACING_FRONT) {
// Orientation is angle of rotation when facing the camera for
// the camera image to match the natural orientation of the device
displayOrientation = (cameraInfo.orientation + degrees) % 360;
displayOrientation = (360 - displayOrientation) % 360;
} else {
displayOrientation = (cameraInfo.orientation - degrees + 360) % 360;
}
mImageParameters.mDisplayOrientation = displayOrientation;
mImageParameters.mLayoutOrientation = degrees;
mCamera.setDisplayOrientation(mImageParameters.mDisplayOrientation);
}
private void setupCamera() {
// Never keep a global parameters
Camera.Parameters parameters = mCamera.getParameters();
Size bestPreviewSize = determineBestPreviewSize(parameters);
Size bestPictureSize = determineBestPictureSize(parameters);
parameters.setPreviewSize(bestPreviewSize.width, bestPreviewSize.height);
parameters.setPictureSize(bestPictureSize.width, bestPictureSize.height);
// Set continuous picture focus, if it's supported
if (parameters.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
}
final View changeCameraFlashModeBtn = getView().findViewById(R.id.flash);
List<String> flashModes = parameters.getSupportedFlashModes();
if (flashModes != null && flashModes.contains(mFlashMode)) {
parameters.setFlashMode(mFlashMode);
changeCameraFlashModeBtn.setVisibility(View.VISIBLE);
} else {
changeCameraFlashModeBtn.setVisibility(View.INVISIBLE);
}
mCamera.setParameters(parameters);
}
private Size determineBestPreviewSize(Camera.Parameters parameters) {
return determineBestSize(parameters.getSupportedPreviewSizes(), PREVIEW_SIZE_MAX_WIDTH);
}
private Size determineBestPictureSize(Camera.Parameters parameters) {
return determineBestSize(parameters.getSupportedPictureSizes(), PICTURE_SIZE_MAX_WIDTH);
}
private Size determineBestSize(List<Size> sizes, int widthThreshold) {
Size bestSize = null;
Size size;
int numOfSizes = sizes.size();
for (int i = 0; i < numOfSizes; i++) {
size = sizes.get(i);
boolean isDesiredRatio = (size.width / 4) == (size.height / 3);
boolean isBetterSize = (bestSize == null) || size.width > bestSize.width;
if (isDesiredRatio && isBetterSize) {
bestSize = size;
}
}
if (bestSize == null) {
Log.d(TAG, "cannot find the best camera size");
return sizes.get(sizes.size() - 1);
}
return bestSize;
}
private int getFrontCameraID() {
PackageManager pm = getActivity().getPackageManager();
if (pm.hasSystemFeature(PackageManager.FEATURE_CAMERA_FRONT)) {
return CameraInfo.CAMERA_FACING_FRONT;
}
return getBackCameraID();
}
private int getBackCameraID() {
return CameraInfo.CAMERA_FACING_BACK;
}
private void takePicture() {
if (mIsSafeToTakePhoto) {
setSafeToTakePhoto(false);
mOrientationListener.rememberOrientation();
Camera.ShutterCallback shutterCallback = null;
Camera.PictureCallback raw = null;
// postView callback occurs when a scaled, fully processed
// postView image is available.
Camera.PictureCallback postView = null;
mCamera.takePicture(shutterCallback, raw, postView, this);
}
}
@Override
public void onResume() {
super.onResume();
if (mCamera == null) {
restartPreview();
}
}
@Override
public void onStop() {
mOrientationListener.disable();
// stop the preview
if (mCamera != null) {
stopCameraPreview();
mCamera.release();
mCamera = null;
}
CameraSettingPreferences.saveCameraFlashMode(getActivity(), mFlashMode);
super.onStop();
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
mSurfaceHolder = holder;
getCamera(mCameraID);
startCameraPreview();
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
// The surface is destroyed when the visibility of the SurfaceView is set to View.INVISIBLE
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
if (resultCode != Activity.RESULT_OK) return;
switch (requestCode) {
case 1:
Uri imageUri = data.getData();
break;
default:
super.onActivityResult(requestCode, resultCode, data);
}
}
/**
* A picture has been taken
* @param data
* @param camera
*/
@Override
public void onPictureTaken(byte[] data, Camera camera) {
int rotation = getPhotoRotation();
getFragmentManager()
.beginTransaction()
.replace(
R.id.fragment_container,
EditSavePhotoFragment.newInstance(data, rotation, mImageParameters.createCopy()),
EditSavePhotoFragment.TAG)
.addToBackStack(null)
.commit();
setSafeToTakePhoto(true);
}
private int getPhotoRotation() {
int rotation;
int orientation = mOrientationListener.getRememberedNormalOrientation();
CameraInfo info = new CameraInfo();
Camera.getCameraInfo(mCameraID, info);
if (info.facing == CameraInfo.CAMERA_FACING_FRONT) {
rotation = (info.orientation - orientation + 360) % 360;
} else {
rotation = (info.orientation + orientation) % 360;
}
return rotation;
}
/**
* When orientation changes, onOrientationChanged(int) of the listener will be called
*/
private static class CameraOrientationListener extends OrientationEventListener {
private int mCurrentNormalizedOrientation;
private int mRememberedNormalOrientation;
public CameraOrientationListener(Context context) {
super(context, SensorManager.SENSOR_DELAY_NORMAL);
}
@Override
public void onOrientationChanged(int orientation) {
if (orientation != ORIENTATION_UNKNOWN) {
mCurrentNormalizedOrientation = normalize(orientation);
}
}
/**
* @param degrees Amount of clockwise rotation from the device's natural position
* @return Normalized degrees to just 0, 90, 180, 270
*/
private int normalize(int degrees) {
if (degrees > 315 || degrees <= 45) {
return 0;
}
if (degrees > 45 && degrees <= 135) {
return 90;
}
if (degrees > 135 && degrees <= 225) {
return 180;
}
if (degrees > 225 && degrees <= 315) {
return 270;
}
throw new RuntimeException("The physics as we know them are no more. Watch out for anomalies.");
}
public void rememberOrientation() {
mRememberedNormalOrientation = mCurrentNormalizedOrientation;
}
public int getRememberedNormalOrientation() {
rememberOrientation();
return mRememberedNormalOrientation;
}
}
}
EditSavePhotoFragment.java [here I display the preview of the image]
This is the code that switches the activity; when I call it, my app crashes:
view.findViewById(R.id.cancel).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
//getActivity().onBackPressed();
getActivity().startActivity(new Intent(getContext(), BottomNavigationActivity.class));
}
});
Below is the full source code:
public class EditSavePhotoFragment extends Fragment {
public static final String TAG = EditSavePhotoFragment.class.getSimpleName();
public static String BITMAP_KEY = "bitmap_byte_array";
public static String ROTATION_KEY = "rotation";
public static String IMAGE_INFO = "image_info";
private static final int REQUEST_STORAGE = 1;
SharedPreferences pref;
SharedPreferences.Editor editor;
private static final String PHOTO_PREF = "PHOTO_PREF";
private static final String CAPTURE_IMAGE_PATH = "CAPTURE_IMAGE_PATH";
public static Fragment newInstance(byte[] bitmapByteArray, int rotation,
@NonNull ImageParameters parameters) {
Fragment fragment = new EditSavePhotoFragment();
Bundle args = new Bundle();
args.putByteArray(BITMAP_KEY, bitmapByteArray);
args.putInt(ROTATION_KEY, rotation);
args.putParcelable(IMAGE_INFO, parameters);
fragment.setArguments(args);
return fragment;
}
public EditSavePhotoFragment() {}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,Bundle savedInstanceState)
{
View view = inflater.inflate(R.layout.squarecamera__fragment_edit_save_photo, container, false);
pref = getContext().getSharedPreferences(PHOTO_PREF, 0);
editor = pref.edit();
return view;
}
@Override
public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
int rotation = getArguments().getInt(ROTATION_KEY);
byte[] data = getArguments().getByteArray(BITMAP_KEY);
ImageParameters imageParameters = getArguments().getParcelable(IMAGE_INFO);
if (imageParameters == null) {
return;
}
final ImageView photoImageView = (ImageView) view.findViewById(R.id.photo);
imageParameters.mIsPortrait =
getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT;
final View topView = view.findViewById(R.id.topView);
if (imageParameters.mIsPortrait) {
topView.getLayoutParams().height = imageParameters.mCoverHeight;
} else {
topView.getLayoutParams().width = imageParameters.mCoverWidth;
}
rotatePicture(rotation, data, photoImageView);
view.findViewById(R.id.save_photo).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
savePicture(photoImageView);
}
});
view.findViewById(R.id.cancel).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
//getActivity().onBackPressed();
getActivity().startActivity(new Intent(getContext(), BottomNavigationActivity.class));
}
});
}
private void rotatePicture(int rotation, byte[] data, ImageView photoImageView) {
Bitmap bitmap = ImageUtility.decodeSampledBitmapFromByte(getActivity(), data);
// Log.d(TAG, "original bitmap width " + bitmap.getWidth() + " height " + bitmap.getHeight());
if (rotation != 0) {
Bitmap oldBitmap = bitmap;
Matrix matrix = new Matrix();
matrix.postRotate(rotation);
bitmap = Bitmap.createBitmap(
oldBitmap, 0, 0, oldBitmap.getWidth(), oldBitmap.getHeight(), matrix, false
);
oldBitmap.recycle();
}
photoImageView.setImageBitmap(bitmap);
}
private void savePicture(ImageView imageView)
{
/*
try
{
SaveImageMethod(imageView);
}
catch (IOException e)
{
e.printStackTrace();
}
*/
requestForPermission();
}
private void SaveImageMethod(ImageView iv) throws IOException
{
BitmapDrawable draw = (BitmapDrawable) iv.getDrawable();
Bitmap bitmap = draw.getBitmap();
FileOutputStream outStream = null;
File sdCard = Environment.getExternalStorageDirectory();
File dir = new File(sdCard.getAbsolutePath() + "/SelfiLife");
dir.mkdirs();
String fileName = String.format("%d.jpg", System.currentTimeMillis());
File outFile = new File(dir, fileName);
Log.d("MainActivity","new IMAGE PATH = "+outFile);
editor.putString(CAPTURE_IMAGE_PATH, String.valueOf(outFile));
editor.commit();
outStream = new FileOutputStream(outFile);
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, outStream);
outStream.flush();
outStream.close();
}
private void requestForPermission() {
RuntimePermissionActivity.startActivity(EditSavePhotoFragment.this,
REQUEST_STORAGE,
Manifest.permission.WRITE_EXTERNAL_STORAGE);
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
if (Activity.RESULT_OK != resultCode) return;
if (REQUEST_STORAGE == requestCode && data != null) {
final boolean isGranted = data.getBooleanExtra(RuntimePermissionActivity.REQUESTED_PERMISSION, false);
final View view = getView();
if (isGranted && view != null) {
ImageView photoImageView = (ImageView) view.findViewById(R.id.photo);
Bitmap bitmap = ((BitmapDrawable) photoImageView.getDrawable()).getBitmap();
Uri photoUri = ImageUtility.savePicture(getActivity(), bitmap);
((CameraActivity) getActivity()).returnPhotoUri(photoUri);
}
} else {
super.onActivityResult(requestCode, resultCode, data);
}
}
}
I am working on a camera-related app. I have successfully implemented photo capture and photo preview, but one issue arose: when I take a photo, the capture is successful but the preview is mirrored. I have tried many solutions from Stack Overflow, but luck has not favored me so far.
My Camera Preview class is:
public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback {
private SurfaceHolder mHolder;
private Camera mCamera;
private Context mContext;
public CameraPreview(Context context, Camera camera) {
super(context);
mContext = context;
mCamera = camera;
mHolder = getHolder();
mHolder.addCallback(this);
// deprecated setting, but required on Android versions prior to 3.0
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder holder) {
try {
// create the surface and start camera preview
if (mCamera != null) {
mCamera.setPreviewDisplay(holder);
mCamera.startPreview();
}
} catch (IOException e) {
Log.d(VIEW_LOG_TAG, "Error setting camera preview: " + e.getMessage());
}
}
public void refreshCamera(Camera camera) {
if (mHolder.getSurface() == null) {
// preview surface does not exist
return;
}
// stop preview before making changes
try {
mCamera.stopPreview();
} catch (Exception e) {
// ignore: tried to stop a non-existent preview
}
// set preview size and make any resize, rotate or
// reformatting changes here
// start preview with new settings
setCamera(camera);
// TODO: don't hardcode cameraId '0' here... figure this out later.
setCameraDisplayOrientation(mContext, Camera.CameraInfo.CAMERA_FACING_BACK, mCamera);
try {
mCamera.setPreviewDisplay(mHolder);
mCamera.startPreview();
} catch (Exception e) {
Log.d(VIEW_LOG_TAG, "Error starting camera preview: " + e.getMessage());
}
}
public static void setCameraDisplayOrientation(Context context, int cameraId, Camera camera) {
WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
int rotation = wm.getDefaultDisplay().getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
}
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(cameraId, info);
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (info.orientation + degrees) % 360;
// Compensate for the mirror image.
result = (360 - result) % 360;
} else {
// Back-facing camera.
result = (info.orientation - degrees + 360) % 360;
}
camera.setDisplayOrientation(result);
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
// If your preview can change or rotate, take care of those events here.
// Make sure to stop the preview before resizing or reformatting it.
refreshCamera(mCamera);
}
public void setCamera(Camera camera) {
//method to set a camera instance
mCamera = camera;
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
// TODO Auto-generated method stub
// mCamera.release();
}
}
and my PhotoCaptureActivity class is below:
public class PhotoCaptureActivity extends AppCompatActivity {
private static final String TAG = "PhotoCaptureActivity";
//Arraylist for image timer animation
int[] imageArray = {R.drawable.ic_five_128, R.drawable.ic_four_128,
R.drawable.ic_three_128, R.drawable.ic_two_128, R.drawable.ic_one_128,
R.drawable.ic_smiley_128
};
int i = 0;
private final static int DELAY = 1000;
private final Handler handler = new Handler();
Timer timer = new Timer();
Thread timerThread;
// Create variable to handle progress and set it to 0.
private int progress = 0;
Bitmap bitmap,photoCaptureBitmap;
private Camera mCamera;
private CameraPreview mPreview;
private PictureCallback mPicture;
private ImageButton capture, switchCamera;
private Context myContext;
private LinearLayout cameraPreview;
private boolean cameraFront = false;
private ImageView capturedImageHolder;
private ProgressBar progressBar_take_photo;
ImageButton next_button_take_photo;
ImageButton back_button_take_photo;
TextView photo_title_text;
ImageView image_animation;
private MyPreferences myPreferences;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getSupportActionBar().setDisplayShowTitleEnabled(false);
setContentView(R.layout.photo_capture_activity);
//setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
Logger.addLogAdapter(new AndroidLogAdapter());
myContext = this;
initialize();
myPreferences = MyPreferences.getPreferences(this);
nextButton();
backButton();
progressBar();
nameTextShow();
}
private int findFrontFacingCamera() {
int cameraId = -1;
// Search for the front facing camera
int numberOfCameras = Camera.getNumberOfCameras();
for (int i = 0; i < numberOfCameras; i++) {
CameraInfo info = new CameraInfo();
Camera.getCameraInfo(i, info);
if (info.facing == CameraInfo.CAMERA_FACING_FRONT) {
cameraId = i;
cameraFront = true;
break;
}
}
return cameraId;
}
private int findBackFacingCamera() {
int cameraId = -1;
//Search for the back facing camera
//get the number of cameras
int numberOfCameras = Camera.getNumberOfCameras();
//for every camera check
for (int i = 0; i < numberOfCameras; i++) {
CameraInfo info = new CameraInfo();
Camera.getCameraInfo(i, info);
if (info.facing == CameraInfo.CAMERA_FACING_BACK) {
cameraId = i;
cameraFront = false;
break;
}
}
return cameraId;
}
public void onResume() {
super.onResume();
if (!hasCamera(myContext)) {
Toast toast = Toast.makeText(myContext, "Sorry, your phone does not have a camera!", Toast.LENGTH_LONG);
toast.show();
finish();
}
if (mCamera == null) {
//if the front facing camera does not exist
if (findFrontFacingCamera() < 0) {
//Toast.makeText(this, "", Toast.LENGTH_LONG).show();
//switchCamera.setVisibility(View.GONE);
}
mCamera = Camera.open(findFrontFacingCamera());
mPicture = getPictureCallback();
mPreview.refreshCamera(mCamera);
}else{
//Visibility
cameraPreview.setVisibility(View.GONE);
capturedImageHolder.setVisibility(View.VISIBLE);
String photoCapturePreview = myPreferences.getVisitorPhoto();
if(!photoCapturePreview.equals("")) {
photoCaptureBitmap = decodeToBase64(photoCapturePreview);
}
capturedImageHolder.setImageBitmap(photoCaptureBitmap);
}
}
public static Bitmap decodeToBase64(String input) {
byte[] decodedByte = Base64.decode(input, 0);
return BitmapFactory.decodeByteArray(decodedByte, 0, decodedByte.length);
}
// Code for initialization
public void initialize() {
cameraPreview = (LinearLayout) findViewById(R.id.cameraFrame);
mPreview = new CameraPreview(myContext, mCamera);
cameraPreview.addView(mPreview);
capture = (ImageButton) findViewById(R.id.button_capture);
capture.setOnClickListener(captrureListener);
//switchCamera = (Button) findViewById(R.id.button_ChangeCamera);
//switchCamera.setOnClickListener(switchCameraListener);
image_animation = (ImageView) findViewById(R.id.timer_text);
capturedImageHolder = (ImageView) findViewById(R.id.captured_image);
next_button_take_photo = (ImageButton) findViewById(R.id.next_button_take_photo);
back_button_take_photo = (ImageButton) findViewById(R.id.back_button_take_photo);
progressBar_take_photo = (ProgressBar) findViewById(R.id.progressBar_take_photo);
photo_title_text = (TextView) findViewById(R.id.photo_title_text);
}
//Next Button operation based on mobile number input
private void nextButton() {
next_button_take_photo.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
hideKeyboard();
if (bitmap != null) {
Intent newIntent = new Intent(getApplicationContext(), ConfirmationActivity.class);
startActivity(newIntent);
}
}
});
}
//Back button operation
private void backButton() {
back_button_take_photo.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent newIntent = new Intent(getApplicationContext(), AddressInputActivity.class);
startActivity(newIntent);
}
});
}
//Progress bar operation
private void progressBar() {
//simpleProgressBar.setMax(100); // 100 maximum value for the progress value
//simpleProgressBar.setProgress(50); // 50 default progress value for the progress bar
// Get the Drawable custom_progressbar
//Drawable draw=res.getDrawable(R.drawable.custom_progressbar);
// set the drawable as progress drawable
//progressBar.setProgressDrawable(draw);
}
//Normal text show
private void nameTextShow() {
String visitorName = myPreferences.getVisitorName();
photo_title_text.setText(visitorName + ", please smile for the Camera");
}
OnClickListener switchCameraListener = new OnClickListener() {
@Override
public void onClick(View v) {
//get the number of cameras
int camerasNumber = Camera.getNumberOfCameras();
if (camerasNumber > 1) {
//release the old camera instance
//switch camera, from the front and the back and vice versa
releaseCamera();
chooseCamera();
} else {
Toast toast = Toast.makeText(myContext, "Sorry, your phone has only one camera!", Toast.LENGTH_LONG);
toast.show();
}
}
};
public void chooseCamera() {
//if the camera preview is the front
if (cameraFront) {
int cameraId = findBackFacingCamera();
if (cameraId >= 0) {
//open the backFacingCamera
//set a picture callback
//refresh the preview
mCamera = Camera.open(cameraId);
mPicture = getPictureCallback();
mPreview.refreshCamera(mCamera);
}
} else {
int cameraId = findFrontFacingCamera();
if (cameraId >= 0) {
//open the backFacingCamera
//set a picture callback
//refresh the preview
mCamera = Camera.open(cameraId);
mPicture = getPictureCallback();
mPreview.refreshCamera(mCamera);
}
}
}
@Override
protected void onPause() {
super.onPause();
//when on Pause, release camera in order to be used from other applications
releaseCamera();
}
private boolean hasCamera(Context context) {
//check if the device has camera
if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_ANY)) {
return true;
} else {
return false;
}
}
private PictureCallback getPictureCallback() {
PictureCallback picture = new PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
if (data != null) {
//make a new picture file
File pictureFile = getOutputMediaFile();
bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
String imageEncoded = Base64.encodeToString(data, Base64.DEFAULT);
myPreferences.setVisitorPhoto(imageEncoded);
if (bitmap == null) {
return;
}
try {
//write the file
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(data);
fos.close();
//Toast toast = Toast.makeText(myContext, "Picture saved: " + pictureFile.getName(), Toast.LENGTH_LONG);
//toast.show();
//Method for image view
captureImageView();
} catch (FileNotFoundException e) {
} catch (IOException e) {
}
//refresh camera to continue preview
mPreview.refreshCamera(mCamera);
}
}
};
return picture;
}
private void captureImageView(){
//Visibitlity of camera photo
cameraPreview.setVisibility(View.GONE);
capturedImageHolder.setVisibility(View.VISIBLE);
int screenWidth = getResources().getDisplayMetrics().widthPixels;
int screenHeight = getResources().getDisplayMetrics().heightPixels;
Camera.CameraInfo info = new Camera.CameraInfo();
Matrix mtx = new Matrix();
if (getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
// Notice that width and height are reversed
Bitmap scaled = Bitmap.createScaledBitmap(bitmap, screenHeight, screenWidth, true);
int w = scaled.getWidth();
int h = scaled.getHeight();
// Perform matrix rotations/mirrors depending on camera that took the photo
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT)
{
float[] mirrorY = { -1, 0, 0, 0, 1, 0, 0, 0, 1};
Matrix matrixMirrorY = new Matrix();
matrixMirrorY.setValues(mirrorY);
mtx.postConcat(matrixMirrorY);
}
// Setting post rotate to 270
mtx.postRotate(270);
// Rotating Bitmap
bitmap = Bitmap.createBitmap(scaled, 0, 0, w, h, mtx, true);
//capturedImageHolder.setImageBitmap(bitmap);
capturedImageHolder.setImageBitmap(scaleDownBitmapImage(bitmap, 350, 450));
}else{// LANDSCAPE MODE
//No need to reverse width and height
Bitmap scaled = Bitmap.createScaledBitmap(bitmap, screenWidth,screenHeight , true);
bitmap=scaled;
//capturedImageHolder.setImageBitmap(bitmap);
capturedImageHolder.setImageBitmap(scaleDownBitmapImage(bitmap, 650, 300));
}
}
private Bitmap scaleDownBitmapImage(Bitmap bitmap, int newWidth, int newHeight) {
Bitmap resizedBitmap = Bitmap.createScaledBitmap(bitmap, newWidth, newHeight, true);
return resizedBitmap;
}
OnClickListener captrureListener = new OnClickListener() {
@Override
public void onClick(View v) {
//Visibitlity of camera photo
cameraPreview.setVisibility(View.VISIBLE);
capturedImageHolder.setVisibility(View.GONE);
new Loading().execute();
//timer.schedule(task, DELAY, DELAY);
timerThread = new Thread()
{
public void run() {
for (i = 0; i < 6; i++) {
runOnUiThread(new Runnable() {
public void run() {
image_animation.setImageResource(imageArray[i]);
}
});
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
if(i==6){
timerThread.interrupt();
}
}
}
};
timerThread.start();
}
};
private final TimerTask task = new TimerTask() {
private int counter = 0;
public void run() {
handler.post(new Runnable() {
public void run() {
image_animation.setImageResource(imageArray[i]);
i++;
if (i > imageArray.length - 1) {
i = 0;
}
}
});
if (++counter == 6) {
timer.cancel();
timer.purge();
}
}
};
public class Loading extends AsyncTask<Void, Void, Void> {
@Override
protected Void doInBackground(Void... voids) {
return null;
}
@Override
protected void onPostExecute(Void aVoid) {
super.onPostExecute(aVoid);
// 5000 ms at intervals of 1000 ms = 1 s, so it lasts 5 seconds
new CountDownTimer(5000, 1000) {
@Override
public void onTick(long millisUntilFinished) {
// every time 1 second passes
//tv.setText("" + millisUntilFinished/1000);
/* image_animation.setImageResource(imageArray[i]);
i++;
if (i > imageArray.length - 1) {
i = 0;
}*/
}
@Override
public void onFinish() {
// count finished
//tv.setText("Picture Taken");
mCamera.takePicture(null, null, null, mPicture);
}
}.start();
}
}
//make picture and save to a folder
private static File getOutputMediaFile() {
//make a new directory inside external storage (Environment.getExternalStorageDirectory()
//would be preferable to the hard-coded "/sdcard/" path)
File mediaStorageDir = new File("/sdcard/", "JCG Camera");
//if this "JCG Camera" folder does not exist
if (!mediaStorageDir.exists()) {
//if you cannot make this folder return
if (!mediaStorageDir.mkdirs()) {
return null;
}
}
//take the current timeStamp
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
File mediaFile;
//and make a media file:
mediaFile = new File(mediaStorageDir.getPath() + File.separator + "IMG_" + timeStamp + ".jpg");
return mediaFile;
}
private void releaseCamera() {
// stop and release camera
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
private void hideKeyboard() {
View view = getCurrentFocus();
if (view != null) {
((InputMethodManager) getSystemService(Context.INPUT_METHOD_SERVICE)).
hideSoftInputFromWindow(view.getWindowToken(), InputMethodManager.HIDE_NOT_ALWAYS);
}
}
#Override
public void onBackPressed()
{
// code here to show dialog
super.onBackPressed(); // optional depending on your needs
}
#Override
protected void onDestroy() {
if (mCamera != null) {
mCamera.release();
}
super.onDestroy();
}
#Override
public void onStop() {
if (mCamera != null) {
mCamera.release();
}
super.onStop();
}
}
Below is the result I am getting right now versus what I want. Notice that during photo capture the hand was on the right side, but when the photo preview is displayed it appears on the left side.
So, what can be done to resolve the issue?
Unfortunately we cannot disable the mirrored preview, but we can use a TextureView and apply setTransform() to reverse the camera preview. This works only with API >= 14, so try the code below in your PhotoCaptureActivity:
mCamera.setDisplayOrientation(90);
Matrix matrix = new Matrix();
matrix.setScale(-1, 1);
matrix.postTranslate(width, 0);
mTextureView.setTransform(matrix);
For more info check this link.
Also keep in mind:
This is used with the front camera; when using the back camera, call mTextureView.setTransform(null);
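A minimal sketch of wiring that transform into a TextureView (API >= 14); mCamera and mTextureView are illustrative names assumed to exist in your PhotoCaptureActivity:
private final TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() {
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
try {
mCamera.setPreviewTexture(surface);
mCamera.setDisplayOrientation(90);
Matrix matrix = new Matrix();
matrix.setScale(-1, 1); // mirror around the vertical axis
matrix.postTranslate(width, 0); // slide the mirrored image back on screen
mTextureView.setTransform(matrix); // use setTransform(null) for the back camera
mCamera.startPreview();
} catch (IOException e) {
e.printStackTrace();
}
}
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) { }
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) { return true; }
public void onSurfaceTextureUpdated(SurfaceTexture surface) { }
};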
At last I got a solution by adding the following attribute to the ImageView, and it worked like a charm :)
android:scaleX="-1"
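The same effect can also be applied at runtime; assuming capturedImageHolder is the ImageView from the snippet above:
capturedImageHolder.setScaleX(-1f); // mirror the view horizontally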
In OpenCV's JavaCameraView it is very easy:
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
imageMat = inputFrame.rgba().t();
if (horizontalReverse) { // horizontalReverse: your own flag choosing the mirror direction
Core.flip(imageMat, imageMat, 1); // flip code 1 mirrors around the y-axis
} else {
Core.flip(imageMat, imageMat, -1); // flip code -1 mirrors around both axes
}
return imageMat;
}
Note that t() transposes the frame, so the returned Mat no longer matches the preview size; resize it back (e.g. with Imgproc.resize) if OpenCV complains about the dimensions.
I have a project that captures an image using OpenCV's CameraBridgeViewBase and another project that detects a ping-pong ball. The problem is that I can't combine those two projects. What I want is to capture a live feed, then return a single image that already has the detection in it.
Here are the classes of the project that can capture an image and store it in the gallery:
public final class MainActivity extends ActionBarActivity
implements CvCameraViewListener2 {
// A tag for log output.
private static final String TAG =
MainActivity.class.getSimpleName();
// A key for storing the index of the active camera.
private static final String STATE_CAMERA_INDEX = "cameraIndex";
// A key for storing the index of the active image size.
private static final String STATE_IMAGE_SIZE_INDEX =
"imageSizeIndex";
// An ID for items in the image size submenu.
private static final int MENU_GROUP_ID_SIZE = 2;
// The index of the active camera.
private int mCameraIndex;
// The index of the active image size.
private int mImageSizeIndex;
// Whether the active camera is front-facing.
// If so, the camera view should be mirrored.
private boolean mIsCameraFrontFacing;
// The number of cameras on the device.
private int mNumCameras;
// The image sizes supported by the active camera.
private List<Size> mSupportedImageSizes;
// The camera view.
private CameraBridgeViewBase mCameraView;
// Whether the next camera frame should be saved as a photo.
private boolean mIsPhotoPending;
// A matrix that is used when saving photos.
private Mat mBgr;
// Whether an asynchronous menu action is in progress.
// If so, menu interaction should be disabled.
private boolean mIsMenuLocked;
// The OpenCV loader callback.
private BaseLoaderCallback mLoaderCallback =
new BaseLoaderCallback(this) {
#Override
public void onManagerConnected(final int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
Log.d(TAG, "OpenCV loaded successfully");
mCameraView.enableView();
//mCameraView.enableFpsMeter();
mBgr = new Mat();
break;
default:
super.onManagerConnected(status);
break;
}
}
};
// Suppress backward incompatibility errors because we provide
// backward-compatible fallbacks.
#SuppressLint("NewApi")
#Override
protected void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
final Window window = getWindow();
window.addFlags(
WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
if (savedInstanceState != null) {
mCameraIndex = savedInstanceState.getInt(
STATE_CAMERA_INDEX, 0);
mImageSizeIndex = savedInstanceState.getInt(
STATE_IMAGE_SIZE_INDEX, 0);
} else {
mCameraIndex = 0;
mImageSizeIndex = 0;
}
final Camera camera;
if (Build.VERSION.SDK_INT >=
Build.VERSION_CODES.GINGERBREAD) {
CameraInfo cameraInfo = new CameraInfo();
Camera.getCameraInfo(mCameraIndex, cameraInfo);
mIsCameraFrontFacing =
(cameraInfo.facing ==
CameraInfo.CAMERA_FACING_FRONT);
mNumCameras = Camera.getNumberOfCameras();
camera = Camera.open(mCameraIndex);
} else { // pre-Gingerbread
// Assume there is only 1 camera and it is rear-facing.
mIsCameraFrontFacing = false;
mNumCameras = 1;
camera = Camera.open();
}
final Parameters parameters = camera.getParameters();
camera.release();
mSupportedImageSizes =
parameters.getSupportedPreviewSizes();
final Size size = mSupportedImageSizes.get(mImageSizeIndex);
mCameraView = new JavaCameraView(this, mCameraIndex);
mCameraView.setMaxFrameSize(size.width, size.height);
mCameraView.setCvCameraViewListener(this);
setContentView(mCameraView);
}
public void onSaveInstanceState(Bundle savedInstanceState) {
// Save the current camera index.
savedInstanceState.putInt(STATE_CAMERA_INDEX, mCameraIndex);
// Save the current image size index.
savedInstanceState.putInt(STATE_IMAGE_SIZE_INDEX,
mImageSizeIndex);
super.onSaveInstanceState(savedInstanceState);
}
// Suppress backward incompatibility errors because we provide
// backward-compatible fallbacks.
#SuppressLint("NewApi")
#Override
public void recreate() {
if (Build.VERSION.SDK_INT >=
Build.VERSION_CODES.HONEYCOMB) {
super.recreate();
} else {
finish();
startActivity(getIntent());
}
}
#Override
public void onPause() {
if (mCameraView != null) {
mCameraView.disableView();
}
super.onPause();
}
#Override
public void onResume() {
super.onResume();
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0,
this, mLoaderCallback);
mIsMenuLocked = false;
}
#Override
public void onDestroy() {
if (mCameraView != null) {
mCameraView.disableView();
}
super.onDestroy();
}
#Override
public boolean onCreateOptionsMenu(final Menu menu) {
getMenuInflater().inflate(R.menu.menu_main, menu);
if (mNumCameras < 2) {
// Remove the option to switch cameras, since there is
// only 1.
menu.removeItem(R.id.menu_next_camera);
}
int numSupportedImageSizes = mSupportedImageSizes.size();
if (numSupportedImageSizes > 1) {
final SubMenu sizeSubMenu = menu.addSubMenu(
R.string.menu_image_size);
for (int i = 0; i < numSupportedImageSizes; i++) {
final Size size = mSupportedImageSizes.get(i);
sizeSubMenu.add(MENU_GROUP_ID_SIZE, i, Menu.NONE,
String.format("%dx%d", size.width,
size.height));
}
}
return true;
}
// Suppress backward incompatibility errors because we provide
// backward-compatible fallbacks (for recreate).
#SuppressLint("NewApi")
#Override
public boolean onOptionsItemSelected(final MenuItem item) {
if (mIsMenuLocked) {
return true;
}
if (item.getGroupId() == MENU_GROUP_ID_SIZE) {
mImageSizeIndex = item.getItemId();
recreate();
return true;
}
switch (item.getItemId()) {
case R.id.menu_next_camera:
mIsMenuLocked = true;
// With another camera index, recreate the activity.
mCameraIndex++;
if (mCameraIndex == mNumCameras) {
mCameraIndex = 0;
}
mImageSizeIndex = 0;
recreate();
return true;
case R.id.menu_take_photo:
mIsMenuLocked = true;
// Next frame, take the photo.
mIsPhotoPending = true;
return true;
default:
return super.onOptionsItemSelected(item);
}
}
#Override
public void onCameraViewStarted(final int width,
final int height) {
}
#Override
public void onCameraViewStopped() {
}
#Override
public Mat onCameraFrame(final CvCameraViewFrame inputFrame) {
final Mat rgba = inputFrame.rgba();
if (mIsPhotoPending) {
mIsPhotoPending = false;
takePhoto(rgba);
}
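// Note: the photo was grabbed above, before the mirror flip below, so the
// saved file keeps the sensor orientation even though the preview is mirrored.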
if (mIsCameraFrontFacing) {
// Mirror (horizontally flip) the preview.
Core.flip(rgba, rgba, 1);
}
return rgba;
}
private void takePhoto(final Mat rgba) {
// Determine the path and metadata for the photo.
final long currentTimeMillis = System.currentTimeMillis();
final String appName = getString(R.string.app_name);
final String galleryPath =
Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES).toString();
final String albumPath = galleryPath + File.separator +
appName;
final String photoPath = albumPath + File.separator +
currentTimeMillis + LabActivity.PHOTO_FILE_EXTENSION;
final ContentValues values = new ContentValues();
values.put(MediaStore.MediaColumns.DATA, photoPath);
values.put(Images.Media.MIME_TYPE,
LabActivity.PHOTO_MIME_TYPE);
values.put(Images.Media.TITLE, appName);
values.put(Images.Media.DESCRIPTION, appName);
values.put(Images.Media.DATE_TAKEN, currentTimeMillis);
// Ensure that the album directory exists.
File album = new File(albumPath);
if (!album.isDirectory() && !album.mkdirs()) {
Log.e(TAG, "Failed to create album directory at " +
albumPath);
onTakePhotoFailed();
return;
}
// Try to create the photo.
Imgproc.cvtColor(rgba, mBgr, Imgproc.COLOR_RGBA2BGR, 3);
if (!Imgcodecs.imwrite(photoPath, mBgr)) {
Log.e(TAG, "Failed to save photo to " + photoPath);
onTakePhotoFailed();
return; // without this, the success log below runs even on failure
}
Log.d(TAG, "Photo saved successfully to " + photoPath);
// Try to insert the photo into the MediaStore.
Uri uri;
try {
uri = getContentResolver().insert(
Images.Media.EXTERNAL_CONTENT_URI, values);
} catch (final Exception e) {
Log.e(TAG, "Failed to insert photo into MediaStore");
e.printStackTrace();
// Since the insertion failed, delete the photo.
File photo = new File(photoPath);
if (!photo.delete()) {
Log.e(TAG, "Failed to delete non-inserted photo");
}
onTakePhotoFailed();
return;
}
// Open the photo in LabActivity.
final Intent intent = new Intent(this, LabActivity.class);
intent.putExtra(LabActivity.EXTRA_PHOTO_URI, uri);
intent.putExtra(LabActivity.EXTRA_PHOTO_DATA_PATH,
photoPath);
runOnUiThread(new Runnable() {
#Override
public void run() {
startActivity(intent);
}
});
}
private void onTakePhotoFailed() {
mIsMenuLocked = false;
// Show an error message.
final String errorMessage =
getString(R.string.photo_error_message);
runOnUiThread(new Runnable() {
#Override
public void run() {
Toast.makeText(MainActivity.this, errorMessage,
Toast.LENGTH_SHORT).show();
}
});
}
--
public final class LabActivity extends ActionBarActivity {
public static final String PHOTO_FILE_EXTENSION = ".png";
public static final String PHOTO_MIME_TYPE = "image/png";
public static final String EXTRA_PHOTO_URI =
"com.nummist.secondsight.LabActivity.extra.PHOTO_URI";
public static final String EXTRA_PHOTO_DATA_PATH =
"com.nummist.secondsight.LabActivity.extra.PHOTO_DATA_PATH";
private Uri mUri;
private String mDataPath;
#Override
protected void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
final Intent intent = getIntent();
mUri = intent.getParcelableExtra(EXTRA_PHOTO_URI);
mDataPath = intent.getStringExtra(EXTRA_PHOTO_DATA_PATH);
final ImageView imageView = new ImageView(this);
imageView.setImageURI(mUri);
setContentView(imageView);
}
#Override
public boolean onCreateOptionsMenu(final Menu menu) {
getMenuInflater().inflate(R.menu.activity_lab, menu);
return true;
}
#Override
public boolean onOptionsItemSelected(final MenuItem item) {
switch (item.getItemId()) {
case R.id.menu_delete:
deletePhoto();
return true;
case R.id.menu_edit:
editPhoto();
return true;
case R.id.menu_share:
sharePhoto();
return true;
default:
return super.onOptionsItemSelected(item);
}
}
/*
* Show a confirmation dialog. On confirmation ("Delete"), the
* photo is deleted and the activity finishes.
*/
private void deletePhoto() {
final AlertDialog.Builder alert = new AlertDialog.Builder(
LabActivity.this);
alert.setTitle(R.string.photo_delete_prompt_title);
alert.setMessage(R.string.photo_delete_prompt_message);
alert.setCancelable(false);
alert.setPositiveButton(R.string.delete,
new DialogInterface.OnClickListener() {
#Override
public void onClick(final DialogInterface dialog,
final int which) {
getContentResolver().delete(
Images.Media.EXTERNAL_CONTENT_URI,
MediaStore.MediaColumns.DATA + "=?",
new String[] { mDataPath });
finish();
}
});
alert.setNegativeButton(android.R.string.cancel, null);
alert.show();
}
/*
* Show a chooser so that the user may pick an app for editing
* the photo.
*/
private void editPhoto() {
final Intent intent = new Intent(Intent.ACTION_EDIT);
intent.setDataAndType(mUri, PHOTO_MIME_TYPE);
startActivity(Intent.createChooser(intent,
getString(R.string.photo_edit_chooser_title)));
}
/*
* Show a chooser so that the user may pick an app for sending
* the photo.
*/
private void sharePhoto() {
final Intent intent = new Intent(Intent.ACTION_SEND);
intent.setType(PHOTO_MIME_TYPE);
intent.putExtra(Intent.EXTRA_STREAM, mUri);
intent.putExtra(Intent.EXTRA_SUBJECT,
getString(R.string.photo_send_extra_subject));
intent.putExtra(Intent.EXTRA_TEXT,
getString(R.string.photo_send_extra_text));
startActivity(Intent.createChooser(intent,
getString(R.string.photo_send_chooser_title)));
}
}
And here are the classes of the pingpong ball detection project:
public class MainActivity extends AppCompatActivity {
private static final String TAG = "ObjTrackActivity";
private MenuItem mItemPreviewRGBA;
private MenuItem mItemPreviewTresholded;
public static boolean bShowTresholded = false;
public MainActivity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
#Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "onCreate");
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
requestWindowFeature(Window.FEATURE_NO_TITLE);
super.onCreate(savedInstanceState);
setContentView(new ObjTrackViewer(this));
}
public boolean onCreateOptionsMenu(Menu menu){
Log.i(TAG, "onCreateOptionsMenu");
mItemPreviewRGBA = menu.add("Preview RGBA");
mItemPreviewTresholded = menu.add("Preview Thresholded");
return true;
}
public boolean onOptionsItemSelected(MenuItem item){
Log.i(TAG, "Menu Item selected " + item);
if (item == mItemPreviewRGBA)
bShowTresholded = false;
else if (item == mItemPreviewTresholded)
bShowTresholded = true;
return true;
}
}
--
public class ObjTrackViewer extends SampleViewBase {
private int mFrameSize;
private Bitmap mBitmap;
private int[] mRGBA;
public ObjTrackViewer(Context context) {
super(context);
}
#Override
protected void onPreviewStarted(int previewWidth, int previewHeight) {
mFrameSize = previewWidth * previewHeight;
mRGBA = new int[mFrameSize];
mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
}
#Override
protected void onPreviewStopped() {
if(mBitmap != null) {
mBitmap.recycle();
mBitmap = null;
}
mRGBA = null;
}
#Override
protected Bitmap processFrame(byte[] data) {
int[] rgba = mRGBA;
CircleObjectTrack(getFrameWidth(), getFrameHeight(), data, rgba, MainActivity.bShowTresholded);
Bitmap bmp = mBitmap;
bmp.setPixels(rgba, 0/* offset */, getFrameWidth() /* stride */, 0, 0, getFrameWidth(), getFrameHeight());
return bmp;
}
public native void CircleObjectTrack(int width, int height, byte yuv[], int[] rgba, boolean debug);
static {
System.loadLibrary("objtrack_opencv_jni");
}
}
--
public abstract class SampleViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "Sample::SurfaceView";
private Camera mCamera;
private SurfaceHolder mHolder;
private int mFrameWidth;
private int mFrameHeight;
private byte[] mFrame;
private boolean mThreadRun;
private byte[] mBuffer;
public SampleViewBase(Context context) {
super(context);
mHolder = getHolder();
mHolder.addCallback(this);
Log.i(TAG, "Instantiated new " + this.getClass());
}
public int getFrameWidth() {
return mFrameWidth;
}
public int getFrameHeight() {
return mFrameHeight;
}
public void setPreview() throws IOException {
//if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB)
// mCamera.setPreviewTexture( new SurfaceTexture(10) );
//else
mCamera.setPreviewDisplay(null);
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
if (mCamera != null) {
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
mFrameWidth = width;
mFrameHeight = height;
// selecting optimal camera preview size
{
int minDiff = Integer.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
mFrameHeight = size.height;
minDiff = Math.abs(size.height - height);
}
}
}
params.setPreviewSize(getFrameWidth(), getFrameHeight());
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
/* Now allocate the buffer */
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
/* The buffer where the current frame will be copied */
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Notify that the preview is about to be started and deliver preview size */
onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
/* Now we can start a preview */
mCamera.startPreview();
}
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
mCamera = Camera.open();
mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera camera) {
synchronized (SampleViewBase.this) {
System.arraycopy(data, 0, mFrame, 0, data.length);
SampleViewBase.this.notify();
}
camera.addCallbackBuffer(mBuffer);
}
});
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
mThreadRun = false;
if (mCamera != null) {
synchronized (this) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
mCamera = null;
}
}
onPreviewStopped();
}
/* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */
protected abstract Bitmap processFrame(byte[] data);
/**
* This method is called when the preview process is being started, before the first frame is delivered and processFrame is called.
* It receives the width and height of the preview frames and can be used to prepare the data needed during frame processing.
* #param previewWidth - the width of the preview frames that will be delivered via processFrame
* #param previewHeight - the height of the preview frames that will be delivered via processFrame
*/
protected abstract void onPreviewStarted(int previewWidth, int previewHeight);
/**
* This method is called when the preview is stopped. By the time it is called, the preview has stopped and all frame processing has completed.
* If the Bitmap object returned via processFrame is cached, this is a good time to recycle it.
* Any other resources used during the preview can be released.
*/
protected abstract void onPreviewStopped();
public void run() {
mThreadRun = true;
Log.i(TAG, "Starting processing thread");
while (mThreadRun) {
Bitmap bmp = null;
synchronized (this) {
try {
this.wait();
bmp = processFrame(mFrame);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
if (bmp != null) {
Canvas canvas = mHolder.lockCanvas();
if (canvas != null) {
canvas.drawBitmap(bmp, (canvas.getWidth() - getFrameWidth()) / 2, (canvas.getHeight() - getFrameHeight()) / 2, null);
mHolder.unlockCanvasAndPost(canvas);
}
}
}
}
}
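One way to combine the two projects is to run the detection inside onCameraFrame() of the capture project, so that takePhoto() saves a frame that already carries the detection overlay. A minimal sketch, assuming the detection can be re-expressed with the OpenCV Java bindings; Imgproc.HoughCircles stands in here for the native CircleObjectTrack() routine, and its parameters are illustrative guesses:
public Mat onCameraFrame(final CvCameraViewFrame inputFrame) {
final Mat rgba = inputFrame.rgba();
final Mat gray = inputFrame.gray();
final Mat circles = new Mat();
// dp = 1, minDist = rows/8; the thresholds and radius bounds would need tuning.
Imgproc.HoughCircles(gray, circles, Imgproc.HOUGH_GRADIENT, 1, gray.rows() / 8, 100, 30, 10, 100);
// Draw every detected circle onto the RGBA frame.
for (int i = 0; i < circles.cols(); i++) {
double[] c = circles.get(0, i);
Imgproc.circle(rgba, new org.opencv.core.Point(c[0], c[1]), (int) c[2], new org.opencv.core.Scalar(0, 255, 0), 3);
}
if (mIsPhotoPending) {
mIsPhotoPending = false;
takePhoto(rgba); // the saved photo now includes the overlay
}
return rgba;
}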
I am recording mp4 with the following code. But while I am recording:
1- Is it possible to get the frames of the video in real time, in a byte array or another format?
2- Is it possible to modify the frames? For example, to invert the colors and make the video look like a negative.
public class MainActivity extends Activity implements OnClickListener, SurfaceHolder.Callback {
MediaRecorder recorder;
SurfaceHolder holder;
boolean recording = false;
#Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
recorder = new MediaRecorder();
initRecorder();
setContentView(R.layout.activity_main);
SurfaceView cameraView = (SurfaceView) findViewById(R.id.surfaceView1);
holder = cameraView.getHolder();
holder.addCallback(this);
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
cameraView.setClickable(true);
cameraView.setOnClickListener(this);
}
private void initRecorder() {
recorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
recorder.setVideoSource(MediaRecorder.VideoSource.DEFAULT);
CamcorderProfile cpHigh = CamcorderProfile
.get(CamcorderProfile.QUALITY_HIGH);
recorder.setProfile(cpHigh);
recorder.setOutputFile(Environment.getExternalStorageDirectory().getPath() + "/video.mp4");
recorder.setMaxDuration(50 * 1000); // 50 seconds
recorder.setMaxFileSize(5 * 1000000); // Approximately 5 megabytes
}
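// Optional sketch: setMaxDuration()/setMaxFileSize() make MediaRecorder stop
// on its own, leaving the 'recording' flag above stale. The standard
// MediaRecorder.OnInfoListener, added at the end of initRecorder(), keeps it honest:
// recorder.setOnInfoListener(new MediaRecorder.OnInfoListener() {
// public void onInfo(MediaRecorder mr, int what, int extra) {
// if (what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED
// || what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED) {
// recording = false; // the recorder already stopped itself
// }
// }
// });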
private void prepareRecorder() {
recorder.setPreviewDisplay(holder.getSurface());
try {
recorder.prepare();
} catch (IllegalStateException e) {
e.printStackTrace();
finish();
} catch (IOException e) {
e.printStackTrace();
finish();
}
}
public void onClick(View v) {
if (recording) {
recorder.stop();
recording = false;
// Let's initRecorder so we can record again
initRecorder();
prepareRecorder();
} else {
recording = true;
recorder.start();
}
}
public void surfaceCreated(SurfaceHolder holder) {
prepareRecorder();
}
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
}
public void surfaceDestroyed(SurfaceHolder holder) {
if (recording) {
recorder.stop();
recording = false;
}
recorder.release();
finish();
}
}
I used the JavaCV library (which wraps FFmpeg and OpenCV) for editing frames: capture frames in onPreviewFrame and record them through the FFmpeg recorder.
The following is the RecordActivity.java sample.
public class RecordActivity extends Activity implements OnClickListener {
private final static String CLASS_LABEL = "RecordActivity";
private final static String LOG_TAG = CLASS_LABEL;
private PowerManager.WakeLock mWakeLock;
private String ffmpeg_link = "/mnt/sdcard/stream.flv";
long startTime = 0;
boolean recording = false;
private volatile FFmpegFrameRecorder recorder;
private boolean isPreviewOn = false;
private int sampleAudioRateInHz = 44100;
private int imageWidth = 320;
private int imageHeight = 240;
private int frameRate = 30;
/* audio data getting thread */
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private Thread audioThread;
volatile boolean runAudioThread = true;
/* video data getting thread */
private Camera cameraDevice;
private CameraView cameraView;
private IplImage yuvIplimage = null;
/* layout setting */
private final int bg_screen_bx = 232;
private final int bg_screen_by = 128;
private final int bg_screen_width = 700;
private final int bg_screen_height = 500;
private final int bg_width = 1123;
private final int bg_height = 715;
private final int live_width = 640;
private final int live_height = 480;
private int screenWidth, screenHeight;
private Button btnRecorderControl;
#Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
setContentView(R.layout.main);
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
initLayout();
initRecorder();
}
#Override
protected void onResume() {
super.onResume();
if (mWakeLock == null) {
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
}
}
#Override
protected void onPause() {
super.onPause();
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
#Override
protected void onDestroy() {
super.onDestroy();
recording = false;
if (cameraView != null) {
cameraView.stopPreview();
}
if(cameraDevice != null) {
cameraDevice.stopPreview();
cameraDevice.release();
cameraDevice = null;
}
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
private void initLayout() {
/* get size of screen */
Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
screenWidth = display.getWidth();
screenHeight = display.getHeight();
RelativeLayout.LayoutParams layoutParam = null;
LayoutInflater myInflate = null;
myInflate = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
RelativeLayout topLayout = new RelativeLayout(this);
setContentView(topLayout);
LinearLayout preViewLayout = (LinearLayout) myInflate.inflate(R.layout.main, null);
layoutParam = new RelativeLayout.LayoutParams(screenWidth, screenHeight);
topLayout.addView(preViewLayout, layoutParam);
/* add control button: start and stop */
btnRecorderControl = (Button) findViewById(R.id.recorder_control);
btnRecorderControl.setText("Start");
btnRecorderControl.setOnClickListener(this);
/* add camera view */
int display_width_d = (int) (1.0 * bg_screen_width * screenWidth / bg_width);
int display_height_d = (int) (1.0 * bg_screen_height * screenHeight / bg_height);
int prev_rw, prev_rh;
if (1.0 * display_width_d / display_height_d > 1.0 * live_width / live_height) {
prev_rh = display_height_d;
prev_rw = (int) (1.0 * display_height_d * live_width / live_height);
} else {
prev_rw = display_width_d;
prev_rh = (int) (1.0 * display_width_d * live_height / live_width);
}
layoutParam = new RelativeLayout.LayoutParams(prev_rw, prev_rh);
layoutParam.topMargin = (int) (1.0 * bg_screen_by * screenHeight / bg_height);
layoutParam.leftMargin = (int) (1.0 * bg_screen_bx * screenWidth / bg_width);
cameraDevice = Camera.open();
Log.i(LOG_TAG, "cameara open");
cameraView = new CameraView(this, cameraDevice);
topLayout.addView(cameraView, layoutParam);
Log.i(LOG_TAG, "cameara preview start: OK");
}
//---------------------------------------
// initialize ffmpeg_recorder
//---------------------------------------
private void initRecorder() {
Log.w(LOG_TAG,"init recorder");
if (yuvIplimage == null) {
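// 8-bit, 2-channel: width*height*2 bytes, enough to hold one NV21 preview
// frame (which needs width*height*3/2 bytes).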
yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2);
Log.i(LOG_TAG, "create yuvIplimage");
}
Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link);
recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
recorder.setFormat("flv");
recorder.setSampleRate(sampleAudioRateInHz);
// Set in the surface changed method
recorder.setFrameRate(frameRate);
Log.i(LOG_TAG, "recorder initialize success");
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
runAudioThread = true;
}
public void startRecording() {
try {
recorder.start();
startTime = System.currentTimeMillis();
recording = true;
audioThread.start();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
runAudioThread = false;
try {
audioThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
audioRecordRunnable = null;
audioThread = null;
if (recorder != null && recording) {
recording = false;
Log.v(LOG_TAG,"Finishing recording, calling stop and release on recorder");
try {
recorder.stop();
recorder.release();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}
#Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
if (recording) {
stopRecording();
}
finish();
return true;
}
return super.onKeyDown(keyCode, event);
}
//---------------------------------------------
// audio thread, gets and encodes audio data
//---------------------------------------------
class AudioRecordRunnable implements Runnable {
#Override
public void run() {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
// Audio
int bufferSize;
short[] audioData;
int bufferReadResult;
bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
audioData = new short[bufferSize];
Log.d(LOG_TAG, "audioRecord.startRecording()");
audioRecord.startRecording();
/* ffmpeg_audio encoding loop */
while (runAudioThread) {
//Log.v(LOG_TAG,"recording? " + recording);
bufferReadResult = audioRecord.read(audioData, 0, audioData.length);
if (bufferReadResult > 0) {
Log.v(LOG_TAG,"bufferReadResult: " + bufferReadResult);
// If "recording" isn't true when start this thread, it never get's set according to this if statement...!!!
// Why? Good question...
if (recording) {
try {
recorder.record(ShortBuffer.wrap(audioData, 0, bufferReadResult));
//Log.v(LOG_TAG,"recording " + 1024*i + " to " + 1024*i+1024);
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
Log.v(LOG_TAG,"AudioThread Finished, release audioRecord");
/* encoding finish, release recorder */
if (audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
Log.v(LOG_TAG,"audioRecord released");
}
}
}
//---------------------------------------------
// camera thread, gets and encodes video data
//---------------------------------------------
class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {
private SurfaceHolder mHolder;
private Camera mCamera;
public CameraView(Context context, Camera camera) {
super(context);
Log.w("camera","camera view");
mCamera = camera;
mHolder = getHolder();
mHolder.addCallback(CameraView.this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mCamera.setPreviewCallback(CameraView.this);
}
#Override
public void surfaceCreated(SurfaceHolder holder) {
try {
stopPreview();
mCamera.setPreviewDisplay(holder);
} catch (IOException exception) {
mCamera.release();
mCamera = null;
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Log.v(LOG_TAG,"Setting imageWidth: " + imageWidth + " imageHeight: " + imageHeight + " frameRate: " + frameRate);
Camera.Parameters camParams = mCamera.getParameters();
camParams.setPreviewSize(imageWidth, imageHeight);
Log.v(LOG_TAG,"Preview Framerate: " + camParams.getPreviewFrameRate());
camParams.setPreviewFrameRate(frameRate);
mCamera.setParameters(camParams);
startPreview();
}
#Override
public void surfaceDestroyed(SurfaceHolder holder) {
try {
mHolder.addCallback(null);
mCamera.setPreviewCallback(null);
} catch (RuntimeException e) {
// The camera has probably just been released, ignore.
}
}
public void startPreview() {
if (!isPreviewOn && mCamera != null) {
isPreviewOn = true;
mCamera.startPreview();
}
}
public void stopPreview() {
if (isPreviewOn && mCamera != null) {
isPreviewOn = false;
mCamera.stopPreview();
}
}
#Override
public void onPreviewFrame(byte[] data, Camera camera) {
/* get video data */
if (yuvIplimage != null && recording) {
yuvIplimage.getByteBuffer().put(data);
Log.v(LOG_TAG,"Writing Frame");
try {
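// FFmpegFrameRecorder timestamps are in microseconds; the check below only
// moves the clock forward so the encoder never sees time run backwards.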
long t = 1000 * (System.currentTimeMillis() - startTime);
if (t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(yuvIplimage);
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
#Override
public void onClick(View v) {
if (!recording) {
startRecording();
Log.w(LOG_TAG, "Start Button Pushed");
btnRecorderControl.setText("Stop");
} else {
// This will trigger the audio recording loop to stop and then set isRecorderStart = false;
stopRecording();
Log.w(LOG_TAG, "Stop Button Pushed");
btnRecorderControl.setText("Start");
}
}
}
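Regarding question 2 above: with this approach the raw NV21 bytes are in your hands in onPreviewFrame(), so a color negative can be approximated by inverting the buffer before it is pushed into yuvIplimage. A minimal sketch of that tweak, assuming the default NV21 preview format:
// Inside onPreviewFrame(), before yuvIplimage.getByteBuffer().put(data):
for (int i = 0; i < data.length; i++) {
data[i] = (byte) ~data[i]; // ~x == 255 - x for an unsigned byte
}
// Y' = 255 - Y inverts the luminance, and the interleaved VU bytes mirror
// around 128, which approximates a full color negative.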
Please check INDE Media Pack. It lets you add recording with effects applied to your app, and it ships several sample effects like Grayscale, Sepia, Text Overlay etc. that you can easily extend: https://software.intel.com/en-us/articles/intel-inde-media-pack-for-android-tutorials
All effects are OpenGL based, so they do not affect performance and let you keep a good FPS in the final video.
Hi, I am asking this question with reference to the code at https://github.com/google/grafika .
I am trying to re-initialize my encoder during an orientation change in order to adjust the aspect ratio of the encoded frames.
UPDATE:
Here is my GLSurfaceView.Renderer class
class CameraSurfaceRenderer implements GLSurfaceView.Renderer {
private static final String TAG = MainActivity.TAG;
private static final boolean VERBOSE = false;
private static final boolean ROSE_COLORED_GLASSES = false; // experiment
private MainActivity.CameraHandler mCameraHandler;
private TextureMovieEncoder mVideoEncoder;
private File mOutputFile;
TextView tv;
private TextureRender mTextureRender;
private SurfaceTexture mSurfaceTexture;
private boolean mRecordingEnabled;
Context cntex;
public CameraSurfaceRenderer(MainActivity.CameraHandler cameraHandler, TextureMovieEncoder movieEncoder, File outputFile, TextView tx, Context c) {
mOutputFile = outputFile;
mVideoEncoder = movieEncoder;
mRecordingEnabled = false;
mCameraHandler = cameraHandler;
tv = tx;
cntex = c;
}
public void notifyPausing() {
if (mSurfaceTexture != null) {
Log.d(TAG, "renderer pausing -- releasing SurfaceTexture");
mSurfaceTexture.release();
mSurfaceTexture = null;
}
}
public void changeRecordingState(boolean isRecording) {
Log.d(TAG, "changeRecordingState: was " + mRecordingEnabled + " now " + isRecording);
mRecordingEnabled = isRecording;
}
#Override
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
System.out.println("onSurfaceCreated start");
Log.d(TAG, "onSurfaceCreated");
mTextureRender = new TextureRender(cntex);
mTextureRender.surfaceCreated();
if (ROSE_COLORED_GLASSES) {
String rosyFragment =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" +
"varying vec2 vTextureCoord;\n" +
"uniform samplerExternalOES sTexture;\n" +
"void main() {\n" +
" vec4 tc = texture2D(sTexture, vTextureCoord);\n" +
" gl_FragColor.r = tc.r * 0.3 + tc.g * 0.59 + tc.b * 0.11;\n" +
"}\n";
// assign value to gl_FragColor.g and .b as well to get simple B&W
mTextureRender.changeFragmentShader(rosyFragment);
}
mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());
// Tell the UI thread to enable the camera preview.
mCameraHandler.sendMessage(mCameraHandler.obtainMessage(
MainActivity.CameraHandler.MSG_SET_SURFACE_TEXTURE, mSurfaceTexture));
if (cntex.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
mVideoEncoder.startRecording(new TextureMovieEncoder.EncoderConfig(
mOutputFile, 480, 640, 1400000, EGL14.eglGetCurrentContext()));
} else {
mVideoEncoder.startRecording(new TextureMovieEncoder.EncoderConfig(
mOutputFile, 640, 480, 1400000, EGL14.eglGetCurrentContext()));
}
mVideoEncoder.updateSharedContext(EGL14.eglGetCurrentContext());
System.out.println("onSurfaceCreated end");
}
boolean is_stpd = false;
int count = 0;
#Override
public void onSurfaceChanged(GL10 unused, int width, int height) {
System.out.println("onSurfaceChanged start");
Log.d(TAG, "onSurfaceChanged " + width + "x" + height);
GLES20.glViewport(0, 0, width, height);
mCameraHandler.sendEmptyMessage(2); // to display toast message of surface change.
int orientation = cntex.getResources().getConfiguration().orientation;
// System.out.println("onSurfaceChanged before reinit");
mVideoEncoder.encoder_reinitialisation(EGL14.eglGetCurrentContext(), orientation);
// System.out.println("onSurfaceChanged after reinit");
System.out.println("onSurfaceChanged end");
}
int _frm_cnt = 0;
double _strt_tm = 0;
#Override
public void onDrawFrame(GL10 unused) {
System.out.println("onDrawFrame start");
if (VERBOSE) Log.d(TAG, "onDrawFrame tex=" + mTextureRender.getTextureId());
++_frm_cnt;
if(_frm_cnt == 1) {
_strt_tm = System.currentTimeMillis();
}
if((System.currentTimeMillis() - _strt_tm) >= 1000) {
System.out.println("fps = " + _frm_cnt );
//tx.setText("fps = " + Integer.toString(frm_cnt));
_frm_cnt = 0;
}
mSurfaceTexture.updateTexImage();
//
mVideoEncoder.setTextureId(mTextureRender.getTextureId());
//
if(mVideoEncoder.is_prepared == true) {
// // This will be ignored if we're not actually recording.
mVideoEncoder.frameAvailable(mSurfaceTexture);
}
// Draw the video frame.
mTextureRender.drawFrame(mSurfaceTexture);
System.out.println("onDrawFrame end");
}
}
Here is my TextureMovieEncoder class
public class TextureMovieEncoder implements Runnable {
private static final String TAG = MainActivity.TAG;
private static final boolean VERBOSE = false;
private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
private static final int FRAME_RATE = 5; // 5 fps (the original grafika sample used 30)
private static final int IFRAME_INTERVAL = 1; // 1 second between I-frames
private static final int MSG_START_RECORDING = 0;
private static final int MSG_STOP_RECORDING = 1;
private static final int MSG_FRAME_AVAILABLE = 2;
private static final int MSG_SET_TEXTURE_ID = 3;
private static final int MSG_UPDATE_SHARED_CONTEXT = 4;
private static final int MSG_QUIT = 5;
private static final int MSG_REINIT = 6;
// ----- accessed by encoder thread -----
private EglCore mEglBase;
private WindowSurface mInputWindowSurface;
private MediaMuxer mMuxer;
private MediaCodec mEncoder;
private MediaCodec.BufferInfo mBufferInfo;
private int mTrackIndex;
private boolean mMuxerStarted;
private TextureRender mTextureRender;
private int mTextureId;
private int mFrameNum;
File enc_file = new File(Environment.getExternalStorageDirectory().getPath() + "/encoded_preview.webm");
FileOutputStream fp_enc = null;
PrintWriter enc_len = null;
// ----- accessed by multiple threads -----
private volatile EncoderHandler mHandler;
private Object mReadyFence = new Object(); // guards ready/running
private boolean mReady;
public boolean mRunning;
public boolean is_prepared = false;
public TextureMovieEncoder(Context cntxt) {
context = cntxt;
}
public static class EncoderConfig {
final File mOutputFile;
final int mWidth;
final int mHeight;
final int mBitRate;
final EGLContext mEglContext;
public EncoderConfig(File outputFile, int width, int height, int bitRate,
EGLContext sharedEglContext) {
System.out.println("EncoderConfig start");
mOutputFile = outputFile;
mWidth = width;
mHeight = height;
mBitRate = bitRate;
mEglContext = sharedEglContext;
System.out.println("EncoderConfig end");
}
#Override
public String toString() {
return "EncoderConfig: " + mWidth + "x" + mHeight + " #" + mBitRate +
" to '" + mOutputFile.toString() + "' ctxt=" + mEglContext;
}
}
public void startRecording(EncoderConfig config) {
System.out.println("startRecording start");
Log.d(TAG, "Encoder: startRecording()");
synchronized (mReadyFence) {
if (mRunning) {
Log.w(TAG, "Encoder thread already running");
return;
}
mRunning = true;
new Thread(this, "TextureMovieEncoder").start();
while (!mReady) {
try {
mReadyFence.wait();
} catch (InterruptedException ie) {
// ignore
}
}
}
mHandler.sendMessage(mHandler.obtainMessage(MSG_START_RECORDING, config));
System.out.println("startRecording end");
}
int orientation_local = -1;
public void encoder_reinitialisation(EGLContext eglContext, int orientation) {
System.out.println("encoder_reinitialisation start");
is_prepared = false;
System.out.println("encoder_reinitialisation before message oriebta = " + orientation);
mHandler.sendMessage(mHandler.obtainMessage(MSG_REINIT, eglContext));
System.out.println("encoder_reinitialisation after message");
orientation_local = orientation;
System.out.println("encoder_reinitialisation end");
}
public void stopRecording() {
System.out.println("stopRecording start");
if(mHandler != null) {
mHandler.sendMessage(mHandler.obtainMessage(MSG_STOP_RECORDING));
mHandler.sendMessage(mHandler.obtainMessage(MSG_QUIT));
}
// We don't know when these will actually finish (or even start). We don't want to
// delay the UI thread though, so we return immediately.
System.out.println("stopRecording end");
}
/**
* Returns true if recording has been started.
*/
public boolean isRecording() {
synchronized (mReadyFence) {
return mRunning;
}
}
public void updateSharedContext(EGLContext sharedContext) {
mHandler.sendMessage(mHandler.obtainMessage(MSG_UPDATE_SHARED_CONTEXT, sharedContext));
}
public void frameAvailable(SurfaceTexture st) {
System.out.println("frameAvailable start");
synchronized (mReadyFence) {
if (!mReady) {
return;
}
}
float[] transform = new float[16]; // TODO - avoid alloc every frame
st.getTransformMatrix(transform);
long timestamp = st.getTimestamp();
if (timestamp == 0) {
Log.w(TAG, "HEY: got SurfaceTexture with timestamp of zero");
return;
}
mHandler.sendMessage(mHandler.obtainMessage(MSG_FRAME_AVAILABLE,
(int) (timestamp >> 32), (int) timestamp, transform));
System.out.println("frameAvailable end");
}
public void setTextureId(int id) {
System.out.println("setTextureId start");
synchronized (mReadyFence) {
if (!mReady) {
return;
}
}
mHandler.sendMessage(mHandler.obtainMessage(MSG_SET_TEXTURE_ID, id, 0, null));
System.out.println("setTextureId end");
}
#Override
public void run() {
System.out.println("run start");
// Establish a Looper for this thread, and define a Handler for it.
Looper.prepare();
synchronized (mReadyFence) {
mHandler = new EncoderHandler(this);
mReady = true;
mReadyFence.notify();
}
Looper.loop();
Log.d(TAG, "Encoder thread exiting");
synchronized (mReadyFence) {
mReady = mRunning = false;
mHandler = null;
System.out.println("run end");
}
}
/**
* Handles encoder state change requests.
*/
private static class EncoderHandler extends Handler {
private WeakReference<TextureMovieEncoder> mWeakEncoder;
public EncoderHandler(TextureMovieEncoder encoder) {
mWeakEncoder = new WeakReference<TextureMovieEncoder>(encoder);
}
#Override
public void handleMessage(Message inputMessage) {
int what = inputMessage.what;
Object obj = inputMessage.obj;
TextureMovieEncoder encoder = mWeakEncoder.get();
if (encoder == null) {
Log.w(TAG, "EncoderHandler.handleMessage: encoder is null");
return;
}
switch (what) {
case MSG_START_RECORDING:
encoder.handleStartRecording((EncoderConfig) obj);
break;
case MSG_REINIT:
encoder.encoder_reinit((EGLContext) inputMessage.obj);
break;
case MSG_STOP_RECORDING:
encoder.handleStopRecording();
break;
case MSG_FRAME_AVAILABLE:
long timestamp = (((long) inputMessage.arg1) << 32) |
(((long) inputMessage.arg2) & 0xffffffffL);
encoder.handleFrameAvailable((float[]) obj, timestamp);
break;
case MSG_SET_TEXTURE_ID:
encoder.handleSetTexture(inputMessage.arg1);
break;
case MSG_UPDATE_SHARED_CONTEXT:
encoder.handleUpdateSharedContext((EGLContext) inputMessage.obj);
break;
case MSG_QUIT:
Looper.myLooper().quit();
break;
default:
throw new RuntimeException("Unhandled msg what=" + what);
}
}
}
/**
* Start recording.
*/
private void handleStartRecording(EncoderConfig config) {
Log.d(TAG, "handleStartRecording " + config);
mFrameNum = 0;
prepareEncoder(config.mEglContext, config.mWidth, config.mHeight, config.mBitRate,
config.mOutputFile);
}
private void encoder_reinit(EGLContext obj) {
System.out.println("encoder_reinit start ");
drainEncoder(true);
releaseEncoder();
prepareEncoder(obj, video_width, video_height, 1400000,
null);
System.out.println("encoder_reinit end ");
}
private void handleFrameAvailable(float[] transform, long timestamp) {
System.out.println("handleFrameAvailable start");
if (VERBOSE) Log.d(TAG, "handleFrameAvailable tr=" + transform);
if(is_prepared == true) {
drainEncoder(false);
mTextureRender.setTextureId(mTextureId);
mTextureRender.drawFrame(transform);
mInputWindowSurface.setPresentationTime(timestamp);
mInputWindowSurface.swapBuffers();
}
System.out.println("handleFrameAvailable end");
}
private void handleStopRecording() {
Log.d(TAG, "handleStopRecording");
drainEncoder(true);
releaseEncoder();
}
private void handleSetTexture(int id) {
//Log.d(TAG, "handleSetTexture " + id);
mTextureId = id;
}
/**
* Tears down the EGL surface and context we've been using to feed the MediaCodec input
* surface, and replaces it with a new one that shares with the new context.
*/
private void handleUpdateSharedContext(EGLContext newSharedContext) {
System.out.println("handleUpdateSharedContext start");
Log.d(TAG, "handleUpdatedSharedContext " + newSharedContext);
// Release the EGLSurface and EGLContext.
if(mInputWindowSurface != null) {
mInputWindowSurface.releaseEglSurface();
mEglBase.release();
}
// Create a new EGLContext and recreate the window surface.
mEglBase = new EglCore(newSharedContext, EglCore.FLAG_RECORDABLE);
mInputWindowSurface.recreate(mEglBase);
mInputWindowSurface.makeCurrent();
// Create new programs and such for the new context.
mTextureRender.surfaceCreated();
System.out.println("handleUpdateSharedContext end");
}
boolean created =false;
EGLContext sharedContext_local;
private Context context;
private void prepareEncoder(EGLContext sharedContext, int width, int height, int bitRate,
File outputFile) {
System.out.println("prepareEncoder start width = " + width + "height = " + height);
sharedContext_local = sharedContext;
enc_strm = new byte[width * height * 3 / 2];
encoded_data_buffer = new ofi_vc_buffer();
video_width = width;
video_height = height;
if(!created) {
try {
fp_enc = new FileOutputStream(enc_file);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
try {
enc_len = new PrintWriter(Environment.getExternalStorageDirectory().getPath() + "/encoded_len.xls");
} catch (FileNotFoundException e) {
e.printStackTrace();
}
created = true;
}
mBufferInfo = new MediaCodec.BufferInfo();
MediaFormat format = null;
if(orientation_local == 1) { // Configuration.ORIENTATION_PORTRAIT == 1
System.out.println("videoformatting portrait");
format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
} else {
System.out.println("videoformatting landscape");
format = MediaFormat.createVideoFormat(MIME_TYPE, height, width);
}
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
if (VERBOSE) Log.d(TAG, "format: " + format);
mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mEglBase = new EglCore(sharedContext, EglCore.FLAG_RECORDABLE);
mInputWindowSurface = new WindowSurface(mEglBase, mEncoder.createInputSurface());
mEncoder.start();
mInputWindowSurface.makeCurrent();
mTextureRender = new TextureRender(context);
mTextureRender.surfaceCreated();
mTrackIndex = -1;
mMuxerStarted = false;
is_prepared = true;
System.out.println("prepareEncoder end");
}
/**
* Releases encoder resources.
*/
private void releaseEncoder() {
System.out.println("releaseEncoder end");
if (VERBOSE) Log.d(TAG, "releasing encoder objects");
if (mEncoder != null) {
mEncoder.stop();
mEncoder.release();
mEncoder = null;
}
if (mInputWindowSurface != null) {
mInputWindowSurface.release();
mInputWindowSurface = null;
}
if (mEglBase != null) {
mEglBase.release();
mEglBase = null;
}
if (mMuxer != null) {
mMuxer.stop();
mMuxer.release();
mMuxer = null;
}
System.out.println("releaseEncoder start");
}
byte[] enc_strm = null;
byte sps_pps_nal[] = null;
int sps_pps_nal_size = 0;
ofi_vc_buffer encoded_data_buffer = null;
private int encod_len = 0;
private int frame_type;
private encoded_stream_info enc_buffer_global;
private int video_width;
private int video_height;
private void drainEncoder(boolean endOfStream) {
System.out.println("drainEncoder start");
final int TIMEOUT_USEC = 10000;
if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");
if (endOfStream) {
if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
mEncoder.signalEndOfInputStream();
}
//encoded_stream_info enc_buffer = new encoded_stream_info(video_width * video_height * 3 / 2);
ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
while (true) {
int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
if (!endOfStream) {
break; // out of while
} else {
if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
}
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
encoderOutputBuffers = mEncoder.getOutputBuffers();
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// should happen before receiving buffers, and should only happen once
if (mMuxerStarted) {
throw new RuntimeException("format changed twice");
}
MediaFormat newFormat = mEncoder.getOutputFormat();
Log.d(TAG, "encoder output format changed: " + newFormat);
// now that we have the Magic Goodies, start the muxer
// mTrackIndex = mMuxer.addTrack(newFormat);
// mMuxer.start();
// mMuxerStarted = true;
} else if (encoderStatus < 0) {
Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
encoderStatus);
// let's ignore it
} else {
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null) {
throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
" was null");
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
if (VERBOSE) Log.d("Main", "ignoring BUFFER_FLAG_CODEC_CONFIG");
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
System.out.println("Encode SPS PPS buffer size" + mBufferInfo.size );
sps_pps_nal_size = mBufferInfo.size;
sps_pps_nal = new byte[sps_pps_nal_size];
encodedData.get(sps_pps_nal, 0, sps_pps_nal_size);
mBufferInfo.size = 0;
}
if (mBufferInfo.size != 0) {
// if (!mMuxerStarted) {
// throw new RuntimeException("muxer hasn't started");
// }
// adjust the ByteBuffer values to match BufferInfo (not needed?)
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
encodedData.get(enc_strm, sps_pps_nal_size, mBufferInfo.size);
System.arraycopy(sps_pps_nal, 0, enc_strm, 0, sps_pps_nal_size);
encod_len = mBufferInfo.size + sps_pps_nal_size;
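// The byte after the 4-byte start code carries the NAL unit type in its low
// 5 bits; type 5 is an IDR slice, i.e. a key frame.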
if ((enc_strm[sps_pps_nal_size + 4] & 0x1F)== 5) {
frame_type = 2;
} else {
frame_type = 0;
}
//mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
if (VERBOSE) {
Log.d("Main", "sent " + mBufferInfo.size + " bytes to muxer, ts=" +
mBufferInfo.presentationTimeUs);
}
enc_buffer_global.encoded_len = 0; // frame sending is disabled.
if(enc_buffer_global.encoded_len == 0) { // always true after the line above
enc_buffer_global.encoded_data = enc_strm;
enc_buffer_global.encoded_len = encod_len;
enc_buffer_global.frame_type = frame_type;
// System.out.println("encoded Wrote stream len =" + enc_buffer_global.encoded_len);
try {
fp_enc.write(enc_strm, 0, encod_len);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
fp_enc.flush();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
enc_len.format("%d\n", encod_len);
enc_len.flush();
}
}
mEncoder.releaseOutputBuffer(encoderStatus, false);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (!endOfStream) {
Log.w(TAG, "reached end of stream unexpectedly");
} else {
if (VERBOSE) Log.d(TAG, "end of stream reached");
}
break; // out of while
}
}
}
System.out.println("drainEncoder end");
}
public void set_buffer(encoded_stream_info bufer) {
enc_buffer_global = bufer;
}
}
Other than these, I have added the
EglCore.java
EglSurfaceBase.java
TextureRender.java
WindowSurface.java
classes from the grafika link given above.
Still, I am not able to figure out why my app freezes during re-initialization; sometimes it works, though.
Can anyone help?
Otherwise, what are the steps I should take to change the encoding resolution?
Thanks in advance.
I got it working with the following code:
public void onConfigurationChanged(Configuration newConfig) {
System.out.println("On config change start ");
super.onConfigurationChanged(newConfig);
mGLView.queueEvent(new Runnable() {
#Override public void run() {
// Tell the renderer that it's about to be paused so it can clean up.
mRenderer.notifyPausing();
}
});
mGLView.queueEvent(new Runnable() {
#Override public void run() {
mRenderer.re_init();
}
});
}
where the re_init function is:
public void re_init() {
mTextureRender = new TextureRender(cntex);
mTextureRender.surfaceCreated();
if (ROSE_COLORED_GLASSES) {
String rosyFragment =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" +
"varying vec2 vTextureCoord;\n" +
"uniform samplerExternalOES sTexture;\n" +
"void main() {\n" +
" vec4 tc = texture2D(sTexture, vTextureCoord);\n" +
" gl_FragColor.r = tc.r * 0.3 + tc.g * 0.59 + tc.b * 0.11;\n" +
"}\n";
mTextureRender.changeFragmentShader(rosyFragment);
}
mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());
mCameraHandler.sendMessage(mCameraHandler.obtainMessage(
MainActivity.CameraHandler.MSG_SET_SURFACE_TEXTURE, mSurfaceTexture));
if (cntex.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
mVideoEncoder.startRecording(new TextureMovieEncoder.EncoderConfig(
mOutputFile, 640, 480, 1400000, EGL14.eglGetCurrentContext()));
} else {
mVideoEncoder.startRecording(new TextureMovieEncoder.EncoderConfig(
mOutputFile, 480, 640, 1400000, EGL14.eglGetCurrentContext()));
}
//mVideoEncoder.updateSharedContext(EGL14.eglGetCurrentContext());
}
In mRenderer.notifyPausing() I have also added a videoEncoder.stopRecording() call, which waits until the whole recorder is stopped (I did an object-based synchronisation there).
But the whole re-initialisation takes 250 - 400 ms.
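(Note that onConfigurationChanged() is only delivered this way, rather than the activity being destroyed and recreated, if the activity declares android:configChanges="orientation|screenSize" in its manifest entry.)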