SurfaceView preview is stretched vertically - Android

I am working on a live-streaming application using RTMP. I am at an early stage; I have successfully done a lot of things, but now I am stuck in one place. I am using a SurfaceView, and my video is slightly stretched vertically. The code is below:
This is my SrsCameraView:
public class SrsCameraView extends GLSurfaceView implements GLSurfaceView.Renderer {
private GPUImageFilter magicFilter;
private SurfaceTexture surfaceTexture;
private int mOESTextureId = OpenGLUtils.NO_TEXTURE;
private int mSurfaceWidth;
private int mSurfaceHeight;
private int mPreviewWidth;
private int mPreviewHeight;
private boolean mIsEncoding;
private boolean mIsTorchOn = false;
private float mInputAspectRatio;
private float mOutputAspectRatio;
private float[] mProjectionMatrix = new float[16];
private float[] mSurfaceMatrix = new float[16];
private float[] mTransformMatrix = new float[16];
private Camera mCamera;
private ByteBuffer mGLPreviewBuffer;
private final static int PIXEL_FORMAT = ImageFormat.NV21;
private int mCamId = -1;
private int mPreviewRotation = 90;
private int mPreviewOrientation = Configuration.ORIENTATION_PORTRAIT;
private Thread worker;
private final Object writeLock = new Object();
private ConcurrentLinkedQueue<IntBuffer> mGLIntBufferCache = new
ConcurrentLinkedQueue<>();
private PreviewCallback mPrevCb;
public SrsCameraView(Context context) {
this(context, null);
}
public SrsCameraView(Context context, AttributeSet attrs) {
super(context, attrs);
setEGLContextClientVersion(2);
setRenderer(this);
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
GLES20.glDisable(GL10.GL_DITHER);
GLES20.glClearColor(0, 0, 0, 0);
magicFilter = new GPUImageFilter(MagicFilterType.NONE);
magicFilter.init(getContext().getApplicationContext());
magicFilter.onInputSizeChanged(mPreviewWidth, mPreviewHeight);
mOESTextureId = OpenGLUtils.getExternalOESTextureID();
surfaceTexture = new SurfaceTexture(mOESTextureId);
surfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
requestRender();
}
});
// For camera preview on activity creation
if (mCamera != null) {
try {
mCamera.setPreviewTexture(surfaceTexture);
} catch (IOException ioe) {
ioe.printStackTrace();
}
}
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
GLES20.glViewport(0, 0, width, height);
mSurfaceWidth = width;
mSurfaceHeight = height;
magicFilter.onDisplaySizeChanged(width, height);
mOutputAspectRatio = width > height ? (float) width / height : (float)
height / width;
float aspectRatio = mOutputAspectRatio / mInputAspectRatio;
if (width > height) {
Matrix.orthoM(mProjectionMatrix, 0, -1.0f, 1.0f, -aspectRatio,
aspectRatio, -1.0f, 1.0f);
} else {
Matrix.orthoM(mProjectionMatrix, 0, -aspectRatio, aspectRatio, -1.0f, 1.0f, -1.0f, 1.0f);
}
}
@Override
public void onDrawFrame(GL10 gl) {
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
surfaceTexture.updateTexImage();
surfaceTexture.getTransformMatrix(mSurfaceMatrix);
Matrix.multiplyMM(mTransformMatrix, 0, mSurfaceMatrix, 0,
mProjectionMatrix, 0);
magicFilter.setTextureTransformMatrix(mTransformMatrix);
magicFilter.onDrawFrame(mOESTextureId);
if (mIsEncoding) {
mGLIntBufferCache.add(magicFilter.getGLFboBuffer());
synchronized (writeLock) {
writeLock.notifyAll();
}
}
}
public void setPreviewCallback(PreviewCallback cb) {
mPrevCb = cb;
}
public int[] setPreviewResolution(int width, int height) {
getHolder().setFixedSize(width, height);
mCamera = openCamera();
mPreviewWidth = width;
mPreviewHeight = height;
Camera.Size rs = adaptPreviewResolution(mCamera.new Size(width,
height));
if (rs != null) {
mPreviewWidth = rs.width;
mPreviewHeight = rs.height;
}
mCamera.getParameters().setPreviewSize(mPreviewWidth, mPreviewHeight);
mGLPreviewBuffer = ByteBuffer.allocateDirect(mPreviewWidth *
mPreviewHeight * 4);
mInputAspectRatio = mPreviewWidth > mPreviewHeight ?
(float) mPreviewWidth / mPreviewHeight : (float) mPreviewHeight /
mPreviewWidth;
return new int[] { mPreviewWidth, mPreviewHeight };
}
public boolean setFilter(final MagicFilterType type) {
if (mCamera == null) {
return false;
}
queueEvent(new Runnable() {
@Override
public void run() {
if (magicFilter != null) {
magicFilter.destroy();
}
magicFilter = MagicFilterFactory.initFilters(type);
if (magicFilter != null) {
magicFilter.init(getContext().getApplicationContext());
magicFilter.onInputSizeChanged(mPreviewWidth,
mPreviewHeight);
magicFilter.onDisplaySizeChanged(mSurfaceWidth,
mSurfaceHeight);
}
}
});
requestRender();
return true;
}
private void deleteTextures() {
if (mOESTextureId != OpenGLUtils.NO_TEXTURE) {
queueEvent(new Runnable() {
@Override
public void run() {
GLES20.glDeleteTextures(1, new int[]{ mOESTextureId }, 0);
mOESTextureId = OpenGLUtils.NO_TEXTURE;
}
});
}
}
public void setCameraId(int id) {
mCamId = id;
setPreviewOrientation(mPreviewOrientation);
}
public void setPreviewOrientation(int orientation) {
mPreviewOrientation = orientation;
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(mCamId, info);
if (orientation == Configuration.ORIENTATION_PORTRAIT) {
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
mPreviewRotation = info.orientation % 360;
mPreviewRotation = (360 - mPreviewRotation) % 360; // compensate the mirror
} else {
mPreviewRotation = (info.orientation + 360) % 360;
}
} else if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
mPreviewRotation = (info.orientation + 90) % 360;
mPreviewRotation = (360 - mPreviewRotation) % 360; // compensate the mirror
} else {
mPreviewRotation = (info.orientation + 270) % 360;
}
}
}
public int getCameraId() {
return mCamId;
}
public void enableEncoding() {
worker = new Thread(new Runnable() {
@Override
public void run() {
while (!Thread.interrupted()) {
while (!mGLIntBufferCache.isEmpty()) {
IntBuffer picture = mGLIntBufferCache.poll();
mGLPreviewBuffer.asIntBuffer().put(picture.array());
mPrevCb.onGetRgbaFrame(mGLPreviewBuffer.array(),
mPreviewWidth, mPreviewHeight);
}
// Waiting for next frame
synchronized (writeLock) {
try {
// isEmpty() may take some time, so we set timeout to detect next frame
writeLock.wait(500);
} catch (InterruptedException ie) {
worker.interrupt();
}
}
}
}
});
worker.start();
mIsEncoding = true;
}
public void disableEncoding() {
mIsEncoding = false;
mGLIntBufferCache.clear();
if (worker != null) {
worker.interrupt();
try {
worker.join();
} catch (InterruptedException e) {
e.printStackTrace();
worker.interrupt();
}
worker = null;
}
}
public boolean startCamera() {
if (mCamera == null) {
mCamera = openCamera();
if (mCamera == null) {
return false;
}
}
Camera.Parameters params = mCamera.getParameters();
params.setPictureSize(mPreviewWidth, mPreviewHeight);
params.setPreviewSize(mPreviewWidth, mPreviewHeight);
int[] range = adaptFpsRange(SrsEncoder.VFPS,
params.getSupportedPreviewFpsRange());
params.setPreviewFpsRange(range[0], range[1]);
params.setPreviewFormat(ImageFormat.NV21);
params.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
params.setWhiteBalance(Camera.Parameters.WHITE_BALANCE_AUTO);
params.setSceneMode(Camera.Parameters.SCENE_MODE_AUTO);
List<String> supportedFocusModes = params.getSupportedFocusModes();
if (supportedFocusModes != null && !supportedFocusModes.isEmpty()) {
if (supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
} else if (supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
mCamera.autoFocus(null);
} else {
params.setFocusMode(supportedFocusModes.get(0));
}
}
List<String> supportedFlashModes = params.getSupportedFlashModes();
if (supportedFlashModes != null && !supportedFlashModes.isEmpty()) {
if (supportedFlashModes.contains(Camera.Parameters.FLASH_MODE_TORCH)) {
if (mIsTorchOn) {
params.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH);
}
} else {
params.setFlashMode(supportedFlashModes.get(0));
}
}
mCamera.setParameters(params);
mCamera.setDisplayOrientation(mPreviewRotation);
try {
mCamera.setPreviewTexture(surfaceTexture);
} catch (IOException e) {
e.printStackTrace();
}
mCamera.startPreview();
return true;
}
public void stopCamera() {
disableEncoding();
if (mCamera != null) {
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
}
private Camera openCamera() {
Camera camera;
if (mCamId < 0) {
Camera.CameraInfo info = new Camera.CameraInfo();
int numCameras = Camera.getNumberOfCameras();
int frontCamId = -1;
int backCamId = -1;
for (int i = 0; i < numCameras; i++) {
Camera.getCameraInfo(i, info);
if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
backCamId = i;
} else if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT)
{
frontCamId = i;
break;
}
}
if (frontCamId != -1) {
mCamId = frontCamId;
} else if (backCamId != -1) {
mCamId = backCamId;
} else {
mCamId = 0;
}
}
camera = Camera.open(mCamId);
return camera;
}
private Camera.Size adaptPreviewResolution(Camera.Size resolution) {
float diff = 100f;
float xdy = (float) resolution.width / (float) resolution.height;
Camera.Size best = null;
for (Camera.Size size :
mCamera.getParameters().getSupportedPreviewSizes()) {
if (size.equals(resolution)) {
return size;
}
float tmp = Math.abs(((float) size.width / (float) size.height) -
xdy);
if (tmp < diff) {
diff = tmp;
best = size;
}
}
return best;
}
private int[] adaptFpsRange(int expectedFps, List<int[]> fpsRanges) {
expectedFps *= 1000;
int[] closestRange = fpsRanges.get(0);
int measure = Math.abs(closestRange[0] - expectedFps) +
Math.abs(closestRange[1] - expectedFps);
for (int[] range : fpsRanges) {
if (range[0] <= expectedFps && range[1] >= expectedFps) {
int curMeasure = Math.abs(range[0] - expectedFps) +
Math.abs(range[1] - expectedFps);
if (curMeasure < measure) {
closestRange = range;
measure = curMeasure;
}
}
}
return closestRange;
}
public interface PreviewCallback {
void onGetRgbaFrame(byte[] data, int width, int height);
}
}
This is my MainActivity:
public class MainActivity extends AppCompatActivity implements RtmpHandler.RtmpListener,
SrsRecordHandler.SrsRecordListener, SrsEncodeHandler.SrsEncodeListener {
private static final String TAG = "Yasea";
Button btnPublish = null;
Button btnSwitchCamera = null;
Button btnRecord = null;
Button btnSwitchEncoder = null;
private SharedPreferences sp;
private String rtmpUrl = "rtmp://00.00.000.00/live/" + getRandomAlphaString(3) + '/' + getRandomAlphaDigitString(5);
private String recPath = Environment.getExternalStorageDirectory().getPath() + "/test.mp4";
private SrsPublisher mPublisher;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.activity_main);
// response screen rotation event
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_FULL_SENSOR);
// restore data.
sp = getSharedPreferences("Yasea", MODE_PRIVATE);
rtmpUrl = sp.getString("rtmpUrl", rtmpUrl);
// initialize url.
final EditText efu = (EditText) findViewById(R.id.url);
efu.setText(rtmpUrl);
btnPublish = (Button) findViewById(R.id.publish);
btnSwitchCamera = (Button) findViewById(R.id.swCam);
btnRecord = (Button) findViewById(R.id.record);
btnSwitchEncoder = (Button) findViewById(R.id.swEnc);
mPublisher = new SrsPublisher((SrsCameraView) findViewById(R.id.glsurfaceview_camera));
mPublisher.setEncodeHandler(new SrsEncodeHandler(this));
mPublisher.setRtmpHandler(new RtmpHandler(this));
mPublisher.setRecordHandler(new SrsRecordHandler(this));
mPublisher.setPreviewResolution(640, 480);
mPublisher.setOutputResolution(720, 1280);
mPublisher.setVideoHDMode();
mPublisher.startCamera();
btnPublish.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (btnPublish.getText().toString().contentEquals("publish")) {
rtmpUrl = efu.getText().toString();
SharedPreferences.Editor editor = sp.edit();
editor.putString("rtmpUrl", rtmpUrl);
editor.apply();
mPublisher.startPublish(rtmpUrl);
mPublisher.startCamera();
if (btnSwitchEncoder.getText().toString().contentEquals("soft encoder")) {
Toast.makeText(getApplicationContext(), "Use hard encoder", Toast.LENGTH_SHORT).show();
} else {
Toast.makeText(getApplicationContext(), "Use soft encoder", Toast.LENGTH_SHORT).show();
}
btnPublish.setText("stop");
btnSwitchEncoder.setEnabled(false);
} else if (btnPublish.getText().toString().contentEquals("stop")) {
mPublisher.stopPublish();
mPublisher.stopRecord();
btnPublish.setText("publish");
btnRecord.setText("record");
btnSwitchEncoder.setEnabled(true);
}
}
});
btnSwitchCamera.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mPublisher.switchCameraFace((mPublisher.getCamraId() + 1) % Camera.getNumberOfCameras());
}
});
btnRecord.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (btnRecord.getText().toString().contentEquals("record")) {
if (mPublisher.startRecord(recPath)) {
btnRecord.setText("pause");
}
} else if (btnRecord.getText().toString().contentEquals("pause")) {
mPublisher.pauseRecord();
btnRecord.setText("resume");
} else if (btnRecord.getText().toString().contentEquals("resume")) {
mPublisher.resumeRecord();
btnRecord.setText("pause");
}
}
});
btnSwitchEncoder.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (btnSwitchEncoder.getText().toString().contentEquals("soft encoder")) {
mPublisher.switchToSoftEncoder();
btnSwitchEncoder.setText("hard encoder");
} else if (btnSwitchEncoder.getText().toString().contentEquals("hard encoder")) {
mPublisher.switchToHardEncoder();
btnSwitchEncoder.setText("soft encoder");
}
}
});
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
return true;
} else {
switch (id) {
case R.id.cool_filter:
mPublisher.switchCameraFilter(MagicFilterType.COOL);
break;
case R.id.beauty_filter:
mPublisher.switchCameraFilter(MagicFilterType.BEAUTY);
break;
case R.id.early_bird_filter:
mPublisher.switchCameraFilter(MagicFilterType.EARLYBIRD);
break;
case R.id.evergreen_filter:
mPublisher.switchCameraFilter(MagicFilterType.EVERGREEN);
break;
case R.id.n1977_filter:
mPublisher.switchCameraFilter(MagicFilterType.N1977);
break;
case R.id.nostalgia_filter:
mPublisher.switchCameraFilter(MagicFilterType.NOSTALGIA);
break;
case R.id.romance_filter:
mPublisher.switchCameraFilter(MagicFilterType.ROMANCE);
break;
case R.id.sunrise_filter:
mPublisher.switchCameraFilter(MagicFilterType.SUNRISE);
break;
case R.id.sunset_filter:
mPublisher.switchCameraFilter(MagicFilterType.SUNSET);
break;
case R.id.tender_filter:
mPublisher.switchCameraFilter(MagicFilterType.TENDER);
break;
case R.id.toast_filter:
mPublisher.switchCameraFilter(MagicFilterType.TOASTER2);
break;
case R.id.valencia_filter:
mPublisher.switchCameraFilter(MagicFilterType.VALENCIA);
break;
case R.id.walden_filter:
mPublisher.switchCameraFilter(MagicFilterType.WALDEN);
break;
case R.id.warm_filter:
mPublisher.switchCameraFilter(MagicFilterType.WARM);
break;
case R.id.original_filter:
default:
mPublisher.switchCameraFilter(MagicFilterType.NONE);
break;
}
}
setTitle(item.getTitle());
return super.onOptionsItemSelected(item);
}
@Override
protected void onResume() {
super.onResume();
final Button btn = (Button) findViewById(R.id.publish);
btn.setEnabled(true);
mPublisher.resumeRecord();
}
@Override
protected void onPause() {
super.onPause();
mPublisher.pauseRecord();
}
@Override
protected void onDestroy() {
super.onDestroy();
mPublisher.stopPublish();
mPublisher.stopRecord();
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
mPublisher.stopEncode();
mPublisher.stopRecord();
btnRecord.setText("record");
mPublisher.setScreenOrientation(newConfig.orientation);
if (btnPublish.getText().toString().contentEquals("stop")) {
mPublisher.startEncode();
}
mPublisher.startCamera();
}
private static String getRandomAlphaString(int length) {
String base = "abcdefghijklmnopqrstuvwxyz";
Random random = new Random();
StringBuilder sb = new StringBuilder();
for (int i = 0; i < length; i++) {
int number = random.nextInt(base.length());
sb.append(base.charAt(number));
}
return sb.toString();
}
private static String getRandomAlphaDigitString(int length) {
String base = "abcdefghijklmnopqrstuvwxyz0123456789";
Random random = new Random();
StringBuilder sb = new StringBuilder();
for (int i = 0; i < length; i++) {
int number = random.nextInt(base.length());
sb.append(base.charAt(number));
}
return sb.toString();
}
private void handleException(Exception e) {
try {
Toast.makeText(getApplicationContext(), e.getMessage(), Toast.LENGTH_SHORT).show();
mPublisher.stopPublish();
mPublisher.stopRecord();
btnPublish.setText("publish");
btnRecord.setText("record");
btnSwitchEncoder.setEnabled(true);
} catch (Exception e1) {
//
}
}
// Implementation of SrsRtmpListener.
@Override
public void onRtmpConnecting(String msg) {
Toast.makeText(getApplicationContext(), msg, Toast.LENGTH_SHORT).show();
}
@Override
public void onRtmpConnected(String msg) {
Toast.makeText(getApplicationContext(), msg, Toast.LENGTH_SHORT).show();
}
@Override
public void onRtmpVideoStreaming() {
}
@Override
public void onRtmpAudioStreaming() {
}
@Override
public void onRtmpStopped() {
Toast.makeText(getApplicationContext(), "Stopped", Toast.LENGTH_SHORT).show();
}
@Override
public void onRtmpDisconnected() {
Toast.makeText(getApplicationContext(), "Disconnected", Toast.LENGTH_SHORT).show();
}
@Override
public void onRtmpVideoFpsChanged(double fps) {
Log.i(TAG, String.format("Output Fps: %f", fps));
}
@Override
public void onRtmpVideoBitrateChanged(double bitrate) {
int rate = (int) bitrate;
if (rate / 1000 > 0) {
Log.i(TAG, String.format("Video bitrate: %f kbps", bitrate / 1000));
} else {
Log.i(TAG, String.format("Video bitrate: %d bps", rate));
}
}
@Override
public void onRtmpAudioBitrateChanged(double bitrate) {
int rate = (int) bitrate;
if (rate / 1000 > 0) {
Log.i(TAG, String.format("Audio bitrate: %f kbps", bitrate / 1000));
} else {
Log.i(TAG, String.format("Audio bitrate: %d bps", rate));
}
}
@Override
public void onRtmpSocketException(SocketException e) {
handleException(e);
}
@Override
public void onRtmpIOException(IOException e) {
handleException(e);
}
@Override
public void onRtmpIllegalArgumentException(IllegalArgumentException e) {
handleException(e);
}
@Override
public void onRtmpIllegalStateException(IllegalStateException e) {
handleException(e);
}
// Implementation of SrsRecordHandler.
@Override
public void onRecordPause() {
Toast.makeText(getApplicationContext(), "Record paused", Toast.LENGTH_SHORT).show();
}
@Override
public void onRecordResume() {
Toast.makeText(getApplicationContext(), "Record resumed", Toast.LENGTH_SHORT).show();
}
@Override
public void onRecordStarted(String msg) {
Toast.makeText(getApplicationContext(), "Recording file: " + msg, Toast.LENGTH_SHORT).show();
}
@Override
public void onRecordFinished(String msg) {
Toast.makeText(getApplicationContext(), "MP4 file saved: " + msg, Toast.LENGTH_SHORT).show();
}
@Override
public void onRecordIOException(IOException e) {
handleException(e);
}
@Override
public void onRecordIllegalArgumentException(IllegalArgumentException e) {
handleException(e);
}
// Implementation of SrsEncodeHandler.
@Override
public void onNetworkWeak() {
Toast.makeText(getApplicationContext(), "Network weak", Toast.LENGTH_SHORT).show();
}
@Override
public void onNetworkResume() {
Toast.makeText(getApplicationContext(), "Network resume", Toast.LENGTH_SHORT).show();
}
@Override
public void onEncodeIllegalArgumentException(IllegalArgumentException e) {
handleException(e);
}
}

I am not sure that this is the issue (a screenshot could have helped), but there may be a precision pitfall here (and possibly in other places in the code):
mOutputAspectRatio = width > height ? (float) width / height : (float) height / width;
Both width and height are integers. The cast binds only to the first operand, so this particular expression already performs a float division, but the pattern is fragile: drop or misplace the cast and the division truncates to an integer and loses precision. I suggest you cast both operands explicitly so the intent is unambiguous:
mOutputAspectRatio = width > height ? ((float) width) / ((float) height) : ((float) height) / ((float) width);
And the same for the computation of mInputAspectRatio.
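For illustration, here is a minimal, self-contained sketch of the difference (the class and variable names are hypothetical; the values mirror the 720x1280 output resolution used above):

public class DivisionDemo {
    public static void main(String[] args) {
        int width = 720, height = 1280;
        // Integer division happens first, then the 0 is widened to 0.0f.
        float truncated = width / height;
        // The cast binds to width only, but one float operand is enough
        // to force a floating-point division: 0.5625f.
        float promoted = (float) width / height;
        // Casting both operands gives the same value with unambiguous intent.
        float explicit = ((float) width) / ((float) height);
        System.out.println(truncated + " " + promoted + " " + explicit); // 0.0 0.5625 0.5625
    }
}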

Related

Camera2 pinch to zoom

I'm struggling with trying to get pinch-to-zoom functionality working under Camera2; the API is not very helpful, it has to be said, and other libraries, like CameraKit, don't do what I want.
I've looked at the various pieces of code on this and other websites and have come up with the following, but I can't get anything working, not even a feed from the camera on the TextureView. (I'm not interested in saving the image from the camera, just in displaying it and pinching to zoom.) This isn't a problem with permissions, as I get asked whether I will allow camera access; it's just that nothing appears.
The relevant section of the layout xml file is as follows:
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:id="#+id/frlayout"
android:layout_weight="0.80"
android:layout_width="match_parent"
android:layout_height="0dp">
<TextureView
android:id="#+id/cameraView"
android:background="#android:color/transparent"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:adjustViewBounds="true" />
</FrameLayout>
The salient portions of the MainActivity are here:
public class MainActivity extends AppCompatActivity {
public float finger_spacing = 0;
public int zoom_level = 1;
String camerId;
CameraCharacteristics characteristics;
int PERMISSION_ALL = 1;
private Size previewsize;
private Size jpegSizes[]=null;
private TextureView textureView;
private CameraDevice cameraDevice;
private CaptureRequest.Builder previewBuilder;
private CameraCaptureSession previewSession;
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
if(ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED)
{
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, PERMISSION_ALL);
}
textureView=(TextureView)findViewById(R.id.cameraView);
textureView.setSurfaceTextureListener(surfaceTextureListener);
}
public void openCamera()
{
CameraManager manager=(CameraManager)getSystemService(Context.CAMERA_SERVICE);
try
{
camerId=manager.getCameraIdList()[0];
characteristics=manager.getCameraCharacteristics(camerId);
StreamConfigurationMap map=characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
previewsize=map.getOutputSizes(SurfaceTexture.class)[0];
if(ActivityCompat.checkSelfPermission(this, android.Manifest.permission.CAMERA)
== PackageManager.PERMISSION_GRANTED) {
manager.openCamera(camerId, stateCallback, null);
}
}catch (Exception e)
{
}
}
private TextureView.SurfaceTextureListener surfaceTextureListener=new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
openCamera();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
};
private CameraDevice.StateCallback stateCallback=new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
cameraDevice=camera;
startCamera();
// With previewBuilder set up, we can now call the fn to initialise ontouch
setuptexturetouch();
}
@Override
public void onDisconnected(CameraDevice camera) {
}
@Override
public void onError(CameraDevice camera, int error) {
}
};
@Override
protected void onPause() {
super.onPause();
if(cameraDevice!=null)
{
cameraDevice.close();
}
}
void startCamera()
{
if(cameraDevice==null||!textureView.isAvailable()|| previewsize==null)
{
return;
}
SurfaceTexture texture=textureView.getSurfaceTexture();
if(texture==null)
{
return;
}
texture.setDefaultBufferSize(previewsize.getWidth(),previewsize.getHeight());
Surface surface=new Surface(texture);
//mPreviewRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
//mPreviewRequestBuilder.addTarget(surface);
try
{
previewBuilder=cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
}catch (Exception e)
{
}
previewBuilder.addTarget(surface);
try
{
cameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
previewSession=session;
getChangedPreview();
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
}
},null);
}catch (Exception e)
{
}
}
void getChangedPreview()
{
if(cameraDevice==null)
{
return;
}
previewBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
HandlerThread thread=new HandlerThread("changed Preview");
thread.start();
Handler handler=new Handler(thread.getLooper());
try
{
previewSession.setRepeatingRequest(previewBuilder.build(), null, handler);
}catch (Exception e){}
}
void setuptexturetouch()
{
textureView.setOnTouchListener( new View.OnTouchListener()
{
@Override
public boolean onTouch( View v, MotionEvent event )
{
try {
Activity activity = MainActivity.this;
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(camerId);
}
catch(Exception e)
{
}
float maxzoom = (characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM))*10;
Rect m = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
int action = event.getAction();
float current_finger_spacing;
if (event.getPointerCount() > 1)
{
// Multi touch logic
current_finger_spacing = getFingerSpacing(event);
if(finger_spacing != 0){
if(current_finger_spacing > finger_spacing && maxzoom > zoom_level){
zoom_level++;
} else if (current_finger_spacing < finger_spacing && zoom_level > 1){
zoom_level--;
}
int minW = (int) (m.width() / maxzoom);
int minH = (int) (m.height() / maxzoom);
int difW = m.width() - minW;
int difH = m.height() - minH;
int cropW = difW /100 *(int)zoom_level;
int cropH = difH /100 *(int)zoom_level;
cropW -= cropW & 3;
cropH -= cropH & 3;
Rect zoom = new Rect(cropW, cropH, m.width() - cropW, m.height() - cropH);
previewBuilder.set(CaptureRequest.SCALER_CROP_REGION, zoom);
}
finger_spacing = current_finger_spacing;
}
else
{
if (action == MotionEvent.ACTION_UP)
{
//single touch logic
}
}
try
{
previewSession.setRepeatingRequest(previewBuilder.build(), mCaptureCallback, null);
}
catch (CameraAccessException e)
{
//e.printStackTrace();
}
catch (NullPointerException ex)
{
//ex.printStackTrace();
}
}
catch (Exception e)
{
//throw new RuntimeException("can not access camera.", e);
}
return true;
}
});
}
private float getFingerSpacing(MotionEvent event) {
float x = event.getX(0) - event.getX(1);
float y = event.getY(0) - event.getY(1);
return (float) Math.sqrt(x * x + y * y);
}
private CameraCaptureSession.CaptureCallback mCaptureCallback = new CameraCaptureSession.CaptureCallback()
{
private void process(CaptureResult result)
{
// Just a placeholder at present - the original code
// had switch statements to see if an image had been saved
// but I only want to view, not save.
}
@Override
public void onCaptureProgressed(CameraCaptureSession session,
CaptureRequest request,
CaptureResult partialResult) {
process(partialResult);
}
@Override
public void onCaptureCompleted(CameraCaptureSession session,
CaptureRequest request,
TotalCaptureResult result) {
process(result);
}
};
}

Incorrect decoding of H264 video stream on Nexus devices

I'm trying to use the Android MediaCodec class to decode the H.264 video stream of a remote camera. My code is:
public class RemoteCamera {
public interface OnCameraListener {
void onConnected();
void onFailureConnection();
void onDisconnected();
void onReady();
}
private static final int MAX_NAL_LEN = 1024 * 1024;
private static final String TAG = "RemoteCamera";
private OutputThread mOutputThread;
private WebSocketManager mWebSocketManager;
private OnCameraListener mOnCameraListener;
private int mSearchState = 0;
private byte[] mNalData;
private int mNalDataPos;
private MediaCodec mDecoder;
private MediaFormat mFormat;
private SurfaceView mSurfaceView;
private MediaCodec.BufferInfo mInfo = new MediaCodec.BufferInfo();
private boolean mIsWaitingForSPS = true;
public RemoteCamera(final SurfaceView surfaceView, final String wss) {
mSurfaceView = surfaceView;
mWebSocketManager = new WebSocketManager(wss);
mWebSocketManager.setWSListener(new WebSocketManager.OnWSListener() {
@Override
public void onOpen() {
if (mOnCameraListener != null) {
mOnCameraListener.onConnected();
}
}
@Override
public void onClosed() {
if (mOnCameraListener != null) {
mOnCameraListener.onDisconnected();
}
}
@Override
public void onFailure() {
if (mOnCameraListener != null) {
mOnCameraListener.onFailureConnection();
}
}
@Override
public synchronized void onMessage(final ByteString bytes) {
final ByteBuffer bb = ByteBuffer.wrap(bytes.toByteArray());
if (mIsWaitingForSPS) {
if (isSPSUnit(bb)) {
mIsWaitingForSPS = false;
if (mOnCameraListener != null) {
mOnCameraListener.onReady();
}
} else {
return;
}
}
parseDatagram(bb.array(), bytes.size());
}
});
mNalData = new byte[MAX_NAL_LEN];
mNalDataPos = 0;
try {
mDecoder = MediaCodec.createDecoderByType("video/avc");
} catch (Exception e) {
Log.d(TAG, e.toString());
return;
}
mFormat = MediaFormat.createVideoFormat("video/avc", 320, 240);
}
public void setOnCameraListener(final OnCameraListener cameraListener) {
mOnCameraListener = cameraListener;
}
public void startStreaming() {
mSurfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
try {
mDecoder.configure(mFormat, mSurfaceView.getHolder().getSurface(), null, 0);
} catch (Exception e) {
Log.d(TAG, e.toString());
return;
}
mWebSocketManager.wsRegister();
mDecoder.start();
mOutputThread = new OutputThread();
mOutputThread.start();
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i1, int i2) {
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
}
});
}
private void feedDecoder(byte[] n, int len) {
for (; ; ) {
try {
int inputBufferIndex = mDecoder.dequeueInputBuffer(0);
if (inputBufferIndex >= 0) {
final ByteBuffer inputBuffer = mDecoder.getInputBuffer(inputBufferIndex);
inputBuffer.put(n, 0, len);
mDecoder.queueInputBuffer(inputBufferIndex, 0, len, System.currentTimeMillis(), 0);
break;
}
} catch (Exception e) {
Log.d(TAG, e.toString());
}
}
}
private void parseDatagram(byte[] p, int plen) {
try {
for (int i = 0; i < plen; ++i) {
mNalData[mNalDataPos++] = p[i];
if (mNalDataPos == MAX_NAL_LEN - 1) {
mNalDataPos = 0;
}
switch (mSearchState) {
case 0:
case 1:
case 2:
if (p[i] == 0)
mSearchState++;
else
mSearchState = 0;
break;
case 3:
if (p[i] == 1) {
mNalData[0] = 0;
mNalData[1] = 0;
mNalData[2] = 0;
mNalData[3] = 1;
feedDecoder(mNalData, mNalDataPos - 4);
mNalDataPos = 4;
}
mSearchState = 0;
break;
default:
break;
}
}
} catch (Exception e) {
Log.d(TAG, e.toString());
}
}
private boolean isSPSUnit(final ByteBuffer unit) {
return unit.get(4) == 0x67;
}
private class OutputThread extends Thread {
@Override
public void run() {
while (true) {
try {
int outputBufferIndex = mDecoder.dequeueOutputBuffer(mInfo, 10);
if (outputBufferIndex >= 0) {
mDecoder.releaseOutputBuffer(outputBufferIndex, true);
}
} catch (Exception e) {
Log.d(TAG, e.toString());
}
}
}
}
}
I tested the code on a Sony Xperia Z5 Compact and a Yota Phone 2, and it works fine on these devices. The picture that I get from the Sony is really good.
Then I tried the video streamer on Nexus 9 and Nexus 7 devices, but the picture looks like a row moving from top to bottom; there is no correct output on the Nexus devices.
I know that this depends on the native Android media codec, but what should I do to resolve the problem and be able to show the video on all devices?
Do not pass the 0x00 0x00 0x00 0x01 NALU start code to the decoder.
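To make that concrete, here is a minimal, untested sketch of how the feeding code above could be adapted, assuming every NAL unit handed over by parseDatagram starts with the 4-byte Annex-B prefix. feedDecoderWithoutStartCode is a hypothetical helper, not part of the original code:

// Hedged sketch: drop the 0x00 0x00 0x00 0x01 start code before queueing,
// per the suggestion above. Assumes mDecoder is configured and started.
private void feedDecoderWithoutStartCode(byte[] nal, int len) {
    final int START_CODE_LEN = 4;
    if (len <= START_CODE_LEN) {
        return; // nothing but the prefix; skip it
    }
    int inputBufferIndex = mDecoder.dequeueInputBuffer(0);
    if (inputBufferIndex >= 0) {
        ByteBuffer inputBuffer = mDecoder.getInputBuffer(inputBufferIndex);
        inputBuffer.put(nal, START_CODE_LEN, len - START_CODE_LEN);
        mDecoder.queueInputBuffer(inputBufferIndex, 0, len - START_CODE_LEN,
                System.currentTimeMillis(), 0);
    }
}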

Android camera2 QR code reader

I have a fragment which scans for a QR code using the camera2 API and the ZXing (Google's zebra crossing) API. It works fine on all phones; the problem is with one specific tablet, the NVIDIA SHIELD. It just will not find the QR code in the image. I have tried a lot:
Smaller and bigger screen/image dimensions.
All the different camera settings, like ISO, contrast, lighter, darker.
Using all the image formats, like JPEG, YUV_422_888 ...
Using plane numbers 0, 1, 2...
Sorry for such a big snippet of code; ask questions and I will respond ASAP.
Here is my code:
public class FragmentDecoder extends Fragment
implements FragmentCompat.OnRequestPermissionsResultCallback {
private static boolean accessGranted = true;
private static boolean showingText = false;
private ImageView camera_guide_image;
private TextView decoderText;
private ImageButton flashButton;
private static final int MAX_PREVIEW_WIDTH = 1920;
private static final int MAX_PREVIEW_HEIGHT = 1080;
private int mSensorOrientation;
private boolean mFlashSupported;
private static final String TAG = FragmentDecoder.class.getName();
private final CameraCaptureSession.CaptureCallback mCaptureCallback =
new CameraCaptureSession.CaptureCallback() {
private void process(CaptureResult result) {
}
@Override
public void onCaptureProgressed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request,
@NonNull CaptureResult partialResult) {
process(partialResult);
}
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
process(result);
}
};
private final TextureView.SurfaceTextureListener mSurfaceTextureListener =
new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
openCamera(width, height);
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {
configureTransform(width, height);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) {
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture texture) {
}
};
private QRCodeReader mQrReader;
private String mCameraId;
private AutoFitTextureView mTextureView;
private final ImageReader.OnImageAvailableListener mOnImageAvailableListener =
new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image img = null;
Result rawResult = null;
try {
img = reader.acquireLatestImage();
if (img == null) throw new NullPointerException("cannot be null");
ByteBuffer buffer = img.getPlanes()[0].getBuffer();
byte[] data = new byte[buffer.remaining()];
buffer.get(data);
int width = img.getWidth();
int height = img.getHeight();
PlanarYUVLuminanceSource source = new PlanarYUVLuminanceSource(data, width, height, 0, 0, width, height, false);
BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
rawResult = mQrReader.decode(bitmap);
if (rawResult == null) {
throw new NullPointerException("no QR code");
}
String decoded = rawResult.getText();
if (accessGranted) {
Log.e(TAG, "entered the accessGranted area!");
accessGranted = false;
boolean isPrivKey = BRWalletManager.getInstance(getActivity()).confirmSweep(getActivity(), decoded);
if (isPrivKey) {
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
BRAnimator.hideDecoderFragment();
}
});
return;
}
String validationString = validateResult(decoded);
if (Objects.equals(validationString, BRConstants.TEXT_EMPTY)) {
onQRCodeRead((MainActivity) getActivity(), rawResult.getText());
} else {
if (!showingText) {
showingText = true;
setCameraGuide(BRConstants.CAMERA_GUIDE_RED);
setGuideText(validationString);
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
showingText = false;
}
}, 1000);
}
accessGranted = true;
}
}
} catch (ReaderException | NullPointerException | IllegalStateException ignored) {
if (!showingText)
setCameraGuide(BRConstants.CAMERA_GUIDE);
// Log.e(TAG, "Reader shows an exception! ", ignored);
/* Ignored */
} finally {
mQrReader.reset();
if (img != null)
img.close();
}
if (rawResult == null) {
if (!showingText)
setCameraGuide(BRConstants.CAMERA_GUIDE);
}
}
};
private CameraCaptureSession mCaptureSession;
private CameraDevice mCameraDevice;
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice cameraDevice) {
// This method is called when the camera is opened. We start camera preview here.
mCameraOpenCloseLock.release();
mCameraDevice = cameraDevice;
createCameraPreviewSession();
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int error) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
getActivity().onBackPressed();
}
};
private Size mPreviewSize;
private HandlerThread mBackgroundThread;
private Handler mBackgroundHandler;
private ImageReader mImageReader;
private CaptureRequest.Builder mPreviewRequestBuilder;
private CaptureRequest mPreviewRequest;
private final Semaphore mCameraOpenCloseLock = new Semaphore(1);
private int flashButtonCount;
private static Size chooseOptimalSize(Size[] choices, int textureViewWidth,
int textureViewHeight, int maxWidth, int maxHeight, Size aspectRatio) {
List<Size> bigEnough = new ArrayList<>();
List<Size> notBigEnough = new ArrayList<>();
int w = aspectRatio.getWidth();
int h = aspectRatio.getHeight();
for (Size option : choices) {
if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight &&
option.getHeight() == option.getWidth() * h / w) {
if (option.getWidth() >= textureViewWidth &&
option.getHeight() >= textureViewHeight) {
bigEnough.add(option);
} else {
notBigEnough.add(option);
}
}
}
// Pick the smallest of those big enough. If there is none big enough,
// pick the largest of those not big enough.
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizesByArea());
} else if (notBigEnough.size() > 0) {
return Collections.max(notBigEnough, new CompareSizesByArea());
} else {
Log.e(TAG, "Couldn't find any suitable preview size");
return choices[0];
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_decoder, container, false);
mQrReader = new QRCodeReader();
mTextureView = new AutoFitTextureView(getActivity());
RelativeLayout layout = (RelativeLayout) rootView.findViewById(R.id.fragment_decoder_layout);
camera_guide_image = (ImageView) rootView.findViewById(R.id.decoder_camera_guide_image);
decoderText = (TextView) rootView.findViewById(R.id.decoder_text);
flashButton = (ImageButton) rootView.findViewById(R.id.button_flash);
flashButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
try {
if (mPreviewRequestBuilder == null || mCaptureSession == null)
return;
if (++flashButtonCount % 2 != 0) {
mPreviewRequestBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_TORCH);
mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), null, null);
flashButton.setBackgroundResource(R.drawable.flash_on);
SpringAnimator.showAnimation(flashButton);
} else {
mPreviewRequestBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_OFF);
mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), null, null);
flashButton.setBackgroundResource(R.drawable.flash_off);
SpringAnimator.showAnimation(flashButton);
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
});
layout.addView(mTextureView, 0);
startBackgroundThread();
return rootView;
}
@Override
public void onResume() {
super.onResume();
accessGranted = true;
showingText = false;
new Handler().post(new Runnable() {
@Override
public void run() {
if (camera_guide_image != null)
SpringAnimator.showExpandCameraGuide(camera_guide_image);
}
});
((BreadWalletApp) getActivity().getApplication()).hideKeyboard(getActivity());
// When the screen is turned off and turned back on, the SurfaceTexture is already
// available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
// a camera and start preview from here (otherwise, we wait until the surface is ready in
// the SurfaceTextureListener).
if (mTextureView.isAvailable()) {
openCamera(mTextureView.getWidth(), mTextureView.getHeight());
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
@Override
public void onPause() {
closeCamera();
stopBackgroundThread();
super.onPause();
}
private void setUpCameraOutputs(int width, int height) {
Activity activity = getActivity();
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
for (String cameraId : manager.getCameraIdList()) {
CameraCharacteristics characteristics
= manager.getCameraCharacteristics(cameraId);
// We don't use a front facing camera in this sample.
Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
continue;
}
StreamConfigurationMap map = characteristics.get(
CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map == null) {
continue;
}
// For still image captures, we use the largest available size.
Size largest = Collections.max(
Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)),
new CompareSizesByArea());
mImageReader = ImageReader.newInstance(largest.getWidth()/2, largest.getHeight()/2,
ImageFormat.YUV_420_888, /*maxImages*/2);
mImageReader.setOnImageAvailableListener(
mOnImageAvailableListener, mBackgroundHandler);
// Find out if we need to swap dimension to get the preview size relative to sensor
// coordinate.
int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
//noinspection ConstantConditions
mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
boolean swappedDimensions = false;
switch (displayRotation) {
case Surface.ROTATION_0:
case Surface.ROTATION_180:
if (mSensorOrientation == 90 || mSensorOrientation == 270) {
swappedDimensions = true;
}
break;
case Surface.ROTATION_90:
case Surface.ROTATION_270:
if (mSensorOrientation == 0 || mSensorOrientation == 180) {
swappedDimensions = true;
}
break;
default:
Log.e(TAG, "Display rotation is invalid: " + displayRotation);
}
Point displaySize = new Point();
activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
int rotatedPreviewWidth = width;
int rotatedPreviewHeight = height;
int maxPreviewWidth = displaySize.x;
int maxPreviewHeight = displaySize.y;
if (swappedDimensions) {
rotatedPreviewWidth = height;
rotatedPreviewHeight = width;
maxPreviewWidth = displaySize.y;
maxPreviewHeight = displaySize.x;
}
if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
maxPreviewWidth = MAX_PREVIEW_WIDTH;
}
if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
maxPreviewHeight = MAX_PREVIEW_HEIGHT;
}
// Danger, W.R.! Attempting to use too large a preview size could exceed the camera
// bus' bandwidth limitation, resulting in gorgeous previews but the storage of
// garbage capture data.
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth,
maxPreviewHeight, largest);
// We fit the aspect ratio of TextureView to the size of preview we picked.
int orientation = getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
mTextureView.setAspectRatio(
mPreviewSize.getWidth(), mPreviewSize.getHeight());
} else {
mTextureView.setAspectRatio(
mPreviewSize.getHeight(), mPreviewSize.getWidth());
}
// Check if the flash is supported.
Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
mFlashSupported = available == null ? false : available;
mCameraId = cameraId;
return;
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void openCamera(int width, int height) {
setUpCameraOutputs(width, height);
configureTransform(width, height);
CameraManager manager = (CameraManager) getActivity().getSystemService(Context.CAMERA_SERVICE);
try {
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
if (ActivityCompat.checkSelfPermission(getActivity(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
return;
}
if (mCameraId != null)
manager.openCamera(mCameraId, mStateCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
}
}
private void closeCamera() {
try {
mCameraOpenCloseLock.acquire();
if (mCaptureSession != null) {
mCaptureSession.close();
mCaptureSession = null;
}
if (mCameraDevice != null) {
mCameraDevice.close();
mCameraDevice = null;
}
if (mImageReader != null) {
mImageReader.close();
mImageReader = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
} finally {
mCameraOpenCloseLock.release();
}
}
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
private void stopBackgroundThread() {
try {
mBackgroundThread.quitSafely();
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException | NullPointerException e) {
e.printStackTrace();
mBackgroundThread = null;
mBackgroundHandler = null;
}
}
private void createCameraPreviewSession() {
try {
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
// We configure the size of default buffer to be the size of camera preview we want.
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface surface = new Surface(texture);
Surface mImageSurface = mImageReader.getSurface();
mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.addTarget(mImageSurface);
mPreviewRequestBuilder.addTarget(surface);
// Here, we create a CameraCaptureSession for camera preview.
mCameraDevice.createCaptureSession(Arrays.asList(mImageSurface, surface),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
// The camera is already closed
// Log.e(TAG, "onConfigured");
if (mCameraDevice == null) return;
// When the session is ready, we start displaying the preview.
mCaptureSession = cameraCaptureSession;
try {
// Auto focus should be continuous for camera preview.
mPreviewRequestBuilder.set(CONTROL_AF_MODE, CONTROL_AF_MODE_CONTINUOUS_PICTURE);
mPreviewRequest = mPreviewRequestBuilder.build();
mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback,
mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
Toast.makeText(getActivity(), getActivity().getResources().getString(R.string.failed),
Toast.LENGTH_SHORT).show();
}
}, null
);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void configureTransform(int viewWidth, int viewHeight) {
if (mTextureView == null || mPreviewSize == null) return;
int rotation = getActivity().getWindowManager().getDefaultDisplay().getRotation();
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale = Math.max(
(float) viewHeight / mPreviewSize.getHeight(),
(float) viewWidth / mPreviewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
}
mTextureView.setTransform(matrix);
}
private static synchronized void onQRCodeRead(final MainActivity app, final String text) {
app.runOnUiThread(new Runnable() {
@Override
public void run() {
if (text != null) {
BRAnimator.hideDecoderFragment();
Log.e(TAG, "BEFORE processRequest");
RequestHandler.processRequest(app, text);
}
}
});
}
private static class CompareSizesByArea implements Comparator<Size> {
@Override
public int compare(Size lhs, Size rhs) {
// We cast here to ensure the multiplications won't overflow
return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
(long) rhs.getWidth() * rhs.getHeight());
}
}
private int getStatusBarHeight() {
Rect rectangle = new Rect();
Window window = getActivity().getWindow();
window.getDecorView().getWindowVisibleDisplayFrame(rectangle);
int statusBarHeight = rectangle.top;
int contentViewTop =
window.findViewById(Window.ID_ANDROID_CONTENT).getTop();
int titleBarHeight = contentViewTop - statusBarHeight;
return statusBarHeight + titleBarHeight;
}
private String validateResult(String str) {
RequestObject obj = null;
try {
obj = RequestHandler.getRequestFromString(str);
} catch (InvalidAlgorithmParameterException e) {
e.printStackTrace();
}
if (obj == null) {
return getActivity().getResources().getString(R.string.fragmentdecoder_not_a_bitcoin_qr_code);
}
if (obj.r != null) {
return BRConstants.TEXT_EMPTY;
}
if (obj.address != null) {
if (BRWalletManager.validateAddress(obj.address)) {
return BRConstants.TEXT_EMPTY;
} else {
return getActivity().getResources().getString(R.string.fragmentdecoder_not_valid_bitcoin_address);
}
}
return getActivity().getResources().getString(R.string.fragmentdecoder_not_a_bitcoin_qr_code);
}
private void setCameraGuide(final String str) {
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
if (str.equalsIgnoreCase(BRConstants.CAMERA_GUIDE)) {
camera_guide_image.setImageResource(R.drawable.cameraguide);
setGuideText(BRConstants.TEXT_EMPTY);
} else if (str.equalsIgnoreCase(BRConstants.CAMERA_GUIDE_RED)) {
camera_guide_image.setImageResource(R.drawable.cameraguide_red);
} else {
throw new IllegalArgumentException("CAMERA_GUIDE and CAMERA_GUIDE_RED only");
}
}
});
}
private void setGuideText(final String str) {
Activity activity = getActivity();
if (activity != null)
activity.runOnUiThread(new Runnable() {
@Override
public void run() {
decoderText.setText(str);
}
});
}
}

Using the Camera API in Android to take pictures in different focus modes

I need to create an app where, when I take a photo, it saves 3 JPEGs in different focus modes (auto focus, macro focus and infinity focus) if possible. I have made a basic camera app whose code I am posting. Can you guys tell me how to implement this?
public class MainActivity extends Activity implements Constants,SurfaceHolder.Callback,Camera.PreviewCallback,
Camera.AutoFocusCallback, OnTouchListener{
private ViewFlipper myviewflipper;
private float initialXpoint;
private Camera mCamera;
private SurfaceView mPreviewSV;
private SurfaceHolder mSurfaceHolder;
private Button mCapture;
private Button mChange;
private Button mReturn;
private Button mLoad;
private ImageView mImageView;
private int mPicturesCaptured = 0;
boolean isCaptureClicked = false;
boolean encodingCompleted = false;
long startTime;
int currentFile = 0;
int initialPointer = -1;
int secondPointer = -1;
int direction = -1;
CameraSize picSize = BACK_CAM_PREVIEW_SIZE_3 ;
File mFile;
ArrayList<String> imagePath;
ProgressDialog mEncodingProgress;
DisplayTask mTask;
Bitmap mDisplayBitmap;
File path;
File[] files;
@Override
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.activity_main);
mFile = new File(Constants.Cameraeffect);
if (!mFile.exists())
mFile.mkdir();
setupUI();
}
private void setupUI()
{
mPreviewSV = (SurfaceView) findViewById(R.id.sv_cam_preview);
mCapture = (Button) findViewById(R.id.button_capture);
mChange = (Button) findViewById(R.id.toggle);
mReturn = (Button) findViewById(R.id.button_return);
mImageView = (ImageView) findViewById(R.id.display);
mImageView.setOnTouchListener(this);
mLoad = (Button) findViewById(R.id.button_load);
mSurfaceHolder = mPreviewSV.getHolder();
mSurfaceHolder.addCallback(this);
mCapture.setOnClickListener(new View.OnClickListener()
{
public void onClick(View v)
{
deleteFiles();
mPicturesCaptured = 0;
isCaptureClicked = true;
startTime=System.currentTimeMillis();
progressUpdate();
}
});
mChange.setOnClickListener(new View.OnClickListener()
{
public void onClick(View v)
{
int res = getCurrentCameraId();
switch(res){
case 0:
stopCamera();
startCamera(Constants.FRONT_CAMERA);
break;
case 1:
stopCamera();
startCamera(Constants.BACK_CAMERA);
break;
}
}
});
mLoad.setOnClickListener(new View.OnClickListener()
{
public void onClick(View v)
{
imagePath=ListAllFiles();
mImageView.bringToFront();
mImageView.setVisibility(View.VISIBLE);
mImageView.setImageBitmap(decodeFile(imagePath.get(0)));
mTask = new DisplayTask();
mTask.execute("");
}
});
mReturn.setOnClickListener(new View.OnClickListener() {
public void onClick(View arg0) {
// TODO Auto-generated method stub
Intent intent = getIntent();
finish();
startActivity(intent);
}
});
}
private ArrayList<String> ListAllFiles() {
ArrayList<String> tFileList = new ArrayList<String>();
path= new File(Constants.Cameraeffect);
if( path.exists() ) {
files = path.listFiles();
for(int i=0; i<files.length; i++) {
tFileList.add(files[i].toString());
}
}
return tFileList;
}
public void deleteFiles() {
path= new File(Constants.Cameraeffect);
if( path.exists() ) {
files = path.listFiles();
for(int i=0; i<files.length; i++) {
files[i].delete();
}
}
}
@SuppressWarnings("unused")
int getCurrentCameraId() {
int mCameraId = -1;
CameraInfo ci = new CameraInfo();
for (int i = 0 ; i < Camera.getNumberOfCameras(); i++) {
Camera.getCameraInfo(i, ci);
if (ci.facing == CameraInfo.CAMERA_FACING_BACK){
return Constants.BACK_CAMERA;
}
else if (ci.facing == CameraInfo.CAMERA_FACING_FRONT){
return Constants.FRONT_CAMERA;
}
}
return -1;
}
private void progressUpdate()
{
mEncodingProgress = new ProgressDialog(this);
mEncodingProgress.setProgressStyle(ProgressDialog.STYLE_SPINNER);
mEncodingProgress.setMessage("Encoding");
mEncodingProgress.setIndeterminate(false);
mEncodingProgress.show();
new Thread(new Runnable()
{
public void run()
{
while((System.currentTimeMillis()-startTime)<=5000)
{
try
{
Thread.sleep(10);
mEncodingProgress.setProgress(mPicturesCaptured + 1);
}
catch (InterruptedException e)
{
e.printStackTrace();
}
}
}
}).start();
}
@SuppressWarnings("unused")
private void startCamera(int id)
{
try
{
if (mCamera != null)
stopCamera();
mCamera = Camera.open(id);
Camera.Parameters parameters = mCamera.getParameters();
parameters.setPreviewSize(1280,720);
parameters.setFocusMode("infinite");
parameters.setJpegQuality(100);
parameters.setAutoExposureLock(true);
parameters.setAutoWhiteBalanceLock(true);
parameters.setRotation(0);
mCamera.setDisplayOrientation(0);
mCamera.setParameters(parameters);
mCamera.setPreviewCallback(this);
mCamera.setPreviewDisplay(mPreviewSV.getHolder());
mCamera.startPreview();
}
catch (IOException e)
{
stopCamera();
}
}
private void stopCamera()
{
if (mCamera != null)
{
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mPreviewSV.getHolder().removeCallback(this);
mCamera.release();
mCamera = null;
}
}
@Override
protected void onStart()
{
super.onStart();
}
@Override
protected void onResume()
{
super.onResume();
}
#Override
protected void onPause()
{
stopCamera();
super.onPause();
}
#Override
protected void onDestroy()
{
stopCamera();
super.onDestroy();
}
public void surfaceCreated(SurfaceHolder holder)
{
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height)
{
startCamera(0);
}
public void surfaceDestroyed(SurfaceHolder holder)
{
}
public void onPreviewFrame(byte[] data, Camera camera)
{
if (System.currentTimeMillis()-startTime<=5000)
{
savingJpegToSDCard(data);
}
else
{
if(mEncodingProgress!=null){
mEncodingProgress.dismiss();
mEncodingProgress=null;
encodingCompleted=true;
}
}
if(encodingCompleted){
/*
* Log.i("","sent to JNI");
*/
encodingCompleted=false;
}
}
public void onAutoFocus(boolean arg0, Camera arg1) {
}
public boolean onTouch(View arg0, MotionEvent event) {
Log.d("","srikrishna:: ontouch");
switch(event.getAction()){
case(MotionEvent.ACTION_DOWN):
initialPointer=(int) event.getX();
break;
case(MotionEvent.ACTION_MOVE):
secondPointer=(int) event.getX();
break;
case(MotionEvent.ACTION_UP):
initialPointer=(int) event.getX();
}
return true;
}
public Bitmap decodeFile(String fPath)
{
BitmapFactory.Options opts = new BitmapFactory.Options();
opts.inJustDecodeBounds = true;
opts.inDither = false;
opts.inPurgeable = true;
opts.inInputShareable = true;
BitmapFactory.decodeFile(fPath, opts);
final int REQUIRED_SIZE = 500;
int scale = 1;
if (opts.outHeight > REQUIRED_SIZE || opts.outWidth > REQUIRED_SIZE)
{
final int heightRatio = Math.round((float) opts.outHeight / (float) REQUIRED_SIZE);
final int widthRatio = Math.round((float) opts.outWidth / (float) REQUIRED_SIZE);
scale = Math.min(heightRatio, widthRatio);
}
opts.inJustDecodeBounds = false;
opts.inSampleSize = scale;
// decodeFile() can return null (e.g. a partially written file), so guard the copy()
Bitmap bm = BitmapFactory.decodeFile(fPath, opts);
return bm == null ? null : bm.copy(Bitmap.Config.RGB_565, false);
}
private void savingJpegToSDCard(final byte[] b1)
{
try {
Camera.Parameters parameters = mCamera.getParameters();
Size size = parameters.getPreviewSize();
Log.i("", "abh size width and height " + size.width + " " + size.height);
YuvImage image = new YuvImage(b1, parameters.getPreviewFormat(), size.width, size.height, null);
File file = new File(Constants.Cameraeffect + "/File" + mPicturesCaptured + ".jpeg");
FileOutputStream filecon = new FileOutputStream(file);
image.compressToJpeg(new Rect(0, 0, image.getWidth(), image.getHeight()), 90, filecon);
filecon.close(); // close the stream so the JPEG is fully flushed to disk
} catch (IOException e) {
e.printStackTrace();
}
mPicturesCaptured++;
}
class DisplayTask extends AsyncTask<String, Bitmap, Void>
{
@Override
protected void onPreExecute()
{
super.onPreExecute();
currentFile = 0;
}
@Override
protected Void doInBackground(String... params)
{
// Runs until the task is cancelled; publishes the next frame whenever
// the finger has moved far enough to the right.
while (!isCancelled()) {
Log.i("", "abh imagePath.size() " + imagePath.size());
if (secondPointer - initialPointer > 50)
{
Log.i("", "abh currentFile " + currentFile);
mDisplayBitmap = decodeFile(imagePath.get(currentFile));
publishProgress(mDisplayBitmap);
if (currentFile < imagePath.size() - 1) // stay inside the list instead of a hard-coded 90
currentFile++;
initialPointer = secondPointer;
}
try
{
Thread.sleep(10); // avoid busy-spinning while waiting for a swipe
}
catch (InterruptedException e)
{
e.printStackTrace();
}
}
return null;
}
@Override
protected void onPostExecute(Void result)
{
super.onPostExecute(result);
}
@Override
protected void onProgressUpdate(Bitmap... values)
{
super.onProgressUpdate(values);
mImageView.setImageBitmap(values[0]);
}
}
}
In your startCamera method, set the focus mode to the one you need and then start the camera. If you want to change it dynamically on a button click, call startCamera again with the required parameters. Make sure you release the camera (this is very important). Also note that the custom camera implementation in Android is quite buggy (many of the bugs are still open; just Google them), so I suggest you read the documentation carefully before implementing.
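As a minimal sketch of that advice (assuming mCamera is the open android.hardware.Camera from the code above; the continuous-picture mode in the usage line is only an example and needs API level 14):

private void applyFocusMode(Camera camera, String focusMode) {
    // getParameters() returns a copy, so the change must be written back
    Camera.Parameters params = camera.getParameters();
    // Focus support is hardware dependent, so gate on the supported list
    if (params.getSupportedFocusModes().contains(focusMode)) {
        params.setFocusMode(focusMode);
        camera.setParameters(params);
    }
}
// e.g. applyFocusMode(mCamera, Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);

Gating on getSupportedFocusModes() avoids the RuntimeException that setParameters() throws on devices that do not support the requested mode.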

Implementation of touch to focus on camera in Android

I want to implement touch to focus in my custom camera; here is my code. I know touch focus is supported from API level 14 and that it is also hardware dependent, so does anyone have an idea how to implement it?
private SurfaceView previewSv;
private ImageButton stillImgCamIb, videoCamIb, galleryIb;
private TextView cancelTv;
private Handler handler = new Handler();
private Camera camera;
private int cameraId = 0;
private boolean cameraFlash = true;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
boolean hasCameraFlash = getPackageManager().hasSystemFeature(
PackageManager.FEATURE_CAMERA_FLASH);
boolean hasFrontCam = getPackageManager().hasSystemFeature(
PackageManager.FEATURE_CAMERA_FRONT);
previewSv = (SurfaceView) findViewById(R.id.preview_sv);
stillImgCamIb = (ImageButton) findViewById(R.id.still_img_camera_ib);
videoCamIb = (ImageButton) findViewById(R.id.video_camera_ib);
galleryIb = (ImageButton) findViewById(R.id.gallery_ib);
cancelTv = (TextView) findViewById(R.id.cancel_tv);
stillImgCamIb.setOnClickListener(this);
videoCamIb.setOnClickListener(this);
galleryIb.setOnClickListener(this);
cancelTv.setOnClickListener(this);
startCameraPreview();
}
@Override
protected void onStop() {
super.onStop();
stopCameraPreview();
stopRecording();
}
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.still_img_camera_ib:
if (mode == Mode.STILL_CAMERA) {
if (previewing) {
handleStillCamClick();
}
} else if (mode == Mode.VIDEO_CAMERA) {
stopCameraPreview();
startRecording();
activateMode(Mode.RECORDING);
} else if (mode == Mode.RECORDING) {
stopRecording();
activateMode(Mode.VIDEO_CAMERA);
} else {
activateMode(Mode.STILL_CAMERA);
}
break;
case R.id.video_camera_ib:
if (mode == Mode.VIDEO_CAMERA) {
activateMode(Mode.STILL_CAMERA);
} else {
activateMode(Mode.VIDEO_CAMERA);
}
break;
case R.id.gallery_ib:
// openGallery();
break;
case R.id.cancel_tv:
activateMode(Mode.VIDEO_CAMERA);
stopRecording();
break;
default:
break;
}
}
private Uri capturedImageUri;
private boolean previewing = false;
private void startCameraPreview() {
if (!previewing) {
previewing = true;
camera = Camera.open(cameraId);
if (camera != null) {
// getParameters() returns a copy, so the flash mode must be written back
Parameters params = camera.getParameters();
params.setFlashMode(cameraFlash ? Parameters.FLASH_MODE_ON
: Parameters.FLASH_MODE_OFF);
camera.setParameters(params);
camera.setDisplayOrientation(90);
try {
System.out.println("Started camera preview!");
camera.setPreviewDisplay(previewSv.getHolder());
camera.startPreview();
} catch (IOException e) {
e.printStackTrace();
}
}
}
previewSv.getHolder().addCallback(new Callback() {
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
stopCameraPreview();
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format,
int width, int height) {
if (previewing) {
stopCameraPreview();
}
startCameraPreview();
}
});
}
private void stopCameraPreview() {
if (camera != null && previewing) {
camera.stopPreview();
camera.release();
camera = null;
previewing = false;
}
}
private void handleStillCamClick() {
if (camera != null) {
Parameters params = camera.getParameters();
Size defaultSize = params.getPictureSize();
System.out.println("Default size X:" + defaultSize.width + ", Y:"
+ defaultSize.height);
// If the default picture size is too small, loop over the supported sizes
// and pick the smallest 4:3 size that is at least 1000px in each dimension
if (defaultSize.width < 640 || defaultSize.height < 640) {
List<Size> sizes = params.getSupportedPictureSizes();
int maxH = Integer.MAX_VALUE;
int maxW = Integer.MAX_VALUE;
for (int i = 0; i < sizes.size(); i++) {
Size thisSize = sizes.get(i);
int thisW = thisSize.width;
int thisH = thisSize.height;
boolean betterWidth = thisW < maxW && thisW >= 1000;
boolean betterHeight = thisH < maxH && thisH >= 1000;
boolean isFourThreeAspect = thisW == (4 * thisH / 3);
if (isFourThreeAspect) {
System.out.println("Supports size: W" + thisSize.width
+ ", H:" + thisSize.height);
} else {
System.out.println("not 4/3, but Supports size: W"
+ thisSize.width + ", H:" + thisSize.height);
}
if (betterWidth && betterHeight && isFourThreeAspect) {
maxH = thisH;
maxW = thisW;
}
}
if (maxH != Integer.MAX_VALUE && maxW != Integer.MAX_VALUE) {
System.out.println("Found best size, W:" + maxW + ", H:"
+ maxH);
Toast.makeText(
this,
"Taking picture at resolution:" + maxW + "x" + maxH,
Toast.LENGTH_LONG).show();
params.setPictureSize(maxW, maxH);
} else {
System.out
.println("Couldn't find best size! Going with default");
}
camera.setParameters(params);
}
camera.takePicture(new ShutterCallback() {
@Override
public void onShutter() {
}
}, null, new PictureCallback() {
public void onPictureTaken(byte[] bytes, Camera camera) {
System.out
.println("got jpeg bytes, length:" + bytes.length);
stopCameraPreview();
}
});
}
}
public MediaRecorder mediaRecorder = new MediaRecorder();
private boolean recording = false;
protected void startRecording() {
String path = Environment.getExternalStorageDirectory()
.getAbsolutePath().toString();
String filename = "/xyz.mp4";
mediaRecorder = new MediaRecorder();
camera = Camera.open(cameraId);
// getParameters() returns a copy, so the flash mode must be written back
Parameters params = camera.getParameters();
params.setFlashMode(cameraFlash ? Parameters.FLASH_MODE_AUTO
: Parameters.FLASH_MODE_OFF);
camera.setParameters(params);
camera.setDisplayOrientation(90);
Log.d("TabCustomCameraFragment", "startRecording - Camera.open");
camera.unlock(); // MediaRecorder needs an unlocked camera; the extra lock() was redundant
mediaRecorder.setCamera(camera);
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.DEFAULT);
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
mediaRecorder.setProfile(CamcorderProfile
.get(CamcorderProfile.QUALITY_HIGH));
mediaRecorder.setVideoSize(640, 640); // note: overrides the resolution chosen by the CamcorderProfile
mediaRecorder.setPreviewDisplay(previewSv.getHolder().getSurface());
mediaRecorder.setOutputFile(path + filename);
if (isVertical()) {
mediaRecorder.setOrientationHint(cameraId == 0 ? 90 : 270);
} else {
mediaRecorder.setOrientationHint(cameraId == 0 ? 90 : 180);
}
mediaRecorder.setMaxDuration(10000);
try {
camera.stopPreview();
mediaRecorder.prepare();
mediaRecorder.start();
recording = true;
remainingCaptureTime = MAX_VIDEO_DURATION;
updateRemainingTime();
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
protected void stopRecording() {
if (mediaRecorder != null && recording) {
mediaRecorder.stop();
mediaRecorder.reset();
mediaRecorder.release();
camera.lock(); // take the camera back from MediaRecorder before releasing it
camera.release();
recording = false;
handler.removeCallbacksAndMessages(null);
}
}
private int remainingCaptureTime;
private void updateRemainingTime() {
Log.d("TabCustomCameraFragment",
"updateRemainingTime - remainingCaptureTime="
+ remainingCaptureTime);
if (remainingCaptureTime <= 500) {
stopCameraPreview();
stopRecording();
return;
}
handler.postDelayed(new Runnable() {
@Override
public void run() {
remainingCaptureTime -= 500;
updateRemainingTime();
}
}, 500);
}
private boolean isVertical() {
Display display = getWindowManager().getDefaultDisplay();
// Portrait if the screen is taller than it is wide
return display.getWidth() <= display.getHeight();
}
private enum Mode {
STILL_CAMERA, VIDEO_CAMERA, RECORDING;
}
private Mode mode = Mode.STILL_CAMERA;
private void activateMode(Mode mode) {
switch (mode) {
case STILL_CAMERA:
this.mode = Mode.STILL_CAMERA;
cancelTv.setVisibility(View.GONE);
galleryIb.setVisibility(View.VISIBLE);
videoCamIb.setVisibility(View.VISIBLE);
break;
case VIDEO_CAMERA:
this.mode = Mode.VIDEO_CAMERA;
stillImgCamIb.setImageResource(R.drawable.ic_launcher);
videoCamIb.setImageResource(R.drawable.ic_launcher);
cancelTv.setVisibility(View.GONE);
galleryIb.setVisibility(View.VISIBLE);
videoCamIb.setVisibility(View.VISIBLE);
break;
case RECORDING:
this.mode = Mode.RECORDING;
stillImgCamIb.setImageResource(R.drawable.ic_launcher);
cancelTv.setVisibility(View.VISIBLE);
galleryIb.setVisibility(View.GONE);
videoCamIb.setVisibility(View.GONE);
break;
default:
break;
}
}
@Override
public void onBackPressed() {
stopCameraPreview();
stopRecording();
super.onBackPressed();
}
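As for the touch-to-focus question itself, one rough sketch (an illustration under assumptions, not tested against this exact code) is to map a touch on previewSv to a Camera.Area, which API level 14 exposes through setFocusAreas(). This ignores the 90-degree display rotation used above, which a production version would have to compensate for, and it assumes the device reports at least one supported focus area:

// Assumed imports: android.graphics.Rect, java.util.Collections
previewSv.setOnTouchListener(new View.OnTouchListener() {
    @Override
    public boolean onTouch(View v, MotionEvent event) {
        if (event.getAction() != MotionEvent.ACTION_DOWN || camera == null) {
            return false;
        }
        Camera.Parameters params = camera.getParameters();
        if (params.getMaxNumFocusAreas() > 0) {
            // Focus areas live in a fixed (-1000,-1000)..(1000,1000) space
            int x = (int) (event.getX() / v.getWidth() * 2000) - 1000;
            int y = (int) (event.getY() / v.getHeight() * 2000) - 1000;
            Rect area = new Rect(Math.max(x - 100, -1000), Math.max(y - 100, -1000),
                    Math.min(x + 100, 1000), Math.min(y + 100, 1000));
            params.setFocusAreas(Collections.singletonList(new Camera.Area(area, 1000)));
            params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
            camera.setParameters(params);
            camera.autoFocus(new Camera.AutoFocusCallback() {
                @Override
                public void onAutoFocus(boolean success, Camera cam) {
                    // success is hardware dependent; nothing else to do here
                }
            });
        }
        return true;
    }
});

The 2000/1000 arithmetic converts view coordinates into the fixed driver coordinate space that focus areas use, and the clamping keeps the rectangle legal near the edges of the preview.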
