Implementation of touch to focus on camera in Android

I want to implement touch to focus in my custom camera. Here is my code for the custom camera. I know focus is supported from Android API level 14 and is also hardware dependent, so does anyone have an idea how to implement it?
private SurfaceView previewSv;
private ImageButton stillImgCamIb, videoCamIb, galleryIb;
private TextView cancelTv;
private Handler handler = new Handler();
private Camera camera;
private int cameraId = 0;
private boolean cameraFlash = true;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
boolean hasCameraFlash = getPackageManager().hasSystemFeature(
PackageManager.FEATURE_CAMERA_FLASH);
boolean hasFrontCam = getPackageManager().hasSystemFeature(
PackageManager.FEATURE_CAMERA_FRONT);
previewSv = (SurfaceView) findViewById(R.id.preview_sv);
stillImgCamIb = (ImageButton) findViewById(R.id.still_img_camera_ib);
videoCamIb = (ImageButton) findViewById(R.id.video_camera_ib);
galleryIb = (ImageButton) findViewById(R.id.gallery_ib);
cancelTv = (TextView) findViewById(R.id.cancel_tv);
stillImgCamIb.setOnClickListener(this);
videoCamIb.setOnClickListener(this);
galleryIb.setOnClickListener(this);
cancelTv.setOnClickListener(this);
startCameraPreview();
}
@Override
protected void onStop() {
super.onStop();
stopCameraPreview();
stopRecording();
}
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.still_img_camera_ib:
if (mode == Mode.STILL_CAMERA) {
if (previewing) {
handleStillCamClick();
}
} else if (mode == Mode.VIDEO_CAMERA) {
stopCameraPreview();
startRecording();
activateMode(Mode.RECORDING);
} else if (mode == Mode.RECORDING) {
stopRecording();
activateMode(Mode.VIDEO_CAMERA);
} else {
activateMode(Mode.STILL_CAMERA);
}
break;
case R.id.video_camera_ib:
if (mode == Mode.VIDEO_CAMERA) {
activateMode(Mode.STILL_CAMERA);
} else {
activateMode(Mode.VIDEO_CAMERA);
}
break;
case R.id.gallery_ib:
// openGallery();
break;
case R.id.cancel_tv:
activateMode(Mode.VIDEO_CAMERA);
stopRecording();
break;
default:
break;
}
}
private Uri capturedImageUri;
private boolean previewing = false;
private void startCameraPreview() {
if (!previewing) {
previewing = true;
camera = Camera.open(cameraId);
Parameters previewFlashParams = camera.getParameters();
previewFlashParams.setFlashMode(cameraFlash ? Parameters.FLASH_MODE_ON
: Parameters.FLASH_MODE_OFF);
// getParameters() returns a copy, so the change must be written back with setParameters().
camera.setParameters(previewFlashParams);
camera.setDisplayOrientation(90);
if (camera != null) {
try {
System.out.println("Started camera preview!");
camera.setPreviewDisplay(previewSv.getHolder());
camera.startPreview();
} catch (IOException e) {
e.printStackTrace();
}
}
}
previewSv.getHolder().addCallback(new Callback() {
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
stopCameraPreview();
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format,
int width, int height) {
if (previewing) {
stopCameraPreview();
}
startCameraPreview();
}
});
}
private void stopCameraPreview() {
if (camera != null && previewing) {
camera.stopPreview();
camera.release();
camera = null;
previewing = false;
}
}
private void handleStillCamClick() {
if (camera != null) {
Parameters params = camera.getParameters();
Size defaultSize = params.getPictureSize();
System.out.println("Default size X:" + defaultSize.width + ", Y:"
+ defaultSize.height);
// Loop over supported sizes and pick one that is larger than 640px
// wide & tall, only if the default size is too small
if (defaultSize.width < 640 || defaultSize.height < 640) {
List<Size> sizes = params.getSupportedPictureSizes();
// int biggestH = 0;
// int biggestW = 0;
int maxH = Integer.MAX_VALUE;
int maxW = Integer.MAX_VALUE;
for (int i = 0; i < sizes.size(); i++) {
Size thisSize = sizes.get(i);
int thisW = thisSize.width;
int thisH = thisSize.height;
boolean betterWidth = thisW < maxW && thisW >= 1000;
boolean betterHeight = thisH < maxH && thisH >= 1000;
boolean isFourThreeAspect = thisW == (4 * thisH / 3);
if (isFourThreeAspect) {
System.out.println("Supports size: W" + thisSize.width
+ ", H:" + thisSize.height);
} else {
System.out.println("not 4/3, but Supports size: W"
+ thisSize.width + ", H:" + thisSize.height);
}
if (betterWidth && betterHeight && isFourThreeAspect) {
maxH = thisH;
maxW = thisW;
}
}
if (maxH < 99999 && maxW < 99999) {
System.out.println("Found best size, W:" + maxW + ", H:"
+ maxH);
Toast.makeText(
this,
"Taking picture at resolution:" + maxW + "x" + maxH,
Toast.LENGTH_LONG).show();
params.setPictureSize(maxW, maxH);
} else {
System.out
.println("Couldn't find best size! Going with default");
}
camera.setParameters(params);
} else {
}
camera.takePicture(new ShutterCallback() {
@Override
public void onShutter() {
}
}, null, new PictureCallback() {
public void onPictureTaken(byte[] bytes, Camera camera) {
System.out
.println("got jpeg bytes, length:" + bytes.length);
stopCameraPreview();
}
});
}
}
public MediaRecorder mediaRecorder = new MediaRecorder();
private boolean recording = false;
protected void startRecording() {
String path = Environment.getExternalStorageDirectory()
.getAbsolutePath().toString();
String filename = "/xyz.mp4";
mediaRecorder = new MediaRecorder();
camera = Camera.open(cameraId);
Parameters recordFlashParams = camera.getParameters();
recordFlashParams.setFlashMode(cameraFlash ? Parameters.FLASH_MODE_AUTO
: Parameters.FLASH_MODE_OFF);
// getParameters() returns a copy, so the change must be written back with setParameters().
camera.setParameters(recordFlashParams);
camera.setDisplayOrientation(90);
Log.d("TabCustomCameraFragment", "startRecording - Camera.open");
camera.lock();
camera.unlock();
mediaRecorder.setCamera(camera);
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.DEFAULT);
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
mediaRecorder.setProfile(CamcorderProfile
.get(CamcorderProfile.QUALITY_HIGH));
mediaRecorder.setVideoSize(640, 640);
mediaRecorder.setPreviewDisplay(previewSv.getHolder().getSurface());
mediaRecorder.setOutputFile(path + filename);
if (isVertical()) {
mediaRecorder.setOrientationHint(cameraId == 0 ? 90 : 270);
} else {
mediaRecorder.setOrientationHint(cameraId == 0 ? 90 : 180);
}
mediaRecorder.setMaxDuration(10000);
try {
camera.stopPreview();
mediaRecorder.prepare();
mediaRecorder.start();
recording = true;
remainingCaptureTime = MAX_VIDEO_DURATION;
updateRemainingTime();
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
protected void stopRecording() {
if (mediaRecorder != null && recording) {
mediaRecorder.stop();
mediaRecorder.reset();
mediaRecorder.release();
camera.release();
recording = false;
handler.removeCallbacksAndMessages(null);
}
}
private int remainingCaptureTime;
private void updateRemainingTime() {
Log.d("TabCustomCameraFragment",
"updateRemainingTime - remainingCaptureTime="
+ remainingCaptureTime);
if (remainingCaptureTime <= 500) {
stopCameraPreview();
stopRecording();
return;
}
handler.postDelayed(new Runnable() {
@Override
public void run() {
remainingCaptureTime -= 500;
updateRemainingTime();
}
}, 500);
}
private boolean isVertical() {
Display getOrient = getWindowManager().getDefaultDisplay();
if (getOrient.getWidth() <= getOrient.getHeight()) {
return true;
}
return false;
}
private enum Mode {
STILL_CAMERA, VIDEO_CAMERA, RECORDING;
}
private Mode mode = Mode.STILL_CAMERA;
private void activateMode(Mode mode) {
switch (mode) {
case STILL_CAMERA:
this.mode = Mode.STILL_CAMERA;
cancelTv.setVisibility(View.GONE);
galleryIb.setVisibility(View.VISIBLE);
videoCamIb.setVisibility(View.VISIBLE);
break;
case VIDEO_CAMERA:
this.mode = Mode.VIDEO_CAMERA;
stillImgCamIb.setImageResource(R.drawable.ic_launcher);
videoCamIb.setImageResource(R.drawable.ic_launcher);
cancelTv.setVisibility(View.GONE);
galleryIb.setVisibility(View.VISIBLE);
videoCamIb.setVisibility(View.VISIBLE);
break;
case RECORDING:
this.mode = Mode.RECORDING;
stillImgCamIb.setImageResource(R.drawable.ic_launcher);
cancelTv.setVisibility(View.VISIBLE);
galleryIb.setVisibility(View.GONE);
videoCamIb.setVisibility(View.GONE);
break;
default:
break;
}
}
@Override
public void onBackPressed() {
super.onBackPressed();
stopCameraPreview();
stopRecording();
}
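For the touch-to-focus part itself, below is a minimal sketch of the usual approach with the legacy Camera API (API 14+): listen for taps on the preview SurfaceView, map the tap into the camera's fixed (-1000,-1000)..(1000,1000) focus coordinate space, set a focus area and trigger autoFocus(). It reuses the previewSv and camera fields from the code above; the 200x200 focus rectangle, the helper name and the plain linear mapping (which ignores the 90-degree display rotation set above) are illustrative assumptions rather than a drop-in implementation, it needs Rect, MotionEvent and Collections imports, and it only does anything on devices where getMaxNumFocusAreas() reports support.
private void enableTouchToFocus() {
    previewSv.setOnTouchListener(new View.OnTouchListener() {
        @Override
        public boolean onTouch(View v, MotionEvent event) {
            if (event.getAction() != MotionEvent.ACTION_UP || camera == null) {
                return false;
            }
            Parameters params = camera.getParameters();
            if (params.getMaxNumFocusAreas() <= 0) {
                return false; // focus areas not supported by this hardware
            }
            // Map view coordinates into the camera's -1000..1000 focus space.
            // A real implementation must also account for setDisplayOrientation(90).
            float x = event.getX() / v.getWidth() * 2000f - 1000f;
            float y = event.getY() / v.getHeight() * 2000f - 1000f;
            int left = (int) Math.max(-1000, Math.min(900, x - 100));
            int top = (int) Math.max(-1000, Math.min(900, y - 100));
            Rect focusRect = new Rect(left, top, left + 200, top + 200);
            camera.cancelAutoFocus();
            params.setFocusMode(Parameters.FOCUS_MODE_AUTO);
            params.setFocusAreas(Collections.singletonList(new Camera.Area(focusRect, 1000)));
            if (params.getMaxNumMeteringAreas() > 0) {
                params.setMeteringAreas(Collections.singletonList(new Camera.Area(focusRect, 1000)));
            }
            camera.setParameters(params);
            camera.autoFocus(new Camera.AutoFocusCallback() {
                @Override
                public void onAutoFocus(boolean success, Camera camera) {
                    // Focus attempt finished; optionally restore a continuous focus mode here.
                }
            });
            return true;
        }
    });
}
Calling enableTouchToFocus() once after startCameraPreview() in onCreate() would be one place to hook this in; the listener stays attached across preview restarts.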

Related

Capture image in every 42 millisecond previewed in surface View

I want to capture the image previewed in the SurfaceView every 42 milliseconds. While capturing, I want to send these images to the server as a byte array. For security reasons, the photo can't be saved to the SD card. I must use this for making a video call. Can anyone help me, please?
Button take;
Camera camera;
SurfaceView surfaceView;
SurfaceHolder surfaceHolder;
Camera.PictureCallback jpegCallback;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
checkPermission();
surfaceView = (SurfaceView) findViewById(R.id.surface);
surfaceHolder = surfaceView.getHolder();
take = (Button) findViewById(R.id.take);
take.setOnClickListener(this);
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
surfaceHolder.addCallback(this);
// deprecated setting, but required on Android versions prior to 3.0
surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
jpegCallback = new Camera.PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
FileOutputStream outStream = null;
try {
outStream = new FileOutputStream(String.format("/sdcard/%d.jpg", System.currentTimeMillis()));
outStream.write(data);
outStream.close();
Log.d("Log", "onPictureTaken - wrote bytes: " + data.length);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
}
Toast.makeText(getApplicationContext(), "Picture Saved", Toast.LENGTH_SHORT).show();
refreshCamera();
}
};
}
public void captureImage() throws IOException {
//take the picture
camera.takePicture(null, null, jpegCallback);
}
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
try {
// open the camera
camera = Camera.open();
} catch (RuntimeException e) {
// check for exceptions
System.err.println(e);
return;
}
Camera.Parameters param;
param = camera.getParameters();
// modify parameter
List<Camera.Size> sizes = param.getSupportedPreviewSizes();
Camera.Size selected = sizes.get(0);
param.setPreviewSize(selected.width,selected.height);
camera.setParameters(param);
try {
// The Surface has been created, now tell the camera where to draw
// the preview.
camera.setDisplayOrientation(90);
camera.setPreviewDisplay(surfaceHolder);
camera.startPreview();
} catch (Exception e) {
// check for exceptions
System.err.println(e);
return;
}
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i1, int i2) {
// Now that the size is known, set up the camera parameters and begin
// the preview.
refreshCamera();
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
// stop preview and release camera
camera.stopPreview();
camera.release();
camera = null;
}
public void refreshCamera() {
if (surfaceHolder.getSurface() == null) {
// preview surface does not exist
return;
}
// set preview size and make any resize, rotate or
// reformatting changes here
// start preview with new settings
try {
camera.setPreviewDisplay(surfaceHolder);
camera.startPreview();
} catch (Exception e) {
}
}
private void requestPermission() {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, 1);
}
private boolean checkPermission() {
int result = ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
if (result == PackageManager.PERMISSION_GRANTED) {
return true;
} else {
return false;
}
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
switch (requestCode) {
case 1:
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
} else {
}
break;
}
}
@Override
public void onPointerCaptureChanged(boolean hasCapture) {
}
@Override
public void onClick(View v) {
switch (v.getId()){
case R.id.take:{
try {
captureImage();
} catch (IOException e) {
e.printStackTrace();
}
break;
}
}
}
So while previewing in the SurfaceView I must grab a photo as a byte array every 42 ms and send it.
I found a solution to my problem. I fixed it with setPreviewCallbackWithBuffer and onPreviewFrame. There is no need for a handler or a timer...
private Camera camera;
private SurfaceView surfaceView;
private SurfaceHolder surfaceHolder;
private ImageView endCallBtn;
private ImageView micBtn;
private ImageView visibilityBtn;
private ImageView cameraBtn;
private Boolean clickedForMic = false;
private Boolean clickedForCamera = false;
private Boolean clickedForVisiblity = false;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_video_call);
surfaceView = (SurfaceView) findViewById(R.id.surfaceView);
surfaceHolder = surfaceView.getHolder();
surfaceHolder.addCallback(this);
surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
if (Build.VERSION.SDK_INT >= 23) {
if (checkPermission()) {
Log.e("permission", "Permission already granted.");
} else {
requestPermission();
}
}
endCallBtn = (ImageView) findViewById(R.id.endCallBtn);
endCallBtn.setOnClickListener(this);
micBtn = (ImageView) findViewById(R.id.micBtn);
micBtn.setImageResource(R.drawable.ic_mic_white_48px);
micBtn.setOnClickListener(this);
visibilityBtn = (ImageView) findViewById(R.id.visibilityBtn);
visibilityBtn.setImageResource(R.drawable.ic_visibility_white_48px);
visibilityBtn.setOnClickListener(this);
cameraBtn = (ImageView) findViewById(R.id.cameraBtn);
cameraBtn.setImageResource(R.drawable.ic_camera_rear_white_48px);
cameraBtn.setOnClickListener(this);
}
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.cameraBtn: {
if (clickedForCamera == false) {
if (clickedForVisiblity == true) {
Toast.makeText(VideoCallActivity.this, "Camera visibility is blocked", Toast.LENGTH_SHORT).show();
} else {
stopCamera();
startCameraBack();
cameraBtn.setImageResource(R.drawable.ic_camera_front_white_48px);
clickedForCamera = true;
}
} else {
if (clickedForVisiblity == true) {
Toast.makeText(VideoCallActivity.this, "Camera visibility is blocked", Toast.LENGTH_SHORT).show();
} else {
stopCamera();
startCameraFront();
cameraBtn.setImageResource(R.drawable.ic_camera_rear_white_48px);
clickedForCamera = false;
}
}
break;
}
case R.id.micBtn: {
if (clickedForMic == false) {
micBtn.setImageResource(R.drawable.ic_mic_off_white_48px);
micBtn.setColorFilter(Color.parseColor("#00897B"));
clickedForMic = true;
} else {
micBtn.setImageResource(R.drawable.ic_mic_white_48px);
micBtn.setColorFilter(Color.parseColor("#ffffff"));
clickedForMic = false;
}
break;
}
case R.id.endCallBtn: {
stopCamera();
finish();
overridePendingTransition(R.anim.window3, R.anim.window4);
break;
}
case R.id.visibilityBtn: {
if (clickedForVisiblity == false) {
camera.stopPreview();
visibilityBtn.setImageResource(R.drawable.ic_visibility_off_white_48px);
visibilityBtn.setColorFilter(Color.parseColor("#00897B"));
clickedForVisiblity = true;
} else {
camera.startPreview();
visibilityBtn.setImageResource(R.drawable.ic_visibility_white_48px);
visibilityBtn.setColorFilter(Color.parseColor("#ffffff"));
clickedForVisiblity = false;
}
break;
}
}
}
private void requestPermission() {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, 1);
}
private boolean checkPermission() {
int result = ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
if (result == PackageManager.PERMISSION_GRANTED) {
return true;
} else {
return false;
}
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
switch (requestCode) {
case 1:
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
} else {
}
break;
}
}
private void stopCamera() {
camera.stopPreview();
camera.release();
}
private void startCameraFront() {
if (checkPermission()) {
try {
camera = Camera.open(Camera.CameraInfo.CAMERA_FACING_FRONT);
} catch (Exception e) {
return;
}
Camera.Parameters param;
camera.setDisplayOrientation(90);
param = camera.getParameters();
param.setPreviewFrameRate(24);
param.setPreviewFpsRange(22000, 30000);
camera.setParameters(param);
try {
camera.setPreviewDisplay(surfaceHolder);
} catch (Exception e) {
return;
}
Log.v("CameraTest", "Camera PreviewFrameRate = " + camera.getParameters().getPreviewFrameRate());
Camera.Size previewSize = camera.getParameters().getPreviewSize();
int dataBufferSize = (int) (previewSize.height * previewSize.width *
(ImageFormat.getBitsPerPixel(camera.getParameters().getPreviewFormat()) / 8.0));
camera.addCallbackBuffer(new byte[dataBufferSize]);
camera.addCallbackBuffer(new byte[dataBufferSize]);
camera.addCallbackBuffer(new byte[dataBufferSize]);
camera.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() {
private long timestamp = 0;
public synchronized void onPreviewFrame(byte[] data, Camera camera) {
//Log.v("CameraTest", "Time Gap = " + (System.currentTimeMillis() - timestamp));
Log.v("CameraTest", " data: " + String.valueOf(data.length));
timestamp = System.currentTimeMillis();
try {
camera.addCallbackBuffer(data);
} catch (Exception e) {
Log.e("CameraTest", "addCallbackBuffer error");
return;
}
return;
}
});
camera.startPreview();
}
}
private void startCameraBack() {
if (checkPermission()) {
try {
camera = Camera.open(Camera.CameraInfo.CAMERA_FACING_BACK);
} catch (Exception e) {
return;
}
Camera.Parameters param;
camera.setDisplayOrientation(90);
param = camera.getParameters();
//modify parameter
param.setPreviewFrameRate(30);
camera.setParameters(param);
try {
camera.setPreviewDisplay(surfaceHolder);
camera.startPreview();
} catch (Exception e) {
Log.d("Problema", e.toString());
return;
}
}
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
startCameraFront();
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
}
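Note that the onPreviewFrame callback above only logs the frame length and recycles the buffer. If, as in the original question, every frame has to reach the server as a byte array, the NV21 preview data can be compressed to JPEG first. The lines below are only a sketch of what could run inside onPreviewFrame, assuming the default NV21 preview format; sendToServer() is a hypothetical stand-in for whatever network code is used, and compressing every frame like this is too slow for a real video call, where a hardware encoder (MediaCodec) or WebRTC would normally be used instead.
// Inside onPreviewFrame(byte[] data, Camera camera):
Camera.Size previewSize = camera.getParameters().getPreviewSize();
YuvImage yuv = new YuvImage(data, ImageFormat.NV21, previewSize.width, previewSize.height, null);
ByteArrayOutputStream jpegStream = new ByteArrayOutputStream();
yuv.compressToJpeg(new Rect(0, 0, previewSize.width, previewSize.height), 70, jpegStream);
byte[] jpegBytes = jpegStream.toByteArray();
sendToServer(jpegBytes); // hypothetical method that pushes the frame to the server
camera.addCallbackBuffer(data); // hand the buffer back so the preview keeps flowing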

Surface View Preview is stretched vertically

I am working on a live streaming application using RTMP. I am at an early stage; I have successfully done a lot of things, but now I am stuck in one place. I am using a surface view and my video is a little bit stretched vertically. The code is below:
This is my SrsCameraView:
public class SrsCameraView extends GLSurfaceView implements GLSurfaceView.Renderer {
private GPUImageFilter magicFilter;
private SurfaceTexture surfaceTexture;
private int mOESTextureId = OpenGLUtils.NO_TEXTURE;
private int mSurfaceWidth;
private int mSurfaceHeight;
private int mPreviewWidth;
private int mPreviewHeight;
private boolean mIsEncoding;
private boolean mIsTorchOn = false;
private float mInputAspectRatio;
private float mOutputAspectRatio;
private float[] mProjectionMatrix = new float[16];
private float[] mSurfaceMatrix = new float[16];
private float[] mTransformMatrix = new float[16];
private Camera mCamera;
private ByteBuffer mGLPreviewBuffer;
private final static int PIXEL_FORMAT = ImageFormat.NV21;
private int mCamId = -1;
private int mPreviewRotation = 90;
private int mPreviewOrientation = Configuration.ORIENTATION_PORTRAIT;
private Thread worker;
private final Object writeLock = new Object();
private ConcurrentLinkedQueue<IntBuffer> mGLIntBufferCache = new
ConcurrentLinkedQueue<>();
private PreviewCallback mPrevCb;
public SrsCameraView(Context context) {
this(context, null);
}
public SrsCameraView(Context context, AttributeSet attrs) {
super(context, attrs);
setEGLContextClientVersion(2);
setRenderer(this);
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
GLES20.glDisable(GL10.GL_DITHER);
GLES20.glClearColor(0, 0, 0, 0);
magicFilter = new GPUImageFilter(MagicFilterType.NONE);
magicFilter.init(getContext().getApplicationContext());
magicFilter.onInputSizeChanged(mPreviewWidth, mPreviewHeight);
mOESTextureId = OpenGLUtils.getExternalOESTextureID();
surfaceTexture = new SurfaceTexture(mOESTextureId);
surfaceTexture.setOnFrameAvailableListener(new
SurfaceTexture.OnFrameAvailableListener() {
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
requestRender();
}
});
// For camera preview on activity creation
if (mCamera != null) {
try {
mCamera.setPreviewTexture(surfaceTexture);
} catch (IOException ioe) {
ioe.printStackTrace();
}
}
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
GLES20.glViewport(0, 0, width, height);
mSurfaceWidth = width;
mSurfaceHeight = height;
magicFilter.onDisplaySizeChanged(width, height);
mOutputAspectRatio = width > height ? (float) width / height : (float)
height / width;
float aspectRatio = mOutputAspectRatio / mInputAspectRatio;
if (width > height) {
Matrix.orthoM(mProjectionMatrix, 0, -1.0f, 1.0f, -aspectRatio,
aspectRatio, -1.0f, 1.0f);
} else {
Matrix.orthoM(mProjectionMatrix, 0, -aspectRatio, aspectRatio, -1.0f, 1.0f, -1.0f, 1.0f);
}
}
@Override
public void onDrawFrame(GL10 gl) {
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
surfaceTexture.updateTexImage();
surfaceTexture.getTransformMatrix(mSurfaceMatrix);
Matrix.multiplyMM(mTransformMatrix, 0, mSurfaceMatrix, 0,
mProjectionMatrix, 0);
magicFilter.setTextureTransformMatrix(mTransformMatrix);
magicFilter.onDrawFrame(mOESTextureId);
if (mIsEncoding) {
mGLIntBufferCache.add(magicFilter.getGLFboBuffer());
synchronized (writeLock) {
writeLock.notifyAll();
}
}
}
public void setPreviewCallback(PreviewCallback cb) {
mPrevCb = cb;
}
public int[] setPreviewResolution(int width, int height) {
getHolder().setFixedSize(width, height);
mCamera = openCamera();
mPreviewWidth = width;
mPreviewHeight = height;
Camera.Size rs = adaptPreviewResolution(mCamera.new Size(width,
height));
if (rs != null) {
mPreviewWidth = rs.width;
mPreviewHeight = rs.height;
}
mCamera.getParameters().setPreviewSize(mPreviewWidth, mPreviewHeight);
mGLPreviewBuffer = ByteBuffer.allocateDirect(mPreviewWidth *
mPreviewHeight * 4);
mInputAspectRatio = mPreviewWidth > mPreviewHeight ?
(float) mPreviewWidth / mPreviewHeight : (float) mPreviewHeight /
mPreviewWidth;
return new int[] { mPreviewWidth, mPreviewHeight };
}
public boolean setFilter(final MagicFilterType type) {
if (mCamera == null) {
return false;
}
queueEvent(new Runnable() {
@Override
public void run() {
if (magicFilter != null) {
magicFilter.destroy();
}
magicFilter = MagicFilterFactory.initFilters(type);
if (magicFilter != null) {
magicFilter.init(getContext().getApplicationContext());
magicFilter.onInputSizeChanged(mPreviewWidth,
mPreviewHeight);
magicFilter.onDisplaySizeChanged(mSurfaceWidth,
mSurfaceHeight);
}
}
});
requestRender();
return true;
}
private void deleteTextures() {
if (mOESTextureId != OpenGLUtils.NO_TEXTURE) {
queueEvent(new Runnable() {
@Override
public void run() {
GLES20.glDeleteTextures(1, new int[]{ mOESTextureId }, 0);
mOESTextureId = OpenGLUtils.NO_TEXTURE;
}
});
}
}
public void setCameraId(int id) {
mCamId = id;
setPreviewOrientation(mPreviewOrientation);
}
public void setPreviewOrientation(int orientation) {
mPreviewOrientation = orientation;
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(mCamId, info);
if (orientation == Configuration.ORIENTATION_PORTRAIT) {
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
mPreviewRotation = info.orientation % 360;
mPreviewRotation = (360 - mPreviewRotation) % 360; // compensate the mirror
} else {
mPreviewRotation = (info.orientation + 360) % 360;
}
} else if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
mPreviewRotation = (info.orientation + 90) % 360;
mPreviewRotation = (360 - mPreviewRotation) % 360; // compensate the mirror
} else {
mPreviewRotation = (info.orientation + 270) % 360;
}
}
}
public int getCameraId() {
return mCamId;
}
public void enableEncoding() {
worker = new Thread(new Runnable() {
@Override
public void run() {
while (!Thread.interrupted()) {
while (!mGLIntBufferCache.isEmpty()) {
IntBuffer picture = mGLIntBufferCache.poll();
mGLPreviewBuffer.asIntBuffer().put(picture.array());
mPrevCb.onGetRgbaFrame(mGLPreviewBuffer.array(),
mPreviewWidth, mPreviewHeight);
}
// Waiting for next frame
synchronized (writeLock) {
try {
// isEmpty() may take some time, so we set timeout to detect next frame
writeLock.wait(500);
} catch (InterruptedException ie) {
worker.interrupt();
}
}
}
}
});
worker.start();
mIsEncoding = true;
}
public void disableEncoding() {
mIsEncoding = false;
mGLIntBufferCache.clear();
if (worker != null) {
worker.interrupt();
try {
worker.join();
} catch (InterruptedException e) {
e.printStackTrace();
worker.interrupt();
}
worker = null;
}
}
public boolean startCamera() {
if (mCamera == null) {
mCamera = openCamera();
if (mCamera == null) {
return false;
}
}
Camera.Parameters params = mCamera.getParameters();
params.setPictureSize(mPreviewWidth, mPreviewHeight);
params.setPreviewSize(mPreviewWidth, mPreviewHeight);
int[] range = adaptFpsRange(SrsEncoder.VFPS,
params.getSupportedPreviewFpsRange());
params.setPreviewFpsRange(range[0], range[1]);
params.setPreviewFormat(ImageFormat.NV21);
params.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
params.setWhiteBalance(Camera.Parameters.WHITE_BALANCE_AUTO);
params.setSceneMode(Camera.Parameters.SCENE_MODE_AUTO);
List<String> supportedFocusModes = params.getSupportedFocusModes();
if (supportedFocusModes != null && !supportedFocusModes.isEmpty()) {
if (supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
} else if (supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
mCamera.autoFocus(null);
} else {
params.setFocusMode(supportedFocusModes.get(0));
}
}
List<String> supportedFlashModes = params.getSupportedFlashModes();
if (supportedFlashModes != null && !supportedFlashModes.isEmpty()) {
if
(supportedFlashModes.contains(Camera.Parameters.FLASH_MODE_TORCH)) {
if (mIsTorchOn) {
params.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH);
}
} else {
params.setFlashMode(supportedFlashModes.get(0));
}
}
mCamera.setParameters(params);
mCamera.setDisplayOrientation(mPreviewRotation);
try {
mCamera.setPreviewTexture(surfaceTexture);
} catch (IOException e) {
e.printStackTrace();
}
mCamera.startPreview();
return true;
}
public void stopCamera() {
disableEncoding();
if (mCamera != null) {
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
}
private Camera openCamera() {
Camera camera;
if (mCamId < 0) {
Camera.CameraInfo info = new Camera.CameraInfo();
int numCameras = Camera.getNumberOfCameras();
int frontCamId = -1;
int backCamId = -1;
for (int i = 0; i < numCameras; i++) {
Camera.getCameraInfo(i, info);
if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
backCamId = i;
} else if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT)
{
frontCamId = i;
break;
}
}
if (frontCamId != -1) {
mCamId = frontCamId;
} else if (backCamId != -1) {
mCamId = backCamId;
} else {
mCamId = 0;
}
}
camera = Camera.open(mCamId);
return camera;
}
private Camera.Size adaptPreviewResolution(Camera.Size resolution) {
float diff = 100f;
float xdy = (float) resolution.width / (float) resolution.height;
Camera.Size best = null;
for (Camera.Size size :
mCamera.getParameters().getSupportedPreviewSizes()) {
if (size.equals(resolution)) {
return size;
}
float tmp = Math.abs(((float) size.width / (float) size.height) -
xdy);
if (tmp < diff) {
diff = tmp;
best = size;
}
}
return best;
}
private int[] adaptFpsRange(int expectedFps, List<int[]> fpsRanges) {
expectedFps *= 1000;
int[] closestRange = fpsRanges.get(0);
int measure = Math.abs(closestRange[0] - expectedFps) +
Math.abs(closestRange[1] - expectedFps);
for (int[] range : fpsRanges) {
if (range[0] <= expectedFps && range[1] >= expectedFps) {
int curMeasure = Math.abs(range[0] - expectedFps) +
Math.abs(range[1] - expectedFps);
if (curMeasure < measure) {
closestRange = range;
measure = curMeasure;
}
}
}
return closestRange;
}
public interface PreviewCallback {
void onGetRgbaFrame(byte[] data, int width, int height);
}
}
This is my MainActivity:
public class MainActivity extends AppCompatActivity implements RtmpHandler.RtmpListener,
SrsRecordHandler.SrsRecordListener, SrsEncodeHandler.SrsEncodeListener {
private static final String TAG = "Yasea";
Button btnPublish = null;
Button btnSwitchCamera = null;
Button btnRecord = null;
Button btnSwitchEncoder = null;
private SharedPreferences sp;
private String rtmpUrl = "rtmp://00.00.000.00/live/" + getRandomAlphaString(3) + '/' + getRandomAlphaDigitString(5);
private String recPath = Environment.getExternalStorageDirectory().getPath() + "/test.mp4";
private SrsPublisher mPublisher;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.activity_main);
// response screen rotation event
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_FULL_SENSOR);
// restore data.
sp = getSharedPreferences("Yasea", MODE_PRIVATE);
rtmpUrl = sp.getString("rtmpUrl", rtmpUrl);
// initialize url.
final EditText efu = (EditText) findViewById(R.id.url);
efu.setText(rtmpUrl);
btnPublish = (Button) findViewById(R.id.publish);
btnSwitchCamera = (Button) findViewById(R.id.swCam);
btnRecord = (Button) findViewById(R.id.record);
btnSwitchEncoder = (Button) findViewById(R.id.swEnc);
mPublisher = new SrsPublisher((SrsCameraView) findViewById(R.id.glsurfaceview_camera));
mPublisher.setEncodeHandler(new SrsEncodeHandler(this));
mPublisher.setRtmpHandler(new RtmpHandler(this));
mPublisher.setRecordHandler(new SrsRecordHandler(this));
mPublisher.setPreviewResolution(640, 480);
mPublisher.setOutputResolution(720, 1280);
mPublisher.setVideoHDMode();
mPublisher.startCamera();
btnPublish.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (btnPublish.getText().toString().contentEquals("publish")) {
rtmpUrl = efu.getText().toString();
SharedPreferences.Editor editor = sp.edit();
editor.putString("rtmpUrl", rtmpUrl);
editor.apply();
mPublisher.startPublish(rtmpUrl);
mPublisher.startCamera();
if (btnSwitchEncoder.getText().toString().contentEquals("soft encoder")) {
Toast.makeText(getApplicationContext(), "Use hard encoder", Toast.LENGTH_SHORT).show();
} else {
Toast.makeText(getApplicationContext(), "Use soft encoder", Toast.LENGTH_SHORT).show();
}
btnPublish.setText("stop");
btnSwitchEncoder.setEnabled(false);
} else if (btnPublish.getText().toString().contentEquals("stop")) {
mPublisher.stopPublish();
mPublisher.stopRecord();
btnPublish.setText("publish");
btnRecord.setText("record");
btnSwitchEncoder.setEnabled(true);
}
}
});
btnSwitchCamera.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mPublisher.switchCameraFace((mPublisher.getCamraId() + 1) % Camera.getNumberOfCameras());
}
});
btnRecord.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (btnRecord.getText().toString().contentEquals("record")) {
if (mPublisher.startRecord(recPath)) {
btnRecord.setText("pause");
}
} else if (btnRecord.getText().toString().contentEquals("pause")) {
mPublisher.pauseRecord();
btnRecord.setText("resume");
} else if (btnRecord.getText().toString().contentEquals("resume")) {
mPublisher.resumeRecord();
btnRecord.setText("pause");
}
}
});
btnSwitchEncoder.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (btnSwitchEncoder.getText().toString().contentEquals("soft encoder")) {
mPublisher.switchToSoftEncoder();
btnSwitchEncoder.setText("hard encoder");
} else if (btnSwitchEncoder.getText().toString().contentEquals("hard encoder")) {
mPublisher.switchToHardEncoder();
btnSwitchEncoder.setText("soft encoder");
}
}
});
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
return true;
} else {
switch (id) {
case R.id.cool_filter:
mPublisher.switchCameraFilter(MagicFilterType.COOL);
break;
case R.id.beauty_filter:
mPublisher.switchCameraFilter(MagicFilterType.BEAUTY);
break;
case R.id.early_bird_filter:
mPublisher.switchCameraFilter(MagicFilterType.EARLYBIRD);
break;
case R.id.evergreen_filter:
mPublisher.switchCameraFilter(MagicFilterType.EVERGREEN);
break;
case R.id.n1977_filter:
mPublisher.switchCameraFilter(MagicFilterType.N1977);
break;
case R.id.nostalgia_filter:
mPublisher.switchCameraFilter(MagicFilterType.NOSTALGIA);
break;
case R.id.romance_filter:
mPublisher.switchCameraFilter(MagicFilterType.ROMANCE);
break;
case R.id.sunrise_filter:
mPublisher.switchCameraFilter(MagicFilterType.SUNRISE);
break;
case R.id.sunset_filter:
mPublisher.switchCameraFilter(MagicFilterType.SUNSET);
break;
case R.id.tender_filter:
mPublisher.switchCameraFilter(MagicFilterType.TENDER);
break;
case R.id.toast_filter:
mPublisher.switchCameraFilter(MagicFilterType.TOASTER2);
break;
case R.id.valencia_filter:
mPublisher.switchCameraFilter(MagicFilterType.VALENCIA);
break;
case R.id.walden_filter:
mPublisher.switchCameraFilter(MagicFilterType.WALDEN);
break;
case R.id.warm_filter:
mPublisher.switchCameraFilter(MagicFilterType.WARM);
break;
case R.id.original_filter:
default:
mPublisher.switchCameraFilter(MagicFilterType.NONE);
break;
}
}
setTitle(item.getTitle());
return super.onOptionsItemSelected(item);
}
@Override
protected void onResume() {
super.onResume();
final Button btn = (Button) findViewById(R.id.publish);
btn.setEnabled(true);
mPublisher.resumeRecord();
}
@Override
protected void onPause() {
super.onPause();
mPublisher.pauseRecord();
}
@Override
protected void onDestroy() {
super.onDestroy();
mPublisher.stopPublish();
mPublisher.stopRecord();
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
mPublisher.stopEncode();
mPublisher.stopRecord();
btnRecord.setText("record");
mPublisher.setScreenOrientation(newConfig.orientation);
if (btnPublish.getText().toString().contentEquals("stop")) {
mPublisher.startEncode();
}
mPublisher.startCamera();
}
private static String getRandomAlphaString(int length) {
String base = "abcdefghijklmnopqrstuvwxyz";
Random random = new Random();
StringBuilder sb = new StringBuilder();
for (int i = 0; i < length; i++) {
int number = random.nextInt(base.length());
sb.append(base.charAt(number));
}
return sb.toString();
}
private static String getRandomAlphaDigitString(int length) {
String base = "abcdefghijklmnopqrstuvwxyz0123456789";
Random random = new Random();
StringBuilder sb = new StringBuilder();
for (int i = 0; i < length; i++) {
int number = random.nextInt(base.length());
sb.append(base.charAt(number));
}
return sb.toString();
}
private void handleException(Exception e) {
try {
Toast.makeText(getApplicationContext(), e.getMessage(), Toast.LENGTH_SHORT).show();
mPublisher.stopPublish();
mPublisher.stopRecord();
btnPublish.setText("publish");
btnRecord.setText("record");
btnSwitchEncoder.setEnabled(true);
} catch (Exception e1) {
//
}
}
// Implementation of SrsRtmpListener.
@Override
public void onRtmpConnecting(String msg) {
Toast.makeText(getApplicationContext(), msg, Toast.LENGTH_SHORT).show();
}
@Override
public void onRtmpConnected(String msg) {
Toast.makeText(getApplicationContext(), msg, Toast.LENGTH_SHORT).show();
}
@Override
public void onRtmpVideoStreaming() {
}
@Override
public void onRtmpAudioStreaming() {
}
@Override
public void onRtmpStopped() {
Toast.makeText(getApplicationContext(), "Stopped", Toast.LENGTH_SHORT).show();
}
@Override
public void onRtmpDisconnected() {
Toast.makeText(getApplicationContext(), "Disconnected", Toast.LENGTH_SHORT).show();
}
@Override
public void onRtmpVideoFpsChanged(double fps) {
Log.i(TAG, String.format("Output Fps: %f", fps));
}
@Override
public void onRtmpVideoBitrateChanged(double bitrate) {
int rate = (int) bitrate;
if (rate / 1000 > 0) {
Log.i(TAG, String.format("Video bitrate: %f kbps", bitrate / 1000));
} else {
Log.i(TAG, String.format("Video bitrate: %d bps", rate));
}
}
@Override
public void onRtmpAudioBitrateChanged(double bitrate) {
int rate = (int) bitrate;
if (rate / 1000 > 0) {
Log.i(TAG, String.format("Audio bitrate: %f kbps", bitrate / 1000));
} else {
Log.i(TAG, String.format("Audio bitrate: %d bps", rate));
}
}
@Override
public void onRtmpSocketException(SocketException e) {
handleException(e);
}
@Override
public void onRtmpIOException(IOException e) {
handleException(e);
}
@Override
public void onRtmpIllegalArgumentException(IllegalArgumentException e) {
handleException(e);
}
@Override
public void onRtmpIllegalStateException(IllegalStateException e) {
handleException(e);
}
// Implementation of SrsRecordHandler.
@Override
public void onRecordPause() {
Toast.makeText(getApplicationContext(), "Record paused", Toast.LENGTH_SHORT).show();
}
@Override
public void onRecordResume() {
Toast.makeText(getApplicationContext(), "Record resumed", Toast.LENGTH_SHORT).show();
}
@Override
public void onRecordStarted(String msg) {
Toast.makeText(getApplicationContext(), "Recording file: " + msg, Toast.LENGTH_SHORT).show();
}
@Override
public void onRecordFinished(String msg) {
Toast.makeText(getApplicationContext(), "MP4 file saved: " + msg, Toast.LENGTH_SHORT).show();
}
@Override
public void onRecordIOException(IOException e) {
handleException(e);
}
@Override
public void onRecordIllegalArgumentException(IllegalArgumentException e) {
handleException(e);
}
// Implementation of SrsEncodeHandler.
@Override
public void onNetworkWeak() {
Toast.makeText(getApplicationContext(), "Network weak", Toast.LENGTH_SHORT).show();
}
@Override
public void onNetworkResume() {
Toast.makeText(getApplicationContext(), "Network resume", Toast.LENGTH_SHORT).show();
}
@Override
public void onEncodeIllegalArgumentException(IllegalArgumentException e) {
handleException(e);
}
}
I am not sure that this is the issue (a screenshot could have helped) but there might be a precision issue here (or even more in other places in the code):
mOutputAspectRatio = width > height ? (float) width / height : (float)
height / width;
Both width and height are integers. The result of their division is also an integer and hence it loses precision. I suggest you cast them both to float before the division:
mOutputAspectRatio = width > height ? ((float) width) / ((float)height) : ((float)
height) / ((float) width);
And the same for the computation of mInputAspectRatio.
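For reference, the corresponding change in setPreviewResolution would look something like this:
mInputAspectRatio = mPreviewWidth > mPreviewHeight ?
((float) mPreviewWidth) / ((float) mPreviewHeight) : ((float) mPreviewHeight) / ((float) mPreviewWidth);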

why video is getting stretched in camera2 API?

I am building a video camera app which is always in landscape mode and always records from the front camera.
But when I am recording, the video looks stretched. How do I solve it?
Where am I making a mistake?
public class MainActivity extends AppCompatActivity {
public static final int REQUEST_CAMERA_PERMISSION_RESULT = 0;
public static final int REQUEST_WRITE_EXTERNAL_STORAGE_PERMISSION_RESULT = 1;
private TextureView mTextureView;
//public ImageView mImageView;
private TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
//Toast.makeText(getApplicationContext(),"TextureView is Available",Toast.LENGTH_SHORT).show();
setupCamera(width, height);
connectCamera();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int width, int height) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
}
};
private CameraDevice mCameraDevice;
private CameraDevice.StateCallback mCameraDeviceStateCallback;
{
mCameraDeviceStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
mCameraDevice = camera;
mMediaRecorder = new MediaRecorder();
if (mIsRecording) {
try {
createVideoFileName();
} catch (IOException e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
startRecord();
mMediaRecorder.start();
runOnUiThread(new Runnable() {
@Override
public void run() {
mChronometer.setBase(SystemClock.elapsedRealtime());
mChronometer.setVisibility(View.VISIBLE);
mChronometer.start();
}
});
} else {
startPreview();
}
//Toast.makeText(getApplicationContext(),"Camera connected",Toast.LENGTH_SHORT).show();
}
@Override
public void onDisconnected(CameraDevice camera) {
camera.close();
mCameraDevice = null;
}
@Override
public void onError(CameraDevice camera, int i) {
}
};
}
private HandlerThread mBackgroundHandlerThread;
private Handler mBackgroundHandler;
private String mCameraId;
private Size mPreviewSize;
private Size mVideoSize;
private MediaRecorder mMediaRecorder;
private Chronometer mChronometer;
private ImageView thumb;
//private String V1, V2, V3, V4, V5;
// private Map<String, String> mapA = new HashMap<>();
// private ImageView[] IMGS = {mImageView1, mImageView2, mImageView3, mImageView4, mImageView5};
private int mTotalRotation;
private CaptureRequest.Builder mCaptureRequestBuilder;
public static int count;
public static int max = 5;
private ImageButton mRecordImageButton;
private boolean mIsRecording = false;
public static File mVideoFolder;
private static File mRawVideoFolder;
public static String mVideoFileName;
//Test
private List<Bitmap> bitMapsAvailable = new ArrayList<>();
private List<String> bitMapsFilePath = new ArrayList<>();
private int bitMapIndex;
CameraCaptureSession storedSession;
private ArrayAdapter bitMapAdapter;
private ArrayAdapter bitMapFileAdapter;
private static SparseIntArray ORIENTATIONS = new SparseIntArray();
public int index;
static {
ORIENTATIONS.append(Surface.ROTATION_0, 0);
ORIENTATIONS.append(Surface.ROTATION_90, 90);
ORIENTATIONS.append(Surface.ROTATION_180, 180);
ORIENTATIONS.append(Surface.ROTATION_270, 270);
}
private static class CompareSizeByArea implements Comparator<Size> {
@Override
public int compare(Size lhs, Size rhs) {
// Note: compare by subtracting the areas; dividing them (as originally written) does not order sizes correctly.
return Long.signum((long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
//setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
createVideoFolder();
mMediaRecorder = new MediaRecorder();
mChronometer = (Chronometer) findViewById(R.id.chronometer);
mTextureView = (TextureView) findViewById(R.id.textureView);
mRecordImageButton = (ImageButton) findViewById(R.id.videoButton);
mRecordImageButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (mIsRecording) {
mChronometer.stop();
mChronometer.setVisibility(View.INVISIBLE);
mIsRecording = false;
mRecordImageButton.setImageResource(R.mipmap.start_recording);
//Toast.makeText(getApplicationContext(),"Started",Toast.LENGTH_SHORT).show();
if(storedSession != null){
try {
storedSession.stopRepeating();
storedSession.abortCaptures();
}catch (CameraAccessException e){
throw new RuntimeException(e.getMessage());
}
}
mMediaRecorder.stop();
mMediaRecorder.reset();
startPreview();
//Create bitmap with current video file path
Bitmap bitMap = ThumbnailUtils.createVideoThumbnail(mVideoFileName, MediaStore.Video.Thumbnails.MICRO_KIND);
//Add bitmap to array list
bitMapsAvailable.add(bitMap);
bitMapsFilePath.add(mVideoFileName);
// Shows thumbnails
showThumbnails();
} else {
checkWriteStoragePermission();
}
}
});
}
private void showThumbnails() {
LinearLayout layout = (LinearLayout) findViewById(R.id.thumbnails);
bitMapAdapter = new ArrayAdapter(this, R.layout.activity_main, bitMapsAvailable);
bitMapFileAdapter = new ArrayAdapter(this, R.layout.activity_main, bitMapsFilePath);
bitMapIndex = 0;
if (layout.getChildCount() > 0) {
layout.removeAllViews();
}
for (Bitmap eachBitMap : bitMapsAvailable) {
bitMapIndex++;
ImageView thumb = new ImageView(this);
thumb.setId(new Integer(bitMapIndex+ 17));
thumb.setLayoutParams(new android.view.ViewGroup.LayoutParams(100, 80));
thumb.setImageBitmap(eachBitMap);
// Adds the view to the layout
thumb.setOnClickListener(previewThumb(thumb));
layout.addView(thumb);
}
}
View.OnClickListener previewThumb(final ImageView imageview) {
return new View.OnClickListener() {
public void onClick(View arg0) {
index = imageview.getId()-18;
imageview.setBackgroundColor(0xff999999);
// Start NewActivity.class
Intent myIntent = new Intent(MainActivity.this,
VideoViewActivity.class);
Bundle bundle = new Bundle();
bundle.putStringArrayList("bitMapsAvailable", new ArrayList(bitMapsAvailable));
bundle.putStringArrayList("bitMapsFilePath", new ArrayList(bitMapsFilePath));
//Add your data to bundle
bundle.putInt("urlIndex", index);
myIntent.putExtras(bundle);
bitMapAdapter.notifyDataSetChanged();
bitMapFileAdapter.notifyDataSetChanged();
// startActivity(myIntent);
startActivityForResult(myIntent, 111);
}
};
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (resultCode == RESULT_OK) {
if (requestCode == 111) {
int indexToBeDeleted = data.getIntExtra("indexToBeDeleted",index);
//bitMapsFilePath.remove(indexToBeDeleted);
bitMapsAvailable.remove(indexToBeDeleted);
}
}
}
@Override
protected void onResume() {
super.onResume();
startBackgroundThread();
if (mTextureView.isAvailable()) {
setupCamera(mTextureView.getWidth(), mTextureView.getHeight());
connectCamera();
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
@Override
public void onRequestPermissionsResult(int requestCode, String[] permission, int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permission, grantResults);
if (requestCode == REQUEST_CAMERA_PERMISSION_RESULT) {
if (grantResults[0] != PackageManager.PERMISSION_GRANTED) {
Toast.makeText(getApplicationContext(), "Application will not run without camera service", Toast.LENGTH_SHORT).show();
}
if (grantResults[0] != PackageManager.PERMISSION_GRANTED) {
Toast.makeText(getApplicationContext(), "Application will not have audio on record ", Toast.LENGTH_SHORT).show();
}
}
if (requestCode == REQUEST_WRITE_EXTERNAL_STORAGE_PERMISSION_RESULT) {
if (grantResults[0] == PackageManager.PERMISSION_GRANTED) {
mIsRecording = true;
mRecordImageButton.setImageResource(R.mipmap.ic_launcher);
try {
createVideoFileName();
} catch (IOException e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
Toast.makeText(this, "Permission Successfully Granted", Toast.LENGTH_SHORT).show();
} else {
Toast.makeText(this, "App needs to save video to run", Toast.LENGTH_SHORT).show();
}
}
}
@Override
protected void onPause() {
closeCamera();
stopBackgroundThread();
super.onPause();
}
@Override
public void onWindowFocusChanged(boolean hasFocus) {
super.onWindowFocusChanged(hasFocus);
View decorView = getWindow().getDecorView();
if (hasFocus) {
decorView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_STABLE
| View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY
| View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
| View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_FULLSCREEN
| View.SYSTEM_UI_FLAG_HIDE_NAVIGATION);
}
}
private void setupCamera(int width, int height) {
CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
for (String cameraId : cameraManager.getCameraIdList()) {
CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
if (cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_BACK) {
continue;
}
StreamConfigurationMap map = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
//int deviceOrientation = getWindowManager().getDefaultDisplay().getRotation();
mTotalRotation = sensorToDeviceRotation(cameraCharacteristics);
boolean swapRotation = mTotalRotation == 90 || mTotalRotation == 270;
int rotateWidth = width;
int rotateHeight = height;
if (swapRotation) {
rotateWidth = height;
rotateHeight = width;
}
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotateWidth, rotateHeight);
mVideoSize = chooseOptimalSize(map.getOutputSizes(MediaRecorder.class), rotateWidth, rotateHeight);
mCameraId = cameraId;
return;
}
} catch (CameraAccessException e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
}
private void connectCamera() {
CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED) {
cameraManager.openCamera(mCameraId, mCameraDeviceStateCallback, mBackgroundHandler);
} else {
if (shouldShowRequestPermissionRationale(Manifest.permission.CAMERA)) {
Toast.makeText(this, "Video app required access to camera", Toast.LENGTH_SHORT).show();
}
requestPermissions(new String[]{android.Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO}, REQUEST_CAMERA_PERMISSION_RESULT);
}
} else {
cameraManager.openCamera(mCameraId, mCameraDeviceStateCallback, mBackgroundHandler);
}
} catch (CameraAccessException e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
}
private void startRecord() {
if (this.bitMapsAvailable.size() < max) {
try {
setupMediaRecorder();
SurfaceTexture surfaceTexture = mTextureView.getSurfaceTexture();
surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface previewSurface = new Surface(surfaceTexture);
Surface recordSurface = mMediaRecorder.getSurface();
mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
mCaptureRequestBuilder.addTarget(previewSurface);
mCaptureRequestBuilder.addTarget(recordSurface);
mCameraDevice.createCaptureSession(Arrays.asList(previewSurface, recordSurface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
session.setRepeatingRequest(mCaptureRequestBuilder.build(), null, null);
storedSession = session;
} catch (CameraAccessException e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
}
}, null);
} catch (Exception e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
} else {
}
}
private void startPreview() {
SurfaceTexture surfaceTexture = mTextureView.getSurfaceTexture();
surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface previewSurface = new Surface(surfaceTexture);
try {
mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mCaptureRequestBuilder.addTarget(previewSurface);
mCameraDevice.createCaptureSession(Arrays.asList(previewSurface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
session.stopRepeating();
session.abortCaptures();
session.setRepeatingRequest(mCaptureRequestBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
Toast.makeText(getApplicationContext(), "Unable to setup Camera Preview", Toast.LENGTH_SHORT).show();
}
}, null);
} catch (CameraAccessException e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
}
private void closeCamera() {
if (mCameraDevice != null) {
mCameraDevice.close();
mCameraDevice = null;
}
}
private void startBackgroundThread() {
mBackgroundHandlerThread = new HandlerThread("AuthorTV");
mBackgroundHandlerThread.start();
mBackgroundHandler = new Handler(mBackgroundHandlerThread.getLooper());
}
private void stopBackgroundThread() {
mBackgroundHandlerThread.quitSafely();
try {
mBackgroundHandlerThread.join();
mBackgroundHandlerThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
}
private static int sensorToDeviceRotation(CameraCharacteristics cameraCharacteristics) {
int sensorOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
// deviceOrientation = ORIENTATIONS.get(deviceOrientation);
return (sensorOrientation + 180 + 360) % 360;
}
private static Size chooseOptimalSize(Size[] choices, int width, int height) {
List<Size> bigEnough = new ArrayList<Size>();
for (Size option : choices) {
if (option.getHeight() == option.getWidth() * height / width && option.getWidth() >= width && option.getHeight() >= height) {
bigEnough.add(option);
}
}
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizeByArea());
} else {
return choices[0];
}
}
private void setupMediaRecorder() throws IOException {
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mMediaRecorder.setOutputFile(mVideoFileName);
mMediaRecorder.setVideoEncodingBitRate(1000000);
mMediaRecorder.setVideoFrameRate(30);
mMediaRecorder.setVideoSize(mVideoSize.getWidth(), mVideoSize.getHeight());
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
mMediaRecorder.setOrientationHint(mTotalRotation);
mMediaRecorder.prepare();
}
chooseOptimalSize needs to be adjusted, and it should be different for mPreviewSize and mVideoSize. Check this link: Camera2VideoFragment.java
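A sketch of what that adjustment can look like, loosely following the linked sample: pick the recording size on its own (for example the largest 4:3 size no wider than 1080), then pick a preview size that is big enough and matches the aspect ratio of the chosen video size rather than the raw TextureView dimensions. The method bodies below are illustrative and reuse the CompareSizeByArea comparator from the question:
// Pick the recording size independently of the view size.
private static Size chooseVideoSize(Size[] choices) {
    for (Size size : choices) {
        if (size.getWidth() == size.getHeight() * 4 / 3 && size.getWidth() <= 1080) {
            return size;
        }
    }
    return choices[choices.length - 1];
}

// Pick a preview size that is at least width x height and shares the video size's aspect ratio.
private static Size chooseOptimalSize(Size[] choices, int width, int height, Size aspectRatio) {
    List<Size> bigEnough = new ArrayList<>();
    for (Size option : choices) {
        boolean sameRatio = option.getHeight() == option.getWidth() * aspectRatio.getHeight() / aspectRatio.getWidth();
        if (sameRatio && option.getWidth() >= width && option.getHeight() >= height) {
            bigEnough.add(option);
        }
    }
    return bigEnough.isEmpty() ? choices[0] : Collections.min(bigEnough, new CompareSizeByArea());
}
In setupCamera() that would then be wired up roughly as:
mVideoSize = chooseVideoSize(map.getOutputSizes(MediaRecorder.class));
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotateWidth, rotateHeight, mVideoSize);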

Tap and hold to record video like Vine

I want to make an app that records video like Vine: hold to record, release to stop, then hold to record again, and keep doing that to the end.
I have used MediaRecorder, but it only records once at a time; if I start recording again, the app crashes.
Please tell me, is there any way to do this?
I edited my code:
public class VideoRecordingActivity extends AppCompatActivity implements View.OnTouchListener, View.OnLongClickListener {
private Context myContext;
private boolean hasCamera;
private boolean onRecording;
private Camera mCamera;
private CameraPreview mPreview;
private MediaRecorder mediaRecorder;
private boolean cameraFront = false;
private int cameraId;
private int videoNumer;
private boolean isActionDown = false;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_video_introduction_recording);
initUI();
initialize();
}
private LinearLayout lnCameraPreview;
private ImageButton btn_recording;
private void initUI() {
lnCameraPreview = (LinearLayout) findViewById(R.id.ln_body_recording);
btn_recording = (ImageButton) findViewById(R.id.btn_recording);
}
public void initialize() {
myContext = this;
mPreview = new CameraPreview(this, cameraId, mCamera);
lnCameraPreview.addView(mPreview);
btn_recording.setOnLongClickListener(this);
btn_recording.setOnTouchListener(this);
videoNumer = 0;
}
public boolean onLongClick(View v) {
isActionDown = true;
try {
boolean isPrepared = false;
if (isActionDown)
isPrepared = prepareMediaRecorder();
if (isPrepared && isActionDown) {
// work on UiThread for better performance
runOnUiThread(new Runnable() {
public void run() {
mediaRecorder.start();
onRecording = true;
}
});
}
} catch (Exception e) {
e.printStackTrace();
Log.e("onLongPress Error ", e.toString());
}
return true;
}
@Override
public boolean onTouch(View v, MotionEvent event) {
switch (event.getAction()) {
case MotionEvent.ACTION_UP:
isActionDown = false;
try {
if (onRecording) {
if (mediaRecorder != null) {
mediaRecorder.stop();
}
onRecording = false;
videoNumer++;
}
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
break;
}
return false;
}
public void onResume() {
super.onResume();
if (!hasCamera(myContext)) {
Toast.makeText(myContext, "Sorry, your phone does not have a camera!", Toast.LENGTH_LONG).show();
return;
}
initCamera();
}
@Override
protected void onPause() {
super.onPause();
// when on Pause, release camera in order to be used from other
// applications
releaseCamera();
}
private final int cMaxRecordDurationInMs = 30000;
private final long cMaxFileSizeInBytes = 5000000;
private final int cFrameRate = 20;
private File prRecordedFile;
@SuppressLint("SdCardPath")
private boolean prepareMediaRecorder() {
mediaRecorder = new MediaRecorder();
try {
mCamera.unlock();
} catch (Exception ex) {
return false;
}
// adjust the camera the way you need
mediaRecorder.setCamera(mCamera);
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.DEFAULT);
//
CamcorderProfile cpHigh = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
mediaRecorder.setProfile(cpHigh);
mediaRecorder.setPreviewDisplay(mPreview.getHolder().getSurface());
mediaRecorder.setOutputFile("/sdcard/" + videoNumber + "videocapture_example.mp4");
// set max duration and file size
mediaRecorder.setMaxDuration(600000); // max duration of 10 minutes
mediaRecorder.setMaxFileSize(50000000); // max file size of about 50 MB
try {
mediaRecorder.prepare();
} catch (Exception e) {
releaseMediaRecorder();
e.printStackTrace();
return false; // preparation failed, so the caller must not start the recorder
}
return true;
}
private void releaseMediaRecorder() {
if (mediaRecorder != null) {
mediaRecorder.reset(); // clear recorder configuration
mediaRecorder.release(); // release the recorder object
mediaRecorder = null;
if (mCamera != null) {
mCamera.lock(); // lock camera for later use
}
}
}
/**
* Camera
*/
private void initCamera() {
if (mCamera == null) {
// if the front facing camera does not exist
if (findFrontFacingCamera() < 0) {
Toast.makeText(this, "No front facing camera found.", Toast.LENGTH_LONG).show();
}
mCamera = Camera.open(findBackFacingCamera());
mPreview.refreshCamera(mCamera);
}
onRecording = false;
}
private boolean hasCamera(Context context) {
// check if the device has camera
if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA)) {
hasCamera = true;
} else {
hasCamera = false;
}
return hasCamera;
}
private int findFrontFacingCamera() {
int cameraId = -1;
// Search for the front facing camera
int numberOfCameras = Camera.getNumberOfCameras();
for (int i = 0; i < numberOfCameras; i++) {
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(i, info);
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
cameraId = i;
cameraFront = true;
break;
}
}
this.cameraId = cameraId;
return cameraId;
}
private int findBackFacingCamera() {
int cameraId = -1;
// Search for the back facing camera
// get the number of cameras
int numberOfCameras = Camera.getNumberOfCameras();
// for every camera check
for (int i = 0; i < numberOfCameras; i++) {
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(i, info);
if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
cameraId = i;
cameraFront = false;
break;
}
}
this.cameraId = cameraId;
return cameraId;
}
public void switchCamera() {
// if the camera preview is the front
if (cameraFront) {
int cameraId = findBackFacingCamera();
if (cameraId >= 0) {
// open the backFacingCamera
mCamera = Camera.open(cameraId);
// refresh the preview
mPreview.refreshCamera(mCamera);
}
} else {
int cameraId = findFrontFacingCamera();
if (cameraId >= 0) {
// open the front facing camera
mCamera = Camera.open(cameraId);
// refresh the preview
mPreview.refreshCamera(mCamera);
}
}
}
private void releaseCamera() {
// stop and release camera
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
}
You can achieve this functionality by setting OnLongClickListener() and OnTouchListener() on your record button. Like this:
recordBtn.setOnLongClickListener(recordBtnLCListener);
recordBtn.setOnTouchListener(recordBtnTouchListener);
then :
@Override
public boolean onLongClick(View v) {
ivCancel.setVisibility(View.GONE);
ivDone.setVisibility(View.GONE);
isActionDown = true;
try {
if (isActionDown) {
initRecorder();
if (isActionDown)
prepareRecorder();
}
if (isPrepared && isActionDown) {
mMediaRecorder.start();
isRecording = true;
}
} catch (Exception e) {
e.printStackTrace();
Log.e("onLongPress Error ", e.toString());
}
return true;
}
and :
@Override
public boolean onTouch(View v, MotionEvent event) {
switch (event.getAction()) {
case MotionEvent.ACTION_UP:
isActionDown = false;
try {
if (isRecording) {
if (mMediaRecorder != null) {
mMediaRecorder.stop();
}
isRecording = false;
}
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
break;
}
return false;
}
So, in this way you can record the video in parts: each time you long-press the record button the recording starts, and when you release the button the recording stops. At that point, save the just-recorded part into a temporary folder and remember its path (a sketch of collecting the part paths follows).
Once you are done taking as many parts as you want, combine all of those parts into a single video.
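The merge code below expects a videosPathList holding the path of every recorded part. The original answer never shows how that list is filled; a minimal sketch, assuming each part is written to its own output file whose path is kept in a currentSegmentPath field (both names are hypothetical):
// Hypothetical fields; not part of the original answer.
private final List<String> videosPathList = new ArrayList<>(); // java.util.ArrayList / java.util.List
private String currentSegmentPath; // set wherever setOutputFile(...) is called for the current part

// In onTouch(), inside the MotionEvent.ACTION_UP branch:
if (isRecording) {
    if (mMediaRecorder != null) {
        mMediaRecorder.stop();
    }
    isRecording = false;
    videosPathList.add(currentSegmentPath); // remember this part for mergeVideos()
}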
Here is the code to merge all the video parts saved in the temporary folder:
public void mergeVideos() {
try {
List<Movie> inMovies = new ArrayList<>();
for (int i = 0; i < videosPathList.size(); i++) {
String filePath = videosPathList.get(i);
try {
Movie movie = MovieCreator.build(filePath);
if (movie != null)
inMovies.add(movie);
} catch (Exception e) {
e.printStackTrace();
}
}
List<Track> videoTracks = new LinkedList<Track>();
List<Track> audioTracks = new LinkedList<Track>();
for (Movie m : inMovies) {
for (Track t : m.getTracks()) {
try {
if (t.getHandler().equals("soun")) {
audioTracks.add(t);
}
if (t.getHandler().equals("vide")) {
videoTracks.add(t);
}
} catch (Exception e) {
}
}
}
Movie result = new Movie();
if (audioTracks.size() > 0) {
result.addTrack(new AppendTrack(audioTracks
.toArray(new Track[audioTracks.size()])));
}
if (videoTracks.size() > 0) {
result.addTrack(new AppendTrack(videoTracks
.toArray(new Track[videoTracks.size()])));
}
BasicContainer out = (BasicContainer) new DefaultMp4Builder().build(result);
File f = null;
String finalVideoPath;
try {
f = setUpVideoFile(Environment
.getExternalStorageDirectory()+"/MyApp/videos/");
finalVideoPath = f.getAbsolutePath();
} catch (IOException e) {
e.printStackTrace();
f = null;
finalVideoPath = null;
}
WritableByteChannel fc = new RandomAccessFile(finalVideoPath, "rw").getChannel();
out.writeContainer(fc);
fc.close();
deleteFilesDir(); // delete all the video parts stored in the temporary folder (a sketch of this helper follows after setUpVideoFile below)
} catch (Exception e) {
e.printStackTrace();
progressDialog.dismiss();
finish();
}
}
File setUpVideoFile(String directory) throws IOException {
File videoFile = null;
if (Environment.MEDIA_MOUNTED.equals(Environment
.getExternalStorageState())) {
File storageDir = new File(directory);
if (storageDir != null) {
if (!storageDir.mkdirs()) {
if (!storageDir.exists()) {
Log.d("CameraSample", "failed to create directory");
return null;
}
}
}
videoFile = File.createTempFile("video_"
+ System.currentTimeMillis() + "_",
".mp4", storageDir);
}
return videoFile;
}
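deleteFilesDir() is referenced in mergeVideos() but not shown. A minimal sketch, assuming all parts were written into a single temporary directory whose path is held in a tempDirPath field (a hypothetical name):
// Hypothetical cleanup of the temporary segment folder; tempDirPath is an assumption.
private void deleteFilesDir() {
    File tempDir = new File(tempDirPath);
    File[] parts = tempDir.listFiles();
    if (parts != null) {
        for (File part : parts) {
            if (!part.delete()) {
                Log.w("VideoMerge", "Could not delete segment: " + part.getAbsolutePath());
            }
        }
    }
}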
You can call the mergeVideos() method after stopping the MediaRecorder for the last part.
Hope this code helps you. :)
For merging the videos you have to use the isoparser library, so add the following dependency to your Gradle file:
compile 'com.googlecode.mp4parser:isoparser:1.0.5.4'
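With that artifact on the classpath, the mergeVideos() code above relies on imports along these lines (package names as used in the 1.0.x line of mp4parser; verify them against the exact version you use):
import com.googlecode.mp4parser.BasicContainer;
import com.googlecode.mp4parser.authoring.Movie;
import com.googlecode.mp4parser.authoring.Track;
import com.googlecode.mp4parser.authoring.builder.DefaultMp4Builder;
import com.googlecode.mp4parser.authoring.container.mp4.MovieCreator;
import com.googlecode.mp4parser.authoring.tracks.AppendTrack;

import java.io.RandomAccessFile;
import java.nio.channels.WritableByteChannel;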
This is my code.
public class VideoRecordingActivity extends AppCompatActivity implements View.OnClickListener, SurfaceHolder.Callback {
MediaRecorder recorder;
SurfaceHolder holder;
boolean recording = false;
private boolean isPrepared = false;
int videoNumber = 0;
@Override
public void onCreate(Bundle savedInstanceState) {
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
super.onCreate(savedInstanceState);
recorder = new MediaRecorder();
initRecorder();
setContentView(R.layout.activity_video_introduction_recording);
SurfaceView cameraView = (SurfaceView) findViewById(R.id.ln_body_recording);
holder = cameraView.getHolder();
holder.addCallback(this);
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
cameraView.setClickable(true);
cameraView.setOnClickListener(this);
}
private void initRecorder() {
File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES), "CameraSample");
recorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
recorder.setVideoSource(MediaRecorder.VideoSource.DEFAULT);
CamcorderProfile cpHigh = CamcorderProfile
.get(CamcorderProfile.QUALITY_HIGH);
recorder.setProfile(cpHigh);
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss", Locale.US).format(new Date());
//CGlobal.VIDEO_RECORD_PATH = CGlobal.VIDEO_HOME_PATH + "VID_" + timeStamp;
File mediaFile;
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"VID_" + timeStamp + ".mp4");
recorder.setOutputFile(mediaFile.getAbsolutePath()); // avoid appending ".mp4" twice
recorder.setMaxDuration(50000); // 50 seconds
recorder.setMaxFileSize(5000000); // Approximately 5 megabytes
}
private void prepareRecorder() {
recorder.setPreviewDisplay(holder.getSurface());
try {
recorder.prepare();
isPrepared = true;
} catch (IllegalStateException e) {
e.printStackTrace();
finish();
} catch (IOException e) {
e.printStackTrace();
finish();
}
}
public void onClick(View v) {
if (recording) {
recorder.stop();
recorder.reset(); // clear the configuration so the recorder can be set up again
recording = false;
isPrepared = false;
videoNumber++;
// initRecorder() and prepareRecorder() will run again on the next click
} else {
if (!isPrepared){
initRecorder();
prepareRecorder();
}
recording = true;
recorder.start();
}
}
public void surfaceCreated(SurfaceHolder holder) {
prepareRecorder();
}
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
}
public void surfaceDestroyed(SurfaceHolder holder) {
if (recording) {
recorder.stop();
recording = false;
}
recorder.release();
finish();
}
}

android camera start activity after picture taken

I am creating a small app which previews the camera and saves an image when the screen is clicked.
My problem is that once the image is saved, I want to start a different activity which will show a preview and have some tools on it. But whatever I do, the activity doesn't start.
Here is my code
public class CameraActivity extends Activity {
private SurfaceView preview = null;
private SurfaceHolder previewHolder = null;
private Camera camera = null;
private boolean inPreview = false;
private boolean cameraConfigured = false;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.camera);
preview = (SurfaceView) findViewById(R.id.preview);
Button button = (Button) findViewById(R.id.button_capture);
button.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (camera != null) {
camera.takePicture(null, null, photoCallback);
onPause();
Intent intent = new Intent(CameraActivity.this, PreviewAndSaveActivity.class);
intent.putExtra("image", "");
CameraActivity.this.startActivity(intent);
}
}
});
previewHolder = preview.getHolder();
previewHolder.addCallback(surfaceCallback);
previewHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
@Override
public void onResume() {
super.onResume();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
Camera.CameraInfo info = new Camera.CameraInfo();
for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
Camera.getCameraInfo(i, info);
if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
camera = Camera.open(i);
}
}
}
if (camera == null) {
camera = Camera.open();
}
startPreview();
}
@Override
public void onPause() {
if (camera != null) {
if (inPreview) {
camera.stopPreview();
}
camera.release();
camera = null;
}
inPreview = false;
super.onPause();
}
private Camera.Size getBestPreviewSize(int width, int height,
Camera.Parameters parameters) {
Camera.Size result = null;
for (Camera.Size size : parameters.getSupportedPreviewSizes()) {
if (size.width <= width && size.height <= height) {
if (result == null) {
result = size;
} else {
int resultArea = result.width * result.height;
int newArea = size.width * size.height;
if (newArea > resultArea) {
result = size;
}
}
}
}
return (result);
}
private Camera.Size getSmallestPictureSize(Camera.Parameters parameters) {
Camera.Size result = null;
for (Camera.Size size : parameters.getSupportedPictureSizes()) {
if (result == null) {
result = size;
} else {
int resultArea = result.width * result.height;
int newArea = size.width * size.height;
if (newArea < resultArea) {
result = size;
}
}
}
return (result);
}
private void initPreview(int width, int height) {
if (camera != null && previewHolder.getSurface() != null) {
try {
camera.setPreviewDisplay(previewHolder);
} catch (Throwable t) {
Log.e("PreviewDemo-surfaceCallback",
"Exception in setPreviewDisplay()", t);
Toast.makeText(CameraActivity.this, t.getMessage(),
Toast.LENGTH_LONG).show();
}
if (!cameraConfigured) {
Camera.Parameters parameters = camera.getParameters();
Camera.Size size = getBestPreviewSize(width, height, parameters);
Camera.Size pictureSize = getSmallestPictureSize(parameters);
if (size != null && pictureSize != null) {
parameters.setPreviewSize(size.width, size.height);
parameters.setPictureSize(pictureSize.width,
pictureSize.height);
parameters.setPictureFormat(ImageFormat.JPEG);
parameters.setFlashMode(Camera.Parameters.FLASH_MODE_AUTO);
camera.setParameters(parameters);
cameraConfigured = true;
}
}
}
}
private void startPreview() {
if (cameraConfigured && camera != null) {
camera.startPreview();
inPreview = true;
}
}
SurfaceHolder.Callback surfaceCallback = new SurfaceHolder.Callback() {
public void surfaceCreated(SurfaceHolder holder) {
// no-op -- wait until surfaceChanged()
}
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
initPreview(width, height);
startPreview();
}
public void surfaceDestroyed(SurfaceHolder holder) {
}
};
Camera.PictureCallback photoCallback = new Camera.PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
camera.takePicture(null, null, new PhotoHandler(
getApplicationContext()));
camera.startPreview();
inPreview = true;
}
};
}
And in PhotoHandler after decoding and saving image I want to start activity
public class PhotoHandler implements PictureCallback {
private final Context context;
public PhotoHandler(Context context) {
this.context = context;
}
@Override
public void onPictureTaken(byte[] data, Camera camera) {
File pictureFileDir = getDir();
if (!pictureFileDir.exists() && !pictureFileDir.mkdirs()) {
Log.d("", "Can't create directory to save image.");
Toast.makeText(context, "Can't create directory to save image.",
Toast.LENGTH_LONG).show();
return;
}
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss");
String date = dateFormat.format(new Date());
String photoFile = "pic_" + date + ".jpg";
String pictureName = pictureFileDir.getPath() + File.separator
+ photoFile;
File pictureFile = new File(pictureName);
FileOutputStream fos = null;
try {
fos = new FileOutputStream(pictureFile);
fos.write(data);
Intent intent = new Intent(context, PreviewAndSaveActivity.class);
intent.putExtra("image", pictureName);
context.startActivity(intent);
} catch (Exception error) {
Toast.makeText(context, "Image could not be saved.",
Toast.LENGTH_LONG).show();
} finally {
if (fos != null) {
try {
fos.flush();
fos.close();
} catch (IOException e) {
}
}
}
}
private File getDir() {
File sdDir = Environment
.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES);
return new File(sdDir, "dir");
}
}
Any suggestions on what I am doing wrong?
Try this code (replace your button click listener):
button.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (camera != null) {
camera.takePicture(null, null, photoCallback);
onPause();
Intent intent = new Intent(CameraActivity.this, PreviewAndSaveActivity.class);
intent.putExtra("image", "");
CameraActivity.this.startActivityForResult(intent, 100);
}
}
});
In your CameraActivity, add this callback method to redirect to another activity after the image is saved:
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == 100) {
// start you new activity from here...
}
super.onActivityResult(requestCode, resultCode, data);
}
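Note that onActivityResult() with requestCode 100 is only reached if PreviewAndSaveActivity reports a result before finishing. A minimal sketch of that side, assuming a savedImagePath variable holds the stored image's path (the "image" extra name matches the question; everything else is illustrative):
// Inside PreviewAndSaveActivity, once the preview/editing step is done:
Intent result = new Intent();
result.putExtra("image", savedImagePath); // savedImagePath is a placeholder
setResult(RESULT_OK, result);
finish(); // control returns to CameraActivity.onActivityResult()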
