How to set a thumbnail view of the picture gallery in Android

I have developed a camera app using the latest camera2 API. I want to show a thumbnail of the gallery on the screen, like the stock camera in Jelly Bean and ICS. Can anybody help me with how to do that?
XML
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<TextureView
android:id="@+id/texture"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_alignParentStart="true"
android:layout_alignParentTop="true" />
<Button
android:id="@+id/btn_takepicture"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:text="picture" />
</RelativeLayout>
java
public class SecondActivity extends Activity {
private final static String TAG = "Camera2testJ";
private Size mPreviewSize;
private TextureView mTextureView;
private CameraDevice mCameraDevice;
private CaptureRequest.Builder mPreviewBuilder;
private CameraCaptureSession mPreviewSession;
private Button mBtnShot;
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.activity_second);
mTextureView = (TextureView)findViewById(R.id.texture);
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
mBtnShot = (Button)findViewById(R.id.btn_takepicture);
mBtnShot.setOnClickListener(new OnClickListener() {
#Override
public void onClick(View v) {
Log.e(TAG, "mBtnShot clicked");
takePicture();
}
});
}
protected void takePicture() {
Log.e(TAG, "takePicture");
if(null == mCameraDevice) {
Log.e(TAG, "mCameraDevice is null, return");
return;
}
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraDevice.getId());
Size[] jpegSizes = null;
if (characteristics != null) {
jpegSizes = characteristics
.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)
.getOutputSizes(ImageFormat.JPEG);
}
int width = 640;
int height = 480;
if (jpegSizes != null && 0 < jpegSizes.length) {
width = jpegSizes[0].getWidth();
height = jpegSizes[0].getHeight();
}
ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
List<Surface> outputSurfaces = new ArrayList<Surface>(2);
outputSurfaces.add(reader.getSurface());
outputSurfaces.add(new Surface(mTextureView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
// Orientation
int rotation = getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
final File file = new File(Environment.getExternalStorageDirectory()+"/DCIM", "pic.jpg");
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
#Override
public void onImageAvailable(ImageReader reader) {
Image image = null;
try {
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
save(bytes);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
if (image != null) {
image.close();
}
}
}
private void save(byte[] bytes) throws IOException {
OutputStream output = null;
try {
output = new FileOutputStream(file);
output.write(bytes);
} finally {
if (null != output) {
output.close();
}
}
}
};
HandlerThread thread = new HandlerThread("CameraPicture");
thread.start();
final Handler backgroudHandler = new Handler(thread.getLooper());
reader.setOnImageAvailableListener(readerListener, backgroudHandler);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
#Override
public void onCaptureCompleted(CameraCaptureSession session,
CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
Toast.makeText(SecondActivity.this, "Saved:"+file, Toast.LENGTH_SHORT).show();
startPreview();
}
};
mCameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(CameraCaptureSession session) {
try {
session.capture(captureBuilder.build(), captureListener, backgroudHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
#Override
public void onConfigureFailed(CameraCaptureSession session) {
}
}, backgroudHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
#Override
protected void onResume() {
super.onResume();
Log.e(TAG, "onResume");
}
private void openCamera() {
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
Log.e(TAG, "openCamera E");
try {
String cameraId = manager.getCameraIdList()[0];
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
mPreviewSize = map.getOutputSizes(SurfaceTexture.class)[0];
manager.openCamera(cameraId, mStateCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
Log.e(TAG, "openCamera X");
}
private TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener(){
#Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
Log.e(TAG, "onSurfaceTextureAvailable, width="+width+",height="+height);
openCamera();
}
#Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface,
int width, int height) {
Log.e(TAG, "onSurfaceTextureSizeChanged");
}
#Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return false;
}
#Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
//Log.e(TAG, "onSurfaceTextureUpdated");
}
};
private CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
#Override
public void onOpened(CameraDevice camera) {
Log.e(TAG, "onOpened");
mCameraDevice = camera;
startPreview();
}
#Override
public void onDisconnected(CameraDevice camera) {
Log.e(TAG, "onDisconnected");
}
#Override
public void onError(CameraDevice camera, int error) {
Log.e(TAG, "onError");
}
};
#Override
protected void onPause() {
Log.e(TAG, "onPause");
super.onPause();
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
}
protected void startPreview() {
if(null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
Log.e(TAG, "startPreview fail, return");
return;
}
SurfaceTexture texture = mTextureView.getSurfaceTexture();
if(null == texture) {
Log.e(TAG,"texture is null, return");
return;
}
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface surface = new Surface(texture);
try {
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
} catch (CameraAccessException e) {
e.printStackTrace();
}
mPreviewBuilder.addTarget(surface);
try {
mCameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(CameraCaptureSession session) {
mPreviewSession = session;
updatePreview();
}
#Override
public void onConfigureFailed(CameraCaptureSession session) {
Toast.makeText(SecondActivity.this, "onConfigureFailed", Toast.LENGTH_LONG).show();
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
protected void updatePreview() {
if(null == mCameraDevice) {
Log.e(TAG, "updatePreview error, return");
}
mPreviewBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
HandlerThread thread = new HandlerThread("CameraPreview");
thread.start();
Handler backgroundHandler = new Handler(thread.getLooper());
try {
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, backgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
}
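For reference, here is a minimal sketch (not from the original post) of one way to show the last captured photo as a gallery-style thumbnail: after save(bytes) succeeds in onImageAvailable, decode a downsampled bitmap and post it to an ImageView. The mThumbnailView field and its id R.id.thumbnail are assumptions for illustration; you would need to add such an ImageView to the layout and import android.graphics.Bitmap and android.graphics.BitmapFactory.
// Sketch only: show the last saved JPEG as a small thumbnail.
// Assumes an ImageView with android:id="@+id/thumbnail" exists in the layout
// and is stored in a field mThumbnailView; call showThumbnail(file) right after save(bytes).
private void showThumbnail(final File jpegFile) {
    // Decode a scaled-down version instead of the full-resolution JPEG.
    BitmapFactory.Options options = new BitmapFactory.Options();
    options.inSampleSize = 8; // roughly 1/8 of each dimension
    final Bitmap thumb = BitmapFactory.decodeFile(jpegFile.getAbsolutePath(), options);
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            mThumbnailView.setImageBitmap(thumb);
        }
    });
}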

Related

Android textureView stretched

SurfaceTexture preview is stretched! I'm developing a camera2 app, and the preview looks wrong on every device: the preview for both video and photo is stretched.
public class CamActivity extends AppCompatActivity {
private static final int REQUEST_LOCATION = 1;
LocationManager locationManager;
Animation animFadeOut, textAnimFadeOut;
ImageView viewFadeOut;
TextView textFadeOut, Lado;
private TextureView textureView;
private FloatingActionButton btnCapture;
//Check state orientation of output image
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0,0);
}
private String cameraId;
private CameraDevice cameraDevice;
private CameraCaptureSession cameraCaptureSessions;
private CaptureRequest.Builder captureRequestBuilder;
private Size imageDimension;
private ImageReader imageReader;
//Save to FILE
private File file;
private static final int REQUEST_CAMERA_PERMISSION = 200;
private boolean mFlashSupported;
private Handler mBackgroundHandler;
private HandlerThread mBackgroundThread;
CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
#Override
public void onOpened(#NonNull CameraDevice camera) {
cameraDevice = camera;
createCameraPreview();
}
#Override
public void onDisconnected(#NonNull CameraDevice cameraDevice) {
cameraDevice.close();
}
#Override
public void onError(#NonNull CameraDevice cameraDevice, int i) {
cameraDevice.close();
cameraDevice=null;
}
};
#SuppressLint("WrongViewCast")
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_cam);
ActivityCompat.requestPermissions(this, new String[]{android.Manifest.permission.ACCESS_FINE_LOCATION}, REQUEST_LOCATION);
Lado = (TextView)findViewById(R.id.lado_armadilha);
Lado.setText(getIntent().getStringExtra("lado_armadilha"));
viewFadeOut = (ImageView)findViewById(R.id.viewFadeOut);
animFadeOut = AnimationUtils.loadAnimation(getApplicationContext(), R.anim.fade);
viewFadeOut.startAnimation(animFadeOut);
textFadeOut =(TextView)findViewById(R.id.textFadeOut);
textAnimFadeOut = AnimationUtils.loadAnimation(getApplicationContext(),R.anim.fade_text);
textFadeOut.startAnimation(textAnimFadeOut);
textureView = (TextureView)findViewById(R.id.textureView);
//Since Java 1.4 you can use the 'assert' keyword to check whether an expression is true or false
assert textureView != null;
textureView.setSurfaceTextureListener(textureListener);
btnCapture = (FloatingActionButton) findViewById(R.id.btnCapture);
btnCapture.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View view) {
takePicture();
}
});
}
private void takePicture() {
if(cameraDevice == null)
return;
CameraManager manager = (CameraManager)getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraDevice.getId());
Size[] jpegSizes = null;
if(characteristics != null)
jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)
.getOutputSizes(ImageFormat.JPEG);
Log.i("JPEG_SIZE", "" + jpegSizes[0].getWidth());
Log.i("JPEG_SIZE", "" + jpegSizes[0].getHeight());
//Capture image with custom size
int width = jpegSizes[0].getWidth();
int height =jpegSizes[0].getHeight();
if(jpegSizes == null && jpegSizes.length < 0)
{
width = jpegSizes[0].getWidth();
height = jpegSizes[0].getHeight();
}
final ImageReader reader = ImageReader.newInstance(width,height,ImageFormat.JPEG,1);
List<Surface> outputSurface = new ArrayList<>(2);
outputSurface.add(reader.getSurface());
outputSurface.add(new Surface(textureView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
//Check orientation base on device
int rotation = getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION,ORIENTATIONS.get(rotation));
file = new File(getIntent().getStringExtra(MediaStore.EXTRA_OUTPUT));
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
#Override
public void onImageAvailable(ImageReader imageReader) {
Image image = null;
try {
image = reader.acquireNextImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
save(bytes);
}
catch (FileNotFoundException e)
{
e.printStackTrace();
}
catch (IOException e)
{
Log.e("ioexception", e.getMessage());
}
finally {
if(image != null)
image.close();
}
Intent intent = new Intent();
intent.putExtra("Bitmap",file.getAbsolutePath());
setResult(1,intent);
finish();
}
private void save(byte[] bytes) throws IOException {
OutputStream outputStream = null;
try {
outputStream = new FileOutputStream(file);
outputStream.write(bytes);
} finally {
if(outputStream != null)
outputStream.close();
}
}
};
reader.setOnImageAvailableListener(readerListener,mBackgroundHandler);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
#Override
public void onCaptureCompleted(#NonNull CameraCaptureSession session, #NonNull CaptureRequest request, #NonNull TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
Toast.makeText(CamActivity.this, "Saved "+file, Toast.LENGTH_SHORT).show();
}
};
cameraDevice.createCaptureSession(outputSurface, new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(#NonNull CameraCaptureSession cameraCaptureSession) {
try {
cameraCaptureSession.capture(captureBuilder.build(),captureListener,mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
#Override
public void onConfigureFailed(#NonNull CameraCaptureSession cameraCaptureSession) {
}
},mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void createCameraPreview() {
try {
SurfaceTexture texture = textureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(imageDimension.getWidth(), imageDimension.getHeight());
Surface surface = new Surface(texture);
captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureRequestBuilder.addTarget(surface);
cameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(#NonNull CameraCaptureSession cameraCaptureSession) {
if (cameraDevice == null)
return;
cameraCaptureSessions = cameraCaptureSession;
updatePreview();
}
#Override
public void onConfigureFailed(#NonNull CameraCaptureSession cameraCaptureSession) {
Toast.makeText(CamActivity.this, "Changed", Toast.LENGTH_SHORT).show();
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void updatePreview() {
if(cameraDevice == null)
Toast.makeText(this, "Error", Toast.LENGTH_SHORT).show();
captureRequestBuilder.set(CaptureRequest.CONTROL_MODE,CaptureRequest.CONTROL_MODE_AUTO);
try {
cameraCaptureSessions.setRepeatingRequest(captureRequestBuilder.build(),null,mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void openCamera() {
CameraManager manager = (CameraManager)getSystemService(Context.CAMERA_SERVICE);
try {
cameraId = manager.getCameraIdList()[0];
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
assert map != null;
imageDimension = map.getOutputSizes(SurfaceTexture.class)[0];
//Check runtime permission when running on API 23 or higher
if(ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED)
{
ActivityCompat.requestPermissions(this,new String[]{
Manifest.permission.CAMERA,
Manifest.permission.WRITE_EXTERNAL_STORAGE
},REQUEST_CAMERA_PERMISSION);
return;
}
manager.openCamera(cameraId,stateCallback,null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
TextureView.SurfaceTextureListener textureListener = new TextureView.SurfaceTextureListener() {
#Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int i, int i1) {
openCamera();
}
#Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int i, int i1) {
}
#Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
return false;
}
#Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
}
};
#Override
public void onRequestPermissionsResult(int requestCode, #NonNull String[] permissions, #NonNull int[] grantResults) {
if(requestCode == REQUEST_CAMERA_PERMISSION)
{
if(grantResults[0] != PackageManager.PERMISSION_GRANTED)
{
Toast.makeText(this, "You can't use camera without permission", Toast.LENGTH_SHORT).show();
finish();
}
}
}
#Override
protected void onResume() {
super.onResume();
startBackgroundThread();
if(textureView.isAvailable())
openCamera();
else
textureView.setSurfaceTextureListener(textureListener);
}
#Override
protected void onPause() {
stopBackgroundThread();
super.onPause();
}
private void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread= null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("Camera Background");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
}
A TextureView will by default just stretch whatever images you give it into its view dimensions. So if you give it a 1920x1080 buffer (16:9 aspect ratio) but its dimensions are 1600x1200 (4:3), you'll get a vertically squished image shown.
Take a look at the AutoFitTextureView in the Camera2Basic sample from Google.
It tries to ensure that its aspect ratio matches that of the camera, but it does require you to set its layout parameters correctly (and it has to be in a layout container that allows its children some flexibility in sizing).
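For reference, a simplified sketch of the idea behind AutoFitTextureView (modeled on, but not identical to, the Camera2Basic sample; it needs imports for android.content.Context, android.util.AttributeSet and android.view.TextureView):
// Aspect-ratio-preserving TextureView, in the spirit of the Camera2Basic sample.
public class AutoFitTextureView extends TextureView {
    private int mRatioWidth = 0;
    private int mRatioHeight = 0;

    public AutoFitTextureView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    // Call with the chosen preview size, e.g. setAspectRatio(previewSize.getWidth(), previewSize.getHeight()).
    public void setAspectRatio(int width, int height) {
        mRatioWidth = width;
        mRatioHeight = height;
        requestLayout();
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        int width = MeasureSpec.getSize(widthMeasureSpec);
        int height = MeasureSpec.getSize(heightMeasureSpec);
        if (mRatioWidth == 0 || mRatioHeight == 0) {
            setMeasuredDimension(width, height);
        } else if (width < height * mRatioWidth / mRatioHeight) {
            // Width is the limiting dimension: shrink the height to keep the aspect ratio.
            setMeasuredDimension(width, width * mRatioHeight / mRatioWidth);
        } else {
            setMeasuredDimension(height * mRatioWidth / mRatioHeight, height);
        }
    }
}
Using it means replacing the plain TextureView in the layout with this class and calling setAspectRatio() once the preview size has been chosen.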

How to handle camera flash with the Android camera2 API?

I want to let the user select flash off, on, or auto. How should I do that?
With this code, if I enable flash the torch comes on, but when I capture an image the flash goes off, and when I switch cameras the flash also goes off.
If the user enables flash, the flash should fire only at the moment an image is captured and stay off otherwise.
public class MainActivity extends AppCompatActivity {
protected CameraDevice cameraDevice;
protected CameraCaptureSession cameraCaptureSessions;
protected CaptureRequest.Builder captureRequestBuilder;
private boolean deviceHasFlash;
private boolean flashStatus=false;
private String cameraId=CAMERA_BACK;
private Size imageDimension;
private ImageReader imageReader;
private Handler mBackgroundHandler;
private HandlerThread mBackgroundThread;
private static final int PERMISSION_CODE = 200;
public static final String CAMERA_FRONT = "1";
public static final String CAMERA_BACK = "0";
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
ImageView captureImage;
ImageView flash;
ImageView switchCamera;
private TextureView textureView;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
captureImage = findViewById(R.id.btn_camera_shutter);
textureView = (TextureView) findViewById(R.id.textureView);
flash=findViewById(R.id.btn_flash);
switchCamera=findViewById(R.id.btn_camera_switch);
if (checkPermission()) {
setCameraView();
createFolderIfNotExists();
Log.e("DB", "Permission is Given");
} else {
Log.e("DB", "Ask For Permission");
requestPermission();
}
captureImage.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View view) {
takePicture();
}
});
switchCamera.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View view) {
switchCameraFunction();
}
});
flash.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View view) {
deviceHasFlash = getApplication().getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FLASH);
if(!deviceHasFlash){
Toast.makeText(MainActivity.this, "Sorry, you device does not have any camera", Toast.LENGTH_LONG).show();
}
else {
if(flashStatus){
flashOff();
flash.setImageResource(R.drawable.ic_flash_off);
flashStatus=false;
}
else {
flashOn();
flash.setImageResource(R.drawable.ic_flash_on);
flashStatus=true;
}
}
}
});
}
private void switchCameraFunction() {
if (cameraId.equals(CAMERA_FRONT)) {
cameraId = CAMERA_BACK;
closeCamera();
reopenCamera();
} else if (cameraId.equals(CAMERA_BACK)) {
cameraId = CAMERA_FRONT;
closeCamera();
reopenCamera();
}
}
private void reopenCamera() {
if (textureView.isAvailable()) {
Log.e("DB", "reopenCamera if");
openCamera();
} else {
Log.e("DB", "reopenCamera else");
textureView.setSurfaceTextureListener(textureListener);
}
}
private void flashOff() {
captureRequestBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_OFF);
try {
cameraCaptureSessions.setRepeatingRequest(captureRequestBuilder.build(), null, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void flashOn() {
captureRequestBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_TORCH);
try {
cameraCaptureSessions.setRepeatingRequest(captureRequestBuilder.build(), null, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void setCameraView() {
assert textureView != null;
textureView.setSurfaceTextureListener(textureListener);
}
private void createFolderIfNotExists() {
File folder = new File(Environment.getExternalStorageDirectory() +
File.separator + "CrazyCam");
boolean success = true;
if (!folder.exists()) {
success = folder.mkdirs();
if (success) {
Log.e("DB", "Folder Created");
} else {
Log.e("DB", "Something Wrong");
}
} else {
Log.e("DB", "Folder exists");
}
}
private boolean checkPermission() {
int readExternal = ContextCompat.checkSelfPermission(getApplicationContext(), READ_EXTERNAL_STORAGE);
int writeExternal = ContextCompat.checkSelfPermission(getApplicationContext(), WRITE_EXTERNAL_STORAGE);
int camera = ContextCompat.checkSelfPermission(getApplicationContext(), CAMERA);
return camera == PackageManager.PERMISSION_GRANTED && writeExternal == PackageManager.PERMISSION_GRANTED && readExternal == PackageManager.PERMISSION_GRANTED;
}
private void requestPermission() {
ActivityCompat.requestPermissions(this, new String[]{WRITE_EXTERNAL_STORAGE, CAMERA, READ_EXTERNAL_STORAGE}, PERMISSION_CODE);
}
#Override
public void onRequestPermissionsResult(int requestCode, #NonNull String[] permissions, #NonNull int[] grantResults) {
switch (requestCode) {
case PERMISSION_CODE:
if (grantResults.length > 0) {
boolean writeExternal = grantResults[0] == PackageManager.PERMISSION_GRANTED;
boolean cameraAccepted = grantResults[1] == PackageManager.PERMISSION_GRANTED;
boolean readExternal = grantResults[2] == PackageManager.PERMISSION_GRANTED;
if (writeExternal && cameraAccepted && readExternal) {
setCameraView();
createFolderIfNotExists();
Log.e("DB", "Permission is Given");
} else {
Toast.makeText(getApplicationContext(), "All Permission Requeid", Toast.LENGTH_LONG).show();
System.exit(0);
}
}
}
}
private void openCamera() {
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
Log.e("DB", "is camera open");
try {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
assert map != null;
imageDimension = map.getOutputSizes(SurfaceTexture.class)[0];
Log.e("DB","imageDimension===>"+imageDimension);
// Add permission for camera and let user grant the permission
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED && ActivityCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(MainActivity.this, new String[]{Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE}, PERMISSION_CODE);
return;
}
manager.openCamera(cameraId, stateCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
Log.e("DB", "openCamera X");
}
TextureView.SurfaceTextureListener textureListener = new TextureView.SurfaceTextureListener() {
#Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int i, int i1) {
openCamera();
Log.e("DB","onSurfaceTextureAvailable");
}
#Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int i, int i1) {
}
#Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
return false;
}
#Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
}
};
private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
#Override
public void onOpened(CameraDevice camera) {
//This is called when the camera is open
Log.e("DB", "onOpened");
cameraDevice = camera;
createCameraPreview();
}
#Override
public void onDisconnected(CameraDevice cameraDevice) {
cameraDevice.close();
}
#Override
public void onError(#NonNull CameraDevice cameraDevice, int i) {
cameraDevice.close();
cameraDevice = null;
}
};
/* #Override
protected void onDestroy() {
closeCamera();
Log.e("DB","On Destroy Call");
super.onDestroy();
}
*/
private void closeCamera() {
if (null != cameraDevice) {
cameraDevice.close();
cameraDevice = null;
}
if (null != imageReader) {
imageReader.close();
imageReader = null;
}
}
protected void startBackgroundThread() {
mBackgroundThread = new HandlerThread("Camera Background");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
protected void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
#Override
protected void onResume() {
super.onResume();
Log.e("DB", "onResume");
startBackgroundThread();
if (textureView.isAvailable()) {
Log.e("DB", "textureView if");
openCamera();
} else {
Log.e("DB", "textureView else");
textureView.setSurfaceTextureListener(textureListener);
}
}
#Override
protected void onPause() {
Log.e("DB", "onPause");
closeCamera();
stopBackgroundThread();
super.onPause();
}
protected void createCameraPreview() {
try {
SurfaceTexture texture = textureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(imageDimension.getWidth(), imageDimension.getHeight());
Surface surface = new Surface(texture);
captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureRequestBuilder.addTarget(surface);
cameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback(){
#Override
public void onConfigured(#NonNull CameraCaptureSession cameraCaptureSession) {
//The camera is already closed
if (null == cameraDevice) {
return;
}
// When the session is ready, we start displaying the preview.
cameraCaptureSessions = cameraCaptureSession;
updatePreview();
}
#Override
public void onConfigureFailed(#NonNull CameraCaptureSession cameraCaptureSession) {
Toast.makeText(MainActivity.this, "Configuration change", Toast.LENGTH_SHORT).show();
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
protected void updatePreview() {
if(null == cameraDevice) {
Log.e("DB", "updatePreview error, return");
}
captureRequestBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
try {
cameraCaptureSessions.setRepeatingRequest(captureRequestBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
protected void takePicture() {
if(null == cameraDevice) {
Log.e("DB", "cameraDevice is null");
return;
}
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraDevice.getId());
captureRequestBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_TORCH);
Size[] jpegSizes = null;
if (characteristics != null) {
jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.JPEG);
}
int width = 640;
int height = 480;
for(int i=0;i<jpegSizes.length;i++){
Log.e("DB==>","Size:"+jpegSizes[i]);
}
if (jpegSizes != null && 0 < jpegSizes.length) {
width = jpegSizes[0].getWidth();
height = jpegSizes[0].getHeight();
}
ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
List<Surface> outputSurfaces = new ArrayList<Surface>(2);
outputSurfaces.add(reader.getSurface());
outputSurfaces.add(new Surface(textureView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
// Orientation
int rotation = getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
final File folder = new File(Environment.getExternalStorageDirectory() +
File.separator + "CrazyCam");
String fileName = "CrazyCam_" + System.currentTimeMillis() + ".jpg";
final File outFile = new File(folder, fileName);
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
#Override
public void onImageAvailable(ImageReader reader) {
Image image = null;
try {
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
save(bytes);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
if (image != null) {
image.close();
}
}
}
private void save(byte[] bytes) throws IOException {
FileOutputStream outStream = null;
try {
outStream = new FileOutputStream(outFile);
outStream.write(bytes);
outStream.flush();
} finally {
if (null != outStream) {
outStream.close();
}
}
}
};
reader.setOnImageAvailableListener(readerListener, mBackgroundHandler);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
#Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
Toast.makeText(getApplicationContext(), "Image Save at " + folder, Toast.LENGTH_LONG).show();
createCameraPreview();
}
};
cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(CameraCaptureSession session) {
try {
session.capture(captureBuilder.build(), captureListener, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
#Override
public void onConfigureFailed(CameraCaptureSession session) {
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
}
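No answer is reproduced here for this question, but a minimal sketch of one common approach (the same AE-mode technique used in the answers to the related question further down) is to stop setting FLASH_MODE_TORCH on the preview builder and instead apply the user's selection to the still-capture builder inside takePicture(), so the flash fires only for that one capture. flashStatus is the existing field from the code above; applyFlashToCapture(captureBuilder) would be called before createCaptureSession(...):
// Sketch only: apply the user's flash selection to the still-capture request.
private void applyFlashToCapture(CaptureRequest.Builder captureBuilder) {
    if (flashStatus) {
        // Fire the flash for this capture only.
        captureBuilder.set(CaptureRequest.CONTROL_AE_MODE,
                CameraMetadata.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
        captureBuilder.set(CaptureRequest.FLASH_MODE,
                CameraMetadata.FLASH_MODE_SINGLE);
    } else {
        captureBuilder.set(CaptureRequest.CONTROL_AE_MODE,
                CameraMetadata.CONTROL_AE_MODE_ON);
        captureBuilder.set(CaptureRequest.FLASH_MODE,
                CameraMetadata.FLASH_MODE_OFF);
    }
}
For an "auto" option, CONTROL_AE_MODE_ON_AUTO_FLASH can be used instead, as shown in the second answer to the next question.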

Flash mode selected but camera2 won't capture the image with those flash modes

I am working on an application with a manual camera that has its own flash button, which cycles from OFF to ON to AUTO. I call a switch statement from an onClickListener to do this.
private void setFlash() {
switch (mFlashMode) {
case CONTROL_AE_MODE_OFF:
mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON);
useFlashButton.setImageResource(R.drawable.flash_on);
mFlashMode = CONTROL_AE_MODE_ON;
break;
case CONTROL_AE_MODE_ON:
mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON);
useFlashButton.setImageResource(R.drawable.flash_automatic);
mFlashMode = CONTROL_AE_MODE_ON_AUTO_FLASH;
break;
case CONTROL_AE_MODE_ON_AUTO_FLASH:
mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_OFF);
useFlashButton.setImageResource(R.drawable.flash_off);
mFlashMode = CONTROL_AE_MODE_OFF;
break;
}
}
What I need now is for the camera to capture with the appropriate flash behaviour depending on which mode is selected. At the moment, no matter which mode I select, no flash fires.
What can I do?
Here is more code to help.
protected void onImageCaptureClick() {
if (null == mCameraDevice) {
Log.e(TAG, "cameraDevice is null");
return;
}
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraDevice.getId());
Size[] jpegSizes = null;
if (characteristics != null) {
jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.JPEG);
}
int width = 640;
int height = 480;
if (jpegSizes != null && 0 < jpegSizes.length) {
width = jpegSizes[0].getWidth();
height = jpegSizes[0].getHeight();
}
ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
List<Surface> outputSurfaces = new ArrayList<>(2);
outputSurfaces.add(reader.getSurface());
outputSurfaces.add(new Surface(mTextureView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
// Use the same AE and AF modes as the preview.
captureBuilder.set(CaptureRequest.SCALER_CROP_REGION, zoom);
// Orientation
int rotation = getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
final File file = new File(_pictureUri.getPath());
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
#Override
public void onImageAvailable(ImageReader reader) {
Image image = null;
try {
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
save(bytes);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
if (image != null) {
image.close();
}
}
}
private void save(byte[] bytes) throws IOException {
OutputStream output = null;
try {
output = new FileOutputStream(file);
output.write(bytes);
} finally {
if (null != output) {
output.close();
}
}
}
};
reader.setOnImageAvailableListener(readerListener, mBackgroundHandler);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
//byte[] data;
#Override
public void onCaptureCompleted(#NonNull CameraCaptureSession session, #NonNull CaptureRequest request, #NonNull TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
Intent intent = new Intent (CameraActivity.this, CameraReviewPhotoActivity.class);
intent.putExtra (MediaStore.EXTRA_OUTPUT, _pictureUri);
startActivityForResult (intent, CameraActivity.kRequest_Code_Approve_Image);
}
};
mCameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(CameraCaptureSession session) {
try {
session.capture(captureBuilder.build(), captureListener, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
#Override
public void onConfigureFailed(CameraCaptureSession session) {
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void runPrecaptureSequence() {
try {
setFlash();
mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
mState = STATE_WAITING_PRE_CAPTURE;
mCameraCaptureSessions.capture(mCaptureRequestBuilder.build(), mCaptureCallback,
mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
Yes, it works for me. Here is my code:
public class AndroidCameraApi extends AppCompatActivity {
private static final String TAG = "AndroidCameraApi";
private Button takePictureButton;
private TextureView textureView;
private Button flashButton;
private CaptureRequest.Builder captureBuilder;
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
private String cameraId;
protected CameraDevice cameraDevice;
protected CameraCaptureSession cameraCaptureSessions;
protected CaptureRequest captureRequest;
protected CaptureRequest.Builder captureRequestBuilder;
private Size imageDimension;
private ImageReader imageReader;
private File file;
private static final int REQUEST_CAMERA_PERMISSION = 200;
private boolean mFlashSupported;
private boolean flash_on=false;
private Handler mBackgroundHandler;
private HandlerThread mBackgroundThread;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
textureView = (TextureView) findViewById(R.id.texture);
flashButton=(Button)findViewById(R.id.button);
assert textureView != null;
textureView.setSurfaceTextureListener(textureListener);
takePictureButton = (Button) findViewById(R.id.btn_takepicture);
assert takePictureButton != null;
flashButton.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View view) {
if(!flash_on){
flash_on=true;
}else{
flash_on=false;
}
}
});
takePictureButton.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View v) {
takePicture();
}
});
}
TextureView.SurfaceTextureListener textureListener = new TextureView.SurfaceTextureListener() {
#Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
//open your camera here
openCamera();
}
#Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
// Transform your captured image size according to the surface width and height
}
#Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return false;
}
#Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
};
private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
#Override
public void onOpened(CameraDevice camera) {
//This is called when the camera is open
Log.e(TAG, "onOpened");
cameraDevice = camera;
createCameraPreview();
}
#Override
public void onDisconnected(CameraDevice camera) {
cameraDevice.close();
}
#Override
public void onError(CameraDevice camera, int error) {
cameraDevice.close();
cameraDevice = null;
}
};
final CameraCaptureSession.CaptureCallback captureCallbackListener = new CameraCaptureSession.CaptureCallback() {
#Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
Toast.makeText(AndroidCameraApi.this, "Saved:" + file, Toast.LENGTH_SHORT).show();
createCameraPreview();
}
};
protected void startBackgroundThread() {
mBackgroundThread = new HandlerThread("Camera Background");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
protected void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
protected void takePicture() {
if(null == cameraDevice) {
Log.e(TAG, "cameraDevice is null");
return;
}
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraDevice.getId());
Size[] jpegSizes = null;
if (characteristics != null) {
jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.JPEG);
}
int width = 640;
int height = 480;
if (jpegSizes != null && 0 < jpegSizes.length) {
width = jpegSizes[0].getWidth();
height = jpegSizes[0].getHeight();
}
ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
List<Surface> outputSurfaces = new ArrayList<Surface>(2);
outputSurfaces.add(reader.getSurface());
outputSurfaces.add(new Surface(textureView.getSurfaceTexture()));
captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
//THIS IS WHERE YOU HAVE TO PUT YOUR METHOD setFlash(); be sure your switch method works. I use just 2 states, "Flash on" and "Flash off"; you can add "Flash Auto"
if(flash_on){
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
captureBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_SINGLE);
}else{
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON);
captureBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_OFF);
}
int rotation = getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
final File file = new File(Environment.getExternalStorageDirectory()+"/pic.jpg");
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
#Override
public void onImageAvailable(ImageReader reader) {
Image image = null;
try {
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
save(bytes);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
if (image != null) {
image.close();
}
}
}
private void save(byte[] bytes) throws IOException {
OutputStream output = null;
try {
output = new FileOutputStream(file);
output.write(bytes);
} finally {
if (null != output) {
output.close();
}
}
}
};
reader.setOnImageAvailableListener(readerListener, mBackgroundHandler);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
#Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
Toast.makeText(AndroidCameraApi.this, "Saved:" + file, Toast.LENGTH_SHORT).show();
createCameraPreview();
}
};
cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(CameraCaptureSession session) {
try {
session.capture(captureBuilder.build(), captureListener, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
#Override
public void onConfigureFailed(CameraCaptureSession session) {
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
protected void createCameraPreview() {
try {
SurfaceTexture texture = textureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(imageDimension.getWidth(), imageDimension.getHeight());
Surface surface = new Surface(texture);
captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureRequestBuilder.addTarget(surface);
cameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback(){
#Override
public void onConfigured(#NonNull CameraCaptureSession cameraCaptureSession) {
//The camera is already closed
if (null == cameraDevice) {
return;
}
// When the session is ready, we start displaying the preview.
cameraCaptureSessions = cameraCaptureSession;
updatePreview();
}
#Override
public void onConfigureFailed(#NonNull CameraCaptureSession cameraCaptureSession) {
Toast.makeText(AndroidCameraApi.this, "Configuration change", Toast.LENGTH_SHORT).show();
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void openCamera() {
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
Log.e(TAG, "is camera open");
try {
cameraId = manager.getCameraIdList()[0];
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
assert map != null;
imageDimension = map.getOutputSizes(SurfaceTexture.class)[0];
// Add permission for camera and let user grant the permission
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED && ActivityCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(AndroidCameraApi.this, new String[]{Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE}, REQUEST_CAMERA_PERMISSION);
return;
}
manager.openCamera(cameraId, stateCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
Log.e(TAG, "openCamera X");
}
public void updatePreview() {
if(null == cameraDevice) {
Log.e(TAG, "updatePreview error, return");
}
try {
cameraCaptureSessions.setRepeatingRequest(captureRequestBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void closeCamera() {
if (null != cameraDevice) {
cameraDevice.close();
cameraDevice = null;
}
if (null != imageReader) {
imageReader.close();
imageReader = null;
}
}
#Override
public void onRequestPermissionsResult(int requestCode, #NonNull String[] permissions, #NonNull int[] grantResults) {
if (requestCode == REQUEST_CAMERA_PERMISSION) {
if (grantResults[0] == PackageManager.PERMISSION_DENIED) {
// close the app
Toast.makeText(AndroidCameraApi.this, "Sorry!!!, you can't use this app without granting permission", Toast.LENGTH_LONG).show();
finish();
}
}
}
#Override
protected void onResume() {
super.onResume();
Log.e(TAG, "onResume");
startBackgroundThread();
if (textureView.isAvailable()) {
openCamera();
} else {
textureView.setSurfaceTextureListener(textureListener);
}
}
#Override
protected void onPause() {
Log.e(TAG, "onPause");
//closeCamera();
stopBackgroundThread();
super.onPause();
}
#Override
protected void onDestroy() {
closeCamera();
super.onDestroy();
}
}
Try it and maybe you will find what is wrong with your code.
The layout activity_main.xml is included further below.
To turn the flash on you can use this:
mCaptureRequestBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_TORCH);
and to turn the flash off:
mCaptureRequestBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_OFF);
hope it helps.
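As a usage note (a sketch only, using the mCaptureRequestBuilder, mCameraCaptureSessions and mBackgroundHandler fields from the question's code): setting FLASH_MODE on the builder has no effect until the request is resubmitted, so a torch toggle also needs to re-issue the repeating preview request:
// Sketch only: toggle the torch on the live preview.
private void setTorch(boolean enabled) {
    mCaptureRequestBuilder.set(CaptureRequest.FLASH_MODE,
            enabled ? CameraMetadata.FLASH_MODE_TORCH : CameraMetadata.FLASH_MODE_OFF);
    try {
        // The new flash mode only takes effect once the repeating request is replaced.
        mCameraCaptureSessions.setRepeatingRequest(
                mCaptureRequestBuilder.build(), null, mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}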
I think this will work.
First, change your setFlash() method like this:
private void setFlash(CaptureRequest.Builder cuptureBuilder) {
switch (mFlashMode) {
case CONTROL_AE_MODE_OFF:
//Flash ON
cuptureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
cuptureBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_SINGLE);
useFlashButton.setImageResource(R.drawable.flash_on);
mFlashMode = CONTROL_AE_MODE_ON;
break;
case CONTROL_AE_MODE_ON:
//FLASH AUTO
cuptureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
cuptureBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_OFF);
useFlashButton.setImageResource(R.drawable.flash_automatic);
mFlashMode = CONTROL_AE_MODE_ON_AUTO_FLASH;
break;
case CONTROL_AE_MODE_ON_AUTO_FLASH:
//FLASH OFF
cuptureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON);
cuptureBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_OFF);
useFlashButton.setImageResource(R.drawable.flash_off);
mFlashMode = CONTROL_AE_MODE_OFF;
break;
}
}
Then call your setFlash() method like this:
ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
List<Surface> outputSurfaces = new ArrayList<>(2);
outputSurfaces.add(reader.getSurface());
outputSurfaces.add(new Surface(mTextureView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
// Use the same AE and AF modes as the preview.
//***PUT YOUR METHOD setFlash(captureBuilder) HERE***
setFlash(captureBuilder);
captureBuilder.set(CaptureRequest.SCALER_CROP_REGION, zoom);
// Orientation
int rotation = getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
I tested it and it works fine :) Hope it helps.
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical"
>
<TextureView
android:id="#+id/texture"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_alignParentTop="true"/>
<LinearLayout
android:orientation="vertical"
android:layout_width="match_parent"
android:layout_height="fill_parent"
android:gravity="bottom">
<LinearLayout
android:orientation="horizontal"
android:layout_width="match_parent"
android:layout_height="wrap_content">
<LinearLayout
android:orientation="horizontal"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_weight="1"
android:gravity="center_horizontal">
<Button
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:text="flash mode"
android:id="#+id/button"
android:layout_alignParentBottom="true"
android:layout_alignParentEnd="true"
android:layout_weight="1" />
</LinearLayout>
<LinearLayout
android:orientation="horizontal"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_weight="1"
android:gravity="center_horizontal">
<Button
android:id="#+id/btn_takepicture"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:text="#string/take_picture"
android:layout_alignParentBottom="true"
android:layout_alignParentStart="true"
android:layout_weight="1" />
</LinearLayout>
</LinearLayout>
</LinearLayout>
</RelativeLayout>

Poor camera2 video recording performance

I'm attempting to record video via a MediaRecorder and the camera2 API, and I'm having difficulty getting good-quality recordings out of it.
I am also getting a few different errors in my logcat, like "App passed a NULL surface", when starting up the TextureView.
Other than that it seems to work, but the captured videos are zoomed in and have a very low framerate, ~2 fps.
This is the code that I'm working with:
public class VideoTest extends AppCompatActivity {
private TextureView mTextureView;
private CameraDevice mCameraDevice;
private CaptureRequest.Builder mPreviewBuilder;
private CameraCaptureSession mPreviewSession;
private Size mPreviewSize;
private Handler backgroundHandler;
private HandlerThread thread;
private MediaRecorder mMediaRecorder;
private String mVideoPath;
private boolean mIsRecordingVideo;
private static final String TAG = "VideoTest";
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_video_test);
mTextureView = (TextureView) findViewById(R.id.texture);
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
mMediaRecorder = new MediaRecorder();
}
#Override
public void onResume() {
super.onResume();
registerReceiver(buttonReceiver, new IntentFilter("ACTION_PRESSED"));
}
#Override
public void onPause() {
Log.d(TAG, "onPause");
super.onPause();
closeCamera();
stopBackgroundThread();
}
private BroadcastReceiver buttonReceiver = new BroadcastReceiver() {
#Override
public void onReceive(Context context, Intent intent) {
Log.d(TAG, "Got Button Press!");
try {
if (mIsRecordingVideo) {
i2cRequest(IndicatorControlReceiver.INDICATOR_OFF);
stopRecordingVideo();
} else {
i2cRequest(IndicatorControlReceiver.INDICATOR_ON);
startRecordingVideo();
}
} catch (Exception ex) {
Log.d(TAG, "ERROR BLAH CAMERA SUX");
}
}
};
private void openCamera() {
CameraManager camManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
Log.d(TAG, "Opening Camera");
try {
String camId = camManager.getCameraIdList()[0];
CameraCharacteristics cameraChars = camManager.getCameraCharacteristics(camId);
StreamConfigurationMap map = cameraChars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
assert map != null;
mPreviewSize = map.getOutputSizes(SurfaceTexture.class)[14];
camManager.openCamera(camId, cameraStateCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() {
#Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
Log.e(TAG, "onSurfaceTextureAvailable, width="+width+",height="+height);
openCamera();
}
#Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
//Log.e(TAG, "onSurfaceTextureSizeChanged");
}
#Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return true;
}
#Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
//Log.e(TAG, "onSurfaceTextureUpdated");
}
};
private CameraDevice.StateCallback cameraStateCallback = new CameraDevice.StateCallback() {
#Override
public void onOpened(#NonNull CameraDevice camera) {
Log.d(TAG, "onOpened");
mCameraDevice = camera;
startPreview();
}
#Override
public void onDisconnected(#NonNull CameraDevice camera) {
Log.d(TAG, "onDisconnected");
}
#Override
public void onError(#NonNull CameraDevice camera, int error) {
Log.e(TAG, "onError code: " + error);
}
};
private void startPreview() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
Log.e(TAG, "Error Starting Preview. ABORTED!");
return;
}
SurfaceTexture texture = mTextureView.getSurfaceTexture();
if(null == texture) {
Log.e(TAG, "Cannot create texture. ABORTED!");
return;
}
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface surface = new Surface(texture);
try {
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
} catch (CameraAccessException e) {
e.printStackTrace();
}
mPreviewBuilder.addTarget(surface);
try {
mCameraDevice.createCaptureSession(Collections.singletonList(surface), new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(#NonNull CameraCaptureSession session) {
mPreviewSession = session;
updatePreview();
i2cRequest(I2CRequestReceiver.VIDEO_READY);
}
#Override
public void onConfigureFailed(#NonNull CameraCaptureSession session) {
Log.e(TAG, "onConfigureFailed");
i2cRequest(I2CRequestReceiver.E_CAMERA_ERROR);
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void updatePreview() {
if(null == mCameraDevice) {
Log.e(TAG, "Camera Device is Null! ABORT!");
return;
}
/* mPreviewBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
mPreviewBuilder.set(CaptureRequest.CONTROL_AF_MODE,CameraMetadata.CONTROL_AF_MODE_OFF);*/
mPreviewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<>(30000,30000));
thread = new HandlerThread("CameraPreview");
thread.start();
backgroundHandler = new Handler(thread.getLooper());
try {
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, backgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
/**
* Stops the background thread and its {#link Handler}.
*/
private void stopBackgroundThread() {
thread.quitSafely();
try {
thread.join();
thread = null;
backgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private void closePreviewSession() {
if (null != mPreviewSession) {
mPreviewSession.close();
}
}
private void closeCamera(){
closePreviewSession();
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
}
private void setupMediaRecorder() throws IOException {
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
/*
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
*/
CamcorderProfile profile = CamcorderProfile.get(CamcorderProfile.QUALITY_720P);
profile.audioBitRate = 128000;
profile.audioCodec = MediaRecorder.AudioEncoder.AAC;
profile.fileFormat = MediaRecorder.OutputFormat.MPEG_4;
profile.videoCodec = MediaRecorder.VideoEncoder.H264;
profile.videoBitRate = 2048000;
profile.videoFrameRate = 30;
mMediaRecorder.setProfile(profile);
/* mMediaRecorder.setVideoEncodingBitRate(2048000);
mMediaRecorder.setAudioEncodingBitRate(128000);
mMediaRecorder.setVideoSize(1280, 720);
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);*/
if (mVideoPath == null || mVideoPath.isEmpty()) {
mVideoPath = getVideoFilePath();
}
mMediaRecorder.setOutputFile(mVideoPath);
mMediaRecorder.prepare();
}
#SuppressLint("SdCardPath")
private String getVideoFilePath() {
return "/sdcard/LIVE/video/" + System.currentTimeMillis() + ".mp4";
}
Surface recorderSurface;
private void startRecordingVideo() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
Log.e(TAG, "Cannot bind camera, textureView, or previewSize");
return;
}
try {
closePreviewSession();
setupMediaRecorder();
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture!= null;
texture.setDefaultBufferSize(1280, 720);
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
List<Surface> surfaces = new ArrayList<>();
Surface previewSurface = new Surface(texture);
surfaces.add(previewSurface);
mPreviewBuilder.addTarget(previewSurface);
recorderSurface = mMediaRecorder.getSurface();
surfaces.add(recorderSurface);
mPreviewBuilder.addTarget(recorderSurface);
mCameraDevice.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(#NonNull CameraCaptureSession session) {
mPreviewSession = session;
updatePreview();
new Thread(new Runnable() {
#Override
public void run() {
i2cRequest(I2CRequestReceiver.VIDEO_RECORDING);
mIsRecordingVideo = true;
mMediaRecorder.start();
}
}).start();
}
#Override
public void onConfigureFailed(#NonNull CameraCaptureSession session) {
Log.e(TAG, "Capture failed!");
runOnUiThread(new Runnable() {
#Override
public void run() {
i2cRequest(I2CRequestReceiver.E_CAMERA_ERROR);
mIsRecordingVideo = false;
}
});
}
},backgroundHandler);
} catch (IOException | CameraAccessException e) {
e.printStackTrace();
}
}
private void stopRecordingVideo() {
try {
mPreviewSession.abortCaptures();
} catch (CameraAccessException e) {
e.printStackTrace();
}
mIsRecordingVideo = false;
mMediaRecorder.stop();
mMediaRecorder.reset();
Log.d(TAG, "Video saved: " + mVideoPath);
}
private void i2cRequest(String request) {
Intent sendI2cRequest = new Intent();
sendI2cRequest.setAction(I2CRequestReceiver.NOWSPEAK_REQUEST_ACTION);
switch (request) {
case I2CRequestReceiver.VIDEO_READY:
sendI2cRequest.putExtra(I2CRequestReceiver.EXTRA_SPEAK, I2CRequestReceiver.VIDEO_READY);
Log.d(TAG, "VIDEO READY!!");
break;
case I2CRequestReceiver.E_CAMERA_ERROR:
sendI2cRequest.putExtra(I2CRequestReceiver.EXTRA_SPEAK, I2CRequestReceiver.E_CAMERA_ERROR);
Log.d(TAG, "VIDEO ERROR!!");
break;
case IndicatorControlReceiver.INDICATOR_ON:
sendI2cRequest.setAction(IndicatorControlReceiver.INDICATOR_CONTROL_ACTION);
sendI2cRequest.putExtra(IndicatorControlReceiver.EXTRA_INDICATOR, IndicatorControlReceiver.INDICATOR_ON);
break;
case IndicatorControlReceiver.INDICATOR_OFF:
sendI2cRequest.setAction(IndicatorControlReceiver.INDICATOR_CONTROL_ACTION);
sendI2cRequest.putExtra(IndicatorControlReceiver.EXTRA_INDICATOR, IndicatorControlReceiver.INDICATOR_OFF);
break;
}
LocalBroadcastManager.getInstance(this).sendBroadcast(sendI2cRequest);
}
}
CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE takes frame rates in units of FPS, not 1/1000 of FPS.
So try
mPreviewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<>(30,30));
or, even better, pick from the list provided by CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES.
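For example, a minimal sketch of picking a supported range (reusing cameraChars and mPreviewBuilder from the code above; not tested on this particular board):
Range<Integer>[] fpsRanges =
        cameraChars.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
if (fpsRanges != null && fpsRanges.length > 0) {
    Range<Integer> chosen = fpsRanges[0];
    for (Range<Integer> r : fpsRanges) {
        // Prefer a fixed 30/30 range if the device advertises one.
        if (r.getLower() == 30 && r.getUpper() == 30) {
            chosen = r;
            break;
        }
    }
    mPreviewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, chosen);
}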
After talking with the board manufacturer, it seems that there are some issues with their implementation and they are working on fixing them.
I would like to thank @CommonsWare and @EddyTalvala for their expertise in helping me find the issue here.
-Rob

How can I show image taken from camera

I'm trying to write an Android app that works with the camera.
I show a preview of the camera in a TextureView, and that part works. When I press a button, the camera should take a picture and show it in an ImageView on the second half of the screen.
But every time I press the button my app stops working with this error:
android.view.ViewRootImpl$CalledFromWrongThreadException: Only the
original thread that created a view hierarchy can touch its views.
So I looked it up, and it seems I can't change the image of the ImageView the way I'm trying to. But I don't know how else I could solve my problem. Can you give me some advice? Here is my code; the image is set in the ImageReader.OnImageAvailableListener inside takePicture().
public class MainActivityOld extends Activity {
private TextureView mTextureView;
private ImageView mImageView;
private CameraDevice mCameraDevice;
private Size mPreviewSize;
private CaptureRequest.Builder mPreviewBuilder;
private CameraCaptureSession mPreviewSession;
private Button mBtnShot;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//no titlebar
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.activity_main);
mTextureView = (TextureView) findViewById(R.id.texture);
mTextureView.setSurfaceTextureListener(mSurfaceListener);
mImageView = (ImageView) findViewById(R.id.lastPicture);
mBtnShot = (Button) findViewById(R.id.btn_takepicture);
mBtnShot.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View v) {
takePicture();
}
});
}
private void takePicture() {
Log.e("MyTag", "Take picture");
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraDevice.getId());
Size[] jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.JPEG);
int width = jpegSizes[0].getWidth();
int height = jpegSizes[0].getHeight();
ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
List<Surface> outputSurfaces = new ArrayList<Surface>(2);
outputSurfaces.add(reader.getSurface());
outputSurfaces.add(new Surface(mTextureView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener(){
#Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
mImageView.setImageBitmap(bitmap);
}
};
HandlerThread thread = new HandlerThread("CameraPicture");
thread.start();
final Handler backgroundHandler = new Handler(thread.getLooper());
reader.setOnImageAvailableListener(readerListener,backgroundHandler);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
#Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
startPreview();
}
};
mCameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(CameraCaptureSession session) {
try {
session.capture(captureBuilder.build(), captureListener, backgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
#Override
public void onConfigureFailed(CameraCaptureSession session) {
}
}, backgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void openCamera() {
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
String cameraId = manager.getCameraIdList()[0];
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
mPreviewSize = map.getOutputSizes(SurfaceTexture.class)[0];
manager.openCamera(cameraId, mStateCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
Log.e("MyTag", "openCamera");
}
private TextureView.SurfaceTextureListener mSurfaceListener = new TextureView.SurfaceTextureListener() {
#Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
Log.e("myLog", "onSurfaceTextureAvailable, width=" + width + ",height=" + height);
openCamera();
}
#Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
}
#Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return false;
}
#Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
};
private CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
#Override
public void onOpened(CameraDevice camera) {
Log.e("myTag", "onOpened");
mCameraDevice = camera;
startPreview();
}
#Override
public void onDisconnected(CameraDevice camera) {
Log.e("myTag", "onDisconnected");
}
#Override
public void onError(CameraDevice camera, int error) {
Log.e("myTag", "onDisconnected");
}
};
protected void startPreview() {
if (mCameraDevice == null || mTextureView.isAvailable() || mPreviewSize == null) {
Log.e("MyTag", "startPreview failed, still working, so just ignore it");
// return;
}
SurfaceTexture texture = mTextureView.getSurfaceTexture();
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
Surface surface = new Surface(texture);
try {
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
} catch (CameraAccessException e) {
e.printStackTrace();
}
mPreviewBuilder.addTarget(surface);
try {
mCameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback() {
#Override
public void onConfigured(CameraCaptureSession session) {
mPreviewSession = session;
updatePreview();
}
#Override
public void onConfigureFailed(CameraCaptureSession session) {
Toast.makeText(MainActivityOld.this, "onConfigureFailed", Toast.LENGTH_LONG).show();
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
protected void updatePreview() {
mPreviewBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
HandlerThread thread = new HandlerThread("CameraPreview");
thread.start();
Handler backgroundHandler = new Handler(thread.getLooper());
try {
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, backgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
}
As the exception name CalledFromWrongThreadException says, your readerListener runs on another thread, from which you cannot touch your views.
Change your code to:
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener(){
@Override
public void onImageAvailable(final ImageReader reader) {
// Post the view update to the UI thread; only that thread may touch the view hierarchy.
MainActivityOld.this.runOnUiThread(new Runnable() {
@Override
public void run() {
Image image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
mImageView.setImageBitmap(bitmap);
image.close(); // release the buffer, otherwise the ImageReader (maxImages = 1) cannot deliver another image
}
});
}
};
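If decoding the full-size JPEG on the UI thread turns out to be too slow, an alternative sketch (same idea, untested) is to keep the heavy work on the background handler and only post the final setImageBitmap call:
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader reader) {
        // Still on the background handler thread here.
        Image image = reader.acquireLatestImage();
        if (image == null) return;
        ByteBuffer buffer = image.getPlanes()[0].getBuffer();
        byte[] bytes = new byte[buffer.remaining()];
        buffer.get(bytes);
        image.close();
        final Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
        // Only the view update needs the UI thread.
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                mImageView.setImageBitmap(bitmap);
            }
        });
    }
};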
I think you will be better off streaming to a SurfaceView; I'm not sure this can even be done with an ImageView.
Try this tutorial, it should get you going in the right direction,
or this for a more up-to-date example provided by Google.
