How to get single preview frame in Camera2 API Android 5.0? - android

I'm trying to get a preview frame for QR code scanning functionality using the Camera2 API. With the old Camera API it was as easy as:
android.hardware.Camera mCamera;
...
mCamera.setPreviewCallback(new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
// will be invoked for every preview frame in addition to displaying them on the screen
}
});
However, I can't find a way to achieve that using the new Camera2 API. I'd like to receive multiple frames that I can work on; ideally I'd receive a byte array as in the old API. Any ideas how to do that?

A little late but better than never:
Usually a TextureView is used to display the camera preview. You can use TextureView.SurfaceTextureListener to get a callback every time the surface updates, and TextureView provides a getBitmap(Bitmap) method that copies the current preview frame into a bitmap the same size as the TextureView.
You can use this Google sample as a starting point. Simply update the surfaceTextureListener as shown here:
private val surfaceTextureListener = object : TextureView.SurfaceTextureListener {
override fun onSurfaceTextureAvailable(texture: SurfaceTexture, width: Int, height: Int) {
openCamera(width, height)
}
override fun onSurfaceTextureSizeChanged(texture: SurfaceTexture, width: Int, height: Int) {
configureTransform(width, height)
}
override fun onSurfaceTextureDestroyed(texture: SurfaceTexture) = true
override fun onSurfaceTextureUpdated(texture: SurfaceTexture) {
// Start changes
// Get the bitmap
val frame = Bitmap.createBitmap(textureView.width, textureView.height, Bitmap.Config.ARGB_8888)
textureView.getBitmap(frame)
// Do whatever you like with the frame
frameProcessor?.processFrame(frame)
// End changes
}
}
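Note that TextureView.getBitmap() performs a pixel copy on every call and can be slow at preview rates. If you need the raw bytes of every frame rather than a Bitmap (as the question asks), the usual Camera2 analogue of setPreviewCallback is to add an ImageReader as a second target of the repeating preview request. A minimal Java sketch, assuming an already-open CameraDevice (camera), a preview Surface (previewSurface), a background Handler (handler), and a preview size (width, height) chosen from the StreamConfigurationMap; all of these names are placeholders:
ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.YUV_420_888, 2);
reader.setOnImageAvailableListener(r -> {
    Image image = r.acquireLatestImage();
    if (image == null) return;          // no frame ready yet
    try {
        // Plane 0 is the luminance (Y) plane - enough for most QR decoders
        ByteBuffer y = image.getPlanes()[0].getBuffer();
        byte[] data = new byte[y.remaining()];
        y.get(data);
        // hand "data" to your QR decoding here
    } finally {
        image.close();                  // always close, or the reader stalls at maxImages
    }
}, handler);
final CaptureRequest.Builder builder = camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
builder.addTarget(previewSurface);
builder.addTarget(reader.getSurface());
camera.createCaptureSession(Arrays.asList(previewSurface, reader.getSurface()),
        new CameraCaptureSession.StateCallback() {
            @Override
            public void onConfigured(CameraCaptureSession session) {
                try {
                    // deliver every preview frame to the ImageReader as well
                    session.setRepeatingRequest(builder.build(), null, handler);
                } catch (CameraAccessException e) {
                    e.printStackTrace();
                }
            }
            @Override
            public void onConfigureFailed(CameraCaptureSession session) {
            }
        }, handler);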

Use the code below to do so.
CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraDevice.getId());
Size[] jpegSizes = null;
if (characteristics != null) {
jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.JPEG);
}
int width = 480;//480x320
int height = 320;
if (jpegSizes != null && 0 < jpegSizes.length) {
width = jpegSizes[0].getWidth();
height = jpegSizes[0].getHeight();
}
ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
List<Surface> outputSurfaces = new ArrayList<Surface>(2);
outputSurfaces.add(reader.getSurface());
outputSurfaces.add(new Surface(textureView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
// Orientation
int rotation = ((Activity) context).getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
final File file = getFileDir();
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = null;
try {
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
save(bytes);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
if (image != null) {
image.close();
}
}
}
private void save(byte[] bytes) throws IOException {
OutputStream output = null;
try {
output = new FileOutputStream(file);
output.write(bytes);
} finally {
if (null != output) {
output.close();
}
}
}
};
reader.setOnImageAvailableListener(readerListener, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
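As posted, this snippet wires up the reader, the surfaces, and the request, but never actually creates a session or submits the capture. A minimal completion using the same variable names (the wiring here is an assumption about the surrounding code):
cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
    @Override
    public void onConfigured(CameraCaptureSession session) {
        try {
            // one-shot still capture into the ImageReader
            session.capture(captureBuilder.build(), null, mBackgroundHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }
    @Override
    public void onConfigureFailed(CameraCaptureSession session) {
    }
}, mBackgroundHandler);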

Related

No TextureView photo saved despite pressing camera button

I am making a custom camera using the Camera2 API and a TextureView.
When saving pictures from the TextureView, the following issue occurs: sometimes the picture is not saved and the error below appears. I have attached the log as requested in the comments. Any help solving this problem would be appreciated.
protected void takePicture() {
if (null == cameraDevice) {
Log.e(TAG, "mCameraDevice is null, return");
return;
}
try {
Size[] jpegSizes = null;
CameraManager cameraManager = (CameraManager) getActivity().getSystemService(Context.CAMERA_SERVICE);
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map != null) {
jpegSizes = map.getOutputSizes(ImageFormat.JPEG);
Log.d("TEST", "map != null " + jpegSizes.length);
}
int width = 1280;
int height =960;
if (jpegSizes != null && 0 < jpegSizes.length) {
for (int i = 0 ; i < jpegSizes.length; i++) {
Log.d("TEST", "getHeight = " + jpegSizes[i].getHeight() + ", getWidth = " +
jpegSizes[i].getWidth());
}
//width = jpegSizes[0].getWidth();
width = 1440;
//height = jpegSizes[0].getHeight();
height = 1080;
}
ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
List<Surface> outputSurfaces = new ArrayList<Surface>(2);
outputSurfaces.add(reader.getSurface());
outputSurfaces.add(new Surface(textureView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
captureBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
// Orientation
int rotation = ((Activity) getActivity()).getWindowManager().getDefaultDisplay().getRotation();
int sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
int surfaceRotation = ORIENTATIONS.get(rotation);
int jpegOrientation = (surfaceRotation + sensorOrientation + 270) % 360;
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, jpegOrientation);
Date date = new Date();
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy_MM_dd_hh_mm_ss");
//final File file = new File(Environment.getExternalStorageDirectory() + "/DCIM", "pic_" + dateFormat.format(date) + ".jpg");
File file = new File (Environment.getExternalStorageDirectory()+ "/Pictures" , "/FitMe");
if (!file.exists()) {
file.mkdirs();
}
String fileName = dateFormat.format(date)+".jpg";
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = null;
try {
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
if(Camera_change_number==1){
BitmapFactory.Options options = new BitmapFactory.Options();
options.inPreferredConfig = Bitmap.Config.ARGB_8888;
Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length, options);
Matrix matrix = new Matrix();
matrix.setScale(-1,1);
matrix.postRotate(90);
bitmap = Bitmap.createBitmap(bitmap, 0, 0, 1440, 1080, matrix, true);
ByteArrayOutputStream stream = new ByteArrayOutputStream();
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, stream);
byte[] front_picture = stream.toByteArray();
bytes = front_picture;
}
save(bytes);
Log.d(TAG, "[junsu] save()");
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
if (image != null) {
image.close();
reader.close();
}
}
}
private void save(byte[] bytes) throws IOException {
OutputStream output = null;
try {
File outputFile = new File(file, fileName);
output = new FileOutputStream(outputFile);
output.write(bytes);
output.flush();
} finally {
if (null != output) {
output.close();
}
}
}
};
HandlerThread thread = new HandlerThread("CameraPicture");
thread.start();
final Handler backgroundHandler = new Handler(thread.getLooper());
reader.setOnImageAvailableListener(readerListener, backgroundHandler);
//final Handler delayPreview = new Handler();
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
Toast.makeText(getActivity(), "Saved:" + file, Toast.LENGTH_SHORT).show();
//delayPreview.postDelayed(mDelayPreviewRunnable, 1000);
//startPreview();
createCameraPreviewSession();
}
};
cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
session.capture(captureBuilder.build(), captureListener, backgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
Log.d(TAG, "onConfigureFailed: ");
}
}, backgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
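Two things in the code above can make saves fail intermittently: acquireLatestImage() may return null (the resulting NullPointerException is not caught, so save() is skipped), and reader.close() inside the listener tears the reader down after the first frame. A guarded listener body might look like this (a sketch):
@Override
public void onImageAvailable(ImageReader reader) {
    Image image = reader.acquireLatestImage();
    if (image == null) {
        return; // no frame ready yet - avoids the NPE
    }
    try {
        ByteBuffer buffer = image.getPlanes()[0].getBuffer();
        byte[] bytes = new byte[buffer.remaining()];
        buffer.get(bytes);
        save(bytes);
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        image.close(); // close the Image, but keep the reader alive for the next capture
    }
}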

I use android.hardware.camera2, CDS enable 0 num_str 1 applied 0 stream_id 5

I'm using camera2 to record video, and this error occurred.
ImageReader
mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),ImageFormat.YUV_420_888, 5);
mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, null);
OnImageAvailableListener
private final ImageReader.OnImageAvailableListener mOnImageAvailableListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Log.d("---------->", "mOnPreviewAvailableListener" + reader);
try (Image image = reader.acquireNextImage()) {
Image.Plane[] planes = image.getPlanes();
}
}
};
PreviewRequestBuilder
mPreviewRequestBuilder.addTarget(mImageReader.getSurface());
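Passing null as the handler in setOnImageAvailableListener delivers the callbacks on the current thread's Looper; with a five-deep, full-resolution YUV_420_888 reader that can easily back up the thread driving the recording. A common pattern (a sketch; the thread name is arbitrary) is to give the reader its own background thread:
HandlerThread imageThread = new HandlerThread("ImageListener");
imageThread.start();
Handler imageHandler = new Handler(imageThread.getLooper());
mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, imageHandler);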

How to correctly receive camera images from onImageAvailable?

I am trying to achieve video streaming from one Android device to another. To do this I want to obtain frames from the camera, send them through sockets, and show them in an ImageView on the other phone. The problem is that I have to use ImageReader with a specific format, YUV_420_888. When I try to create a bitmap from the bytes of this image, the only config that works is ALPHA_8 (other configs throw exceptions saying the buffer is not big enough for the pixels), which is black-and-white and wrongly oriented. I would like to know if there is a way to obtain the correct version directly, or to convert it so I can make a proper bitmap ready to show in an ImageView. Here's some code:
public class MainActivity extends AppCompatActivity
{
private ImageReader mImageReader;
@Override
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
this.imageView = (ImageView) findViewById(R.id.image);
this.handler = new Handler(){
@Override
public void handleMessage(Message msg)
{
if(msg.what == Worker.PROCESSED_IMAGE)
{
imageView.setImageBitmap((Bitmap) msg.obj);
}
}
};
}
private void openCamera()
{
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try
{
String cameraId = manager.getCameraIdList()[0];
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
mPreviewSize = map.getOutputSizes(SurfaceTexture.class)[0];
mImageReader = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(), ImageFormat.YUV_420_888, 50);
mImageReader.setOnImageAvailableListener(new Worker(this, this.handler, mPreviewSize.getWidth(), mPreviewSize.getHeight()), new Handler());
manager.openCamera(cameraId, mStateCallback, null);
}
catch(CameraAccessException e)
{
e.printStackTrace();
}
}
}
public class Worker implements ImageReader.OnImageAvailableListener
{
@Override
public void onImageAvailable(ImageReader imageReader)
{
Image image = imageReader.acquireLatestImage();
Image.Plane plane = image.getPlanes()[0];
ByteBuffer buffer = plane.getBuffer();
Bitmap bm = Bitmap.createBitmap(width, height, Bitmap.Config.ALPHA_8);
bm.copyPixelsFromBuffer(buffer);
image.close();
handler.obtainMessage(PROCESSED_IMAGE, bm).sendToTarget();
}
Almost correct, try to use this code instead:
@Override
public void onImageAvailable(ImageReader reader) {
Image image = null;
try {
image = reader.acquireLatestImage();
// Log.d(TAG, String.format(Locale.getDefault(), "image w = %d; h = %d", image.getWidth(), image.getHeight()));
Image.Plane[] planes = image.getPlanes();
ByteBuffer buffer = planes[0].getBuffer();
buffer.rewind();
byte[] data = new byte[buffer.capacity()];
buffer.get(data);
Bitmap bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
//use your bitmap
} catch (Exception e) {
e.printStackTrace();
}
if (image != null) {
image.close();
}
}
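Note that BitmapFactory.decodeByteArray only succeeds if the reader was created with ImageFormat.JPEG; plane 0 of a YUV_420_888 image is raw luminance data, not an encoded stream. A common conversion for YUV_420_888 goes through YuvImage (a sketch; it assumes the widespread NV21-compatible layout where plane 2 holds interleaved V/U with a pixel stride of 2 and no row padding, which holds on most devices but is not guaranteed by the API):
private Bitmap yuvToBitmap(Image image) {
    ByteBuffer yBuf = image.getPlanes()[0].getBuffer();   // Y plane
    ByteBuffer vuBuf = image.getPlanes()[2].getBuffer();  // interleaved V/U on most devices
    int ySize = yBuf.remaining();
    int vuSize = vuBuf.remaining();
    byte[] nv21 = new byte[ySize + vuSize];
    yBuf.get(nv21, 0, ySize);
    vuBuf.get(nv21, ySize, vuSize);
    YuvImage yuv = new YuvImage(nv21, ImageFormat.NV21, image.getWidth(), image.getHeight(), null);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    yuv.compressToJpeg(new Rect(0, 0, image.getWidth(), image.getHeight()), 90, out);
    byte[] jpeg = out.toByteArray();
    return BitmapFactory.decodeByteArray(jpeg, 0, jpeg.length);
}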
I used a helper function to convert an Image into a Bitmap:
Preview2.kt#image -> bitmap
Preview.kt#yuvToBitmap
protected fun yuvToBitmapFaster(bytes: ByteArray, w: Int, h: Int): Bitmap {
    // rs, yuvToRgbIntrinsic, yuvType, rgbaType, `in` and out are class fields:
    // a RenderScript context and a YUV-to-RGB intrinsic, created once and reused.
    if (yuvType == null) {
        yuvType = Type.Builder(rs, Element.U8(rs)).setX(bytes.size)
        `in` = Allocation.createTyped(rs, yuvType!!.create(), Allocation.USAGE_SCRIPT)
        rgbaType = Type.Builder(rs, Element.RGBA_8888(rs)).setX(w).setY(h)
        out = Allocation.createTyped(rs, rgbaType?.create(), Allocation.USAGE_SCRIPT)
    }
    `in`?.copyFrom(bytes)              // upload the YUV bytes
    yuvToRgbIntrinsic!!.setInput(`in`)
    yuvToRgbIntrinsic!!.forEach(out)   // run the YUV -> RGBA conversion
    val bmp = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888)
    out?.copyTo(bmp)                   // download the result into the bitmap
    return bmp
}
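Be aware that RenderScript (which the intrinsic above relies on) has been deprecated since Android 12, so for new code a YuvImage-based conversion like the Java sketch above, or a GPU/libyuv path, is the safer choice.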
By the way, Google has an ML Kit example that gets camera images and processes them: see VisionProcessorBase.kt and the accompanying tutorial.

Android background video recording

I am developing a recording service for a custom Android platform. When the application starts it will start recording a video in the background. Unfortunately this application runs on hardware that prevents me from using video recording.
My solution to this problem is to take images and hold them in a circular buffer, when an event happens it will stop feeding images to the buffer and place them together in a video.
The problem I am encountering is that when I save the images to video I just get a noisy green screen.
I based my code on this example: Using MediaCodec to save series of images as Video
Note: I cannot use MediaMuxer either; I am developing for API level < 18.
I will guide you through the steps I take. On creation of the service I simply open the camera, set the preview on a SurfaceTexture, and add images to my buffer when the PreviewCallback is called.
private Camera mCamera;
private String mTimeStamp;
SurfaceTexture mSurfaceTexture;
private CircularBuffer<ByteArrayOutputStream> mCircularBuffer;
private static final int MAX_BUFFER_SIZE = 200;
private int mWidth = 720;
private int mHeight = 480;
@Override
public void onCreate() {
try {
mCircularBuffer = new CircularBuffer(MAX_BUFFER_SIZE);
mTimeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
mSurfaceTexture = new SurfaceTexture(10);
mCamera = getCameraInstance();
Parameters parameters = mCamera.getParameters();
parameters.setJpegQuality(20);
parameters.setPictureSize(mWidth, mHeight);
mCamera.setParameters(parameters);
mCamera.setPreviewTexture(mSurfaceTexture);
mCamera.startPreview();
mCamera.setPreviewCallback(mPreviewCallback);
} catch (IOException e) {
Log.d(TAG, "IOException: " + e.getMessage());
} catch (Exception e) {
Log.d(TAG, "Exception: " + e.getMessage());
}
}
private PreviewCallback mPreviewCallback = new PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
ByteArrayOutputStream out = new ByteArrayOutputStream();
YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, mWidth, mHeight, null);
Rect rectangle = new Rect(0, 0, mWidth, mHeight);
yuvImage.compressToJpeg(rectangle, 20, out);
mCircularBuffer.add(out);
}
};
All of this works; when I convert the byte arrays to JPEG at this point, they are all valid image files.
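CircularBuffer is the poster's own class and is not shown; a minimal ring buffer exposing the add and getData calls the code relies on could look like this (a sketch, an assumption about its intended behavior):
public class CircularBuffer<T> {
    private final Object[] items;
    private int next = 0;   // next write position
    private int count = 0;  // number of filled slots
    public CircularBuffer(int capacity) {
        items = new Object[capacity];
    }
    public synchronized void add(T item) {
        items[next] = item;                // overwrites the oldest entry once full
        next = (next + 1) % items.length;
        if (count < items.length) count++;
    }
    @SuppressWarnings("unchecked")
    public synchronized T getData(int i) {
        // i = 0 is the oldest buffered item, i = count - 1 the newest
        return (T) items[(next - count + i + 2 * items.length) % items.length];
    }
}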
Now when an event happens, the service is destroyed and the last 200 images need to be placed one after another and converted to MP4. I do this by first saving them to raw H.264, based on the code in the link above, and then converting that file to MP4 using mp4parser.
@Override
public void onDestroy() {
super.onDestroy();
mCamera.stopPreview();
saveFileToH264("video/avc");
convertH264ToMP4();
}
private void saveFileToH264(String MIMETYPE) {
MediaCodec codec = MediaCodec.createEncoderByType(MIMETYPE);
MediaFormat mediaFormat = null;
int height = mCamera.getParameters().getPictureSize().height;
int width = mCamera.getParameters().getPictureSize().width;
Log.d(TAG, height + ", " + width);
mediaFormat = MediaFormat.createVideoFormat(MIMETYPE, width, height);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 1000000);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 15);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10);
codec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
codec.start();
ByteBuffer[] inputBuffers = codec.getInputBuffers();
ByteBuffer[] outputBuffers = codec.getOutputBuffers();
boolean sawInputEOS = false;
int inputBufferIndex = -1, outputBufferIndex = -1;
BufferInfo info = null;
try {
File file = new File("/sdcard/output.h264");
FileOutputStream fstream2 = new FileOutputStream(file);
DataOutputStream dos = new DataOutputStream(fstream2);
// loop through buffer and get image output streams
for (int i = 0; i < MAX_BUFFER_SIZE; i++) {
ByteArrayOutputStream out = mCircularBuffer.getData(i);
byte[] dat = out.toByteArray();
long WAITTIME = 50;
inputBufferIndex = codec.dequeueInputBuffer(WAITTIME);
int bytesread = MAX_BUFFER_SIZE - 1 - i;
int presentationTime = 0;
if (bytesread <= 0)
sawInputEOS = true;
if (inputBufferIndex >= 0) {
if (!sawInputEOS) {
int samplesiz = dat.length;
inputBuffers[inputBufferIndex].put(dat);
codec.queueInputBuffer(inputBufferIndex, 0, samplesiz, presentationTime, 0);
presentationTime += 100;
info = new BufferInfo();
outputBufferIndex = codec.dequeueOutputBuffer(info, WAITTIME);
Log.i("BATA", "outputBufferIndex=" + outputBufferIndex);
if (outputBufferIndex >= 0) {
byte[] array = new byte[info.size];
outputBuffers[outputBufferIndex].get(array);
if (array != null) {
try {
dos.write(array);
} catch (IOException e) {
e.printStackTrace();
}
}
codec.releaseOutputBuffer(outputBufferIndex, false);
inputBuffers[inputBufferIndex].clear();
outputBuffers[outputBufferIndex].clear();
if (sawInputEOS)
break;
}
} else {
codec.queueInputBuffer(inputBufferIndex, 0, 0, presentationTime, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
info = new BufferInfo();
outputBufferIndex = codec.dequeueOutputBuffer(info, WAITTIME);
if (outputBufferIndex >= 0) {
byte[] array = new byte[info.size];
outputBuffers[outputBufferIndex].get(array);
if (array != null) {
try {
dos.write(array);
} catch (IOException e) {
e.printStackTrace();
}
}
codec.releaseOutputBuffer(outputBufferIndex, false);
inputBuffers[inputBufferIndex].clear();
outputBuffers[outputBufferIndex].clear();
break;
}
}
}
}
codec.flush();
try {
fstream2.close();
dos.flush();
dos.close();
} catch (IOException e) {
e.printStackTrace();
}
codec.stop();
codec.release();
codec = null;
} catch (FileNotFoundException e) {
Log.d(TAG, "File not found: " + e.getMessage());
} catch (Exception e) {
Log.d(TAG, "Exception: " + e.getMessage());
}
}
private void convertH264ToMP4() {
try {
DataSource videoFile = new FileDataSourceImpl("/sdcard/output.h264");
H264TrackImpl h264Track = new H264TrackImpl(videoFile, "eng", 5, 1);
// 5 fps; you can play with timescale and timetick to get non-integer fps, e.g. 23.976 is 24000/1001
Movie movie = new Movie();
movie.addTrack(h264Track);
Container out = new DefaultMp4Builder().build(movie);
FileOutputStream fos = new FileOutputStream(new File("/sdcard/output.mp4"));
out.writeContainer(fos.getChannel());
fos.flush();
fos.close();
Log.d(TAG, "Video saved to sdcard");
} catch (Exception e) {
Log.d(TAG, "No file was saved");
}
}
I'm pretty sure the problem is in the saveFileToH264 code. I've read in a post, on the link provided above, that this is probably a stride and/or alignment issue. However, I have no experience with encoding/decoding, so I'm not sure how to solve it. If anyone could help, that would be greatly appreciated!
Note: I know the code is not optimal and I still need to add more checks and whatnot, but I first want to get a working video out of this.
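One thing worth checking beyond stride: the circular buffer stores JPEG-compressed frames (compressToJpeg in the preview callback), but the encoder is configured for raw COLOR_FormatYUV420SemiPlanar input, and feeding compressed bytes to a raw-input encoder typically produces exactly this kind of green noise. Buffering the raw NV21 preview bytes instead, and swapping them into the NV12 (semi-planar) chroma order the encoder expects, could be the fix (a sketch, not verified on this hardware):
// NV21 (Y plane + interleaved V/U) -> NV12 (Y plane + interleaved U/V)
private static byte[] nv21ToNv12(byte[] nv21, int width, int height) {
    byte[] nv12 = new byte[nv21.length];
    int ySize = width * height;
    System.arraycopy(nv21, 0, nv12, 0, ySize); // luma plane is identical
    for (int i = ySize; i < nv21.length - 1; i += 2) {
        nv12[i] = nv21[i + 1];     // U comes second in NV21
        nv12[i + 1] = nv21[i];     // V comes first in NV21
    }
    return nv12;
}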

Saving a ZBar barcode as an image

I want to use a barcode scanner in my application, and I am using the ZBar library.
I can scan barcodes, but I also want to save the scanned image to the SD card.
So far I am able to capture the image and save it to SD, but when I try to open it I get a broken-image error and cannot display it.
What I am using is:
private final Camera.PreviewCallback saveImage = new Camera.PreviewCallback()
{
@Override
public void onPreviewFrame(byte[] data, Camera camera)
{
mCamera.setPreviewCallback(null);
String path = Environment.getExternalStorageDirectory() + "/DCIM/mcs_" + timeStamp + ".jpg";
FileOutputStream fos = null;
try
{
fos = new FileOutputStream(path);
fos.write(data);
fos.close();
}
catch(Exception e)
{
}
}
};
PreviewCallback previewCb = new PreviewCallback()
{
public void onPreviewFrame(byte[] data, Camera camera)
{
Camera.Parameters parameters = camera.getParameters();
Size size = parameters.getPreviewSize();
Image barcode = new Image(size.width, size.height);
barcode.setData(data);
barcode = barcode.convert("Y800");
int result = scanner.scanImage(barcode);
if (result != 0)
{
mCamera.setPreviewCallback(saveImage);
mCamera.stopPreview();
SymbolSet syms = scanner.getResults();
for (Symbol sym : syms)
{
Intent intent = new Intent(getApplicationContext(), ScanCodeResult.class);
intent.putExtra("timeStamp", timeStamp);
intent.putExtra("result", sym.getData().toString());
//startActivity(intent);
break;
}
}
}
};
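The broken image most likely happens because onPreviewFrame delivers raw NV21 data, which the saveImage callback writes straight into a .jpg file without encoding it. The frame has to be compressed first, for example via YuvImage as in this callback: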
@Override
public void onPreviewFrame(byte[] data, Camera camera)
{
Size size = camera.getParameters().getPreviewSize(); // get the preview size
final int w = size.width; // width
final int h = size.height;
final YuvImage image = new YuvImage(data, ImageFormat.NV21, w, h, null);
ByteArrayOutputStream os = new ByteArrayOutputStream(data.length);
if(!image.compressToJpeg(new Rect(0, 0, w, h), 100, os)){
return;
}
byte[] tmp = os.toByteArray();
Bitmap bmp = BitmapFactory.decodeByteArray(tmp, 0,tmp.length);
FileHelper fileHelper = new FileHelper();
fileHelper.storeInSD(bmp);
}
