No TextureView photo saved despite pressing camera button - Android

I am making a custom camera using the Camera2 API and a TextureView.
When saving pictures from the TextureView, the following issue occurs: sometimes the picture is not saved and the error below appears. I have attached the log output as requested in the comments. Any help solving this problem would be appreciated.
protected void takePicture() {
if (null == cameraDevice) {
Log.e(TAG, "mCameraDevice is null, return");
return;
}
try {
Size[] jpegSizes = null;
CameraManager cameraManager = (CameraManager)
getActivity().getSystemService(Context.CAMERA_SERVICE);
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map =
characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map != null) {
jpegSizes = map.getOutputSizes(ImageFormat.JPEG);
Log.d("TEST", "map != null " + jpegSizes.length);
}
int width = 1280;
int height = 960;
if (jpegSizes != null && 0 < jpegSizes.length) {
for (int i = 0 ; i < jpegSizes.length; i++) {
Log.d("TEST", "getHeight = " + jpegSizes[i].getHeight() + ", getWidth = " +
jpegSizes[i].getWidth());
}
//width = jpegSizes[0].getWidth();
width = 1440;
//height = jpegSizes[0].getHeight();
height = 1080;
}
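// maxImages = 1: each acquired Image must be closed before the reader can deliver another frame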
ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
List<Surface> outputSurfaces = new ArrayList<Surface>(2);
outputSurfaces.add(reader.getSurface());
outputSurfaces.add(new Surface(textureView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder =
cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
captureBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
CameraMetadata.CONTROL_AF_TRIGGER_START);
// Orientation
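// JPEG orientation = display rotation (mapped through ORIENTATIONS) + sensor orientation + 270, mod 360;
// this matches the formula used in Google's Camera2Basic sample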
int rotation = ((Activity)
getActivity()).getWindowManager().getDefaultDisplay().getRotation();
int sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
int surfaceRotation = ORIENTATIONS.get(rotation);
int jpegOrientation = (surfaceRotation + sensorOrientation + 270) % 360;
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, jpegOrientation);
Date date = new Date();
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy_MM_dd_hh_mm_ss");
//final File file = new File(Environment.getExternalStorageDirectory() + "/DCIM", "pic_" + dateFormat.format(date) + ".jpg");
final File file = new File(Environment.getExternalStorageDirectory() + "/Pictures", "/FitMe");
if (!file.exists()) {
file.mkdirs();
}
final String fileName = dateFormat.format(date) + ".jpg";
ImageReader.OnImageAvailableListener readerListener = new
ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = null;
try {
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
if (Camera_change_number == 1) {
BitmapFactory.Options options = new BitmapFactory.Options();
options.inPreferredConfig = Bitmap.Config.ARGB_8888;
Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length,
options);
Matrix matrix = new Matrix();
matrix.setScale(-1,1);
matrix.postRotate(90);
bitmap = Bitmap.createBitmap(bitmap, 0, 0, 1440, 1080, matrix, true);
ByteArrayOutputStream stream = new ByteArrayOutputStream();
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, stream);
byte[] front_picture = stream.toByteArray();
bytes = front_picture;
}
save(bytes);
Log.d(TAG, "[junsu] save()");
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
if (image != null) {
image.close();
reader.close();
}
}
}
private void save(byte[] bytes) throws IOException {
OutputStream output = null;
try {
File outputFile = new File(file, fileName);
output = new FileOutputStream(outputFile);
output.write(bytes);
output.flush();
} finally {
if (null != output) {
output.close();
}
}
}
};
HandlerThread thread = new HandlerThread("CameraPicture");
thread.start();
final Handler backgroundHandler = new Handler(thread.getLooper());
reader.setOnImageAvailableListener(readerListener, backgroundHandler);
//final Handler delayPreview = new Handler();
final CameraCaptureSession.CaptureCallback captureListener = new
CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session,
CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
Toast.makeText(getActivity(), "Saved:" + file, Toast.LENGTH_SHORT).show();
//delayPreview.postDelayed(mDelayPreviewRunnable, 1000);
//startPreview();
createCameraPreviewSession();
}
};
cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
session.capture(captureBuilder.build(), captureListener, backgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
Log.d(TAG, "onConfigureFailed: ");
}
}, backgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}

Related

How can I compress image size before uploading? This is my code

I want to compress the image size before it is uploaded to a WordPress site. I am using the WordPress REST API in my Android app.
// getting images selected from gallery for post and sending them to server
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
// super.onActivityResult(requestCode, resultCode, data);
switch (requestCode) {
case FilePickerConst.REQUEST_CODE_PHOTO:
if (resultCode == Activity.RESULT_OK && data != null) {
imagePaths = new ArrayList<>();
imageRequestCount = 1;
imagePaths.addAll(data.getStringArrayListExtra(FilePickerConst.KEY_SELECTED_MEDIA));
if (SettingsMain.isConnectingToInternet(context)) {
if (imagePaths.size() > 0) {
btnSelectPix.setEnabled(false);
AsyncImageTask asyncImageTask = new AsyncImageTask();
asyncImageTask.execute(imagePaths);
}
}
} else {
btnSelectPix.setEnabled(true);
Toast.makeText(context, settingsMain.getAlertDialogMessage("internetMessage"), Toast.LENGTH_SHORT).show();
}
break;
}
}
private void adforest_uploadImages(List<MultipartBody.Part> parts) {
Log.d("info image parts", parts.toString());
String ad_id = Integer.toString(addId);
RequestBody adID =
RequestBody.create(
okhttp3.MultipartBody.FORM, ad_id);
Log.d("info SendImage", addId + "");
final Call<ResponseBody> req = restService.postUploadImage(adID, parts, UrlController.UploadImageAddHeaders(context));
req.enqueue(new Callback<ResponseBody>() {
@Override
public void onResponse(Call<ResponseBody> call, Response<ResponseBody> response) {
if (response.isSuccessful()) {
Log.v("info Upload", response.toString());
JSONObject responseobj = null;
try {
responseobj = new JSONObject(response.body().string());
Log.d("info UploadImage object", "" + responseobj.getJSONObject("data").toString());
if (responseobj.getBoolean("success")) {
adforest_updateImages(responseobj.getJSONObject("data"));
int selectedImageSize = imagePaths.size();
int totalSize = currentSize + selectedImageSize;
Log.d("info image2", "muImage" + totalSize + "imagePaths" + totalUploadedImages);
if (totalSize == totalUploadedImages) {
adforest_UploadSuccessImage();
imagePaths.clear();
paths.clear();
if (allFile.size() > 0) {
for (File file : allFile) {
if (file.exists()) {
if (file.delete()) {
Log.d("file Deleted :", file.getPath());
} else {
Log.d("file not Deleted :", file.getPath());
}
}
}
}
}
} else {
adforest_UploadFailedImage();
Toast.makeText(context, responseobj.get("message").toString(), Toast.LENGTH_SHORT).show();
}
} catch (JSONException e) {
adforest_UploadFailedImage();
e.printStackTrace();
} catch (IOException e) {
adforest_UploadFailedImage();
e.printStackTrace();
}
btnSelectPix.setEnabled(true);
} else {
adforest_UploadFailedImage();
}
}
@Override
public void onFailure(Call<ResponseBody> call, Throwable t) {
Log.e("info Upload Image Err:", t.toString());
if (t instanceof TimeoutException) {
adforest_UploadFailedImage();
// adforest_requestForImages();
}
if (t instanceof SocketTimeoutException) {
adforest_UploadFailedImage();
// adforest_requestForImages();
//
} else {
adforest_UploadFailedImage();
// adforest_requestForImages();
}
}
});
}
private void adforest_UploadFailedImage() {
progress_bar.setVisibility(View.GONE);
loadingLayout.setVisibility(View.GONE);
Gallary.setVisibility(View.VISIBLE);
Gallary.setCompoundDrawablesWithIntrinsicBounds(R.drawable.ic_check_circle_black_24dp, 0, 0, 0);
Gallary.setText("" + 0);
Gallary.setTextColor(Color.parseColor("#a0a0a0"));
tv_done.setVisibility(View.VISIBLE);
tv_done.setTextColor(Color.parseColor("#ff0000"));
tv_done.setText(progressModel.getFailMessage());
btnSelectPix.setEnabled(true);
Toast.makeText(context, progressModel.getFailMessage(), Toast.LENGTH_SHORT).show();
}
private void adforest_UploadSuccessImage() {
progress_bar.setVisibility(View.GONE);
Gallary.setVisibility(View.VISIBLE);
loadingLayout.setVisibility(View.GONE);
Gallary.setCompoundDrawablesWithIntrinsicBounds(R.drawable.ic_check_circle_green_24dp, 0, 0, 0);
Gallary.setText(totalFiles + "");
tv_done.setText(progressModel.getSuccessMessage());
Toast.makeText(context, progressModel.getSuccessMessage(), Toast.LENGTH_SHORT).show();
btnSelectPix.setEnabled(true);
tv_done.setTextColor(Color.parseColor("#20a406"));
}
private MultipartBody.Part adforestst_prepareFilePart(String fileName, Uri fileUri) {
File finalFile = new File(SettingsMain.getRealPathFromURI(context, fileUri));
allFile.add(finalFile);
// create RequestBody instance from file
RequestBody requestFile =
RequestBody.create(
MediaType.parse(getContentResolver().getType(fileUri)),
finalFile
);
// MultipartBody.Part is used to send also the actual file name
return MultipartBody.Part.createFormData(fileName, finalFile.getName(), requestFile);
}
private File adforest_rotateImage(String path) {
File file = null;
Bitmap bitmap = BitmapFactory.decodeFile(path);
ExifInterface ei = null;
try {
ei = new ExifInterface(path);
} catch (IOException e) {
e.printStackTrace();
}
int orientation = ei.getAttributeInt(ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_UNDEFINED);
Bitmap rotatedBitmap = null;
switch (orientation) {
case ExifInterface.ORIENTATION_ROTATE_90:
rotatedBitmap = rotateImage(bitmap, 90);
break;
case ExifInterface.ORIENTATION_ROTATE_180:
rotatedBitmap = rotateImage(bitmap, 180);
break;
case ExifInterface.ORIENTATION_ROTATE_270:
rotatedBitmap = rotateImage(bitmap, 270);
break;
case ExifInterface.ORIENTATION_NORMAL:
default:
rotatedBitmap = bitmap;
}
file = new File(getRealPathFromURI(getImageUri(rotatedBitmap)));
allFile.add(file);
return file;
}
public Uri getImageUri(Bitmap inImage) {
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
inImage.compress(Bitmap.CompressFormat.JPEG, 100, bytes);
String path = MediaStore.Images.Media.insertImage(getContentResolver(), inImage, "Title", null);
return Uri.parse(path);
}
public String getRealPathFromURI(Uri uri) {
Cursor cursor = getContentResolver().query(uri, null, null, null, null);
cursor.moveToFirst();
int idx = cursor.getColumnIndex(MediaStore.Images.ImageColumns.DATA);
String path = cursor.getString(idx);
cursor.close(); // close the cursor to avoid a resource leak
return path;
}
private class AsyncImageTask extends AsyncTask<ArrayList<String>, Void, List<MultipartBody.Part>> {
ArrayList<String> imaagesLis = null;
boolean checkDimensions = true, checkImageSize;
@SafeVarargs
@Override
protected final List<MultipartBody.Part> doInBackground(ArrayList<String>... params) {
List<MultipartBody.Part> parts = null;
imaagesLis = params[0];
totalFiles = imaagesLis.size();
for (int i = 0; i < imaagesLis.size(); i++) {
parts = new ArrayList<>();
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd_HHmmss");
String currentDateandTime = sdf.format(new Date());
Log.d("info image", currentDateandTime);
checkDimensions = true;
checkImageSize = true;
if (adPostImageModel.getDim_is_show()) {
BitmapFactory.Options options = new BitmapFactory.Options();
options.inJustDecodeBounds = true;
BitmapFactory.decodeFile(imaagesLis.get(i), options);
int imageWidth = options.outWidth;
int imageHeight = options.outHeight;
if (imageHeight > Integer.parseInt(adPostImageModel.getDim_height()) &&
imageWidth > Integer.parseInt(adPostImageModel.getDim_width())) {
checkDimensions = true;
} else {
checkDimensions = false;
runOnUiThread(() -> Toast.makeText(context, adPostImageModel.getDim_height_message(), Toast.LENGTH_SHORT).show());
}
}
File file = new File(imaagesLis.get(i));
long fileSizeInBytes = file.length();
if (fileSizeInBytes > Integer.parseInt(adPostImageModel.getImg_size())) {
checkImageSize = false;
runOnUiThread(() -> Toast.makeText(context, adPostImageModel.getImg_message(), Toast.LENGTH_SHORT).show());
} else {
checkImageSize = true;
}
if (checkImageSize && checkDimensions) {
File finalFile1 = adforest_rotateImage(imaagesLis.get(i));
// File finalFile1 =new File(imaagesLis.get(i));
Uri tempUri = SettingsMain.decodeFile(context, finalFile1);
parts.add(adforestst_prepareFilePart("file" + i, tempUri));
adforest_uploadImages(parts);
}
}
return parts;
}
}
You can use this scale-down function:
public static Bitmap scaleDown(Bitmap your_image, float YOUR_SIZE,boolean filter) { //filter false = Pixelated, true = Smooth
float ratio = Math.min( YOUR_SIZE/ your_image.getWidth(),
YOUR_SIZE/ your_image.getHeight());
int width = Math.round( ratio * your_image.getWidth());
int height = Math.round( ratio * your_image.getHeight());
Bitmap newBitmap = Bitmap.createScaledBitmap(your_image, width, height,
filter);
return newBitmap;
}
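A quick usage sketch (the 1024f bound and the bitmap name are just illustrative):
Bitmap scaled = scaleDown(originalBitmap, 1024f, true); // true = smooth filtering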
You already have the mechanism for compressing in your code:
inImage.compress(Bitmap.CompressFormat.JPEG, 100, bytes);
See the documentation:
https://developer.android.com/reference/android/graphics/Bitmap.html#public-methods_1
Basically, you need to change the 100 to a lower number to compress the image more aggressively.
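For instance, a minimal sketch reusing the inImage bitmap from the snippet above (80 is an arbitrary quality value; lower values produce smaller files at the cost of more compression artifacts):
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
inImage.compress(Bitmap.CompressFormat.JPEG, 80, bytes); // quality range is 0-100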

How can I take a screenshot of an ArFragment?

After hours of searching, I'm finally able to save a screenshot of the ArFragment,
but the problem is that it only saves the current camera image, without the placed 3D object.
How can I get the full screenshot (the current camera image plus the placed 3D object)?
The code I used is below.
ImageButton btn3 = (ImageButton)findViewById(R.id.camera_btn);
btn3.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
onSceneUpdate((FrameTime) frameTime);
Toast.makeText(AR_Activity.this, "Screenshot saved.", Toast.LENGTH_SHORT).show();
}
});
private void onSceneUpdate(FrameTime frameTime) {
try {
Date now = new Date();
android.text.format.DateFormat.format("yyyy-MM-dd_hh:mm:ss", now);
String mPath = Environment.getExternalStorageDirectory().toString() + "/" + now + ".jpg";
Frame currentFrame = arFragment.getArSceneView().getArFrame();
Image currentImage = currentFrame.acquireCameraImage();
int imageFormat = currentImage.getFormat();
if (imageFormat == ImageFormat.YUV_420_888) {
Log.d("ImageFormat", "Image format is YUV_420_888");
}
WriteImageInformation((Image) currentImage, (String) mPath);
} catch (Exception e) {
}
}
private static byte[] NV21toJPEG(byte[] nv21, int width, int height) {
ByteArrayOutputStream out = new ByteArrayOutputStream();
YuvImage yuv = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
yuv.compressToJpeg(new Rect(0, 0, width, height), 100, out);
return out.toByteArray();
}
public static void WriteImageInformation(Image image, String path) {
byte[] data = null;
data = NV21toJPEG(YUV_420_888toNV21(image),
image.getWidth(), image.getHeight());
BufferedOutputStream bos = null;
try {
bos = new BufferedOutputStream(new FileOutputStream(path));
bos.write(data);
bos.flush();
bos.close();
} catch (Throwable e) {
e.printStackTrace();
}
}
private static byte[] YUV_420_888toNV21(Image image) {
byte[] nv21;
ByteBuffer yBuffer = image.getPlanes()[0].getBuffer();
ByteBuffer uBuffer = image.getPlanes()[1].getBuffer();
ByteBuffer vBuffer = image.getPlanes()[2].getBuffer();
int ySize = yBuffer.remaining();
int uSize = uBuffer.remaining();
int vSize = vBuffer.remaining();
nv21 = new byte[ySize + uSize + vSize];
//U and V are swapped
yBuffer.get(nv21, 0, ySize);
vBuffer.get(nv21, ySize, vSize);
uBuffer.get(nv21, ySize + vSize, uSize);
return nv21;
}
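Note that this quick conversion relies on the chroma planes being tightly packed (no row padding) with a pixel stride of 2; that holds on many devices, but a fully general converter would have to honor each plane's rowStride and pixelStride.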
Use PixelCopy; it worked perfectly.
For those who might wonder, I will add my code below.
ImageButton btn3 = (ImageButton)findViewById(R.id.camera_btn);
btn3.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
takePhoto();
}
});
private String generateFilename() {
// Generate a file name based on the current time
String date =
new SimpleDateFormat("yyyyMMddHHmmss", java.util.Locale.getDefault()).format(new Date());
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES) + File.separator + "IM/" + date + "_screenshot.jpg";
}
private void saveBitmapToDisk(Bitmap bitmap, String filename) throws IOException {
// Create the IM directory in the user's gallery and save the Bitmap there in JPEG format
File out = new File(filename);
if (!out.getParentFile().exists()) {
out.getParentFile().mkdirs();
}
try (FileOutputStream outputStream = new FileOutputStream(filename);
ByteArrayOutputStream outputData = new ByteArrayOutputStream()) {
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, outputData);
outputData.writeTo(outputStream);
outputStream.flush();
outputStream.close();
} catch (IOException ex) {
throw new IOException("Failed to save bitmap to disk", ex);
}
}
private void takePhoto(){
// Use PixelCopy to capture the camera image and the placed object into a bitmap
final String filename = generateFilename();
ArSceneView view = arFragment.getArSceneView();
final Bitmap bitmap = Bitmap.createBitmap(view.getWidth(),view.getHeight(),
Bitmap.Config.ARGB_8888);
final HandlerThread handlerThread = new HandlerThread("PixelCopier");
handlerThread.start();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
PixelCopy.request(view, bitmap, (copyResult) -> {
if (copyResult == PixelCopy.SUCCESS) {
try {
saveBitmapToDisk(bitmap, filename);
// Trigger media scanning so the file shows up in the gallery
Uri uri = Uri.parse("file://" + filename);
Intent i = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
i.setData(uri);
sendBroadcast(i);
} catch (IOException e) {
Toast toast = Toast.makeText(AR_Activity.this, e.toString(),
Toast.LENGTH_LONG);
toast.show();
return;
}
Snackbar snackbar = Snackbar.make(findViewById(android.R.id.content),
"Screenshot saved.", Snackbar.LENGTH_LONG);
snackbar.setAction("View in gallery", v -> {
// The saved screenshot can be opened from inside the app
File photoFile = new File(filename);
Uri photoURI = FileProvider.getUriForFile(AR_Activity.this,
AR_Activity.this.getPackageName() + ".ar.codelab.name.provider",
photoFile);
Intent intent = new Intent(Intent.ACTION_VIEW, photoURI);
intent.setDataAndType(photoURI, "image/*");
intent.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION);
startActivity(intent);
});
snackbar.show();
} else {
Toast toast = Toast.makeText(AR_Activity.this,
"Failed to save screenshot!: " + copyResult, Toast.LENGTH_LONG);
toast.show();
}
handlerThread.quitSafely();
}, new Handler(handlerThread.getLooper()));
}
}

Accessing Full Frame Using ZXing on Android

We are developing an Android application and using ZXing to scan QR codes. Is there a way to get the full frame of a decoded QR code using ZXing?
The following method was added to the PlanarYUVLuminanceSource class:
public void saveFrame() {
try {
YuvImage image = new YuvImage(yuvData, ImageFormat.NV21, getWidth(), getHeight(), null);
File file = new File(Environment.getExternalStorageDirectory().getPath() + "/frame.jpeg");
FileOutputStream fos = new FileOutputStream(file);
image.compressToJpeg(new Rect(0, 0, image.getWidth(), image.getHeight()), 100, fos);
fos.close(); // close the stream so the JPEG is fully written to disk
} catch (IOException e) {
Log.e(TAG, "IOException!", e);
}
}
It is called within the DecodeHandler class as below:
private void decode(byte[] data, int width, int height) {
long start = System.currentTimeMillis();
Result rawResult = null;
PlanarYUVLuminanceSource source = CameraManager.get()
.buildLuminanceSource(data, width, height);
BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
try {
rawResult = multiFormatReader.decodeWithState(bitmap);
} catch (ReaderException re) {
// continue
} finally {
multiFormatReader.reset();
}
if (rawResult != null) {
// Don't log the barcode contents for security.
long end = System.currentTimeMillis();
Log.d(TAG, "Found barcode in " + (end - start) + " ms");
Message message = Message.obtain(activity.getHandler(),
R.id.zxinglib_decode_succeeded, rawResult);
Bundle bundle = new Bundle();
bundle.putParcelable(DecodeThread.BARCODE_BITMAP,
source.renderCroppedGreyscaleBitmap());
source.saveFrame();
message.setData(bundle);
message.sendToTarget();
} else {
Message message = Message.obtain(activity.getHandler(),
R.id.zxinglib_decode_failed);
message.sendToTarget();
}
}
Here is the result image:
Finally I found a solution. It is based on the renderCroppedGreyscaleBitmap() method of the PlanarYUVLuminanceSource class. Here is how I changed the decode() method:
private void decode(byte[] data, int width, int height) {
long start = System.currentTimeMillis();
Result rawResult = null;
PlanarYUVLuminanceSource source = CameraManager.get().buildLuminanceSource(data, width, height);
BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
try {
rawResult = multiFormatReader.decodeWithState(bitmap);
} catch (ReaderException re) {
// continue
} finally {
multiFormatReader.reset();
}
if (rawResult != null) {
// Don't log the barcode contents for security.
long end = System.currentTimeMillis();
Log.d(TAG, "Found barcode in " + (end - start) + " ms");
// Grab & save frame
Bitmap wholeBmp = renderGrayScaleBitmap(data, width, height);
if(wholeBmp != null)
saveBitmap(wholeBmp, "frame.png");
else
Log.e(TAG, "Bitmap of frame is empty!");
Message message = Message.obtain(activity.getHandler(), R.id.zxinglib_decode_succeeded, rawResult);
Bundle bundle = new Bundle();
bundle.putParcelable(DecodeThread.BARCODE_BITMAP, source.renderCroppedGreyscaleBitmap());
message.setData(bundle);
message.sendToTarget();
} else {
Message message = Message.obtain(activity.getHandler(), R.id.zxinglib_decode_failed);
message.sendToTarget();
}
}
Create a bitmap from the decoded data:
private Bitmap renderGrayScaleBitmap(byte[] data, int width, int height) {
int[] pixels = new int[width * height];
int inputOffset = width;
for (int y = 0; y < height; y++) {
int outputOffset = y * width;
for (int x = 0; x < width; x++) {
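// Expand the 8-bit luminance value into the R, G and B channels and force alpha to opaque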
int grey = data[inputOffset + x] & 0xff;
pixels[outputOffset + x] = 0xFF000000 | (grey * 0x00010101);
}
inputOffset += width;
}
Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
bitmap.setPixels(pixels, 0, width, 0, 0, width, height);
return bitmap;
}
Save the bitmap to the SD card:
private void saveBitmap(Bitmap bmp, String name) {
FileOutputStream out = null;
try {
String filename = Environment.getExternalStorageDirectory().toString() + "/" + name;
Log.i(TAG, "writtenPath=" + filename);
out = new FileOutputStream(filename);
bmp.compress(Bitmap.CompressFormat.PNG, 100, out);
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
if (out != null) {
out.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}

Android background video recording

I am developing a recording service for a custom Android platform. When the application starts, it should begin recording video in the background. Unfortunately, the application runs on hardware that prevents me from using video recording directly.
My solution to this problem is to take images and hold them in a circular buffer; when an event happens, it stops feeding images to the buffer and stitches the buffered images together into a video.
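The CircularBuffer class used in the code below is not shown in the post; a minimal fixed-size ring buffer along these lines would match how it is used (an assumed sketch, not the original class):
private static class CircularBuffer<T> {
    private final Object[] items;
    private int head = 0;  // index of the oldest element
    private int count = 0; // number of stored elements

    CircularBuffer(int capacity) {
        items = new Object[capacity];
    }

    void add(T item) {
        int tail = (head + count) % items.length;
        items[tail] = item;
        if (count < items.length) {
            count++; // still filling up
        } else {
            head = (head + 1) % items.length; // overwrite the oldest element
        }
    }

    @SuppressWarnings("unchecked")
    T getData(int i) {
        return (T) items[(head + i) % items.length]; // i = 0 is the oldest element
    }
}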
The problem I am encountering is that when I save the images to video I just get a noisy green screen.
I based my code on this example: Using MediaCodec to save series of images as Video
Note: I cannot use MediaMuxer either, as I am developing for API level < 18.
I will guide you through the steps I take. On creation of the service I simply open the camera, set the preview on a SurfaceTexture, and add images to my buffer whenever the PreviewCallback is called.
private Camera mCamera;
private String mTimeStamp;
SurfaceTexture mSurfaceTexture;
private CircularBuffer<ByteArrayOutputStream> mCircularBuffer;
private static final int MAX_BUFFER_SIZE = 200;
private int mWidth = 720;
private int mHeight = 480;
@Override
public void onCreate() {
try {
mCircularBuffer = new CircularBuffer(MAX_BUFFER_SIZE);
mTimeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
mSurfaceTexture = new SurfaceTexture(10);
mCamera = getCameraInstance();
Parameters parameters = mCamera.getParameters();
parameters.setJpegQuality(20);
parameters.setPictureSize(mWidth, mHeight);
mCamera.setParameters(parameters);
mCamera.setPreviewTexture(mSurfaceTexture);
mCamera.startPreview();
mCamera.setPreviewCallback(mPreviewCallback);
} catch (IOException e) {
Log.d(TAG, "IOException: " + e.getMessage());
} catch (Exception e) {
Log.d(TAG, "Exception: " + e.getMessage());
}
}
private PreviewCallback mPreviewCallback = new PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
ByteArrayOutputStream out = new ByteArrayOutputStream();
YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, mWidth, mHeight, null);
Rect rectangle = new Rect(0, 0, mWidth, mHeight);
yuvImage.compressToJpeg(rectangle, 20, out);
mCircularBuffer.add(out);
}
};
All of this works; when I convert the byte arrays to JPEG at this point, they are all correct image files.
Now when an event happens, the service is destroyed and the last 200 images need to be placed one after another and converted to MP4. I do this by first saving them to H.264, based on the code provided in the link above, and then converting that file to MP4 using mp4parser.
@Override
public void onDestroy() {
super.onDestroy();
mCamera.stopPreview();
saveFileToH264("video/avc");
convertH264ToMP4();
}
private void saveFileToH264(String MIMETYPE) {
MediaCodec codec = MediaCodec.createEncoderByType(MIMETYPE);
MediaFormat mediaFormat = null;
int height = mCamera.getParameters().getPictureSize().height;
int width = mCamera.getParameters().getPictureSize().width;
Log.d(TAG, height + ", " + width);
mediaFormat = MediaFormat.createVideoFormat(MIMETYPE, width, height);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 1000000);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 15);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10);
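// Note: an encoder configured this way consumes raw YUV frames in the declared color format, not compressed (e.g. JPEG) data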
codec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
codec.start();
ByteBuffer[] inputBuffers = codec.getInputBuffers();
ByteBuffer[] outputBuffers = codec.getOutputBuffers();
boolean sawInputEOS = false;
int inputBufferIndex = -1, outputBufferIndex = -1;
BufferInfo info = null;
try {
File file = new File("/sdcard/output.h264");
FileOutputStream fstream2 = new FileOutputStream(file);
DataOutputStream dos = new DataOutputStream(fstream2);
// loop through buffer and get image output streams
for (int i = 0; i < MAX_BUFFER_SIZE; i++) {
ByteArrayOutputStream out = mCircularBuffer.getData(i);
byte[] dat = out.toByteArray();
long WAITTIME = 50;
inputBufferIndex = codec.dequeueInputBuffer(WAITTIME);
int bytesread = MAX_BUFFER_SIZE - 1 - i;
int presentationTime = 0;
if (bytesread <= 0)
sawInputEOS = true;
if (inputBufferIndex >= 0) {
if (!sawInputEOS) {
int samplesiz = dat.length;
inputBuffers[inputBufferIndex].put(dat);
codec.queueInputBuffer(inputBufferIndex, 0, samplesiz, presentationTime, 0);
presentationTime += 100;
info = new BufferInfo();
outputBufferIndex = codec.dequeueOutputBuffer(info, WAITTIME);
Log.i("BATA", "outputBufferIndex=" + outputBufferIndex);
if (outputBufferIndex >= 0) {
byte[] array = new byte[info.size];
outputBuffers[outputBufferIndex].get(array);
if (array != null) {
try {
dos.write(array);
} catch (IOException e) {
e.printStackTrace();
}
}
codec.releaseOutputBuffer(outputBufferIndex, false);
inputBuffers[inputBufferIndex].clear();
outputBuffers[outputBufferIndex].clear();
if (sawInputEOS)
break;
}
} else {
codec.queueInputBuffer(inputBufferIndex, 0, 0, presentationTime,
MediaCodec.BUFFER_FLAG_END_OF_STREAM);
info = new BufferInfo();
outputBufferIndex = codec.dequeueOutputBuffer(info, WAITTIME);
if (outputBufferIndex >= 0) {
byte[] array = new byte[info.size];
outputBuffers[outputBufferIndex].get(array);
if (array != null) {
try {
dos.write(array);
} catch (IOException e) {
e.printStackTrace();
}
}
codec.releaseOutputBuffer(outputBufferIndex, false);
inputBuffers[inputBufferIndex].clear();
outputBuffers[outputBufferIndex].clear();
break;
}
}
}
}
codec.flush();
try {
fstream2.close();
dos.flush();
dos.close();
} catch (IOException e) {
e.printStackTrace();
}
codec.stop();
codec.release();
codec = null;
} catch (FileNotFoundException e) {
Log.d(TAG, "File not found: " + e.getMessage());
} catch (Exception e) {
Log.d(TAG, "Exception: " + e.getMessage());
}
}
private void convertH264ToMP4() {
try {
DataSource videoFile = new FileDataSourceImpl("/sdcard/output.h264");
H264TrackImpl h264Track = new H264TrackImpl(videoFile, "eng", 5, 1);
// 5fps. you can play with timescale and timetick to get non integer fps, 23.967 is
// 24000/1001
Movie movie = new Movie();
movie.addTrack(h264Track);
Container out = new DefaultMp4Builder().build(movie);
FileOutputStream fos = new FileOutputStream(new File("/sdcard/output.mp4"));
out.writeContainer(fos.getChannel());
fos.flush();
fos.close();
Log.d(TAG, "Video saved to sdcard");
} catch (Exception e) {
Log.d(TAG, "No file was saved");
}
}
I'm pretty sure the problem is in the saveFileToH264 code. I've read in a post on the link provided above that this is probably a stride and/or alignment issue(?). However, I have no experience with encoding/decoding, so I'm not sure how to solve it. If anyone could help, that would be greatly appreciated!
Note: I know the code is not optimal and I still need to add more checks, but I first want to get a working video out of this.
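One more thing stands out (an observation, not a verified fix): the buffer stores JPEG-compressed frames, but the encoder is configured with COLOR_FormatYUV420SemiPlanar and therefore expects raw NV12-style frames. If you queue the raw NV21 preview bytes instead, a minimal sketch of the NV21-to-NV12 chroma swap could look like this:
// Hypothetical helper (assumption): convert NV21 (VU interleaved) to NV12 (UV interleaved),
// which is what COLOR_FormatYUV420SemiPlanar encoders generally expect
private static byte[] nv21ToNv12(byte[] nv21, int width, int height) {
    byte[] nv12 = new byte[nv21.length];
    int ySize = width * height;
    System.arraycopy(nv21, 0, nv12, 0, ySize); // the Y plane is identical in both layouts
    for (int i = ySize; i < nv21.length - 1; i += 2) {
        nv12[i] = nv21[i + 1];  // U comes first in NV12
        nv12[i + 1] = nv21[i];  // V comes second
    }
    return nv12;
}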

Picture Callback (doesn't enter its loop)

I am creating a program that calls the takePicture function twice, like this:
mCamera.takePicture(null, null, mPicture);
But the PictureCallback is only entered once:
private PictureCallback mPicture = new PictureCallback(){
// Bitmap readyToGo;
@Override
public void onPictureTaken(byte[] data, Camera camera){
Log.d(TAG, "entrei no picture callback");
//-----OUT OF MEMORY ERROR
pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE);
mPreview.setDrawingCacheEnabled(true);
mPreview.setDrawingCacheQuality(View.DRAWING_CACHE_QUALITY_AUTO);
BitmapFactory.Options options = new BitmapFactory.Options();
//options.inPurgeable = true;
//options.inInputShareable = true;
options.inJustDecodeBounds = true;
options.inSampleSize = 5;
options.inJustDecodeBounds = false;
Bitmap bitmap = mPreview.getDrawingCache();
//---------------------------------------------------
bmp = BitmapFactory.decodeByteArray(data, 0, data.length);
//combine the two bitmaps!!!!-------------
Log.d("main", "antes overlay");
combination = overlay(bmp, bitmap);
Log.d("main", "depois overlay");
//------------------ROTATION---------------------
if(pictureFile == null)
{
Log.d(TAG, "Error creating media file, check storages permissions. ");
return;
}
try
{
ExifInterface exif = new ExifInterface(pictureFile.getAbsolutePath()); //Since API Level 5
String exifOrientation = exif.getAttribute(ExifInterface.TAG_ORIENTATION);
Log.d("main", "exif orientation= "+exifOrientation);
FileOutputStream fos = new FileOutputStream(pictureFile);
Log.d(TAG, "ALO!!!");
combination.compress(CompressFormat.JPEG, 100, fos); // replaced bitmap with combination
fos.flush();
fos.close();
//------------------------------
clearBitmap(combination);
clearBitmap(bmp);
//------------------------------
}
catch(FileNotFoundException e)
{
Log.d(TAG, "File not found: "+e.getMessage());
}
catch(IOException e)
{
Log.d(TAG, "Error accessing file: "+e.getMessage());
}
}
};
Does anybody know what happens? Can I call it twice?
Make the thread pause for about two seconds and then call it again,
or you can use your code above with this preview callback:
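A minimal sketch of the two-second delay using a Handler (reusing your mCamera and mPicture, and assuming the preview must be restarted between stills):
final Handler handler = new Handler(Looper.getMainLooper());
mCamera.takePicture(null, null, mPicture);
handler.postDelayed(new Runnable() {
    @Override
    public void run() {
        mCamera.startPreview(); // preview stops after a capture and must be restarted
        mCamera.takePicture(null, null, mPicture);
    }
}, 2000);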
public void takeSnapPhoto() {
camera.setOneShotPreviewCallback(new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
Camera.Parameters parameters = camera.getParameters();
int format = parameters.getPreviewFormat();
//YUV formats require more conversion
if (format == ImageFormat.NV21 || format == ImageFormat.YUY2 || format == ImageFormat.NV16) {
int w = parameters.getPreviewSize().width;
int h = parameters.getPreviewSize().height;
// Get the YuV image
YuvImage yuv_image = new YuvImage(data, format, w, h, null);
// Convert YuV to Jpeg
Rect rect = new Rect(0, 0, w, h);
ByteArrayOutputStream output_stream = new ByteArrayOutputStream();
yuv_image.compressToJpeg(rect, 100, output_stream);
byte[] byt = output_stream.toByteArray();
FileOutputStream outStream = null;
try {
// Write to SD Card
File file = createFileInSDCard(FOLDER_PATH, "Image_"+System.currentTimeMillis()+".jpg");
//Uri uriSavedImage = Uri.fromFile(file);
outStream = new FileOutputStream(file);
outStream.write(byt);
outStream.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
}
}
}
}); }
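Note that setOneShotPreviewCallback delivers exactly one preview frame, so this saves the next preview-resolution frame rather than a full-resolution still capture.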
