FFmpeg output files not immediately available for use - Android

I am executing a video-split FFmpeg command on Android, and it writes output_%d files to the SD card (external storage).
Once the command has finished, the files do not appear until I open the Gallery app.
I tried running the media scanner on the newly created directory, but it reports the directory as empty, with no files immediately available.

What do you mean by "not appearing immediately"?
1/ With a file explorer?
This issue is caused by the MTP protocol: you have to rescan the files using the MediaScannerConnection class.
Using a media-scan app from Google Play helps if you don't want to reboot or write code for it.
2/ From Java?
You could list all the files in the folder and then run a media scan on each file (instead of on the directory):
Context ctx = <your_context>;
File directory = new File("<your_path>");
File[] files = directory.listFiles();
for (int i = 0; i < files.length; i++) {
    Intent intent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
    intent.setData(Uri.fromFile(files[i]));
    ctx.sendBroadcast(intent);
}
If you need to be notified as soon as the scan is finished, you'll need a MediaScannerConnectionClient:
private static final class Client implements MediaScannerConnectionClient {
    private final String path;
    private final String mimeType;
    public MediaScannerConnection connection;

    public Client(String path, String mimeType) {
        this.path = path;
        this.mimeType = mimeType;
    }

    @Override
    public void onMediaScannerConnected() {
        connection.scanFile(path, mimeType);
    }

    @Override
    public void onScanCompleted(String path, Uri uri) {
        connection.disconnect();
    }
}

public static void scanFile(Context context, String path, String mimeType) {
    Client client = new Client(path, mimeType);
    MediaScannerConnection connection = new MediaScannerConnection(context, client);
    client.connection = connection;
    connection.connect();
}
Ideally, this scan should be triggered by the app that runs the FFmpeg split, right after the command finishes.
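As a rough sketch, assuming the split is run through the same FFmpeg Android wrapper used in the answer below, that cmd is your split command array, and that outputDir and context are your own variables for the output directory and a Context:
try {
    mFFmpeg.execute(cmd, new ExecuteBinaryResponseHandler() {
        @Override
        public void onSuccess(String message) {
            // Scan the freshly written output_%d files so they show up over MTP / in the Gallery.
            File[] segments = outputDir.listFiles();
            if (segments == null) return;
            String[] paths = new String[segments.length];
            for (int i = 0; i < segments.length; i++) {
                paths[i] = segments[i].getAbsolutePath();
            }
            MediaScannerConnection.scanFile(context, paths, null, null);
        }
    });
} catch (FFmpegCommandAlreadyRunningException e) {
    e.printStackTrace();
}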

Try this. I am using the full sample code below: it records a video, splits it into images, and produces the output.
You can adjust the FFmpeg command as you want; if you need any help, let me know.
public class FrontCameraPreviewActivity extends AppCompatActivity implements CameraController.CameraRecordListener, FFMpegVideoGenerator.VideoGeneratorListener {
public final static String TAG = "FrontCameraPreviewAct";
private final static int CAMERA_PERMISSION_REQUEST_CODE = 50;
private FrontCameraSurfaceView mPreview;
private FrameLayout mPreviewFrame;
private FloatingActionButton fab;
private Animation hideCameraFab;
private CameraController mCameraController;
private ProgressBar mPbProcessing;
SweetAlertDialog progressDialog;
int mCount;
View mView;
ImageView btn_record_image;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.activity_front_camera_preview);
mView=(View)findViewById(R.id.mView);
btn_record_image=(ImageView) findViewById(R.id.btn_record_image);
mView.setVisibility(View.GONE);
fab = (FloatingActionButton) findViewById(R.id.btn_record);
btn_record_image.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
btn_record_image.setVisibility(View.GONE);
mCameraController.record();
mCount=0;
mView.setBackgroundColor(ContextCompat.getColor(FrontCameraPreviewActivity.this,R.color.white));
mView.setVisibility(View.VISIBLE);
new CountDownTimer(Constants.VIDEO_DURATION, 100) {
public void onTick(long millisUntilFinished) {
mCount++;
if(mCount%2 == 0)
{
mView.setVisibility(View.VISIBLE);
}
else
{
mView.setVisibility(View.GONE);
}
// mTextField.setText("seconds remaining: " + millisUntilFinished / 1000);
//here you can have your logic to set text to edittext
}
public void onFinish() {
mView.setVisibility(View.GONE);
}
}.start();
}
});
progressDialog = new SweetAlertDialog(FrontCameraPreviewActivity.this, SweetAlertDialog.PROGRESS_TYPE);
mPbProcessing = (ProgressBar) findViewById(R.id.pb_processing);
mPreviewFrame = (FrameLayout) findViewById(R.id.fl_camera_preview);
View decorView = getWindow().getDecorView();
// Hide the status bar.
decorView.setSystemUiVisibility(
View.SYSTEM_UI_FLAG_FULLSCREEN |
View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN |
View.SYSTEM_UI_FLAG_LAYOUT_STABLE
);
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
switch (requestCode) {
case CAMERA_PERMISSION_REQUEST_CODE: {
// If request is cancelled, the result arrays are empty.
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
this.initCamera();
} else {
// TODO : display an error view
}
}
}
}
@Override
protected void onResume() {
if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED&&
ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA,Manifest.permission.WRITE_EXTERNAL_STORAGE}, CAMERA_PERMISSION_REQUEST_CODE);
} else {
initCamera();
}
super.onResume();
}
@Override
protected void onPause() {
super.onPause();
if (mCameraController != null) {
mCameraController.release();
mCameraController = null;
}
}
private void initCamera() {
if (mCameraController == null) {
mCameraController = new CameraController(this);
mCameraController.setCameraRecordListener(this);
if (mCameraController.getCamera() == null) {
Toast.makeText(this, R.string.camera_not_available, Toast.LENGTH_SHORT).show();
// TODO : display an error view
} else if (mPreview == null) {
mPreview = new FrontCameraSurfaceView(this, mCameraController.getCamera(), CameraController.getFrontCameraInfo());
mPreviewFrame.addView(mPreview);
} else {
// handle the onResume after background properly
mPreview.setCamera(mCameraController.getCamera());
}
} else {
mCameraController.getCamera();
}
}
@Override
public void onCameraRecordSuccess(final File file) {
this.runOnUiThread(new Runnable() {
@Override
public void run() {
fab.setVisibility(View.GONE);
btn_record_image.setVisibility(View.GONE);
mCameraController.release();
mPreviewFrame.removeAllViews();
mCameraController = null;
mPbProcessing.setVisibility(View.GONE);
progressDialog.getProgressHelper().setBarColor(ContextCompat.getColor(FrontCameraPreviewActivity.this, R.color.darkbutton));
progressDialog.setTitleText("Processing...");
progressDialog.setCancelable(false);
progressDialog.show();
}
});
new Thread(new Runnable() {
@Override
public void run() {
Log.d(TAG, "start");
FFMpegVideoGenerator generator = new FFMpegVideoGenerator(FrontCameraPreviewActivity.this.getApplication());
generator.setVideoGeneratorListener(FrontCameraPreviewActivity.this);
generator.convert(file);
}
}).start();
}
@Override
public void onCameraRecordFailure() {
this.runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(FrontCameraPreviewActivity.this, R.string.camera_not_available, Toast.LENGTH_SHORT).show();
}
});
}
@Override
public void onVideoGenerated(String message, File generatedFile) {
progressDialog.dismiss();
Intent intent = new Intent(FrontCameraPreviewActivity.this, VideoPreviewActivity.class);
intent.putExtra(VideoPreviewActivity.VIDEO_PATH, generatedFile.getAbsolutePath());
startActivity(intent);
recreate();
}
@Override
public void onVideoGeneratedError(String message) {
Log.e(TAG, message);
}
}
Splitting the video into images and the other functionality:
FFMpegVideoGenerator.java
import android.content.Context;
import android.util.Log;
import com.github.hiteshsondhi88.libffmpeg.ExecuteBinaryResponseHandler;
import com.github.hiteshsondhi88.libffmpeg.FFmpeg;
import com.github.hiteshsondhi88.libffmpeg.LoadBinaryResponseHandler;
import com.github.hiteshsondhi88.libffmpeg.exceptions.FFmpegCommandAlreadyRunningException;
import com.github.hiteshsondhi88.libffmpeg.exceptions.FFmpegNotSupportedException;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.lang.ref.WeakReference;
import java.util.Arrays;
import java.util.Locale;
import wc.arjs.boomerang.controllers.VideoGenerator;
import wc.arjs.boomerang.utils.FileUtils;
import wc.arjs.boomerang.Constants;
public class FFMpegVideoGenerator extends Thread implements VideoGenerator {
private static final String TAG = "VideoGenerator";
private static final String TEMP_IMG_DIR = "imgs";
private static final String METADATA_PREFIX = "met_";
private static final String CROP_PREFIX = "crop_";
private WeakReference<VideoGeneratorListener> mWeakListener;
private String mWorkingDir;
private String mFinalOutputDir;
private FFmpeg mFFmpeg;
public FFMpegVideoGenerator(Context ctx) {
mFFmpeg = FFmpeg.getInstance(ctx);
mWorkingDir = ctx.getCacheDir() + File.separator + TMP_DIR;
mFinalOutputDir = ctx.getCacheDir() + File.separator + OUTPUT_DIR;
}
@Override
public void setVideoGeneratorListener(VideoGeneratorListener listener) {
mWeakListener = new WeakReference<>(listener);
}
@Override
public void convert(final File inputFile) {
FileUtils.createDirIfNeeded(mWorkingDir);
FileUtils.createDirIfNeeded(mFinalOutputDir);
try {
mFFmpeg.loadBinary(new LoadBinaryResponseHandler() {
@Override
public void onSuccess() {
fixMetaData(inputFile);
}
});
} catch (FFmpegNotSupportedException e) {
Log.e(TAG, "not supported");
}
}
private void fixMetaData(final File inputFile) {
Log.d(TAG, "fixeMetaData");
String c = "-y -i " + inputFile.getAbsolutePath() + " -metadata:s:v rotate=90 -codec copy "
+ mWorkingDir + File.separator + METADATA_PREFIX + inputFile.getName();
String[] cmd = c.split(" ");
try {
mFFmpeg.execute(cmd, new ExecuteBinaryResponseHandler() {
@Override
public void onFailure(String message) {
Log.e(TAG, message);
dispatchError(message);
}
@Override
public void onSuccess(String message) {
Log.d(TAG, message);
File generated = new File(mWorkingDir +
File.separator + METADATA_PREFIX + inputFile.getName());
cropVideo(generated);
}
});
} catch (FFmpegCommandAlreadyRunningException e) {
dispatchError(e.getMessage());
}
}
private void cropVideo(final File inputFile) {
String c = "-y -i " + inputFile.getAbsolutePath() + " -vf crop=" +
Constants.VIDEO_ASPECT_RATIO + "*in_h:in_h -preset ultrafast " +
mWorkingDir + File.separator + CROP_PREFIX + inputFile.getName();
String[] cmd = c.split(" ");
try {
mFFmpeg.execute(cmd, new ExecuteBinaryResponseHandler() {
@Override
public void onFailure(String message) {
Log.e(TAG, message);
dispatchError(message);
}
@Override
public void onSuccess(String message) {
Log.d(TAG, message);
File generated = new File(mWorkingDir +
File.separator + CROP_PREFIX + inputFile.getName());
splitIntoImages(generated);
}
});
} catch (FFmpegCommandAlreadyRunningException e) {
dispatchError(e.getMessage());
}
}
private void splitIntoImages(final File inputFile) {
Log.d(TAG, "splitIntoImages");
final File tempImgsDir = new File(mWorkingDir + File.separator + TEMP_IMG_DIR);
if (tempImgsDir.exists()) {
FileUtils.deleteDirectory(tempImgsDir);
}
tempImgsDir.mkdir();
String c = "-y -i " + inputFile.getAbsolutePath() +
" -strict experimental -r 30 -qscale 1 -f image2 -vcodec mjpeg " +
tempImgsDir.getAbsolutePath() + File.separator + "%03d.jpg";
String[] cmd = c.split(" ");
try {
mFFmpeg.execute(cmd, new ExecuteBinaryResponseHandler() {
@Override
public void onFailure(String message) {
Log.e(TAG, message);
dispatchError(message);
}
@Override
public void onSuccess(String message) {
Log.d(TAG, message);
reverseImagesOrder(tempImgsDir);
assembleVideo(tempImgsDir);
}
});
} catch (FFmpegCommandAlreadyRunningException e) {
dispatchError(e.getMessage());
}
}
private void reverseImagesOrder(final File inputDirectory) {
File[] files = inputDirectory.listFiles();
Arrays.sort(files);
int nbImages = files.length;
if (nbImages <= 2) {
dispatchError("Not enough images generated");
}
// start from before the last image and duplicate all the images in reverse order
for (int i = nbImages - 2; i > 0; i--) {
File img = files[i];
if (img.exists()) {
String copiedImg = inputDirectory.getAbsolutePath() + File.separator +
String.format(Locale.ENGLISH, "%03d", 2 * nbImages - i - 1) + ".jpg";
Log.d(TAG, copiedImg);
FileUtils.copyAndRenameInDirectory(img.getAbsolutePath(), copiedImg);
} else {
Log.e(TAG, "file not found : " + img.getAbsolutePath());
}
}
}
private void assembleVideo(final File inputDirectory) {
Log.d(TAG, "assembleVideo");
File containingFolder = new File(mFinalOutputDir);
final File assembledVideo = new File(containingFolder.getAbsolutePath() + File.separator + FINAL_VIDEO_NAME);
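// Note: cmdCreateMP4 below is assembled but never executed; only the command string
// built further down (String c = ...) is actually run.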
String[] cmdCreateMP4 = {
"-framerate",
"6",
"-i",
inputDirectory.getAbsolutePath() + File.separator +
"%03d.jpg",
"-c:v",
"libx264",
"-profile:v",
"high",
"-crf",
"20",
"-pix_fmt",
"yuv420p",
assembledVideo.getAbsolutePath()};
String c = "-framerate 70 -y -f image2 -loop 1 -i " + inputDirectory.getAbsolutePath() + File.separator +
"%03d.jpg -r 30 -vcodec mpeg4 -b:v 2100k -t 4 " +
assembledVideo.getAbsolutePath();
String[] cmd = c.split(" ");
try {
mFFmpeg.execute(cmd, new ExecuteBinaryResponseHandler() {
@Override
public void onFailure(String message) {
Log.e(TAG, message);
dispatchError(message);
}
@Override
public void onSuccess(String message) {
Log.d(TAG, message);
dispatchSuccess(message, assembledVideo);
/*String root = Environment.getExternalStorageDirectory().toString();
File myDir = new File(root + "/boomerang/unsafe");
myDir.mkdirs();
String fname = "VID_"+ System.currentTimeMillis() +".mp4";
File file = new File (myDir, fname);
if (file.exists ())
file.delete ();
try {
InputStream in = new FileInputStream(assembledVideo.getAbsolutePath());
OutputStream out = new FileOutputStream(file);
// Copy the bits from instream to outstream
byte[] buf = new byte[1024];
int len;
while ((len = in.read(buf)) > 0) {
out.write(buf, 0, len);
}
in.close();
out.close();
} catch (Exception e) {
e.printStackTrace();
}
File myDir1 = new File(root + "/boomerang/");
String fname1 = "VID_"+ System.currentTimeMillis() +".mp4";
File file1= new File (myDir1, fname1);
*/
// concatenate(file.getAbsolutePath(),file.getAbsolutePath(),file1.getAbsolutePath());
}
});
} catch (FFmpegCommandAlreadyRunningException e) {
dispatchError(e.getMessage());
}
}
private static String generateList(String[] inputs) {
File list;
Writer writer = null;
try {
list = File.createTempFile("ffmpeg-list", ".txt");
writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(list)));
for (String input: inputs) {
writer.write("file '" + input + "'\n");
Log.d(TAG, "Writing to list file: file '" + input + "'");
}
} catch (IOException e) {
e.printStackTrace();
return "/";
} finally {
try {
if (writer != null)
writer.close();
} catch (IOException ex) {
ex.printStackTrace();
}
}
Log.d(TAG, "Wrote list file to " + list.getAbsolutePath());
return list.getAbsolutePath();
}
public void concatenate(String inputFile1, String inputFile2, final String outputFile) {
Log.d(TAG, "Concatenating " + inputFile1 + " and " + inputFile2 + " to " + outputFile);
String list = generateList(new String[] {inputFile1, inputFile2});
/* String c = "ffmpeg -i "+inputFile1+" -qscale:v 1 intermediate1.mpg ffmpeg -i "+inputFile2+" -qscale:v 1 intermediate2.mpg cat intermediate1.mpg intermediate2.mpg > intermediate_all.mpg ffmpeg -i intermediate_all.mpg -qscale:v 2 "+
outputFile;
*/
/* new String[] {
"ffmpeg",
"-f",
"concat",
"-i",
list,
"-c",
"copy",
outputFile
}*/
String c = "ffmpeg -i "+inputFile1+" -filter_complex [0]reverse[r];[0][r]concat,loop=5:250,setpts=N/25/TB " +
outputFile;
String[] cmd = c.split(" ");
try {
mFFmpeg.execute(cmd, new ExecuteBinaryResponseHandler() {
@Override
public void onFailure(String message) {
Log.e(TAG, message);
//dispatchError(message);
}
@Override
public void onSuccess(String message) {
Log.d("#Success", "Ho gaya");
dispatchSuccess(message, new File(outputFile));
}
});
} catch (FFmpegCommandAlreadyRunningException e) {
//dispatchError(e.getMessage());
}
/*
Videokit vk = new Videokit();
vk.run(new String[] {
"ffmpeg",
"-f",
"concat",
"-i",
list,
"-c",
"copy",
outputFile
});
*/
}
private void dispatchSuccess(String message, File file) {
if (mWeakListener != null && mWeakListener.get() != null) {
mWeakListener.get().onVideoGenerated(message, file);
}
}
private void dispatchError(String message) {
if (mWeakListener != null && mWeakListener.get() != null) {
mWeakListener.get().onVideoGeneratedError(message);
}
}
}
The split images end up at this path, which is in the app's temporary cache directory (Android/data/...). If you want them elsewhere, change the path to any directory you like, create it with mkdirs(), and save the images there. The output file pattern is:
tempImgsDir.getAbsolutePath() + File.separator + "%03d.jpg"
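If you would rather keep the frames somewhere permanent, a minimal sketch (the "MySplitFrames" folder name is just an example):
// Illustrative only: write the frames to a public Pictures subfolder instead of the app cache.
File publicDir = new File(Environment.getExternalStoragePublicDirectory(
        Environment.DIRECTORY_PICTURES), "MySplitFrames");
if (!publicDir.exists()) {
    publicDir.mkdirs();
}
String outputPattern = publicDir.getAbsolutePath() + File.separator + "%03d.jpg";
// Use outputPattern in the splitIntoImages() command and media-scan publicDir once ffmpeg finishes.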
CameraController.java
import android.content.Context;
import android.hardware.Camera;
import android.media.CamcorderProfile;
import android.media.MediaRecorder;
import android.os.FileObserver;
import android.util.Log;
import java.io.File;
import java.io.IOException;
import wc.arjs.boomerang.Constants;
/**
* Created by Sylvain on 26/11/2016.
*/
public class CameraController {
private final static String TAG = "CameraController";
public interface CameraRecordListener {
void onCameraRecordSuccess(File file);
void onCameraRecordFailure();
}
private CameraRecordListener mCameraRecordListener;
private Camera mCamera;
private MediaRecorder mMediaRecorder;
private boolean isRecording;
private Context mContext;
private static Camera GetFrontCameraInstance() {
Camera c = null;
int cameraId = GetFrontCameraId();
try {
c = Camera.open(cameraId);
} catch (Exception e) {
e.printStackTrace();
}
return c; // returns null if camera is unavailable
}
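// Note: despite their names, GetFrontCameraId() and getFrontCameraInfo() below search for
// CAMERA_FACING_BACK, i.e. they actually return the back camera.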
private static int GetFrontCameraId() {
Camera.CameraInfo info = new Camera.CameraInfo();
int count = Camera.getNumberOfCameras();
for (int i = 0; i < count; i++) {
Camera.getCameraInfo(i, info);
if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
return i;
}
}
return -1;
}
public static Camera.CameraInfo getFrontCameraInfo(){
Camera.CameraInfo info = new Camera.CameraInfo();
int count = Camera.getNumberOfCameras();
for (int i = 0; i < count; i++) {
Camera.getCameraInfo(i, info);
if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
return info;
}
}
return info;
}
public CameraController(Context ctx){
this.mContext = ctx;
mCamera = GetFrontCameraInstance();
Camera.Parameters params = mCamera.getParameters();
params.setFlashMode(Camera.Parameters.FLASH_MODE_AUTO);
/* params.setExposureCompensation(params.getMaxExposureCompensation());
if(params.isAutoExposureLockSupported()) {
params.setAutoExposureLock(false);
}*/
params.setRecordingHint(true);
mCamera.setParameters(params);
isRecording = false;
}
public void setCameraRecordListener(CameraRecordListener cameraRecordListener){
this.mCameraRecordListener = cameraRecordListener;
}
class RecordedFileObserver extends FileObserver {
private File output;
public RecordedFileObserver(File output, int mask) {
super(output.getAbsolutePath(), mask);
this.output = output;
}
public void onEvent(int event, String path) {
if(event == FileObserver.CLOSE_WRITE){
if(mCameraRecordListener!=null){
mCameraRecordListener.onCameraRecordSuccess(output);
}
}
}
}
public void record(){
final File output = getOutputMediaFile();
if (prepareVideoRecorder(output)) {
// Camera is available and unlocked, MediaRecorder is prepared,
// now you can start recording
mMediaRecorder.setOnInfoListener(new MediaRecorder.OnInfoListener() {
@Override
public void onInfo(MediaRecorder mr, int what, int extra) {
if(what==MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED){
if (isRecording) {
RecordedFileObserver fb = new RecordedFileObserver(output, FileObserver.CLOSE_WRITE);
fb.startWatching();
// stop recording and release camera
mMediaRecorder.stop(); // stop the recording
releaseMediaRecorder(); // release the MediaRecorder object
mCamera.lock(); // take camera access back from MediaRecorder
isRecording = false;
}
}else{
if(mCameraRecordListener!=null){
mCameraRecordListener.onCameraRecordFailure();
}
}
}
});
mMediaRecorder.start();
isRecording = true;
} else {
releaseMediaRecorder();
if(mCameraRecordListener!=null){
mCameraRecordListener.onCameraRecordFailure();
}
}
}
public void release(){
this.releaseMediaRecorder();
this.releaseCamera();
}
private boolean prepareVideoRecorder(File output) {
mMediaRecorder = new MediaRecorder();
// store the quality profile required
CamcorderProfile profile = CamcorderProfile.get(CameraController.GetFrontCameraId(), CamcorderProfile.QUALITY_480P);
// Step 1: Unlock and set camera to MediaRecorder
mCamera.unlock();
mMediaRecorder.setCamera(mCamera);
// Step 2: Set sources
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
// Step 3: Set a CamcorderProfile (requires API Level 8 or higher)
mMediaRecorder.setOutputFormat(profile.fileFormat);
mMediaRecorder.setVideoEncoder(profile.videoCodec);
mMediaRecorder.setVideoEncodingBitRate(profile.videoBitRate);
mMediaRecorder.setVideoFrameRate(profile.videoFrameRate);
mMediaRecorder.setVideoSize(profile.videoFrameWidth, profile.videoFrameHeight);
mMediaRecorder.setVideoSize(640, 480);
// recorder.setVideoSize(640, 480);
// recorder.setVideoFrameRate(16); //might be auto-determined due to lighting
// recorder.setVideoEncodingBitRate(3000000);
// recorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);// MPEG_4_SP
// recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
// Step 4: Set output file
mMediaRecorder.setOutputFile(output.toString());
// Set the duration
mMediaRecorder.setMaxDuration(Constants.VIDEO_DURATION);
// Step 6: Prepare configured MediaRecorder
try {
mMediaRecorder.prepare();
} catch (IllegalStateException e) {
Log.d(TAG, "IllegalStateException preparing MediaRecorder: " + e.getMessage());
releaseMediaRecorder();
return false;
} catch (IOException e) {
Log.d(TAG, "IOException preparing MediaRecorder: " + e.getMessage());
releaseMediaRecorder();
return false;
}
return true;
}
private void releaseCamera() {
if (mCamera != null) {
mCamera.release(); // release the camera for other applications
mCamera = null;
}
}
private void releaseMediaRecorder() {
if (mMediaRecorder != null) {
mMediaRecorder.reset(); // clear recorder configuration
mMediaRecorder.release(); // release the recorder object
mMediaRecorder = null;
mCamera.lock(); // lock camera for later use
}
}
private File getOutputMediaFile() {
return new File(mContext.getFilesDir().getPath() + File.separator + Constants.VIDEO_TEMP_NAME);
}
public Camera getCamera() {
return mCamera;
}
}
Constants.java
public class Constants {
public final static double VIDEO_ASPECT_RATIO = 0.6; // 1:5
public final static int VIDEO_DURATION = 2000;
public final static String VIDEO_TEMP_NAME = "VID_TEMP.mp4";
}

Related

How to save the captured image from camera in the phone memory to check the file size?

I am testing a camera application which captures high quality images. I want the captured image to be saved in the mobile memory (Internal or external) to check the details of the image.
I have the code below for capturing the image.
import android.content.Intent;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.design.widget.BottomSheetBehavior;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewTreeObserver;
import android.view.WindowManager;
import android.widget.Toast;
import com.otaliastudios.cameraview.CameraListener;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.CameraOptions;
import com.otaliastudios.cameraview.CameraView;
import com.otaliastudios.cameraview.SessionType;
import com.otaliastudios.cameraview.Size;
import java.io.File;
public class CameraActivity extends AppCompatActivity implements View.OnClickListener, ControlView.Callback {
private CameraView camera;
private ViewGroup controlPanel;
private boolean mCapturingPicture;
private boolean mCapturingVideo;
// To show stuff in the callback
private Size mCaptureNativeSize;
private long mCaptureTime;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED,
WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED);
setContentView(R.layout.activity_camera);
CameraLogger.setLogLevel(CameraLogger.LEVEL_VERBOSE);
camera = findViewById(R.id.camera);
camera.addCameraListener(new CameraListener() {
public void onCameraOpened(CameraOptions options) { onOpened(); }
public void onPictureTaken(byte[] jpeg) { onPicture(jpeg); }
@Override
public void onVideoTaken(File video) {
super.onVideoTaken(video);
onVideo(video);
}
});
findViewById(R.id.edit).setOnClickListener(this);
findViewById(R.id.capturePhoto).setOnClickListener(this);
findViewById(R.id.captureVideo).setOnClickListener(this);
findViewById(R.id.toggleCamera).setOnClickListener(this);
controlPanel = findViewById(R.id.controls);
ViewGroup group = (ViewGroup) controlPanel.getChildAt(0);
Control[] controls = Control.values();
for (Control control : controls) {
ControlView view = new ControlView(this, control, this);
group.addView(view, ViewGroup.LayoutParams.MATCH_PARENT,
ViewGroup.LayoutParams.WRAP_CONTENT);
}
controlPanel.getViewTreeObserver().addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
@Override
public void onGlobalLayout() {
BottomSheetBehavior b = BottomSheetBehavior.from(controlPanel);
b.setState(BottomSheetBehavior.STATE_HIDDEN);
}
});
}
private void message(String content, boolean important) {
int length = important ? Toast.LENGTH_LONG : Toast.LENGTH_SHORT;
Toast.makeText(this, content, length).show();
}
private void onOpened() {
ViewGroup group = (ViewGroup) controlPanel.getChildAt(0);
for (int i = 0; i < group.getChildCount(); i++) {
ControlView view = (ControlView) group.getChildAt(i);
view.onCameraOpened(camera);
}
}
private void onPicture(byte[] jpeg) {
mCapturingPicture = false;
long callbackTime = System.currentTimeMillis();
if (mCapturingVideo) {
message("Captured while taking video. Size="+mCaptureNativeSize, false);
return;
}
// This can happen if picture was taken with a gesture.
if (mCaptureTime == 0) mCaptureTime = callbackTime - 300;
if (mCaptureNativeSize == null) mCaptureNativeSize = camera.getPictureSize();
PicturePreviewActivity.setImage(jpeg);
Intent intent = new Intent(CameraActivity.this, PicturePreviewActivity.class);
intent.putExtra("delay", callbackTime - mCaptureTime);
intent.putExtra("nativeWidth", mCaptureNativeSize.getWidth());
intent.putExtra("nativeHeight", mCaptureNativeSize.getHeight());
startActivity(intent);
mCaptureTime = 0;
mCaptureNativeSize = null;
}
private void onVideo(File video) {
mCapturingVideo = false;
Intent intent = new Intent(CameraActivity.this, VideoPreviewActivity.class);
intent.putExtra("video", Uri.fromFile(video));
startActivity(intent);
}
@Override
public void onClick(View view) {
switch (view.getId()) {
case R.id.edit: edit(); break;
case R.id.capturePhoto: capturePhoto(); break;
case R.id.captureVideo: captureVideo(); break;
case R.id.toggleCamera: toggleCamera(); break;
}
}
@Override
public void onBackPressed() {
BottomSheetBehavior b = BottomSheetBehavior.from(controlPanel);
if (b.getState() != BottomSheetBehavior.STATE_HIDDEN) {
b.setState(BottomSheetBehavior.STATE_HIDDEN);
return;
}
super.onBackPressed();
}
private void edit() {
BottomSheetBehavior b = BottomSheetBehavior.from(controlPanel);
b.setState(BottomSheetBehavior.STATE_COLLAPSED);
}
private void capturePhoto() {
if (mCapturingPicture) return;
mCapturingPicture = true;
mCaptureTime = System.currentTimeMillis();
mCaptureNativeSize = camera.getPictureSize();
message("Capturing picture...", false);
camera.capturePicture();
}
private void captureVideo() {
if (camera.getSessionType() != SessionType.VIDEO) {
message("Can't record video while session type is 'picture'.", false);
return;
}
if (mCapturingPicture || mCapturingVideo) return;
mCapturingVideo = true;
message("Recording for 8 seconds...", true);
camera.startCapturingVideo(null, 8000);
}
private void toggleCamera() {
if (mCapturingPicture) return;
switch (camera.toggleFacing()) {
case BACK:
message("Switched to back camera!", false);
break;
case FRONT:
message("Switched to front camera!", false);
break;
}
}
@Override
public boolean onValueChanged(Control control, Object value, String name) {
if (!camera.isHardwareAccelerated() && (control == Control.WIDTH || control == Control.HEIGHT)) {
if ((Integer) value > 0) {
message("This device does not support hardware acceleration. " +
"In this case you can not change width or height. " +
"The view will act as WRAP_CONTENT by default.", true);
return false;
}
}
control.applyValue(camera, value);
BottomSheetBehavior b = BottomSheetBehavior.from(controlPanel);
b.setState(BottomSheetBehavior.STATE_HIDDEN);
message("Changed " + control.getName() + " to " + name, false);
return true;
}
//region Boilerplate
@Override
protected void onResume() {
super.onResume();
camera.start();
}
@Override
protected void onPause() {
super.onPause();
camera.stop();
}
@Override
protected void onDestroy() {
super.onDestroy();
camera.destroy();
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
boolean valid = true;
for (int grantResult : grantResults) {
valid = valid && grantResult == PackageManager.PERMISSION_GRANTED;
}
if (valid && !camera.isStarted()) {
camera.start();
}
}
//endregion
}
I've also added the permissions to read and write the external storage.
But I just want to know the size of the picture once it is saved.
Here is the code I am using to save the image:
public void KickOut(String filename,Bitmap bitmap){
ActivityCompat.requestPermissions(PicturePreviewActivity.this,new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE},1);
OutputStream outputStream;
File filepath=Environment.getExternalStorageDirectory();
// File dir=new File(filepath+"/Olaa/");
File dir=new File("/Environment.getExternalStoragePublicDirectory/Imgs/");
dir.mkdirs();
File file=new File(dir,"filename.png");
Toast.makeText(PicturePreviewActivity.this, file + " -> saved" , Toast.LENGTH_SHORT).show();
try{
outputStream=new FileOutputStream(file);
bitmap.compress(Bitmap.CompressFormat.PNG,100,outputStream);
outputStream.flush();outputStream.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
// Toast.makeText(PicturePreviewActivity.this, filepath + " -> path" , Toast.LENGTH_SHORT).show();
}
You can check the size of the captured image, and you can also save the file to your own folder.
Get the result in onActivityResult(), read the image from the bundle and convert it into a byte array; lengthbmp_KB then gives you the size in KB.
if (resultCode == RESULT_OK) {
//Image capture
if (requestCode == 1) {
Bitmap thumbnail = (Bitmap) data.getExtras().get("data");
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
thumbnail.compress(Bitmap.CompressFormat.JPEG, 100, bytes);
byte[] imageInByte = bytes.toByteArray();
long lengthbmp = imageInByte.length;
// save on custom folder
File destination1 = createDirectoryAndSaveFile(thumbnail, System.currentTimeMillis() + ".jpg");
/* File destination = new File(Environment.getExternalStorageDirectory(),
System.currentTimeMillis() + ".jpg");*/
picturePath = "" + destination1;
// show image on gallery
scanGallery(getActivity(), picturePath);
long lengthbmp_KB = lengthbmp / 1024;
long length_MB = lengthbmp_KB / 1024;
}
You can save the file to your own folder:
private File createDirectoryAndSaveFile(Bitmap imageToSave, String fileName) {
File direct = new File(Environment.getExternalStorageDirectory() + "/My Images");
if (!direct.exists()) {
File wallpaperDirectory = new File("/sdcard/MYfolder Images/");
wallpaperDirectory.mkdirs();
}
File file = new File(new File("/sdcard/Myfolder Images/"), fileName);
/* if (file.exists()) {
file.delete();
}*/
try {
FileOutputStream out = new FileOutputStream(file);
imageToSave.compress(Bitmap.CompressFormat.JPEG, 100, out);
out.flush();
out.close();
} catch (Exception e) {
e.printStackTrace();
}
return file;
}
You can make your images show up in the Gallery by using MediaScannerConnection:
private void scanGallery(Context cntx, String path) {
try {
MediaScannerConnection.scanFile(cntx, new String[]{path}, null, new MediaScannerConnection.OnScanCompletedListener() {
public void onScanCompleted(String path, Uri uri) {
}
});
} catch (Exception e) {
e.printStackTrace();
}
}
Call scanGallery() from onActivityResult().
To check the dimensions of an image saved in phone memory, you could use BitmapFactory.Options:
BitmapFactory.Options options = new BitmapFactory.Options();
options.inJustDecodeBounds = true;
BitmapFactory.decodeFile(filename, options);
int imageHeight = options.outHeight;
int imageWidth = options.outWidth;
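That gives the image dimensions. If what you need is the size of the saved file itself, File.length() is enough (using the picturePath from the snippet above):
File savedImage = new File(picturePath);
long sizeBytes = savedImage.length();   // size on disk in bytes
long sizeKB = sizeBytes / 1024;
long sizeMB = sizeKB / 1024;
Log.d("ImageSize", sizeBytes + " B = " + sizeKB + " KB = " + sizeMB + " MB");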
You can use this to save the file provided you've given the permissions in the manifest.
static final int REQUEST_TAKE_PHOTO = 1;
private void dispatchTakePictureIntent() {
Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
// Ensure that there's a camera activity to handle the intent
if (takePictureIntent.resolveActivity(getPackageManager()) != null) {
// Create the File where the photo should go
File photoFile = null;
try {
photoFile = createImageFile();
} catch (IOException ex) {
// Error occurred while creating the File
...
}
// Continue only if the File was successfully created
if (photoFile != null) {
Uri photoURI = FileProvider.getUriForFile(this,
"com.example.android.fileprovider",
photoFile);
takePictureIntent.putExtra(MediaStore.EXTRA_OUTPUT, photoURI);
startActivityForResult(takePictureIntent, REQUEST_TAKE_PHOTO);
}
}
}
Take a look at the official documentation on taking photos as well.
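The createImageFile() helper referenced above is not shown; a sketch following the pattern from the official "Take photos" guide (the currentPhotoPath field name is illustrative):
// Requires java.text.SimpleDateFormat, java.util.Date, java.util.Locale.
String currentPhotoPath;

private File createImageFile() throws IOException {
    // Collision-resistant file name: JPEG_<timestamp>_<random>.jpg
    String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss", Locale.US).format(new Date());
    File storageDir = getExternalFilesDir(Environment.DIRECTORY_PICTURES);
    File image = File.createTempFile("JPEG_" + timeStamp + "_", ".jpg", storageDir);
    // Remember the path so the full-size photo can be located later.
    currentPhotoPath = image.getAbsolutePath();
    return image;
}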

Save android logcat in a file on Android device

I want to write the Android logcat output to a file on my device.
To do that, I used the following code:
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
if(isExternalStorageWritable()){
File appDirectory = new File(Environment.getExternalStorageDirectory()+ "/MyAppfolder");
File logDirectory = new File(appDirectory + "/log");
File logFile = new File(logDirectory, "logcat"+System.currentTimeMillis()+".txt");
if(!appDirectory.exists()){
appDirectory.mkdir();
}
if(!logDirectory.exists()){
logDirectory.mkdir();
}
if(!logFile.exists()){
try {
logFile.createNewFile();
} catch (IOException e) {
e.printStackTrace();
}
}
try{
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if(checkSelfPermission(android.Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(this, new String[]{android.Manifest.permission.WRITE_EXTERNAL_STORAGE}, 1);
Process process = Runtime.getRuntime().exec("logcat -f "+logFile);
}
}
}
catch (IOException e){
e.printStackTrace();
}
}
else if (isExternalStorageReadable()){
Log.i(TAG, "ONLY READABLE");
}
else{
Log.i(TAG, "NOT ACCESSIBLE");
}}
public boolean isExternalStorageReadable(){
String state = Environment.getExternalStorageState();
if(Environment.MEDIA_MOUNTED.equals(state) || Environment.MEDIA_MOUNTED_READ_ONLY.equals(state)){
return true;
}
return false;
}
public boolean isExternalStorageWritable(){
String state = Environment.getExternalStorageState();
if(Environment.MEDIA_MOUNTED.equals(state)){
return true;
}
return false;
}
And I added the permissions in AndroidManifest.xml
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.READ_LOGS"/>
The folders and the file are created, but the file is always empty.
How can I improve the code so that the logcat output is actually written to the file?
if(checkSelfPermission(android.Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(this, new String[]{android.Manifest.permission.WRITE_EXTERNAL_STORAGE}, 1);
}
else{
Process process = Runtime.getRuntime().exec("logcat -f "+logFile);
}
Your code is otherwise working; the mistake is that you ask for the permission and start writing the log file in the same branch. Request the permission first and only run the logcat command once it has been granted, as shown above.
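If you want the dump to start automatically once the user grants the permission, a sketch of the callback (this assumes logFile is promoted from a local variable in onCreate() to a field):
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    if (requestCode == 1 && grantResults.length > 0
            && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
        try {
            // Start dumping logcat only after WRITE_EXTERNAL_STORAGE has been granted.
            Runtime.getRuntime().exec("logcat -f " + logFile);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}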
Maybe you are not writing the log to the file. You are missing this method.
/* Checks if external storage is available for read and write */
public boolean isExternalStorageWritable() {
String state = Environment.getExternalStorageState();
if ( Environment.MEDIA_MOUNTED.equals( state ) ) {
return true;
}
return false;
}
You took this code from the solution to this Stack Overflow question; check it out.
It's my pleasure to answer your question. In my project, I use the following to solve this problem.
1. Add this class:
public class LogcatHelper {
private static LogcatHelper INSTANCE = null;
private static String PATH_LOGCAT;
private LogDumper mLogDumper = null;
private int mPId;
/**
* init data
*/
public void init(Context context) {
if (Environment.getExternalStorageState().equals(
Environment.MEDIA_MOUNTED)) {// sd first
PATH_LOGCAT = Environment.getExternalStorageDirectory()
.getAbsolutePath() + File.separator + "logcat";
} else {
PATH_LOGCAT = context.getFilesDir().getAbsolutePath()
+ File.separator + "logcat";
}
File file = new File(PATH_LOGCAT);
if (!file.exists()) {
file.mkdirs();
}
}
public static LogcatHelper getInstance(Context context) {
if (INSTANCE == null) {
INSTANCE = new LogcatHelper(context);
}
return INSTANCE;
}
private LogcatHelper(Context context) {
init(context);
mPId = android.os.Process.myPid();
}
public void start() {
if (mLogDumper == null)
mLogDumper = new LogDumper(String.valueOf(mPId), PATH_LOGCAT);
mLogDumper.start();
}
public void stop() {
if (mLogDumper != null) {
mLogDumper.stopLogs();
mLogDumper = null;
}
}
private class LogDumper extends Thread {
private Process logcatProc;
private BufferedReader mReader = null;
private boolean mRunning = true;
String cmds = null;
private String mPID;
private FileOutputStream out = null;
public LogDumper(String pid, String dir) {
mPID = pid;
try {
out = new FileOutputStream(new File(dir, "logcat"
+ getFileName() + ".log"));
} catch (FileNotFoundException e) {
e.printStackTrace();
}
/**
*
* Level:*:v , *:d , *:w , *:e , *:f , *:s
*
*
* */
// cmds = "logcat *:e *:w | grep \"(" + mPID + ")\""; // print e level and ilevel info
// cmds = "logcat | grep \"(" + mPID + ")\"";// print all
// cmds = "logcat -s way";// print filter info
cmds = "logcat *:e *:i | grep \"(" + mPID + ")\"";
}
public void stopLogs() {
mRunning = false;
}
@Override
public void run() {
try {
logcatProc = Runtime.getRuntime().exec(cmds);
mReader = new BufferedReader(new InputStreamReader(
logcatProc.getInputStream()), 1024);
String line = null;
while (mRunning && (line = mReader.readLine()) != null) {
if (!mRunning) {
break;
}
if (line.length() == 0) {
continue;
}
if (out != null && line.contains(mPID)) {
out.write((getDateEN() + " " + line + "\n")
.getBytes());
}
}
} catch (IOException e) {
e.printStackTrace();
} finally {
if (logcatProc != null) {
logcatProc.destroy();
logcatProc = null;
}
if (mReader != null) {
try {
mReader.close();
mReader = null;
} catch (IOException e) {
e.printStackTrace();
}
}
if (out != null) {
try {
out.close();
} catch (IOException e) {
e.printStackTrace();
}
out = null;
}
}
}
}
public static String getFileName() {
SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd");
String date = format.format(new Date(System.currentTimeMillis()));
return date;
}
public static String getDateEN() {
SimpleDateFormat format1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
String date1 = format1.format(new Date(System.currentTimeMillis()));
return date1;
}
}
2. Start it from your Application class:
LogcatHelper.getInstance(getApplicationContext()).start();
3. Request the storage permission, e.g. from your launcher Activity:
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
    if (checkSelfPermission(android.Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
        ActivityCompat.requestPermissions(this, new String[]{android.Manifest.permission.WRITE_EXTERNAL_STORAGE}, 1);
    }
}
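When you no longer need the dump (for example when the app goes to the background), the helper can be stopped again:
// Stops the background LogDumper thread and closes the output file.
LogcatHelper.getInstance(getApplicationContext()).stop();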
I hope this helps.

Receiving data in an Android app from an IMU, displaying it, and saving it to a .csv file

I am receiving data from a Razor 9-DOF IMU (which gives me accelerometer, gyro, and magnetometer values); code can be flashed to it with the Arduino IDE.
Currently the data arrives in my Android app as shown in the image, and in my Android code it is handled in private void updateReceivedData(byte[] data).
Appending the data to display the values works fine: mDumpTextView.append(message);
But I need the data in an array so that I can display only a specific value on screen rather than the whole stream, and at the same time I need to store the data in a .csv file. I have not been able to do this; when I try to save it into an array it does not work for me (the data arrives from the IMU in appended form in the Android app).
The whole code is available on GitHub:
https://github.com/mik3y/usb-serial-for-android/tree/master/usbSerialExamples/src/main
The relevant code is:
public class SerialConsoleActivity extends Activity {
private final String TAG = SerialConsoleActivity.class.getSimpleName();
/**
* Driver instance, passed in statically via
* {@link #show(Context, UsbSerialPort)}.
*
* <p/>
* This is a devious hack; it'd be cleaner to re-create the driver using
* arguments passed in with the {@link #startActivity(Intent)} intent. We
* can get away with it because both activities will run in the same
* process, and this is a simple demo.
*/
private static UsbSerialPort sPort = null;
private TextView mTitleTextView;
private TextView mDumpTextView;
private ScrollView mScrollView;
private CheckBox chkDTR;
private CheckBox chkRTS;
private final ExecutorService mExecutor = Executors.newSingleThreadExecutor();
private SerialInputOutputManager mSerialIoManager;
private final SerialInputOutputManager.Listener mListener =
new SerialInputOutputManager.Listener() {
@Override
public void onRunError(Exception e) {
Log.d(TAG, "Runner stopped.");
}
@Override
public void onNewData(final byte[] data) {
SerialConsoleActivity.this.runOnUiThread(new Runnable() {
@Override
public void run() {
SerialConsoleActivity.this.updateReceivedData(data);
}
});
}
};
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.serial_console);
// mDumpTextView = (TextView) findViewById(R.id.consoleText);
mTitleTextView = (TextView) findViewById(R.id.demoTitle);
mDumpTextView = (TextView) findViewById(R.id.consoleText);
// mDumpTextView = (TextView) findViewById(R.id.ex);
mScrollView = (ScrollView) findViewById(R.id.demoScroller);
chkDTR = (CheckBox) findViewById(R.id.checkBoxDTR);
chkRTS = (CheckBox) findViewById(R.id.checkBoxRTS);
chkDTR.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
try {
sPort.setDTR(isChecked);
}catch (IOException x){}
}
});
chkRTS.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
try {
sPort.setRTS(isChecked);
}catch (IOException x){}
}
});
}
@Override
protected void onPause() {
super.onPause();
stopIoManager();
if (sPort != null) {
try {
sPort.close();
} catch (IOException e) {
// Ignore.
}
sPort = null;
}
finish();
}
void showStatus(TextView theTextView, String theLabel, boolean theValue){
String msg = theLabel + ": " + (theValue ? "enabled" : "disabled") + "\n";
theTextView.append(msg);
}
@Override
protected void onResume() {
super.onResume();
Log.d(TAG, "Resumed, port=" + sPort);
if (sPort == null) {
mTitleTextView.setText("No serial device.");
} else {
final UsbManager usbManager = (UsbManager) getSystemService(Context.USB_SERVICE);
UsbDeviceConnection connection = usbManager.openDevice(sPort.getDriver().getDevice());
if (connection == null) {
mTitleTextView.setText("Opening device failed");
return;
}
try {
sPort.open(connection);
sPort.setParameters(57600, 8, UsbSerialPort.STOPBITS_1, UsbSerialPort.PARITY_NONE);
showStatus(mDumpTextView, "CD - Carrier Detect", sPort.getCD());
// sPort.getCD();
showStatus(mDumpTextView, "CTS - Clear To Send", sPort.getCTS());
//sPort.getCTS();
showStatus(mDumpTextView, "DSR - Data Set Ready", sPort.getDSR());
//sPort.getDSR();
showStatus(mDumpTextView, "DTR - Data Terminal Ready", sPort.getDTR());
//sPort.getDTR();
showStatus(mDumpTextView, "DSR - Data Set Ready", sPort.getDSR());
//sPort.getDSR();
showStatus(mDumpTextView, "RI - Ring Indicator", sPort.getRI());
//sPort.getRI();
showStatus(mDumpTextView, "RTS - Request To Send", sPort.getRTS());
//sPort.getRTS();
} catch (IOException e) {
Log.e(TAG, "Error setting up device: " + e.getMessage(), e);
mTitleTextView.setText("Error opening device: " + e.getMessage());
try {
sPort.close();
} catch (IOException e2) {
// Ignore.
}
sPort = null;
return;
}
mTitleTextView.setText("Serial device: " + sPort.getClass().getSimpleName());
}
onDeviceStateChange();
}
private void stopIoManager() {
if (mSerialIoManager != null) {
Log.i(TAG, "Stopping io manager ..");
mSerialIoManager.stop();
mSerialIoManager = null;
}
}
public void writeToCsv1(String x,String y,String z) throws IOException {
Calendar c = Calendar.getInstance();
File folder = new File(Environment.getExternalStorageDirectory() + "/project");
boolean success = true;
if (!folder.exists()) {
success = folder.mkdir();
}
if (success) {
// Do something on success
String csv = folder + "/AccelerometerValue1.csv";
FileWriter file_writer = new FileWriter(csv, true);
;
String s = c.get(Calendar.YEAR) + "," + c.get(Calendar.MONTH) + "," + c.get(Calendar.DATE) + "," + c.get(Calendar.HOUR) + "," + c.get(Calendar.MINUTE) + "," + c.get(Calendar.SECOND) + "," + c.get(Calendar.MILLISECOND) + "," + x + "," + y + "," + z + "\n";
file_writer.append(s);
file_writer.close();
}
}
private void startIoManager() {
if (sPort != null) {
Log.i(TAG, "Starting io manager ..");
mSerialIoManager = new SerialInputOutputManager(sPort, mListener);
mExecutor.submit(mSerialIoManager);
}
}
private String convert(byte[] data) {
StringBuilder sb = new StringBuilder(data.length);
for (int i = 0; i < data.length; ++ i) {
if (data[i] < 0) throw new IllegalArgumentException();
sb.append((char) data[i]);
}
return sb.toString();
}
private void onDeviceStateChange() {
stopIoManager();
startIoManager();
}
private void updateReceivedData(byte[] data) {
final String message = convert(data);
if ("A".equals(message)) {
mDumpTextView.append("\n\n");
}
mDumpTextView.append(message);
mScrollView.smoothScrollTo(0, mDumpTextView.getBottom());
}
/**
* Starts the activity, using the supplied driver instance.
*
* @param context
* @param driver
*/
static void show(Context context, UsbSerialPort port) {
sPort = port;
final Intent intent = new Intent(context, SerialConsoleActivity.class);
intent.addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP | Intent.FLAG_ACTIVITY_NO_HISTORY);
context.startActivity(intent);
}
}
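A rough sketch of one way to do what is described above, assuming the IMU sends newline-terminated, comma-separated samples (for example "ax,ay,az,gx,gy,gz,mx,my,mz"): buffer the incoming bytes, split each complete line into an array, show only the values you need, and append each sample to the CSV file.
private final StringBuilder mLineBuffer = new StringBuilder();

private void updateReceivedData(byte[] data) {
    mLineBuffer.append(convert(data));
    int newline;
    while ((newline = mLineBuffer.indexOf("\n")) != -1) {
        String line = mLineBuffer.substring(0, newline).trim();
        mLineBuffer.delete(0, newline + 1);
        String[] values = line.split(",");
        if (values.length >= 3) {
            // Display just the accelerometer values instead of the raw stream...
            mDumpTextView.append("ax=" + values[0] + " ay=" + values[1] + " az=" + values[2] + "\n");
            try {
                // ...and append the same sample (with a timestamp) to the CSV file.
                writeToCsv1(values[0], values[1], values[2]);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
    mScrollView.smoothScrollTo(0, mDumpTextView.getBottom());
}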

Getting result.getDriveFolder().getDriveId().getResourceId() always null

I am trying to put all the files inside a folder, but result.getDriveFolder().getDriveId().getResourceId() is always null. I searched, found some links, and tried to follow the steps mentioned there.
Here's my code:
public class MainActivity extends Activity implements ConnectionCallbacks,
OnConnectionFailedListener {
public ArrayList songsList = new ArrayList();
private static final String TAG = "drive-quickstart";
private GoogleApiClient mGoogleApiClient;
public static ArrayList<File> listAllMusicFiles = new ArrayList<File>();
protected static final int REQUEST_CODE_RESOLUTION = 1;
protected static final int NEXT_AVAILABLE_REQUEST_CODE = 2;
private DriveId mFolderDriveId;
public static final String EXISTING_FOLDER_ID = "0B2EEtIjPUdX6MERsWlYxN3J6RU0";
public static final String EXISTING_FILE_ID = "0ByfSjdPVs9MZTHBmMVdSeWxaNTg";
/**
* Extra for account name.
*/
protected static final String EXTRA_ACCOUNT_NAME = "account_name";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
final File mainDir = new File(Environment.getExternalStorageDirectory()
.getPath());
loadSdcardfiles(mainDir);
}
/**
* Create a new file and save it to Drive.
*/
private void saveFileToDrive(final File file) {
Log.i(TAG, "Creating new contents.");
Drive.DriveApi.newDriveContents(mGoogleApiClient).setResultCallback(
new ResultCallback<DriveApi.DriveContentsResult>() {
@Override
public void onResult(DriveApi.DriveContentsResult result) {
if (!result.getStatus().isSuccess()) {
Log.i(TAG, "Failed to create new contents.");
return;
}
final DriveContents driveContents = result
.getDriveContents();
Log.i(TAG, "New contents created.");
OutputStream outputStream = result.getDriveContents()
.getOutputStream();
try {
@SuppressWarnings("resource")
FileInputStream fileInputStream = new FileInputStream(
file);
byte[] buffer = new byte[1024];
int bytesRead;
while ((bytesRead = fileInputStream.read(buffer)) != -1) {
outputStream.write(buffer, 0, bytesRead);
}
} catch (IOException e1) {
Log.i(TAG, "Unable to write file contents.");
}
DriveFolder folder = mFolderDriveId.asDriveFolder();
MetadataChangeSet metadataChangeSet = new MetadataChangeSet.Builder()
.setTitle(file.getName()).setStarred(true)
.build();
// create a file on root folder
folder.createFile(mGoogleApiClient, metadataChangeSet,
driveContents).setResultCallback(fileCallback);
// Drive.DriveApi
// .getRootFolder(mGoogleApiClient)
// .createFile(mGoogleApiClient,
// metadataChangeSet, driveContents)
// .setResultCallback(fileCallback);
try {
} catch (Exception e) {
}
}
});
}
private void loadSdcardfiles(File aFile) {
if (aFile.isFile()) {
if (aFile.getAbsolutePath().endsWith(".mp3")) {
listAllMusicFiles.add(aFile);
}
} else if (aFile.isDirectory()) {
File[] listOfFiles = aFile.listFiles();
if (listOfFiles != null) {
for (int i = 0; i < listOfFiles.length; i++) {
if (listOfFiles[i].isFile()) {
loadSdcardfiles(listOfFiles[i]);
}
}
} else {
}
}
}
@Override
protected void onResume() {
super.onResume();
if (mGoogleApiClient == null) {
mGoogleApiClient = new GoogleApiClient.Builder(this)
.addApi(Drive.API).addScope(Drive.SCOPE_FILE)
.addConnectionCallbacks(this)
.addOnConnectionFailedListener(this).build();
}
mGoogleApiClient.connect();
}
@Override
protected void onPause() {
if (mGoogleApiClient != null) {
mGoogleApiClient.disconnect();
}
super.onPause();
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == REQUEST_CODE_RESOLUTION && resultCode == RESULT_OK) {
mGoogleApiClient.connect();
}
}
public void showMessage(String message) {
Toast.makeText(this, message, Toast.LENGTH_LONG).show();
}
@Override
public void onConnectionFailed(ConnectionResult result) {
Log.i(TAG, "GoogleApiClient connection failed: " + result.toString());
if (!result.hasResolution()) {
GoogleApiAvailability.getInstance()
.getErrorDialog(this, result.getErrorCode(), 0).show();
return;
}
try {
result.startResolutionForResult(this, REQUEST_CODE_RESOLUTION);
} catch (SendIntentException e) {
Log.e(TAG, "Exception while starting resolution activity", e);
}
}
@Override
public void onConnected(Bundle connectionHint) {
Toast.makeText(this, "API client connected" + listAllMusicFiles.get(0),
1000).show();
if (listAllMusicFiles.size() == 0) {
return;
} else {
MetadataChangeSet changeSet = new MetadataChangeSet.Builder()
.setTitle("Siddharth").build();
Drive.DriveApi.getRootFolder(mGoogleApiClient)
.createFolder(mGoogleApiClient, changeSet)
.setResultCallback(callback);
}
}
@Override
public void onConnectionSuspended(int cause) {
Toast.makeText(this, "GoogleApiClient connection suspended", 1000)
.show();
}
final private ResultCallback<DriveIdResult> idCallback = new ResultCallback<DriveIdResult>() {
@Override
public void onResult(DriveIdResult result) {
if (!result.getStatus().isSuccess()) {
showMessage("Cannot find DriveId. Are you authorized to view this file?");
return;
}
mFolderDriveId = result.getDriveId();
for (int i = 0; i < listAllMusicFiles.size(); i++) {
saveFileToDrive(listAllMusicFiles.get(i));
}
}
};
final ResultCallback<DriveFolderResult> callback = new ResultCallback<DriveFolderResult>() {
@Override
public void onResult(final DriveFolderResult result) {
if (!result.getStatus().isSuccess()) {
showMessage("Error while trying to create the folder");
return;
}
showMessage("Created a folder: "
+ result.getDriveFolder().getDriveId());
mFolderDriveId = result.getDriveFolder().getDriveId();
DriveFolder folder = Drive.DriveApi.getFolder(mGoogleApiClient,
result.getDriveFolder().getDriveId());
folder.addChangeSubscription(mGoogleApiClient);
}
};
final private ResultCallback<DriveFileResult> fileCallback = new ResultCallback<DriveFileResult>() {
@Override
public void onResult(DriveFileResult result) {
if (!result.getStatus().isSuccess()) {
showMessage("Error while trying to create the file");
return;
}
showMessage("Created a file with content: "
+ result.getDriveFile().getDriveId());
}
};
public class MyDriveEventService extends DriveEventService {
@Override
public void onChange(ChangeEvent event) {
Log.d(TAG, event.toString());
Drive.DriveApi.fetchDriveId(mGoogleApiClient,
mFolderDriveId.getResourceId()).setResultCallback(
idCallback);
}
}
}
Can anyone suggest how to do this? Thanks in advance.
Finally, yes, the answer was in the links I provided.
Here are the changes I made:
@Override
public void onConnected(Bundle connectionHint) {
Toast.makeText(this, "API client connected" + listAllMusicFiles.get(0),
1000).show();
if (listAllMusicFiles.size() == 0) {
return;
} else {
if (count == 0) {
count++;
mcreateFolder();
}
}
}
public void mcreateFolder() {
MetadataChangeSet changeSet = new MetadataChangeSet.Builder().setTitle(
"Siddharth").build();
Drive.DriveApi.getRootFolder(mGoogleApiClient)
.createFolder(mGoogleApiClient, changeSet)
.setResultCallback(callback);
}
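// Note: count, mPoker, mWait, mCnt and ENOUGH are fields assumed to be declared elsewhere in the
// activity, for example: private int count = 0; private Handler mPoker; private long mWait = 500;
// private int mCnt; private static final int ENOUGH = 8; (values are illustrative).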
final ResultCallback<DriveFolderResult> callback = new ResultCallback<DriveFolderResult>() {
@Override
public void onResult(final DriveFolderResult result) {
if (!result.getStatus().isSuccess()) {
showMessage("Error while trying to create the folder");
return;
}
showMessage("Created a folder: "
+ result.getDriveFolder().getDriveId());
mFolderDriveId = result.getDriveFolder().getDriveId();
Drive.DriveApi.getFolder(mGoogleApiClient, mFolderDriveId)
.addChangeListener(mGoogleApiClient, mChgeLstnr);
mCnt = 0;
mPoker = new Handler();
mPoker.postDelayed(mPoke, mWait);
}
};
final private ChangeListener mChgeLstnr = new ChangeListener() {
@Override
public void onChange(ChangeEvent event) {
// showMessage("event: " + event + " resId: "
// + event.getDriveId().getResourceId());
}
};
private final Runnable mPoke = new Runnable() {
public void run() {
if (mPoker != null && mFolderDriveId != null
&& mFolderDriveId.getResourceId() == null
&& (mCnt++ < ENOUGH)) {
MetadataChangeSet meta = new MetadataChangeSet.Builder()
.build();
Drive.DriveApi
.getFolder(mGoogleApiClient, mFolderDriveId)
.updateMetadata(mGoogleApiClient, meta)
.setResultCallback(
new ResultCallback<DriveResource.MetadataResult>() {
@Override
public void onResult(
DriveResource.MetadataResult result) {
if (result.getStatus().isSuccess()
&& result.getMetadata()
.getDriveId()
.getResourceId() != null) {
showMessage("resId COOL "
+ result.getMetadata()
.getDriveId()
.getResourceId());
mPoker.removeCallbacksAndMessages(mPoke);
Drive.DriveApi.fetchDriveId(
mGoogleApiClient,
result.getMetadata()
.getDriveId()
.getResourceId())
.setResultCallback(
idCallback);
} else {
mPoker.postDelayed(mPoke,
mWait *= 2);
}
}
});
} else {
mPoker = null;
}
}
};
Many thanks to @seanpj for the detailed explanation. See also:
cannot get folderId that i just created on google drive
Unpredictable result of DriveId.getResourceId() in Google Drive Android API
I really appreciate your effort.

OnCameraFrame method of CvCameraViewListener interface not being called

I am new to OpenCV and trying to create a simple application which opens the camera and captures a photo. I have implemented the CvCameraViewListener interface for this purpose. My code looks as follows:
MainActivity.java
public class MainActivity extends Activity implements CvCameraViewListener2{
public String TAG = "MainActivity";
private int mCameraIndex;
private Mat mBgr;
private Boolean mIsPhotoPending;
private CameraBridgeViewBase mCameraView;
private static final String STATE_CAMERA_INDEX = "cameraIndex";
private Boolean mIsMenuLocked;
private CameraBridgeViewBase.CvCameraViewFrame inputFrame;
int screen_w, screen_h;
private Mat gray, frame, lowRes;
static {
if (!OpenCVLoader.initDebug()) {
Log.v("MainActivity","Loading of OpenCv Failed");
}
}
private BaseLoaderCallback mLoaderCallBack = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch(status) {
case LoaderCallbackInterface.SUCCESS:
{
String TAG = "";
Log.i(TAG, "Open CV successfully loaded");
mCameraView.enableView();
mBgr = new Mat();
}break;
default:
{
super.onManagerConnected(status);
}break;
}
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//setContentView(R.layout.activity_main);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
if(savedInstanceState != null)
{
mCameraIndex = savedInstanceState.getInt(STATE_CAMERA_INDEX, 0);
}
else
{
mCameraIndex = 0;
}
if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD)
{
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
Camera.getCameraInfo(mCameraIndex, cameraInfo);
}
mCameraView = new NativeCameraView(this, mCameraIndex);
//mCameraView.setCvCameraViewListener(this);
findViewById(R.id.HelloOpenCvView);
//mOpenCvCameraView = new JavaCameraView(this,-1);
setContentView(mCameraView);
}
@Override
public void onResume() {
super.onResume();
mLoaderCallBack.onManagerConnected(LoaderCallbackInterface.SUCCESS);
// OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_11, this, mLoaderCallBack);
mIsMenuLocked = false;
}
@Override
public void onPause() {
super.onPause();
if(mCameraView != null)
mCameraView.disableView();
}
public void onDestroy() {
super.onDestroy();
if(mCameraView != null)
mCameraView.disableView();
}
@Override
public void onCameraViewStarted(int width, int height) {
}
@Override
public void onCameraViewStopped() {
}
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
Mat rgba = inputFrame.rgba();
if(mIsPhotoPending)
{
takePhoto(rgba);
}
return rgba;
}
private void takePhoto(Mat rgba)
{
//get the path of the photo
final long currentTimeMillis = System.currentTimeMillis();
final String appName = getString(R.string.app_name);
final String galleryPath = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES).toString();
final String albumPath = galleryPath + "/" + appName;
final String photoPath = albumPath + "/" + currentTimeMillis + ".png";
final ContentValues values = new ContentValues();
values.put(MediaStore.MediaColumns.DATA, photoPath);
values.put(MediaStore.Images.Media.MIME_TYPE, showActivity.PHOTO_MIME_TYPE);
values.put(MediaStore.Images.Media.TITLE, appName);
values.put(MediaStore.Images.Media.DESCRIPTION, appName);
values.put(MediaStore.Images.Media.DATE_TAKEN, currentTimeMillis);
//check if the album directory exists
File album = new File(albumPath);
if(!album.isDirectory() && !album.mkdirs())
{
Log.e(TAG,"Failed to create album directory at" + albumPath);
return;
}
//try to create the photo
Imgproc.cvtColor(rgba, mBgr, Imgproc.COLOR_RGBA2BGR, 3);
if(!Highgui.imwrite(photoPath, mBgr))
{
Log.d(TAG,"Photo saved successfully");
onTakePhotoFailed();
}
Log.d(TAG, "Photo saved successfully");
//insert photo in mediastore
Uri uri;
final Intent intent = new Intent();
try
{
uri = getContentResolver().insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values);
intent.putExtra(showActivity.EXTRA_PHOTO_URI, uri);
}catch(final Exception e)
{
Log.e(TAG, "Failed to insert photo into media store");
e.printStackTrace();
}
//delete the photo because insertion failed
File photo = new File(photoPath);
if(!photo.delete())
{
Log.e(TAG, "Failed to delete non-inserted photo");
}
onTakePhotoFailed();
intent.putExtra(showActivity.EXTRA_PHOTO_DATA_PATH, photoPath);
startActivity(intent);
return;
}
private void onTakePhotoFailed()
{
mIsMenuLocked = false;
//display error message
final String errorMessage = getString(R.string.photo_error_message);
runOnUiThread(new Runnable()
{
@Override
public void run()
{
Toast.makeText(MainActivity.this, errorMessage, Toast.LENGTH_SHORT).show();
}
});
}
}
My problem is that the onCameraFrame() method is never called, which in turn means takePhoto() is never called, so I am not able to capture the photo. I call takePhoto() from within onCameraFrame() because that method provides the Mat with the frame data. Kindly let me know where I went wrong.
Any help would be highly appreciated.
You've commented out the camera listener. That's why onCameraFrame() is never called. Uncomment this in onCreate():
mCameraView.setCvCameraViewListener(this);
You may also need to implement the PictureCallback interface in your activity. Refer to the OpenCV Tutorial 3 - Camera Control sample app.
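In context, the relevant part of onCreate() then looks like this (taken from the code above, with only the listener line uncommented):
mCameraView = new NativeCameraView(this, mCameraIndex);
mCameraView.setCvCameraViewListener(this);  // without this, onCameraFrame() is never called
setContentView(mCameraView);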
