I have an IntentService that receives messages from a server via GCM. Everything works fine except when I try to take a picture using android.hardware.Camera: the IntentService is destroyed before onPictureTaken is called. What should I do to save the captured photo before the IntentService stops?
public class GcmIntentService extends IntentService {
String function;
public GcmIntentService() {
super("GcmIntentService");
}
@Override
protected void onHandleIntent(Intent intent) {
Bundle extras = intent.getExtras();
String msg = intent.getStringExtra("message");
GoogleCloudMessaging gcm = GoogleCloudMessaging.getInstance(this);
String messageType = gcm.getMessageType(intent);
if (!extras.isEmpty()) {
if (GoogleCloudMessaging.MESSAGE_TYPE_MESSAGE.equals(messageType)) {
System.out.println("message received");
try {
JSONObject json = new JSONObject(msg);
function = json.getString("function");
} catch (JSONException e) {
e.printStackTrace();
}
if ("capture".equals(function)) {
System.out.println("capture entered =====================");
Capture cap = new Capture(getApplicationContext());
cap.captureNow();
}
}
}
GcmBroadcastReceiver.completeWakefulIntent(intent);
}
@Override
public void onDestroy() {
if (Capture.camera != null) {
System.out.println("camera released");
Capture.camera.release();
}
super.onDestroy();
}
}
The Capture class, taken from the vogella tutorial: http://www.vogella.com/tutorials/AndroidCamera/article.html
public class Capture {
Context captureContext = null;
public static Camera camera;
private int cameraId = 0;
final static String DEBUG_TAG = "Capture Class";
public Capture(Context context) {
captureContext = context;
// do we have a camera?
if (!captureContext.getPackageManager().hasSystemFeature(
PackageManager.FEATURE_CAMERA)) {
System.out.println("no camera found on this device");
} else {
cameraId = findFrontFacingCamera();
if (cameraId < 0) {
System.out.println("no front camera found");
} else {
camera = Camera.open(cameraId);
}
}
}
public void captureNow() {
System.out.println("capture now entered");
camera.takePicture(null, null, new PhotoHandler(captureContext));
}
public int findFrontFacingCamera() {
int cameraId = -1;
// Search for the front facing camera
int numberOfCameras = Camera.getNumberOfCameras();
for (int i = 0; i < numberOfCameras; i++) {
CameraInfo info = new CameraInfo();
Camera.getCameraInfo(i, info);
if (info.facing == CameraInfo.CAMERA_FACING_FRONT) {
Log.d(DEBUG_TAG, "Camera found");
cameraId = i;
break;
}
}
return cameraId;
}
}
The PhotoHandler class:
public class PhotoHandler implements PictureCallback {
private final Context context;
public static String filename;
public PhotoHandler(Context context) {
System.out.println("enetered photoHandler");
this.context = context;
}
@Override
public void onPictureTaken(byte[] data, android.hardware.Camera camera) {
System.out.println("entered onPicture taken");
File pictureFileDir = getDir();
System.out.println("file directory = " + pictureFileDir);
if (!pictureFileDir.exists() && !pictureFileDir.mkdirs()) {
Log.d(Capture.DEBUG_TAG, "Can't create directory to save image.");
return;
}
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss");
String date = dateFormat.format(new Date());
String photoFile = "Picture_" + date + ".jpg";
filename = pictureFileDir.getPath() + File.separator + photoFile;
File pictureFile = new File(filename);
try {
FileOutputStream fos = new FileOutputStream(pictureFile);
fos.write(data);
fos.close();
System.out.println("New Image saved: " + photoFile);
Capture.camera.release();
} catch (Exception error) {
Log.d(Capture.DEBUG_TAG,
"File " + filename + " not saved: " + error.getMessage());
System.out.println("image could not be saved");
}
}
private File getDir() {
File sdDir = Environment
.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES);
return new File(sdDir, "blah");
}
}
The answer is simple: just do the camera work in an AsyncTask. That will probably work.
@Override
protected void onHandleIntent(Intent intent) {
Bundle extras = intent.getExtras();
String msg = intent.getStringExtra("message");
GoogleCloudMessaging gcm = GoogleCloudMessaging.getInstance(this);
String messageType = gcm.getMessageType(intent);
if (!extras.isEmpty()) {
if (GoogleCloudMessaging.MESSAGE_TYPE_MESSAGE.equals(messageType)) {
System.out.println("message received");
try {
JSONObject json = new JSONObject(msg);
function = json.getString("function");
} catch (JSONException e) {
e.printStackTrace();
}
if ("capture".equals(function)) {
System.out.println("capture entered =====================");
new AsyncTask<Void, Void, String>() {
@Override
protected String doInBackground(Void...params) {
Capture cap = new Capture(getApplicationContext());
cap.captureNow();
String result = "ok";
return result;
}
@Override
protected void onPostExecute(String result) {
}
}.execute();
}
}
}
GcmBroadcastReceiver.completeWakefulIntent(intent);
}
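Note that Camera.takePicture() is asynchronous: onPictureTaken() is delivered later by the camera framework, so even with the AsyncTask the service can still be destroyed once onHandleIntent() returns. One way to keep the worker thread alive until the callback has finished is a CountDownLatch; a rough sketch, assuming it runs inside onHandleIntent() and reuses the Capture class above:
// import java.util.concurrent.CountDownLatch;
// import java.util.concurrent.TimeUnit;
final CountDownLatch pictureSaved = new CountDownLatch(1);
Capture cap = new Capture(getApplicationContext());
Capture.camera.takePicture(null, null, new Camera.PictureCallback() {
    @Override
    public void onPictureTaken(byte[] data, Camera camera) {
        // ... write 'data' to a file, as PhotoHandler does ...
        camera.release();
        pictureSaved.countDown(); // signal that the photo is on disk
    }
});
try {
    // Block onHandleIntent() (with a timeout) so the IntentService is not
    // destroyed before onPictureTaken has run.
    pictureSaved.await(10, TimeUnit.SECONDS);
} catch (InterruptedException e) {
    Thread.currentThread().interrupt();
}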
Related
Is there any way to get a photo without a preview and put it into a database on Android SDK 19?
In the code below I automatically get the location every 5 minutes and put a marker on the map; additionally, I need to take a photo automatically and put it into the database.
public void startCountDown() {
if (this._countDownTimer != null) {
this._countDownTimer.cancel();
}
_countDownTimer = new CountDownTimer(60000 * 5, 60000*5) {
@Override
public void onFinish() {
}
@Override
public void onTick(long millisUntilFinished) {
putMarkerToLocation();
takePhotoAndSaveIt2Db(); // ==> here what i need
}
}.start();
}
The service below works perfectly on API 19.
public class CapPhoto extends Service {
private Camera mCamera;
@Override
public void onCreate() {
super.onCreate();
Log.d("CAM", "start");
if (android.os.Build.VERSION.SDK_INT > 9) {
StrictMode.ThreadPolicy policy =
new StrictMode.ThreadPolicy.Builder().permitAll().build();
StrictMode.setThreadPolicy(policy);
}
Thread myThread = null;
}
@Override
public void onStart(Intent intent, int startId) {
takePhoto();
}
@Override
public IBinder onBind(Intent intent) {
return null;
}
private void takePhoto() {
System.out.println("Fotoraf Cekimi Hazirligi Basladi");
Camera camera = null;
int cameraCount = 0;
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
cameraCount = Camera.getNumberOfCameras();
for (int camIdx = 0; camIdx < cameraCount; camIdx++) {
SystemClock.sleep(1000);
Camera.getCameraInfo(camIdx, cameraInfo);
try {
camera = Camera.open(camIdx);
} catch (RuntimeException e) {
System.out.println("Camera not available: " + camIdx);
camera = null;
//e.printStackTrace();
}
try {
if (null == camera) {
System.out.println("Could not get camera instance");
} else {
System.out.println("Got the camera, creating the dummy surface texture");
//SurfaceTexture dummySurfaceTextureF = new SurfaceTexture(0);
try {
//camera.setPreviewTexture(dummySurfaceTextureF);
camera.setPreviewTexture(new SurfaceTexture(0));
camera.startPreview();
} catch (Exception e) {
System.out.println("Could not set the surface preview texture");
e.printStackTrace();
}
camIdx = cameraCount;
Camera.Parameters params = camera.getParameters();
params.setFlashMode(Camera.Parameters.FLASH_MODE_AUTO);
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
params.setSceneMode(Camera.Parameters.SCENE_MODE_AUTO);
params.setWhiteBalance(Camera.Parameters.WHITE_BALANCE_AUTO);
params.setExposureCompensation(0);
params.setPictureFormat(ImageFormat.JPEG);
params.setJpegQuality(100);
params.setRotation(90);
camera.setParameters(params);
camera.takePicture(null, null, new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, Camera camera) {
File pictureFileDir =new File(Environment.getExternalStorageDirectory(), "A");
if(!pictureFileDir.exists()){
pictureFileDir.mkdirs();
} // File pictureFileDir = getDir();
if (!pictureFileDir.exists() && !pictureFileDir.mkdirs()) {
return;
}
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss");
String date = dateFormat.format(new Date());
String photoFile = "TaksiResim_" + "_" + date + ".jpg";
String filename = pictureFileDir.getPath() + File.separator + photoFile;
File mainPicture = new File(filename);
//addImageFile(mainPicture);
try {
FileOutputStream fos = new FileOutputStream(mainPicture);
fos.write(data);
fos.close();
System.out.println("resim kayit edildi");
} catch (Exception error) {
System.out.println("resim kayit edilemedi");
}
camera.release();
}
});
}
} catch (Exception e) {
camera.release();
}
}
}
}
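For the scheduling part of the question, the service can simply be started from the timer callback. A rough sketch, assuming CapPhoto is declared in AndroidManifest.xml and this method lives in the activity that owns the CountDownTimer:
private void takePhotoAndSaveIt2Db() {
    // Starts the CapPhoto service above, which opens the camera, takes a
    // picture and writes the JPEG to external storage.
    startService(new Intent(this, CapPhoto.class));
    // Writing the saved file (or its path) to the database would be a
    // follow-up step, e.g. from the service's onPictureTaken callback.
}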
I cannot play a video on Google Drive which I am uploading from my application.
This code records the screen, stores the file as a video and then uploads it to Google Drive, but I am not able to play the uploaded video on Google Drive. Please suggest a solution.
My code is below.
String email = null;
Pattern gmailPattern = Patterns.EMAIL_ADDRESS;
Account[] accounts = AccountManager.get(this).getAccounts();
for (Account account : accounts) {
if (gmailPattern.matcher(account.name).matches()) {
email = account.name;
}
}
Toast.makeText(this, "Android Device Registered Email Address: " + email, Toast.LENGTH_LONG).show();
mMediaProjectionManager = (MediaProjectionManager) getSystemService(MEDIA_PROJECTION_SERVICE);
// Connect to Google Drive
mCredential = GoogleAccountCredential.usingOAuth2(getApplicationContext(), Arrays.asList(DriveScopes.DRIVE));
mCredential.setSelectedAccountName(email);
mService = getDriveService(mCredential);
b1 = (Button) findViewById(R.id.button1);
b1.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
new LongOperation().execute();
}
});
}
@Override
public void onStart() {
super.onStart();
}
@Override
protected void onDestroy() {
super.onDestroy();
if (mRecorder != null) {
mRecorder.quit();
mRecorder = null;
}
}
private class LongOperation extends AsyncTask<String, Void, String> {
@Override
protected String doInBackground(String... params) {
for (int i = 0; i < 10; i++) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
Thread.interrupted();
}
}
return "Executed";
}
@Override
protected void onPreExecute() {
if (mRecorder == null) {
Intent captureIntent = mMediaProjectionManager.createScreenCaptureIntent();
startActivityForResult(captureIntent, REQUEST_CODE);
}
Log.d("result", "Starting Recording");
}
@Override
protected void onProgressUpdate(Void... values) {
}
@Override
protected void onPostExecute(String result) {
if (mRecorder != null) {
mRecorder.quit();
mRecorder = null;
Log.d("result", "Stopping Recording");
}
for (int i = 0; i < 4; i++) {
try {
Thread.sleep(2000);
} catch (InterruptedException e) {
Thread.interrupted();
}
}
saveFileToDrive();
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
MediaProjection mediaProjection = mMediaProjectionManager.getMediaProjection(resultCode, data);
if (mediaProjection == null) {
Log.e("##", "media projection is null");
return;
}
String newFolder = "/FIREBASE";
String extSdcard = Environment.getExternalStorageDirectory().toString();
File file = new File(extSdcard + newFolder);
file.mkdir();
Log.d("result", "DIR:--" + file.getPath());
Log.d("result", "DIR:--" + file.mkdir());
final int width = 1280;
final int height = 720;
final int bitrate = 6000000;
if (file.exists()) {
File_Path = "record-" + width + "x" + height + "-" + System.currentTimeMillis() + ".mp4";
video_file = new File(file, File_Path);
Log.d("result", "VideoDIR:--" + video_file.getPath());
Log.d("Result", "Video file Name" + video_file);
Log.d("Result", "File Path" + File_Path);
mRecorder = new ScreenRecorder(width, height, bitrate, 1, mediaProjection, video_file.getPath());
mRecorder.start();
Toast.makeText(this, "Screen recorder is running...", Toast.LENGTH_SHORT).show();
moveTaskToBack(false);
}
}
@NonNull
private Drive getDriveService(GoogleAccountCredential credential) {
return new Drive.Builder(AndroidHttp.newCompatibleTransport(), new GsonFactory(), credential)
.build();
}
private void saveFileToDrive() {
Thread t = new Thread(new Runnable() {
@Override
public void run() {
try {
// File's binary content
java.io.File fileContent = new java.io.File(video_file.getPath());
FileContent mediaContent = new FileContent("video/mp4", fileContent);
Log.d("Result", "File Content:-" + fileContent);
showToast("Selected " + fileContent + "to upload");
Log.d("Result", "File URI:-" + fileContent);
// File's meta data.
com.google.api.services.drive.model.File body = new com.google.api.services.drive.model.File();
body.setTitle(fileContent.getName());
body.setMimeType("video/mp4");
Drive.Files f1 = mService.files();
Drive.Files.Insert i1 = f1.insert(body, mediaContent);
com.google.api.services.drive.model.File file = i1.execute();
if (file != null) {
showToast("Uploaded: " + file.getTitle());
Log.d("Result", "File Titile:-" + file.getTitle());
} else {
Log.d("Result", "Else Part");
}
} catch (UserRecoverableAuthIOException e) {
Log.d("Result", "Exception" + e.toString());
} catch (IOException e) {
e.printStackTrace();
Log.d("Result", "Exception" + e.toString());
}
}
});
t.start();
}
public void showToast(final String toast) {
runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(getApplicationContext(), toast, Toast.LENGTH_SHORT).show();
}
});
}
I am very new to OpenCV and trying it for the first time. I ran Tutorial3CameraControl and the camera preview was fine, portrait and full screen. When I integrate the same code into my project, I somehow get the camera in landscape mode and it is not full screen. I know this question has been asked quite a few times, but none of the solutions were helpful. I want my application to open org.opencv.JavaCameraView in full-screen portrait mode. Could someone please help me?
Below is the activity:
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
// OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
}
public void onCameraViewStopped() {
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
return inputFrame.rgba();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
List<String> effects = mOpenCvCameraView.getEffectList();
if (effects == null) {
Log.e(TAG, "Color effects are not supported by device!");
return true;
}
mColorEffectsMenu = menu.addSubMenu("Color Effect");
mEffectMenuItems = new MenuItem[effects.size()];
int idx = 0;
ListIterator<String> effectItr = effects.listIterator();
while(effectItr.hasNext()) {
String element = effectItr.next();
mEffectMenuItems[idx] = mColorEffectsMenu.add(1, idx, Menu.NONE, element);
idx++;
}
mResolutionMenu = menu.addSubMenu("Resolution");
mResolutionList = mOpenCvCameraView.getResolutionList();
mResolutionMenuItems = new MenuItem[mResolutionList.size()];
ListIterator<Size> resolutionItr = mResolutionList.listIterator();
idx = 0;
while(resolutionItr.hasNext()) {
Size element = resolutionItr.next();
mResolutionMenuItems[idx] = mResolutionMenu.add(2, idx, Menu.NONE,
Integer.valueOf(element.width).toString() + "x" + Integer.valueOf(element.height).toString());
idx++;
}
return true;
}
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
if (item.getGroupId() == 1)
{
mOpenCvCameraView.setEffect((String) item.getTitle());
Toast.makeText(this, mOpenCvCameraView.getEffect(), Toast.LENGTH_SHORT).show();
}
else if (item.getGroupId() == 2)
{
int id = item.getItemId();
Size resolution = mResolutionList.get(id);
mOpenCvCameraView.setResolution(resolution);
resolution = mOpenCvCameraView.getResolution();
String caption = Integer.valueOf(resolution.width).toString() + "x" + Integer.valueOf(resolution.height).toString();
Toast.makeText(this, caption, Toast.LENGTH_SHORT).show();
}
return true;
}
//
// @SuppressLint("SimpleDateFormat")
// @Override
// public boolean onTouch(View v, MotionEvent event) {
// Log.i(TAG,"onTouch event");
// SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss");
// String currentDateandTime = sdf.format(new Date());
// String fileName = Environment.getExternalStorageDirectory().getPath() +
// "/sample_picture_" + currentDateandTime + ".jpg";
// mOpenCvCameraView.takePicture(fileName);
// Toast.makeText(this, fileName + " saved", Toast.LENGTH_SHORT).show();
// return false;
// }
@Override
public void onClick(View v) {
Log.i(TAG,"onTouch event");
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss");
String currentDateandTime = sdf.format(new Date());
String fileName = Environment.getExternalStorageDirectory().getPath() +
"/sample_picture_" + currentDateandTime + ".jpg";
mOpenCvCameraView.takePicture(fileName);
Toast.makeText(this, fileName + " saved", Toast.LENGTH_SHORT).show();
}
}
This is the custom view that extends org.opencv.JavaCameraView.
public class Tutorial3View extends JavaCameraView implements PictureCallback {
private static final String TAG = "Sample::Tutorial3View";
private String mPictureFileName;
public Tutorial3View(Context context, AttributeSet attrs) {
super(context, attrs);
}
public List<String> getEffectList() {
return mCamera.getParameters().getSupportedColorEffects();
}
public boolean isEffectSupported() {
return (mCamera.getParameters().getColorEffect() != null);
}
public String getEffect() {
return mCamera.getParameters().getColorEffect();
}
public void setEffect(String effect) {
Camera.Parameters params = mCamera.getParameters();
params.setColorEffect(effect);
mCamera.setParameters(params);
}
public List<Size> getResolutionList() {
return mCamera.getParameters().getSupportedPreviewSizes();
}
public void setResolution(Size resolution) {
disconnectCamera();
mMaxHeight = resolution.height;
mMaxWidth = resolution.width;
connectCamera(getWidth(), getHeight());
}
public Size getResolution() {
return mCamera.getParameters().getPreviewSize();
}
public void takePicture(final String fileName) {
Log.i(TAG, "Taking picture");
this.mPictureFileName = fileName;
// Postview and jpeg are sent in the same buffers if the queue is not empty when performing a capture.
// Clear up buffers to avoid mCamera.takePicture to be stuck because of a memory issue
mCamera.setPreviewCallback(null);
// PictureCallback is implemented by the current class
mCamera.takePicture(null, null, this);
}
@Override
public void onPictureTaken(byte[] data, Camera camera) {
Log.i(TAG, "Saving a bitmap to file");
// The camera preview was automatically stopped. Start it again.
mCamera.startPreview();
mCamera.setPreviewCallback(this);
// Write the image in a file (in jpeg format)
try {
FileOutputStream fos = new FileOutputStream(mPictureFileName);
fos.write(data);
fos.close();
} catch (java.io.IOException e) {
Log.e("PictureDemo", "Exception in photoCallback", e);
}
}
}
Just check the AndroidManifest.xml in the sample: the orientation of the activity is android:screenOrientation="landscape".
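To get portrait instead, that attribute can be changed to android:screenOrientation="portrait" in your own manifest, or the orientation can be locked from code. A rough sketch of the code variant, assuming it goes in the activity that hosts the JavaCameraView:
// import android.content.pm.ActivityInfo;
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Equivalent to android:screenOrientation="portrait" in the manifest.
    setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
    // setContentView(...) with the layout that contains the JavaCameraView goes here.
}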
This is the code that I'm using to capture video for 7 seconds and then automatically upload it to Google Drive using the Google Drive Android API. The problem is that when the upload is finished, I can't play the video. The video saved on the smartphone can be played, but not the uploaded one.
public class Video extends
AppCompatActivity implements
SurfaceHolder.Callback, GoogleApiClient.ConnectionCallbacks,
GoogleApiClient.OnConnectionFailedListener
{
public static File file;
public static GoogleApiClient mGoogleApiClient;
MediaRecorder recorder;
SurfaceHolder holder;
boolean recording = false;
FileOutputStream fileOut;
public static final String TAG = "SaveMe";
private Camera camera;
private final static int CONNECTION_FAILURE_RESOLUTION_REQUEST = 9000;
Calendar calendar = Calendar.getInstance();
final int hours = calendar.get(Calendar.HOUR_OF_DAY);
final int minutes = calendar.get(Calendar.MINUTE);
int seconds = calendar.get(Calendar.SECOND);
Date dateNow = new Date();
SimpleDateFormat dateformatJava = new SimpleDateFormat("dd-MM-yyyy");
final String date = dateformatJava.format(dateNow);
private static final String STATE_RESOLVING_ERROR = "resolving_error";
private static final int REQUEST_RESOLVE_ERROR = 1001;
private static final String DIALOG_ERROR = "dialog_error";
private boolean mResolvingError = false;
@Override
protected void onCreate(Bundle savedInstanceState) {
mResolvingError = savedInstanceState != null && savedInstanceState.getBoolean(STATE_RESOLVING_ERROR, false);
mGoogleApiClient = new GoogleApiClient.Builder(this)
.addApi(Drive.API)
.addScope(Drive.SCOPE_FILE)
.addConnectionCallbacks(this)
.addOnConnectionFailedListener(this)
.build();
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
recorder = new MediaRecorder();
initRecorder();
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_video);
SurfaceView cameraView = (SurfaceView) findViewById(R.id.CameraView);
holder = cameraView.getHolder();
holder.addCallback(this);
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.menu_video, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int id = item.getItemId();
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
private void initRecorder() {
File folder = new File(Environment.getExternalStorageDirectory() + "/SaveMe");
boolean success = true;
if (!folder.exists()) {
success = folder.mkdir();
}
File folder2 = new File(Environment.getExternalStorageDirectory() + "/SaveMe/" + date);
boolean success2 = true;
if (!folder2.exists()) {
success2 = folder2.mkdir();
}
int cameraId = -1;
int numberOfCameras = Camera.getNumberOfCameras();
for (int i = 0; i < numberOfCameras; i++) {
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(i, info);
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
Log.d("saveme", "Camera found");
cameraId = i;
break;
}
}
recorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
recorder.setVideoSource(MediaRecorder.VideoSource.DEFAULT);
CamcorderProfile cpHigh = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
recorder.setProfile(cpHigh);
file = new java.io.File(Environment.getExternalStorageDirectory() + "/SaveMe/" + date + "/Video " + hours + " " + minutes + ".mp4");
recorder.setOutputFile(Environment.getExternalStorageDirectory() + "/SaveMe/" + date + "/Video " + hours + " " + minutes + ".mp4");
Log.i(TAG, "saved");
recorder.setMaxDuration(9000); // 9 seconds
recorder.setMaxFileSize(5000000); // Approximately 5 megabytes
Log.i(TAG, "Start counting");
new Timer().schedule(new TimerTask() {
@Override
public void run() {
runOnUiThread(new Runnable() {
@Override
public void run() {
Log.i(TAG, "Thread 7 sec");
saveFiletoDrive(file);
}
});
}
}, 9000);
}
private void saveFiletoDrive(final File file) {
Log.i(TAG, "Saving....");
Drive.DriveApi.newDriveContents(getGoogleApiClient()).setResultCallback(
new ResultCallback<DriveApi.DriveContentsResult>() {
@Override
public void onResult(DriveApi.DriveContentsResult result) {
String mime = "video/mp4";
if (!result.getStatus().isSuccess()) {
Log.i(TAG, "Failed to create new contents.");
return;
}
Log.i(TAG, "Connection successful, creating new contents...");
OutputStream outputStream = result.getDriveContents().getOutputStream();
FileInputStream fis;
try {
fis = new FileInputStream(file.getPath());
ByteArrayOutputStream baos = new ByteArrayOutputStream();
byte[] buf = new byte[1024];
int n;
while ((n = fis.read(buf)) != -1)
baos.write(buf, 0, n);
outputStream.write(baos.toByteArray());
baos.flush();
outputStream.close();
outputStream = null;
fis.close();
fis = null;
} catch (FileNotFoundException e) {
Log.w(TAG, "FileNotFoundException: " + e.getMessage());
} catch (IOException e1) {
Log.w(TAG, "Unable to write file contents." + e1.getMessage());
}
String title = file.getName();
MetadataChangeSet metadataChangeSet = new MetadataChangeSet.Builder()
.setMimeType(mime).setTitle(title).build();
Drive.DriveApi.getRootFolder(getGoogleApiClient())
.createFile(getGoogleApiClient(), metadataChangeSet, result.getDriveContents())
.setResultCallback(fileCallback);
Log.i(TAG, "Creating new video on Drive (" + title + ")");
}
});
}
final public ResultCallback<DriveFolder.DriveFileResult> fileCallback = new ResultCallback<DriveFolder.DriveFileResult>() {
@Override
public void onResult(DriveFolder.DriveFileResult result) {
if (!result.getStatus().isSuccess()) {
Log.i(TAG, "Error while trying to create the file");
return;
}
Log.i(TAG, "Successfull !");
}
};
private void prepareRecorder() {
recorder.setPreviewDisplay(holder.getSurface());
try {
recorder.prepare();
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
public void surfaceCreated(SurfaceHolder holder) {
File folder = new File(Environment.getExternalStorageDirectory() + "/SaveMe");
boolean success = true;
if (!folder.exists()) {
success = folder.mkdir();
}
File folder2 = new File(Environment.getExternalStorageDirectory() + "/SaveMe/" + date);
boolean success2 = true;
if (!folder2.exists()) {
success2 = folder2.mkdir();
}
prepareRecorder();
recorder.start();
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
public void surfaceDestroyed(SurfaceHolder holder) {
if (recording) {
recorder.stop();
recording = false;
}
recorder.release();
}
public static GoogleApiClient getGoogleApiClient() {
return mGoogleApiClient;
}
@Override
public void onConnected(Bundle bundle) {
Log.w(TAG, "Connected to google ");
}
@Override
public void onConnectionSuspended(int i) {
}
@Override
protected void onStart() {
super.onStart();
if (!mResolvingError) { // more about this later
mGoogleApiClient.connect();
}
}
@Override
protected void onResume() {
super.onResume();
mGoogleApiClient.connect();
}
@Override
protected void onPause() {
super.onPause();
recorder.release();
if (mGoogleApiClient.isConnected()) {
mGoogleApiClient.disconnect();
}
}
@Override
protected void onStop() {
recorder.release();
mGoogleApiClient.disconnect();
super.onStop();
}
@Override
public void onConnectionFailed(ConnectionResult connectionResult) {
Log.w(TAG, "failed to connected to google ");
if (connectionResult.hasResolution()) {
try {
connectionResult.startResolutionForResult(this, CONNECTION_FAILURE_RESOLUTION_REQUEST);
} catch (IntentSender.SendIntentException e) {
e.printStackTrace();
}
} else {
Log.i(TAG, "Location services connection failed with code " + connectionResult.getErrorCode());
}
}
private class MyAsyncTask extends AsyncTask<Void, Void, Void> {
@Override
protected Void doInBackground(Void...params) {
Log.i(TAG, "exec");
saveFiletoDrive(file);
return null;
}
@Override
protected void onPostExecute(Void result) {
}
}
}
I want to capture an image from the Android camera, but without the user's knowledge.
public class Capture extends Activity {
public Uri fileUri;
public String filepath1="";
public static final int DONE=1;
public static final int NEXT=2;
public static final int PERIOD=0;
private Camera camera;
private int cameraId;
private Timer timer;
public static final int MEDIA_TYPE_IMAGE = 2;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.autocapture);
try
{
cameraMethod();
}
catch(Exception e)
{
Log.e("camera","Not",e);
}
}
public void cameraMethod()
{
if (!getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA)) {
} else {
android.hardware.Camera.CameraInfo info =new android.hardware.Camera.CameraInfo();
cameraId = findFrontFacingCamera();
if (cameraId < 0) {
} else {
safeCameraOpen(cameraId);
}
}
fileUri = getOutputMediaFileUri(MEDIA_TYPE_IMAGE);
SurfaceView view = new SurfaceView(this);
try {
camera.setPreviewDisplay(view.getHolder());
} catch (IOException e) {
e.printStackTrace();
}
camera.startPreview();
Camera.Parameters params = camera.getParameters();
params.setJpegQuality(100);
camera.setParameters(params);
timer=new Timer(getApplicationContext(),threadHandler);
timer.execute();
}
////////////////////////////////////thread Handler///////////////////////////////////////
private Handler threadHandler = new Handler() {
public void handleMessage(android.os.Message msg) {
switch(msg.what){
case DONE:
// Trigger camera callback to take pic
camera.takePicture(null, null, photoCallback);
break;
case NEXT:
timer=new Timer(getApplicationContext(),threadHandler);
timer.execute();
break;
}
}
};
Camera.PictureCallback mCall = new Camera.PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
//decode the data obtained by the camera into a Bitmap
//display.setImageBitmap(photo);
Bitmap bitmapPicture = BitmapFactory.decodeByteArray(data, 0, data.length);
Message.obtain(threadHandler, Capture.NEXT, "").sendToTarget();
//Log.v("MyActivity","Length: "+data.length);
}
};
private int findFrontFacingCamera() {
int cameraId = 0;
// Search for the front facing camera
int numberOfCameras = Camera.getNumberOfCameras();
for (int i = 1; i < numberOfCameras; i++) {
CameraInfo info = new CameraInfo();
Camera.getCameraInfo(i, info);
if (info.facing == CameraInfo.CAMERA_FACING_FRONT) {
cameraId = i;
}
else
{
break;
}
}
return cameraId;
}
@Override
protected void onPause() {
if (timer!=null){
timer.cancel(true);
}
releaseCamera();
super.onPause();
}
private boolean safeCameraOpen(int id) {
boolean qOpened = false;
try {
releaseCamera();
camera = Camera.open(id);
qOpened = (camera != null);
} catch (Exception e) {
Log.e(getString(R.string.app_name), "failed to open Camera");
e.printStackTrace();
}
return qOpened;
}
private void releaseCamera() {
if (camera != null) {
camera.stopPreview();
camera.release();
camera = null;
}
}
Camera.PictureCallback photoCallback=new Camera.PictureCallback() {
public void onPictureTaken(byte[] data, Camera camera) {
OutputStream imageFileOS;
try {
imageFileOS = getContentResolver().openOutputStream(fileUri);
imageFileOS.write(data);
imageFileOS.flush();
imageFileOS.close();
Toast.makeText(Capture.this, "Image saved: " + fileUri, Toast.LENGTH_LONG).show();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
String sadf = fileUri.toString();
Log.e("File url for sd card", ""+sadf);
Intent myintent1=new Intent(Capture.this,MailSenderGmail1.class);
myintent1.putExtra("uris", sadf);
myintent1.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
startActivity(myintent1);
finish();
}
};
private static Uri getOutputMediaFileUri(int type){
return Uri.fromFile(getOutputMediaFile(type));
}
/** Create a File for saving an image or video */
private static File getOutputMediaFile(int type){
// Check that the SDCard is mounted
File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES), "TheftImageCapture");
// Create the storage directory(MyCameraVideo) if it does not exist
if (! mediaStorageDir.exists()){
if (! mediaStorageDir.mkdirs()){
Log.d("MyCameraVideo", "Failed to create directory Theft Image.");
return null;
}
}
java.util.Date date= new java.util.Date();
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss")
.format(date.getTime());
File mediaFile;
if(type == MEDIA_TYPE_IMAGE) {
// For unique video file name appending current timeStamp with file name
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"IMG_"+ timeStamp + ".jpg");
} else {
return null;
}
return mediaFile;
}
}
The Timer class:
public class Timer extends AsyncTask<Void, Void, Void> {
Context mContext;
private Handler threadHandler;
public Timer(Context context,Handler threadHandler) {
super();
this.threadHandler=threadHandler;
mContext = context;
}
@Override
protected Void doInBackground(Void...params) {
try {
Thread.sleep(Capture.PERIOD);
} catch (InterruptedException e) {
e.printStackTrace();
}
Message.obtain(threadHandler, Capture.DONE, "").sendToTarget();
return null;
}
}