Getting content from onActivityResult - Android

I am working on a text-to-speech project that imports a .txt file and reads its contents using the Storage Access Framework. After onActivityResult, how do I get the content into a String variable and pass it to the next activity? I need to copy the .txt file's content into an EditText in the next activity.
package com.texttospeech.texttospeech;
import android.app.Activity;
import android.content.Intent;
import android.net.Uri;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import com.texttospeech.Main2Activity;
public class MainActivity extends AppCompatActivity {
private static final int READ_REQUEST_CODE = 42;
private Button button;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
button = (Button) findViewById(R.id.button);
button.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
performFileSearch();
}
});
}
private static final int EDIT_REQUEST_CODE = 42;
/**
* Fires an intent to spin up the "file chooser" UI and select an image.
*/
private void performFileSearch() {
// ACTION_OPEN_DOCUMENT is the intent to choose a file via the system's
// file browser.
Intent intent = new Intent(Intent.ACTION_OPEN_DOCUMENT);
// Filter to only show results that can be "opened", such as a
// file (as opposed to a list of contacts or timezones).
intent.addCategory(Intent.CATEGORY_OPENABLE);
// Filter to show only text files.
intent.setType("text/plain");
startActivityForResult(intent, EDIT_REQUEST_CODE);
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent resultData){
if(requestCode == READ_REQUEST_CODE && resultCode == Activity.RESULT_OK){
Uri uri = null;
if (resultData!= null){
uri = resultData.getData();
}
}
}
}

You open an InputStream for the returned Uri and then read from that stream just as you would from a FileInputStream.
InputStream is = getContentResolver().openInputStream(resultData.getData());
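Building on that, here is a minimal sketch of reading the stream into a String and handing it to the next activity (Main2Activity is the class already imported in the question; the extra key "file_text" is just an example name):
// Sketch: read the picked document into a String and forward it.
// Needs java.io.BufferedReader, java.io.InputStreamReader and java.io.IOException imports.
private void openAndForward(Uri uri) {
    StringBuilder sb = new StringBuilder();
    try (BufferedReader reader = new BufferedReader(
            new InputStreamReader(getContentResolver().openInputStream(uri)))) {
        String line;
        while ((line = reader.readLine()) != null) {
            sb.append(line).append('\n');
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    Intent next = new Intent(this, Main2Activity.class);
    next.putExtra("file_text", sb.toString());
    startActivity(next);
}
// In Main2Activity's onCreate:
// editText.setText(getIntent().getStringExtra("file_text"));
Call openAndForward(uri) from onActivityResult once the Uri has been checked for null.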

I guess this will help you:
String text = readText(uri);
private String readText(Uri uri) {
// A document Uri returned by ACTION_OPEN_DOCUMENT must be opened through the ContentResolver;
// new File(uri.toString()) with a FileInputStream will not work for a content:// Uri.
InputStream inputStream = null;
try {
inputStream = getContentResolver().openInputStream(uri);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
int i;
try {
i = inputStream.read();
while (i != -1) {
byteArrayOutputStream.write(i);
i = inputStream.read();
}
inputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
return byteArrayOutputStream.toString();
}
Let me know if you have any questions.

A problem I ran into was that the class couldn't recognise getContentResolver(), but later I realised I was inside a Fragment, so you need to go through a Context first:
getActivity().getContentResolver()
or simply
context.getContentResolver()

Related

The app crashes when a file is not selected in the file picker

I am trying to build an OCR app using Google's API. This activity lets the user select an image file with the file picker, and text is then extracted from it. The app works fine if the user selects an image file, but if you open the file picker and do not select a file, the app crashes.
Suppose you click the browse button and the file picker opens, but you do not select any file and press the back button; in that case the app crashes. If you select a file, it works fine.
Here I am attaching the code.
package com.example.rahulranjan.synthesize;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.Environment;
import android.provider.MediaStore;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.text.method.ScrollingMovementMethod;
import android.util.Log;
import android.util.SparseArray;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.Toast;
import com.google.android.gms.vision.Frame;
import com.google.android.gms.vision.text.Text;
import com.google.android.gms.vision.text.TextBlock;
import com.google.android.gms.vision.text.TextRecognizer;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.UUID;
public class ocr extends AppCompatActivity {
Button b, convert;
EditText t;
Uri uri;
InputStream inputStream = null;
String str = "";
StringBuffer buf = new StringBuffer();
String text;
Bitmap bitmap;
private String imagePath = "";
TextView txtView;
StringBuilder strBuilder2 = new StringBuilder();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_ocr);
b = (Button) findViewById(R.id.button);
convert = (Button) findViewById(R.id.button2);
t = (EditText) findViewById(R.id.editText);
txtView = (TextView) findViewById(R.id.textView5);
txtView.setMovementMethod(new ScrollingMovementMethod());
b.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Intent intent = new Intent(Intent.ACTION_GET_CONTENT);
intent.setType(" */* ");
startActivityForResult(intent, 7);
}
});
File imagesFolder = new File(Environment.getExternalStorageDirectory(), "Syntesize Text");
imagesFolder.mkdirs();
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
String fileSelected = data.getStringExtra("fileSelected");
Bundle result = data.getExtras();
if (resultCode == RESULT_OK) {
if (data != null) {
// Get the URI of the selected file
Uri uri = data.getData();
t.setText(uri.getPath().toString());
try {
inputStream = getContentResolver().openInputStream(uri);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
try {
bitmap = MediaStore.Images.Media.getBitmap(this.getContentResolver(), uri);
} catch (IOException e) {
e.printStackTrace();
}
TextRecognizer txtRecognizer = new TextRecognizer.Builder(getApplicationContext()).build();
if (!txtRecognizer.isOperational()) {
// Shows if your Google Play services is not up to date or OCR is not supported for the device
// txtView.setText("Detector dependencies are not yet available");
Toast toast = Toast.makeText(getApplicationContext(), "Detector dependencies are not yet available", Toast.LENGTH_LONG);
toast.show();
} else {
// Set the bitmap taken to the frame to perform OCR Operations.
Frame frame = new Frame.Builder().setBitmap(bitmap).build();
SparseArray items = txtRecognizer.detect(frame);
StringBuilder strBuilder = new StringBuilder();
// The following Process is used to show how to use lines & elements as well
for (int i = 0; i < items.size(); i++) {
TextBlock item = (TextBlock) items.valueAt(i);
strBuilder.append(item.getValue());
strBuilder.append(".");
for (Text line : item.getComponents()) {
//extract scanned text lines here
Log.v("lines", line.getValue());
for (Text element : line.getComponents()) {
//extract scanned text words here
Log.v("element", element.getValue());
}
}
}
strBuilder2 = strBuilder;
}
txtView.setText(strBuilder2.toString());
}
}
super.onActivityResult(requestCode, resultCode, data);
}
public void clear(View view)
{
txtView.setText("");
}
public void convert(View view)
{
String data = strBuilder2.toString();
File gpxfile = new File(Environment.getExternalStorageDirectory().toString()+"/Syntesize Text"+"/"+ UUID.randomUUID().toString()+".txt");
try (FileWriter writer = new FileWriter(gpxfile)) {
try {
writer.append(data);
} catch (IOException e1) {
e1.printStackTrace();
}
try {
writer.flush();
} catch (IOException e1) {
e1.printStackTrace();
}
writer.close();
} catch (IOException e1) {
e1.printStackTrace();
}
Toast.makeText(getApplicationContext(), "Saved", Toast.LENGTH_SHORT).show();
}
}
This is most likely a NullPointerException: when the user backs out of the picker without choosing a file, data is null. Move these two lines
String fileSelected = data.getStringExtra("fileSelected");
Bundle result = data.getExtras();
inside the
if (data != null) {
block. The final result:
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (resultCode == RESULT_OK) {
if (data != null) {
// Get the URI of the selected file
String fileSelected = data.getStringExtra("fileSelected");
Bundle result = data.getExtras();
Uri uri = data.getData();
t.setText(uri.getPath().toString());
try {
inputStream = getContentResolver().openInputStream(uri);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
try {
bitmap = MediaStore.Images.Media.getBitmap(this.getContentResolver(), uri);
} catch (IOException e) {
e.printStackTrace();
}
TextRecognizer txtRecognizer = new TextRecognizer.Builder(getApplicationContext()).build();
if (!txtRecognizer.isOperational()) {
// Shows if your Google Play services is not up to date or OCR is not supported for the device
// txtView.setText("Detector dependencies are not yet available");
Toast toast = Toast.makeText(getApplicationContext(), "Detector dependencies are not yet available", Toast.LENGTH_LONG);
toast.show();
} else {
// Set the bitmap taken to the frame to perform OCR Operations.
Frame frame = new Frame.Builder().setBitmap(bitmap).build();
SparseArray items = txtRecognizer.detect(frame);
StringBuilder strBuilder = new StringBuilder();
// The following Process is used to show how to use lines & elements as well
for (int i = 0; i < items.size(); i++) {
TextBlock item = (TextBlock) items.valueAt(i);
strBuilder.append(item.getValue());
strBuilder.append(".");
for (Text line : item.getComponents()) {
//extract scanned text lines here
Log.v("lines", line.getValue());
for (Text element : line.getComponents()) {
//extract scanned text words here
Log.v("element", element.getValue());
}
}
}
strBuilder2 = strBuilder;
}
txtView.setText(strBuilder2.toString());
}
}
super.onActivityResult(requestCode, resultCode, data);
}
It is also better to check the requestCode; the condition should be
if (resultCode == RESULT_OK && data != null && requestCode == 7) {
// your code
}
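The same fix in guard-clause style, as a compact sketch (request code 7 matches the startActivityForResult call in the question):
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    // Bail out early when the picker was cancelled or returned nothing.
    if (requestCode != 7 || resultCode != RESULT_OK || data == null || data.getData() == null) {
        return;
    }
    Uri uri = data.getData();
    // safe to read extras, open the InputStream and run the recognizer here
}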

Camera is giving a very large image on the device

In my project I am capturing an image from the camera, taking the full-size image (instead of the thumbnail). The captured image is very large, 7 to 18 MB, while an image taken with my default camera app was only roughly 2.5 MB. It also takes a long time (6-10 seconds) to load and save to the folder. This happens only on an Android device; on the emulator it works fine. This is my code:
package com.stegano.strenggeheim.fragment;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.media.MediaScannerConnection;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.support.v4.app.Fragment;
import android.support.v4.content.FileProvider;
import android.support.v7.app.AlertDialog;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import com.stegano.strenggeheim.BuildConfig;
import com.stegano.strenggeheim.R;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.UUID;
public class FragmentEncode extends Fragment {
private static final String MESSAGE_IMAGE_SAVED = "Image Saved!";
private static final String MESSAGE_FAILED = "Failed!";
private static final String IMAGE_DIRECTORY = "/StrengGeheim";
private static final int GALLERY = 0, CAMERA = 1;
private File capturedImage;
TextView imageTextMessage;
ImageView loadImage;
public FragmentEncode() {
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setRetainInstance(true);
}
private void galleryIntent() {
Intent galleryIntent = new Intent(Intent.ACTION_PICK,
MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
startActivityForResult(galleryIntent, GALLERY);
}
private void cameraIntent() {
Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
Uri fileUri = getOutputMediaFileUri();
intent.putExtra(MediaStore.EXTRA_OUTPUT, fileUri);
startActivityForResult(intent, CAMERA);
}
private Uri getOutputMediaFileUri() {
try {
capturedImage = getOutputMediaFile();
return FileProvider.getUriForFile(getActivity(), BuildConfig.APPLICATION_ID + ".provider", capturedImage);
}
catch (IOException ex){
ex.printStackTrace();
Toast.makeText(getContext(), MESSAGE_FAILED, Toast.LENGTH_SHORT).show();
}
return null;
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.fragment_encode, container, false);
imageTextMessage = view.findViewById(R.id.imageTextMessage);
loadImage = view.findViewById(R.id.loadImage);
loadImage.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
showPictureDialog();
}
});
return view;
}
private void showPictureDialog(){
AlertDialog.Builder pictureDialog = new AlertDialog.Builder(getContext());
pictureDialog.setTitle("Select Action");
String[] pictureDialogItems = {
"Select photo from gallery",
"Capture photo from camera",
"Cancel"
};
pictureDialog.setItems(pictureDialogItems,
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
switch (which) {
case 0:
galleryIntent();
break;
case 1:
cameraIntent();
break;
case 2:
dialog.dismiss();
break;
}
}
});
pictureDialog.show();
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (resultCode == getActivity().RESULT_CANCELED) {
return;
}
try {
if (requestCode == GALLERY && data != null) {
Bitmap bitmap = getBitmapFromData(data, getContext());
File mediaFile = getOutputMediaFile();
String path = saveImage(bitmap, mediaFile);
Log.println(Log.INFO, "Message", path);
Toast.makeText(getContext(), MESSAGE_IMAGE_SAVED, Toast.LENGTH_SHORT).show();
loadImage.setImageBitmap(bitmap);
imageTextMessage.setVisibility(View.INVISIBLE);
} else if (requestCode == CAMERA) {
final Bitmap bitmap = BitmapFactory.decodeFile(capturedImage.getAbsolutePath());
loadImage.setImageBitmap(bitmap);
saveImage(bitmap, capturedImage);
Toast.makeText(getContext(), MESSAGE_IMAGE_SAVED, Toast.LENGTH_SHORT).show();
imageTextMessage.setVisibility(View.INVISIBLE);
}
} catch (Exception ex) {
ex.printStackTrace();
Toast.makeText(getContext(), MESSAGE_FAILED, Toast.LENGTH_SHORT).show();
}
}
private Bitmap getBitmapFromData(Intent intent, Context context){
Uri selectedImage = intent.getData();
String[] filePathColumn = { MediaStore.Images.Media.DATA };
Cursor cursor = context.getContentResolver()
.query(selectedImage,filePathColumn, null, null, null);
cursor.moveToFirst();
int columnIndex = cursor.getColumnIndex(filePathColumn[0]);
String picturePath = cursor.getString(columnIndex);
cursor.close();
return BitmapFactory.decodeFile(picturePath);
}
private String saveImage(Bitmap bmpImage, File mediaFile) {
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
bmpImage.compress(Bitmap.CompressFormat.PNG, 50, bytes);
try {
FileOutputStream fo = new FileOutputStream(mediaFile);
fo.write(bytes.toByteArray());
MediaScannerConnection.scanFile(getContext(),
new String[]{mediaFile.getPath()},
new String[]{"image/png"}, null);
fo.close();
return mediaFile.getAbsolutePath();
} catch (IOException ex) {
ex.printStackTrace();
}
return "";
}
private File getOutputMediaFile() throws IOException {
File encodeImageDirectory =
new File(Environment.getExternalStorageDirectory() + IMAGE_DIRECTORY);
if (!encodeImageDirectory.exists()) {
encodeImageDirectory.mkdirs();
}
String uniqueId = UUID.randomUUID().toString();
File mediaFile = new File(encodeImageDirectory, uniqueId + ".png");
mediaFile.createNewFile();
return mediaFile;
}
}
Something you could do is download an available API online, or, if need be, download the source code of some online compressor and use it as a model. Never use the source code directly. One that is widely supported across languages is: https://optimus.keycdn.com/support/image-compression-api/
I am taking the image from the camera and getting the File, so I save the image directly to the file location generated by the getOutputMediaFile() method. For that I overload the saveImage() method like this:
private void saveImage(File mediaImage) {
Intent mediaScanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
Uri contentUri = Uri.fromFile(mediaImage);
mediaScanIntent.setData(contentUri);
getContext().sendBroadcast(mediaScanIntent);
}
This method puts the image in the desired file location and also makes it accessible to the Gallery and other apps. It is the same as the galleryAddPic() method in the Taking Photos Simply guide.
In the case of picking a photo from the Gallery, however, I still have to create the File in the desired location and write the bytes of the picked image into it, so the old saveImage() method does not change.
In the onActivityResult method, this is how I use the overloaded saveImage() method:
else if (requestCode == CAMERA) {
saveImage(imageFile);
Bitmap bitmap = BitmapFactory.decodeFile(imageFile.getAbsolutePath());
loadImage.setImageBitmap(bitmap);
Toast.makeText(getContext(), MESSAGE_IMAGE_SAVED, Toast.LENGTH_SHORT).show();
imageTextMessage.setVisibility(View.INVISIBLE);
}
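Not part of the original answer, but worth noting for the file-size problem itself: PNG compression is lossless, so the quality argument passed to compress() is effectively ignored. A rough sketch of downsampling while decoding and saving as JPEG instead (decodeDownsampled is a hypothetical helper, not in the question's code):
// Decode at a reduced resolution to avoid loading the full camera image into memory.
private Bitmap decodeDownsampled(String path, int maxDim) {
    BitmapFactory.Options opts = new BitmapFactory.Options();
    opts.inJustDecodeBounds = true;   // first pass: read only the image dimensions
    BitmapFactory.decodeFile(path, opts);
    int sample = 1;
    while (opts.outWidth / (sample * 2) >= maxDim || opts.outHeight / (sample * 2) >= maxDim) {
        sample *= 2;                  // power-of-two downsampling keeps decoding fast
    }
    opts.inJustDecodeBounds = false;
    opts.inSampleSize = sample;
    return BitmapFactory.decodeFile(path, opts);
}
// In saveImage(), JPEG at ~80 quality typically shrinks camera output far more than PNG:
// bmpImage.compress(Bitmap.CompressFormat.JPEG, 80, bytes);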

Android: How to upload a file to OneDrive?

I am trying to upload a file using the OneDrive Saver sample app. Instead of creating a text file, I would like to select a file from the gallery and then upload it to OneDrive. I have edited the sample app, but the file is not uploaded and I don't get any errors.
The sample app is available on GitHub (https://github.com/OneDrive/onedrive-picker-android/tree/master/SaverSample).
package com.example.onedrivesdk.saversample;
import java.io.*;
import android.accounts.AccountManager;
import android.app.Activity;
import android.content.ContentResolver;
import android.content.Intent;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.*;
import android.provider.MediaStore;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.*;
import com.microsoft.onedrivesdk.saver.*;
public class SaverMain extends Activity {
private static final int DEFAULT_FILE_SIZE_KB = 100;
static final int RESULT_STORE_FILE = 4;
private static Uri mFileUri;
private static final String ONEDRIVE_APP_ID = "4813EF88";
private final OnClickListener mStartPickingListener = new OnClickListener() {
@Override
public void onClick(final View v) {
final Intent galleryIntent = new Intent(Intent.ACTION_PICK);
galleryIntent.setType("*/*");
startActivityForResult(galleryIntent, RESULT_STORE_FILE);
}
};
private ISaver mSaver;
@Override
protected void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_saver_main);
// Create the picker instance
mSaver = Saver.createSaver(ONEDRIVE_APP_ID);
// Add the start saving listener
findViewById(R.id.startSaverButton).setOnClickListener(mStartPickingListener);
}
@Override
protected void onActivityResult(final int requestCode, final int resultCode, final Intent data) {
try {
mSaver.handleSave(requestCode, resultCode, data);
} catch (final SaverException e) {
}
switch (requestCode)
{
case RESULT_STORE_FILE:
mFileUri = data.getData();
saveFileToDrive();
break;
}
}
private void saveFileToDrive()
{
Thread t = new Thread(new Runnable()
{
@Override
public void run()
{
try
{
// Create URI from real path
String path;
path = getPathFromUri(mFileUri);
mFileUri = Uri.fromFile(new java.io.File(path));
ContentResolver cR = SaverMain.this.getContentResolver();
// File's binary content
java.io.File fileContent = new java.io.File(mFileUri.getPath());
View v = null;
final Activity activity = (Activity) v.getContext();
mSaver.startSaving(activity, path, Uri.parse(mFileUri.toString()));
} catch (Exception e) {
}
}
});
t.start();
}
public String getPathFromUri(Uri uri)
{
String[] projection = { MediaStore.Images.Media.DATA };
Cursor cursor = getContentResolver().query(uri, projection, null, null, null);
int column_index = cursor
.getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
cursor.moveToFirst();
return cursor.getString(column_index);
}
}
I think your example pretty much works. I just cleaned it up a little and created a branch with your sample in it. Notably, I'd expect a null reference here; if you remove these lines, it works nicely.
View v = null;
final Activity activity = (Activity) v.getContext();
See the full working example here: https://github.com/peternied/onedrive-picker-android/blob/GalleryPicker/SaverSample/src/com/example/onedrivesdk/saversample/SaverMain.java
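For reference, a minimal sketch of the cleaned-up run() body with the null View removed and the enclosing Activity passed directly (same calls and signature as in the question):
@Override
public void run() {
    try {
        // Resolve the picked content Uri to a file path, as in the original code.
        String path = getPathFromUri(mFileUri);
        mFileUri = Uri.fromFile(new java.io.File(path));
        // Pass the enclosing Activity directly instead of calling getContext() on a null View.
        mSaver.startSaving(SaverMain.this, path, Uri.parse(mFileUri.toString()));
    } catch (Exception e) {
        e.printStackTrace();
    }
}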

Taking a picture via camera intent in Android

I am using the following code to take a photo on a button click.
package com.example.clickpic;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import android.app.Activity;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
public class MainActivity extends Activity {
int TAKE_PHOTO_CODE = 0;
public static int count=0;
private ImageView imageView;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
//here,we are making a folder named picFolder to store pics taken by the camera using this application
final String dir = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES) + "/picFolder/";
File newdir = new File(dir);
newdir.mkdirs();
this.imageView = (ImageView)this.findViewById(R.id.imageView1);
Button capture = (Button) findViewById(R.id.btnCapture);
capture.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
// here,counter will be incremented each time,and the picture taken by camera will be stored as 1.jpg,2.jpg and likewise.
count++;
String file = dir+count+".jpg";
File newfile = new File(file);
try {
newfile.createNewFile();
} catch (IOException e) {}
Uri outputFileUri = Uri.fromFile(newfile);
Intent cameraIntent = new Intent(android.provider.MediaStore.ACTION_IMAGE_CAPTURE);
startActivityForResult(cameraIntent, 1888);
}
});
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == 1888 && resultCode == RESULT_OK) {
Bitmap photo = (Bitmap) data.getExtras().get("data");
imageView.setImageBitmap(photo);
}
}
}
The code works, but it just opens the camera activity. What must I do so that, on the button click, it takes a picture and saves it as well?
Right now this just opens the phone's camera app and expects me to take the picture and press OK. I don't want that.
All I want is that, when I tap the button in my app, it takes a picture without expecting any additional input from me.
Edit
I have already tried to create a camera app of my own, but I ran into some issues.
That's why I am trying this approach. Camera API not working on KITKAT
"All I want is that, when I tap the button in my app, it takes a picture without expecting any additional input from me."
That is not achievable with a camera intent such as android.provider.MediaStore.ACTION_IMAGE_CAPTURE. You have to implement the camera handling in your own code instead of relying on the device's native camera application.
Look at the Building a Camera App portion of the documentation.
I have written a camera class that takes a picture, corrects the orientation (some devices take photos in landscape by default) and saves the photo. You can check it at the link below:
Camera capture orientation on samsung devices in android
Edit: sorry, the savePhoto functions were not included in my example. Adding them now.
savePhoto function:
public void savePhoto(Bitmap bmp) {
imageFileFolder = new File(Environment.getExternalStorageDirectory(),
cc.getDirectoryName());
imageFileFolder.mkdir();
FileOutputStream out = null;
Calendar c = Calendar.getInstance();
String date = fromInt(c.get(Calendar.MONTH))
+ fromInt(c.get(Calendar.DAY_OF_MONTH))
+ fromInt(c.get(Calendar.YEAR))
+ fromInt(c.get(Calendar.HOUR_OF_DAY))
+ fromInt(c.get(Calendar.MINUTE))
+ fromInt(c.get(Calendar.SECOND));
imageFileName = new File(imageFileFolder, date.toString() + ".jpg");
try {
out = new FileOutputStream(imageFileName);
bmp.compress(Bitmap.CompressFormat.JPEG, 70, out);
out.flush();
out.close();
scanPhoto(imageFileName.toString());
out = null;
} catch (Exception e) {
e.printStackTrace();
}
}
scanPhoto function:
public void scanPhoto(final String imageFileName) {
geniusPath = imageFileName;
msConn = new MediaScannerConnection(MyClass.this,
new MediaScannerConnectionClient() {
public void onMediaScannerConnected() {
msConn.scanFile(imageFileName, null);
}
@Override
public void onScanCompleted(String path, Uri uri) {
msConn.disconnect();
}
});
msConn.connect();
}
SavePhotoTask class:
class SavePhotoTask extends AsyncTask<byte[], String, String> {
@Override
protected String doInBackground(byte[]... jpeg) {
File photo = new File(Environment.getExternalStorageDirectory(),
"photo.jpg");
if (photo.exists()) {
photo.delete();
}
try {
FileOutputStream fos = new FileOutputStream(photo.getPath());
fos.write(jpeg[0]);
fos.close();
} catch (java.io.IOException e) {
}
return (null);
}
}
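The answer does not show how the task is triggered; presumably it is started from a Camera.PictureCallback, roughly like this (a sketch assuming the deprecated android.hardware.Camera API that the rest of that answer uses):
// Hand the JPEG bytes from the camera callback to the background task above.
Camera.PictureCallback jpegCallback = new Camera.PictureCallback() {
    @Override
    public void onPictureTaken(byte[] data, Camera camera) {
        new SavePhotoTask().execute(data);
        camera.startPreview();   // resume the preview after capture
    }
};
// mCamera.takePicture(null, null, jpegCallback);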

Variable types of videos and images

I am working on an application and have finished two modules that invoke the phone's native camera to take a snapshot and to record a video. I intend to have the app send the image and the video captured by the phone to a website I plan to create. For textual information I can store the data as strings, but for the image and the video I am not sure whether I should leave them as Uris upon submission. Below are my picture and video programs, respectively. Thanks.
Picture code:
package com.project;
import java.io.File;
import android.app.Activity;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.Toast;
public class MyPicture extends Activity {
/** Called when the activity is first created. */
/*constant and variable created so as to work with the taken pictures*/
private static int TAKE_PICTURE = 1;
private Uri outputFileUri;
Uri imageUri;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.pic);
Button pictureButton=(Button) findViewById(R.id.pictureButton);
pictureButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View arg0) {
// TODO Auto-generated method stub
Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
File file = new File(Environment.getExternalStorageDirectory(),"test.jpg");
outputFileUri = Uri.fromFile(file);
intent.putExtra(MediaStore.EXTRA_OUTPUT, outputFileUri);
startActivityForResult(intent, TAKE_PICTURE);
}
});
}
@Override
protected void onActivityResult(int requestCode,int resultCode, Intent data){
if (requestCode == TAKE_PICTURE){
imageUri = data.getData();
//do something about the image in the in outputFileUri
Toast.makeText(MyPicture.this,
"Picture successfully taken",
Toast.LENGTH_SHORT).show();
Intent i = new Intent(MyPicture.this,/*program execution proceeds back to MyPicture, our start page after success of image takin*/
Myindex.class);
startActivity(i);
}else{
Toast.makeText(MyPicture.this,
"Picture Unsuccessfully taken",
Toast.LENGTH_SHORT).show();
Intent i = new Intent(MyPicture.this,/*program execution proceeds back to MyPicture, so we can redo the recording*/
MyPicture.class);
startActivity(i);
}
}
}
Video code:
package com.project;
import android.app.Activity;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.provider.MediaStore;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.Toast;
public class MyVideo extends Activity {
/*program for the vid button*/
private static int RECORD_VIDEO = 1;
private static int HIGH_VIDEO_QUALITY = 1;
//private static int MMS_VIDEO_QUALITY = 0;
Uri recordedVideo;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.vid);
Button videoButton=(Button) findViewById(R.id.videoButton);
videoButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View arg0) {
// TODO Auto-generated method stub
Intent intent = new Intent(MediaStore.ACTION_VIDEO_CAPTURE);
//intent.putExtra(MediaStore.EXTRA_OUTPUT, output);
intent.putExtra(MediaStore.EXTRA_VIDEO_QUALITY, HIGH_VIDEO_QUALITY);
startActivityForResult(intent, RECORD_VIDEO);
}
});
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data){
if (requestCode == RECORD_VIDEO){
recordedVideo = data.getData();
//to do something with the recorded video
//we shall insert this information in the database
Toast.makeText(MyVideo.this,
"Video successfully recorded",
Toast.LENGTH_SHORT).show();
Intent i = new Intent(MyVideo.this,/*program execution proceeds back to Myindex, our start page*/
Myindex.class);
startActivity(i);
}
else{
/*Happens after unsuccessfull recording of video*/
Toast.makeText(MyVideo.this,
"Video Unsuccessfully recorded",
Toast.LENGTH_SHORT).show();
Intent i = new Intent(MyVideo.this,/*program execution proceeds back to MyVideo, so we can redo the recording*/
MyVideo.class);
startActivity(i);
}
}
}
For images, what I usually do is decode the data as a Bitmap and then send it via HTTP POST using the multipart content type.
You can decode the image file as a Bitmap using BitmapFactory.decodeFile.
Here is an example of how I send the Bitmap as multipart using the Apache library:
public String doHttpMultipart(String url,
List<NameValuePair> pairs,
Bitmap bitmap,
String fileName) throws IOException,
ClientProtocolException,
UnsupportedEncodingException {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
bitmap.compress(CompressFormat.PNG, 100, bos);
byte[] imageData = bos.toByteArray();
ByteArrayBody byteArrayBody = new ByteArrayBody(imageData, fileName);
MultipartEntity reqEntity =
new MultipartEntity(HttpMultipartMode.BROWSER_COMPATIBLE);
reqEntity.addPart("image", byteArrayBody);
for(NameValuePair p : pairs) {
reqEntity.addPart(p.getName(), new StringBody(p.getValue()));
}
HttpPost request = new HttpPost(url);
request.setEntity(reqEntity);
HttpClient client = new DefaultHttpClient();
HttpResponse httpResponse = client.execute(request);
String response = "";
BufferedReader in = null;
try {
response = super.readHttpStream(response, in, httpResponse);
} catch(IllegalStateException e) {
throw new IllegalStateException();
} catch(IOException e) {
throw new IOException();
} finally {
if (in != null) {
try {
in.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
return response;
}
For a video file it should be much the same: read it as an array of bytes (or attach the file directly) and send it with multipart.
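A rough sketch of the video case with the same Apache multipart classes (the part name "video", the MIME type and the videoPath variable are placeholders):
// Attach the recorded video file directly; FileBody streams it without loading it all into memory.
File videoFile = new File(videoPath); // e.g. resolved from the Uri returned in onActivityResult
MultipartEntity reqEntity = new MultipartEntity(HttpMultipartMode.BROWSER_COMPATIBLE);
reqEntity.addPart("video", new FileBody(videoFile, "video/mp4"));
HttpPost request = new HttpPost(url);
request.setEntity(reqEntity);
HttpResponse httpResponse = new DefaultHttpClient().execute(request);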
