How to capture an image with a custom button from an already opened front camera - Android

I'm working on an augmented reality app that displays 3D models on the user's face. I made a class that extends ArFragment, in which I set the front camera to open once a model is selected. I also made a custom button for capturing the image, but I don't know how to capture the image itself, with the 3D model displayed, and store it on the phone (preferably without the custom buttons showing in the picture). I tried some code I found on the internet for programmatic screenshots, but it captures only the buttons (the arrow and the circle). Here is how the activity looks:
Here is the class that extends ArFragment:
public class CustomArFragment extends ArFragment {
@Override
protected Config getSessionConfiguration(Session session) {
Config config = new Config(session);
config.setAugmentedFaceMode(Config.AugmentedFaceMode.MESH3D);
this.getArSceneView().setupSession(session);
return config;
}
@Override
protected Set<Session.Feature> getSessionFeatures() {
return EnumSet.of(Session.Feature.FRONT_CAMERA);
}
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
FrameLayout frameLayout = (FrameLayout) super.onCreateView(inflater, container, savedInstanceState);
getPlaneDiscoveryController().hide();
getPlaneDiscoveryController().setInstructionView(null);
return frameLayout;
}
}
Here is the activity in which I set the 3D model:
public class ArActivity extends AppCompatActivity {
int pos;
private int[] images = {R.raw.glasses1, R.raw.glasses3, R.raw.glasses4, R.raw.glasses5,
R.raw.glasses6, R.raw.glasses7, R.raw.glasses8, R.raw.glasses9, R.raw.glasses10};
private ModelRenderable modelRenderable;
private boolean isAdded = false;
@Override
protected void onCreate(Bundle savedInstanceState) {
Bundle extras = getIntent().getExtras();
if (extras != null) {
pos = extras.getInt("pos");
}
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_ar);
CustomArFragment customArFragment = (CustomArFragment) getSupportFragmentManager().findFragmentById(R.id.arFragment);
ModelRenderable
.builder().setSource(this, images[pos])
.build()
.thenAccept(renderable -> {
modelRenderable = renderable;
modelRenderable.setShadowCaster(false);
modelRenderable.setShadowReceiver(false);
});
customArFragment.getArSceneView().setCameraStreamRenderPriority(Renderable.RENDER_PRIORITY_FIRST);
customArFragment.getArSceneView().getScene().addOnUpdateListener(frameTime -> {
if (modelRenderable == null)
return;
Frame frame = customArFragment.getArSceneView().getArFrame();
Collection<AugmentedFace> augmentedFaces = frame.getUpdatedTrackables(AugmentedFace.class);
for (AugmentedFace augmentedFace : augmentedFaces) {
if (isAdded)
return;
AugmentedFaceNode augmentedFaceNode = new AugmentedFaceNode(augmentedFace);
augmentedFaceNode.setParent(customArFragment.getArSceneView().getScene());
augmentedFaceNode.setFaceRegionsRenderable(modelRenderable);
isAdded = true;
}
});
}
}

I had a similar problem before: capturing the camera image together with the placed 3D models.
I solved it with PixelCopy, and I'm adding my code for that part below. Note that because PixelCopy.request() is handed the ArSceneView (a SurfaceView), it copies only the rendered surface, so overlay views such as your custom buttons will not appear in the result.
I'm sorry that it's not a straight answer to your problem, but I hope it gives you some ideas.
ImageButton btn3 = (ImageButton)findViewById(R.id.camera_btn);
btn3.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
takePhoto();
}
});
private String generateFilename() {
// Generate a file name based on the current time
String date =
new SimpleDateFormat("yyyyMMddHHmmss", java.util.Locale.getDefault()).format(new Date());
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES) + File.separator + "IM/" + date + "_screenshot.jpg";
}
private void saveBitmapToDisk(Bitmap bitmap, String filename) throws IOException {
// Create an IM directory in the user's gallery and save the Bitmap there in JPEG format
File out = new File(filename);
if (!out.getParentFile().exists()) {
out.getParentFile().mkdirs();
}
try (FileOutputStream outputStream = new FileOutputStream(filename);
ByteArrayOutputStream outputData = new ByteArrayOutputStream()) {
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, outputData);
outputData.writeTo(outputStream);
outputStream.flush();
outputStream.close();
} catch (IOException ex) {
throw new IOException("Failed to save bitmap to disk", ex);
}
}
private void takePhoto(){
// Use PixelCopy to capture the camera image and the objects into a bitmap
final String filename = generateFilename();
ArSceneView view = arFragment.getArSceneView();
final Bitmap bitmap = Bitmap.createBitmap(view.getWidth(),view.getHeight(),
Bitmap.Config.ARGB_8888);
final HandlerThread handlerThread = new HandlerThread("PixelCopier");
handlerThread.start();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
PixelCopy.request(view, bitmap, (copyResult) -> {
if (copyResult == PixelCopy.SUCCESS) {
try {
saveBitmapToDisk(bitmap, filename);
// Run media scanning
Uri uri = Uri.parse("file://" + filename);
Intent i = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
i.setData(uri);
sendBroadcast(i);
} catch (IOException e) {
Toast toast = Toast.makeText(AR_Activity.this, e.toString(),
Toast.LENGTH_LONG);
toast.show();
return;
}
Snackbar snackbar = Snackbar.make(findViewById(android.R.id.content),
"스크린샷이 저장되었습니다.", Snackbar.LENGTH_LONG);
snackbar.setAction("View in gallery", v -> {
// The saved screenshot can be viewed from within the app
File photoFile = new File(filename);
Uri photoURI = FileProvider.getUriForFile(AR_Activity.this,
AR_Activity.this.getPackageName() + ".ar.codelab.name.provider",
photoFile);
Intent intent = new Intent(Intent.ACTION_VIEW, photoURI);
intent.setDataAndType(photoURI, "image/*");
intent.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION);
startActivity(intent);
});
snackbar.show();
} else {
Toast toast = Toast.makeText(AR_Activity.this,
"스크린샷 저장 실패!: " + copyResult, Toast.LENGTH_LONG);
toast.show();
}
handlerThread.quitSafely();
}, new Handler(handlerThread.getLooper()));
}
}

Related

Tesseract in Android (Tess Two): Application crashes while using nep.traineddata and input from camera is not working

I am not much of an expert. I am using tesseract (tess-two) to develop an Android application for my college project. The application crashes when I select the Nepali trained data.
Also, the application works only with an image selected from the gallery; an image taken from the camera returns an empty result. So, yes, I am in big trouble here!
Here is a snippet of the LogCat from when I used eng.traineddata:
04-26 22:52:02.286 26503-26509/com.l.android.neptext I/zygote64: Do partial code cache collection, code=124KB, data=69KB
After code cache collection, code=124KB, data=69KB
Increasing code cache capacity to 512KB
04-26 22:52:02.347 26503-26520/com.l.android.neptext I/vndksupport: sphal namespace is not configured for this process. Loading /vendor/lib64/hw/gralloc.msm8996.so from the current namespace instead.
04-26 22:52:08.559 26503-26503/com.l.android.neptext D/com.l.android.neptext.MainActivity$3#2314718: onClick:
04-26 22:52:08.612 26503-26503/com.l.android.neptext D/AppTracker: App Event: stop
04-26 22:52:21.431 26503-26503/com.l.android.neptext D/AppTracker: App Event: start
04-26 22:52:21.603 26503-26589/com.l.android.neptext I/com.l.android.neptext.MainActivity$5#8074bd5: bitmap size8294400
04-26 22:52:22.232 26503-26589/com.l.android.neptext I/Tesseract(native): Initialized Tesseract API with language=eng
04-26 22:52:35.031 26503-26509/com.l.android.neptext I/zygote64: Compiler allocated 6MB to compile void android.view.ViewRootImpl.performTraversals()
04-26 22:52:39.452 26503-26503/com.l.android.neptext D/AppTracker: App Event: stop
Another snippet when I use nep.traineddata:
04-26 22:53:44.007 26764-26769/com.l.android.neptext I/zygote64: Compiler allocated 6MB to compile void android.view.ViewRootImpl.performTraversals()
04-26 22:53:44.095 26764-26780/com.l.android.neptext D/OpenGLRenderer: endAllActiveAnimators on 0x7e841e2000 (DropDownListView) with handle 0x7e7abf6840
04-26 22:53:46.978 26764-26764/com.l.android.neptext D/com.l.android.neptext.MainActivity$3#cc3f660: onClick:
04-26 22:53:47.033 26764-26764/com.l.android.neptext D/AppTracker: App Event: stop
04-26 22:54:00.276 26764-26764/com.l.android.neptext D/AppTracker: App Event: start
04-26 22:54:00.449 26764-26815/com.l.android.neptext I/com.l.android.neptext.MainActivity$5#c8be754: bitmap size8294400
The app crashes without any other error message.
Project code:
public class MainActivity extends AppCompatActivity {
public static Button camera,gallery,cut,copy,speech;
public static EditText text;
public static TextView textView;
public static final int GALLERY_ACTION=100;
public static final int CAMERA_ACTION=101;
public static Uri imageuri=null;
Handler texthandler;
TextToSpeech t1;
Spinner spinner;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
camera=findViewById(R.id.button1);
gallery=findViewById(R.id.button2);
cut=findViewById(R.id.cut_btn);
copy=findViewById(R.id.copy_btn);
speech=findViewById(R.id.speech_btn);
text = findViewById(R.id.result_text);
textView=findViewById(R.id.textView);
spinner=findViewById(R.id.spinner);
List<String> categories = new ArrayList<String>();
categories.add("eng");
categories.add("nep");
// Creating adapter for spinner
ArrayAdapter<String> dataAdapter = new ArrayAdapter<String>(this, android.R.layout.simple_spinner_item, categories);
// Drop down layout style - list view with radio button
dataAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
// attaching data adapter to spinner
spinner.setAdapter(dataAdapter);
t1=new TextToSpeech(getApplicationContext(), new TextToSpeech.OnInitListener() {
@Override
public void onInit(int status) {
if(status != TextToSpeech.ERROR) {
t1.setLanguage(Locale.UK);
}
}
});
texthandler=new Handler(Looper.myLooper()){
@Override
public void handleMessage(Message msg) {
String t=(String)msg.obj;
if(t==null){
Toast.makeText(getApplicationContext(),"Cannot find any letters ",Toast.LENGTH_LONG).show();
}
text = findViewById(R.id.result_text);
text.setText((String)msg.obj);
}
};
onButtonClickListener();
// copying the training data
try {
MainApplication.instance.copydata("eng");
MainApplication.instance.copydata("nep");
}catch(Exception e){
Log.d("OcrManager",e.getMessage());
}
}
public void copy(View view){
text = findViewById(R.id.result_text);
Log.d(this.toString(),text.getText().toString());
ClipboardManager clipboardManager=(ClipboardManager)getApplicationContext().getSystemService(Context.CLIPBOARD_SERVICE);
ClipData clipData=ClipData.newPlainText("label",text.getText().toString());
clipboardManager.setPrimaryClip(clipData);
Toast.makeText(MainActivity.this,"Text copied to clipbaord",Toast.LENGTH_LONG).show();
}
public void cut(View view){
text = findViewById(R.id.result_text);
Log.d(this.toString(),text.getText().toString());
ClipboardManager clipboardManager=(ClipboardManager)getApplicationContext().getSystemService(Context.CLIPBOARD_SERVICE);
ClipData clipData=ClipData.newPlainText("label",text.getText().toString());
clipboardManager.setPrimaryClip(clipData);
text.setText("");
Toast.makeText(MainActivity.this,"Text copied to clipbaord",Toast.LENGTH_LONG).show();
}
public void speech(View view){
text = findViewById(R.id.result_text);
Log.d(this.toString(),text.getText().toString());
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
t1.speak(text.getText().toString(),TextToSpeech.QUEUE_FLUSH,null,null);
} else {
t1.speak(text.getText().toString(), TextToSpeech.QUEUE_FLUSH, null);
}
Toast.makeText(this,"Speaking now",Toast.LENGTH_LONG).show();
}
private void onButtonClickListener(){
gallery.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
try{
Log.d(this.toString(), "onClick: ");
Intent galleryIntent=new Intent(Intent.ACTION_PICK, MediaStore.Images.Media.INTERNAL_CONTENT_URI);
startActivityForResult(galleryIntent,GALLERY_ACTION);
gallery.setVisibility(View.GONE);
camera.setVisibility(View.GONE);
textView.setVisibility(View.GONE);
cut.setVisibility(View.VISIBLE);
copy.setVisibility(View.VISIBLE);
speech.setVisibility(View.VISIBLE);
text.setVisibility(View.VISIBLE);
spinner.setVisibility(View.GONE);
}catch(Exception e){
Log.d("Main actiity",e.getMessage());
}
}
});
camera.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
try{
Intent cameraIntent=new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
startActivityForResult(cameraIntent,CAMERA_ACTION);
gallery.setVisibility(View.GONE);
camera.setVisibility(View.GONE);
textView.setVisibility(View.GONE);
cut.setVisibility(View.VISIBLE);
copy.setVisibility(View.VISIBLE);
speech.setVisibility(View.VISIBLE);
text.setVisibility(View.VISIBLE);
spinner.setVisibility(View.GONE);
}catch(Exception e){
Log.d("Main actiity",e.getMessage());
}
}
});
}
@Override
protected void onActivityResult(int requestCode, int resultCode, final Intent data) {
if (resultCode == RESULT_OK && data != null) {
if (requestCode == GALLERY_ACTION) {
imageuri = data.getData();
try {
MainApplication.instance.showToast("Rendering Started,Please wait");
new Thread(new Runnable() {
@Override
public void run() {
Looper.prepare();
try {
Bitmap bitmap = MediaStore.Images.Media.getBitmap(getContentResolver(), imageuri);
Log.i(this.toString(),"bitmap size"+bitmap.getByteCount());
OcrManager ocrManager= new OcrManager();
ocrManager.ocrStart(spinner.getSelectedItem().toString());
final String s = ocrManager.getText(bitmap);
Message m = new Message();
m.obj = s;
texthandler.sendMessage(m);
} catch (Exception e) {
Log.e("Main actiity", e.getMessage());
}
}
}).start();
} catch (Exception e) {
Log.d(this.toString(), e.getMessage());
}
}else if(requestCode==CAMERA_ACTION){
try {
MainApplication.instance.showToast("Rendering Started,Please wait");
new Thread(new Runnable() {
@Override
public void run() {
Looper.prepare();
try {
Bundle bundle=data.getExtras();
Bitmap bitmap = (Bitmap)bundle.get("data");
OcrManager ocrManager= new OcrManager();
ocrManager.ocrStart(spinner.getSelectedItem().toString());
final String s = ocrManager.getText(bitmap);
Message m = new Message();
m.obj = s;
texthandler.sendMessage(m);
} catch (Exception e) {
Log.d("Main actiity", e.getMessage());
}
}
}).start();
} catch (Exception e) {
Log.d(this.toString(), e.getMessage());
}
}
}
}
}
OcrManager class:
public class OcrManager {
public static TessBaseAPI base=null;
public void ocrStart(String lang) {
try{
base = new TessBaseAPI();
String dataDirectory = MainApplication.instance.tessDataPathParent();
base.init(dataDirectory, lang);
}catch(Exception e){ Log.e("OcrManager", "Failed to initialize Tesseract", e); }
}
public String getText(Bitmap bitmap){
base.setImage(bitmap);
MainApplication.instance.showToast("Conversion Started");
String out=base.getUTF8Text();
MainApplication.instance.showToast("Conversion finished");
return out;
}
}
There is also another class that copies the Nepali and English trained data to storage successfully.
So, what am I doing wrong here? There is no result when the camera is used, and the app crashes when the Nepali trained data is used with the gallery source. Please help me out here; I do not want to have to do this again in a resit.
The other thing I had done wrong was the value of dataDirectory; just changing its value made the application a lot more stable, and after that Tesseract found and initialized nep.traineddata. (TessBaseAPI.init() expects the path of the directory that contains the tessdata folder, not the tessdata folder itself.)
private static final String dataDirectory = Environment.getExternalStorageDirectory().getAbsolutePath() + "/com.lokensapkota.android.neptext/";
I also changed the whole function in the MainApplication class, which was just there to copy the trained data, into a method and ran it in the background:
private void copyAssets() {
AssetManager assetManager = getAssets();
String[] files = null;
try {
files = assetManager.list("trainneddata");
} catch (IOException e) {
Log.e("tag", "Failed to get asset file list.", e);
}
for(String filename : files) {
Log.i("files",filename);
InputStream in = null;
OutputStream out = null;
String dirout= dataDirectory + "tessdata/";
File outFile = new File(dirout, filename);
if(!outFile.exists()) {
try {
in = assetManager.open("trainneddata/"+filename);
(new File(dirout)).mkdirs();
out = new FileOutputStream(outFile);
copyFile(in, out);
in.close();
in = null;
out.flush();
out.close();
out = null;
} catch (IOException e) {
Log.e("tag", "Error creating files", e);
}
}
}
}
private void copyFile(InputStream in, OutputStream out) throws IOException {
byte[] buffer = new byte[1024];
int read;
while((read = in.read(buffer)) != -1){
out.write(buffer, 0, read);
}
}
Even though the previous MainApplication class copied both the English and Nepali trained data to the app directory, nep.traineddata failed to initialize. This method does the same thing, but it made things work.
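For reference, here is a minimal sketch of what running the copy step in the background can look like (the thread wrapper is my assumption about what "ran it in the background" means; copyAssets() is the method shown above):
// Hypothetical call site, e.g. in MainApplication.onCreate():
new Thread(new Runnable() {
    @Override
    public void run() {
        // Copy the .traineddata files off the UI thread so startup is not blocked.
        copyAssets();
    }
}).start();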

Take picture in non-activity class

I want to take a picture in a non-activity class. However, I get the error: Attempt to invoke virtual method on a null object reference. Here is my code:
MainActivity.java
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
Print myprint = new Print();
myprint.Picture();
}
Print.java
public class Print {
private static final String TAG = "Secret";
Context context;
public void Picture() {
Intent intent = new Intent(MediaStore.ACTION_VIDEO_CAPTURE);
((Activity) this.context).startActivityForResult(intent, 1);
Log.v(TAG, "I took a picture.");
}
}
I don't know what is going wrong. Can you tell me why this happens and how I can fix it? I don't want my Print class to be an Activity class. I know how to take a picture if Print is an Activity class. 0.0
Because your context is null; you haven't initialized it yet.
If you want to create a class that opens your camera, try this:
public class Print{
private static final String TAG = "Secret";
private Context context;
private File photoCaptured;
private static final int REQUEST_TAKE_PHOTO = 1; // the original snippet used this constant without defining it
public Print(Context context) {
this.context = context;
}
public void openCamera() {
Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
String timeStamp = System.currentTimeMillis() + "";
String fileName = "JPEG_" + timeStamp;
if (!Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
photoCaptured = new File(Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES),
fileName + ".jpg");
} else {
String dir = Environment.getExternalStorageDirectory() + File.separator + "myDirectory";
//create folder
File folder = new File(dir); //folder name
if (!folder.exists()) {
folder.mkdirs();
}
//create file
photoCaptured = new File(dir, fileName + ".jpg");
}
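// Note: on API 24+ a file:// Uri here triggers FileUriExposedException;
// a FileProvider content Uri would be needed instead (not shown in the original snippet).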
Uri temUri = Uri.fromFile(photoCaptured);
intent.putExtra(MediaStore.EXTRA_OUTPUT, temUri);
intent.putExtra(MediaStore.EXTRA_VIDEO_QUALITY, 0);
if (intent.resolveActivity(context.getPackageManager()) != null) {
((AppCompatActivity) context).startActivityForResult(intent, REQUEST_TAKE_PHOTO);
}
}
}
Then, in your MainActivity, initialize Print and use it:
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
Print myprint = new Print(this);
myprint.openCamera();
}
For your question, just initialize your Print class and it will work. (This works for your case only, but it is not the best approach!)
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
Print myprint = new Print(this);
myprint.Picture();
}
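One thing both snippets leave out: because the camera is launched with startActivityForResult(), the result is delivered to the Activity, not to Print. Here is a minimal sketch of receiving it in MainActivity (the request code value matches the snippets above; the thumbnail behaviour described in the comment is the stock camera default, not something from the original answer):
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == 1 && resultCode == RESULT_OK) {
        // Without EXTRA_OUTPUT the camera returns only a small thumbnail in the
        // "data" extra; with EXTRA_OUTPUT set (as in openCamera()), the full
        // image is written to the file that was passed in.
        Log.v("Secret", "Picture result received.");
    }
}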

How to set directory for sending files using Android Beam

I am working on an app that allows the user to select a file from external storage and send it using Android Beam.
Here is the FileBrowser activity, which selects a file from a directory and returns the file name and directory path to the main activity:
public class FileBrowser extends Activity {
private String root;
private String currentPath;
private ArrayList<String> targets;
private ArrayList<String> paths;
private File targetFile;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_file_browser);
getActionBar().setDisplayHomeAsUpEnabled(true);
root = "/";
currentPath = root;
targets = null;
paths = null;
targetFile = null;
showDir(currentPath);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.menu_file_browser, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case android.R.id.home:
NavUtils.navigateUpFromSameTask(this);
return true;
}
return super.onOptionsItemSelected(item);
}
public void selectDirectory(View view) {
File f = new File(currentPath);
targetFile = f;
//Return target File to activity
returnTarget();
}
public void setCurrentPathText(String message)
{
TextView fileTransferStatusText = (TextView) findViewById(R.id.current_path);
fileTransferStatusText.setText(message);
}
private void showDir(String targetDirectory){
setCurrentPathText("Current Directory: " + currentPath);
targets = new ArrayList<String>();
paths = new ArrayList<String>();
File f = new File(targetDirectory);
File[] directoryContents = f.listFiles();
if (!targetDirectory.equals(root))
{
targets.add(root);
paths.add(root);
targets.add("../");
paths.add(f.getParent());
}
for(File target: directoryContents)
{
paths.add(target.getPath());
if(target.isDirectory())
{
targets.add(target.getName() + "/");
}
else
{
targets.add(target.getName());
}
}
ListView fileBrowserListView = (ListView) findViewById(R.id.file_browser_listview);
ArrayAdapter<String> directoryData = new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1, targets);
fileBrowserListView.setAdapter(directoryData);
fileBrowserListView.setOnItemClickListener(new OnItemClickListener() {
public void onItemClick(AdapterView<?> arg0, View view, int pos,long id) {
File f = new File(paths.get(pos));
if(f.isFile())
{
targetFile = f;
returnTarget();
//Return target File to activity
}
else
{
//f must be a dir
if(f.canRead())
{
currentPath = paths.get(pos);
showDir(paths.get(pos));
}
}
}
});
}
public void returnTarget()
{
Intent returnIntent = new Intent();
returnIntent.putExtra("file", targetFile);
returnIntent.putExtra("path", currentPath);
setResult(RESULT_OK, returnIntent);
finish();
}
}
Here is the code for MainActivity, where the file returned by the FileBrowser activity is sent using Android Beam:
public class MainActivity extends Activity {
private NfcAdapter nfcAdapter;
public final int fileRequestID = 98;
String name;
String[] extension={".png",".docx",".jpeg",".pdf",".doc"};
ArrayList <String>supportedExtension=new ArrayList<String>(Arrays.asList(extension));
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
PackageManager pm = this.getPackageManager();
// Check whether NFC is available on device
if (!pm.hasSystemFeature(PackageManager.FEATURE_NFC)) {
// NFC is not available on the device.
Toast.makeText(this, "The device does not has NFC hardware.",
Toast.LENGTH_SHORT).show();
}
// Check whether device is running Android 4.1 or higher
else if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) {
// Android Beam feature is not supported.
Toast.makeText(this, "Android Beam is not supported.",
Toast.LENGTH_SHORT).show();
}
else {
// NFC and Android Beam file transfer is supported.
Toast.makeText(this, "Android Beam is supported on your device.",
Toast.LENGTH_SHORT).show();
}
}
public void browseForFile(View view) {
Intent clientStartIntent = new Intent(this, FileBrowser.class);
startActivityForResult(clientStartIntent, fileRequestID);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
//fileToSend
boolean filePathProvided;
File fileToSend;
if (resultCode == Activity.RESULT_OK && requestCode == fileRequestID) {
//Fetch result
File targetDir = (File) data.getExtras().get("file");
String path = (String)data.getExtras().get("path");
Log.i("Path=",path);
if(targetDir.isFile())
{
if(targetDir.canRead()) {
try{
String ext=targetDir.getName().substring(targetDir.getName().lastIndexOf("."));
if (supportedExtension.contains(ext)) {
fileToSend = targetDir;
filePathProvided = true;
setTargetFileStatus(targetDir.getName() + " selected for file transfer");
Button btn = (Button) findViewById(R.id.send);
btn.setVisibility(View.VISIBLE);
name = targetDir.getName();
}
else{
Toast.makeText(getApplicationContext(), "File with this extension cannot be printed",
Toast.LENGTH_LONG).show();
}
}catch (Exception e){e.printStackTrace();}
}
else
{
filePathProvided = false;
setTargetFileStatus("You do not have permission to read the file " + targetDir.getName());
}
}
else
{
filePathProvided = false;
setTargetFileStatus("You may not transfer a directory, please select a single file");
}
}
}
public void setTargetFileStatus(String message)
{
TextView targetFileStatus = (TextView) findViewById(R.id.selected_filename);
targetFileStatus.setText(message);
}
public void sendFile(View view) {
nfcAdapter = NfcAdapter.getDefaultAdapter(this);
// Check whether NFC is enabled on device
if(!nfcAdapter.isEnabled()){
Toast.makeText(this, "Please enable NFC.", Toast.LENGTH_SHORT).show();
startActivity(new Intent(Settings.ACTION_NFC_SETTINGS));
}
else if(!nfcAdapter.isNdefPushEnabled()) {
Toast.makeText(this, "Please enable Android Beam.",
Toast.LENGTH_SHORT).show();
startActivity(new Intent(Settings.ACTION_NFCSHARING_SETTINGS));
}
else {
Uri[] mFileUris = new Uri[1];
String fileName=name;
// Retrieve the path to the user's public pictures directory
File fileDirectory = Environment
.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES);
File fileToTransfer;
fileToTransfer = new File(fileDirectory, fileName);
fileToTransfer.setReadable(true, false);
mFileUris[0] = Uri.fromFile(fileToTransfer);
nfcAdapter.setBeamPushUris(mFileUris, this);
}
}
}
Now, as you can see in my MainActivity, I am setting my directory as Pictures.
File fileDirectory = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES);
My question is: how can I dynamically change the directory each time, based on the actual directory value obtained from the FileBrowser activity?
I have already gone through the Android documentation on how to send files using Android Beam, but didn't find it much help for my problem. I also went through the Android documentation for Environment, but couldn't understand much.
Any help regarding this will really be appreciated. Thanks in advance!
You already have the selected file in the onActivityResult() method. Just change
mFileUris[0] = Uri.fromFile(fileToTransfer);
to
mFileUris[0] = Uri.fromFile(targetDir);
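Since targetDir is local to onActivityResult() while the beam happens in sendFile(), you would also keep the selected file in a field; a minimal sketch (the field name is my own):
private File selectedFile; // hypothetical field on MainActivity

// in onActivityResult(), once the extension check passes:
selectedFile = targetDir;

// in sendFile(), instead of rebuilding a path under DIRECTORY_PICTURES:
Uri[] mFileUris = new Uri[1];
selectedFile.setReadable(true, false);
mFileUris[0] = Uri.fromFile(selectedFile);
nfcAdapter.setBeamPushUris(mFileUris, this);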

How to Record Android Screen Video programmatically in KitKat 4.4

I know this question has been asked many times, and there are many questions, answers, and discussions available. But I don't know what to do and what not to do.
I have already referred to the links below without finding a solution:
https://stackoverflow.com/questions/23438767/how-to-record-video-on-kitkat-4-4
https://stackoverflow.com/questions/23185125/i-cannot-screen-record-with-my-kitkat-4-4-moto-x
Android KitKat start screenrecord from App
screen recorder with kitkat
Screen Recording kitkat with button
Despite a lot of searching I didn't find any simple example for achieving this task. I have been trying for two days with no success.
So the simple question is whether it is possible to record a video of your own screen in Android. I have heard that it is possible from Android 4.4 KitKat, and I have also checked some apps on the market.
I know that to do this the device should be rooted, along with the other requirements.
But I don't understand how to develop this programmatically. If anyone has any idea, please guide me on how to do this; any example or code would be a great help.
I appreciate any kind of help.
I tried to develop it with this simple piece of code, but I am not getting anything:
public void startRecording(View v) {
File recordfolder = Environment.getExternalStorageDirectory();
String record = "su — bit rate 8000000 --time-limit 30 "
+ recordfolder + "Record.mp4";
recordfolder.mkdir();
try {
Process screenrecording = Runtime.getRuntime().exec(record);
} catch (IOException e) {
e.printStackTrace();
}
}
So basically I don't know what I have to do with this Process screenrecording; I mean, how can I start the process?
Good question, but the answer depends on what platform level you want to target for recording the screen in Android.
Here are some tricks.
1) Using the class below you can record the screen; for that you need a rooted device (you can test this on a Genymotion 4.4 image as well).
public static class MainFragment extends Fragment {
private Context mContext;
private EditText mWidthEditText;
private EditText mHeightEditText;
private EditText mBitrateEditText;
private EditText mTimeEditText;
private Button mRecordButton;
public MainFragment() {
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_main, container,
false);
mContext = getActivity();
mRecordButton = (Button) rootView.findViewById(R.id.btn_record);
mRecordButton.setOnClickListener(RecordOnClickListener);
mWidthEditText = (EditText) rootView.findViewById(R.id.et_width);
mHeightEditText = (EditText) rootView.findViewById(R.id.et_height);
mBitrateEditText = (EditText) rootView
.findViewById(R.id.et_bitrate);
mBitrateEditText.addTextChangedListener(BitrateTextWatcher);
mTimeEditText = (EditText) rootView.findViewById(R.id.et_time);
mTimeEditText.addTextChangedListener(TimeTextWatcher);
return rootView;
}
private TextWatcher BitrateTextWatcher = new TextWatcher() {
@Override
public void beforeTextChanged(CharSequence charSequence, int i,
int i2, int i3) {
// Not used.
}
@Override
public void onTextChanged(CharSequence charSequence, int i, int i2,
int i3) {
if (TextUtils.isEmpty(charSequence)) {
return;
}
int value = Integer.valueOf(charSequence.toString());
if (value > 50 || value == 0) {
mBitrateEditText.setError(mContext
.getString(R.string.error_bitrate_edittext));
return;
}
mTimeEditText.setError(null);
}
@Override
public void afterTextChanged(Editable editable) {
// Not used.
}
};
private TextWatcher TimeTextWatcher = new TextWatcher() {
@Override
public void beforeTextChanged(CharSequence charSequence, int i,
int i2, int i3) {
// Not used.
}
@Override
public void onTextChanged(CharSequence charSequence, int i, int i2,
int i3) {
if (TextUtils.isEmpty(charSequence)) {
return;
}
int value = Integer.valueOf(charSequence.toString());
if (value > 180 || value == 0) {
mTimeEditText.setError(mContext
.getString(R.string.error_time_editext));
return;
}
mTimeEditText.setError(null);
}
@Override
public void afterTextChanged(Editable editable) {
// Not used.
}
};
private View.OnClickListener RecordOnClickListener = new View.OnClickListener() {
@Override
public void onClick(View view) {
if (!TextUtils.isEmpty(mTimeEditText.getError())
|| !TextUtils.isEmpty(mBitrateEditText.getError())) {
Toast.makeText(mContext,
mContext.getString(R.string.toast_invalid_values),
Toast.LENGTH_LONG).show();
return;
}
boolean widthSet = !TextUtils.isEmpty(mWidthEditText.getText());
boolean heightSet = !TextUtils.isEmpty(mHeightEditText
.getText());
if ((!widthSet && heightSet) || (widthSet && !heightSet)) {
Toast.makeText(mContext,
mContext.getString(R.string.error_invalid_wxh),
Toast.LENGTH_LONG).show();
return;
}
boolean bitrateSet = !TextUtils.isEmpty(mBitrateEditText
.getText());
boolean timeSet = !TextUtils.isEmpty(mTimeEditText.getText());
StringBuilder stringBuilder = new StringBuilder(
"/system/bin/screenrecord");
if (widthSet) {
stringBuilder.append(" --size ")
.append(mWidthEditText.getText()).append("x")
.append(mHeightEditText.getText());
}
if (bitrateSet) {
stringBuilder.append(" --bit-rate ").append(
mBitrateEditText.getText());
}
if (timeSet) {
stringBuilder.append(" --time-limit ").append(
mTimeEditText.getText());
}
// TODO User definable location.
stringBuilder
.append(" ")
.append(Environment.getExternalStorageDirectory()
.toString()).append("/recording.mp4");
Log.d("TAG", "comamnd: " + stringBuilder.toString());
try {
new SuTask(stringBuilder.toString().getBytes("ASCII"))
.execute();
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
}
};
private class SuTask extends AsyncTask<Boolean, Void, Boolean> {
private final byte[] mCommand;
public SuTask(byte[] command) {
super();
this.mCommand = command;
}
@Override
protected Boolean doInBackground(Boolean... booleans) {
try {
Process sh = Runtime.getRuntime().exec("su", null, null);
OutputStream outputStream = sh.getOutputStream();
outputStream.write(mCommand);
outputStream.flush();
outputStream.close();
final NotificationManager notificationManager = (NotificationManager) mContext
.getSystemService(NOTIFICATION_SERVICE);
notificationManager.notify(RUNNING_NOTIFICATION_ID,
createRunningNotification(mContext));
sh.waitFor();
return true;
} catch (InterruptedException e) {
e.printStackTrace();
Toast.makeText(mContext,
mContext.getString(R.string.error_start_recording),
Toast.LENGTH_LONG).show();
} catch (IOException e) {
e.printStackTrace();
Toast.makeText(mContext,
mContext.getString(R.string.error_start_recording),
Toast.LENGTH_LONG).show();
}
return false;
}
@Override
protected void onPostExecute(Boolean bool) {
super.onPostExecute(bool);
if (bool) {
final NotificationManager notificationManager = (NotificationManager) mContext
.getSystemService(NOTIFICATION_SERVICE);
notificationManager.cancel(RUNNING_NOTIFICATION_ID);
File file = new File(Environment
.getExternalStorageDirectory().toString()
+ "/recording.mp4");
notificationManager.notify(FINISHED_NOTIFICATION_ID,
createFinishedNotification(mContext, file));
}
}
private Notification createRunningNotification(Context context) {
Notification.Builder mBuilder = new Notification.Builder(
context)
.setSmallIcon(android.R.drawable.stat_notify_sdcard)
.setContentTitle(
context.getResources().getString(
R.string.app_name))
.setContentText("Recording Running")
.setTicker("Recording Running")
.setPriority(Integer.MAX_VALUE).setOngoing(true);
return mBuilder.build();
}
private Notification createFinishedNotification(Context context,
File file) {
Intent intent = new Intent();
intent.setAction(Intent.ACTION_VIEW);
intent.setDataAndType(Uri.fromFile(file), "video/mp4");
PendingIntent pendingIntent = PendingIntent.getActivity(
context, 0, intent, PendingIntent.FLAG_UPDATE_CURRENT);
Notification.Builder mBuilder = new Notification.Builder(
context)
.setSmallIcon(android.R.drawable.stat_notify_sdcard)
.setContentTitle(
context.getResources().getString(
R.string.app_name))
.setContentText("Recording Finished")
.setTicker("Recording Finished")
.setContentIntent(pendingIntent).setOngoing(false)
.setAutoCancel(true);
return mBuilder.build();
}
}
}
2) You can capture screenshots and build a video from them; this works on 3.0+ devices. For converting the images to a video you can use FFMPEG or JavaCV, as sketched below.
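For the image-to-video step, a minimal sketch using the ffmpeg command-line tool (the file-name pattern matches the screenshot snippet below; the frame rate and codec are my own choices, not from the original answer):
ffmpeg -framerate 10 -i screenshot_jpeg_%d.png -c:v libx264 -pix_fmt yuv420p recording.mp4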
-For rooted devices (this way you can also capture the keyboard screen):
if (Environment.MEDIA_MOUNTED.equals(Environment
.getExternalStorageState())) {
// We check whether external storage is available; otherwise we
// display an error message to the user using a Toast.
File sdCard = Environment.getExternalStorageDirectory();
File directory = new File(sdCard.getAbsolutePath() + "/ScreenShots");
directory.mkdirs();
String filename = "screenshot_jpeg_" + i + ".png";
File yourFile = new File(directory, filename);
try {
Process sh = Runtime.getRuntime().exec("su", null, null);
OutputStream os = sh.getOutputStream();
os.write(("/system/bin/screencap -p " + "/sdcard/ScreenShots/" + filename).getBytes("ASCII"));
os.flush();
os.close();
sh.waitFor();
i++;
} catch (Exception e) {
e.printStackTrace();
}
}
-For non-rooted devices you cannot shell out to screencap via su at all (and you cannot capture the keyboard screen); you are limited to capturing your own app's windows, for example by drawing a view into a Bitmap, as sketched below.
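A minimal sketch of that non-rooted variant, drawing one of your own views into a Bitmap (the directory and filename variables follow the rooted snippet above; the decor-view choice is my assumption):
// Draw the root view of your own window into a Bitmap (no root needed).
View root = getWindow().getDecorView().getRootView();
Bitmap shot = Bitmap.createBitmap(root.getWidth(), root.getHeight(), Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(shot);
root.draw(canvas);
// Save it the same way the rooted example does, e.g. under /sdcard/ScreenShots/.
try (FileOutputStream fos = new FileOutputStream(new File(directory, filename))) {
    shot.compress(Bitmap.CompressFormat.PNG, 100, fos);
} catch (IOException e) {
    e.printStackTrace();
}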

Generate bitmap from HTML in Android

How can you generate a bitmap from HTML in Android?
Can a WebView be used for this, or is there a better approach (like maybe using the WebView rendering engine directly)? How?
I would like to implement the following method...
public Bitmap toBitmap(Context context, String html, Rect rect);
...where html is the HTML to render and rect is the frame of the desired bitmap.
Here is a synchronous method that generates a bitmap from an HTML string using a WebView; it can be called from within an AsyncTask:
public Bitmap getBitmap(final WebView w, int containerWidth, int containerHeight, final String baseURL, final String content) {
final CountDownLatch signal = new CountDownLatch(1);
final Bitmap b = Bitmap.createBitmap(containerWidth, containerHeight, Bitmap.Config.ARGB_8888);
final AtomicBoolean ready = new AtomicBoolean(false);
w.post(new Runnable() {
@Override
public void run() {
w.setWebViewClient(new WebViewClient() {
@Override
public void onPageFinished(WebView view, String url) {
ready.set(true);
}
});
w.setPictureListener(new PictureListener() {
@Override
public void onNewPicture(WebView view, Picture picture) {
if (ready.get()) {
final Canvas c = new Canvas(b);
view.draw(c);
w.setPictureListener(null);
signal.countDown();
}
}
});
w.layout(0, 0, containerWidth, containerHeight);
w.loadDataWithBaseURL(baseURL, content, "text/html", "UTF-8", null);
}});
try {
signal.await();
} catch (InterruptedException e) {
e.printStackTrace();
}
return b;
}
It has some limitations, but it's a start.
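For example, a minimal sketch of calling it from an AsyncTask (the webView, imageView, and dimensions are placeholders, not names from the original answer):
new AsyncTask<Void, Void, Bitmap>() {
    @Override
    protected Bitmap doInBackground(Void... params) {
        // getBitmap() blocks on a latch until the WebView has rendered,
        // so it must not be called on the UI thread.
        return getBitmap(webView, 400, 600, null, "<html><body><h1>Hello</h1></body></html>");
    }
    @Override
    protected void onPostExecute(Bitmap bitmap) {
        imageView.setImageBitmap(bitmap); // display or save the result
    }
}.execute();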
You can use the draw method to let the WebView draw into a Bitmap of your choice. I made an example; don't forget the internet and external storage permissions in your manifest:
public class MainActivity extends Activity {
private WebView mWebView;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mWebView = new WebView(this);
setContentView(mWebView);
mWebView.loadUrl("http://tea.ch");
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode != KeyEvent.KEYCODE_BACK) return super.onKeyDown(keyCode, event);
Bitmap bm = Bitmap.createBitmap(200, 300, Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(bm);
mWebView.draw(c);
OutputStream stream = null;
try {
stream = new FileOutputStream(Environment.getExternalStorageDirectory() +"/teach.png");
bm.compress(CompressFormat.PNG, 80, stream);
if (stream != null) stream.close();
} catch (IOException e) {
} finally {
bm.recycle();
}
return super.onKeyDown(keyCode, event);
}
}
Why not use the WebView method capturePicture(), which returns a Picture and has been available since API level 1?
It returns a picture of the entire document.
You could then crop the result with your rectangle and save the bitmap from there, as sketched below.
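A minimal sketch of that suggestion (the crop rectangle is a placeholder):
Picture picture = webView.capturePicture();
Bitmap full = Bitmap.createBitmap(picture.getWidth(), picture.getHeight(), Bitmap.Config.ARGB_8888);
picture.draw(new Canvas(full));
// Crop to the desired frame, e.g. the rect from the method signature above.
Rect rect = new Rect(0, 0, 200, 300); // placeholder frame
Bitmap cropped = Bitmap.createBitmap(full, rect.left, rect.top, rect.width(), rect.height());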
This example shows how to capture the WebView content's last picture (it waits until the WebView has finished rendering); it is an example of converting HTML to PNG on Android.
Activity code:
public class HtmlViewer extends Activity {
private String HTML;
private Context ctx;
private Picture pic = null;
private int i = 0; // suppose this is the last pic
private int oldi = 0;
private Timer myTimer; // timer for waiting until the last picture has loaded
private boolean loadcompleted = false; // the original snippet used this without declaring it
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_html_viewer);
Intent intent = getIntent();
HTML = intent.getStringExtra("HTML");
ctx = this;
WebView wv = (WebView)findViewById(R.id.webView1);
wv.setPictureListener(new PictureListener(){
public void onNewPicture(WebView view, Picture picture) {
Log.w("picture", "loading.." + String.valueOf(view.getProgress()));
pic = picture;
i++;
}
});
wv.loadData(HTML, "text/html; charset=utf-8", null);
wv.setWebViewClient(new WebViewClient()
{
public void onPageFinished(WebView wv, String url)
{
Picture p = wv.capturePicture();
myTimer = new Timer();
myTimer.schedule(new TimerTask() {
@Override
public void run() {
if (i > oldi)
oldi = i;
else
if (i != 0)
{
Log.w("picture", "finished");
cancel();
Picture picture = pic;
Log.w("picture", "onNewPicture- Height"+ picture.getHeight());
Log.w("picture", "onNewPicture- Width"+ picture.getWidth());
File sdCard = Environment.getExternalStorageDirectory();
if (picture != null)
{
Log.w("picture", " P OK");
Bitmap image = Bitmap.createBitmap(picture.getWidth(),picture.getHeight(), Config.ARGB_8888);
Canvas canvas = new Canvas(image);
picture.draw(canvas);
Log.w("picture", "C OK");
if (image != null) {
Log.w("picture", "I OK");
ByteArrayOutputStream mByteArrayOS = new ByteArrayOutputStream();
image.compress(Bitmap.CompressFormat.PNG, 90, mByteArrayOS);
try {
File file = new File(sdCard, "AccountView.PNG");
FileOutputStream fos = new FileOutputStream(file);
fos.write(mByteArrayOS.toByteArray());
fos.flush();
fos.close();
Log.w("picture", "F OK " + String.valueOf(mByteArrayOS.size()) + " ? " + String.valueOf(file.length()));
Intent sharingIntent = new Intent(Intent.ACTION_SEND);
Uri screenshotUri = Uri.fromFile(file);
sharingIntent.setType("image/png");
sharingIntent.putExtra(Intent.EXTRA_STREAM, screenshotUri);
startActivity(Intent.createChooser(sharingIntent, getResources().getString(R.string.ACCOUNT_VIEW_TITLE)));
((Activity)ctx).finish();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}
}, 0, 1000);
Log.w("picture", "done");
loadcompleted = true;
}
});
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.activity_html_viewer, menu);
return true;
}
}
Layout
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical"
tools:context=".HtmlViewer" >
<WebView
android:id="@+id/webView1"
android:layout_width="match_parent"
android:layout_height="match_parent" />
</LinearLayout>
This is a good library that can be used to convert any HTML content to a bitmap.
It supports both a URL and an HTML string:
https://github.com/iZettle/android-html2bitmap
