I'm building an augmented reality app (Unity + Vuforia). I have a button that takes a screenshot; it works, but the file is saved to the app-private location (/Data/Data/com.companyname.gamename/Files). How can I change the save folder to (storage/emulated/0/DCIM/Camera/)?
using UnityEngine;
using System.Collections;
using System.IO;
public class SnapshotShare : MonoBehaviour
{
private AndroidUltimatePluginController androidUltimatePluginController;
Camera mainCamera;
RenderTexture renderTex;
Texture2D screenshot;
Texture2D LoadScreenshot;
int width = Screen.width;
int height = Screen.height;
string fileName;
string screenShotName = "Animal3D_";
void Start ()
{
androidUltimatePluginController = AndroidUltimatePluginController.GetInstance ();
}
public void Snapshot ()
{
StartCoroutine (CaptureScreen ());
}
public IEnumerator CaptureScreen ()
{
yield return null;
GameObject.Find ("Canvas").GetComponent<Canvas> ().enabled = false;
yield return new WaitForEndOfFrame ();
if (Screen.orientation == ScreenOrientation.Portrait || Screen.orientation == ScreenOrientation.PortraitUpsideDown) {
mainCamera = Camera.main.GetComponent<Camera> ();
renderTex = new RenderTexture (height, width, 24);
mainCamera.targetTexture = renderTex;
RenderTexture.active = renderTex;
mainCamera.Render ();
screenshot = new Texture2D (height, width, TextureFormat.RGB24, false);
screenshot.ReadPixels (new Rect (0, 0, height, width ), 0, 0);
screenshot.Apply ();
RenderTexture.active = null;
mainCamera.targetTexture = null;
}
if (Screen.orientation == ScreenOrientation.LandscapeLeft || Screen.orientation == ScreenOrientation.LandscapeRight) {
mainCamera = Camera.main.GetComponent<Camera> ();
renderTex = new RenderTexture (width, height, 24);
mainCamera.targetTexture = renderTex;
RenderTexture.active = renderTex;
mainCamera.Render ();
screenshot = new Texture2D (width, height, TextureFormat.RGB24, false);
screenshot.ReadPixels (new Rect (0, 0, width, height), 0, 0);
screenshot.Apply (); //false
RenderTexture.active = null;
mainCamera.targetTexture = null;
}
File.WriteAllBytes (Application.persistentDataPath + "/" +screenShotName+Time.frameCount+".jpg", screenshot.EncodeToJPG ());
GameObject.Find ("Canvas").GetComponent<Canvas> ().enabled = true;
}
public void LoadImage ()
{
string path = Application.persistentDataPath + "/" + screenShotName;
byte[] bytes;
bytes = System.IO.File.ReadAllBytes(path);
LoadScreenshot = new Texture2D(1,1);
LoadScreenshot.LoadImage(bytes);
GameObject.FindGameObjectWithTag ("Picture").GetComponent<Renderer> ().material.mainTexture = screenshot;
}
public void close ()
{
Application.Quit ();
}
}
Took from here (more details) and here (discussion).
I suggest you to save your captured screenshot in app location (/Data/Data/com.companyname.gamename/Files) and then use File.Move(source, dest) to move it:
if(Shot_Taken == true)
{
string Origin_Path = System.IO.Path.Combine(Application.persistentDataPath, Screen_Shot_File_Name);
// This is the path of my folder.
string Path = "/mnt/sdcard/DCIM/Inde/" + Screen_Shot_File_Name;
if(System.IO.File.Exists(Origin_Path))
{
System.IO.File.Move(Origin_Path, Path);
Shot_Taken = false;
}
}
Related
I have a picture that contains two icons at its bottom. I am already cropping the picture to keep only the bottom strip and comparing it with the icon image using OpenCV template matching. It works perfectly when the icon is present, but when I remove the icon the rectangle still appears, and in the wrong place. All I want is: if there is a match, draw the rectangle; if there is no match, don't draw it.
here is my code
public class Test123 extends AppCompatActivity {
ImageView a,b,c;
String resultImgPath,baseDir,lol;
Button x;
private static final String TAG = "Test123";
String aa,bb;
static {
if(!OpenCVLoader.initDebug()){
Log.e(TAG, "OpenCV not loaded");
} else {
Log.e(TAG, "OpenCV loaded");
}
}
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_test123);
System.loadLibrary("opencv_java3");
a = (ImageView)findViewById(R.id.imageView);
b = (ImageView)findViewById(R.id.imageView2);
c = (ImageView)findViewById(R.id.imageView3);
x = (Button) findViewById(R.id.button);
baseDir = Environment.getExternalStorageDirectory().getAbsolutePath();
resultImgPath = baseDir+"/Test/result.jpg";
aa = baseDir+"/Test/d.jpg";
bb = baseDir+"/Test/dd1.jpg";
lol = baseDir+"/Test/c.jpg";
x.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View v) {
Bitmap bmImg = BitmapFactory.decodeFile(aa);
int fromHere = (int) (bmImg.getHeight() * 0.06);
final Bitmap croppedBitmap = Bitmap.createBitmap(bmImg, (int) (bmImg.getWidth() * 0.3), (int) (bmImg.getHeight() * 0.94), (int) (bmImg.getWidth() * 0.6), fromHere);
String root = Environment.getExternalStorageDirectory().toString();
File myDir = new File(root + "/Test");
myDir.mkdirs();
String fname = "c.jpg";
File file = new File(myDir, fname);
if (file.exists())
file.delete();
try {
FileOutputStream out = new FileOutputStream(file);
croppedBitmap.compress(Bitmap.CompressFormat.JPEG, 90, out);
out.flush();
out.close();
} catch (Exception e) {
e.printStackTrace();
}
a.setImageBitmap(croppedBitmap);
Bitmap bmImg2 = BitmapFactory.decodeFile(bb);
b.setImageBitmap(bmImg2);
matchingDemo(lol, bb, resultImgPath, Imgproc.TM_SQDIFF);
}
});
}
public void matchingDemo(String imgPath,String templatePath,String resPath, int matchType){
// to read the entered image from its path and make a mat object
Mat img = Imgcodecs.imread(imgPath);
Mat templ = Imgcodecs.imread(templatePath);
// Create the result matrix
int result_cols = img.cols() - templ.cols() + 1;
int result_rows = img.rows() - templ.rows() + 1;
Mat result = new Mat(result_rows, result_cols, CvType.CV_8UC3);
// performing matching and do normalization
Imgproc.matchTemplate(img, templ, result, matchType);
int type = Imgproc.THRESH_TOZERO;
Imgproc.threshold(result, result, 0.8, 1., type);
Core.normalize(result, result, 0, 1, Core.NORM_MINMAX, -1, new Mat());
// / finding the best match from minMaxLoc
Core.MinMaxLocResult mmr = Core.minMaxLoc(result);
double bb = mmr.maxVal;
Log.e("hey",bb+"");
Point matchLoc;
if (matchType == Imgproc.TM_SQDIFF || matchType == Imgproc.TM_SQDIFF_NORMED) {
matchLoc = mmr.minLoc;
} else {
matchLoc = mmr.maxLoc;
}
// draw a rectangle on searched object
Imgproc.rectangle(img, matchLoc, new Point(matchLoc.x + templ.cols(),
matchLoc.y + templ.rows()), new Scalar(0, 255, 0));
// store the result image here
Imgcodecs.imwrite(resPath, img);
Mat image = new Mat();
image =Imgcodecs.imread(resPath);
Bitmap bm = Bitmap.createBitmap(image.cols(),image.rows(), Bitmap.Config.ARGB_8888);
Utils.matToBitmap(image, bm);
c.setImageBitmap(bm);
image.release();
}
}
EDIT
I know that I need a condition here, something like:
if(......................)
Imgproc.rectangle(img, matchLoc, new Point(matchLoc.x + templ.cols(),
matchLoc.y + templ.rows()), new Scalar(0, 255, 0));
}
I tried putting minVal inside the if block, but every picture gives me different numbers. I tried with and without normalize; I can find a threshold that works for one picture, but other pictures produce different values, so it does not reliably detect whether the icon is visible, and when there is no match it still draws the rectangle in the wrong place. I just need a single value or criterion: draw the rectangle when there is a match, and do nothing at all when there is none.
I am attempting to use tesseract in Xamarin.Forms for text character recognition on images. On Android, I've implemented the logic using the steps below;
Grab bitmaps of the TextureView.
Convert bitmaps to black and white.
Crop needed from the bitmap image
Set tesseract image with cropped image
Currently facing memory issues and haven't got a way around it so far
Snippets below show the main functions of the code.
public async void takeContinuousPhotos(){
byte[] newBytes;
byte[] bytes;
while (ocrTextChosen == false){
bytes = await CapturePhoto();
newBytes = this.CropPhoto(bytes, new System.Drawing.Rectangle(40, view.Height / 2 - 200, view.Width - 40, 100), (view.Width - 40) * 2, 200);
if (!_tesseract.Initialized)
{
await _tesseract.Init("eng");
var result = await _tesseract.SetImage(new MemoryStream(newBytes));
if (result)
{
Device.BeginInvokeOnMainThread(() => {
this.ocrText.Text = this._tesseract.Text;
});
_tesseract.Clear();
bytes = new byte[0];
newBytes = new byte[0];
}
}
else {
var result = await _tesseract.SetImage(new MemoryStream(newBytes));
if (result)
{
Device.BeginInvokeOnMainThread(() => {
this.ocrText.Text = this._tesseract.Text;
});
_tesseract.Clear();
bytes = new byte[0];
newBytes = new byte[0];
}
}
}
}
// Capture photo from stream
public async Task<byte[]> CapturePhoto()
{
var ratio = ((decimal)Height) / Width;
var blackwhiteBitmap = this.toGrayscale(liveView.Bitmap);
var image = Bitmap.CreateBitmap(blackwhiteBitmap, 0, 0, blackwhiteBitmap.Width, (int)(blackwhiteBitmap.Width * ratio));
byte[] imageBytes = null;
using (var imageStream = new System.IO.MemoryStream())
{
await image.CompressAsync(Bitmap.CompressFormat.Jpeg, 10, imageStream);
image.Recycle();
imageBytes = imageStream.ToArray();
imageStream.Dispose();
}
image = null;
return imageBytes;
}
takeContinuousPhotos() is then run on the TextureView delegate method as shown below:
public void OnSurfaceTextureAvailable(SurfaceTexture surface, int width, int height)
{
camera = Android.Hardware.Camera.Open();
var parameters = camera.GetParameters();
var aspect = ((decimal)height) / ((decimal)width);
var previewSize = parameters.SupportedPreviewSizes
.OrderBy(s => System.Math.Abs(s.Width / (decimal)s.Height - aspect))
.First();
parameters.SetPreviewSize(previewSize.Width, previewSize.Height);
parameters.FocusMode = Android.Hardware.Camera.Parameters.FocusModeContinuousPicture;
camera.SetParameters(parameters);
camera.SetPreviewTexture(surface);
StartCamera();
Task.Run(() => {
this.takeContinuousPhotos();
});
}
I want to create the image of the barcode/QR code etc on my app. I have searched a lot and have found different libraries to do this task but since I am already using Zxing so i would like to work in it.
Following is the code that I have writen:
This is my Scanner Activity class:
public void handleResult(Result rawResult) {
// Do something with the result here
Log.v(TAG, rawResult.getText()); // Prints scan results
Toast.makeText(SimpleScannerActivity.this, rawResult.toString() + " WOW scanned", Toast.LENGTH_LONG).show();
Toast.makeText(SimpleScannerActivity.this, rawResult.getBarcodeFormat().toString(), Toast.LENGTH_LONG).show();
Log.v(TAG, rawResult.getBarcodeFormat().toString()); // Prints the scan format (qrcode, pdf417 etc.)
//Intent scanScreenResult= new Intent("com.aaa.fyp.ScanResultScreen");
setFormat(rawResult);
Intent nextScreen = new Intent("com.aaa.fyp.ScanResultScreen");
nextScreen.putExtra("barcode",rawResult.toString());
nextScreen.putExtra("format", rawResult.getBarcodeFormat().toString());
finish();
startActivity(nextScreen);
}
public void setFormat(Result result){
r=result.getBarcodeFormat();
System.out.println("============================== setformat main"+ r);
}
public BarcodeFormat getFormat(){
System.out.println("============================== getformat main"+ r);
return r;
}
Using the results from the above activity in ScanResultScreen activity.
public class ScanResultScreen extends SimpleScannerActivity {
ImageView scanned;
TextView bc;
TextView f;
String Barcode;
String format;
BarcodeFormat form;
#Override
public void onCreate(Bundle state) {
super.onCreate(state);
setContentView(R.layout.scan_screen_with_button);
ViewGroup layout = (ViewGroup) findViewById(R.id.scanScreenWithButton);
setContentView(layout);
Intent prevScreen = getIntent(); // gets the previously created intent
Barcode=prevScreen.getStringExtra("barcode");
bc= (TextView)findViewById(R.id.barcode_label);
bc.setText(Barcode);
format=prevScreen.getStringExtra("format");
f=(TextView) findViewById(R.id.format_label);
f.setText(prevScreen.getStringExtra("format").toString());
SimpleScannerActivity obj=new SimpleScannerActivity();
form=obj.getFormat();
d=(TextView)findViewById(R.id.date_label);
String formattedDate = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(Calendar.getInstance().getTime());
d.setText(formattedDate);
Bitmap bitmap = null;
ImageView iv = new ImageView(this);
try {
bitmap = encodeAsBitmap(Barcode, form, 600, 300);
iv.setImageBitmap(bitmap);
} catch (WriterException e) {
e.printStackTrace();
}
layout.addView(iv);
}
private static final int WHITE = 0xFFFFFFFF;
private static final int BLACK = 0xFF000000;
Bitmap encodeAsBitmap(String contents, BarcodeFormat format, int img_width, int img_height) throws WriterException {
String contentsToEncode = contents;
if (contentsToEncode == null) {
return null;
}
Map<EncodeHintType, Object> hints = null;
String encoding = guessAppropriateEncoding(contentsToEncode);
if (encoding != null) {
hints = new EnumMap<EncodeHintType, Object>(EncodeHintType.class);
hints.put(EncodeHintType.CHARACTER_SET, encoding);
}
MultiFormatWriter writer = new MultiFormatWriter();
BitMatrix result;
try {
result = writer.encode(contentsToEncode, format, img_width, img_height, hints);
} catch (IllegalArgumentException iae) {
// Unsupported format
return null;
}
int width = result.getWidth();
int height = result.getHeight();
int[] pixels = new int[width * height];
for (int y = 0; y < height; y++) {
int offset = y * width;
for (int x = 0; x < width; x++) {
pixels[offset + x] = result.get(x, y) ? BLACK : WHITE;
}
}
Bitmap bitmap = Bitmap.createBitmap(width, height,
Bitmap.Config.ARGB_8888);
bitmap.setPixels(pixels, 0, width, 0, 0, width, height);
return bitmap;
}
private static String guessAppropriateEncoding(CharSequence contents) {
// Very crude at the moment
for (int i = 0; i < contents.length(); i++) {
if (contents.charAt(i) > 0xFF) {
return "UTF-8";
}
}
return null;
}
Now I am getting a Null value in the variable "form". Even though I am able to get the barcodeFormat in my second activity by passing it through intent but it's in the type String. Whereas the built-in methods that I am using here requires it in BarcodeFormat that is available in Zxing.
Help!!
BarcodeFormat is an enum type. If you want to pass a String value, you have to convert it to BarcodeFormat.
For example, passing a barcode format "AZTEC":
BarcodeFormat format = Enum.valueOf(BarcodeFormat.class, "AZTEC");
I want to create a QR code for my existing Android application (built in Android Studio). I need to get some ID's from the back end & generate the QR code then view it in my layout.
Please help me.
Here are some of my sample codes, you can refer, some values may be different depend on the mobile phone model. Hope this help! I use zxing library.
private void startScan(Context context, String parameter) {
Intent intent = new Intent();
intent.setAction("com.motorolasolutions.emdk.datawedge.api.ACTION_SOFTSCANTRIGGER");
intent.putExtra("com.motorolasolutions.emdk.datawedge.api.EXTRA_PARAMETER", parameter);
context.sendBroadcast(intent);
}
#Override
public boolean onKeyUp(int keyCode, #NonNull KeyEvent event) {
if ((event.getAction() == KeyEvent.ACTION_UP) && keyCode == KeyEvent.KEYCODE_...) {
startScan(this, "START_SCANNING");
registerReceiver(new BroadcastReceiver() {
#Override
public void onReceive(Context context, Intent intent) {
String code = intent.getExtras().getString("com.motorolasolutions.emdk.datawedge.data_string");
ImageView imageView = (ImageView) findViewById(R.id.imageView);
TextView textView = (TextView) findViewById(R.id.textView);
if ((imageView != null) && (textView != null)) {
textView.setText(code);
// barcode image
try {
mBarCodeBitmap = encodeBarCodeAsBitmap(value, BarcodeFormat.CODE_128, 200, 100);
imageView.setImageBitmap(mBarCodeBitmap);
} catch (WriterException e) {
}
}
unregisterReceiver(this);
}
}, new IntentFilter("com.example.SoftScanIntentAction")); // this value must be set in DataWedge profile (Intent Output - Intent action...)
}
return super.onKeyUp(keyCode, event);
}
private Bitmap encodeBarCodeAsBitmap(String contents, BarcodeFormat format, int width, int height) throws WriterException {
String contentsToEncode = contents;
if (contentsToEncode == null) {
return null;
}
Map<EncodeHintType, Object> hints = null;
String encoding = guessAppropriateEncoding(contentsToEncode);
if (encoding != null) {
hints = new EnumMap<>(EncodeHintType.class);
hints.put(EncodeHintType.CHARACTER_SET, encoding);
}
MultiFormatWriter writer = new MultiFormatWriter();
BitMatrix result;
try {
result = writer.encode(contentsToEncode, format, width, height, hints);
} catch (IllegalArgumentException iae) {
// Unsupported format
return null;
}
int imgWidth = result.getWidth();
int imgHeight = result.getHeight();
int[] pixels = new int[imgWidth * imgHeight];
for (int y = 0; y < imgHeight; y++) {
int offset = y * imgWidth;
for (int x = 0; x < imgWidth; x++) {
pixels[offset + x] = result.get(x, y) ? 0xFF000000 : 0xFFFFFFFF;
}
}
Bitmap bitmap = Bitmap.createBitmap(imgWidth, imgHeight,
Bitmap.Config.ARGB_8888);
bitmap.setPixels(pixels, 0, imgWidth, 0, 0, imgWidth, imgHeight);
return bitmap;
}
Mat last;
//Mat last2;
ArrayList<Scene> scenes = new ArrayList<Scene>();
//ArrayList<Scene> scenes23 = new ArrayList<Scene>();
ArrayList<Bitmap> myImageList2 = new ArrayList<Bitmap>();
int[] myImageList = new int[]{R.drawable.baldglassy,
R.drawable.baldglassy2,
R.drawable.bandedarcherfish,
R.drawable.bandedarcherfish2,
R.drawable.bluegill,
R.drawable.bluegill2,
R.drawable.bluespotmullet};
/*Bitmap[] images2 = { BitmapFactory.decodeResource(getResources(),R.drawable.baldglassy),
BitmapFactory.decodeResource(getResources(),R.drawable.baldglassy2),
BitmapFactory.decodeResource(getResources(),R.drawable.bandedarcherfish),
BitmapFactory.decodeResource(getResources(),R.drawable.bluegill),
BitmapFactory.decodeResource(getResources(),R.drawable.bluegill2),
BitmapFactory.decodeResource(getResources(),R.drawable.bluespotmullet),
};*/
Scene refScene;
ProgressDialog progress;
//Mat imgMAT;
public void takePic1(View w) {
//Bitmap bmp32 = images[](Bitmap.Config.ARGB_8888, true);
/*for(int i=0;i<=5;i++){
Bitmap bmp32 = images[i].copy(Bitmap.Config.ARGB_8888, true);
Utils.bitmapToMat(bmp32, imgMAT);
}
Scene scene2 = new Scene(imgMAT);
scenes.add(scene2);*/
Scene scene = new Scene(last);
scenes.add(scene);
addBtn.setText("Add (" + scenes.size() + ")");
}
public void takePic2(View w) {
Mat im = last.clone();
// Imgproc.cvtColor(im, im, Imgproc.COLOR_BGR2RGB);
Bitmap bmp = Bitmap.createBitmap(im.cols(), im.rows(),
Bitmap.Config.ARGB_8888);
Utils.matToBitmap(im, bmp);
matchDrawArea.setImageBitmap(bmp);
refScene = new Scene(last);
}
When I try to use a Bitmap array (the commented-out code above), the app crashes.
The compiler also suggests changing the type of my int array to Scene, which is not possible. Thanks in advance to anyone who can help.
here is the scene class
public class Scene {
final Mat image;
final Mat descriptors = new Mat();
final MatOfKeyPoint keypoints = new MatOfKeyPoint();
boolean firstTime = true;
public Scene(Mat image) {
this.image = image.clone();
// DetectUtility.analyze(image, keypoints, descriptors);
}
public void preCompute() {
if (firstTime) {
DetectUtility.analyze(image, keypoints, descriptors);
firstTime = false;
}
}
public SceneDetectData compare(Scene frame, boolean isHomogrpahy, boolean imageOnly) {
// Info to store analysis stats
SceneDetectData s = new SceneDetectData();
// Detect key points and compute descriptors for inputFrame
MatOfKeyPoint f_keypoints = frame.keypoints;
Mat f_descriptors = frame.descriptors;
this.preCompute();
frame.preCompute();
// Compute matches
MatOfDMatch matches = DetectUtility.match(descriptors, f_descriptors);
// Filter matches by distance
MatOfDMatch filtered = DetectUtility.filterMatchesByDistance(matches);
// If count of matches is OK, apply homography check
s.original_key1 = (int) descriptors.size().height;
s.original_key2 = (int) f_descriptors.size().height;
s.original_matches = (int) matches.size().height;
s.dist_matches = (int) filtered.size().height;
if (isHomogrpahy) {
MatOfDMatch homo = DetectUtility.filterMatchesByHomography(
keypoints, f_keypoints, filtered);
Bitmap bmp = DetectUtility.drawMatches(image, keypoints,
frame.image, f_keypoints, homo, imageOnly);
s.bmp = bmp;
s.homo_matches = (int) homo.size().height;
return s;
} else {
Bitmap bmp = DetectUtility.drawMatches(image, keypoints,
frame.image, f_keypoints, filtered, imageOnly);
s.bmp = bmp;
s.homo_matches = -1;
return s;
}
}
}