Issue with OCR Scan on Android

I'm trying to create an app with an OCR scanner using the tesseract/tess-two library. I can successfully access the phone camera and do manual focus, but when I take a picture I get the following error:
07-18 19:07:06.335 2585-2585/com.fastnetserv.myapp D/DBG_com.fastnetserv.myapp.MainActivity: Picture taken
07-18 19:07:06.335 2585-2585/com.fastnetserv.myapp D/DBG_com.fastnetserv.myapp.MainActivity: Got null data
07-18 19:07:06.405 2585-2585/com.fastnetserv.myapp D/DBG_com.fastnetserv.myapp.MainActivity: Picture taken
07-18 19:07:06.426 2585-2585/com.fastnetserv.myapp D/DBG_com.fastnetserv.myapp.MainActivity: Got bitmap
07-18 19:07:06.427 2585-11599/com.fastnetserv.myapp E/DBG_com.fastnetserv.myapp.TessAsyncEngine: Error passing parameter to execute(context, bitmap)
07-18 19:07:14.111 2585-2585/com.fastnetserv.myapp D/DBG_com.fastnetserv.myapp.CameraUtils: CameraEngine Stopped
Here is the CameraFragment code:
package com.fastnetserv.myapp;
import android.content.Context;
import android.graphics.Bitmap;
import android.hardware.Camera;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import com.googlecode.tesseract.android.TessBaseAPI;
/**
* A simple {@link Fragment} subclass.
* Activities that contain this fragment must implement the
* {@link //CameraFragment.//OnFragmentInteractionListener} interface
* to handle interaction events.
* Use the {@link CameraFragment#newInstance} factory method to
* create an instance of this fragment.
*/
public class CameraFragment extends Fragment implements SurfaceHolder.Callback, View.OnClickListener,
Camera.PictureCallback, Camera.ShutterCallback {
static final String TAG = "DBG_" + MainActivity.class.getName();
Button shutterButton;
Button focusButton;
FocusBoxView focusBox;
SurfaceView cameraFrame;
CameraEngine cameraEngine;
// TODO: Rename parameter arguments, choose names that match
// the fragment initialization parameters, e.g. ARG_ITEM_NUMBER
private static final String ARG_PARAM1 = "param1";
private static final String ARG_PARAM2 = "param2";
// TODO: Rename and change types of parameters
private String mParam1;
private String mParam2;
private OnFragmentInteractionListener mListener;
public CameraFragment() {
// Required empty public constructor
}
/**
* Use this factory method to create a new instance of
* this fragment using the provided parameters.
*
* @param param1 Parameter 1.
* @param param2 Parameter 2.
* @return A new instance of fragment CameraFragment.
*/
// TODO: Rename and change types and number of parameters
public static CameraFragment newInstance(String param1, String param2) {
CameraFragment fragment = new CameraFragment();
Bundle args = new Bundle();
args.putString(ARG_PARAM1, param1);
args.putString(ARG_PARAM2, param2);
fragment.setArguments(args);
return fragment;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (getArguments() != null) {
mParam1 = getArguments().getString(ARG_PARAM1);
mParam2 = getArguments().getString(ARG_PARAM2);
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
// Inflate the layout for this fragment
return inflater.inflate(R.layout.fragment_camera, container, false);
}
// TODO: Rename method, update argument and hook method into UI event
public void onButtonPressed(Uri uri) {
if (mListener != null) {
mListener.onFragmentInteraction(uri);
}
}
@Override
public void onAttach(Context context) {
super.onAttach(context);
try {
mListener = (OnFragmentInteractionListener) context;
} catch (ClassCastException e) {
throw new ClassCastException(context.toString()
+ " must implement OnFragmentInteractionListener");
}
}
@Override
public void onDetach() {
super.onDetach();
mListener = null;
}
// Camera Code
public String detectText(Bitmap bitmap) {
TessDataManager.initTessTrainedData(getActivity());
TessBaseAPI tessBaseAPI = new TessBaseAPI();
String path = "/mnt/sdcard/com.fastnetserv.myapp/tessdata/ita.traineddata";
Log.d(TAG, "Check data path: " + path);
tessBaseAPI.setDebug(true);
tessBaseAPI.init(path, "ita"); // Init Tesseract with the trained data file, using the Italian language
//For example if we want to only detect numbers
tessBaseAPI.setVariable(TessBaseAPI.VAR_CHAR_WHITELIST, "1234567890");
tessBaseAPI.setVariable(TessBaseAPI.VAR_CHAR_BLACKLIST, "!@#$%^&*()_+=-qwertyuiop[]}{POIU" +
"YTREWQasdASDfghFGHjklJKLl;L:'\"\\|~`xcvXCVbnmBNM,./<>?");
tessBaseAPI.setImage(bitmap);
String text = tessBaseAPI.getUTF8Text();
//Log.d(TAG, "Got data: " + result);
tessBaseAPI.end();
return text;
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
Log.d(TAG, "Surface Created - starting camera");
if (cameraEngine != null && !cameraEngine.isOn()) {
cameraEngine.start();
}
if (cameraEngine != null && cameraEngine.isOn()) {
Log.d(TAG, "Camera engine already on");
return;
}
cameraEngine = CameraEngine.New(holder);
cameraEngine.start();
Log.d(TAG, "Camera engine started");
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
}
@Override
public void onResume() {
super.onResume();
cameraFrame = (SurfaceView) getActivity().findViewById(R.id.camera_frame);
shutterButton = (Button) getActivity().findViewById(R.id.shutter_button);
focusBox = (FocusBoxView) getActivity().findViewById(R.id.focus_box);
focusButton = (Button) getActivity().findViewById(R.id.focus_button);
shutterButton.setOnClickListener(this);
focusButton.setOnClickListener(this);
SurfaceHolder surfaceHolder = cameraFrame.getHolder();
surfaceHolder.addCallback(this);
surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
cameraFrame.setOnClickListener(this);
}
@Override
public void onPause() {
super.onPause();
if (cameraEngine != null && cameraEngine.isOn()) {
cameraEngine.stop();
}
SurfaceHolder surfaceHolder = cameraFrame.getHolder();
surfaceHolder.removeCallback(this);
}
@Override
public void onClick(View v) {
if(v == shutterButton){
if(cameraEngine != null && cameraEngine.isOn()){
cameraEngine.takeShot(this, this, this);
}
}
if(v == focusButton){
if(cameraEngine!=null && cameraEngine.isOn()){
cameraEngine.requestFocus();
}
}
}
@Override
public void onPictureTaken(byte[] data, Camera camera) {
Log.d(TAG, "Picture taken");
if (data == null) {
Log.d(TAG, "Got null data");
return;
}
Bitmap bmp = Tools.getFocusedBitmap(getActivity(), camera, data, focusBox.getBox());
Log.d(TAG, "Got bitmap");
new TessAsyncEngine().executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, this, bmp);
}
@Override
public void onShutter() {
}
}
And here is the TessAsyncEngine:
package com.fastnetserv.myapp;
import android.app.Activity;
import android.graphics.Bitmap;
import android.os.AsyncTask;
import android.util.Log;
import com.fastnetserv.myapp.ImageDialog;
import com.fastnetserv.myapp.Tools;
/**
* Created by Fadi on 6/11/2014.
*/
public class TessAsyncEngine extends AsyncTask<Object, Void, String> {
static final String TAG = "DBG_" + TessAsyncEngine.class.getName();
private Bitmap bmp;
private Activity context;
@Override
protected String doInBackground(Object... params) {
try {
if(params.length < 2) {
Log.e(TAG, "Error passing parameter to execute - missing params");
return null;
}
if(!(params[0] instanceof Activity) || !(params[1] instanceof Bitmap)) {
Log.e(TAG, "Error passing parameter to execute(context, bitmap)");
return null;
}
context = (Activity)params[0];
bmp = (Bitmap)params[1];
if(context == null || bmp == null) {
Log.e(TAG, "Error passed null parameter to execute(context, bitmap)");
return null;
}
int rotate = 0;
if(params.length == 3 && params[2]!= null && params[2] instanceof Integer){
rotate = (Integer) params[2];
}
if(rotate >= -180 && rotate <= 180 && rotate != 0)
{
bmp = Tools.preRotateBitmap(bmp, rotate);
Log.d(TAG, "Rotated OCR bitmap " + rotate + " degrees");
}
TessEngine tessEngine = TessEngine.Generate(context);
bmp = bmp.copy(Bitmap.Config.ARGB_8888, true);
String result = tessEngine.detectText(bmp);
Log.d(TAG, result);
return result;
} catch (Exception ex) {
Log.d(TAG, "Error: " + ex + "\n" + ex.getMessage());
}
return null;
}
@Override
protected void onPostExecute(String s) {
if(s == null || bmp == null || context == null)
return;
ImageDialog.New()
.addBitmap(bmp)
.addTitle(s)
.show(context.getFragmentManager(), TAG);
super.onPostExecute(s);
}
}
I have followed this tutorial (http://www.codeproject.com/Tips/840623/Android-Character-Recognition), but I probably forgot something due to my limited knowledge of Android.

That if(context == null || bmp == null) check is not needed, as the instanceof tests above already reject null values.
But I'm guessing your main problem is that you pass this from the Fragment as the Activity parameter, and a Fragment is not an Activity, so the instanceof check fails.
To fix it: overall I would try not to toss Activity references around wildly, as those have quite a limited life cycle on Android. I have an app with tess-two and I don't recall ever needing an Activity to init it (although I usually init it from native C++, so YMMV).
Isn't just a Context needed for that call? If yes, I would suggest moving to the getApplicationContext() value instead. I think this is directly or indirectly accessible from a Fragment too.
Sorry for not trying your code, but this is something you can debug quite easily.
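As a minimal sketch of the simplest change (my illustration, not tested against your project): if you keep TessAsyncEngine's Activity parameter, onPictureTaken should pass the hosting Activity rather than the Fragment, since that is what the instanceof check expects:
// Inside CameraFragment.onPictureTaken(byte[] data, Camera camera)
Bitmap bmp = Tools.getFocusedBitmap(getActivity(), camera, data, focusBox.getBox());
// Pass the Activity (params[0] must be an Activity), not the Fragment itself
new TessAsyncEngine().executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, getActivity(), bmp);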
One more note on Android and Tesseract usage. What is Tools.getFocusedBitmap? Will it cut the picture down to a reasonable size? If it keeps the full size, and your Camera is set to full resolution, you are tossing around 5-10+ MP bitmaps, which on Android means hitting an Out-Of-Memory (OOM) error almost instantly. Either set the Camera to a reasonably low resolution, or crop out the part of the photo you care about as the very first processing step and drop the full image.
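A rough sketch of the first option (illustrative only; camera here is assumed to be the open android.hardware.Camera instance, and the chosen size must be one the device actually supports):
// Somewhere in the camera setup code, before takePicture()
Camera.Parameters params = camera.getParameters();
java.util.List<Camera.Size> sizes = params.getSupportedPictureSizes();
Camera.Size smallest = sizes.get(0);
for (Camera.Size s : sizes) {
    if (s.width * s.height < smallest.width * smallest.height) {
        smallest = s; // keep the lowest supported resolution
    }
}
params.setPictureSize(smallest.width, smallest.height);
camera.setParameters(params);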
Also, you may want to reconsider the whole tess-two approach and try the official Google Text API from Google Play services:
https://developers.google.com/android/reference/com/google/android/gms/vision/text/Text
It's a brand new addition; I guess that internally it uses the second generation of the Tesseract engine with the latest improvements, so it is very likely to give better results and better speed than tess-two.
I think it's accessible only from Android 4.4 on and only on devices with Google Play services, and its cross-platform story is poor, so I'm staying with tess-two in my projects, as I also have to support iOS and Windows Phone.
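If you do try it, here is a minimal sketch of the text recognizer from that API (my own illustration, assuming the play-services-vision dependency is added, context is an application Context, and bmp is the captured Bitmap):
// Classes from com.google.android.gms.vision and com.google.android.gms.vision.text
TextRecognizer recognizer = new TextRecognizer.Builder(context).build();
if (recognizer.isOperational()) {
    Frame frame = new Frame.Builder().setBitmap(bmp).build();
    SparseArray<TextBlock> blocks = recognizer.detect(frame); // android.util.SparseArray
    StringBuilder text = new StringBuilder();
    for (int i = 0; i < blocks.size(); i++) {
        text.append(blocks.valueAt(i).getValue()).append("\n");
    }
    recognizer.release();
    // text now holds the recognized lines from the bitmap
}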
And generally I don't trust things that don't come with source: software without source is a zombie, already dead while you are using it (it will take at most 30-50 years to die), and it's a waste of the time and skill of those programmers.

Related

Android Studio: Bluetooth Low Energy BLE Advertisements

On line 370 I need a way to look for a string of 'f's in the advertisement data from a TI CC2650. I found this template online, but I'm looking for specific advertising data. Please let me know which string array I need to look at to find this.
package net.jmodwyer.beacon.beaconPoC;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.DialogFragment;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentSender;
import android.content.SharedPreferences;
import android.location.Location;
import android.os.Bundle;
import android.os.RemoteException;
import android.preference.PreferenceManager;
import android.text.util.Linkify;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ScrollView;
import android.widget.Toast;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GooglePlayServicesClient;
import com.google.android.gms.location.LocationClient;
import net.jmodwyer.ibeacon.ibeaconPoC.R;
import org.altbeacon.beacon.Beacon;
import org.altbeacon.beacon.BeaconConsumer;
import org.altbeacon.beacon.BeaconManager;
import org.altbeacon.beacon.RangeNotifier;
import org.altbeacon.beacon.Region;
import org.altbeacon.beacon.utils.UrlBeaconUrlCompressor;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
/**
* Adapted from original code written by D Young of Radius Networks.
* @author dyoung, jodwyer
*
*/
public class ScanActivity extends Activity implements BeaconConsumer,
GooglePlayServicesClient.ConnectionCallbacks,
GooglePlayServicesClient.OnConnectionFailedListener {
// Constant Declaration
private static final String PREFERENCE_SCANINTERVAL = "scanInterval";
private static final String PREFERENCE_TIMESTAMP = "timestamp";
private static final String PREFERENCE_POWER = "power";
private static final String PREFERENCE_PROXIMITY = "proximity";
private static final String PREFERENCE_RSSI = "rssi";
private static final String PREFERENCE_MAJORMINOR = "majorMinor";
private static final String PREFERENCE_UUID = "uuid";
private static final String PREFERENCE_INDEX = "index";
private static final String PREFERENCE_LOCATION = "location";
private static final String PREFERENCE_REALTIME = "realTimeLog";
private static final String MODE_SCANNING = "Stop Scanning";
private static final String MODE_STOPPED = "Start Scanning";
protected static final String TAG = "ScanActivity";
/*
* Define a request code to send to Google Play services
* This code is returned in Activity.onActivityResult
*/
private final static int
CONNECTION_FAILURE_RESOLUTION_REQUEST = 9000;
private FileHelper fileHelper;
private BeaconManager beaconManager;
private Region region;
private int eventNum = 1;
// This StringBuffer will hold the scan data for any given scan.
private StringBuffer logString;
// Preferences - will actually have a boolean value when loaded.
private Boolean index;
private Boolean location;
private Boolean uuid;
private Boolean majorMinor;
private Boolean rssi;
private Boolean proximity;
private Boolean power;
private Boolean timestamp;
private String scanInterval;
// Added following a feature request from D.Schmid.
private Boolean realTimeLog;
// LocationClient for Google Play Location Services
LocationClient locationClient;
private ScrollView scroller;
private EditText editText;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_scan);
verifyBluetooth();
PreferenceManager.setDefaultValues(this, R.xml.preferences, false);
BeaconScannerApp app = (BeaconScannerApp)this.getApplication();
beaconManager = app.getBeaconManager();
//beaconManager.setForegroundScanPeriod(10);
region = app.getRegion();
beaconManager.bind(this);
locationClient = new LocationClient(this, this, this);
fileHelper = app.getFileHelper();
scroller = (ScrollView)ScanActivity.this.findViewById(R.id.scanScrollView);
editText = (EditText)ScanActivity.this.findViewById(R.id.scanText);
// Initialise scan button.
getScanButton().setText(MODE_STOPPED);
}
@Override
public void onResume() {
super.onResume();
beaconManager.bind(this);
}
@Override
public void onPause() {
super.onPause();
// Uncommenting the following line prevents a ServiceConnection leak when using the back
// arrow in the Action Bar to come out of the file list screen. Unfortunately it also kills
// background scanning, and as I have no workaround right now I'm settling for the lesser of
// two evils.
// beaconManager.unbind(this);
}
public String getCurrentLocation() {
/** Default "error" value is set for location, will be overwritten with the correct lat and
* long values if we're able to connect to location services and get a reading.
*/
String location = "Unavailable";
if (locationClient.isConnected()) {
Location currentLocation = locationClient.getLastLocation();
if (currentLocation != null) {
location = Double.toString(currentLocation.getLatitude()) + "," +
Double.toString(currentLocation.getLongitude());
}
}
return location;
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.main_activity_actions, menu);
return super.onCreateOptionsMenu(menu);
}
@Override
public void onBeaconServiceConnect() {}
/**
*
* @param view
*/
public void onScanButtonClicked(View view) {
toggleScanState();
}
// Handle the user selecting "Settings" from the action bar.
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.Settings:
// Show settings
Intent api = new Intent(this, AppPreferenceActivity.class);
startActivityForResult(api, 0);
return true;
case R.id.action_listfiles:
// Launch list files activity
Intent fhi = new Intent(this, FileHandlerActivity.class);
startActivity(fhi);
return true;
default:
return super.onOptionsItemSelected(item);
}
}
/**
* Start and stop scanning, and toggle button label appropriately.
*/
private void toggleScanState() {
Button scanButton = getScanButton();
String currentState = scanButton.getText().toString();
if (currentState.equals(MODE_SCANNING)) {
stopScanning(scanButton);
} else {
startScanning(scanButton);
}
}
/**
* start looking for beacons.
*/
private void startScanning(Button scanButton) {
// Set UI elements to the correct state.
scanButton.setText(MODE_SCANNING);
((EditText)findViewById(R.id.scanText)).setText("");
// Reset event counter
eventNum = 1;
// Get current values for logging preferences
SharedPreferences sharedPrefs = PreferenceManager.getDefaultSharedPreferences(this);
HashMap <String, Object> prefs = new HashMap<String, Object>();
prefs.putAll(sharedPrefs.getAll());
index = (Boolean)prefs.get(PREFERENCE_INDEX);
location = (Boolean)prefs.get(PREFERENCE_LOCATION);
uuid = (Boolean)prefs.get(PREFERENCE_UUID);
majorMinor = (Boolean)prefs.get(PREFERENCE_MAJORMINOR);
rssi = (Boolean)prefs.get(PREFERENCE_RSSI);
proximity = (Boolean)prefs.get(PREFERENCE_PROXIMITY);
power = (Boolean)prefs.get(PREFERENCE_POWER);
timestamp = (Boolean)prefs.get(PREFERENCE_TIMESTAMP);
scanInterval = (String)prefs.get(PREFERENCE_SCANINTERVAL);
realTimeLog = (Boolean)prefs.get(PREFERENCE_REALTIME);
// Get current background scan interval (if specified)
if (prefs.get(PREFERENCE_SCANINTERVAL) != null) {
beaconManager.setBackgroundBetweenScanPeriod(Long.parseLong(scanInterval));
}
logToDisplay("Scanning...");
// Initialise scan log
logString = new StringBuffer();
//Start scanning again.
beaconManager.setRangeNotifier(new RangeNotifier() {
@Override
public void didRangeBeaconsInRegion(Collection<Beacon> beacons, Region region) {
if (beacons.size() > 0) {
Iterator <Beacon> beaconIterator = beacons.iterator();
while (beaconIterator.hasNext()) {
Beacon beacon = beaconIterator.next();
// Debug - logging a beacon - checking background logging is working.
System.out.println("Logging another beacon.");
logBeaconData(beacon);
}
}
}
});
try {
beaconManager.startRangingBeaconsInRegion(region);
} catch (RemoteException e) {
// TODO - OK, what now then?
}
}
/**
* Stop looking for beacons.
*/
private void stopScanning(Button scanButton) {
try {
beaconManager.stopRangingBeaconsInRegion(region);
} catch (RemoteException e) {
// TODO - OK, what now then?
}
String scanData = logString.toString();
if (scanData.length() > 0) {
// Write file
fileHelper.createFile(scanData);
// Display file created message.
Toast.makeText(getBaseContext(),
"File saved to:" + getFilesDir().getAbsolutePath(),
Toast.LENGTH_SHORT).show();
scanButton.setText(MODE_STOPPED);
} else {
// We didn't get any data, so there's no point writing an empty file.
Toast.makeText(getBaseContext(),
"No data captured during scan, output file will not be created.",
Toast.LENGTH_SHORT).show();
scanButton.setText(MODE_STOPPED);
}
}
/**
*
* @return reference to the start/stop scanning button
*/
private Button getScanButton() {
return (Button)findViewById(R.id.scanButton);
}
/**
*
* @param beacon The detected beacon
*/
private void logBeaconData(Beacon beacon) {
StringBuilder scanString = new StringBuilder();
if (index) {
scanString.append(eventNum++);
}
if (beacon.getServiceUuid() == 0xfeaa) {
if (beacon.getBeaconTypeCode() == 0x00) {
scanString.append(" Eddystone-UID -> ");
scanString.append(" Namespace : ").append(beacon.getId1());
scanString.append(" Identifier : ").append(beacon.getId2());
logEddystoneTelemetry(scanString, beacon);
} else if (beacon.getBeaconTypeCode() == 0x10) {
String url = UrlBeaconUrlCompressor.uncompress(beacon.getId1().toByteArray());
scanString.append(" Eddystone-URL -> " + url);
} else if (beacon.getBeaconTypeCode() == 0x20) {
scanString.append(" Eddystone-TLM -> ");
logEddystoneTelemetry(scanString, beacon);
}
} else {
// Just an old fashioned iBeacon or AltBeacon...
logGenericBeacon(scanString, beacon);
}
logToDisplay(scanString.toString());
scanString.append("\n");
// Code added following a feature request by D.Schmid - writes a single entry to a file
// every time a beacon is detected, the file will only ever have one entry as it will be
// recreated on each call to this method.
// Get current background scan interval (if specified)
if (realTimeLog) {
// We're in realtime logging mode, create a new log file containing only this entry.
fileHelper.createFile(scanString.toString(), "realtimelog.txt");
}
logString.append(scanString.toString());
}
/**
* Logs iBeacon & AltBeacon data.
*/
private void logGenericBeacon(StringBuilder scanString, Beacon beacon) {
// Comment stuff out for whatever reason
/*
if (location) {
scanString.append(" Location: ").append(getCurrentLocation()).append(" ");
}
*/
if (uuid) {
scanString.append(" UUID: ").append(beacon.getId1());
if (beacon.getId1().equals("ffffffff-ffff-ffff-ffff-ffffffffffff ")){
scanString.append("WE DID IT!!!!!!!!!!!");
}else{
scanString.append(" WE DID NOT DO IT =( ");
}
/*
if ((beacon.getId1()).equals ("f")){
scanString.append("WE DID IT!!!!!!!!!!!");
}else{
scanString.append(" WE DID NOT DO IT!!!!!!!!!!! ");
}
*/
}
// Making if statements to test for advertising data
/*
if (majorMinor) {
scanString.append(" Maj. Mnr.: ");
if (beacon.getId2() != null) {
scanString.append(beacon.getId2());
}
scanString.append("-");
if (beacon.getId3() != null) {
scanString.append(beacon.getId3());
}
}
if (rssi) {
scanString.append(" RSSI: ").append(beacon.getRssi());
}
if (proximity) {
scanString.append(" Proximity: ").append(BeaconHelper.getProximityString(beacon.getDistance()));
}
if (power) {
scanString.append(" Power: ").append(beacon.getTxPower());
}
if (timestamp) {
scanString.append(" Timestamp: ").append(BeaconHelper.getCurrentTimeStamp());
} */
}
private void logEddystoneTelemetry(StringBuilder scanString, Beacon beacon) {
// Do we have telemetry data?
if (beacon.getExtraDataFields().size() > 0) {
long telemetryVersion = beacon.getExtraDataFields().get(0);
long batteryMilliVolts = beacon.getExtraDataFields().get(1);
long pduCount = beacon.getExtraDataFields().get(3);
long uptime = beacon.getExtraDataFields().get(4);
scanString.append(" Telemetry version : " + telemetryVersion);
scanString.append(" Uptime (sec) : " + uptime);
scanString.append(" Battery level (mv) " + batteryMilliVolts);
scanString.append(" Tx count: " + pduCount);
}
}
/**
*
* @param line
*/
private void logToDisplay(final String line) {
runOnUiThread(new Runnable() {
public void run() {
editText.append(line + "\n");
// Temp code - don't really want to do this for every line logged, will look for a
// workaround.
Linkify.addLinks(editText, Linkify.WEB_URLS);
scroller.fullScroll(View.FOCUS_DOWN);
}
});
}
private void verifyBluetooth() {
try {
if (!BeaconManager.getInstanceForApplication(this).checkAvailability()) {
final AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("Bluetooth not enabled");
builder.setMessage("Please enable bluetooth in settings and restart this application.");
builder.setPositiveButton(android.R.string.ok, null);
builder.setOnDismissListener(new DialogInterface.OnDismissListener() {
@Override
public void onDismiss(DialogInterface dialog) {
finish();
System.exit(0);
}
});
builder.show();
}
}
catch (RuntimeException e) {
final AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("Bluetooth LE not available");
builder.setMessage("Sorry, this device does not support Bluetooth LE.");
builder.setPositiveButton(android.R.string.ok, null);
builder.setOnDismissListener(new DialogInterface.OnDismissListener() {
@Override
public void onDismiss(DialogInterface dialog) {
finish();
System.exit(0);
}
});
builder.show();
}
}
/* Location services code follows */
@Override
protected void onStart() {
super.onStart();
// Connect the client.
locationClient.connect();
}
@Override
protected void onStop() {
// Disconnect the client.
locationClient.disconnect();
super.onStop();
}
@Override
public void onConnected(Bundle dataBundle) {
// Uncomment the following line to display the connection status.
// Toast.makeText(this, "Connected", Toast.LENGTH_SHORT).show();
}
@Override
public void onDisconnected() {
// Display the connection status
Toast.makeText(this, "Disconnected. Please re-connect.",
Toast.LENGTH_SHORT).show();
}
@Override
public void onConnectionFailed(ConnectionResult connectionResult) {
/* Google Play services can resolve some errors it detects.
* If the error has a resolution, try sending an Intent to
* start a Google Play services activity that can resolve
* error.
*/
if (connectionResult.hasResolution()) {
try {
// Start an Activity that tries to resolve the error
connectionResult.startResolutionForResult(
this,
CONNECTION_FAILURE_RESOLUTION_REQUEST);
/*
* Thrown if Google Play services canceled the original
* PendingIntent
*/
} catch (IntentSender.SendIntentException e) {
// Log the error
e.printStackTrace();
}
} else {
/*
* If no resolution is available, display a dialog to the
* user with the error.
*/
Toast.makeText(getBaseContext(),
"Location services not available, cannot track device location.",
Toast.LENGTH_SHORT).show();
}
}
// Define a DialogFragment that displays the error dialog
public static class ErrorDialogFragment extends DialogFragment {
// Global field to contain the error dialog
private Dialog mDialog;
// Default constructor. Sets the dialog field to null
public ErrorDialogFragment() {
super();
mDialog = null;
}
// Set the dialog to display
public void setDialog(Dialog dialog) {
mDialog = dialog;
}
// Return a Dialog to the DialogFragment.
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
return mDialog;
}
}
/*
* Handle results returned to the FragmentActivity
* by Google Play services
*/
@Override
protected void onActivityResult(
int requestCode, int resultCode, Intent data) {
// Decide what to do based on the original request code
switch (requestCode) {
case CONNECTION_FAILURE_RESOLUTION_REQUEST :
/*
* If the result code is Activity.RESULT_OK, try
* to connect again
*/
switch (resultCode) {
case Activity.RESULT_OK :
/*
* TODO - Try the request again
*/
break;
}
}
}
}
You need to cast it to a String first with .toString():
if (uuid) {
scanString.append(" UUID: ").append(beacon.getId1());
// Making if statements to look for all f's in advertising data
if (beacon.getId1().toString().equals(Str1)){
scanString.append("\nAlarm ACTIVATED\n");
}else{
scanString.append("\n Alarm NOT active\n");
}
}
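(Str1 is not declared in this snippet; the assumption, based on the question's code, is that it holds the all-f UUID string, e.g. String Str1 = "ffffffff-ffff-ffff-ffff-ffffffffffff";)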

Stuck in Connection Failed loop with GoogleApiClient

I am trying to implement the com.google.android.gms.common.api.GoogleApiClient in my project.
The problem is that every time I try to connect, I get the callback to the onConnectionFailed listener with a pending intent that I execute. On a clean install, the very first pending intent will launch an account-select screen. This is expected. Every subsequent restart of the app will bypass the account selection, unless the app's data is cleared in the Application Manager.
After the account-select screen, the signing-in screen appears briefly. It never signs in, though. onActivityResult is called after the signing-in screen flashes, which tries to connect the client. It doesn't connect, and calls the onConnectionFailed listener again.
If I keep trying to execute the intents, I get stuck in a loop with the signing-in screen briefly appearing, then disappearing, but never connecting or signing in. ConnectionResult.toString indicates "SIGN_IN_REQUIRED" and the error code is 4 (the same as the SIGN_IN_REQUIRED constant).
On the API console, I've set up an OAuth 2.0 client ID and a public API access key for Android applications. Notably, my app works using the older com.google.api.services.drive.Drive client.
As for my code:
I've tried using two different implementations here and here. I tried to implement the second example making as few changes as possible. It is reproduced below:
public class MainActivity extends Activity implements ConnectionCallbacks,
OnConnectionFailedListener {
private static final String TAG = "android-drive-quickstart";
private static final int REQUEST_CODE_CAPTURE_IMAGE = 1;
private static final int REQUEST_CODE_CREATOR = 2;
private static final int REQUEST_CODE_RESOLUTION = 3;
private GoogleApiClient mGoogleApiClient;
private Bitmap mBitmapToSave;
/**
* Create a new file and save it to Drive.
*/
private void saveFileToDrive() {
// Start by creating a new contents, and setting a callback.
Log.i(TAG, "Creating new contents.");
final Bitmap image = mBitmapToSave;
Drive.DriveApi.newContents(mGoogleApiClient).setResultCallback(new ResultCallback<DriveApi.ContentsResult>() {
@Override
public void onResult(DriveApi.ContentsResult result) {
// If the operation was not successful, we cannot do anything
// and must
// fail.
if (!result.getStatus().isSuccess()) {
Log.i(TAG, "Failed to create new contents.");
return;
}
// Otherwise, we can write our data to the new contents.
Log.i(TAG, "New contents created.");
// Get an output stream for the contents.
OutputStream outputStream = result.getContents().getOutputStream();
// Write the bitmap data from it.
ByteArrayOutputStream bitmapStream = new ByteArrayOutputStream();
image.compress(Bitmap.CompressFormat.PNG, 100, bitmapStream);
try {
outputStream.write(bitmapStream.toByteArray());
} catch (IOException e1) {
Log.i(TAG, "Unable to write file contents.");
}
// Create the initial metadata - MIME type and title.
// Note that the user will be able to change the title later.
MetadataChangeSet metadataChangeSet = new MetadataChangeSet.Builder()
.setMimeType("image/jpeg").setTitle("Android Photo.png").build();
// Create an intent for the file chooser, and start it.
IntentSender intentSender = Drive.DriveApi
.newCreateFileActivityBuilder()
.setInitialMetadata(metadataChangeSet)
.setInitialContents(result.getContents())
.build(mGoogleApiClient);
try {
startIntentSenderForResult(
intentSender, REQUEST_CODE_CREATOR, null, 0, 0, 0);
} catch (SendIntentException e) {
Log.i(TAG, "Failed to launch file chooser.");
}
}
});
}
@Override
protected void onResume() {
super.onResume();
if (mGoogleApiClient == null) {
// Create the API client and bind it to an instance variable.
// We use this instance as the callback for connection and connection
// failures.
// Since no account name is passed, the user is prompted to choose.
mGoogleApiClient = new GoogleApiClient.Builder(this)
.addApi(Drive.API)
.addScope(Drive.SCOPE_FILE)
.addConnectionCallbacks(this)
.addOnConnectionFailedListener(this)
.build();
}
// Connect the client. Once connected, the camera is launched.
mGoogleApiClient.connect();
}
@Override
protected void onPause() {
if (mGoogleApiClient != null) {
mGoogleApiClient.disconnect();
}
super.onPause();
}
@Override
protected void onActivityResult(final int requestCode, final int resultCode, final Intent data) {
switch (requestCode) {
case REQUEST_CODE_CAPTURE_IMAGE:
// Called after a photo has been taken.
if (resultCode == Activity.RESULT_OK) {
// Store the image data as a bitmap for writing later.
mBitmapToSave = (Bitmap) data.getExtras().get("data");
}
break;
case REQUEST_CODE_CREATOR:
// Called after a file is saved to Drive.
if (resultCode == RESULT_OK) {
Log.i(TAG, "Image successfully saved.");
mBitmapToSave = null;
// Just start the camera again for another photo.
startActivityForResult(new Intent(MediaStore.ACTION_IMAGE_CAPTURE),
REQUEST_CODE_CAPTURE_IMAGE);
}
break;
}
}
@Override
public void onConnectionFailed(ConnectionResult result) {
// Called whenever the API client fails to connect.
Log.i(TAG, "GoogleApiClient connection failed: " + result.toString());
if (!result.hasResolution()) {
// show the localized error dialog.
GooglePlayServicesUtil.getErrorDialog(result.getErrorCode(), this, 0).show();
return;
}
// The failure has a resolution. Resolve it.
// Called typically when the app is not yet authorized, and an
// authorization
// dialog is displayed to the user.
try {
result.startResolutionForResult(this, REQUEST_CODE_RESOLUTION);
} catch (SendIntentException e) {
Log.e(TAG, "Exception while starting resolution activity", e);
}
}
@Override
public void onConnected(Bundle connectionHint) {
Log.i(TAG, "API client connected.");
if (mBitmapToSave == null) {
// This activity has no UI of its own. Just start the camera.
startActivityForResult(new Intent(MediaStore.ACTION_IMAGE_CAPTURE),
REQUEST_CODE_CAPTURE_IMAGE);
return;
}
saveFileToDrive();
}
@Override
public void onConnectionSuspended(int cause) {
Log.i(TAG, "GoogleApiClient connection suspended");
}
}
This happens because, after the first login/authorization, Android keeps using the same default account parameters. If you want to avoid the loop and make sure the picker shows again, you must completely clear the default account by calling Plus.AccountApi.clearDefaultAccount(mGoogleApiClient) before reconnecting.
To achieve this, you must add the Plus.API to the GoogleApiClient builder:
mGoogleApiClient = new GoogleApiClient.Builder(this)
.addApi(Drive.API)
.addApi(Plus.API)
.addScope(Drive.SCOPE_FILE)
.addConnectionCallbacks(this)
.addOnConnectionFailedListener(this)
.build();
And then you can clear the default account before rebuilding the api client and connecting to a different account (rebuilding the api client when changing accounts avoids problems):
// if the api client existed, we terminate it
if (mGoogleApiClient != null && mGoogleApiClient.isConnected()) {
Plus.AccountApi.clearDefaultAccount(mGoogleApiClient);
mGoogleApiClient.disconnect();
}
// build new api client to avoid problems reusing it
mGoogleApiClient = new GoogleApiClient.Builder(this)
.addApi(Drive.API)
.addApi(Plus.API)
.addScope(Drive.SCOPE_FILE)
.setAccountName(account)
.addConnectionCallbacks(this)
.addOnConnectionFailedListener(this)
.build();
mGoogleApiClient.connect();
No additional permissions or api activations are needed for using the Plus.API scope this way. I hope this helps with your problem.
It is a tough one, since I don't have time to completely re-run and analyze your code. And without running it, I don't see anything obvious.
But, since I have this stuff up and running in my app, I'd like to help. Unfortunately, the Google Play services connection and authorization code is scattered all over my app's fragments and activities. So I made an attempt to create a dummy activity and pull all the stuff into it. By 'all the stuff' I mean the account manager wrapper (GA) and the associated account picker code.
The result is some 300 lines of gibberish that may work, but I don't make any claims it will. Take a look and good luck.
package com.......;
import android.accounts.Account;
import android.accounts.AccountManager;
import android.app.Activity;
import android.app.Dialog;
import android.app.DialogFragment;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentSender;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.util.Log;
import android.widget.Toast;
import com.google.android.gms.auth.GoogleAuthUtil;
import com.google.android.gms.common.AccountPicker;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GooglePlayServicesUtil;
import com.google.android.gms.common.api.GoogleApiClient;
public class GooApiClient extends Activity implements
GoogleApiClient.OnConnectionFailedListener, GoogleApiClient.ConnectionCallbacks {
private static final String DIALOG_ERROR = "dialog_error";
private static final String REQUEST_CODE = "request_code";
private static final int REQ_ACCPICK = 1;
private static final int REQ_AUTH = 2;
private static final int REQ_RECOVER = 3;
private GoogleApiClient mGooApiClient;
private boolean mIsInAuth; //block re-entrancy
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (checkPlayServices() && checkUserAccount()) {
gooInit();
gooConnect(true);
}
}
@Override
public void onConnected(Bundle bundle) {
Log.d("_", "connected");
}
@Override
public void onConnectionSuspended(int i) { }
@Override
public void onConnectionFailed(ConnectionResult result) {
Log.d("_", "failed " + result.hasResolution());
if (!mIsInAuth) {
if (result.hasResolution()) {
try {
mIsInAuth = true;
result.startResolutionForResult(this, REQ_AUTH);
} catch (IntentSender.SendIntentException e) {
suicide("authorization fail");
}
} else {
suicide("authorization fail");
}
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent it) {
Log.d("_", "activity result " + requestCode + " " + resultCode);
switch (requestCode) {
case REQ_AUTH: case REQ_RECOVER: {
mIsInAuth = false;
if (resultCode == Activity.RESULT_OK) {
gooConnect(true);
} else if (resultCode == RESULT_CANCELED) {
suicide("authorization fail");
}
return;
}
case REQ_ACCPICK: { // return from account picker
if (resultCode == Activity.RESULT_OK && it != null) {
String emil = it.getStringExtra(AccountManager.KEY_ACCOUNT_NAME);
if (GA.setEmil(this, emil) == GA.CHANGED) {
gooInit();
gooConnect(true);
}
} else if (GA.getActiveEmil(this) == null) {
suicide("selection failed");
}
return;
}
}
super.onActivityResult(requestCode, resultCode, it); // DO NOT REMOVE
}
private boolean checkPlayServices() {
Log.d("_", "check PS");
int status = GooglePlayServicesUtil.isGooglePlayServicesAvailable(this);
if (status != ConnectionResult.SUCCESS) {
if (GooglePlayServicesUtil.isUserRecoverableError(status)) {
mIsInAuth = true;
errorDialog(status, LstActivity.REQ_RECOVER);
} else {
suicide("play services failed");
}
return false;
}
return true;
}
private boolean checkUserAccount() {
String emil = GA.getActiveEmil(this);
Account accnt = GA.getPrimaryAccnt(this, true);
Log.d("_", "check user account " + emil + " " + accnt);
if (emil == null) { // no emil (after install)
if (accnt == null) { // multiple or no accounts available, go pick one
accnt = GA.getPrimaryAccnt(this, false);
Intent it = AccountPicker.newChooseAccountIntent(accnt, null,
new String[]{GoogleAuthUtil.GOOGLE_ACCOUNT_TYPE}, true, null, null, null, null
);
this.startActivityForResult(it, LstActivity.REQ_ACCPICK);
return false; //--------------------->>>
} else { // there's only one goo account registered with the device, skip the picker
GA.setEmil(this, accnt.name);
}
// UNLIKELY BUT POSSIBLE, emil's OK, but the account have been removed since (through settings)
} else {
accnt = GA.getActiveAccnt(this);
if (accnt == null) {
accnt = GA.getPrimaryAccnt(this, false);
Intent it = AccountPicker.newChooseAccountIntent(accnt, null,
new String[]{GoogleAuthUtil.GOOGLE_ACCOUNT_TYPE}, true, null, null, null, null
);
this.startActivityForResult(it, LstActivity.REQ_ACCPICK);
return false; //------------------>>>
}
}
return true;
}
private void gooInit(){
String emil = GA.getActiveEmil(this);
Log.d("_", "goo init " + emil);
if (emil != null){
mGooApiClient = new GoogleApiClient.Builder(this)
.setAccountName(emil).addApi(com.google.android.gms.drive.Drive.API)
.addScope(com.google.android.gms.drive.Drive.SCOPE_FILE)
.addConnectionCallbacks(this).addOnConnectionFailedListener(this)
.build();
}
}
private void gooConnect(boolean bConnect) {
Log.d("_", "goo connect " + bConnect);
if (mGooApiClient != null) {
if (!bConnect) {
mGooApiClient.disconnect();
} else if (! (mGooApiClient.isConnecting() || mGooApiClient.isConnected())){
mGooApiClient.connect();
}
}
}
private void suicide(String msg) {
GA.removeActiveAccnt(this);
Toast.makeText(this, msg, Toast.LENGTH_LONG).show();
finish();
}
private void errorDialog(int errorCode, int requestCode) {
Bundle args = new Bundle();
args.putInt(DIALOG_ERROR, errorCode);
args.putInt(REQUEST_CODE, requestCode);
ErrorDialogFragment dialogFragment = new ErrorDialogFragment();
dialogFragment.setArguments(args);
dialogFragment.show(getFragmentManager(), "errordialog");
}
public static class ErrorDialogFragment extends DialogFragment {
public ErrorDialogFragment() { }
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
int errorCode = getArguments().getInt(DIALOG_ERROR);
int requestCode = getArguments().getInt(DIALOG_ERROR);
return GooglePlayServicesUtil.getErrorDialog(errorCode, getActivity(), requestCode);
}
@Override
public void onDismiss(DialogInterface dialog) {
getActivity().finish();
}
}
private static class GA {
private static final String ACC_NAME = "account_name";
public static final int FAIL = -1;
public static final int UNCHANGED = 0;
public static final int CHANGED = +1;
private static String mCurrEmil = null; // cache locally
private static String mPrevEmil = null; // cache locally
public static Account[] getAllAccnts(Context ctx) {
return AccountManager.get(acx(ctx)).getAccountsByType(GoogleAuthUtil.GOOGLE_ACCOUNT_TYPE);
}
public static Account getPrimaryAccnt(Context ctx, boolean bOneOnly) {
Account[] accts = getAllAccnts(ctx);
if (bOneOnly)
return accts == null || accts.length != 1 ? null : accts[0];
return accts == null || accts.length == 0 ? null : accts[0];
}
public static Account getActiveAccnt(Context ctx) {
return emil2Accnt(ctx, getActiveEmil(ctx));
}
public static String getActiveEmil(Context ctx) {
if (mCurrEmil != null) {
return mCurrEmil;
}
mCurrEmil = ctx == null ? null : pfs(ctx).getString(ACC_NAME, null);
return mCurrEmil;
}
public static Account getPrevEmil(Context ctx) {
return emil2Accnt(ctx, mPrevEmil);
}
public static Account emil2Accnt(Context ctx, String emil) {
if (emil != null) {
Account[] accounts =
AccountManager.get(acx(ctx)).getAccountsByType(GoogleAuthUtil.GOOGLE_ACCOUNT_TYPE);
for (Account account : accounts) {
if (emil.equalsIgnoreCase(account.name)) {
return account;
}
}
}
return null;
}
/**
* Stores a new email in persistent app storage, reporting result
* @param newEmil new email, optionally null
* @param ctx activity context
* @return FAIL, CHANGED or UNCHANGED (based on the following table)
* OLD NEW SAVED RESULT
* ERROR FAIL
* null null null FAIL
* null new new CHANGED
* old null old UNCHANGED
* old != new new CHANGED
* old == new new UNCHANGED
*/
public static int setEmil(Context ctx, String newEmil) {
int result = FAIL; // 0 0
mPrevEmil = getActiveEmil(ctx);
if ((mPrevEmil == null) && (newEmil != null)) {
result = CHANGED;
} else if ((mPrevEmil != null) && (newEmil == null)) {
result = UNCHANGED;
} else if ((mPrevEmil != null) && (newEmil != null)) {
result = mPrevEmil.equalsIgnoreCase(newEmil) ? UNCHANGED : CHANGED;
}
if (result == CHANGED) {
mCurrEmil = newEmil;
pfs(ctx).edit().putString(ACC_NAME, newEmil).apply();
}
return result;
}
public static void removeActiveAccnt(Context ctx) {
mCurrEmil = null;
pfs(ctx).edit().remove(ACC_NAME).apply();
}
private static Context acx(Context ctx) {
return ctx == null ? null : ctx.getApplicationContext();
}
private static SharedPreferences pfs(Context ctx) {
return ctx == null ? null : PreferenceManager.getDefaultSharedPreferences(acx(ctx));
}
}
}
BTW, I know how to spell 'email', 'Emil' just happened to be my uncle's name and I couldn't resist :-)
UPDATE (2015-Apr-11):
I've recently revisited the code that handles Google Drive authorization and account switching. The result can be found here and it supports both the REST and GDAA APIs.

Android AR in Fragment

I've been trying to get some AR (Augmented Reality) SDKs working in a Fragment.
However, I can't seem to get it working.
I found some code from someone who got Metaio (an AR framework) working in a fragment.
So I applied that code to my own project. It works, but that code is not written to scan a picture, and I want to scan a picture marker with it.
I copied some code to scan a picture marker from a Metaio sample project, but it doesn't work.
Right now it fails at the following line (debug logs after it never get logged):
trackingConfigFile = AssetsManager.getAssetPath(getActivity().getApplicationContext(), "AEDApp/Assets/TrackingData_PictureMarker.xml");
This is my full code:
package com.example.bt6_aedapp;
import android.app.Application;
import android.content.res.Configuration;
import android.hardware.Camera.CameraInfo;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.metaio.cloud.plugin.MetaioCloudPlugin;
import com.metaio.sdk.MetaioDebug;
import com.metaio.sdk.MetaioSurfaceView;
import com.metaio.sdk.SensorsComponentAndroid;
import com.metaio.sdk.jni.ERENDER_SYSTEM;
import com.metaio.sdk.jni.ESCREEN_ROTATION;
import com.metaio.sdk.jni.IGeometry;
import com.metaio.sdk.jni.IMetaioSDKAndroid;
import com.metaio.sdk.jni.IMetaioSDKCallback;
import com.metaio.sdk.jni.MetaioSDK;
import com.metaio.sdk.jni.TrackingValuesVector;
import com.metaio.sdk.jni.Vector3d;
import com.metaio.tools.Screen;
import com.metaio.tools.SystemInfo;
import com.metaio.tools.io.AssetsManager;
public class fragmentA extends Fragment implements MetaioSurfaceView.Callback {
private Application mAppContext;
private ViewGroup mRootLayout;
String trackingConfigFile;
private MetaioSDKCallbackHandler mCallback;
private IGeometry mModel;
private IMetaioSDKAndroid mMetaioSDK;
private MetaioSurfaceView mSurfaceView;
private static boolean mNativeLibsLoaded = false;
private boolean mRendererInitialized;
private SensorsComponentAndroid mSensors;
static {
mNativeLibsLoaded = IMetaioSDKAndroid.loadNativeLibs();
}
@Override
public void onCreate(Bundle savedInstanceState) {
MetaioCloudPlugin.startJunaio(null, getActivity().getApplicationContext());
super.onCreate(savedInstanceState);
Log.d("LifeCycle", "onCreate");
mAppContext = getActivity().getApplication();
mMetaioSDK = null;
mSurfaceView = null;
mRendererInitialized = false;
try {
mCallback = new MetaioSDKCallbackHandler();
if (!mNativeLibsLoaded){
throw new Exception("Unsupported platform, failed to load the native libs");
}
// Create sensors component
mSensors = new SensorsComponentAndroid(mAppContext);
// Create Unifeye Mobile by passing Activity instance and
// application signature
mMetaioSDK = MetaioSDK.CreateMetaioSDKAndroid(getActivity(), getResources().getString(R.string.metaioSDKSignature));
mMetaioSDK.registerSensorsComponent(mSensors);
} catch (Throwable e) {
MetaioDebug.log(Log.ERROR, "ArCameraFragment.onCreate: failed to create or intialize metaio SDK: " + e.getMessage());
return;
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
Log.d("LifeCycle", "onCreateView");
View view = inflater.inflate(R.layout.fragment_a, container, false);
mRootLayout = (ViewGroup)getActivity().findViewById(R.id.pager);
return view;
}
@Override
public void onStart() {
super.onStart();
Log.d("LifeCycle", "onStart");
if(mMetaioSDK == null){
return;
}
MetaioDebug.log("ArCameraFragment.onStart()");
try {
mSurfaceView = null;
// Start camera
startCamera();
// Add Unifeye GL Surface view
mSurfaceView = new MetaioSurfaceView(mAppContext);
mSurfaceView.registerCallback(this);
mSurfaceView.setKeepScreenOn(true);
MetaioDebug.log("ArCameraFragment.onStart: addContentView(mMetaioSurfaceView)");
mRootLayout.addView(mSurfaceView, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT,
ViewGroup.LayoutParams.WRAP_CONTENT));
mSurfaceView.setZOrderMediaOverlay(true);
} catch (Exception e) {
MetaioDebug.log(Log.ERROR, "Error creating views: " + e.getMessage());
MetaioDebug.printStackTrace(Log.ERROR, e);
}
}
@Override
public void onResume() {
super.onResume();
Log.d("LifeCycle", "onResume");
// make sure to resume the OpenGL surface
if (mSurfaceView != null) {
mSurfaceView.onResume();
}
if(mMetaioSDK != null){
mMetaioSDK.resume();
}
}
@Override
public void onPause() {
super.onPause();
Log.d("LifeCycle", "onPause");
// pause the OpenGL surface
if (mSurfaceView != null) {
mSurfaceView.onPause();
}
if (mMetaioSDK != null) {
// Disable the camera
mMetaioSDK.pause();
}
}
@Override
public void onStop() {
super.onStop();
Log.d("LifeCycle", "onStop");
if (mMetaioSDK != null) {
// Disable the camera
mMetaioSDK.stopCamera();
}
if (mSurfaceView != null) {
mRootLayout.removeView(mSurfaceView);
}
System.runFinalization();
System.gc();
}
@Override
public void onDestroy() {
super.onDestroy();
mCallback.delete();
mCallback = null;
/*Log.d("LifeCycle", "onDestroy");
try {
mRendererInitialized = false;
} catch (Exception e) {
MetaioDebug.printStackTrace(Log.ERROR, e);
}
MetaioDebug.log("ArCameraFragment.onDestroy");
if (mMetaioSDK != null) {
mMetaioSDK.delete();
mMetaioSDK = null;
}
MetaioDebug.log("ArCameraFragment.onDestroy releasing sensors");
if (mSensors != null) {
mSensors.registerCallback(null);
mSensors.release();
mSensors.delete();
mSensors = null;
}
// Memory.unbindViews(activity.findViewById(android.R.id.content));
System.runFinalization();
System.gc();*/
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
final ESCREEN_ROTATION rotation = Screen.getRotation(getActivity());
mMetaioSDK.setScreenRotation(rotation);
MetaioDebug.log("onConfigurationChanged: " + rotation);
}
@Override
public void onDrawFrame() {
if(mMetaioSDK != null) {
TrackingValuesVector poses = mMetaioSDK.getTrackingValues();
if(poses.size() != 0) {
mModel.setCoordinateSystemID(poses.get(0).getCoordinateSystemID());
}
}
// Log.d("LifeCycle", "onDrawFrame");
/* if (mRendererInitialized) {
mMetaioSDK.render();
} */
}
@Override
public void onSurfaceCreated() {
Log.d("LifeCycle", "onSurfaceCreated");
try {
if (!mRendererInitialized) {
mMetaioSDK.initializeRenderer(mSurfaceView.getWidth(), mSurfaceView.getHeight(), Screen.getRotation(getActivity()),
ERENDER_SYSTEM.ERENDER_SYSTEM_OPENGL_ES_2_0);
mRendererInitialized = true;
} else {
MetaioDebug.log("ArCameraFragment.onSurfaceCreated: Reloading textures...");
mMetaioSDK.reloadTextures();
}
MetaioDebug.log("ArCameraFragment.onSurfaceCreated: Registering audio renderer...");
// mMetaioSDK.registerAudioCallback(mSurfaceView.getAudioRenderer());
mMetaioSDK.registerCallback(mCallback);
MetaioDebug.log("ARViewActivity.onSurfaceCreated");
} catch (Exception e) {
MetaioDebug.log(Log.ERROR, "ArCameraFragment.onSurfaceCreated: " + e.getMessage());
}
mSurfaceView.queueEvent(new Runnable() {
@Override
public void run() {
loadContents();
}
});
}
private void loadContents() {
try {
trackingConfigFile = AssetsManager.getAssetPath(getActivity().getApplicationContext(), "AEDApp/Assets/TrackingData_PictureMarker.xml");
boolean result = mMetaioSDK.setTrackingConfiguration(trackingConfigFile);
Log.d("result", Boolean.toString(result));
MetaioDebug.log("Tracking data loaded: " + result);
String aedLogo = AssetsManager.getAssetPath(getActivity().getApplicationContext(), "AEDApp/Assets/metaioman.md2");
Log.d("aedLogo", "aaa: " + aedLogo);
if(aedLogo != null) {
mModel = mMetaioSDK.createGeometry(aedLogo);
if(mModel != null) {
mModel.setScale(new Vector3d(4.0f, 4.0f, 4.0f));
}
else {
MetaioDebug.log(Log.ERROR, "Error loading geometry: " + aedLogo);
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
@Override
public void onSurfaceChanged(int width, int height) {
Log.d("LifeCycle", "onSurfaceChanged");
mMetaioSDK.resizeRenderer(width, height);
}
@Override
public void onSurfaceDestroyed() {
Log.d("LifeCycle", "onSurfaceDestroyed");
MetaioDebug.log("ArCameraFragment.onSurfaceDestroyed(){");
mSurfaceView = null;
// mMetaioSDK.registerAudioCallback(null);
}
protected void startCamera() {
final int cameraIndex = SystemInfo.getCameraIndex(CameraInfo.CAMERA_FACING_BACK);
if (mMetaioSDK != null) {
mMetaioSDK.startCamera(cameraIndex, 640, 480);
}
}
final class MetaioSDKCallbackHandler extends IMetaioSDKCallback {
@Override
public void onTrackingEvent(final TrackingValuesVector trackingValues) {
super.onTrackingEvent(trackingValues);
if(!trackingValues.isEmpty() && trackingValues.get(0).isTrackingState()){
Log.d("Track", "NOT EMPTY");
}
}
}
}
I really hope someone can help me with this, as I cannot figure it out. :(
EDIT
The error that e.printStackTrace() is printing is:
03-24 20:25:19.068: W/System.err(28062): java.lang.NullPointerException: null string
03-24 20:25:19.068: W/System.err(28062): at com.metaio.sdk.jni.MetaioSDKJNI.IMetaioSDK_setTrackingConfiguration__SWIG_1(Native Method)
03-24 20:25:19.068: W/System.err(28062): at com.metaio.sdk.jni.IMetaioSDK.setTrackingConfiguration(IMetaioSDK.java:106)
03-24 20:25:19.068: W/System.err(28062): at com.example.bt6_aedapp.fragmentA.loadContents(fragmentA.java:278)
03-24 20:25:19.068: W/System.err(28062): at com.example.bt6_aedapp.fragmentA.access$0(fragmentA.java:274)
03-24 20:25:19.068: W/System.err(28062): at com.example.bt6_aedapp.fragmentA$1.run(fragmentA.java:268)
03-24 20:25:19.068: W/System.err(28062): at android.opengl.GLSurfaceView$GLThread.guardedRun(GLSurfaceView.java:1463)
03-24 20:25:19.068: W/System.err(28062): at android.opengl.GLSurfaceView$GLThread.run(GLSurfaceView.java:1240)
What I want to do with it:
I want to be able to 'scan' a picture (https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcQFqKIurD3QMU0zVeiwEhtm1twLmTCDlnFulfCwDkxTA1_XQjIQ) and detect the image in the app. The image is referenced in the project's Assets folder, and I've made an XML file where the marker for it is defined, as described on the Metaio website. After detecting it I'm going to do some database work, but for now I need to get the detection part working.
EDIT
If anyone knows how I can get another AR framework working in Fragments, I would love to know.
I don't know much about fragments, but as for the null string, I think it happens because you haven't extracted the assets.
It's explained in this video: http://youtu.be/KVtCi-WwmFU?t=30m29s
Basically, what you have to do is add this code
private class AssetsExtracter extends AsyncTask<Integer, Integer, Boolean>{
@Override
protected Boolean doInBackground(Integer... params){
try
{
AssetsManager.extractAllAssets(getApplicationContext(), BuildConfig.DEBUG);
}catch (IOException e){
MetaioDebug.printStackTrace(Log.ERROR, e);
return false;
}
return true;
}
}
to your activity (or in this case, I guess, your fragment).
Then you have to add a field of this class like
private AssetsExtracter mTask;
and inside the onCreate() method you put
mTask = new AssetsExtracter();
mTask.execute(0);
After that, your assets should be available from AssetsManager.getAssetPath(..) and it shouldn't return a null string anymore.

Youtube video play with android player

How can we play a YouTube video with the Android media player?
If anybody has an idea, please explain it to me.
I have to play the URL "http://www.youtube.com/embed/bIPcobKMB94?autoplay=1" with the default Android media player.
When I played this URL with the Android media player, I got MediaPlayer error (1, -2147483648).
I can play that URL in the media player on my Android phone, but I am not able to play it on a tablet. Can anybody help me? Why am I not able to play that video on a tablet?
rtsp://v6.cache3.c.youtube.com/CiILENy73wIaGQnokCRYfXXPsBMYDSANFEgGUgZ2aWRlb3MM/0/0/0/video.3gp
Thanks
Well, the Android part is quite easy. You just need a raw URI of the YouTube video and trigger an intent with it:
Uri uri = Uri.parse("http://<link-to-RAW-youtube-video>"); // i.e. mp4 version
Intent intent = new Intent(Intent.ACTION_VIEW);
intent.setDataAndType(uri, "video/*"); // important! otherwise you just download the video
startActivity(intent); // called directly from an other activity as you see
If the user has multiple video players installed, the user also gets a choice of which one to use for the playback.
The tricky part here is not Android-related; it is how to get the raw URI of a YouTube video. Usually they are extracted from the source code of the original YouTube video page, but that's another topic. I tried the code above with a self-extracted URI to an mp4 version of the video and it worked just fine on my Android 4.0 phone.
EDIT:
You really have to use a raw URI to the YouTube video; for your video it would be:
http://o-o.preferred.ber01s04.v22.lscache2.c.youtube.com/videoplayback?sparams=cp%2Cid%2Cip%2Cipbits%2Citag%2Cratebypass%2Csource%2Cexpire&fexp=904550%2C919700%2C911614&itag=18&ip=77.0.0.0&signature=C721CE7543081FC0C805C86F5D3C4D9B34D77764.D4288106CF7A3153FF1574F2334161CBD1176535&sver=3&ratebypass=yes&source=youtube&expire=1332001847&key=yt1&ipbits=8&cp=U0hSR1BLT19JUUNOMl9IRVNJOmllRjJJYy1SSG92&id=6c83dca1b28c07de&title=AT%26T%20Samsung%20Captivate%20TV%20Commercial%20-%20Galaxy%20S%20Phone-360p
It's very long but it will work :)
I had a very hard time implementing a player supporting many features and formats. Even VideoView didn't match all of my needs. Finally, I ended up writing my own solution based on SurfaceView and MediaPlayer. Here is the source code. It was validated on 4.0.3 and 4.1.2 for local 3gp, local mp4, HTTP 3gp and YouTube RTSP streams.
Remember that, presently, a URI for a valid YouTube video on Android has to look like this: rtsp://v5.cache1.c.youtube.com/CjYLENy73wIaLQnhycnrJQ8qmRMYESARFEIJbXYtZ29vZ2xlSARSBXdhdGNoYPj_hYjnq6uUTQw=/0/0/0/video.3gp. You get them via http://m.youtube.video.
The activity's arguments are set by Intent. There is quite a variety of options; see the last source code provided in my comment. You can remove the code related to the Sequencer, Scenario and Test classes. They are specific to my program (an automated system for platform testing).
package my.package;
import java.io.IOException;
import java.lang.ref.WeakReference;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.Intent;
import android.graphics.Point;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnBufferingUpdateListener;
import android.media.MediaPlayer.OnCompletionListener;
import android.media.MediaPlayer.OnErrorListener;
import android.media.MediaPlayer.OnPreparedListener;
import android.media.MediaPlayer.OnVideoSizeChangedListener;
import android.net.Uri;
import android.os.Build.VERSION;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.text.TextUtils;
import android.text.format.Time;
import android.view.Display;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import my.package.Log;
import my.package.R;
import my.package.Scenario;
import my.package.Sequencer;
import my.package.Test;
import my.package.TestUtil;
/**
* An activity to display local or remote video or play audio file.
*/
public class MediaPlayerActivity extends Activity implements
OnBufferingUpdateListener, OnCompletionListener, OnPreparedListener,
OnVideoSizeChangedListener, OnErrorListener, SurfaceHolder.Callback {
private static final String LOG_TAG = MediaPlayerActivity.class
.getSimpleName();
// Mandatory creation arguments passed by extras in starting Intent
public static final String SEQUENCER = "sequencer"; //$NON-NLS-1$
public static final String SCENARIO = "scenario"; //$NON-NLS-1$
public static final String TEST = "test"; //$NON-NLS-1$
public static final String SOURCE_URI = "uri"; //$NON-NLS-1$
// Optional creation arguments passed by extras in starting Intent
public static final String PAUSE_ON_BACKGROUND = "auto_pause"; //$NON-NLS-1$
public static final String TIMEOUT = "timeout"; //$NON-NLS-1$
public static final String LOOP = "loop"; //$NON-NLS-1$
public static final String SCREEN_ON_WHILE_PLAYING = "screen_on_while_playing"; //$NON-NLS-1$
// data arguments returned by Intent on finish
public static final String REASON = "cause"; //$NON-NLS-1$
public static final String EXCEPTION = "exception"; //$NON-NLS-1$
// additional state bundle arguments.
private static final String START_POSITION = "start"; //$NON-NLS-1$
private static final String VIDEO_WIDTH = "video_width"; //$NON-NLS-1$
private static final String VIDEO_HEIGHT = "video_height"; //$NON-NLS-1$
private WeakReference<Sequencer> mSequencer = new WeakReference<Sequencer> (null);
private WeakReference<Test> mTest = new WeakReference<Test> (null);
/**
* URI of the video/audio source.
*
* This player supports a variety of videos and audio sources, either local
* or remote.
* <p>
* An HTTP live streaming URI would be:
* {@code httplive://xboodangx.api.channel.livestream.com/3.0/playlist.m3u8}
* </p>
* <p>
* A local video file URI would be {@code file:///sdcard/spiderman.mp4}
* </p>
* <p>
* A remote 3GPP format video URI would be
* {@code http://commonsware.com/misc/test2.3gp}
* </p>
* <p>
* And finally an RTP or RTSP video source URI would be
* {@code rtsp://v4.cache1.c.youtube.com/CjYLENy73wIaLQk4RDShYkdS1BMYDSANFEIJbXYtZ29vZ2xlSARSBXdhdGNoYK-Cn8qh8J6-Tgw=/0/0/0/video.3gp}
* </p>
*/
private Uri mMediaURI;
/**
* Input: this flag is set to true if the video must be paused when the
* activity is not visible any more. The video will resume when the activity
* is visible again.
*/
private boolean mPauseOnBackground;
/**
* Input: number of milliseconds until automatic shutdown, or -1 if no
* timeout.
*/
private long mTimeout;
/**
* Input: flag set to true to loop back to the beginning of the video when
* reaching its end
*/
private boolean mLoop;
/**
* The width of the video, obtained by
* {@link #onVideoSizeChanged(MediaPlayer, int, int)}
*/
private int mVideoWidth;
/**
* The height of the video, obtained by
* {@link #onVideoSizeChanged(MediaPlayer, int, int)}
*/
private int mVideoHeight;
private MediaPlayer mMediaPlayer;
private SurfaceView mPreview;
private boolean mIsVideoSizeKnown = false;
private boolean mMediaPrepared = false;
/**
* This member is set to position the video should start. It is set when
* pausing the video and used when restoring the instance.
*/
private int mStartPosition;
private boolean mTimeoutSet;
private boolean mScreenOnWhilePlaying;
private SurfaceHolder mHolder;
private static class ShutdownHandler extends Handler {
WeakReference<MediaPlayerActivity> mActivity;
public ShutdownHandler(MediaPlayerActivity activity) {
mActivity = new WeakReference<MediaPlayerActivity>(activity);
}
@Override
public void handleMessage(Message msg) {
MediaPlayerActivity activity = mActivity.get();
if (activity != null) {
activity.finishTest(Activity.RESULT_OK, null);
} else {
//Log.w(LOG_TAG, "no activity for shutdown");
}
}
}
public MediaPlayerActivity() {
super();
// These members are initialized in onCreate(Bundle) by the
// starting Intent, the first time the activity is created, and restored
// in the same method with the arguments in the saved instance bundle.
mSequencer = new WeakReference<Sequencer> (null);
mTest = new WeakReference<Test> (null);
setSourceURI(null);
setPauseOnBackground(false);
setPlayTimeout(-1);
setLooping(false);
setScreenOnWhilePlaying(true);
// Initialize internals.
mIsVideoSizeKnown = false;
mVideoWidth = mVideoHeight = 0; // unknown
mMediaPrepared = false;
mMediaPlayer = null;
mPreview = null; // set in onCreate(Bundle)
setStartPosition(0); // beginning of the video
mTimeoutSet = false;
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.video_player);
Intent intent = getIntent();
if (savedInstanceState != null) {
onRestoreInstanceState(savedInstanceState);
} else if (intent != null) {
Log.d(LOG_TAG, "Loading starting Intent extras...");
// read starting Intent extras.
_updateForeignReferences(intent);
setSourceURI((Uri) intent.getParcelableExtra(SOURCE_URI));
setPlayTimeout(intent.getLongExtra(TIMEOUT, -1L));
setPauseOnBackground(intent.getBooleanExtra(PAUSE_ON_BACKGROUND,
false));
setLooping(intent.getBooleanExtra(LOOP, false));
setScreenOnWhilePlaying(intent.getBooleanExtra(SCREEN_ON_WHILE_PLAYING, true));
}
mTimeoutSet = false;
_updateWidgets();
}
protected void onRestoreInstanceState(Bundle savedInstanceState) {
super.onRestoreInstanceState(savedInstanceState);
Log.d(LOG_TAG, "Restoring instance state...");
// restore saved references
_updateSavedForeignReferences(savedInstanceState);
// restore saved inputs
setSourceURI(Uri.parse(savedInstanceState.getString(SOURCE_URI)));
setPlayTimeout(savedInstanceState.getLong(TIMEOUT));
setPauseOnBackground(savedInstanceState.getBoolean(PAUSE_ON_BACKGROUND));
setLooping(savedInstanceState.getBoolean(LOOP));
setScreenOnWhilePlaying(savedInstanceState.getBoolean(SCREEN_ON_WHILE_PLAYING));
// restore internals
setStartPosition(savedInstanceState.getInt(START_POSITION, 0));
mVideoWidth = savedInstanceState.getInt(VIDEO_WIDTH);
mVideoHeight = savedInstanceState.getInt(VIDEO_HEIGHT); // unknown
mIsVideoSizeKnown = (mVideoWidth > 0) && (mVideoHeight > 0);
}
@Override
protected void onResume() {
super.onResume();
if (mMediaPlayer == null) {
try {
_playMedia();
} catch (Exception e) {
_fail(e);
}
}
}
@Override
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
Log.d(LOG_TAG, "Saving instance state");
Sequencer sequencer = mSequencer.get();
if (sequencer != null) {
outState.putInt(SEQUENCER, sequencer.getPosition());
Scenario scenario = sequencer.getScenario();
if (scenario != null) {
outState.putInt(SCENARIO, scenario.getScenarioId());
}
}
Test test = mTest.get();
if (test != null) {
outState.putString(TEST, TestUtil.getTestPath(test, TestUtil.Path.Static));
}
if (getSourceURI() != null) {
outState.putString(SOURCE_URI, getSourceURI().toString());
}
outState.putBoolean(LOOP, isLooping());
outState.putBoolean(PAUSE_ON_BACKGROUND, isPausingOnBackground());
outState.putLong(TIMEOUT, getPlayTimeout());
outState.putBoolean(SCREEN_ON_WHILE_PLAYING, isScreenOnWhilePlaying());
outState.putInt(START_POSITION, getStartPosition());
}
@Override
protected void onPause() {
super.onPause();
if (isPausingOnBackground()) {
_pausePlayback();
}
}
@Override
protected void onStop() {
super.onStop();
Log.d(LOG_TAG, "onStop");
if (isPausingOnBackground()) {
_releaseMediaPlayer();
}
}
@Override
protected void onDestroy() {
super.onDestroy();
Log.d(LOG_TAG, "onDestroy");
// TODO: It would be fine to fail the test if the activity was destroyed
// if we didn't finish the test yet, but there are far too many cases
// and I failed to implement one working with all of them!
/* if (!mInstanceSaved) {
finishTest(Activity.RESULT_FIRST_USER, new Intent().putExtra(REASON,
"Activity destroyed. Something certainly goes wrong there."));
}
*/
_releaseMediaPlayer();
}
// Restore sequencer, scenario and test references from saved state.
private void _updateSavedForeignReferences(Bundle savedInstanceState) {
int sequencer = savedInstanceState.getInt(SEQUENCER, -1);
int scenarioId = savedInstanceState.getInt(SCENARIO, -1);
mSequencer = new WeakReference<Sequencer>(null);
mTest = new WeakReference<Test>(null);
if (scenarioId >= 0 && sequencer >= 0) {
Scenario scenario = Controller.controller.getData().scenarios()
.getScenario(scenarioId);
mSequencer = new WeakReference<Sequencer>(Controller.controller
.engine().getSequencerAt(sequencer));
String testPath = savedInstanceState.getString(TEST);
if (!TextUtils.isEmpty(testPath)) {
mTest = new WeakReference<Test>(TestUtil.fromPath(
scenario.getRootTest(), testPath));
}
}
}
// Update sequencer, scenario and test references from starting Intent
protected void _updateForeignReferences(Intent intent) {
int scenarioId = intent.getIntExtra(MediaPlayerActivity.SCENARIO, -1);
Scenario scenario = Controller.controller.getData().scenarios()
.getScenario(scenarioId);
int sequencer = intent.getIntExtra(MediaPlayerActivity.SEQUENCER, -1);
mSequencer = new WeakReference<Sequencer>(null);
mTest = new WeakReference<Test>(null);
if (scenarioId >= 0 && sequencer >= 0) {
mSequencer = new WeakReference<Sequencer>(Controller.controller
.engine().getSequencerAt(sequencer));
String testPath = intent.getStringExtra(MediaPlayerActivity.TEST);
if (!TextUtils.isEmpty(testPath)) {
mTest = new WeakReference<Test>(TestUtil.fromPath(
scenario.getRootTest(), testPath));
}
}
}
/**
* Terminate the test case and finish the activity at the same time.
* <p>
* The result code and data are passed to both the parent activity and the
* {@link Test#terminate(int, Sequencer, Object)} method.
* </p>
*
* @param resultCode
* the result code. May be one of the Activity result codes but
* also any other one having a meaning for the test and caller
* activity.
* @param data
* extra result data. Can be null.
*/
public void finishTest(int resultCode, Intent data) {
Test test = mTest.get();
Sequencer sequencer = mSequencer.get();
if ((test != null) && (sequencer != null)) {
test.terminate(resultCode, sequencer, data);
// prevent any further call to finishTest.
mTest = new WeakReference<Test> (null);
}
if (!isFinishing()) {
setResult(Activity.RESULT_OK, data);
finish();
}
}
@SuppressWarnings("deprecation")
private void _updateWidgets() {
SurfaceView surface = (SurfaceView) findViewById(R.id.surface);
mPreview = surface;
SurfaceHolder holder = surface.getHolder();
holder.addCallback(this);
if (VERSION.SDK_INT < android.os.Build.VERSION_CODES.HONEYCOMB) {
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
}
public void onBufferingUpdate(MediaPlayer arg0, int percent) {
Log.d(LOG_TAG, "onBufferingUpdate percent:" + percent);
if (percent >= 100) {
mMediaPlayer.setOnBufferingUpdateListener(null);
}
// no display so don't bother.
}
public void onCompletion(MediaPlayer player) {
_releaseMediaPlayer();
finishTest(Activity.RESULT_OK, null);
}
private void _rearmPlayer() {
mVideoWidth = 0;
mVideoHeight = 0;
mMediaPrepared = false;
mIsVideoSizeKnown = false;
}
public void onVideoSizeChanged(MediaPlayer mp, int width, int height) {
if (mIsVideoSizeKnown) return; // further notifications are completely wrong!!
Log.i(LOG_TAG, "Video size: " + width + "x" + height);
mIsVideoSizeKnown = true;
mVideoWidth = width;
mVideoHeight = height;
if (width > 0 && height > 0) {
_fitSurfaceForVideo();
} else {
// audio only or video size unknown.
}
if (mMediaPrepared) {
_startPlayback();
}
}
@SuppressWarnings("deprecation")
@SuppressLint("NewApi")
private void _fitSurfaceForVideo() {
Display display = getWindowManager().getDefaultDisplay();
Point size = new Point();
if (VERSION.SDK_INT >= 13) {
display.getSize(size);
} else {
size.x = display.getWidth();
size.y = display.getHeight();
}
double ratioVideo = (double)mVideoWidth / mVideoHeight;
double ratioScreen = (double)size.x / size.y;
if (ratioScreen > ratioVideo) {
// fit in height and scale the width to match the video ratio
mVideoHeight = size.y;
mVideoWidth = (int) (size.y * ratioVideo); // width = height * aspect ratio
} else {
// fit in width and scale height to keep the video ratio
mVideoWidth = size.x;
mVideoHeight = (int) (size.x / ratioVideo);
}
}
public void onPrepared(MediaPlayer mediaplayer) {
mMediaPrepared = true;
if (mIsVideoSizeKnown) {
_startPlayback();
}
}
private void _startPlayback() {
SurfaceHolder holder = mPreview.getHolder();
if (mVideoWidth > 0 && mVideoHeight > 0) {
holder.setFixedSize(mVideoWidth, mVideoHeight);
}
if (mStartPosition > 0) {
Log.i(LOG_TAG, String.format("Resume at %f seconds", mStartPosition / 1000.f));
mMediaPlayer.seekTo(mStartPosition);
} else {
Log.i(LOG_TAG, "Start video");
}
if (!mTimeoutSet && (getPlayTimeout() > 0)) {
// this is a constant time reference: deduce the time already played.
long remaining = getPlayTimeout() - mStartPosition;
Time time = new Time();
time.setToNow();
time.second += remaining / 1000;
time.normalize(false);
Log.i(LOG_TAG, "Will end the video on " + time.format2445());
new ShutdownHandler(this).sendEmptyMessageDelayed(0, remaining);
mTimeoutSet = true;
}
mMediaPlayer.start();
}
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
Log.d(LOG_TAG, String.format(
"surfaceChanged called: format=%d, width=%d, height=%d",
format, width, height));
}
public void surfaceDestroyed(SurfaceHolder surfaceholder) {
Log.d(LOG_TAG, "surfaceDestroyed called");
mHolder = null;
if (mMediaPlayer != null) {
mMediaPlayer.setDisplay(null);
}
}
public void surfaceCreated(SurfaceHolder holder) {
Log.d(LOG_TAG, "surfaceCreated called");
mHolder = holder;
if (mMediaPlayer != null) {
// surface created after the media player
try {
mMediaPlayer.setDisplay(holder);
mMediaPlayer.setScreenOnWhilePlaying(isScreenOnWhilePlaying());
mMediaPlayer.setOnCompletionListener(this);
if (!mMediaPrepared) {
mMediaPlayer.setDataSource(this, mMediaURI);
mMediaPlayer.prepareAsync();
}
} catch (IllegalStateException e) {
_fail(e);
} catch (IllegalArgumentException e) {
_fail(e);
} catch (SecurityException e) {
_fail(e);
} catch (IOException e) {
_fail(e);
}
}
}
private void _playMedia() throws IllegalArgumentException,
SecurityException, IllegalStateException, IOException {
Log.d(LOG_TAG, "_playMedia()");
_rearmPlayer();
/*
* The video should be in a streamable mp4 or 3gpp format. The URI
* scheme may be Http. Mediaplayer can only play
* "progressive streamable contents" which basically means: 1. the movie
* atom has to precede all the media data atoms. 2. The clip has to be
* reasonably interleaved.
*/
if (mMediaURI != null) {
Log.i(LOG_TAG, "Source: " + mMediaURI);
// Create a new media player and set the listeners
if (mMediaPlayer != null) {
mMediaPlayer.reset();
} else {
mMediaPlayer = new MediaPlayer();
}
// mMediaPlayer.setDataSource(this, mMediaURI);
mMediaPlayer.setOnBufferingUpdateListener(this);
mMediaPlayer.setOnPreparedListener(this);
mMediaPlayer.setLooping(isLooping());
mMediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
mMediaPlayer.setOnVideoSizeChangedListener(this);
mMediaPlayer.setOnErrorListener(this); // register onError() so player errors reach _fail()
if (mHolder != null) {
// surface created before the media player
mMediaPlayer.setDisplay(mHolder);
mMediaPlayer.setScreenOnWhilePlaying(isScreenOnWhilePlaying());
mMediaPlayer.setOnCompletionListener(this);
mMediaPlayer.setDataSource(this, mMediaURI);
mMediaPlayer.prepareAsync();
}
} else {
_fail("No video source defined");
}
}
// fail due to exception
private void _fail(Exception e) {
Log.e(LOG_TAG, e);
Intent data = new Intent();
data.putExtra(EXCEPTION, e);
finishTest(Activity.RESULT_FIRST_USER, data);
}
// fail due to generic error
private void _fail(String text) {
Log.e(LOG_TAG, text);
Intent data = new Intent();
data.putExtra(REASON, text);
finishTest(Activity.RESULT_FIRST_USER, data);
}
private void _pausePlayback() {
if (mMediaPlayer != null && mMediaPlayer.isPlaying()) {
Log.i(LOG_TAG, "Pause playback");
mMediaPlayer.pause();
// retain position for use by onSaveInstanceState(Bundle)
setStartPosition(mMediaPlayer.getCurrentPosition());
}
}
#Override
public void onBackPressed() {
super.onBackPressed();
Intent data = new Intent();
data.putExtra(REASON, "canceled by user");
finishTest(Activity.RESULT_CANCELED, data);
}
private void _releaseMediaPlayer() {
_stopPlayback();
if (mMediaPlayer != null) {
mMediaPlayer.release();
mMediaPlayer = null;
}
_rearmPlayer();
}
private void _stopPlayback() {
if (mMediaPlayer != null && mMediaPlayer.isPlaying()) {
_pausePlayback();
Log.i(LOG_TAG, "stop video");
mMediaPlayer.stop();
}
}
public boolean isPausingOnBackground() {
return mPauseOnBackground;
}
public void setPauseOnBackground(boolean pause) {
Log.d(LOG_TAG, "Pausing on background: " + pause);
this.mPauseOnBackground = pause;
}
public Uri getSourceURI() {
return mMediaURI;
}
public void setSourceURI(Uri uri) {
Log.d(LOG_TAG, "Media source: " + uri);
this.mMediaURI = uri;
}
public long getPlayTimeout() {
return mTimeout;
}
public void setPlayTimeout(long timeout) {
Log.d(LOG_TAG, "Play length (ms): " + timeout);
this.mTimeout = timeout;
}
public boolean isLooping() {
return mLoop;
}
public void setLooping(boolean loop) {
Log.d(LOG_TAG, "Is looping: " + loop);
this.mLoop = loop;
}
public int getStartPosition() {
int position = 0;
if (mMediaPlayer != null) {
position = mMediaPlayer.getCurrentPosition();
}
return position;
}
public void setStartPosition(int position) {
Log.d(LOG_TAG, String.format("Start at %fs", position / 1000.));
this.mStartPosition = position;
}
public boolean isScreenOnWhilePlaying() {
return mScreenOnWhilePlaying;
}
public void setScreenOnWhilePlaying(boolean screenOnWhilePlaying) {
Log.d(LOG_TAG, "Screen ON while playing: " + screenOnWhilePlaying);
this.mScreenOnWhilePlaying = screenOnWhilePlaying;
}
@Override
public boolean onError(MediaPlayer mp, int what, int extra) {
_fail(String.format("Media player error: what=%d, extra=%d", what, extra));
return true;
}
}
The resource for the layout containing the SurfaceView:
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<SurfaceView
android:id="#+id/surface"
android:layout_width="wrap_content"
android:layout_height="wrap_content" />
</FrameLayout>
An example of use of the activity:
Intent intent = new Intent(parent, MediaPlayerActivity.class);
intent.putExtra(MediaPlayerActivity.SOURCE_URI, this.getSourceURI());
intent.putExtra(MediaPlayerActivity.PAUSE_ON_BACKGROUND,
this.getPauseOnBackground());
Scenario scenario = sequencer.getScenario();
intent.putExtra(MediaPlayerActivity.SCENARIO, scenario.getScenarioId());
intent.putExtra(MediaPlayerActivity.SEQUENCER, sequencer.getPosition());
intent.putExtra(MediaPlayerActivity.TEST, TestUtil.getTestPath(
scenario.getCurrentTest(), TestUtil.Path.Static));
// timeout option
TimeSpan timeout = this.getTimeout();
if (!timeout.isNull()) {
timeout.renew();
intent.putExtra(MediaPlayerActivity.TIMEOUT, timeout.value());
}
intent.putExtra(MediaPlayerActivity.LOOP, this.isLooping());
startActivity(intent);
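If the caller also needs to know why playback ended, the REASON and EXCEPTION extras set by finishTest() can be read back. A minimal sketch, not from the original answer, assuming the player is started from the calling activity with startActivityForResult() and that the Sequencer/Scenario/Test extras have been stripped out as suggested above:
private static final int REQUEST_PLAY_VIDEO = 42; // hypothetical request code

void playVideo(Uri uri) {
    Intent intent = new Intent(this, MediaPlayerActivity.class);
    intent.putExtra(MediaPlayerActivity.SOURCE_URI, uri);
    startActivityForResult(intent, REQUEST_PLAY_VIDEO);
}

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == REQUEST_PLAY_VIDEO && data != null) {
        // REASON carries a plain-text cause, EXCEPTION the exception that aborted playback
        String reason = data.getStringExtra(MediaPlayerActivity.REASON);
        Exception error = (Exception) data.getSerializableExtra(MediaPlayerActivity.EXCEPTION);
    }
}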
@Sock, have you thought about using the YouTube API for Android? They have a great player view which gives you lots of control and event-listening ability on the video. I recently posted a tutorial on using the API; if it might fit your needs, check it out here
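For reference, a minimal sketch of that approach (not from the original answer; the API key is a placeholder, and the activity must extend YouTubeBaseActivity from the YouTube Android Player API library):
import android.os.Bundle;
import com.google.android.youtube.player.YouTubeBaseActivity;
import com.google.android.youtube.player.YouTubeInitializationResult;
import com.google.android.youtube.player.YouTubePlayer;
import com.google.android.youtube.player.YouTubePlayerView;

public class YouTubeDemoActivity extends YouTubeBaseActivity
        implements YouTubePlayer.OnInitializedListener {
    // Placeholder: register your own key in the Google Developers Console.
    private static final String API_KEY = "YOUR_API_KEY";

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        YouTubePlayerView playerView = new YouTubePlayerView(this);
        setContentView(playerView);
        playerView.initialize(API_KEY, this);
    }

    @Override
    public void onInitializationSuccess(YouTubePlayer.Provider provider,
            YouTubePlayer player, boolean wasRestored) {
        if (!wasRestored) {
            player.cueVideo("bIPcobKMB94"); // video id taken from the URL in the question
        }
    }

    @Override
    public void onInitializationFailure(YouTubePlayer.Provider provider,
            YouTubeInitializationResult error) {
        // Typically means the YouTube app is missing or out of date on the device.
    }
}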

Android ImageSwitcher: Out Of Memory Error when setImageURI

I seem to have a memory leak and I'm not sure how to fix it. I've read all of the ImageSwitcher tutorials and examples on android.com, but those all seem to deal with drawables that are already in the drawables folder. My code allows the user to take one or two photos with their camera, store the images to SD, and use an ImageSwitcher to "flip" the images over.
public class CardViewImageActivity extends Activity implements ViewFactory {
private CardDBAdapter mDbHelper;
private String _cardImgGuidFront;
private String _cardImgGuidBack;
private Boolean frontShowing = false;
private Boolean hasFront = false;
private Boolean hasBack = false;
private Uri uriFront;
private Uri uriBack;
private int cardId;
private ImageSwitcher iSwitcher;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.card_view_image);
iSwitcher = (ImageSwitcher)findViewById(R.id.imageSwitcher);
iSwitcher.setFactory(this);
iSwitcher.setOnClickListener(SwitcherOnClick);
this.cardId = Integer.parseInt(getIntent().getExtras().getString(
"cardId")); //$NON-NLS-1$
getCardImageGuids(this.cardId);
if(_cardImgGuidFront != null)
{
hasFront = true;
uriFront = Uri.parse(Environment.getExternalStorageDirectory().toString() + "/" + _cardImgGuidFront + ".jpg");
}
if(_cardImgGuidBack != null)
{
hasBack = true;
uriBack = Uri.parse(Environment.getExternalStorageDirectory().toString() + "/" + _cardImgGuidBack + ".jpg");
}
if(hasFront && hasBack)
Toast.makeText(this, R.string.card_view_touch, Toast.LENGTH_SHORT).show();
if(hasFront)
{
iSwitcher.setImageURI(uriFront);
frontShowing = true;
}
else if(hasBack)
{
iSwitcher.setImageURI(uriBack);
frontShowing = false;
}
else
{
Toast.makeText(this, R.string.card_no_image, Toast.LENGTH_SHORT).show();
}
}
@Override
public void onDestroy()
{
iSwitcher.setImageURI(null);
super.onDestroy();
}
public View makeView() {
ImageView iView = new ImageView(this);
iView.setScaleType(ImageView.ScaleType.FIT_CENTER);
iView.setLayoutParams(new ImageSwitcher.LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
return iView;
}
protected OnClickListener SwitcherOnClick = new OnClickListener()
{
@Override
public void onClick(View v)
{
if(frontShowing && hasBack)
{
iSwitcher.destroyDrawingCache();
iSwitcher.setImageURI(uriBack);
frontShowing = false;
}
else if(!frontShowing && hasFront)
{
iSwitcher.destroyDrawingCache();
iSwitcher.setImageURI(uriFront);
frontShowing = true;
}
else
{
}
}
};
private void getCardImageGuids(int cardId)
{
try
{
this.mDbHelper = new CardDBAdapter(this);
this.mDbHelper.open();
Cursor c = this.mDbHelper.fetchCard(this.cardId);
_cardImgGuidFront = c.getString(c
.getColumnIndex(CardDBAdapter.CARD_IMG_GUID_FRONT));
_cardImgGuidBack = c.getString(c
.getColumnIndex(CardDBAdapter.CARD_IMG_GUID_BACK));
}
catch(SQLiteException ex)
{
Toast.makeText(CardViewImageActivity.this, ex.toString(), Toast.LENGTH_LONG).show();
}
finally
{
this.mDbHelper.close();
this.mDbHelper = null;
}
}
}
The above code sometimes seems to work quite well. However, I'm occasionally getting an OutOfMemory error. Now, from my understanding through debugging, makeView() seems to be getting called twice when .setFactory(this) is called, and this seems to be fine since ImageSwitcher's purpose is to switch between two images. I'm wondering if there is a better way to switch the images besides setImageURI(). I don't see anywhere I might be leaking or what might be causing the issue. I don't see anywhere that convertView might even be utilized. Is there a way to cache the images from the Uri? Are the images being reloaded each time .setImageURI() is called? Is there a way to dump that memory (or reuse it)? Is this what's eating up my memory?
Not to sound disrespectful or rude, but I would really prefer help without someone referencing the Avoiding Memory Leaks article or links to the javadocs for ImageSwitcher. The Avoiding Memory Leaks article shows a couple of "you shouldn't do this" examples but never shows what you should do instead. I already have links to the javadocs. I'm looking for someone who can actually explain what I'm doing incorrectly and point me in a better direction with code (I learn best by seeing code rather than vague abstract academic theories) rather than just regurgitating the top 3 links from a Google search. :)
Thank you for any help! :)
EDIT: 10 Feb 2012
So I tried to load the Drawables and IMMEDIATELY received an out-of-memory error, which is WORSE than getting the error occasionally with .setImageURI(). The following contains the mods:
private Drawable front;
private Drawable back;
@Override
public void onCreate(Bundle savedInstanceState) {
...
Resources res = getResources();
...
if(_cardImgGuidFront != null)
{
hasFront = true;
String frontPath = Environment.getExternalStorageDirectory().toString() + "/" + _cardImgGuidFront + ".jpg";
front = new BitmapDrawable(res, frontPath);
}
if(_cardImgGuidBack != null)
{
hasBack = true;
String backPath = Environment.getExternalStorageDirectory().toString() + "/" + _cardImgGuidBack + ".jpg";
back = new BitmapDrawable(res, backPath);
}
Looking at SoftReference, its usage requires creating the SoftReference from another drawable. I don't see how using SoftReference could possibly help, since I am now crashing on the initial load.
ImageView v = (ImageView)imageSwitcher.getNextView();
BitmapDrawable bd = (BitmapDrawable) v.getDrawable();
if (bd != null)
{
Bitmap b = bd.getBitmap();
b.recycle();
}
I'm wondering if there is a better way to switch the images besides setImageURI().
Call setImageDrawable() using a cached BitmapDrawable.
I don't see anywhere I might be leaking or what might be causing the issue.
Use DDMS and MAT to see where your leaks are.
I don't see anywhere that convertView might even be utilized.
Considering that there is nothing named convertView in your source code, this is not surprising.
Is there a way to cache the images from the Uri?
Yes. Use BitmapFactory, load the images yourself. Cache the results, preferably using SoftReferences. Call recycle() on the Bitmap objects when you no longer need them.
Are the images being reloaded each time .setImageUri() is called?
Yes.
Is there a way to dump that memory(or reuse)?
Not if you have Android create the Bitmap for you. ImageSwitcher seems to be a one-way API, where you can set images but not retrieve them.
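A minimal sketch of that caching approach, not from the original answer (the class name and the inSampleSize value are arbitrary choices):
import java.lang.ref.SoftReference;
import java.util.HashMap;
import java.util.Map;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;

// Hedged sketch of the SoftReference cache suggested above. The GC may clear a
// SoftReference under memory pressure, so a cache miss simply re-decodes the file.
class BitmapCache {
    private final Map<String, SoftReference<Bitmap>> cache =
            new HashMap<String, SoftReference<Bitmap>>();

    Bitmap get(String path) {
        SoftReference<Bitmap> ref = cache.get(path);
        Bitmap bitmap = (ref != null) ? ref.get() : null;
        if (bitmap == null || bitmap.isRecycled()) {
            BitmapFactory.Options options = new BitmapFactory.Options();
            options.inSampleSize = 2; // decode at half resolution to save memory
            bitmap = BitmapFactory.decodeFile(path, options);
            cache.put(path, new SoftReference<Bitmap>(bitmap));
        }
        return bitmap;
    }

    // Call when an image is definitely no longer needed (e.g. in onDestroy()).
    void evict(String path) {
        SoftReference<Bitmap> ref = cache.remove(path);
        Bitmap bitmap = (ref != null) ? ref.get() : null;
        if (bitmap != null && !bitmap.isRecycled()) {
            bitmap.recycle();
        }
    }
}
With something like this, the click handler can call iSwitcher.setImageDrawable(new BitmapDrawable(getResources(), cache.get(frontPath))) instead of setImageURI().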
OK, so this seems to work, and I will provide the code to save frustration for anyone else flustered by this. It's probably not the prettiest, but so far (knock wood) I have not seen any further out-of-memory errors.
import android.app.Activity;
import android.content.res.Resources;
import android.database.Cursor;
import android.database.sqlite.SQLiteException;
import android.graphics.BitmapFactory;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.os.Environment;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup.LayoutParams;
import android.widget.ImageSwitcher;
import android.widget.ImageView;
import android.widget.Toast;
import android.widget.ViewSwitcher.ViewFactory;
public class CardViewImageActivity extends Activity implements ViewFactory {
private CardDBAdapter mDbHelper;
private String _cardImgGuidFront;
private String _cardImgGuidBack;
private Boolean frontShowing = false;
private Boolean hasFront = false;
private Boolean hasBack = false;
private Drawable front;
private Drawable back;
private int cardId;
private ImageSwitcher iSwitcher;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.cert_view_image);
iSwitcher = (ImageSwitcher)findViewById(R.id.imageSwitcher);
iSwitcher.setFactory(this);
iSwitcher.setOnClickListener(SwitcherOnClick);
Resources res = getResources();
BitmapFactory.Options o = new BitmapFactory.Options();
o.inSampleSize = 2;
this.cardId = Integer.parseInt(getIntent().getExtras().getString(
"cardId")); //$NON-NLS-1$
getCardImageGuids(this.cardId);
if(_cardImgGuidFront != null)
{
hasFront = true;
String frontPath = Environment.getExternalStorageDirectory().toString() + "/" + _cardImgGuidFront + ".jpg";
front = new BitmapDrawable(res, BitmapFactory.decodeFile(frontPath, o));
}
if(_cardImgGuidBack != null)
{
hasBack = true;
String backPath = Environment.getExternalStorageDirectory().toString() + "/" + _cardImgGuidBack + ".jpg";
back = new BitmapDrawable(res, BitmapFactory.decodeFile(backPath, o));
}
if(hasFront && hasBack)
Toast.makeText(this, R.string.card_view_touch, Toast.LENGTH_SHORT).show();
if(hasFront)
{
iSwitcher.setImageDrawable(front);
frontShowing = true;
}
else if(hasBack)
{
iSwitcher.setImageDrawable(back);
frontShowing = false;
}
else
{
Toast.makeText(this, R.string.card_no_image, Toast.LENGTH_SHORT).show();
}
res = null;
}
@Override
public void onPause()
{
super.onPause();
}
@Override
public void onDestroy()
{
front = null;
back = null;
super.onDestroy();
}
public View makeView() {
ImageView iView = new ImageView(this);
iView.setScaleType(ImageView.ScaleType.FIT_CENTER);
iView.setLayoutParams(new ImageSwitcher.LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
return iView;
}
protected OnClickListener SwitcherOnClick = new OnClickListener()
{
@Override
public void onClick(View v)
{
if(frontShowing && hasBack)
{
iSwitcher.setImageDrawable(back);
frontShowing = false;
}
else if(!frontShowing && hasFront)
{
iSwitcher.setImageDrawable(front);
frontShowing = true;
}
else
{
}
}
};
private void getCardImageGuids(int cardId)
{
...
// Put your db logic retrieval for the img id here
}
}
I hope this solution (and ACTUAL code) helps someone else.
