Code writing to SQLite is slow - android

The code below takes data from an ArrayList and writes it to the SQLite database on the device. It runs fairly slowly: with an ArrayList of about 800 items, it takes about 1.5 minutes. Do you see anything that could make it run faster?
Iterator<PermitData> iterator = permitDataArrayList.iterator();
while (iterator.hasNext()) {
    PermitData permitData = (PermitData) iterator.next();
    HashMap<String, String> queryValues = new HashMap<String, String>();
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_ID, Integer.toString(permitData.Id));
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_TYPE, permitData.Type);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_NAME, permitData.Name);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_FIELD1, permitData.Field1);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_FIELD2, permitData.Field2);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_FIELD3, permitData.Field3);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_FIELD4, permitData.Field4);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_FIELD5, permitData.Field5);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_FIELD6, permitData.Field6);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_FIELD7, permitData.Field7);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_FIELD8, permitData.Field8);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_FIELD9, permitData.Field9);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_FIELD10, permitData.Field10);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_FIELD11, permitData.Field11);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_FIELD12, permitData.Field12);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_FIELD13, permitData.Field13);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_FIELD14, permitData.Field14);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_FIELD15, permitData.Field15);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_TO_DATE, permitData.ToDate);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_FROM_DATE, permitData.FromDate);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_FIELD16, permitData.Field16);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_FIELD17, permitData.Field17);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_ADDRESS, permitData.Address);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_PHONE, permitData.Phone);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_FIELD18, permitData.Field18);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_EMAIL, permitData.Email);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_FIELD19, permitData.Field19);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_XCOORD, permitData.XCoord);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_YCOORD, permitData.YCoord);
    queryValues.put(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_FIELD20, permitData.Field20);
    try {
        sqLiteManager.insertOrUpdatePermit(PermitDataContract.PermitDataEntry.TABLE_NAME, queryValues);
    }
    catch (Exception e) {
        Log.d("StoreData", " Id: " + queryValues.get(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_ID) + " Name: " + queryValues.get(PermitDataContract.PermitDataEntry.COLUMN_NAME_PERMIT_NAME));
    }
} // end while
SQLiteManager.java
public synchronized void insertOrUpdatePermit(String tableName, HashMap<String, String> queryValues) {
    ContentValues contentValues = new ContentValues();
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_ID, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_ID));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_TYPE, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_TYPE));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_NAME, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_NAME));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD1, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD1));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD2, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD2));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD3, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD3));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD4, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD4));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD5, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD5));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD6, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD6));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD7, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD7));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD8, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD8));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD9, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD9));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD10, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD10));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD11, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD11));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD12, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD12));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD13, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD13));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD14, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD14));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD15, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD15));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_FROM_DATE, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_FROM_DATE));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_TO_DATE, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_TO_DATE));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD16, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD16));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD17, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD17));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_ADDRESS, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_ADDRESS));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_PHONE, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_PHONE));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD18, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD18));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_EMAIL, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_EMAIL));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD19, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD19));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_XCOORD, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_XCOORD));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_YCOORD, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_YCOORD));
    contentValues.put(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD20, queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_FIELD20));
    String permitId = queryValues.get(PermitDataEntry.COLUMN_NAME_PERMIT_ID);
    String[] columns = {PermitDataEntry.COLUMN_NAME_PERMIT_ID};
    Cursor cursor = null;
    try {
        cursor = mDatabase.query(tableName, columns, PermitDataEntry.COLUMN_NAME_PERMIT_ID + "=?", new String[]{permitId}, null, null, null);
    }
    catch (Exception e) {
        Log.e("insertOrUpdatePermit", "exception:cursor: " + tableName + " columns: " + columns);
        e.printStackTrace();
    }
    // if count is 0, then permitId does not exist, so insert
    // if count is 1, then permitId does exist, so update
    int count = cursor.getCount();
    if (count == 0) { // if new row
        mDatabase.insert(tableName, null, contentValues);
    }
    else { // primary key already exists, so update only that row
        mDatabase.update(tableName, contentValues, PermitDataEntry.COLUMN_NAME_PERMIT_ID + "=?", new String[]{permitId});
    }
    cursor.close();
}

By default, each SQLite statement (e.g., insert(), update(), execSQL()) executes in its own transaction. This means that your code is doing 800 transactions. Each transaction involves disk I/O to update the database and the transaction log, and doing lots of little transactions gets slow.
For bulk data operations, it is better to wrap your own transaction around the work. Partly, that will be for speed. Partly, that way the whole bulk data load will succeed or fail as a whole, so if it fails (e.g., foreign key constraint violation), you do not wind up with a mix of succeeded and failed operations.
The pseudo-Java for this is:
db.beginTransaction();
try {
    // do real SQL calls here
    db.setTransactionSuccessful();
}
finally {
    db.endTransaction();
}
(where db is a SQLiteDatabase, and catch blocks are optional)
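Applied to the loop in the question, that looks roughly like the sketch below. The sketch assumes sqLiteManager can hand out its underlying SQLiteDatabase (a hypothetical getDatabase() accessor) and that the block of put() calls is factored into a hypothetical buildQueryValues() helper; neither exists in the code as posted.
SQLiteDatabase db = sqLiteManager.getDatabase(); // hypothetical accessor for the underlying SQLiteDatabase
db.beginTransaction();
try {
    for (PermitData permitData : permitDataArrayList) {
        // buildQueryValues() stands in for the block of put() calls shown in the question
        HashMap<String, String> queryValues = buildQueryValues(permitData);
        sqLiteManager.insertOrUpdatePermit(PermitDataContract.PermitDataEntry.TABLE_NAME, queryValues);
    }
    db.setTransactionSuccessful();
} finally {
    db.endTransaction();
}
With roughly 800 rows, this collapses 800 commits into one, which is usually where most of the 1.5 minutes goes.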

Related

Android - SQLiteException while deleting record

We currently have an application for which we are receiving many crash reports while deleting a record from the database.
Here is the method in which the app is crashing.
public int deleteGroupMap(String nickName) {
    SQLiteDatabase database = this.getWritableDatabase();
    try {
        return database.delete(TABLE_NAME_GROUP_MAP, COLUMN_GMAP_NICK_NAME + " = '" + nickName + "'", null);
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        database.close();
    }
    return 0;
}
but we are getting the following exception:
android.database.sqlite.SQLiteException: near "adz": syntax error
(code 1): , while compiling: DELETE FROM groups_map WHERE
gmap_nick_name = ''adz.'
Any help will be appreciated.
Look at the delete() signature:
int delete (String table, String whereClause, String[] whereArgs)
The third argument is whereArgs:
You may include ?s in the where clause, which will be replaced by the
values from whereArgs. The values will be bound as Strings.
It's automatically escaped, so there is no need to put quotes (', ") manually.
Use whereArgs instead of string concatenation:
database.delete(TABLE_NAME_GROUP_MAP, COLUMN_GMAP_NICK_NAME + " = ?", new String[] { nickName });
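Dropped into the method from the question, a possible version (keeping the original try/finally) would be:
public int deleteGroupMap(String nickName) {
    SQLiteDatabase database = this.getWritableDatabase();
    try {
        // the ? placeholder is bound safely, so names containing quotes (like the 'adz. value
        // in the stack trace) no longer break the generated SQL
        return database.delete(TABLE_NAME_GROUP_MAP, COLUMN_GMAP_NICK_NAME + " = ?", new String[]{nickName});
    } finally {
        database.close();
    }
}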
Try like this:
public int deleteGroupMap(String nickName) {
    SQLiteDatabase database = this.getWritableDatabase();
    try {
        database.execSQL("DELETE FROM " + TABLE_NAME_GROUP_MAP + " WHERE " + COLUMN_GMAP_NICK_NAME + " = " + nickName + "");
        database.close();
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        database.close();
    }
    return 0;
}
Try this:
return database.delete(TABLE_NAME_GROUP_MAP, COLUMN_GMAP_NICK_NAME + " = ?", new String[]{nickName});
You should also use parameter markers because appending values directly is error prone when the source contains special characters.
Try the following, as it will also prevent SQL injection in your app:
database.delete(TABLE_NAME_GROUP_MAP, COLUMN_GMAP_NICK_NAME + "=?", new String[]{String.valueOf(nickName)});

Bulk inserting using an array of ContentValues

I'm trying to do a batch insert of about 700 floats. The method I'm using is below, along with the content provider's bulkInsert(). The issue is that when I put all the floating point values into the ContentValues, nothing happens. What's a better way to insert those floating point values into the ContentValues object?
private void saveToDatabase( float[] tempValues )
{
    ContentValues values = new ContentValues();
    // WM: TODO: add patient id and sensor type
    for (float tempVal : tempValues){
        values.put( DataTable.COLUMN_DATA, tempVal );
    }
    ContentValues[] cvArray = new ContentValues[1];
    cvArray[0] = values;
    ContentResolver resolver = getContentResolver();
    resolver.bulkInsert( HealthDevContentProvider.CONTENT_URI_DATA, cvArray);
}
public int bulkInsert(Uri uri, ContentValues[] values){
    int numInserted = 0;
    String table = null;
    int uriType = sURIMatcher.match(uri);
    switch (uriType) {
        case RAWINPUT_TABLE:
            table = RAWINPUT_TABLE_PATH;
            break;
    }
    db.beginTransaction();
    try {
        for (ContentValues cv : values) {
            long newID = db.insertOrThrow(table, null, cv);
            if (newID <= 0) {
                throw new SQLException("Failed to insert row into " + uri);
            }
        }
        db.setTransactionSuccessful();
        getContext().getContentResolver().notifyChange(uri, null);
        numInserted = values.length;
    } finally {
        db.endTransaction();
    }
    return numInserted;
}
If you want each float to have its own record in your database, you need an instance of ContentValues for each new record. Right now you have one instance of ContentValues and you are writing the same key to it (meaning you are writing over the value) 700 times.
private void saveToDatabase( float[] tempValues ) {
    final int count = tempValues.length;
    ContentValues[] cvArray = new ContentValues[count];
    for (int i = 0; i < count; i++) {
        float tempVal = tempValues[i];
        ContentValues values = new ContentValues();
        values.put( DataTable.COLUMN_DATA, tempVal );
        cvArray[i] = values;
    }
    /* all the rest */
}
I know that this will be rude, but just throw away this code. Providers have primary methods to deal with most SQLite operations and you tried to blend three of them (insert(), bulkInsert(), and applyBatch()) into some kind of Frankenstein. Here are the main mistakes:
1) This line values.put(DataTable.COLUMN_DATA, tempVal) is not inserting new entries at each iteration; it is overwriting the same one. After all iterations, values contains only the 700th float value of your array.
2) As @Karakuri noted, there is only one ContentValues instance inside cvArray. The bulkInsert() doc states about its second parameter:
An array of sets of column_name/value pairs to add to the database. This must not be null.
So cvArray must contain a ContentValues instance (a set) for every entry you want to insert into the database.
3) Not exactly an error, but something to watch out for: there is no guarantee that the table variable will have been set (the switch may not match the Uri), and trying to run an operation without specifying a table will throw a SQLException.
4) These three lines are basically useless:
if (newId <= 0) {
    throw new SQLException("Failed to insert row into " + uri);
}
insertOrThrow() already throws an exception if some error happens during the insert operation. If you want to check manually for an error, try insert() or insertWithOnConflict() (or add a catch to your try block and deal with the exception there).
5) And finally, there is the problem with numInserted that @petey pointed out (no need to repeat it here).
One last piece of advice: forget that bulkInsert() exists. I know that this will require more lines of code, but using applyBatch() you can achieve better results (and more easily, since you do not have to implement it). Wolfram Rittmeyer wrote a series of excellent articles about transactions; check them if you have any doubts.
Last but not least (yes, I'm in a good mood today), this is how I would do a basic implementation of your code:
@Override
public Uri insert(Uri uri, ContentValues values) {
    final SQLiteDatabase db = null; // TODO: retrieve writable database
    final int match = matcher.match(uri);
    switch(match) {
        case RAWINPUT_TABLE:
            long id = db.insert(RAWINPUT_TABLE, null, values); // TODO: add catch block to deal.
            getContext().getContentResolver().notifyChange(uri, null, false);
            return ContentUris.withAppendedId(uri, id);
        default:
            throw new UnsupportedOperationException("Unknown uri: " + uri);
    }
}
private void saveToDatabase( float[] tempValues ) {
    ArrayList<ContentProviderOperation> operations = new ArrayList<ContentProviderOperation>();
    for (float tempVal : tempValues){
        operations.add(ContentProviderOperation
                .newInsert(HealthDevContentProvider.CONTENT_URI_DATA)
                .withValue(DataTable.COLUMN_DATA, tempVal)
                // TODO: .withValue() for patient id
                // TODO: .withValue() for sensor type
                .build());
    }
    // WARNING!! Provider operations (except query if you are using loaders) happen by default in the main thread!!
    try {
        // applyBatch() takes the provider authority and throws checked exceptions;
        // HealthDevContentProvider.AUTHORITY is assumed to exist as a constant
        getContentResolver().applyBatch(HealthDevContentProvider.AUTHORITY, operations);
    } catch (RemoteException | OperationApplicationException e) {
        e.printStackTrace();
    }
}
I use batch inserts; I'm not sure what the difference between bulk and batch is, but all I do is this:
ArrayList<ContentProviderOperation> operations = new ArrayList<ContentProviderOperation>();
for(int j=0;j<locationAry.length;j++){
    ContentValues values2 = new ContentValues();
    values2.put(MapPoints.ELEMENT_ECM2ID, ecm2id);
    values2.put(MapPoints.ELEMENT_ID, newElementId);
    values2.put(MapPoints.LATITUDE, locationAry[j+1]);
    values2.put(MapPoints.LONGITUDE, locationAry[j]);
    values2.put(MapPoints.LAYER_ID, layerID);
    operations.add(ContentProviderOperation.newInsert(MapPoints.CONTENT_URI).withValues(values2).build());
}
getContentResolver().applyBatch(MapElements.AUTHORITY, operations);
did you override the bulkInsert method in your ContentProvider?
If one insert fails, your whole transaction fails. Without seeing your table create statement for unique keys, try a replace after your insert fails. Also, your numInserted will always be the same as values.length no matter which insert/replace fails; this doesn't seem correct either.
...
db.beginTransaction();
int numInserted = 0;
try {
    for (ContentValues cv : values) {
        long newID;
        try {
            newID = database.insertOrThrow(table, null, cv);
        } catch (SQLException ignore) {
            newID = database.replace(table, null, cv);
        }
        if (newID <= 0) {
            Log.e("TAG", "Failed to insert or replace row into " + uri);
        } else {
            // you are good...increment numInserted
            numInserted++;
        }
    }
    db.setTransactionSuccessful();
    getContext().getContentResolver().notifyChange(uri, null);
} finally {
    db.endTransaction();
}
return numInserted;

Batch insert/update via contentProvider based on 2 fields

I'm creating a ContentProvider, and I wish to be able to send it multiple DB records (ContentValues) to be inserted or updated in a single table using a single batch operation.
How do I do that?
bulkInsert() is intended only for inserting, but wouldn't that mean that inserting something that already exists won't do anything?
Also, is there a way for the update operation to use a special constraint? For example, I need to ignore the primary key and update based on 2 other fields that together are unique.
"batchInsert is intended only for inserting" : this is true BUT you can override it in your ContentProvider to perform an UPSERT (insert/update) depending on the URI passed to batchInsert.
The following is some working code that I currently use to perform bulk inserts on time-series data (admittedly, I just delete anything that gets in the way instead of updating, but you could easily change this to your own ends.).
Also note the use of the sql transaction; this speeds up the process immensely.
@Override
public int bulkInsert(Uri uri, ContentValues[] values) {
    SQLiteDatabase sqlDB = database.getWritableDatabase();
    switch (match(uri)) {
        case ONEPROGRAMME:
            String cid = uri.getLastPathSegment();
            int insertCount = 0;
            int len = values.length;
            if (len > 0) {
                long start = values[0].getAsLong(Programme.COLUMN_START);
                long end = values[len - 1].getAsLong(Programme.COLUMN_END);
                String where = Programme.COLUMN_CHANNEL + "=? AND " + Programme.COLUMN_START + ">=? AND "
                        + Programme.COLUMN_END + "<=?";
                String[] args = { cid, Long.toString(start), Long.toString(end) };
                //TODO use a compiled statement ?
                //SQLiteStatement stmt = sqlDB.compileStatement(INSERT)
                sqlDB.beginTransaction();
                try {
                    sqlDB.delete(tableName(PROGRAMME_TABLE), where, args);
                    for (ContentValues row : values) {
                        if (sqlDB.insert(tableName(PROGRAMME_TABLE), null, row) != -1L) {
                            insertCount++;
                        }
                    }
                    sqlDB.setTransactionSuccessful();
                } finally {
                    sqlDB.endTransaction();
                }
            }
            if (insertCount > 0)
                getContext().getContentResolver().notifyChange(Resolver.PROGRAMME.uri, null);
            return insertCount;
        default:
            throw new UnsupportedOperationException("Unsupported URI: " + uri);
    }
}
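For the "update based on 2 other fields that together are unique" part of the question, one option (not shown in the answer above) is to declare a UNIQUE constraint on those two columns and let SQLite resolve the conflict via insertWithOnConflict(). A minimal sketch with placeholder table and column names:
// Sketch only: assumes the table was created with UNIQUE(field_a, field_b);
// "my_table", "field_a" and "field_b" are placeholders, not names from the question.
@Override
public int bulkInsert(Uri uri, ContentValues[] values) {
    SQLiteDatabase db = database.getWritableDatabase();
    int count = 0;
    db.beginTransaction();
    try {
        for (ContentValues row : values) {
            // CONFLICT_REPLACE removes the row that clashes on the unique pair and inserts the new one,
            // which effectively updates based on those two fields rather than the primary key
            long id = db.insertWithOnConflict("my_table", null, row, SQLiteDatabase.CONFLICT_REPLACE);
            if (id != -1L) {
                count++;
            }
        }
        db.setTransactionSuccessful();
    } finally {
        db.endTransaction();
    }
    if (count > 0) {
        getContext().getContentResolver().notifyChange(uri, null);
    }
    return count;
}
Note that CONFLICT_REPLACE really deletes and re-inserts the clashing row (so it gets a new rowid); if the original id must survive, run an update with "field_a = ? AND field_b = ?" first and only insert when it affects 0 rows.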

android sqlite query optimisation

I have an SQLite db with about 400 000 entries. To query the db I am using the following method:
public double lookUpBigramFrequency(String bigram) throws SQLException {
    SQLiteDatabase db = dbh.getReadableDatabase();
    double frequency = 0;
    bigram = bigram.toLowerCase();
    String select = "SELECT frequency FROM bigrams WHERE bigram = '"
            + bigram + "'";
    Cursor mCursor = db.rawQuery(select, null);
    if (mCursor != null) {
        if (mCursor.moveToFirst()) {
            frequency = Double.parseDouble(mCursor.getString(0));
        } else {
            frequency = 0;
        }
    }
    return frequency;
}
but it takes about 0.5 sec to retrieve a single entry, and with a few queries this adds up so the method runs for about 10 seconds. How do I speed it up?
Firstly, use an INDEX
http://www.sqlite.org/lang_createindex.html
in your case that will be something like:
CREATE INDEX idx_bigram ON bigrams (bigram)
Secondly, use '?' instead of a literal query; it helps SQLite cache the compiled statement:
String select = "SELECT frequency FROM bigrams WHERE bigram = ?";
Cursor mCursor = db.rawQuery(select, new String[]{ bigram });
Thirdly, I trust query() is more efficient than rawQuery():
mCursor = db.query("bigrams", new String[] { "frequency" }, "bigram = ?",
        new String[]{ bigram }, null, null, null, null);
Fourthly, you can query several values at once (not compatible with point 2):
SELECT frequency FROM bigrams WHERE bigrams IN ('1', '2', '3')
Fifthly, you don't need to open your database every time. You should consider leaving it open.
Edit
After seeing this question (IN clause and placeholders), it appears you can combine 2 and 4 after all (not sure it is useful, though).
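Putting points 1, 2, 3 and 5 together, the method from the question might end up looking something like the sketch below; it assumes dbh is a long-lived SQLiteOpenHelper that stays open, and that the index has been created once (for example in the helper's onCreate()).
// run once, e.g. in SQLiteOpenHelper.onCreate() (or in onUpgrade() for existing installs):
// db.execSQL("CREATE INDEX IF NOT EXISTS idx_bigram ON bigrams (bigram)");

public double lookUpBigramFrequency(String bigram) {
    SQLiteDatabase db = dbh.getReadableDatabase(); // cheap after the first call; the helper stays open
    Cursor cursor = db.query("bigrams", new String[]{"frequency"}, "bigram = ?",
            new String[]{bigram.toLowerCase()}, null, null, null);
    try {
        return cursor.moveToFirst() ? cursor.getDouble(0) : 0;
    } finally {
        cursor.close();
    }
}
Looking up several bigrams in one query (point 4) is then just a matter of building an IN clause with one '?' per value and passing the values as selectionArgs.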
Always use the transaction mechanism when you want to do lots of database operations:
public static void doLotDBOperations() {
    try {
        // Code to Open Database
        // Start transaction
        sqlDb.beginTransaction();
        // Code to Execute all queries
        sqlDb.setTransactionSuccessful();
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        // End all transaction
        sqlDb.endTransaction();
        // Code to Close Database
    }
}

Retaining persistent information on different versions of the software

I am planning to write a free version and a full version of an app. I want the information stored by the free version to also be accessible by the full version (I don't want to use Content Providers), and I also want to make sure this data is not lost when the software is updated. How do I achieve this?
You need to implement an intelligent onUpgrade for your SQLite helpers.
You should always have the new table creation query at hand, and use that for the upgrade while transferring any existing data. Note that the onUpgrade method runs once per SQLiteOpenHelper object, so you need to handle all of its tables in it.
So the recommended onUpgrade flow is (a sketch putting these steps together follows the helper methods below):
beginTransaction
run the table creation with IF NOT EXISTS (we are doing an upgrade, so the table might not exist yet, and the ALTER and DROP below would then fail)
put the existing columns in a list: List<String> columns = DBUtils.GetColumns(db, TableName);
back up the table (ALTER TABLE " + TableName + " RENAME TO 'temp_" + TableName)
create the new table (the newest table creation schema)
get the intersection with the new columns, this time taken from the upgraded table (columns.retainAll(DBUtils.GetColumns(db, TableName));)
restore the data (String cols = StringUtils.join(columns, ",");
db.execSQL(String.format(
        "INSERT INTO %s (%s) SELECT %s from temp_%s",
        TableName, cols, cols, TableName));
)
remove the backup table (DROP TABLE 'temp_" + TableName)
setTransactionSuccessful
(This doesn't handle table downgrades; if you rename a column, the existing data is not transferred because the column names do not match.)
public static List<String> GetColumns(SQLiteDatabase db, String tableName) {
    List<String> ar = null;
    Cursor c = null;
    try {
        c = db.rawQuery("select * from " + tableName + " limit 1", null);
        if (c != null) {
            ar = new ArrayList<String>(Arrays.asList(c.getColumnNames()));
        }
    } catch (Exception e) {
        Log.v(tableName, e.getMessage(), e);
        e.printStackTrace();
    } finally {
        if (c != null)
            c.close();
    }
    return ar;
}
public static String join(List<String> list, String delim) {
    StringBuilder buf = new StringBuilder();
    int num = list.size();
    for (int i = 0; i < num; i++) {
        if (i != 0)
            buf.append(delim);
        buf.append((String) list.get(i));
    }
    return buf.toString();
}
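Putting the steps above together, a rough sketch of such an onUpgrade() (the table name and creation SQL here are placeholders; DBUtils.GetColumns() and StringUtils.join() are the helpers shown above):
@Override
public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
    // Placeholder schema; repeat this block for each real table with its real CREATE statement
    final String TableName = "my_table";
    final String createSql = "CREATE TABLE IF NOT EXISTS " + TableName
            + " (_id INTEGER PRIMARY KEY, name TEXT, value TEXT)";

    db.beginTransaction();
    try {
        // make sure the table exists so the ALTER/DROP below cannot fail
        db.execSQL(createSql);
        // remember the columns of the existing (old) table
        List<String> columns = DBUtils.GetColumns(db, TableName);
        // back it up under a temporary name
        db.execSQL("ALTER TABLE " + TableName + " RENAME TO temp_" + TableName);
        // create the new table with the newest schema
        db.execSQL(createSql);
        // keep only the columns present in both the old and the new schema
        columns.retainAll(DBUtils.GetColumns(db, TableName));
        // copy the surviving columns back
        String cols = StringUtils.join(columns, ",");
        db.execSQL(String.format("INSERT INTO %s (%s) SELECT %s FROM temp_%s",
                TableName, cols, cols, TableName));
        // drop the backup
        db.execSQL("DROP TABLE temp_" + TableName);
        db.setTransactionSuccessful();
    } finally {
        db.endTransaction();
    }
}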
