fix: try to reduce sqlite cursor memory usage (#375)
* fix: try to reduce sqlite cursor memory usage

* fix: try to read events in row-by-row mode on CursorWindowAllocationException
falconandy authored Jul 7, 2023
1 parent ccfcb40 commit 77e508d
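
Background for the change: Android's SQLite cursors stage query results in a fixed-size CursorWindow (about 2 MB by default on many devices), so reading a large batch of event rows through a single cursor can fail with android.database.CursorWindowAllocationException. The commit keeps the existing batch read as the fast path and adds a row-by-row fallback that is only taken when that exception is thrown. A minimal sketch of the pattern, using the helper names introduced in the DatabaseHelper diff below (getEventsBatchFromTable and getEventsRowByRowFromTable are private helpers added by this commit, not public SDK API):

import android.database.CursorWindowAllocationException;

import org.json.JSONException;
import org.json.JSONObject;

import java.util.List;

// Sketch of the fallback DatabaseHelper.getEventsFromTable now performs.
protected synchronized List<JSONObject> getEventsFromTable(
        String table, long upToId, long limit) throws JSONException {
    try {
        // Fast path: a single query whose cursor covers all matching rows.
        return getEventsBatchFromTable(table, upToId, limit);
    } catch (CursorWindowAllocationException e) {
        // Fallback: fetch only the row IDs, then load each event with its own
        // single-row query, so no cursor ever holds more than one event payload.
        return getEventsRowByRowFromTable(table, upToId, limit);
    }
}

The fallback trades extra queries (one per event) for a bounded cursor footprint, which is acceptable because it only runs after the batch read has already failed.
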
Showing 2 changed files with 93 additions and 39 deletions.
95 changes: 93 additions & 2 deletions src/main/java/com/amplitude/api/DatabaseHelper.java
@@ -13,7 +13,6 @@
import org.json.JSONObject;

import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
@@ -339,7 +338,16 @@ synchronized List<JSONObject> getIdentifyInterceptors(
}

protected synchronized List<JSONObject> getEventsFromTable(
String table, long upToId, long limit) throws JSONException {
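// Fast path: read the whole batch with a single cursor; if the rows do not fit in the
// shared CursorWindow, Android throws CursorWindowAllocationException and the read is
// retried row by row.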
try {
return getEventsBatchFromTable(table, upToId, limit);
} catch (CursorWindowAllocationException e) {
return getEventsRowByRowFromTable(table, upToId, limit);
}
}

private List<JSONObject> getEventsBatchFromTable(
String table, long upToId, long limit) throws JSONException {
List<JSONObject> events = new LinkedList<JSONObject>();
Cursor cursor = null;
try {
@@ -380,6 +388,89 @@ protected synchronized List<JSONObject> getEventsFromTable(
return events;
}

private List<JSONObject> getEventsRowByRowFromTable(
String table, long upToId, long limit) throws JSONException {
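// Memory-saving fallback in two phases: phase 1 queries only the event IDs (a small
// cursor window), phase 2 loads each event payload with its own single-row query
// via getEventFromTable below.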
List<Long> eventIds = new LinkedList<Long>();
Cursor cursor = null;
try {
SQLiteDatabase db = getReadableDatabase();
cursor = queryDb(
db, table, new String[] { ID_FIELD },
upToId >= 0 ? ID_FIELD + " <= " + upToId : null, null, null, null,
ID_FIELD + " ASC", limit >= 0 ? "" + limit : null
);

while (cursor.moveToNext()) {
long eventId = cursor.getLong(0);
eventIds.add(eventId);
}
} catch (SQLiteException e) {
logger.e(TAG, String.format("getEvents from %s failed", table), e);
delete();
} catch (StackOverflowError e) {
logger.e(TAG, String.format("getEvents from %s failed", table), e);
delete();
} catch (IllegalStateException e) { // caught before RuntimeException since IllegalStateException extends it
handleIfCursorRowTooLargeException(e);
} catch (RuntimeException e) {
convertIfCursorWindowException(e);
} finally {
if (cursor != null) {
cursor.close();
}
close();
}

try {
List<JSONObject> events = new LinkedList<JSONObject>();
for (Long eventId : eventIds) {
JSONObject event = getEventFromTable(table, eventId);
if (event != null) {
events.add(event);
}
}
return events;
} finally {
close();
}
}

protected synchronized JSONObject getEventFromTable(String table, long eventId) throws JSONException {
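// Loads a single event by ID, so at most one (possibly large) row has to fit in the CursorWindow.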
JSONObject event = null;
Cursor cursor = null;
try {
SQLiteDatabase db = getReadableDatabase();
cursor = queryDb(
db, table, new String[] { EVENT_FIELD },
ID_FIELD + " = " + eventId,
null, null, null, null, null
);

if (cursor.moveToFirst()) {
String eventData = cursor.getString(0);
if (!Utils.isEmptyString(eventData)) {
event = new JSONObject(eventData);
event.put("event_id", eventId);
}
}
} catch (SQLiteException e) {
logger.e(TAG, String.format("getEvent from %s failed", table), e);
delete();
} catch (StackOverflowError e) {
logger.e(TAG, String.format("getEvent from %s failed", table), e);
delete();
} catch (IllegalStateException e) { // caught before RuntimeException since IllegalStateException extends it
handleIfCursorRowTooLargeException(e);
} catch (RuntimeException e) {
convertIfCursorWindowException(e);
} finally {
if (cursor != null) {
cursor.close();
}
}
return event;
}

synchronized long getEventCount() {
return getEventCountFromTable(EVENT_TABLE_NAME);
}
37 changes: 0 additions & 37 deletions src/main/java/com/amplitude/api/IdentifyInterceptor.java
@@ -92,27 +92,6 @@ public void setIdentifyBatchIntervalMillis(long identifyBatchIntervalMillis) {
this.identifyBatchIntervalMillis = identifyBatchIntervalMillis;
}

private JSONObject fetchAndMergeToIdentifyEvent(JSONObject event) {
try {
List<JSONObject> identifys = dbHelper.getIdentifyInterceptors(lastIdentifyInterceptorId, -1);
if (identifys.isEmpty()) {
return event;
}
JSONObject identifyEventUserProperties = event.getJSONObject("user_properties");
JSONObject userProperties = mergeIdentifyInterceptList(identifys);
if (identifyEventUserProperties.has(Constants.AMP_OP_SET)) {
mergeUserProperties(userProperties, identifyEventUserProperties.getJSONObject(Constants.AMP_OP_SET));
}
identifyEventUserProperties.put(Constants.AMP_OP_SET, userProperties);
event.put("user_properties", identifyEventUserProperties);
dbHelper.removeIdentifyInterceptors(lastIdentifyInterceptorId);
return event;
} catch (JSONException e) {
AmplitudeLog.getLogger().w(TAG, "Identify Merge error: " + e.getMessage());
}
return event;
}

private JSONObject getTransferIdentifyEvent() {
try {
List<JSONObject> identifys = dbHelper.getIdentifyInterceptors(lastIdentifyInterceptorId, -1);
@@ -154,22 +133,6 @@ public void transferInterceptedIdentify() {
client.saveEvent(Constants.IDENTIFY_EVENT, identifyEvent);
}

private JSONObject fetchAndMergeToNormalEvent(JSONObject event) {
try {
List<JSONObject> identifys = dbHelper.getIdentifyInterceptors(lastIdentifyInterceptorId, -1);
if (identifys.isEmpty()) {
return event;
}
JSONObject userProperties = mergeIdentifyInterceptList(identifys);
mergeUserProperties(userProperties, event.getJSONObject("user_properties"));
event.put("user_properties", userProperties);
dbHelper.removeIdentifyInterceptors(lastIdentifyInterceptorId);
} catch (JSONException e) {
AmplitudeLog.getLogger().w(TAG, "Identify Merge error: " + e.getMessage());
}
return event;
}

private JSONObject mergeIdentifyInterceptList(List<JSONObject> identifys) throws JSONException {
JSONObject userProperties = new JSONObject();
for (JSONObject identify : identifys) {