diff --git a/androidlibrary_lib/build.gradle b/androidlibrary_lib/build.gradle index 5ea61337..aff0aa86 100644 --- a/androidlibrary_lib/build.gradle +++ b/androidlibrary_lib/build.gradle @@ -105,6 +105,12 @@ dependencies { api 'com.android.support:support-annotations:27.1.0' api 'com.android.support:support-v4:27.1.0' + api group: 'com.fasterxml.jackson.core', name: 'jackson-core', version: '2.9.4' + api group: 'com.fasterxml.jackson.core', name: 'jackson-annotations', version: '2.9.4' + api group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.9.4' + api group: 'com.fasterxml.jackson.dataformat', name: 'jackson-dataformat-csv', version: '2.9.4' + api group: 'com.fasterxml.jackson.dataformat', name: 'jackson-dataformat-xml', version: '2.9.4' + // Testing dependencies testImplementation 'junit:junit:4.12' androidTestImplementation 'com.android.support.test:rules:1.0.1' diff --git a/androidlibrary_lib/libs/aggregate-rest-interface-2017-06-22-javadoc.jar b/androidlibrary_lib/libs/aggregate-rest-interface-2017-06-22-javadoc.jar new file mode 100644 index 00000000..66e94ca1 Binary files /dev/null and b/androidlibrary_lib/libs/aggregate-rest-interface-2017-06-22-javadoc.jar differ diff --git a/androidlibrary_lib/libs/aggregate-rest-interface-2017-06-22-sources.jar b/androidlibrary_lib/libs/aggregate-rest-interface-2017-06-22-sources.jar new file mode 100644 index 00000000..f5ac14ac Binary files /dev/null and b/androidlibrary_lib/libs/aggregate-rest-interface-2017-06-22-sources.jar differ diff --git a/androidlibrary_lib/libs/aggregate-rest-interface-2017-06-22.jar b/androidlibrary_lib/libs/aggregate-rest-interface-2017-06-22.jar index 0a0a03f9..12701752 100644 Binary files a/androidlibrary_lib/libs/aggregate-rest-interface-2017-06-22.jar and b/androidlibrary_lib/libs/aggregate-rest-interface-2017-06-22.jar differ diff --git a/androidlibrary_lib/libs/jackson-annotations-2.8.8.jar b/androidlibrary_lib/libs/jackson-annotations-2.8.8.jar deleted file mode 
100644 index bd3ba9d2..00000000 Binary files a/androidlibrary_lib/libs/jackson-annotations-2.8.8.jar and /dev/null differ diff --git a/androidlibrary_lib/libs/jackson-core-2.8.8.jar b/androidlibrary_lib/libs/jackson-core-2.8.8.jar deleted file mode 100644 index 2a66a6f0..00000000 Binary files a/androidlibrary_lib/libs/jackson-core-2.8.8.jar and /dev/null differ diff --git a/androidlibrary_lib/libs/jackson-databind-2.8.8.jar b/androidlibrary_lib/libs/jackson-databind-2.8.8.jar deleted file mode 100644 index 91f068e5..00000000 Binary files a/androidlibrary_lib/libs/jackson-databind-2.8.8.jar and /dev/null differ diff --git a/androidlibrary_lib/libs/jackson-dataformat-xml-2.8.8.jar b/androidlibrary_lib/libs/jackson-dataformat-xml-2.8.8.jar deleted file mode 100644 index 25d66734..00000000 Binary files a/androidlibrary_lib/libs/jackson-dataformat-xml-2.8.8.jar and /dev/null differ diff --git a/androidlibrary_lib/src/main/java/org/opendatakit/builder/CsvUtil.java b/androidlibrary_lib/src/main/java/org/opendatakit/builder/CsvUtil.java index b096ba58..c989fd79 100644 --- a/androidlibrary_lib/src/main/java/org/opendatakit/builder/CsvUtil.java +++ b/androidlibrary_lib/src/main/java/org/opendatakit/builder/CsvUtil.java @@ -16,15 +16,16 @@ package org.opendatakit.builder; import android.content.ContentValues; + import org.apache.commons.lang3.CharEncoding; import org.opendatakit.aggregate.odktables.rest.ConflictType; import org.opendatakit.aggregate.odktables.rest.ElementDataType; import org.opendatakit.aggregate.odktables.rest.KeyValueStoreConstants; -import org.opendatakit.aggregate.odktables.rest.RFC4180CsvReader; import org.opendatakit.aggregate.odktables.rest.RFC4180CsvWriter; -import org.opendatakit.aggregate.odktables.rest.SavepointTypeManipulator; import org.opendatakit.aggregate.odktables.rest.SyncState; -import org.opendatakit.aggregate.odktables.rest.TableConstants; +import org.opendatakit.aggregate.odktables.rest.entity.Column; +import 
org.opendatakit.aggregate.odktables.rest.entity.DataKeyValue; +import org.opendatakit.builder.csvparser.Parser; import org.opendatakit.database.data.ColumnDefinition; import org.opendatakit.database.data.KeyValueStoreEntry; import org.opendatakit.database.data.OrderedColumns; @@ -32,28 +33,24 @@ import org.opendatakit.database.data.UserTable; import org.opendatakit.database.queries.BindArgs; import org.opendatakit.database.service.DbHandle; -import org.opendatakit.database.utilities.CursorUtils; import org.opendatakit.exception.ServicesAvailabilityException; import org.opendatakit.listener.ExportListener; import org.opendatakit.listener.ImportListener; import org.opendatakit.logging.WebLogger; +import org.opendatakit.logging.WebLoggerIf; import org.opendatakit.provider.DataTableColumns; -import org.opendatakit.utilities.LocalizationUtils; import org.opendatakit.utilities.ODKFileUtils; import java.io.File; import java.io.FileFilter; -import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; -import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.util.ArrayList; import java.util.Arrays; -import java.util.HashMap; import java.util.HashSet; import java.util.List; -import java.util.UUID; +import java.util.Set; /** * Various utilities for importing/exporting tables from/to CSV. 
@@ -71,9 +68,15 @@ public class CsvUtil { private final CsvUtilSupervisor supervisor; + private final WebLoggerIf logger; + + private final Parser csvParser; + public CsvUtil(CsvUtilSupervisor supervisor, String appName) { this.supervisor = supervisor; this.appName = appName; + this.logger = WebLogger.getLogger(appName); + this.csvParser = new Parser(); } // =========================================================================================== @@ -107,12 +110,12 @@ public CsvUtil(CsvUtilSupervisor supervisor, String appName) { */ @SuppressWarnings("unused") public boolean exportSeparable(ExportListener exportListener, DbHandle db, String tableId, - OrderedColumns orderedDefns, String fileQualifier) throws ServicesAvailabilityException { + OrderedColumns orderedDefns, String fileQualifier) throws ServicesAvailabilityException { // building array of columns to select and header row for output file // then we are including all the metadata columns. ArrayList columns = new ArrayList<>(); - WebLogger.getLogger(appName).i(TAG, + logger.i(TAG, "exportSeparable: tableId: " + tableId + " fileQualifier: " + (fileQualifier == null ? 
"" : fileQualifier)); @@ -234,7 +237,7 @@ public boolean accept(File pathname) { outputCsv = outputCsv.getParentFile(); } } catch (IOException e1) { - WebLogger.getLogger(appName).printStackTrace(e1); + logger.printStackTrace(e1); return false; } return false; @@ -274,8 +277,8 @@ public boolean accept(File pathname) { * @throws ServicesAvailabilityException if the service was unavailable */ private boolean writePropertiesCsv(DbHandle db, String tableId, OrderedColumns orderedDefns, - File definitionCsv, File propertiesCsv) throws ServicesAvailabilityException { - WebLogger.getLogger(appName).i(TAG, "writePropertiesCsv: tableId: " + tableId); + File definitionCsv, File propertiesCsv) throws ServicesAvailabilityException { + logger.i(TAG, "writePropertiesCsv: tableId: " + tableId); /* * Get all the KVS entries and scan through, replacing all choice list @@ -394,7 +397,7 @@ public synchronized void updateTablePropertiesFromCsv(String tableId) * @throws ServicesAvailabilityException if the database is down */ public boolean importSeparable(ImportListener importListener, String tableId, - String fileQualifier, boolean createIfNotPresent) throws ServicesAvailabilityException { + String fileQualifier, boolean createIfNotPresent) throws ServicesAvailabilityException { DbHandle db = null; try { @@ -413,334 +416,132 @@ public boolean importSeparable(ImportListener importListener, String tableId, OrderedColumns orderedDefns = supervisor.getDatabase() .getUserDefinedColumns(appName, db, tableId); - WebLogger.getLogger(appName).i(TAG, + Set columnNames = new HashSet<>(); + for (Column column : orderedDefns.getColumns()) { + columnNames.add(column.getElementKey()); + } + + logger.i(TAG, "importSeparable: tableId: " + tableId + " fileQualifier: " + (fileQualifier == null ? 
"" : fileQualifier)); - // reading data - InputStreamReader input = null; - try { - - File assetsCsvInstances = new File( - ODKFileUtils.getAssetsCsvInstancesFolder(appName, tableId)); - HashSet instancesHavingData = new HashSet<>(); - if (assetsCsvInstances.exists() && assetsCsvInstances.isDirectory()) { - File[] subDirectories = assetsCsvInstances.listFiles(new FileFilter() { - - @Override - public boolean accept(File pathname) { - return pathname.isDirectory() && pathname.list().length != 0; - } - }); - instancesHavingData.addAll(Arrays.asList(subDirectories)); - } + File assetsCsvInstances = new File( + ODKFileUtils.getAssetsCsvInstancesFolder(appName, tableId)); + HashSet instancesHavingData = new HashSet<>(); + if (assetsCsvInstances.exists() && assetsCsvInstances.isDirectory()) { + File[] subDirectories = assetsCsvInstances.listFiles(new FileFilter() { - // both files are read from config/assets/csv directory... - File assetsCsv = new File(ODKFileUtils.getAssetsCsvFolder(appName)); - - // read data table... - File file = new File(assetsCsv, - tableId + (fileQualifier != null && !fileQualifier.isEmpty() ? "." + fileQualifier : "") - + ".csv"); - FileInputStream in = new FileInputStream(file); - input = new InputStreamReader(in, CharEncoding.UTF_8); - int total = 0; - char[] buf = new char[1024]; - int read; - while ((read = input.read(buf)) > 0) { - int i = 0; - // while not for because we change i in the loop - while (i < read) { - if (buf[i] == '\r' || buf[i] == '\n') { - if (i + 1 < buf.length && (buf[i + 1] == '\r' || buf[i + 1] == '\n')) { - i++; - } - total++; - } - i++; - } - } - input.close(); - in.close(); - in = new FileInputStream(file); - input = new InputStreamReader(in, CharEncoding.UTF_8); - //int total = r.getLineNumber(); - RFC4180CsvReader cr = new RFC4180CsvReader(input); - // don't have to worry about quotes in elementKeys... 
- String[] columnsInFile = cr.readNext(); - int columnsInFileLength = countUpToLastNonNullElement(columnsInFile); - - String v_id; - String v_form_id; - String v_locale; - String v_savepoint_type; - String v_savepoint_creator; - String v_savepoint_timestamp; - String v_row_etag; - String v_default_access; - String v_row_owner; - String v_group_read_only; - String v_group_modify; - String v_group_privileged; - - HashMap valueMap = new HashMap<>(); - - int rowCount = 0; - String[] row; - while (true) { - row = cr.readNext(); - rowCount++; - importListener.updateProgressDetail(rowCount, total); - if (row == null || countUpToLastNonNullElement(row) == 0) { - break; - } - int rowLength = countUpToLastNonNullElement(row); - - // default values for metadata columns if not provided - v_id = UUID.randomUUID().toString(); - v_form_id = null; - v_locale = CursorUtils.DEFAULT_LOCALE; - v_savepoint_type = SavepointTypeManipulator.complete(); - v_savepoint_creator = CursorUtils.DEFAULT_CREATOR; - v_savepoint_timestamp = TableConstants.nanoSecondsFromMillis(System.currentTimeMillis()); - v_row_etag = null; - v_default_access = DataTableColumns.DEFAULT_DEFAULT_ACCESS; - v_row_owner = DataTableColumns.DEFAULT_ROW_OWNER; - v_group_read_only = DataTableColumns.DEFAULT_GROUP_READ_ONLY; - v_group_modify = DataTableColumns.DEFAULT_GROUP_MODDIFY; - v_group_privileged = DataTableColumns.DEFAULT_GROUP_PRIVILEGED; - - // clear value map - valueMap.clear(); - - boolean foundId = false; - for (int i = 0; i < columnsInFileLength; ++i) { - if (i >= rowLength) - break; - String column = columnsInFile[i]; - String tmp = row[i]; - if (DataTableColumns.ID.equals(column)) { - if (tmp != null && !tmp.isEmpty()) { - foundId = true; - v_id = tmp; - } - continue; - } - if (DataTableColumns.FORM_ID.equals(column)) { - if (tmp != null && !tmp.isEmpty()) { - v_form_id = tmp; - } - continue; - } - if (DataTableColumns.LOCALE.equals(column)) { - if (tmp != null && !tmp.isEmpty()) { - v_locale = tmp; - } - 
continue; - } - if (DataTableColumns.SAVEPOINT_TYPE.equals(column)) { - if (tmp != null && !tmp.isEmpty()) { - v_savepoint_type = tmp; - } - continue; - } - if (DataTableColumns.SAVEPOINT_CREATOR.equals(column)) { - if (tmp != null && !tmp.isEmpty()) { - v_savepoint_creator = tmp; - } - continue; - } - if (DataTableColumns.SAVEPOINT_TIMESTAMP.equals(column)) { - if (tmp != null && !tmp.isEmpty()) { - v_savepoint_timestamp = tmp; - } - continue; - } - if (DataTableColumns.ROW_ETAG.equals(column)) { - if (tmp != null && !tmp.isEmpty()) { - v_row_etag = tmp; - } - continue; - } - if (DataTableColumns.DEFAULT_ACCESS.equals(column)) { - if (tmp != null && !tmp.isEmpty()) { - v_default_access = tmp; - } - continue; - } - if (DataTableColumns.ROW_OWNER.equals(column)) { - if (tmp != null && !tmp.isEmpty()) { - v_row_owner = tmp; - } - continue; - } - if (DataTableColumns.GROUP_READ_ONLY.equals(column)) { - if (tmp != null && !tmp.isEmpty()) { - v_group_read_only = tmp; - } - continue; - } - if (DataTableColumns.GROUP_MODIFY.equals(column)) { - if (tmp != null && !tmp.isEmpty()) { - v_group_modify = tmp; - } - continue; - } - if (DataTableColumns.GROUP_PRIVILEGED.equals(column)) { - if (tmp != null && !tmp.isEmpty()) { - v_group_privileged = tmp; - } - continue; - } - - try { - orderedDefns.find(column); - valueMap.put(column, tmp); - } catch (IllegalArgumentException ignored) { - // this is OK -- - // the csv contains an extra column - } + @Override + public boolean accept(File pathname) { + return pathname.isDirectory() && pathname.list().length != 0; } + }); + instancesHavingData.addAll(Arrays.asList(subDirectories)); + } - // if there are any conflicts or checkpoints on this row, we do not import - // this row change. Instead, silently ignore them. 
- UserTable table = supervisor.getDatabase() - .privilegedGetRowsWithId(appName, db, tableId, orderedDefns, v_id); - if (table.getNumberOfRows() > 1) { - WebLogger.getLogger(appName).w(TAG, - "importSeparable: tableId: " + tableId + " rowId: " + v_id + - " has checkpoints or conflicts -- IGNORED in .csv"); - continue; - } + // both files are read from config/assets/csv directory... + File assetsCsv = new File(ODKFileUtils.getAssetsCsvFolder(appName)); - SyncState syncState = null; - if (foundId && table.getNumberOfRows() == 1) { - String syncStateStr = table.getRowAtIndex(0).getDataByKey(DataTableColumns.SYNC_STATE); - if (syncStateStr == null) { - throw new IllegalStateException("Unexpected null syncState value"); - } - syncState = SyncState.valueOf(syncStateStr); - } + // read data table... + File file = new File(assetsCsv, + tableId + (fileQualifier != null && !fileQualifier.isEmpty() ? "." + fileQualifier : "") + + ".csv"); - /* - * Insertion will set the SYNC_STATE to new_row. - * - * If the table is sync'd to the server, this will cause one sync - * interaction with the server to confirm that the server also has - * this record. - * - * If a record with this same rowId already exists, if it is in an - * new_row sync state, we update it here. Otherwise, if there were any - * local changes, we leave the row unchanged. 
- */ - if (syncState != null) { - - ContentValues cv = new ContentValues(); - for (String column : valueMap.keySet()) { - if (column != null) { - cv.put(column, valueMap.get(column)); - } - } - - // The admin columns get added here - cv.put(DataTableColumns.FORM_ID, v_form_id); - cv.put(DataTableColumns.LOCALE, v_locale); - cv.put(DataTableColumns.SAVEPOINT_TYPE, v_savepoint_type); - cv.put(DataTableColumns.SAVEPOINT_TIMESTAMP, v_savepoint_timestamp); - cv.put(DataTableColumns.SAVEPOINT_CREATOR, v_savepoint_creator); - cv.put(DataTableColumns.ROW_ETAG, v_row_etag); - cv.put(DataTableColumns.DEFAULT_ACCESS, v_default_access); - cv.put(DataTableColumns.ROW_OWNER, v_row_owner); - cv.put(DataTableColumns.GROUP_READ_ONLY, v_group_read_only); - cv.put(DataTableColumns.GROUP_MODIFY, v_group_modify); - cv.put(DataTableColumns.GROUP_PRIVILEGED, v_group_privileged); - - cv.put(DataTableColumns.SYNC_STATE, SyncState.new_row.name()); - cv.putNull(DataTableColumns.CONFLICT_TYPE); - - if (v_id != null) { - cv.put(DataTableColumns.ID, v_id); - } - - if (syncState == SyncState.new_row) { - // delete the existing row then insert the new values for it - supervisor.getDatabase() - .privilegedDeleteRowWithId(appName, db, tableId, orderedDefns, v_id); - supervisor.getDatabase() - .privilegedInsertRowWithId(appName, db, tableId, orderedDefns, cv, v_id, true); - } - // otherwise, do NOT update the row. - // i.e., if the row has been sync'd with - // the server, then we don't revise it. 
+ List rows = csvParser.parse(file); - } else { + for (int i = 0; i < rows.size(); i++) { + importListener.updateProgressDetail(i + 1, rows.size()); + + org.opendatakit.aggregate.odktables.rest.entity.Row r = rows.get(i); - ContentValues cv = new ContentValues(); - for (String column : valueMap.keySet()) { - if (column != null) { - cv.put(column, valueMap.get(column)); - } - } - - // The admin columns get added here - cv.put(DataTableColumns.FORM_ID, v_form_id); - cv.put(DataTableColumns.LOCALE, v_locale); - cv.put(DataTableColumns.SAVEPOINT_TYPE, v_savepoint_type); - cv.put(DataTableColumns.SAVEPOINT_TIMESTAMP, v_savepoint_timestamp); - cv.put(DataTableColumns.SAVEPOINT_CREATOR, v_savepoint_creator); - cv.put(DataTableColumns.ROW_ETAG, v_row_etag); - cv.put(DataTableColumns.DEFAULT_ACCESS, v_default_access); - cv.put(DataTableColumns.ROW_OWNER, v_row_owner); - cv.put(DataTableColumns.GROUP_READ_ONLY, v_group_read_only); - cv.put(DataTableColumns.GROUP_MODIFY, v_group_modify); - cv.put(DataTableColumns.GROUP_PRIVILEGED, v_group_privileged); - - cv.put(DataTableColumns.SYNC_STATE, SyncState.new_row.name()); - cv.putNull(DataTableColumns.CONFLICT_TYPE); - - if (v_id == null) { - v_id = LocalizationUtils.genUUID(); - } - - cv.put(DataTableColumns.ID, v_id); - - // imports assume super-user level powers. Treat these as if they were - // directed by the server during a sync. - supervisor.getDatabase() - .privilegedInsertRowWithId(appName, db, tableId, orderedDefns, cv, v_id, true); + UserTable table = supervisor.getDatabase() + .privilegedGetRowsWithId(appName, db, tableId, orderedDefns, r.getRowId()); + + // if there are any conflicts or checkpoints on this row, we do not import + // this row change. Instead, silently ignore them. 
+ if (table.getNumberOfRows() > 1) { + logger.w(TAG, + "importSeparable: tableId: " + tableId + " rowId: " + r.getRowId() + + " has checkpoints or conflicts -- IGNORED in .csv"); + continue; + } + + /* + * Insertion will set the SYNC_STATE to new_row. + * + * If the table is sync'd to the server, this will cause one sync + * interaction with the server to confirm that the server also has + * this record. + * + * If a record with this same rowId already exists, if it is in an + * new_row sync state, we update it here. Otherwise, if there were any + * local changes, we leave the row unchanged. + */ + if (table.getNumberOfRows() == 1) { + String syncStateStr = table.getRowAtIndex(0).getDataByKey(DataTableColumns.SYNC_STATE); + if (syncStateStr == null) { + throw new IllegalStateException("Unexpected null syncState value"); } - /* - * Copy all attachment files into the destination row. - * The attachments are in instance-id-labeled sub-directories. - * Anything in the corresponding subdirectory should be - * referenced by the valuesMap above. If it isn't, don't worry about - * it. This is a simplification. 
- */ - File assetsInstanceFolder = new File( - ODKFileUtils.getAssetsCsvInstanceFolder(appName, tableId, v_id)); - if (instancesHavingData.contains(assetsInstanceFolder)) { - File tableInstanceFolder = new File( - ODKFileUtils.getInstanceFolder(appName, tableId, v_id)); - tableInstanceFolder.mkdirs(); - ODKFileUtils.copyDirectory(assetsInstanceFolder, tableInstanceFolder); - instancesHavingData.remove(assetsInstanceFolder); + SyncState syncState = SyncState.valueOf(syncStateStr); + if (syncState != SyncState.new_row) { + continue; } + } + + ContentValues cv = new ContentValues(); + for (DataKeyValue dataKeyValue : r.getValues()) { + if (columnNames.contains(dataKeyValue.column)) { + cv.put(dataKeyValue.column, dataKeyValue.value); + } } - cr.close(); - return true; - } catch (IOException ignored) { - return false; - } finally { - try { - input.close(); - } catch (IOException ignored) { - // we never even opened the file + + // The admin columns get added here + cv.put(DataTableColumns.ID, r.getRowId()); + cv.put(DataTableColumns.FORM_ID, r.getFormId()); + cv.put(DataTableColumns.LOCALE, r.getLocale()); + cv.put(DataTableColumns.SAVEPOINT_TYPE, r.getSavepointType()); + cv.put(DataTableColumns.SAVEPOINT_TIMESTAMP, r.getSavepointTimestamp()); + cv.put(DataTableColumns.SAVEPOINT_CREATOR, r.getSavepointCreator()); + cv.put(DataTableColumns.ROW_ETAG, r.getRowETag()); + cv.put(DataTableColumns.DEFAULT_ACCESS, r.getRowFilterScope().getDefaultAccess().name()); + cv.put(DataTableColumns.ROW_OWNER, r.getRowFilterScope().getRowOwner()); + cv.put(DataTableColumns.GROUP_READ_ONLY, r.getRowFilterScope().getGroupReadOnly()); + cv.put(DataTableColumns.GROUP_MODIFY, r.getRowFilterScope().getGroupModify()); + cv.put(DataTableColumns.GROUP_PRIVILEGED, r.getRowFilterScope().getGroupPrivileged()); + + cv.put(DataTableColumns.SYNC_STATE, SyncState.new_row.name()); + cv.putNull(DataTableColumns.CONFLICT_TYPE); + + // delete the existing row then insert the new values for it + 
supervisor.getDatabase() + .privilegedDeleteRowWithId(appName, db, tableId, orderedDefns, r.getRowId()); + supervisor.getDatabase() + .privilegedInsertRowWithId(appName, db, tableId, orderedDefns, cv, r.getRowId(), true); + + /* + * Copy all attachment files into the destination row. + * The attachments are in instance-id-labeled sub-directories. + * Anything in the corresponding subdirectory should be + * referenced by the valuesMap above. If it isn't, don't worry about + * it. This is a simplification. + */ + File assetsInstanceFolder = new File( + ODKFileUtils.getAssetsCsvInstanceFolder(appName, tableId, r.getRowId())); + if (instancesHavingData.contains(assetsInstanceFolder)) { + File tableInstanceFolder = new File( + ODKFileUtils.getInstanceFolder(appName, tableId, r.getRowId())); + tableInstanceFolder.mkdirs(); + ODKFileUtils.copyDirectory(assetsInstanceFolder, tableInstanceFolder); + instancesHavingData.remove(assetsInstanceFolder); } } - } catch (IOException ignored) { + + return true; + } catch (IOException | IllegalArgumentException ignored) { + logger.printStackTrace(ignored); return false; } finally { if (db != null) { @@ -748,5 +549,4 @@ public boolean accept(File pathname) { } } } - } diff --git a/androidlibrary_lib/src/main/java/org/opendatakit/builder/csvparser/Parser.java b/androidlibrary_lib/src/main/java/org/opendatakit/builder/csvparser/Parser.java new file mode 100644 index 00000000..c5fe2fad --- /dev/null +++ b/androidlibrary_lib/src/main/java/org/opendatakit/builder/csvparser/Parser.java @@ -0,0 +1,209 @@ +package org.opendatakit.builder.csvparser; + +import com.fasterxml.jackson.databind.DeserializationFeature; +import com.fasterxml.jackson.databind.MapperFeature; +import com.fasterxml.jackson.databind.MappingIterator; +import com.fasterxml.jackson.databind.ObjectReader; +import com.fasterxml.jackson.dataformat.csv.CsvMapper; +import com.fasterxml.jackson.dataformat.csv.CsvParser; +import com.fasterxml.jackson.dataformat.csv.CsvSchema; + 
+import org.apache.commons.lang3.time.DateUtils; +import org.opendatakit.aggregate.odktables.rest.SavepointTypeManipulator; +import org.opendatakit.aggregate.odktables.rest.TableConstants; +import org.opendatakit.aggregate.odktables.rest.entity.Row; +import org.opendatakit.aggregate.odktables.rest.entity.RowFilterScope; +import org.opendatakit.builder.csvparser.jackson.CsvRow; +import org.opendatakit.builder.csvparser.jackson.RowCsvMixin; +import org.opendatakit.builder.csvparser.jackson.RowFilterScopeCsvMixin; +import org.opendatakit.database.utilities.CursorUtils; +import org.opendatakit.utilities.LocalizationUtils; + +import java.io.File; +import java.io.IOException; +import java.text.ParseException; +import java.util.ArrayList; +import java.util.List; + +public class Parser { + private static final String[] TIMESTAMP_PATTERNS = { + "yyyy-MM-dd'T'HH:mm:ss.SSS", // ISO 8601 w/o TZ + "yyyy-MM-dd'T'HH:mm:ss.SSSZ", // ISO 8601 UTC + "yyyy-MM-dd'T'HH:mm:ss.SSSXXX", // ISO 8601 w/ TZ + "EEE, d MMM yyyy HH:mm:ss Z", // RFC1123 + "MMMM dd, yyyy", + "dd/MM/yyyy", + "yyyy/MM/dd", + "dd-MM-yyyy", + "yyyy-MM-dd" + }; + + private final ObjectReader objectReader; + + public Parser() { + CsvMapper csvMapper = (CsvMapper) new CsvMapper() + .enable(CsvParser.Feature.FAIL_ON_MISSING_COLUMNS) + .enable(CsvParser.Feature.SKIP_EMPTY_LINES) + .enable(DeserializationFeature.READ_UNKNOWN_ENUM_VALUES_AS_NULL) + .enable(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT) + .addMixIn(CsvRow.class, RowCsvMixin.class) + .addMixIn(RowFilterScope.class, RowFilterScopeCsvMixin.class); + + CsvSchema csvSchema = CsvSchema.builder() + .addColumnsFrom(csvMapper.schemaFor(Row.class)) + .build() + .withHeader() + .withColumnReordering(true); + + objectReader = csvMapper + .readerFor(CsvRow.class) + .with(csvSchema);; + } + + public List parse(File file) throws IOException { + MappingIterator iterator = null; + + try { + iterator = objectReader.readValues(file); + + List rows = new 
ArrayList<>(); + while (iterator.hasNext()) { + CsvRow nextRow = iterator.next(); + nextRow.setValues(Row.convertFromMap(nextRow.getColumns())); + nextRow = populateWithDefault(nextRow); + + rows.add(nextRow); + } + + return rows; + } finally { + if (iterator != null) { + iterator.close(); + } + } + } + + public List parseAsCsvRow(File file) throws IOException { + MappingIterator iterator = null; + + try { + iterator = objectReader.readValues(file); + List rows = iterator.readAll(); + + for (CsvRow row : rows) { + populateWithDefault(row); + } + + return rows; + } finally { + if (iterator != null) { + iterator.close(); + } + } + } + + private T populateWithDefault(T row) { + if (row.getRowId() == null || row.getRowId().isEmpty()) { + row.setRowId(LocalizationUtils.genUUID()); + } + + if (row.getRowETag() == null || row.getRowETag().isEmpty()) { + row.setRowETag(null); + } + + if (row.getSavepointCreator() == null || row.getSavepointCreator().isEmpty()) { + row.setSavepointCreator(CursorUtils.DEFAULT_CREATOR); + } + + if (row.getFormId() == null || row.getFormId().isEmpty()) { + row.setFormId(null); + } + + if (row.getLocale() == null || row.getLocale().isEmpty()) { + row.setLocale(CursorUtils.DEFAULT_LOCALE); + } + + // savepointType cannot be null, empty or a value other than complete or incomplete + if (row.getSavepointType() == null || row.getSavepointType().isEmpty() || + !(row.getSavepointType().equals(SavepointTypeManipulator.complete()) || + row.getSavepointType().equals(SavepointTypeManipulator.incomplete()))) { + row.setSavepointType(SavepointTypeManipulator.complete()); + } + + row.setRowFilterScope(populateRowFilterScopeWithDefault(row.getRowFilterScope())); + row.setSavepointTimestamp(convertInvalidTimestamp(row.getSavepointTimestamp())); + + return row; + } + + private RowFilterScope populateRowFilterScopeWithDefault(RowFilterScope scope) { + if (scope.getDefaultAccess() == null) { + 
scope.setDefaultAccess(RowFilterScope.EMPTY_ROW_FILTER.getDefaultAccess()); + } + + if (scope.getRowOwner() == null || scope.getRowOwner().isEmpty()) { + scope.setRowOwner(RowFilterScope.EMPTY_ROW_FILTER.getRowOwner()); + } + + if (scope.getGroupReadOnly() == null || scope.getGroupReadOnly().isEmpty()) { + scope.setGroupReadOnly(RowFilterScope.EMPTY_ROW_FILTER.getGroupReadOnly()); + } + + if (scope.getGroupModify() == null || scope.getGroupModify().isEmpty()) { + scope.setGroupModify(RowFilterScope.EMPTY_ROW_FILTER.getGroupModify()); + } + + if (scope.getGroupPrivileged() == null || scope.getGroupPrivileged().isEmpty()) { + scope.setGroupPrivileged(RowFilterScope.EMPTY_ROW_FILTER.getGroupPrivileged()); + } + + return scope; + } + + private String convertInvalidTimestamp(String timestamp) { + // timestamp is null/empty, use the current time + if (timestamp == null || timestamp.isEmpty()) { + return TableConstants.nanoSecondsFromMillis( + System.currentTimeMillis(), TableConstants.TIMESTAMP_LOCALE); + } + + // timestamp is valid + try { + TableConstants.milliSecondsFromNanos(timestamp, TableConstants.TIMESTAMP_LOCALE); + return timestamp; + } catch (IllegalArgumentException e) { + // ignored + // IllegalArgumentException is thrown when the pattern doesn't match + } + + // try our timestamp format with system default locale + try { + Long timeLong = TableConstants.milliSecondsFromNanos(timestamp, null); + return TableConstants.nanoSecondsFromMillis(timeLong, TableConstants.TIMESTAMP_LOCALE); + } catch (IllegalArgumentException e) { + // ignored + } + + // try with DateUtils + try { + long timeLong = DateUtils + .parseDate(timestamp, TableConstants.TIMESTAMP_LOCALE, TIMESTAMP_PATTERNS) + .getTime(); + return TableConstants.nanoSecondsFromMillis(timeLong, TableConstants.TIMESTAMP_LOCALE); + } catch (ParseException e) { + // ignored + } + + // try with DateUtils and system default locale + try { + long timeLong = DateUtils + .parseDate(timestamp, TIMESTAMP_PATTERNS) + 
.getTime(); + return TableConstants.nanoSecondsFromMillis(timeLong, TableConstants.TIMESTAMP_LOCALE); + } catch (ParseException e) { + // ignored + } + + throw new IllegalArgumentException("Unable to parse timestamp"); + } +} diff --git a/androidlibrary_lib/src/main/java/org/opendatakit/builder/csvparser/jackson/CsvRow.java b/androidlibrary_lib/src/main/java/org/opendatakit/builder/csvparser/jackson/CsvRow.java new file mode 100644 index 00000000..9a758fc5 --- /dev/null +++ b/androidlibrary_lib/src/main/java/org/opendatakit/builder/csvparser/jackson/CsvRow.java @@ -0,0 +1,43 @@ +package org.opendatakit.builder.csvparser.jackson; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonUnwrapped; + +import org.apache.commons.lang3.time.DateUtils; +import org.opendatakit.aggregate.odktables.rest.SavepointTypeManipulator; +import org.opendatakit.aggregate.odktables.rest.TableConstants; +import org.opendatakit.aggregate.odktables.rest.entity.Row; +import org.opendatakit.aggregate.odktables.rest.entity.RowFilterScope; +import org.opendatakit.database.utilities.CursorUtils; +import org.opendatakit.utilities.LocalizationUtils; + +import java.util.HashMap; +import java.util.Map; + +public class CsvRow extends Row { + private final Map columns; + + @JsonAnyGetter + @JsonUnwrapped + public Map getColumns() { + return columns; + } + + @JsonAnySetter + public void anySetter(String key, String value) { + // set empty strings to null, + // because this is how org.opendatakit.aggregate.odktables.rest.RFC4180CsvReader does it + if (value.isEmpty()) { + value = null; + } + + if (!key.startsWith("_")) { + columns.put(key, value); + } + } + + public CsvRow() { + this.columns = new HashMap<>(); + } +} diff --git a/androidlibrary_lib/src/main/java/org/opendatakit/builder/csvparser/jackson/RowCsvMixin.java 
package org.opendatakit.builder.csvparser.jackson;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonUnwrapped;

import org.opendatakit.aggregate.odktables.rest.TableConstants;
import org.opendatakit.aggregate.odktables.rest.entity.Row;
import org.opendatakit.aggregate.odktables.rest.entity.RowFilterScope;

/**
 * Jackson mix-in that binds the {@link Row} bean properties to the ODK CSV
 * metadata column names from {@link TableConstants}, hides server-only fields
 * from (de)serialization, and flattens the row filter scope into the row.
 *
 * Declarations are grouped as getter/setter pairs per property; each pair
 * must carry identical annotations.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public abstract class RowCsvMixin extends Row {

  // --- CSV-mapped metadata columns ---

  @Override @JsonProperty(TableConstants.ID) public abstract String getRowId();
  @Override @JsonProperty(TableConstants.ID) public abstract void setRowId(String rowId);

  @Override @JsonProperty(TableConstants.ROW_ETAG) public abstract String getRowETag();
  @Override @JsonProperty(TableConstants.ROW_ETAG) public abstract void setRowETag(String rowETag);

  @Override @JsonProperty(TableConstants.SAVEPOINT_CREATOR) public abstract String getSavepointCreator();
  @Override @JsonProperty(TableConstants.SAVEPOINT_CREATOR) public abstract void setSavepointCreator(String savepointCreator);

  @Override @JsonProperty(TableConstants.FORM_ID) public abstract String getFormId();
  @Override @JsonProperty(TableConstants.FORM_ID) public abstract void setFormId(String formId);

  @Override @JsonProperty(TableConstants.LOCALE) public abstract String getLocale();
  @Override @JsonProperty(TableConstants.LOCALE) public abstract void setLocale(String locale);

  @Override @JsonProperty(TableConstants.SAVEPOINT_TYPE) public abstract String getSavepointType();
  @Override @JsonProperty(TableConstants.SAVEPOINT_TYPE) public abstract void setSavepointType(String savepointType);

  @Override @JsonProperty(TableConstants.SAVEPOINT_TIMESTAMP) public abstract String getSavepointTimestamp();
  @Override @JsonProperty(TableConstants.SAVEPOINT_TIMESTAMP) public abstract void setSavepointTimestamp(String savepointTimestamp);

  // --- Filter scope is flattened into the row's columns ---

  @Override @JsonUnwrapped public abstract RowFilterScope getRowFilterScope();
  @Override @JsonUnwrapped public abstract void setRowFilterScope(RowFilterScope filterScope);

  // --- Server-side-only fields: never read from or written to CSV ---

  @Override @JsonIgnore public abstract String getDataETagAtModification();
  @Override @JsonIgnore public abstract void setDataETagAtModification(String dataETagAtModification);

  @Override @JsonIgnore public abstract boolean isDeleted();
  @Override @JsonIgnore public abstract void setDeleted(boolean deleted);

  @Override @JsonIgnore public abstract String getCreateUser();
  @Override @JsonIgnore public abstract void setCreateUser(String createUser);

  @Override @JsonIgnore public abstract String getLastUpdateUser();
  @Override @JsonIgnore public abstract void setLastUpdateUser(String lastUpdateUser);
}

// ---------------------------------------------------------------------------
// File: RowFilterScopeCsvMixin.java
// ---------------------------------------------------------------------------

package org.opendatakit.builder.csvparser.jackson;

import com.fasterxml.jackson.annotation.JsonProperty;

import org.opendatakit.aggregate.odktables.rest.TableConstants;
import org.opendatakit.aggregate.odktables.rest.entity.RowFilterScope;

/**
 * Jackson mix-in that binds {@link RowFilterScope} bean properties to their
 * ODK CSV column names from {@link TableConstants}. Never instantiated; the
 * private constructor only satisfies the superclass constructor requirement.
 */
public abstract class RowFilterScopeCsvMixin extends RowFilterScope {

  private RowFilterScopeCsvMixin(Access defaultAccess, String owner,
      String readOnlyGroup, String modifyGroup, String privilegedGroup) {
    super(defaultAccess, owner, readOnlyGroup, modifyGroup, privilegedGroup);
  }

  @Override @JsonProperty(TableConstants.DEFAULT_ACCESS) public abstract Access getDefaultAccess();
  @Override @JsonProperty(TableConstants.DEFAULT_ACCESS) public abstract void setDefaultAccess(Access access);

  @Override @JsonProperty(TableConstants.ROW_OWNER) public abstract String getRowOwner();
  @Override @JsonProperty(TableConstants.ROW_OWNER) public abstract void setRowOwner(String rowOwner);

  @Override @JsonProperty(TableConstants.GROUP_READ_ONLY) public abstract String getGroupReadOnly();
  @Override @JsonProperty(TableConstants.GROUP_READ_ONLY) public abstract void setGroupReadOnly(String groupReadOnly);

  @Override @JsonProperty(TableConstants.GROUP_MODIFY) public abstract String getGroupModify();
  @Override @JsonProperty(TableConstants.GROUP_MODIFY) public abstract void setGroupModify(String groupModify);

  @Override @JsonProperty(TableConstants.GROUP_PRIVILEGED) public abstract String getGroupPrivileged();
  @Override @JsonProperty(TableConstants.GROUP_PRIVILEGED) public abstract void setGroupPrivileged(String groupPrivileged);
}
//e.printStackTrace(); + if (BuildConfig.DEBUG) { + e.printStackTrace(); + } ByteArrayOutputStream ba = new ByteArrayOutputStream(); PrintStream w; try { diff --git a/androidlibrary_lib/src/main/java/org/opendatakit/properties/PropertiesSingleton.java b/androidlibrary_lib/src/main/java/org/opendatakit/properties/PropertiesSingleton.java index 2301fb2b..346ad86e 100644 --- a/androidlibrary_lib/src/main/java/org/opendatakit/properties/PropertiesSingleton.java +++ b/androidlibrary_lib/src/main/java/org/opendatakit/properties/PropertiesSingleton.java @@ -383,15 +383,21 @@ public boolean shouldRunCommonInitializationTask() { public void clearRunInitializationTask(String toolName) { // this is stored in the device properties readPropertiesIfModified(); - mDeviceProps.setProperty(toolInitializationPropertyName(toolName), - TableConstants.nanoSecondsFromMillis(System.currentTimeMillis())); + mDeviceProps.setProperty( + toolInitializationPropertyName(toolName), + TableConstants.nanoSecondsFromMillis( + System.currentTimeMillis(), TableConstants.TIMESTAMP_LOCALE) + ); writeProperties(false, true, false); } public void clearRunCommonInitializationTask() { readPropertiesIfModified(); - mDeviceProps.setProperty(CommonToolProperties.KEY_COMMON_INITIALIZATION, - TableConstants.nanoSecondsFromMillis(System.currentTimeMillis())); + mDeviceProps.setProperty( + CommonToolProperties.KEY_COMMON_INITIALIZATION, + TableConstants.nanoSecondsFromMillis( + System.currentTimeMillis(), TableConstants.TIMESTAMP_LOCALE) + ); writeProperties(false, true, false); } diff --git a/androidlibrary_lib/src/main/java/org/opendatakit/utilities/DateUtils.java b/androidlibrary_lib/src/main/java/org/opendatakit/utilities/DateUtils.java index 07dde4ef..551fce4f 100644 --- a/androidlibrary_lib/src/main/java/org/opendatakit/utilities/DateUtils.java +++ b/androidlibrary_lib/src/main/java/org/opendatakit/utilities/DateUtils.java @@ -211,6 +211,6 @@ private int tryParseDuration(String input) { } public String 
formatDateTimeForDb(DateTime dt) { - return TableConstants.nanoSecondsFromMillis(dt.getMillis()); + return TableConstants.nanoSecondsFromMillis(dt.getMillis(), TableConstants.TIMESTAMP_LOCALE); } }