From 44de7eecbe6878812fc40420005bf2b93b9f652c Mon Sep 17 00:00:00 2001 From: bbimber Date: Wed, 2 Jul 2025 21:07:35 -0700 Subject: [PATCH 1/9] Put ETL inside transaction --- .../sivstudies/etl/SubjectScopedSelect.java | 180 +++++++++--------- 1 file changed, 95 insertions(+), 85 deletions(-) diff --git a/SivStudies/src/org/labkey/sivstudies/etl/SubjectScopedSelect.java b/SivStudies/src/org/labkey/sivstudies/etl/SubjectScopedSelect.java index a36369ce..6c750b8b 100644 --- a/SivStudies/src/org/labkey/sivstudies/etl/SubjectScopedSelect.java +++ b/SivStudies/src/org/labkey/sivstudies/etl/SubjectScopedSelect.java @@ -9,6 +9,7 @@ import org.labkey.api.data.CompareType; import org.labkey.api.data.Container; import org.labkey.api.data.ContainerManager; +import org.labkey.api.data.DbScope; import org.labkey.api.data.SimpleFilter; import org.labkey.api.data.TableInfo; import org.labkey.api.data.TableSelector; @@ -100,7 +101,7 @@ public boolean isRequired() } } - final int BATCH_SIZE = 100; + final int BATCH_SIZE = 250; private MODE getMode() { @@ -136,127 +137,136 @@ private void checkCancelled(PipelineJob job) private void processBatch(List subjects, Logger log, PipelineJob job) { log.info("processing batch with " + subjects.size() + " subjects"); - TableInfo destinationTable = getDataDestinationTable(); + try (DbScope.Transaction t = DbScope.getLabKeyScope().ensureTransaction()) + { + TableInfo destinationTable = getDataDestinationTable(); - QueryUpdateService qus = destinationTable.getUpdateService(); - qus.setBulkLoad(true); + QueryUpdateService qus = destinationTable.getUpdateService(); + qus.setBulkLoad(true); - try - { - if (getMode() == MODE.TRUNCATE) + try { - // Find / Delete existing values: - Set keyFields = destinationTable.getColumns().stream().filter(ColumnInfo::isKeyField).collect(Collectors.toSet()); - final SimpleFilter subjectFilter = new SimpleFilter(FieldKey.fromString(_settings.get(Settings.targetSubjectColumn.name())), subjects, CompareType.IN); - if (_settings.get(Settings.targetAdditionalFilters.name()) != null) + if (getMode() == MODE.TRUNCATE) { - List additionalFilters = parseAdditionalFilters(_settings.get(Settings.targetAdditionalFilters.name())); - additionalFilters.forEach(subjectFilter::addCondition); - } + // Find / Delete existing values: + Set keyFields = destinationTable.getColumns().stream().filter(ColumnInfo::isKeyField).collect(Collectors.toSet()); + final SimpleFilter subjectFilter = new SimpleFilter(FieldKey.fromString(_settings.get(Settings.targetSubjectColumn.name())), subjects, CompareType.IN); + if (_settings.get(Settings.targetAdditionalFilters.name()) != null) + { + List additionalFilters = parseAdditionalFilters(_settings.get(Settings.targetAdditionalFilters.name())); + additionalFilters.forEach(subjectFilter::addCondition); + } - if (destinationTable.getColumn(FieldKey.fromString(_settings.get(Settings.targetSubjectColumn.name()))) == null) - { - throw new IllegalStateException("Unknown column on table " + destinationTable.getName() + ": " + _settings.get(Settings.targetSubjectColumn.name())); - } + if (destinationTable.getColumn(FieldKey.fromString(_settings.get(Settings.targetSubjectColumn.name()))) == null) + { + throw new IllegalStateException("Unknown column on table " + destinationTable.getName() + ": " + _settings.get(Settings.targetSubjectColumn.name())); + } - List> existingRows = new ArrayList<>(new TableSelector(destinationTable, keyFields, subjectFilter, null).getMapCollection()); - if (!existingRows.isEmpty()) - { - List>> batches = 
Lists.partition(existingRows, 5000); - log.info("deleting " + existingRows.size() + " rows in " + batches.size() + " batches"); - int i = 0; - for (List> batch : batches) + List> existingRows = new ArrayList<>(new TableSelector(destinationTable, keyFields, subjectFilter, null).getMapCollection()); + if (!existingRows.isEmpty()) { - i++; - log.info("batch " + i); - checkCancelled(job); + List>> batches = Lists.partition(existingRows, 5000); + log.info("deleting " + existingRows.size() + " rows in " + batches.size() + " batches"); + int i = 0; + for (List> batch : batches) + { + i++; + log.info("batch " + i); + checkCancelled(job); - qus.deleteRows(_containerUser.getUser(), _containerUser.getContainer(), batch, new HashMap<>(Map.of(DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior, NONE, QueryUpdateService.ConfigParameters.BulkLoad, true)), null); + qus.deleteRows(_containerUser.getUser(), _containerUser.getContainer(), batch, new HashMap<>(Map.of(DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior, NONE, QueryUpdateService.ConfigParameters.BulkLoad, true)), null); + t.commitAndKeepConnection(); + } + } + else + { + log.info("No rows to delete for this subject batch"); } } else { - log.info("No rows to delete for this subject batch"); + log.info("Using " + getMode().name() + " mode, source records will not be deleted"); } - } - else - { - log.info("Using " + getMode().name() + " mode, source records will not be deleted"); - } - // Query data and import - List> toImportOrUpdate = getRowsToImport(subjects, log); - if (!toImportOrUpdate.isEmpty()) - { - if (getMode() == MODE.TRUNCATE) + // Query data and import + List> toImportOrUpdate = getRowsToImport(subjects, log); + if (!toImportOrUpdate.isEmpty()) { - List>> batches = Lists.partition(toImportOrUpdate, 5000); - log.info("inserting " + toImportOrUpdate.size() + " rows in " + batches.size() + " batches"); - - int i = 0; - for (List> batch : batches) + if (getMode() == MODE.TRUNCATE) { - i++; - log.info("batch " + i); - checkCancelled(job); + List>> batches = Lists.partition(toImportOrUpdate, 5000); + log.info("inserting " + toImportOrUpdate.size() + " rows in " + batches.size() + " batches"); - BatchValidationException bve = new BatchValidationException(); - qus.insertRows(_containerUser.getUser(), _containerUser.getContainer(), batch, bve, new HashMap<>(Map.of(DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior, NONE, QueryUpdateService.ConfigParameters.BulkLoad, true)), null); - if (bve.hasErrors()) + int i = 0; + for (List> batch : batches) { - throw bve; + i++; + log.info("batch " + i); + checkCancelled(job); + + BatchValidationException bve = new BatchValidationException(); + qus.insertRows(_containerUser.getUser(), _containerUser.getContainer(), batch, bve, new HashMap<>(Map.of(DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior, NONE, QueryUpdateService.ConfigParameters.BulkLoad, true)), null); + if (bve.hasErrors()) + { + throw bve; + } + t.commitAndKeepConnection(); } } - } - else if (getMode() == MODE.UPDATE_ONLY) - { - List>> batches = Lists.partition(toImportOrUpdate, 5000); - log.info("updating " + toImportOrUpdate.size() + " rows in " + batches.size() + " batches"); - - int i = 0; - for (List> batch : batches) + else if (getMode() == MODE.UPDATE_ONLY) { + List>> batches = Lists.partition(toImportOrUpdate, 5000); + log.info("updating " + toImportOrUpdate.size() + " rows in " + batches.size() + " batches"); - i++; - log.info("batch " + i); - checkCancelled(job); + int i = 0; + for (List> batch : batches) + { - 
BatchValidationException bve = new BatchValidationException(); + i++; + log.info("batch " + i); + checkCancelled(job); - Collection keyFields = destinationTable.getPkColumnNames(); - List> keys = batch.stream().map(x -> { - Map map = new HashMap<>(); - for (String keyField : keyFields) - { - if (x.get(keyField) != null) + BatchValidationException bve = new BatchValidationException(); + + Collection keyFields = destinationTable.getPkColumnNames(); + List> keys = batch.stream().map(x -> { + Map map = new HashMap<>(); + for (String keyField : keyFields) { - map.put(keyField, x.get(keyField)); + if (x.get(keyField) != null) + { + map.put(keyField, x.get(keyField)); + } } - } - return map; - }).toList(); + return map; + }).toList(); - qus.updateRows(_containerUser.getUser(), _containerUser.getContainer(), batch, keys, bve, new HashMap<>(Map.of(DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior, NONE, QueryUpdateService.ConfigParameters.BulkLoad, true)), null); - if (bve.hasErrors()) - { - throw bve; + qus.updateRows(_containerUser.getUser(), _containerUser.getContainer(), batch, keys, bve, new HashMap<>(Map.of(DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior, NONE, QueryUpdateService.ConfigParameters.BulkLoad, true)), null); + if (bve.hasErrors()) + { + throw bve; + } + t.commitAndKeepConnection(); } } + else + { + throw new IllegalStateException("Unknown mode: " + getMode()); + } } else { - throw new IllegalStateException("Unknown mode: " + getMode()); + log.info("No rows to import/update for this subject batch"); } } - else + catch (SQLException | InvalidKeyException | BatchValidationException | QueryUpdateServiceException | + DuplicateKeyException e) { - log.info("No rows to import/update for this subject batch"); + throw new IllegalStateException("Error Importing/Updating Rows", e); } - } - catch (SQLException | InvalidKeyException | BatchValidationException | QueryUpdateServiceException | DuplicateKeyException e) - { - throw new IllegalStateException("Error Importing/Updating Rows", e); + + t.commit(); } } From 3a1fbe74a700cfc759d33cd215745bf320166dd9 Mon Sep 17 00:00:00 2001 From: bbimber Date: Thu, 3 Jul 2025 05:30:27 -0700 Subject: [PATCH 2/9] Add calculated columns --- tcrdb/src/org/labkey/tcrdb/TCRdbModule.java | 2 + .../labkey/tcrdb/TCRdbTableCustomizer.java | 38 +++++++++++++++++++ .../src/org/labkey/tcrdb/TCRdbUserSchema.java | 6 ++- 3 files changed, 45 insertions(+), 1 deletion(-) diff --git a/tcrdb/src/org/labkey/tcrdb/TCRdbModule.java b/tcrdb/src/org/labkey/tcrdb/TCRdbModule.java index d452b218..18e7b327 100644 --- a/tcrdb/src/org/labkey/tcrdb/TCRdbModule.java +++ b/tcrdb/src/org/labkey/tcrdb/TCRdbModule.java @@ -72,6 +72,8 @@ protected void doStartupAfterSpringConfig(ModuleContext moduleContext) LaboratoryService.get().registerTableCustomizer(this, TCRdbTableCustomizer.class, TCRdbSchema.SEQUENCE_ANALYSIS, "sequence_readsets"); LaboratoryService.get().registerTableCustomizer(this, TCRdbTableCustomizer.class, TCRdbSchema.SEQUENCE_ANALYSIS, "sequence_analyses"); LaboratoryService.get().registerTableCustomizer(this, TCRdbTableCustomizer.class, TCRdbSchema.NAME, TCRdbSchema.TABLE_CLONES); + LaboratoryService.get().registerTableCustomizer(this, TCRdbTableCustomizer.class, TCRdbSchema.NAME, TCRdbSchema.TABLE_CLONE_RESPONSES); + LaboratoryService.get().registerTableCustomizer(this, TCRdbTableCustomizer.class, TCRdbSchema.NAME, TCRdbSchema.TABLE_STIM_EXPERIMENTS); LaboratoryService.get().registerTableCustomizer(this, TCRdbTableCustomizer.class, TCRdbSchema.SINGLE_CELL, 
TCRdbSchema.TABLE_CDNAS); LDKService.get().registerQueryButton(new ChangeStatusButton(), TCRdbSchema.SINGLE_CELL, "samples"); diff --git a/tcrdb/src/org/labkey/tcrdb/TCRdbTableCustomizer.java b/tcrdb/src/org/labkey/tcrdb/TCRdbTableCustomizer.java index 1a9f0094..44226a3f 100644 --- a/tcrdb/src/org/labkey/tcrdb/TCRdbTableCustomizer.java +++ b/tcrdb/src/org/labkey/tcrdb/TCRdbTableCustomizer.java @@ -12,6 +12,7 @@ import org.labkey.api.data.SimpleFilter; import org.labkey.api.data.Table; import org.labkey.api.data.TableInfo; +import org.labkey.api.data.WrappedColumn; import org.labkey.api.exp.api.ExpProtocol; import org.labkey.api.laboratory.LaboratoryService; import org.labkey.api.ldk.LDKService; @@ -19,6 +20,7 @@ import org.labkey.api.query.DetailsURL; import org.labkey.api.query.ExprColumn; import org.labkey.api.query.FieldKey; +import org.labkey.api.query.QueryForeignKey; import org.labkey.api.query.QueryService; import java.util.Arrays; @@ -48,6 +50,14 @@ else if (matches(ti, TCRdbSchema.NAME, TCRdbSchema.TABLE_CLONES)) { customizeClones(ti); } + else if (matches(ti, TCRdbSchema.NAME, TCRdbSchema.TABLE_CLONE_RESPONSES)) + { + customizeCloneResponses(ti); + } + else if (matches(ti, TCRdbSchema.NAME, TCRdbSchema.TABLE_STIM_EXPERIMENTS)) + { + customizeStims(ti); + } else if (ti instanceof AssayResultTable) { customizeAssayData(ti); @@ -223,6 +233,34 @@ private void addClonotypeForLocusCol(AbstractTableInfo ti, SQLFragment selectSql ti.addColumn(newCol); } + private void customizeCloneResponses(AbstractTableInfo ti) + { + if (ti.getColumn("stimId") == null) + { + WrappedColumn col = new WrappedColumn(ti.getColumn("cDNA_ID"), "stimId"); + col.setReadOnly(true); + col.setUserEditable(false); + col.setLabel("Stim Experiment"); + col.setFk(new QueryForeignKey(ti.getUserSchema(), null, ti.getUserSchema(), null, TCRdbSchema.TABLE_STIM_EXPERIMENTS, "cdna_id", "cdna_id")); + + ti.addColumn(col); + } + } + + private void customizeStims(AbstractTableInfo ti) + { + if (ti.getColumn("controlStim") == null) + { + WrappedColumn col = new WrappedColumn(ti.getColumn("controlStimId"), "controlStim"); + col.setReadOnly(true); + col.setUserEditable(false); + col.setLabel("Control Stim Info"); + col.setFk(new QueryForeignKey(ti.getUserSchema(), null, ti.getUserSchema(), null, TCRdbSchema.TABLE_STIM_EXPERIMENTS, "cdna_id", "cdna_id")); + + ti.addColumn(col); + } + } + private void customizeClones(AbstractTableInfo ti) { LDKService.get().applyNaturalSort(ti, "cloneName"); diff --git a/tcrdb/src/org/labkey/tcrdb/TCRdbUserSchema.java b/tcrdb/src/org/labkey/tcrdb/TCRdbUserSchema.java index 188fcc11..505bd957 100644 --- a/tcrdb/src/org/labkey/tcrdb/TCRdbUserSchema.java +++ b/tcrdb/src/org/labkey/tcrdb/TCRdbUserSchema.java @@ -6,6 +6,7 @@ import org.labkey.api.data.ContainerFilter; import org.labkey.api.data.DbSchema; import org.labkey.api.data.TableInfo; +import org.labkey.api.ldk.table.ContainerScopedTable; import org.labkey.api.ldk.table.SharedDataTable; import org.labkey.api.module.Module; import org.labkey.api.query.DefaultSchema; @@ -43,9 +44,12 @@ protected TableInfo createWrappedTable(String name, @NotNull TableInfo sourceTab { if (TCRdbSchema.TABLE_MIXCR_LIBRARIES.equalsIgnoreCase(name)) { - // TODO: assert cf is null or not default? 
return new SharedDataTable<>(this, sourceTable).init(); } + else if (TCRdbSchema.TABLE_STIM_EXPERIMENTS.equalsIgnoreCase(name)) + { + return new ContainerScopedTable<>(this, sourceTable, null, "cdna_id").init(); + } return super.createWrappedTable(name, sourceTable, cf); } From 91be6e1e0c5a55c8a48a529aca3f8476d268c5dc Mon Sep 17 00:00:00 2001 From: bbimber Date: Sat, 5 Jul 2025 09:02:38 -0700 Subject: [PATCH 3/9] Bugfix cron string --- .../org/labkey/primeseq/notification/DiskUsageNotification.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/primeseq/src/org/labkey/primeseq/notification/DiskUsageNotification.java b/primeseq/src/org/labkey/primeseq/notification/DiskUsageNotification.java index 799965cf..4ea8e2b9 100644 --- a/primeseq/src/org/labkey/primeseq/notification/DiskUsageNotification.java +++ b/primeseq/src/org/labkey/primeseq/notification/DiskUsageNotification.java @@ -73,7 +73,7 @@ public DateFormat getDateTimeFormat(Container c) @Override public String getCronString() { - return "0 8 * * 1 ?"; + return "0 0 8 ? * MON"; } @Override From 68456502dca81f06653bafa0dbaeec3c9abf7f4f Mon Sep 17 00:00:00 2001 From: bbimber Date: Mon, 7 Jul 2025 13:46:29 -0700 Subject: [PATCH 4/9] Add SIV calculated fields --- .../study/demographics/Expanded.qview.xml | 2 ++ .../demographicsChallengeAndArt.query.xml | 22 +++++++++++++++++++ .../study/demographicsChallengeAndArt.sql | 13 +++++++++++ .../resources/views/participantView.html | 7 ++++++ .../query/SivStudiesCustomizer.java | 7 ++++++ 5 files changed, 51 insertions(+) create mode 100644 SivStudies/resources/queries/study/demographicsChallengeAndArt.query.xml create mode 100644 SivStudies/resources/queries/study/demographicsChallengeAndArt.sql create mode 100644 SivStudies/resources/views/participantView.html diff --git a/SivStudies/resources/queries/study/demographics/Expanded.qview.xml b/SivStudies/resources/queries/study/demographics/Expanded.qview.xml index 6cfb780d..8d3c3e73 100644 --- a/SivStudies/resources/queries/study/demographics/Expanded.qview.xml +++ b/SivStudies/resources/queries/study/demographics/Expanded.qview.xml @@ -9,6 +9,8 @@ + + diff --git a/SivStudies/resources/queries/study/demographicsChallengeAndArt.query.xml b/SivStudies/resources/queries/study/demographicsChallengeAndArt.query.xml new file mode 100644 index 00000000..8e3ec535 --- /dev/null +++ b/SivStudies/resources/queries/study/demographicsChallengeAndArt.query.xml @@ -0,0 +1,22 @@ + + + + + SIV/ART Summary + + + true + true + + + SIV Infection + + + ART + + + allInfections +
+
+
+
diff --git a/SivStudies/resources/queries/study/demographicsChallengeAndArt.sql b/SivStudies/resources/queries/study/demographicsChallengeAndArt.sql new file mode 100644 index 00000000..63c9f2f2 --- /dev/null +++ b/SivStudies/resources/queries/study/demographicsChallengeAndArt.sql @@ -0,0 +1,13 @@ +SELECT + t.Id, + group_concat(DISTINCT CASE + WHEN t.category = 'SIV Infection' THEN (cast(month(t.date) as varchar) || '-' || cast(dayofmonth(t.date) as varchar) || '-' || cast(year(t.date) as varchar) || ' (' || t.treatment || ')') + ELSE NULL + END, char(10)) as allInfections, + group_concat(DISTINCT CASE + WHEN t.category = 'ART' THEN (cast(month(t.date) as varchar) || '-' || cast(dayofmonth(t.date) as varchar) || '-' || cast(year(t.date) as varchar) || ' (' || t.treatment || ')') + ELSE NULL + END, char(10)) as allART, + +FROM study.treatments t +GROUP BY t.Id \ No newline at end of file diff --git a/SivStudies/resources/views/participantView.html b/SivStudies/resources/views/participantView.html new file mode 100644 index 00000000..64c6df39 --- /dev/null +++ b/SivStudies/resources/views/participantView.html @@ -0,0 +1,7 @@ + \ No newline at end of file diff --git a/SivStudies/src/org/labkey/sivstudies/query/SivStudiesCustomizer.java b/SivStudies/src/org/labkey/sivstudies/query/SivStudiesCustomizer.java index 48b615b8..ebaeb332 100644 --- a/SivStudies/src/org/labkey/sivstudies/query/SivStudiesCustomizer.java +++ b/SivStudies/src/org/labkey/sivstudies/query/SivStudiesCustomizer.java @@ -249,6 +249,13 @@ private void appendDemographicsColumns(AbstractTableInfo parentTable) colInfo.setLabel("Outcomes"); parentTable.addColumn(colInfo); } + + if (parentTable.getColumn("sivART") == null) + { + BaseColumnInfo colInfo = getWrappedIdCol(parentTable.getUserSchema(), "demographicsChallengeAndArt", parentTable, "sivART"); + colInfo.setLabel("SIV/ART Dates"); + parentTable.addColumn(colInfo); + } } private void appendPvlColumns(DatasetTable ds, String subjectColName, String dateColName) From 9071af59d5a2fec924eba4b49752b2edb2f9f3e8 Mon Sep 17 00:00:00 2001 From: bbimber Date: Mon, 7 Jul 2025 14:02:51 -0700 Subject: [PATCH 5/9] Update URL values --- .../resources/queries/study/additionalDatatypes.query.xml | 2 +- SivStudies/resources/queries/study/demographics.query.xml | 1 - SivStudies/resources/queries/study/procedures.query.xml | 2 +- SivStudies/resources/queries/study/studyData.query.xml | 4 ++++ SivStudies/resources/queries/study/weight.query.xml | 2 +- 5 files changed, 7 insertions(+), 4 deletions(-) diff --git a/SivStudies/resources/queries/study/additionalDatatypes.query.xml b/SivStudies/resources/queries/study/additionalDatatypes.query.xml index 07aff712..1b446dfd 100644 --- a/SivStudies/resources/queries/study/additionalDatatypes.query.xml +++ b/SivStudies/resources/queries/study/additionalDatatypes.query.xml @@ -4,7 +4,7 @@ - + diff --git a/SivStudies/resources/queries/study/demographics.query.xml b/SivStudies/resources/queries/study/demographics.query.xml index 49f10a6d..63cf4041 100644 --- a/SivStudies/resources/queries/study/demographics.query.xml +++ b/SivStudies/resources/queries/study/demographics.query.xml @@ -5,7 +5,6 @@ - true diff --git a/SivStudies/resources/queries/study/procedures.query.xml b/SivStudies/resources/queries/study/procedures.query.xml index 06cd4bdc..eaa6a960 100644 --- a/SivStudies/resources/queries/study/procedures.query.xml +++ b/SivStudies/resources/queries/study/procedures.query.xml @@ -4,7 +4,7 @@
- + diff --git a/SivStudies/resources/queries/study/studyData.query.xml b/SivStudies/resources/queries/study/studyData.query.xml index 4a1d46d7..e78b0c84 100644 --- a/SivStudies/resources/queries/study/studyData.query.xml +++ b/SivStudies/resources/queries/study/studyData.query.xml @@ -6,6 +6,7 @@ http://cpas.labkey.com/Study#ParticipantId + laboratory/dataBrowser.view?subjectId=${Id} Date @@ -25,6 +26,9 @@ true + + true +
- + true From 4e6f690b0632f73a9c2f5494c7164eaf5245bd38 Mon Sep 17 00:00:00 2001 From: bbimber Date: Wed, 9 Jul 2025 11:33:31 -0700 Subject: [PATCH 6/9] Improve wording on mGAP login page --- mGAP/resources/views/login.html | 1 + 1 file changed, 1 insertion(+) diff --git a/mGAP/resources/views/login.html b/mGAP/resources/views/login.html index 9dd9f23c..83f51434 100644 --- a/mGAP/resources/views/login.html +++ b/mGAP/resources/views/login.html @@ -14,6 +14,7 @@
+
While mGAP is a free NIH-sponsored resource, we require users register to help us track and report usage to our funders. This information is critical to demonstrate the value and impact of the resource. Please use the 'Request an Account' link below if you are not already registered. Thank you for your understanding.
Sign In
From 59dc9f07f2a4ae4048005f463b1ad919c8ca94a9 Mon Sep 17 00:00:00 2001 From: bbimber Date: Wed, 9 Jul 2025 12:18:50 -0700 Subject: [PATCH 7/9] Improve wording on mGAP login page --- mGAP/resources/views/login.html | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mGAP/resources/views/login.html b/mGAP/resources/views/login.html index 83f51434..471911cf 100644 --- a/mGAP/resources/views/login.html +++ b/mGAP/resources/views/login.html @@ -14,8 +14,8 @@ -
While mGAP is a free NIH-sponsored resource, we require users register to help us track and report usage to our funders. This information is critical to demonstrate the value and impact of the resource. Please use the 'Request an Account' link below if you are not already registered. Thank you for your understanding.
-
Sign In
+
Sign In / Register
+
                While mGAP is a free NIH-sponsored resource, we require users to register to help us track and report usage to our funders. This information is critical to demonstrate the value and impact of the resource. Please request an account if you are not already registered. Thank you for your understanding.

From 20283e50139d3421e6230cb9196c8b576564b627 Mon Sep 17 00:00:00 2001
From: bbimber
Date: Thu, 10 Jul 2025 13:17:49 -0700
Subject: [PATCH 8/9] Change date delimiter

---
 .../resources/queries/study/demographicsChallengeAndArt.sql  | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/SivStudies/resources/queries/study/demographicsChallengeAndArt.sql b/SivStudies/resources/queries/study/demographicsChallengeAndArt.sql
index 63c9f2f2..ea898b42 100644
--- a/SivStudies/resources/queries/study/demographicsChallengeAndArt.sql
+++ b/SivStudies/resources/queries/study/demographicsChallengeAndArt.sql
@@ -1,11 +1,11 @@
 SELECT
     t.Id,
     group_concat(DISTINCT CASE
-        WHEN t.category = 'SIV Infection' THEN (cast(month(t.date) as varchar) || '-' || cast(dayofmonth(t.date) as varchar) || '-' || cast(year(t.date) as varchar) || ' (' || t.treatment || ')')
+        WHEN t.category = 'SIV Infection' THEN (cast(month(t.date) as varchar) || '/' || cast(dayofmonth(t.date) as varchar) || '/' || cast(year(t.date) as varchar) || ' (' || t.treatment || ')')
         ELSE NULL
         END, char(10)) as allInfections,
     group_concat(DISTINCT CASE
-        WHEN t.category = 'ART' THEN (cast(month(t.date) as varchar) || '-' || cast(dayofmonth(t.date) as varchar) || '-' || cast(year(t.date) as varchar) || ' (' || t.treatment || ')')
+        WHEN t.category = 'ART' THEN (cast(month(t.date) as varchar) || '/' || cast(dayofmonth(t.date) as varchar) || '/' || cast(year(t.date) as varchar) || ' (' || t.treatment || ')')
         ELSE NULL
         END, char(10)) as allART,

From 756e99b793daee6705047505959df0e3224823f6 Mon Sep 17 00:00:00 2001
From: bbimber
Date: Thu, 10 Jul 2025 20:32:17 -0700
Subject: [PATCH 9/9] Set bulkLoad=true

---
 mcc/src/org/labkey/mcc/etl/NprcObservationStep.java | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/mcc/src/org/labkey/mcc/etl/NprcObservationStep.java b/mcc/src/org/labkey/mcc/etl/NprcObservationStep.java
index 87a9f7f3..bbfe3f08 100644
--- a/mcc/src/org/labkey/mcc/etl/NprcObservationStep.java
+++ b/mcc/src/org/labkey/mcc/etl/NprcObservationStep.java
@@ -21,6 +21,7 @@
 import org.labkey.api.query.FieldKey;
 import org.labkey.api.query.InvalidKeyException;
 import org.labkey.api.query.QueryService;
+import org.labkey.api.query.QueryUpdateService;
 import org.labkey.api.query.QueryUpdateServiceException;
 import org.labkey.api.query.UserSchema;
 import org.labkey.api.reader.Readers;
@@ -74,6 +75,8 @@ private void processFile(PipelineJob job) throws PipelineJobException
         {
             throw new PipelineJobException("Unable to find table: clinical observations");
         }
+        QueryUpdateService qus = clinicalObs.getUpdateService();
+        qus.setBulkLoad(true);
 
         final List<Map<String, Object>> toInsert = new ArrayList<>();
         final List<Map<String, Object>> toUpdate = new ArrayList<>();
@@ -156,7 +159,7 @@ private void processFile(PipelineJob job) throws PipelineJobException
             job.getLogger().info("Deleting " + toDelete.size() + " rows");
             try
             {
-                clinicalObs.getUpdateService().deleteRows(_containerUser.getUser(), _containerUser.getContainer(), toDelete, null, null);
+                qus.deleteRows(_containerUser.getUser(), _containerUser.getContainer(), toDelete, null, null);
             }
             catch (InvalidKeyException | BatchValidationException | QueryUpdateServiceException | SQLException e)
             {
@@ -170,7 +173,7 @@ private void processFile(PipelineJob job) throws PipelineJobException
             try
             {
                 BatchValidationException bve = new BatchValidationException();
-                clinicalObs.getUpdateService().insertRows(_containerUser.getUser(), _containerUser.getContainer(), toInsert, bve, null, null);
+                qus.insertRows(_containerUser.getUser(), _containerUser.getContainer(), toInsert, bve, null, null);
                 if (bve.hasErrors())
                 {
                     throw bve;
@@ -192,7 +195,7 @@ private void processFile(PipelineJob job) throws PipelineJobException
             try
             {
                 List<Map<String, Object>> oldKeys = toUpdate.stream().map(x -> Map.of("objectid", x.get("objectid"))).collect(Collectors.toList());
-                clinicalObs.getUpdateService().updateRows(_containerUser.getUser(), _containerUser.getContainer(), toUpdate, oldKeys, null, null);
+                qus.updateRows(_containerUser.getUser(), _containerUser.getContainer(), toUpdate, oldKeys, null, null);
             }
             catch (QueryUpdateServiceException | SQLException | BatchValidationException | InvalidKeyException e)
             {