diff --git a/api/src/org/labkey/api/audit/SampleTimelineAuditEvent.java b/api/src/org/labkey/api/audit/SampleTimelineAuditEvent.java index 3ed3637595f..9bc6b30f0e8 100644 --- a/api/src/org/labkey/api/audit/SampleTimelineAuditEvent.java +++ b/api/src/org/labkey/api/audit/SampleTimelineAuditEvent.java @@ -15,8 +15,7 @@ import static org.labkey.api.audit.AuditHandler.DELTA_PROVIDED_DATA_PREFIX; import static org.labkey.api.audit.AuditHandler.PROVIDED_DATA_PREFIX; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.StoredAmount; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.Units; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.*; public class SampleTimelineAuditEvent extends DetailedAuditTypeEvent { @@ -26,7 +25,7 @@ public class SampleTimelineAuditEvent extends DetailedAuditTypeEvent public static final String AMOUNT_AND_UNIT_UPGRADE_COMMENT = "Storage amount unit conversion to base unit during upgrade script."; public static final Set EXCLUDED_DETAIL_FIELDS = Set.of( - "AvailableAliquotVolume", "AvailableAliquotCount", "AliquotCount", "AliquotVolume", "AliquotUnit", + AvailableAliquotVolume.name(), AvailableAliquotCount.name(), AliquotCount.name(), AliquotVolume.name(), AliquotUnit.name(), PROVIDED_DATA_PREFIX + StoredAmount.name(), PROVIDED_DATA_PREFIX + Units.name(), DELTA_PROVIDED_DATA_PREFIX + StoredAmount.name() + DELTA_PROVIDED_DATA_PREFIX + Units.name()); diff --git a/api/src/org/labkey/api/audit/TransactionAuditProvider.java b/api/src/org/labkey/api/audit/TransactionAuditProvider.java index 7555a2dd87f..4e2c1ff9c50 100644 --- a/api/src/org/labkey/api/audit/TransactionAuditProvider.java +++ b/api/src/org/labkey/api/audit/TransactionAuditProvider.java @@ -133,6 +133,7 @@ public enum TransactionDetail Action(false, "The controller-action for this request"), AuditEvents(true, "The types of audit events generated during the transaction"), ClientLibrary(false, "The client library (R, Python, etc) used to perform the 
action"), + DataIteratorPartitions(false, "The number of partitions rows were processed in via data iterator"), DataIteratorUsed(false, "If data iterator was used for insert/update"), EditMethod(false, "The method used to insert/update data from the app (e.g., 'DetailEdit', 'GridEdit', etc)"), ETL(true, "The ETL process name involved in the transaction"), @@ -159,7 +160,7 @@ public static TransactionDetail fromString(String key) return null; } - public static void addAuditDetails(@NotNull Map transactionDetails, @NotNull Map auditDetails) + public static void addAuditDetails(@NotNull Map transactionDetails, @NotNull Map auditDetails) { if (!auditDetails.isEmpty()) { @@ -172,7 +173,7 @@ public static void addAuditDetails(@NotNull Map transactionDetails, @NotNull String auditDetailsJson) + public static void addAuditDetails(@NotNull Map transactionDetails, @NotNull String auditDetailsJson) { if (StringUtils.isEmpty(auditDetailsJson)) return; diff --git a/api/src/org/labkey/api/data/DataColumn.java b/api/src/org/labkey/api/data/DataColumn.java index d2c2f593322..ff2c8c20738 100644 --- a/api/src/org/labkey/api/data/DataColumn.java +++ b/api/src/org/labkey/api/data/DataColumn.java @@ -259,8 +259,8 @@ public void addQueryFieldKeys(Set keys) { keys.add(_boundColumn.getFieldKey()); StringExpression effectiveURL = _boundColumn.getEffectiveURL(); - if (effectiveURL instanceof DetailsURL) - keys.addAll(((DetailsURL) effectiveURL).getFieldKeys()); + if (effectiveURL instanceof DetailsURL url) + keys.addAll(url.getFieldKeys()); } if (_displayColumn != null) keys.add(_displayColumn.getFieldKey()); diff --git a/api/src/org/labkey/api/data/DisplayColumn.java b/api/src/org/labkey/api/data/DisplayColumn.java index a9918cf3cd5..83c18d15813 100644 --- a/api/src/org/labkey/api/data/DisplayColumn.java +++ b/api/src/org/labkey/api/data/DisplayColumn.java @@ -58,7 +58,6 @@ import java.text.DecimalFormatSymbols; import java.text.Format; import java.util.ArrayList; -import 
java.util.Collection; import java.util.Date; import java.util.HashSet; import java.util.LinkedHashSet; @@ -283,23 +282,14 @@ public void addQueryFieldKeys(Set keys) else if (null != _url) se = StringExpressionFactory.createURL(_url); - if (se instanceof StringExpressionFactory.FieldKeyStringExpression) - { - Set fields = ((StringExpressionFactory.FieldKeyStringExpression)se).getFieldKeys(); - keys.addAll(fields); - } + if (se instanceof StringExpressionFactory.FieldKeyStringExpression expression) + keys.addAll(expression.getFieldKeys()); - if (_urlTitle instanceof StringExpressionFactory.FieldKeyStringExpression) - { - Set fields = ((StringExpressionFactory.FieldKeyStringExpression) _urlTitle).getFieldKeys(); - keys.addAll(fields); - } + if (_urlTitle instanceof StringExpressionFactory.FieldKeyStringExpression expression) + keys.addAll(expression.getFieldKeys()); - if (_textExpression instanceof StringExpressionFactory.FieldKeyStringExpression) - { - Set fields = ((StringExpressionFactory.FieldKeyStringExpression) _textExpression).getFieldKeys(); - keys.addAll(fields); - } + if (_textExpression instanceof StringExpressionFactory.FieldKeyStringExpression expression) + keys.addAll(expression.getFieldKeys()); _rowSpanner.addQueryColumns(keys); } @@ -351,13 +341,11 @@ public String getWidth() return _width; } - public void setNoWrap(boolean nowrap) { _nowrap = nowrap; } - // Ideally, this would just set the string... and defer creation of the Format object until render time, when we would // have a Container and other context. That would avoid creating multiple Formats per DisplayColumn. 
@Override @@ -369,7 +357,6 @@ public void setFormatString(String formatString) _tsvFormat = createFormat(formatString, tsvFormatSymbols); } - // java 7 changed to using infinity symbols for formatting, which is challenging for tsv import/export // use old school "Infinity" for now static public DecimalFormatSymbols tsvFormatSymbols = new DecimalFormatSymbols(); @@ -811,7 +798,7 @@ public void renderGridHeaderCell(RenderContext ctx, HtmlWriter out, String heade if (style == null) style = ""; - // 34871: Support for column display width + // Issue 34871: Support for column display width if (!isBlank(getWidth())) style += "; width:" + getWidth() + "px;"; diff --git a/api/src/org/labkey/api/data/validator/RequiredValidator.java b/api/src/org/labkey/api/data/validator/RequiredValidator.java index 5c26a25fe9a..60fde95dc85 100644 --- a/api/src/org/labkey/api/data/validator/RequiredValidator.java +++ b/api/src/org/labkey/api/data/validator/RequiredValidator.java @@ -15,6 +15,7 @@ */ package org.labkey.api.data.validator; +import org.jetbrains.annotations.Nullable; import org.labkey.api.exp.MvFieldWrapper; /** @@ -26,12 +27,19 @@ public class RequiredValidator extends AbstractColumnValidator implements Unders { final boolean allowMV; final boolean allowES; + final String _message; public RequiredValidator(String columnName, boolean allowMissingValueIndicators, boolean allowEmptyString) + { + this(columnName, allowMissingValueIndicators, allowEmptyString, null); + } + + public RequiredValidator(String columnName, boolean allowMissingValueIndicators, boolean allowEmptyString, @Nullable String message) { super(columnName); allowMV = allowMissingValueIndicators; allowES = allowEmptyString; + _message = message; } @Override @@ -59,6 +67,9 @@ protected String _validate(int rowNum, Object value) return null; } + if (_message != null) + return _message; + // DatasetDefinition.importDatasetData:: errors.add("Row " + rowNumber + " does not contain required field " + col.getName() + 
"."); // OntologyManager.insertTabDelimited:: throw new ValidationException("Missing value for required property " + col.getName()); return "Missing value for required property: " + _columnName; diff --git a/api/src/org/labkey/api/dataiterator/AttachmentDataIterator.java b/api/src/org/labkey/api/dataiterator/AttachmentDataIterator.java index 4dc5b12e776..c8065c52077 100644 --- a/api/src/org/labkey/api/dataiterator/AttachmentDataIterator.java +++ b/api/src/org/labkey/api/dataiterator/AttachmentDataIterator.java @@ -202,8 +202,11 @@ public static DataIteratorBuilder getAttachmentDataIteratorBuilder(TableInfo ti, throw new IllegalStateException("Originating data iterator is null"); DataIterator it = builder.getDataIterator(context); + if (it == null) + return null; // can happen if context has errors + Domain domain = ti.getDomain(); - if(domain == null) + if (domain == null) return it; // find attachment columns diff --git a/api/src/org/labkey/api/dataiterator/DetailedAuditLogDataIterator.java b/api/src/org/labkey/api/dataiterator/DetailedAuditLogDataIterator.java index 3f102575ab5..ec3696f2d8b 100644 --- a/api/src/org/labkey/api/dataiterator/DetailedAuditLogDataIterator.java +++ b/api/src/org/labkey/api/dataiterator/DetailedAuditLogDataIterator.java @@ -139,6 +139,10 @@ public static DataIteratorBuilder getDataIteratorBuilder(TableInfo queryTable, @ { return context -> { + DataIterator it = builder.getDataIterator(context); + if (it == null) + return null; // can happen if context has errors + AuditBehaviorType auditType = AuditBehaviorType.NONE; if (queryTable.supportsAuditTracking()) auditType = queryTable.getEffectiveAuditBehavior((AuditBehaviorType) context.getConfigParameter(AuditConfigs.AuditBehavior)); @@ -146,12 +150,12 @@ public static DataIteratorBuilder getDataIteratorBuilder(TableInfo queryTable, @ // Detailed auditing and not set to bulk load in ETL if (auditType == DETAILED && 
!context.getConfigParameterBoolean(QueryUpdateService.ConfigParameters.BulkLoad) && !context.getConfigParameterBoolean(QueryUpdateService.ConfigParameters.ByPassAudit)) { - DataIterator it = builder.getDataIterator(context); DataIterator in = DataIteratorUtil.wrapMap(it, true); return new DetailedAuditLogDataIterator(in, context, queryTable, insertOption.auditAction, user, container, extractProvidedValues); } + // Nothing to do, so just return input DataIterator - return builder.getDataIterator(context); + return it; }; } @@ -168,5 +172,4 @@ public boolean supportsGetExistingRecord() { return _data.supportsGetExistingRecord(); } - } diff --git a/api/src/org/labkey/api/dataiterator/ExistingRecordDataIterator.java b/api/src/org/labkey/api/dataiterator/ExistingRecordDataIterator.java index 17d143deb40..c39acf459a1 100644 --- a/api/src/org/labkey/api/dataiterator/ExistingRecordDataIterator.java +++ b/api/src/org/labkey/api/dataiterator/ExistingRecordDataIterator.java @@ -32,7 +32,6 @@ import java.sql.SQLException; import java.util.ArrayList; -import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; @@ -95,15 +94,23 @@ public abstract class ExistingRecordDataIterator extends WrapperDataIterator var map = DataIteratorUtil.createColumnNameMap(in); containerCol = map.get("Container"); - Collection keyNames = null==keys ? target.getPkColumnNames() : keys; + Set keyNames = new CaseInsensitiveHashSet(); + if (keys == null) + keyNames.addAll(target.getPkColumnNames()); + else + keyNames.addAll(keys); if (sharedKeys != null) _sharedKeys.addAll(sharedKeys); - _dataColumnNames.addAll(detailed ? 
map.keySet() : keyNames); + if (detailed) + _dataColumnNames.addAll(map.keySet()); for (String name : keyNames) { + if (!map.containsKey(name)) + continue; + Integer index = map.get(name); ColumnInfo col = target.getColumn(name); if (null == index || null == col) @@ -114,7 +121,11 @@ public abstract class ExistingRecordDataIterator extends WrapperDataIterator } pkSuppliers.add(in.getSupplier(index)); pkColumns.add(col); + _dataColumnNames.add(name); } + + if (pkColumns.isEmpty()) + throw new IllegalArgumentException("At least one key column is required."); } @Override diff --git a/api/src/org/labkey/api/dataiterator/SampleUpdateAddColumnsDataIterator.java b/api/src/org/labkey/api/dataiterator/SampleUpdateAddColumnsDataIterator.java index d1e3197125f..68a44b09d91 100644 --- a/api/src/org/labkey/api/dataiterator/SampleUpdateAddColumnsDataIterator.java +++ b/api/src/org/labkey/api/dataiterator/SampleUpdateAddColumnsDataIterator.java @@ -5,28 +5,25 @@ import org.labkey.api.collections.Sets; import org.labkey.api.data.ColumnInfo; import org.labkey.api.data.CompareType; +import org.labkey.api.data.JdbcType; import org.labkey.api.data.SimpleFilter; import org.labkey.api.data.TableInfo; import org.labkey.api.data.TableSelector; import org.labkey.api.exp.api.ExperimentService; import org.labkey.api.query.BatchValidationException; import org.labkey.api.query.FieldKey; -import org.labkey.api.util.StringUtilsLabKey; -import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; +import java.util.Set; import java.util.function.Supplier; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.*; import static org.labkey.api.util.IntegerUtils.asInteger; public class SampleUpdateAddColumnsDataIterator extends WrapperDataIterator { - public static final String ALIQUOTED_FROM_LSID_COLUMN_NAME = "AliquotedFromLSID"; - public static final String ROOT_ROW_ID_COLUMN_NAME = "RootMaterialRowId"; public static final String CURRENT_SAMPLE_STATUS_COLUMN_NAME = 
"_CurrentSampleState_"; - static final String KEY_COLUMN_NAME = "Name"; - static final String KEY_COLUMN_LSID = "LSID"; final CachingDataIterator _unwrapped; final TableInfo target; @@ -36,7 +33,6 @@ public class SampleUpdateAddColumnsDataIterator extends WrapperDataIterator final int _aliquotedFromColIndex; final int _rootMaterialRowIdColIndex; final int _currentSampleStateColIndex; - final boolean _useLsid; // prefetch of existing records int lastPrefetchRowNumber = -1; @@ -44,27 +40,22 @@ public class SampleUpdateAddColumnsDataIterator extends WrapperDataIterator final IntHashMap aliquotRoots = new IntHashMap<>(); final IntHashMap sampleState = new IntHashMap<>(); - public SampleUpdateAddColumnsDataIterator(DataIterator in, TableInfo target, long sampleTypeId, boolean useLsid) + public SampleUpdateAddColumnsDataIterator(DataIterator in, TableInfo target, long sampleTypeId, String keyColumnName) { super(in); - this._unwrapped = (CachingDataIterator)in; - this.target = target; - this._sampleTypeId = sampleTypeId; - this._useLsid = useLsid; var map = DataIteratorUtil.createColumnNameMap(in); - this._aliquotedFromColIndex = map.get(ALIQUOTED_FROM_LSID_COLUMN_NAME); - this._rootMaterialRowIdColIndex = map.get(ROOT_ROW_ID_COLUMN_NAME); + this._aliquotedFromColIndex = map.get(AliquotedFromLSID.name()); + this._rootMaterialRowIdColIndex = map.get(RootMaterialRowId.name()); this._currentSampleStateColIndex = map.get(CURRENT_SAMPLE_STATUS_COLUMN_NAME); - String keyCol = useLsid ? 
KEY_COLUMN_LSID : KEY_COLUMN_NAME; - Integer index = map.get(keyCol); - ColumnInfo col = target.getColumn(keyCol); + Integer index = map.get(keyColumnName); + ColumnInfo col = target.getColumn(keyColumnName); if (null == index || null == col) - throw new IllegalArgumentException("Key column not found: " + keyCol); + throw new IllegalArgumentException("Key column not found: " + keyColumnName); pkSupplier = in.getSupplier(index); pkColumn = col; } @@ -119,20 +110,24 @@ protected void prefetchExisting() throws BatchValidationException sampleState.clear(); int rowsToFetch = 50; - Map rowKeyMap = new LinkedHashMap<>(); - Map keyRowMap = new LinkedHashMap<>(); + String keyFieldName = pkColumn.getName(); + boolean numericKey = pkColumn.isNumericType(); + JdbcType jdbcType = pkColumn.getJdbcType(); + Map rowKeyMap = new LinkedHashMap<>(); + Map keyRowMap = new LinkedHashMap<>(); do { lastPrefetchRowNumber = asInteger(_delegate.get(0)); Object keyObj = pkSupplier.get(); + Object key = jdbcType.convert(keyObj); - String key = null; - if (keyObj instanceof String) - key = StringUtilsLabKey.unquoteString((String) keyObj); - else if (keyObj instanceof Number) - key = keyObj.toString(); - if (StringUtils.isEmpty(key)) - throw new IllegalArgumentException(KEY_COLUMN_NAME + " value not provided on row " + lastPrefetchRowNumber); + if (numericKey) + { + if (null == key) + throw new IllegalArgumentException(keyFieldName + " value not provided on row " + lastPrefetchRowNumber); + } + else if (StringUtils.isEmpty((String) key)) + throw new IllegalArgumentException(keyFieldName + " value not provided on row " + lastPrefetchRowNumber); rowKeyMap.put(lastPrefetchRowNumber, key); keyRowMap.put(key, lastPrefetchRowNumber); @@ -142,20 +137,19 @@ else if (keyObj instanceof Number) } while (--rowsToFetch > 0 && _delegate.next()); - String keyCol = _useLsid ? 
KEY_COLUMN_LSID : KEY_COLUMN_NAME; - SimpleFilter filter = new SimpleFilter(FieldKey.fromParts("MaterialSourceId"), _sampleTypeId); - FieldKey keyField = FieldKey.fromParts(keyCol); - filter.addCondition(keyField, rowKeyMap.values(), CompareType.IN); + SimpleFilter filter = new SimpleFilter(MaterialSourceId.fieldKey(), _sampleTypeId); + filter.addCondition(pkColumn.getFieldKey(), rowKeyMap.values(), CompareType.IN); filter.addCondition(FieldKey.fromParts("Container"), target.getUserSchema().getContainer()); - Map[] results = new TableSelector(ExperimentService.get().getTinfoMaterial(), Sets.newCaseInsensitiveHashSet(keyCol, "aliquotedfromlsid", "rootMaterialRowId", "sampleState"), filter, null).getMapArray(); + Set columns = Sets.newCaseInsensitiveHashSet(keyFieldName, AliquotedFromLSID.name(), RootMaterialRowId.name(), SampleState.name()); + Map[] results = new TableSelector(ExperimentService.get().getTinfoMaterial(), columns, filter, null).getMapArray(); for (Map result : results) { - String key = (String) result.get(keyCol); - Object aliquotedFromLSIDObj = result.get("aliquotedFromLSID"); - Object rootMaterialRowIdObj = result.get("rootMaterialRowId"); - Object sampleStateObj = result.get("sampleState"); + Object key = result.get(keyFieldName); + Object aliquotedFromLSIDObj = result.get(AliquotedFromLSID.name()); + Object rootMaterialRowIdObj = result.get(RootMaterialRowId.name()); + Object sampleStateObj = result.get(SampleState.name()); Integer rowInd = keyRowMap.get(key); if (aliquotedFromLSIDObj != null) aliquotParents.put(rowInd, (String) aliquotedFromLSIDObj); @@ -180,5 +174,4 @@ public boolean next() throws BatchValidationException prefetchExisting(); return ret; } - } diff --git a/api/src/org/labkey/api/dataiterator/TriggerDataBuilderHelper.java b/api/src/org/labkey/api/dataiterator/TriggerDataBuilderHelper.java index ba497930564..7712726b939 100644 --- a/api/src/org/labkey/api/dataiterator/TriggerDataBuilderHelper.java +++ 
b/api/src/org/labkey/api/dataiterator/TriggerDataBuilderHelper.java @@ -32,11 +32,6 @@ import static org.labkey.api.admin.FolderImportContext.IS_NEW_FOLDER_IMPORT_KEY; import static org.labkey.api.util.IntegerUtils.asInteger; -/** - * User: matthewb - * Date: 2011-09-07 - * Time: 5:14 PM - */ public class TriggerDataBuilderHelper { final Container _c; @@ -129,14 +124,21 @@ class Before implements DataIteratorBuilder @Override public DataIterator getDataIterator(DataIteratorContext context) { - DataIterator pre = _pre.getDataIterator(context); + DataIterator di = _pre.getDataIterator(context); if (!_target.hasTriggers(_c)) - return pre; - pre = LoggingDataIterator.wrap(pre); + return di; + di = LoggingDataIterator.wrap(di); - Set mergeKeys = null; - if (_target instanceof ExpTable) - mergeKeys = ((ExpTable)_target).getAltMergeKeys(context); + Set existingRecordKeyColumnNames = null; + Set sharedKeys = null; + boolean isMergeOrUpdate = context.getInsertOption().allowUpdate; + + if (isMergeOrUpdate && _target instanceof ExpTable expTable) + { + Map colNameMap = DataIteratorUtil.createColumnNameMap(di); + existingRecordKeyColumnNames = expTable.getExistingRecordKeyColumnNames(context, colNameMap); + sharedKeys = expTable.getExistingRecordSharedKeyColumnNames(); + } boolean isNewFolderImport = false; if (_extraContext != null && _extraContext.get(IS_NEW_FOLDER_IMPORT_KEY) != null) @@ -144,18 +146,19 @@ public DataIterator getDataIterator(DataIteratorContext context) isNewFolderImport = (boolean) _extraContext.get(IS_NEW_FOLDER_IMPORT_KEY); } - boolean skipExistingRecord = !context.getInsertOption().allowUpdate || mergeKeys == null || isNewFolderImport; - DataIterator coerce = new CoerceDataIterator(pre, context, _target, !context.getInsertOption().updateOnly); + di = LoggingDataIterator.wrap(new CoerceDataIterator(di, context, _target, !context.getInsertOption().updateOnly)); context.setWithLookupRemapping(false); - coerce = LoggingDataIterator.wrap(coerce); - if 
(skipExistingRecord) - return LoggingDataIterator.wrap(new BeforeIterator(new CachingDataIterator(coerce), context)); - else if (context.getInsertOption().mergeRows && !_target.supportsInsertOption(QueryUpdateService.InsertOption.MERGE)) - return LoggingDataIterator.wrap(new BeforeIterator(coerce, context)); + // Skip existing records + if (!context.getInsertOption().allowUpdate || existingRecordKeyColumnNames == null || isNewFolderImport) + return LoggingDataIterator.wrap(new BeforeIterator(new CachingDataIterator(di), context)); + + // Merge request but merge is not supported + if (context.getInsertOption().mergeRows && !_target.supportsInsertOption(QueryUpdateService.InsertOption.MERGE)) + return LoggingDataIterator.wrap(new BeforeIterator(di, context)); - coerce = ExistingRecordDataIterator.createBuilder(coerce, _target, mergeKeys, null, true).getDataIterator(context); - return LoggingDataIterator.wrap(new BeforeIterator(new CachingDataIterator(coerce), context)); + di = ExistingRecordDataIterator.createBuilder(di, _target, existingRecordKeyColumnNames, sharedKeys, true).getDataIterator(context); + return LoggingDataIterator.wrap(new BeforeIterator(new CachingDataIterator(di), context)); } } diff --git a/api/src/org/labkey/api/exp/api/SampleTypeDomainKind.java b/api/src/org/labkey/api/exp/api/SampleTypeDomainKind.java index 4c58d2c7684..da47258672a 100644 --- a/api/src/org/labkey/api/exp/api/SampleTypeDomainKind.java +++ b/api/src/org/labkey/api/exp/api/SampleTypeDomainKind.java @@ -85,7 +85,7 @@ public class SampleTypeDomainKind extends AbstractDomainKind BASE_PROPERTIES; private static final Set INDEXES; @@ -105,7 +105,6 @@ public class SampleTypeDomainKind extends AbstractDomainKind { @@ -60,7 +61,12 @@ enum Column LastIndexed, Inputs, Outputs, - Properties + Properties; + + public FieldKey fieldKey() + { + return FieldKey.fromParts(name()); + } } void setExperiment(ExpExperiment experiment); diff --git 
a/api/src/org/labkey/api/exp/query/ExpMaterialTable.java b/api/src/org/labkey/api/exp/query/ExpMaterialTable.java index 0956af9d020..11ca4dd9e69 100644 --- a/api/src/org/labkey/api/exp/query/ExpMaterialTable.java +++ b/api/src/org/labkey/api/exp/query/ExpMaterialTable.java @@ -34,6 +34,7 @@ enum Column AliquotedFromLSID, AvailableAliquotCount, AvailableAliquotVolume(true), + CpasType, // database table only Created, CreatedBy, Description, @@ -48,6 +49,7 @@ enum Column Modified, ModifiedBy, Name, + ObjectId, // database table only Outputs, Properties, Property, @@ -60,10 +62,12 @@ enum Column RootMaterialRowId, RowId, Run, + RunId, // database table only RunApplication, RunApplicationOutput, SampleSet, SampleState, + SourceApplicationId, // database table only SourceApplicationInput, SourceProtocolApplication, SourceProtocolLSID, @@ -72,7 +76,9 @@ enum Column private boolean _hasUnit = false; private final String _label; - Column() { + + Column() + { _label = ColumnInfo.labelFromName(name()); } diff --git a/api/src/org/labkey/api/exp/query/ExpTable.java b/api/src/org/labkey/api/exp/query/ExpTable.java index 9d81d735dbe..cfa4cab2b93 100644 --- a/api/src/org/labkey/api/exp/query/ExpTable.java +++ b/api/src/org/labkey/api/exp/query/ExpTable.java @@ -33,6 +33,7 @@ import org.labkey.api.query.FieldKey; import org.labkey.api.security.permissions.Permission; +import java.util.Map; import java.util.Set; public interface ExpTable extends ContainerFilterable, TableInfo @@ -94,7 +95,6 @@ default MutableColumnInfo addColumns(Domain domain, @Nullable String legacyName) MutableColumnInfo addColumns(Domain domain, @Nullable String legacyName,@Nullable ContainerFilter cf); - void setTitle(String title); void setDescription(String description); @@ -125,7 +125,6 @@ default void setFilterPatterns(String columnName, String... 
patterns) // by default we do nothing } - /** returns a column that wraps objectid, this is only required to support the expObject() table method */ default ColumnInfo getExpObjectColumn() { @@ -137,6 +136,22 @@ default ColumnInfo getExpObjectColumn() return null; } + /** + * Returns the set of key column names for this table to be specified as key columns for the ExistingRecordDataIterator. + */ + @Nullable default Set getExistingRecordKeyColumnNames(DataIteratorContext context, Map colNameMap) + { + return null; + } + + /** + * Returns the set of key column names for this table to be specified as shared key columns for the ExistingRecordDataIterator. + */ + @Nullable default Set getExistingRecordSharedKeyColumnNames() + { + return null; + } + class ExpObjectDataColumn extends DataColumn { public ExpObjectDataColumn(ColumnInfo colInfo) diff --git a/api/src/org/labkey/api/query/AbstractQueryUpdateService.java b/api/src/org/labkey/api/query/AbstractQueryUpdateService.java index bc11979ccbf..1b64835bb42 100644 --- a/api/src/org/labkey/api/query/AbstractQueryUpdateService.java +++ b/api/src/org/labkey/api/query/AbstractQueryUpdateService.java @@ -269,17 +269,24 @@ protected final DataIteratorContext getDataIteratorContext(BatchValidationExcept context.setInsertOption(forImport); context.setConfigParameters(configParameters); configureDataIteratorContext(context); - if (configParameters != null) + recordDataIteratorUsed(configParameters); + + return context; + } + + protected void recordDataIteratorUsed(@Nullable Map configParameters) + { + if (configParameters == null) + return; + + try { - try - { - configParameters.put(TransactionAuditProvider.TransactionDetail.DataIteratorUsed, true); - } catch (UnsupportedOperationException ignore) - { - // configParameters is immutable, likely originated from a junit test - } + configParameters.put(TransactionAuditProvider.TransactionDetail.DataIteratorUsed, true); + } + catch (UnsupportedOperationException ignore) + { + // 
configParameters is immutable, likely originated from a junit test } - return context; } /** @@ -424,22 +431,11 @@ protected int _importRowsUsingDIB(User user, Container container, DataIteratorBu if (context.getErrors().hasErrors()) return 0; - else - { - if (!context.isCrossTypeImport() && !context.isCrossFolderImport()) // audit handled at table level - { - AuditBehaviorType auditType = (AuditBehaviorType) context.getConfigParameter(DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior); - String auditUserComment = (String) context.getConfigParameter(DetailedAuditLogDataIterator.AuditConfigs.AuditUserComment); - boolean skipAuditLevelCheck = false; - if (context.getConfigParameterBoolean(QueryUpdateService.ConfigParameters.BulkLoad)) - { - if (getQueryTable().getEffectiveAuditBehavior(auditType) == AuditBehaviorType.DETAILED) // allow ETL to demote audit level for bulkLoad - skipAuditLevelCheck = true; - } - getQueryTable().getAuditHandler(auditType).addSummaryAuditEvent(user, container, getQueryTable(), context.getInsertOption().auditAction, count, auditType, auditUserComment, skipAuditLevelCheck); - } - return count; - } + + if (!context.getConfigParameterBoolean(ConfigParameters.SkipAuditSummary)) + _addSummaryAuditEvent(container, user, context, count); + + return count; } protected DataIteratorBuilder preTriggerDataIterator(DataIteratorBuilder in, DataIteratorContext context) @@ -452,7 +448,6 @@ protected DataIteratorBuilder postTriggerDataIterator(DataIteratorBuilder out, D return out; } - /** this is extracted so subclasses can add wrap */ protected int _pump(DataIteratorBuilder etl, final @Nullable ArrayList> rows, DataIteratorContext context) { @@ -808,7 +803,7 @@ final protected Map updateOneRow(User user, Container container, // used by updateRows to check if all rows have the same set of keys // prepared statement can only be used to updateRows if all rows have the same set of keys - protected boolean hasUniformKeys(List> rowsToUpdate) + protected 
static boolean hasUniformKeys(List> rowsToUpdate) { if (rowsToUpdate == null || rowsToUpdate.isEmpty()) return false; @@ -1157,6 +1152,22 @@ static FileLike checkFileUnderRoot(Container container, FileLike file) throws Ex return file; } + protected void _addSummaryAuditEvent(Container container, User user, DataIteratorContext context, int count) + { + if (!context.isCrossTypeImport() && !context.isCrossFolderImport()) // audit handled at table level + { + AuditBehaviorType auditType = (AuditBehaviorType) context.getConfigParameter(DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior); + String auditUserComment = (String) context.getConfigParameter(DetailedAuditLogDataIterator.AuditConfigs.AuditUserComment); + boolean skipAuditLevelCheck = false; + if (context.getConfigParameterBoolean(QueryUpdateService.ConfigParameters.BulkLoad)) + { + if (getQueryTable().getEffectiveAuditBehavior(auditType) == AuditBehaviorType.DETAILED) // allow ETL to demote audit level for bulkLoad + skipAuditLevelCheck = true; + } + getQueryTable().getAuditHandler(auditType).addSummaryAuditEvent(user, container, getQueryTable(), context.getInsertOption().auditAction, count, auditType, auditUserComment, skipAuditLevelCheck); + } + } + /** * Is used by the AttachmentDataIterator to point to the location of the serialized * attachment files. 
diff --git a/api/src/org/labkey/api/query/DefaultQueryUpdateService.java b/api/src/org/labkey/api/query/DefaultQueryUpdateService.java index adb096bcb33..5538e77a783 100644 --- a/api/src/org/labkey/api/query/DefaultQueryUpdateService.java +++ b/api/src/org/labkey/api/query/DefaultQueryUpdateService.java @@ -21,7 +21,6 @@ import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.labkey.api.attachments.AttachmentFile; -import org.labkey.api.audit.TransactionAuditProvider; import org.labkey.api.collections.ArrayListMap; import org.labkey.api.collections.CaseInsensitiveHashMap; import org.labkey.api.collections.CaseInsensitiveMapWrapper; @@ -71,7 +70,6 @@ import java.sql.SQLException; import java.util.ArrayList; import java.util.Collections; -import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -935,19 +933,4 @@ protected void configureCrossFolderImport(DataIteratorBuilder rows, DataIterator context.setCrossFolderImport(false); } } - - protected void recordDataIteratorUsed(@Nullable Map configParameters) - { - if (configParameters != null) - { - try - { - configParameters.put(TransactionAuditProvider.TransactionDetail.DataIteratorUsed, true); - } catch (UnsupportedOperationException ignore) - { - // configParameters is immutable, likely originated from a junit test - } - } - } - } diff --git a/api/src/org/labkey/api/query/QueryUpdateService.java b/api/src/org/labkey/api/query/QueryUpdateService.java index 53925404dec..09c9e4f6495 100644 --- a/api/src/org/labkey/api/query/QueryUpdateService.java +++ b/api/src/org/labkey/api/query/QueryUpdateService.java @@ -114,7 +114,8 @@ enum ConfigParameters PreferPKOverObjectUriAsKey, // (Bool) Prefer getPkColumnNames instead of getObjectURIColumnName to use as keys SkipReselectRows, // (Bool) If true, skip qus.getRows and use raw returned rows. 
Applicable for CommandType.insert/insertWithKeys/update/updateChangingKeys TargetContainer, - ByPassAudit // (Bool) If true, skip DetailedAuditLogDataIterator. For internal use only, don't expose for API. + ByPassAudit, // (Bool) If true, skip DetailedAuditLogDataIterator. For internal use only, don't expose for API. + SkipAuditSummary, // (Bool) If true, skip audit summary logging. For internal use only, don't expose for API. } diff --git a/assay/package-lock.json b/assay/package-lock.json index 5ee98e7f352..a5783fdc908 100644 --- a/assay/package-lock.json +++ b/assay/package-lock.json @@ -8,7 +8,7 @@ "name": "assay", "version": "0.0.0", "dependencies": { - "@labkey/components": "6.72.0" + "@labkey/components": "7.3.1" }, "devDependencies": { "@labkey/build": "8.7.0", @@ -2525,9 +2525,9 @@ } }, "node_modules/@labkey/components": { - "version": "6.72.0", - "resolved": "https://labkey.jfrog.io/artifactory/api/npm/libs-client/@labkey/components/-/@labkey/components-6.72.0.tgz", - "integrity": "sha512-ouYWpvQBF0GZ/j/ErGRcAOHTAwkGP/fSA4hDKaql59U1kMGI7gZdoHZNb5aX0YWX+FLor8FDqLXz9WWmkykEWw==", + "version": "7.3.1", + "resolved": "https://labkey.jfrog.io/artifactory/api/npm/libs-client/@labkey/components/-/@labkey/components-7.3.1.tgz", + "integrity": "sha512-rGH7uNiU3yWMhIUbcQZtctom8dbfrFFzhrHhcOcqzSNEQ6mH1/XXOPGmNdSsgAwKidqdgDfkTd5y5QC+I7PBmA==", "license": "SEE LICENSE IN LICENSE.txt", "dependencies": { "@hello-pangea/dnd": "18.0.1", diff --git a/assay/package.json b/assay/package.json index 9feb45a746a..e0cddb3cbe4 100644 --- a/assay/package.json +++ b/assay/package.json @@ -12,7 +12,7 @@ "clean": "rimraf resources/web/assay/gen && rimraf resources/views/gen && rimraf resources/web/gen" }, "dependencies": { - "@labkey/components": "6.72.0" + "@labkey/components": "7.3.1" }, "devDependencies": { "@labkey/build": "8.7.0", diff --git a/core/package-lock.json b/core/package-lock.json index 2753d73b83a..763936d194f 100644 --- a/core/package-lock.json +++ 
b/core/package-lock.json @@ -8,7 +8,7 @@ "name": "labkey-core", "version": "0.0.0", "dependencies": { - "@labkey/components": "6.72.0", + "@labkey/components": "7.3.1", "@labkey/themes": "1.5.0" }, "devDependencies": { @@ -3547,9 +3547,9 @@ } }, "node_modules/@labkey/components": { - "version": "6.72.0", - "resolved": "https://labkey.jfrog.io/artifactory/api/npm/libs-client/@labkey/components/-/@labkey/components-6.72.0.tgz", - "integrity": "sha512-ouYWpvQBF0GZ/j/ErGRcAOHTAwkGP/fSA4hDKaql59U1kMGI7gZdoHZNb5aX0YWX+FLor8FDqLXz9WWmkykEWw==", + "version": "7.3.1", + "resolved": "https://labkey.jfrog.io/artifactory/api/npm/libs-client/@labkey/components/-/@labkey/components-7.3.1.tgz", + "integrity": "sha512-rGH7uNiU3yWMhIUbcQZtctom8dbfrFFzhrHhcOcqzSNEQ6mH1/XXOPGmNdSsgAwKidqdgDfkTd5y5QC+I7PBmA==", "license": "SEE LICENSE IN LICENSE.txt", "dependencies": { "@hello-pangea/dnd": "18.0.1", diff --git a/core/package.json b/core/package.json index 9e7daa9b776..31f80eaf0d3 100644 --- a/core/package.json +++ b/core/package.json @@ -53,7 +53,7 @@ } }, "dependencies": { - "@labkey/components": "6.72.0", + "@labkey/components": "7.3.1", "@labkey/themes": "1.5.0" }, "devDependencies": { diff --git a/experiment/package-lock.json b/experiment/package-lock.json index af738667dc1..d189bfe57f7 100644 --- a/experiment/package-lock.json +++ b/experiment/package-lock.json @@ -8,7 +8,7 @@ "name": "experiment", "version": "0.0.0", "dependencies": { - "@labkey/components": "6.72.0" + "@labkey/components": "7.3.1" }, "devDependencies": { "@labkey/build": "8.7.0", @@ -3314,9 +3314,9 @@ } }, "node_modules/@labkey/components": { - "version": "6.72.0", - "resolved": "https://labkey.jfrog.io/artifactory/api/npm/libs-client/@labkey/components/-/@labkey/components-6.72.0.tgz", - "integrity": "sha512-ouYWpvQBF0GZ/j/ErGRcAOHTAwkGP/fSA4hDKaql59U1kMGI7gZdoHZNb5aX0YWX+FLor8FDqLXz9WWmkykEWw==", + "version": "7.3.1", + "resolved": 
"https://labkey.jfrog.io/artifactory/api/npm/libs-client/@labkey/components/-/@labkey/components-7.3.1.tgz", + "integrity": "sha512-rGH7uNiU3yWMhIUbcQZtctom8dbfrFFzhrHhcOcqzSNEQ6mH1/XXOPGmNdSsgAwKidqdgDfkTd5y5QC+I7PBmA==", "license": "SEE LICENSE IN LICENSE.txt", "dependencies": { "@hello-pangea/dnd": "18.0.1", diff --git a/experiment/package.json b/experiment/package.json index 00d4a9b2001..b6fe58b8519 100644 --- a/experiment/package.json +++ b/experiment/package.json @@ -13,7 +13,7 @@ "test-integration": "cross-env NODE_ENV=test jest --ci --runInBand -c test/js/jest.config.integration.js" }, "dependencies": { - "@labkey/components": "6.72.0" + "@labkey/components": "7.3.1" }, "devDependencies": { "@labkey/build": "8.7.0", diff --git a/experiment/resources/schemas/dbscripts/postgresql/exp-25.014-25.015.sql b/experiment/resources/schemas/dbscripts/postgresql/exp-25.014-25.015.sql new file mode 100644 index 00000000000..249f1a480a6 --- /dev/null +++ b/experiment/resources/schemas/dbscripts/postgresql/exp-25.014-25.015.sql @@ -0,0 +1 @@ +SELECT core.executeJavaUpgradeCode('dropProvisionedSampleTypeLsidColumn'); diff --git a/experiment/resources/schemas/dbscripts/sqlserver/exp-25.014-25.015.sql b/experiment/resources/schemas/dbscripts/sqlserver/exp-25.014-25.015.sql new file mode 100644 index 00000000000..01ff3f93ca7 --- /dev/null +++ b/experiment/resources/schemas/dbscripts/sqlserver/exp-25.014-25.015.sql @@ -0,0 +1 @@ +EXEC core.executeJavaUpgradeCode 'dropProvisionedSampleTypeLsidColumn'; diff --git a/experiment/src/client/test/integration/SampleTypeCrud.ispec.ts b/experiment/src/client/test/integration/SampleTypeCrud.ispec.ts index 46c70b1059f..bb71be1260a 100644 --- a/experiment/src/client/test/integration/SampleTypeCrud.ispec.ts +++ b/experiment/src/client/test/integration/SampleTypeCrud.ispec.ts @@ -10,8 +10,8 @@ import { verifyRequiredLineageInsertUpdate } from './utils'; import { caseInsensitive, SAMPLE_TYPE_DESIGNER_ROLE } from '@labkey/components'; +const { 
importSample, insertRows } = ExperimentCRUDUtils; -// @ts-expect-error process is not available in a browser environment const server = hookServer(process.env); const PROJECT_NAME = 'SampleTypeCrudJestProject'; @@ -314,7 +314,6 @@ describe('Sample Type Designer', () => { }); - describe('Import with update / merge', () => { it ("Issue 52922: Blank sample id in the file are getting ignored in update from file", async () => { const BLANK_KEY_UPDATE_ERROR = 'Name value not provided'; @@ -387,9 +386,110 @@ describe('Import with update / merge', () => { expect(successResp.text.indexOf('"success" : true') > -1).toBeTruthy(); }); + it('Support RowId lookup and renaming', async () => { + const dataType = SAMPLE_ALIQUOT_IMPORT_NO_NAME_PATTERN_NAME; + const initialName = 'RowIdLookupTest'; + const newName = 'RenamedViaRowId'; -}); + const rows = await insertRows(server, [{ + name: initialName, + description: 'Original Description' + }], 'samples', dataType, topFolderOptions, editorUserOptions); + + // Capture the generated RowId from the insert response + // Note: Assuming standard response structure where 'rows' array contains the returned data + const rowId = caseInsensitive(rows[0], 'rowId'); + expect(rowId).toBeDefined(); + + // Test: Update Description using ONLY RowId (Name column omitted) + // This validates that the importer can lookup by RowId alone + let updateTsv = `RowId\tDescription\n${rowId}\tUpdated Description via RowId`; + let resp = await importSample(server, updateTsv, dataType, 'UPDATE', topFolderOptions, editorUserOptions); + expect(resp.text.indexOf('"success" : true') > -1).toBeTruthy(); + + // Test: Update Name using RowId + Name (Renaming) + // This validates that providing both keys looks up by RowId and updates the Name + updateTsv = `RowId\tName\n${rowId}\t${newName}`; + resp = await importSample(server, updateTsv, dataType, 'UPDATE', topFolderOptions, editorUserOptions); + expect(resp.text.indexOf('"success" : true') > -1).toBeTruthy(); + + // 
Verify Rename: Attempt to update using the NEW Name + // If the rename worked, looking up by the new Name should succeed + updateTsv = `Name\tDescription\n${newName}\tDescription after rename`; + resp = await importSample(server, updateTsv, dataType, 'UPDATE', topFolderOptions, editorUserOptions); + expect(resp.text.indexOf('"success" : true') > -1).toBeTruthy(); + + // Verify Rename: Attempt to update using the OLD Name + // This should now fail because the record was renamed, proving the old key is gone + updateTsv = `Name\tDescription\n${initialName}\tShould fail`; + resp = await importSample(server, updateTsv, dataType, 'UPDATE', topFolderOptions, editorUserOptions); + expect(resp.text.indexOf('Sample does not exist') > -1).toBeTruthy(); + }); + it('Error when supplying RowId during MERGE', async () => { + const dataType = SAMPLE_ALIQUOT_IMPORT_NO_NAME_PATTERN_NAME; + const sampleName = 'MergeRowIdErrorTest'; + + const rows = await insertRows(server, [{ + name: sampleName, + description: 'created' + }], 'samples', dataType, topFolderOptions, editorUserOptions); + + const rowId = caseInsensitive(rows[0], 'rowId'); + expect(rowId).toBeDefined(); + + // MERGE with RowId should fail + // Even if the name matches and rowId is correct, the presence of the column should trigger the error + const mergeTsv = `RowId\tName\tDescription\n${rowId}\t${sampleName}\tShould fail`; + const resp = await importSample(server, mergeTsv, dataType, 'MERGE', topFolderOptions, editorUserOptions); + + // Check for the specific error message + expect(resp.text.indexOf('RowId is not accepted when merging samples. Specify only the sample name instead.') > -1).toBeTruthy(); + }); + it('Error when supplying LSID without RowId or Name', async () => { + const dataType = SAMPLE_ALIQUOT_IMPORT_NO_NAME_PATTERN_NAME; + const sampleName = 'LsidKeyErrorTest'; + const LSID_UPDATE_ERROR = "LSID is no longer accepted as a key for sample update. 
Specify a RowId or Name instead."; + const LSID_MERGE_ERROR = "LSID is no longer accepted as a key for sample merge. Specify a RowId or Name instead."; + + const rows = await insertRows(server, [{ + name: sampleName, + description: 'created' + }], 'samples', dataType, topFolderOptions, editorUserOptions); + + const lsid = caseInsensitive(rows[0], 'lsid'); + expect(lsid).toBeDefined(); + // UPDATE: LSID provided as key (Name/RowId missing) + const tsv = `LSID\tDescription\n${lsid}\tShould fail`; + let resp = await importSample(server, tsv, dataType, 'UPDATE', topFolderOptions, editorUserOptions); + expect(resp.text.indexOf(LSID_UPDATE_ERROR) > -1).toBeTruthy(); + + // MERGE: LSID provided as key (Name/RowId missing) + resp = await importSample(server, tsv, dataType, 'MERGE', topFolderOptions, editorUserOptions); + expect(resp.text.indexOf(LSID_MERGE_ERROR) > -1).toBeTruthy(); + }); + it('Cross-type update should not be accepted', async () => { + // Arrange + const firstSampleType = SAMPLE_ALIQUOT_IMPORT_TYPE_NAME; + const secondSampleType = SAMPLE_ALIQUOT_IMPORT_NO_NAME_PATTERN_NAME; + const [firstSample] = await insertRows(server, [{ name: 'FL-1', description: 'Yolo' }], 'samples', firstSampleType, topFolderOptions, editorUserOptions); + const [secondSample] = await insertRows(server, [{ name: 'SP-10', description: 'Hello' }], 'samples', secondSampleType, topFolderOptions, editorUserOptions); + const firstRowId = caseInsensitive(firstSample, 'rowId'); + const secondRowId = caseInsensitive(secondSample, 'rowId'); + + let tsv = 'RowId\tSampleType\tDescription\n'; + tsv += `${firstRowId}\t${firstSampleType}\tShould be FL-1\n`; + tsv += `${secondRowId}\t${secondSampleType}\tShould be SP-10\n`; + + // Act + const resp = await importSample(server, tsv, firstSampleType, 'UPDATE', topFolderOptions, editorUserOptions); + + // Assert + // Verify that these rows are not updated + expect(resp.body.success).toEqual(false); + expect(resp.body.exception).toContain('Sample does 
not exist: (RowId)'); + }) +}); describe('Aliquot crud', () => { describe("SMAliquotImportExportTest", () => { @@ -1070,25 +1170,41 @@ describe('Amount/Unit CRUD', () => { expect(errorMsg.text).toContain(NEGATIVE_ERROR); errorMsg = await ExperimentCRUDUtils.importSample(server, "Name\tStoredAmount\tUnits\n" + dataName + "\t-1.1\tkg", dataType, "MERGE", topFolderOptions, editorUserOptions); expect(errorMsg.text).toContain(NEGATIVE_ERROR); + + // Using row-by-row await server.post('query', 'updateRows', { schemaName: 'samples', queryName: dataType, rows: [{ Amount: -1, Units: 'kg', - rowId: sampleRowId + rowId: sampleRowId, + },{ + rowId: sampleRowId, }] }, { ...topFolderOptions, ...editorUserOptions }).expect((result) => { const errorResp = JSON.parse(result.text); - // Note that the row by row update error is different from DIB. This is OK for now since we are planning to deprecate row by row updates. - expect(errorResp['exception']).toContain("Value '-1000.0 (g)' for field 'Amount' is invalid. Amounts must be non-negative."); + expect(errorResp['exception']).toContain("Value '-1' for field 'Amount' is invalid. Amounts must be non-negative."); + }); + + // Using data iterator + await server.post('query', 'updateRows', { + schemaName: 'samples', + queryName: dataType, + rows: [{ + Amount: -1, + Units: 'kg', + rowId: sampleRowId, + }] + }, { ...topFolderOptions, ...editorUserOptions }).expect((result) => { + const errorResp = JSON.parse(result.text); + expect(errorResp['exception']).toContain("Value '-1' for field 'Amount' is invalid. 
Amounts must be non-negative."); }); errorMsg = await ExperimentCRUDUtils.importCrossTypeData(server, "Name\tStoredAmount\tUnits\tSampleType\nData1\t-1.1\tkg\t" + dataType ,'UPDATE', topFolderOptions, adminOptions, true); expect(errorMsg.text).toContain(NEGATIVE_ERROR); errorMsg = await ExperimentCRUDUtils.importCrossTypeData(server, "Name\tStoredAmount\tUnits\tSampleType\nData1\t-1.1\tkg\t" + dataType ,'MERGE', topFolderOptions, adminOptions, true); expect(errorMsg.text).toContain(NEGATIVE_ERROR); - }); it ("Test units conversion on insert/update", async () => { diff --git a/experiment/src/org/labkey/experiment/ExpDataIterators.java b/experiment/src/org/labkey/experiment/ExpDataIterators.java index f470308fb57..91329240e4d 100644 --- a/experiment/src/org/labkey/experiment/ExpDataIterators.java +++ b/experiment/src/org/labkey/experiment/ExpDataIterators.java @@ -15,6 +15,7 @@ */ package org.labkey.experiment; +import org.apache.commons.beanutils.ConversionException; import org.apache.commons.collections4.MapUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Strings; @@ -40,6 +41,7 @@ import org.labkey.api.data.DbScope; import org.labkey.api.data.ExpDataFileConverter; import org.labkey.api.data.ImportAliasable; +import org.labkey.api.data.JdbcType; import org.labkey.api.data.NameGenerator; import org.labkey.api.data.RemapCache; import org.labkey.api.data.SimpleFilter; @@ -64,6 +66,7 @@ import org.labkey.api.dataiterator.ValidatorIterator; import org.labkey.api.dataiterator.WrapperDataIterator; import org.labkey.api.exp.ExperimentException; +import org.labkey.api.exp.OntologyManager; import org.labkey.api.exp.PropertyType; import org.labkey.api.exp.api.ExpData; import org.labkey.api.exp.api.ExpDataClass; @@ -82,13 +85,13 @@ import org.labkey.api.exp.api.NameExpressionOptionService; import org.labkey.api.exp.api.SampleTypeService; import org.labkey.api.exp.api.SimpleRunRecord; +import org.labkey.api.exp.property.DomainProperty; import 
org.labkey.api.exp.property.PropertyService; import org.labkey.api.exp.query.AbstractExpSchema; import org.labkey.api.exp.query.DataClassUserSchema; -import org.labkey.api.exp.query.ExpDataClassDataTable; import org.labkey.api.exp.query.ExpDataTable; -import org.labkey.api.exp.query.ExpMaterialTable; import org.labkey.api.exp.query.ExpSchema; +import org.labkey.api.exp.query.ExpTable; import org.labkey.api.exp.query.SamplesSchema; import org.labkey.api.qc.DataState; import org.labkey.api.qc.SampleStatusService; @@ -105,7 +108,6 @@ import org.labkey.api.query.ValidationException; import org.labkey.api.reader.DataLoader; import org.labkey.api.reader.TabLoader; -import org.labkey.api.search.SearchService; import org.labkey.api.security.User; import org.labkey.api.security.permissions.InsertPermission; import org.labkey.api.security.permissions.UpdatePermission; @@ -168,22 +170,8 @@ import static org.labkey.api.exp.api.ExpRunItem.INPUTS_PREFIX_LC; import static org.labkey.api.exp.api.ExperimentService.ALIASCOLUMNALIAS; import static org.labkey.api.exp.api.ExperimentService.QueryOptions.SkipBulkRemapCache; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.RawAmount; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.RawUnits; import static org.labkey.api.util.IntegerUtils.asLong; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.AliquotCount; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.AliquotVolume; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.AliquotedFromLSID; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.AvailableAliquotCount; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.AvailableAliquotVolume; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.Folder; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.MaterialSourceId; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.Name; -import static 
org.labkey.api.exp.query.ExpMaterialTable.Column.RootMaterialRowId; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.RowId; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.SampleState; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.StoredAmount; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.Units; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.*; import static org.labkey.api.query.AbstractQueryImportAction.configureLoader; import static org.labkey.experiment.api.SampleTypeServiceImpl.SampleChangeType.insert; import static org.labkey.experiment.api.SampleTypeUpdateServiceDI.PARENT_RECOMPUTE_NAME_COL; @@ -230,6 +218,8 @@ public CounterDataIteratorBuilder(@NotNull DataIteratorBuilder in, Container con public DataIterator getDataIterator(DataIteratorContext context) { DataIterator pre = _in.getDataIterator(context); + if (pre == null) + return null; // can happen if context has errors SimpleTranslator counterTranslator = new SimpleTranslator(pre, context); counterTranslator.setDebugName("Counter Def"); @@ -312,9 +302,9 @@ protected String validate(ColumnValidator v, int rowNum, Object value, DataItera Object aliquotedFromObj = data.get(_aliquotedFromColIdx); if (aliquotedFromObj != null) { - if (aliquotedFromObj instanceof String) + if (aliquotedFromObj instanceof String s) { - aliquotedFromValue = (String) aliquotedFromObj; + aliquotedFromValue = s; } else if (aliquotedFromObj instanceof Number) { @@ -363,6 +353,9 @@ public AliquotRollupDataIteratorBuilder(@NotNull DataIteratorBuilder in, Contain public DataIterator getDataIterator(DataIteratorContext context) { DataIterator pre = _in.getDataIterator(context); + if (pre == null) + return null; // can happen if context has errors + return LoggingDataIterator.wrap(new AliquotRollupDataIterator(pre, context, _container)); } } @@ -471,11 +464,11 @@ public Object get(int i) Map existingMap = getExistingRecord(); if (existingMap != null && 
!existingMap.isEmpty()) { - Pair needRecac = determineRecalcFromExistingRecord(i, existingMap); - if (needRecac == null) + Pair needRecalc = determineRecalcFromExistingRecord(i, existingMap); + if (needRecalc == null) return null; - if (needRecac.first && needRecac.second != null) - return needRecac.second; + if (needRecalc.first && needRecalc.second != null) + return needRecalc.second; } // without existing record, or if existing record is missing root information, we have to be conservative and assume this is a new aliquot, or an amount/status update @@ -522,8 +515,11 @@ public AliasDataIteratorBuilder(@NotNull DataIteratorBuilder in, Container conta @Override public DataIterator getDataIterator(DataIteratorContext context) { - DataIterator pre = _in.getDataIterator(context); - return LoggingDataIterator.wrap(new AliasDataIterator(pre, context, _container, _user, _expAliasTable, _dataType, _isSample)); + DataIterator di = _in.getDataIterator(context); + if (di == null) + return null; // can happen if context has errors + + return LoggingDataIterator.wrap(new AliasDataIterator(di, context, _container, _user, _expAliasTable, _dataType, _isSample)); } } @@ -535,6 +531,7 @@ private static class AliasDataIterator extends ExpDataTypeDataIterator final Supplier _nameCol; Map _lsidAliasMap = new HashMap<>(); private final TableInfo _expAliasTable; + private final boolean _isUpdateOnly; protected AliasDataIterator(DataIterator di, DataIteratorContext context, Container container, User user, TableInfo expAliasTable, ExpObject dataType, boolean isSample) { @@ -542,9 +539,13 @@ protected AliasDataIterator(DataIterator di, DataIteratorContext context, Contai Map map = DataIteratorUtil.createColumnNameMap(di); _aliasCol = map.get(ALIASCOLUMNALIAS) == null ? null : di.getSupplier(map.get(ALIASCOLUMNALIAS)); - _lsidCol = map.get("lsid") == null ? null : di.getSupplier(map.get("lsid")); - _nameCol = map.get("name") == null ? 
null : di.getSupplier(map.get("name")); + _lsidCol = map.get(LSID.name()) == null ? null : di.getSupplier(map.get(LSID.name())); + _nameCol = map.get(Name.name()) == null ? null : di.getSupplier(map.get(Name.name())); _expAliasTable = expAliasTable; + _isUpdateOnly = _context.getInsertOption().updateOnly; + + if (_isUpdateOnly && !di.supportsGetExistingRecord()) + throw new IllegalArgumentException("DataIterator must support getExistingRecord() to update aliases"); } @Override @@ -565,7 +566,7 @@ public boolean next() throws BatchValidationException // Collect alias values and map them by LSID String lsid = null; - if (_nameCol != null && (_context.getInsertOption().mergeRows || _context.getInsertOption().updateOnly)) + if (_nameCol != null && (_context.getInsertOption().mergeRows || _isUpdateOnly)) { Object nameValue = _nameCol.get(); if (nameValue instanceof String name) @@ -583,6 +584,13 @@ public boolean next() throws BatchValidationException lsid = lsidString; } + if (lsid == null && _isUpdateOnly) + { + Map oldRow = getExistingRecord(); + if (oldRow != null) + lsid = (String) oldRow.get(LSID.name()); + } + if (!StringUtils.isEmpty(lsid)) _lsidAliasMap.put(lsid, _aliasCol.get()); @@ -629,6 +637,9 @@ public AutoLinkToStudyDataIteratorBuilder(@NotNull DataIteratorBuilder in, UserS public DataIterator getDataIterator(DataIteratorContext context) { DataIterator pre = _in.getDataIterator(context); + if (pre == null) + return null; // can happen if context has errors + return LoggingDataIterator.wrap(new AutoLinkToStudyDataIterator(DataIteratorUtil.wrapMap(pre, false), _schema, _container, _user, _sampleType)); } } @@ -749,6 +760,9 @@ public FlagDataIteratorBuilder(@NotNull DataIteratorBuilder in, User user, boole public DataIterator getDataIterator(DataIteratorContext context) { DataIterator pre = _in.getDataIterator(context); + if (pre == null) + return null; // can happen if context has errors + return LoggingDataIterator.wrap(new FlagDataIterator(pre, context, 
_user, _isSample, _expObject, _container)); } } @@ -759,6 +773,7 @@ private static class FlagDataIterator extends ExpDataTypeDataIterator final Integer _lsidCol; final Integer _nameCol; final Integer _flagCol; + final boolean _isUpdateOnly; protected FlagDataIterator(DataIterator di, DataIteratorContext context, User user, boolean isSample, ExpObject dataType, Container container) { @@ -769,6 +784,10 @@ protected FlagDataIterator(DataIterator di, DataIteratorContext context, User us _lsidCol = map.get("lsid"); _nameCol = map.get("name"); _flagCol = map.containsKey("flag") ? map.get("flag") : map.get("comment"); + _isUpdateOnly = _context.getInsertOption().updateOnly; + + if (_isUpdateOnly && !di.supportsGetExistingRecord()) + throw new IllegalArgumentException("DataIterator must support getExistingRecord() to update flag/comment"); } @Override @@ -786,7 +805,7 @@ public boolean next() throws BatchValidationException return true; ExpObject expObject = null; - if (_nameCol != null && (_context.getInsertOption().mergeRows || _context.getInsertOption().updateOnly)) + if (_nameCol != null && (_context.getInsertOption().mergeRows || _isUpdateOnly)) { Object nameValue = get(_nameCol); if (nameValue instanceof String name) @@ -800,6 +819,17 @@ public boolean next() throws BatchValidationException expObject = getExpObjectByLsid(lsid); } + if (expObject == null && _isUpdateOnly) + { + Map oldRow = getExistingRecord(); + if (oldRow != null) + { + String lsid = (String) oldRow.get(LSID.name()); + if (lsid != null) + expObject = getExpObjectByLsid(lsid); + } + } + if (expObject != null) { Object flagValue = get(_flagCol); @@ -873,15 +903,21 @@ public DerivationDataIteratorBuilder(DataIteratorBuilder pre, Container containe @Override public DataIterator getDataIterator(DataIteratorContext context) { - DataIterator pre = _pre.getDataIterator(context); + DataIterator di = _pre.getDataIterator(context); + if (di == null) + return null; // can happen if context has errors + if 
(context.getConfigParameters().containsKey(SampleTypeUpdateServiceDI.Options.SkipDerivation)) - { - return pre; - } + return di; - if (context.getInsertOption() == QueryUpdateService.InsertOption.UPDATE) - return LoggingDataIterator.wrap(new ImportWithUpdateDerivationDataIterator(pre, context, _container, _user, _currentDataType, _isSample, _checkRequiredParents)); - return LoggingDataIterator.wrap(new DerivationDataIterator(pre, context, _container, _user, _currentDataType, _isSample, _skipAliquot)); + if (context.getInsertOption() != QueryUpdateService.InsertOption.UPDATE) + di = new DerivationDataIterator(di, context, _container, _user, _currentDataType, _isSample, _skipAliquot); + else if (_isSample) + di = new SampleUpdateDerivationDataIterator(di, context, _container, _user, _currentDataType, _checkRequiredParents); + else + di = new DataUpdateDerivationDataIterator(di, context, _container, _user, _currentDataType, _checkRequiredParents); + + return LoggingDataIterator.wrap(di); } } @@ -1012,15 +1048,17 @@ protected Set> _getParentParts() return allParts; } - protected void _processRun(ExpRunItem runItem, - List runRecords, - Set> parentNames, - RemapCache cache, - Map materialCache, - Map dataCache, - @Nullable String aliquotedFrom, - String dataType /*sample type or source type name*/, - boolean updateOnly) throws ValidationException, ExperimentException + protected void _processRun( + ExpRunItem runItem, + List runRecords, + Set> parentNames, + RemapCache cache, + Map materialCache, + Map dataCache, + @Nullable String aliquotedFrom, + String dataType /*sample type or source type name*/, + boolean updateOnly + ) throws ValidationException, ExperimentException { Pair pair; if (_context.getInsertOption().allowUpdate) @@ -1110,7 +1148,7 @@ protected void _processRun(ExpRunItem runItem, } } - static class DerivationDataIterator extends DerivationDataIteratorBase + private static class DerivationDataIterator extends DerivationDataIteratorBase { final Integer 
_aliquotParentCol; final Map _lsidNames; @@ -1275,24 +1313,24 @@ else if (!_skipAliquot && _context.getInsertOption().mergeRows) } } - static class ImportWithUpdateDerivationDataIterator extends DerivationDataIteratorBase + private static class SampleUpdateDerivationDataIterator extends DerivationDataIteratorBase { - // Map from Data name to Set of (parentColName, parentName) - final Map>> _parentNames; - final Integer _aliquotParentCol; - // Map of Data name and its aliquotedFromLSID - final Map _aliquotParents; - final boolean _useLsid; + final Integer _aliquotParentCol; // Map from Data name to Set of (parentColName, parentName) + final Map _aliquotParents; // Map of Data name and its aliquotedFromLSID + final Map>> _parentNames; + final Integer _rowIdCol; + final boolean _useRowId; - protected ImportWithUpdateDerivationDataIterator(DataIterator di, DataIteratorContext context, Container container, User user, ExpObject currentDataType, boolean isSample, boolean checkRequiredParent) + protected SampleUpdateDerivationDataIterator(DataIterator di, DataIteratorContext context, Container container, User user, ExpObject currentDataType, boolean checkRequiredParent) { - super(di, context, container, user, currentDataType, isSample, checkRequiredParent); + super(di, context, container, user, currentDataType, true, checkRequiredParent); Map map = DataIteratorUtil.createColumnNameMap(di); _parentNames = new LinkedHashMap<>(); _aliquotParents = new LinkedHashMap<>(); - _aliquotParentCol = isSample() ? 
map.getOrDefault(AliquotedFromLSID.name(), -1) : -1; - _useLsid = map.containsKey("lsid") && context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate); + _aliquotParentCol = map.getOrDefault(AliquotedFromLSID.name(), -1); + _rowIdCol = map.getOrDefault(RowId.name(), -1); + _useRowId = map.containsKey(RowId.name()); } @Override @@ -1307,11 +1345,17 @@ public boolean next() throws BatchValidationException // For each iteration, collect the parent col values if (hasNext) { - String key = null; - if (_useLsid && _lsidCol != null) - key = (String) get(_lsidCol); + Object key = null; + if (_useRowId && _rowIdCol != null) + { + key = get(_rowIdCol); + if (key instanceof String k) + key = Long.parseLong(k); + else + key = asLong(key); + } else if (_nameCol != null) - key = (String) get(_nameCol); + key = get(_nameCol); String aliquotParentName = null; @@ -1321,9 +1365,9 @@ else if (_nameCol != null) if (o != null) { - if (o instanceof String) + if (o instanceof String s) { - aliquotParentName = StringUtilsLabKey.unquoteString((String) o); + aliquotParentName = StringUtilsLabKey.unquoteString(s); } else if (o instanceof Number) { @@ -1345,7 +1389,7 @@ else if (o instanceof Number) for (Integer parentCol : _requiredParentCols.keySet()) { Object parentVal = get(parentCol); - if (parentVal == null || (parentVal instanceof String && ((String) parentVal).isEmpty())) + if (parentVal == null || (parentVal instanceof String s && s.isEmpty())) getErrors().addRowError(new ValidationException("Missing value for required property: " + _requiredParentCols.get(parentCol))); } } @@ -1367,50 +1411,114 @@ else if (o instanceof Number) Map dataCache = new LongHashMap<>(); List runRecords = new ArrayList<>(); - Set keys = new LinkedHashSet<>(); + Set keys = new LinkedHashSet<>(); keys.addAll(_parentNames.keySet()); keys.addAll(_aliquotParents.keySet()); - ExperimentService experimentService = ExperimentService.get(); - for (String key : keys) + for (Object key : keys) 
{ - Set> parentNames = _parentNames.getOrDefault(key, Collections.emptySet()); + ExpMaterial expMaterial = _useRowId ? ExperimentService.get().getExpMaterial((Long) key) : getSampleType().getSample(_container, (String) key); + if (expMaterial == null) + continue; - ExpRunItem runItem; + materialCache.put(expMaterial.getRowId(), expMaterial); + String dataType = getSampleType().getName(); String aliquotedFromLSID = _aliquotParents.get(key); - String dataType = null; - if (isSample()) - { - ExpMaterial m = _useLsid ? experimentService.getExpMaterial(key) : getSampleType().getSample(_container, key); + Set> parentNames = _parentNames.getOrDefault(key, Collections.emptySet()); - if (m != null) - { - materialCache.put(m.getRowId(), m); - dataType = getSampleType().getName(); - } - runItem = m; - } - else - { - ExpData d = _useLsid ? experimentService.getExpData(key) : getDataClass().getData(_container, key); + _processRun(expMaterial, runRecords, parentNames, cache, materialCache, dataCache, aliquotedFromLSID, dataType, true); + } - if (d != null) - { - dataCache.put(d.getRowId(), d); - dataType = getDataClass().getName(); - } - runItem = d; - } - if (runItem == null) // nothing to do if the item does not exist + if (!runRecords.isEmpty()) + ExperimentService.get().deriveSamplesBulk(runRecords, new ViewBackgroundInfo(_container, _user, null), null); + } + catch (ExperimentException e) + { + throw new RuntimeException(e); + } + catch (ValidationException e) + { + getErrors().addRowError(e); + throw getErrors(); + } + } + + return hasNext; + } + } + + private static class DataUpdateDerivationDataIterator extends DerivationDataIteratorBase + { + // Map from Data name to Set of (parentColName, parentName) + final Map>> _parentNames; + final boolean _useLsid; + + protected DataUpdateDerivationDataIterator(DataIterator di, DataIteratorContext context, Container container, User user, ExpObject currentDataType, boolean checkRequiredParent) + { + super(di, context, container, 
user, currentDataType, false, checkRequiredParent); + + Map map = DataIteratorUtil.createColumnNameMap(di); + _parentNames = new LinkedHashMap<>(); + _useLsid = map.containsKey("lsid") && context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate); + } + + @Override + public boolean next() throws BatchValidationException + { + boolean hasNext = super.next(); + + // skip processing if there are errors upstream + if (getErrors().hasErrors()) + return hasNext; + + // For each iteration, collect the parent col values + if (hasNext) + { + String key = null; + if (_useLsid && _lsidCol != null) + key = (String) get(_lsidCol); + else if (_nameCol != null) + key = (String) get(_nameCol); + + for (Integer parentCol : _requiredParentCols.keySet()) + { + Object parentVal = get(parentCol); + if (parentVal == null || (parentVal instanceof String s && s.isEmpty())) + getErrors().addRowError(new ValidationException("Missing value for required property: " + _requiredParentCols.get(parentCol))); + } + + Set> allParts = _getParentParts(); + if (!allParts.isEmpty()) + _parentNames.put(key, allParts); + } + + if (getErrors().hasErrors()) + return hasNext; + + if (!hasNext) + { + try + { + RemapCache cache = new RemapCache(true); + Map materialCache = new LongHashMap<>(); + Map dataCache = new LongHashMap<>(); + + List runRecords = new ArrayList<>(); + for (String key : _parentNames.keySet()) + { + ExpData expData = _useLsid ? 
ExperimentService.get().getExpData(key) : getDataClass().getData(_container, key); + if (expData == null) continue; - _processRun(runItem, runRecords, parentNames, cache, materialCache, dataCache, aliquotedFromLSID, dataType, true); + dataCache.put(expData.getRowId(), expData); + String dataType = getDataClass().getName(); + Set> parentNames = _parentNames.getOrDefault(key, Collections.emptySet()); + + _processRun(expData, runRecords, parentNames, cache, materialCache, dataCache, null, dataType, true); } if (!runRecords.isEmpty()) - { ExperimentService.get().deriveSamplesBulk(runRecords, new ViewBackgroundInfo(_container, _user, null), null); - } } catch (ExperimentException e) { @@ -1422,6 +1530,7 @@ else if (o instanceof Number) throw getErrors(); } } + return hasNext; } } @@ -2014,6 +2123,9 @@ public SearchIndexIteratorBuilder(DataIteratorBuilder pre, Function NOT_FOR_UPDATE = Sets.newCaseInsensitiveHashSet( - ExpDataTable.Column.RowId.toString(), - ExpDataTable.Column.LSID.toString(), - ExpDataTable.Column.Created.toString(), - ExpDataTable.Column.CreatedBy.toString(), - AliquotedFromLSID.toString(), - RootMaterialRowId.toString(), - "genId"); + // Common fields in both exp.data and exp.material that cannot be updated + private static final Set COMMON_NOT_FOR_UPDATE = CaseInsensitiveHashSet.of( + Created.name(), + CreatedBy.name(), + LSID.name(), + RowId.name(), + "genId" + ); + + public static final Set DATA_NOT_FOR_UPDATE; + public static final Set MATERIAL_NOT_FOR_UPDATE; + + static { + DATA_NOT_FOR_UPDATE = COMMON_NOT_FOR_UPDATE; + + Set materialNotForUpdate = Sets.newCaseInsensitiveHashSet(COMMON_NOT_FOR_UPDATE); + materialNotForUpdate.addAll(CaseInsensitiveHashSet.of( + AliquotCount.name(), + AliquotedFromLSID.name(), + AliquotVolume.name(), + AvailableAliquotCount.name(), + AvailableAliquotVolume.name(), + RootMaterialRowId.name() + )); + MATERIAL_NOT_FOR_UPDATE = Collections.unmodifiableSet(materialNotForUpdate); + } public static class 
PersistDataIteratorBuilder implements DataIteratorBuilder { private final DataIteratorBuilder _in; - private final TableInfo _expTable; + private final ExpTable _expTable; private final TableInfo _propertiesTable; private final ExpObject _dataTypeObject; private final Container _container; private final User _user; - private final Set _excludedColumns = new HashSet<>(List.of("generated","runId","sourceapplicationid")); // generated has database DEFAULT 0 + private final Set _excludedColumns = CaseInsensitiveHashSet.of("generated", RunId.name(), SourceApplicationId.name()); // generated has database DEFAULT 0 private String _fileLinkDirectory = null; Function _indexFunction; final Map _importAliases; // expTable is the shared experiment table e.g. exp.Data or exp.Materials - public PersistDataIteratorBuilder(@NotNull DataIteratorBuilder in, TableInfo expTable, TableInfo propsTable, ExpObject typeObject, Container container, User user, Map importAliases, @Nullable Long ownerObjectId) + public PersistDataIteratorBuilder(@NotNull DataIteratorBuilder in, ExpTable expTable, TableInfo propsTable, ExpObject typeObject, Container container, User user, Map importAliases) { _in = in; _expTable = expTable; @@ -2212,7 +2345,7 @@ public PersistDataIteratorBuilder(@NotNull DataIteratorBuilder in, TableInfo exp _dataTypeObject = typeObject; _container = container; _user = user; - _importAliases = importAliases != null ? new CaseInsensitiveHashMap<>(importAliases) : new CaseInsensitiveHashMap<>(); + _importAliases = importAliases != null ? 
new CaseInsensitiveHashMap<>(importAliases) : Collections.emptyMap(); } public PersistDataIteratorBuilder setIndexFunction(Function indexFunction) @@ -2254,131 +2387,256 @@ public DataIterator getDataIterator(DataIteratorContext context) assert _expTable instanceof ExpMaterialTableImpl || _expTable instanceof ExpDataClassDataTableImpl; boolean isSample = _expTable instanceof ExpMaterialTableImpl; + boolean isMergeOrUpdate = context.getInsertOption().allowUpdate; + boolean isUpdateOnly = context.getInsertOption().updateOnly; SimpleTranslator step1 = new SimpleTranslator(input, context); - step1.selectAll(Sets.newCaseInsensitiveHashSet("alias"), _importAliases); - if (colNameMap.containsKey("alias")) - step1.addColumn(ExperimentService.ALIASCOLUMNALIAS, colNameMap.get("alias")); // see AliasDataIteratorBuilder + step1.selectAll(Sets.newCaseInsensitiveHashSet(Alias.name()), _importAliases); + if (colNameMap.containsKey(Alias.name())) + step1.addColumn(ExperimentService.ALIASCOLUMNALIAS, colNameMap.get(Alias.name())); // see AliasDataIteratorBuilder - CaseInsensitiveHashSet dontUpdate = new CaseInsensitiveHashSet(); - dontUpdate.addAll(NOT_FOR_UPDATE); - dontUpdate.add("rowid"); // rowid is added / not dropped for dataclass for QueryUpdateAuditEvent.rowpk audit purpose - if (context.getInsertOption().updateOnly) + CaseInsensitiveHashSet dontUpdate = new CaseInsensitiveHashSet(isSample ? 
MATERIAL_NOT_FOR_UPDATE : DATA_NOT_FOR_UPDATE); + if (isMergeOrUpdate) { - dontUpdate.add("objectid"); - dontUpdate.add("cpastype"); - dontUpdate.add("lastindexed"); - } + // Common fields in both exp.data and exp.material that cannot be updated + dontUpdate.addAll(CpasType.name(), ObjectId.name()); - boolean isMergeOrUpdate = context.getInsertOption().allowUpdate; - boolean isUpdateUsingLsid = context.getInsertOption().updateOnly && colNameMap.containsKey("lsid") && context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate); + if (isSample) + dontUpdate.add(MaterialSourceId.name()); + } CaseInsensitiveHashSet keyColumns = new CaseInsensitiveHashSet(); CaseInsensitiveHashSet propertyKeyColumns = new CaseInsensitiveHashSet(); - if (!isMergeOrUpdate) - keyColumns.add(ExpDataTable.Column.LSID.toString()); + boolean canUpdateNames = NameExpressionOptionService.get().getAllowUserSpecificNamesValue(_container); - NameExpressionOptionService svc = NameExpressionOptionService.get(); - boolean canUpdateNames = svc.getAllowUserSpecificNamesValue(_container); + var keys = _expTable.getExistingRecordKeyColumnNames(context, colNameMap); + if (keys != null) + keyColumns.addAll(keys); + + for (String key : keyColumns) + { + if (_propertiesTable.getColumn(key) != null) + propertyKeyColumns.add(key); + } if (isSample) { - if (isMergeOrUpdate) - { - if (isUpdateUsingLsid) - { - keyColumns.add(ExpDataTable.Column.LSID.toString()); - if (!canUpdateNames) - dontUpdate.add("name"); - } - else - { - keyColumns.addAll(((ExpMaterialTableImpl) _expTable).getAltMergeKeys(context)); - propertyKeyColumns.add("name"); - } - } + if (isUpdateOnly && !canUpdateNames) + dontUpdate.add(Name.name()); dontUpdate.addAll(((ExpMaterialTableImpl) _expTable).getUniqueIdFields()); - dontUpdate.add(RootMaterialRowId.toString()); - dontUpdate.add(AliquotedFromLSID.toString()); - dontUpdate.add(ExpMaterialTable.Column.AliquotCount.name()); - 
dontUpdate.add(ExpMaterialTable.Column.AliquotVolume.name()); - dontUpdate.add(ExpMaterialTable.Column.AvailableAliquotCount.name()); - dontUpdate.add(ExpMaterialTable.Column.AvailableAliquotVolume.name()); } - else if (isMergeOrUpdate) + else { - if (isUpdateUsingLsid) + if (isMergeOrUpdate) { - keyColumns.add(ExpDataTable.Column.LSID.toString()); - if (!canUpdateNames) - dontUpdate.add("name"); + boolean isUpdateUsingLsid = isUpdateOnly && + colNameMap.containsKey(ExpDataTable.Column.LSID.name()) && + context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate); + + if (isUpdateUsingLsid && !canUpdateNames) + dontUpdate.add(ExpDataTable.Column.Name.name()); } - else + } + + // Since we support detailed audit logging, add the ExistingRecordDataIterator here just before TableInsertDataIterator. + // This is a NOOP unless we are merging/updating and detailed logging is enabled + DataIteratorBuilder dib = ExistingRecordDataIterator.createBuilder(step1, _expTable, keyColumns, _expTable.getExistingRecordSharedKeyColumnNames(), true); + + if (isSample) + { + // Add RootMaterialRowId if it does not exist + dib = getRootMaterialRowIdBuilder(dib); + + if (isMergeOrUpdate) { - keyColumns.addAll(((ExpDataClassDataTableImpl) _expTable).getAltMergeKeys(context)); + dib = new SampleStatusCheckIteratorBuilder(dib, _container); + + if (isUpdateOnly) + { + dib = new SampleUpdateOnlyValidatorsIteratorBuilder(dib, _container, _user); + dib = new SampleNameChangeDataIteratorBuilder(dib, _user, canUpdateNames); + } } } - // Since we support detailed audit logging add the ExistingRecordDataIterator here just before TableInsertDataIterator - // this is a NOOP unless we are merging/updating and detailed logging is enabled - DataIteratorBuilder step2a = ExistingRecordDataIterator.createBuilder(step1, _expTable, keyColumns, Set.of(ExpMaterialTable.Column.MaterialSourceId.name(), ExpDataClassDataTable.Column.ClassId.name()), true); + Set vocabProps = 
PropertyService.get().findVocabularyProperties(_container, colNameMap.keySet()); - // Add RootMaterialRowId if it does not exist - DataIteratorBuilder step2b = ctx -> { - DataIterator in = step2a.getDataIterator(ctx); - var map = DataIteratorUtil.createColumnNameMap(in); - if (map.containsKey(RootMaterialRowId.toString()) || !map.containsKey(RowId.toString())) - return in; - var ret = new SimpleTranslator(in, ctx); - ret.selectAll(); - ret.addAliasColumn(RootMaterialRowId.toString(), map.get(RowId.toString())); - return ret; - }; - - DataIteratorBuilder step2c = step2b; - if (isSample && isMergeOrUpdate) + // Ensure the property cache is cleared after vocabulary changes + if (isMergeOrUpdate && !vocabProps.isEmpty()) { - step2c = LoggingDataIterator.wrap(new ExpDataIterators.SampleStatusCheckIteratorBuilder(step2b, _container)); + var tx = _expTable.getSchema().getScope().getCurrentTransaction(); + if (tx != null) + tx.addCommitTask(OntologyManager::clearPropertyCache, DbScope.CommitTaskOption.POSTCOMMIT); } // Insert into exp.data then the provisioned table - // Use embargo data iterator to ensure rows are committed before being sent along Issue 26082 (row at a time, reselect rowid) - DataIteratorBuilder step3 = LoggingDataIterator.wrap(new TableInsertDataIteratorBuilder(step2c, _expTable, _container) + // Use embargo data iterator to ensure rows are committed before being sent along Issue 26082 (row at a time, reselect rowId) + dib = LoggingDataIterator.wrap(new TableInsertDataIteratorBuilder(dib, _expTable, _container) .setKeyColumns(keyColumns) .setDontUpdate(dontUpdate) + .setVocabularyProperties(vocabProps) .setAddlSkipColumns(_excludedColumns) .setCommitRowsBeforeContinuing(true) .setFailOnEmptyUpdate(false)); // pass in remap columns to help reconcile columns that may be aliased in the virtual table - DataIteratorBuilder step4 = LoggingDataIterator.wrap(new TableInsertDataIteratorBuilder(step3, _propertiesTable, _container) - 
.setKeyColumns(propertyKeyColumns.isEmpty() ? keyColumns : propertyKeyColumns) + dib = LoggingDataIterator.wrap(new TableInsertDataIteratorBuilder(dib, _propertiesTable, _container) + .setKeyColumns(propertyKeyColumns) .setDontUpdate(dontUpdate) - .setVocabularyProperties(PropertyService.get().findVocabularyProperties(_container, colNameMap.keySet())) - .setRemapSchemaColumns(((UpdateableTableInfo)_expTable).remapSchemaColumns()) + .setRemapSchemaColumns(((UpdateableTableInfo) _expTable).remapSchemaColumns()) .setFailOnEmptyUpdate(false)); - DataIteratorBuilder step5 = step4; - if (colNameMap.containsKey("flag") || colNameMap.containsKey("comment")) - { - step5 = LoggingDataIterator.wrap(new ExpDataIterators.FlagDataIteratorBuilder(step4, _user, isSample, _dataTypeObject, _container)); - } + if (colNameMap.containsKey(Flag.name()) || colNameMap.containsKey("comment")) + dib = new FlagDataIteratorBuilder(dib, _user, isSample, _dataTypeObject, _container); // Wire up derived parent/child data and materials - DataIteratorBuilder step6 = LoggingDataIterator.wrap(new ExpDataIterators.DerivationDataIteratorBuilder(step5, _container, _user, isSample, _dataTypeObject, false, false/*Validation already done in StandardDataIterator*/)); + dib = new DerivationDataIteratorBuilder(dib, _container, _user, isSample, _dataTypeObject, false, false /* Validation already done in StandardDataIterator */); - DataIteratorBuilder step7 = step6; - boolean hasRollUpColumns = colNameMap.containsKey(ROOT_RECOMPUTE_ROWID_COL); - if (isSample && !context.getConfigParameterBoolean(SampleTypeService.ConfigParameters.DeferAliquotRuns) && hasRollUpColumns) - step7 = LoggingDataIterator.wrap(new ExpDataIterators.AliquotRollupDataIteratorBuilder(step6, _container)); + if (isSample && !context.getConfigParameterBoolean(SampleTypeService.ConfigParameters.DeferAliquotRuns) && colNameMap.containsKey(ROOT_RECOMPUTE_ROWID_COL)) + dib = new AliquotRollupDataIteratorBuilder(dib, _container); // Hack: add the 
alias and lsid values back into the input, so we can process them in the chained data iterator - DataIteratorBuilder step8 = step7; if (null != _indexFunction) - step8 = LoggingDataIterator.wrap(new ExpDataIterators.SearchIndexIteratorBuilder(step7, _indexFunction)); // may need to add this after the aliases are set + dib = new SearchIndexIteratorBuilder(dib, _indexFunction); // may need to add this after the aliases are set + + return dib.getDataIterator(context); + } + + private DataIteratorBuilder getRootMaterialRowIdBuilder(DataIteratorBuilder dib) + { + return ctx -> { + DataIterator in = dib.getDataIterator(ctx); + var map = DataIteratorUtil.createColumnNameMap(in); + if (map.containsKey(RootMaterialRowId.toString()) || !map.containsKey(RowId.toString())) + return in; + var ret = new SimpleTranslator(in, ctx); + ret.selectAll(); + ret.addAliasColumn(RootMaterialRowId.toString(), map.get(RowId.toString())); + return ret; + }; + } + } + + private static class SampleUpdateOnlyValidatorsIteratorBuilder implements DataIteratorBuilder + { + private final Container _container; + private final DataIteratorBuilder _in; + private final User _user; + + public SampleUpdateOnlyValidatorsIteratorBuilder(@NotNull DataIteratorBuilder in, Container container, User user) + { + _container = container; + _in = in; + _user = user; + } + + @Override + public DataIterator getDataIterator(DataIteratorContext context) + { + DataIterator di = _in.getDataIterator(context); + if (di == null) + return null; // can happen if context has errors + + ValidatorIterator validate = new ValidatorIterator(di, context, _container, _user); + Map map = DataIteratorUtil.createColumnNameMap(validate); + + Integer index = map.get(Name.name()); + if (index != null) + { + ColumnInfo column = di.getColumnInfo(index); + validate.addValidator(index, new RequiredValidator(column.getColumnName(), false, false, "Sample name cannot be blank")); + } + + // Add other column validators here... 
+ + if (validate.hasValidators()) + di = validate; + + return LoggingDataIterator.wrap(di); + } + } + + private static class SampleNameChangeDataIteratorBuilder implements DataIteratorBuilder + { + private final DataIteratorBuilder _in; + private final boolean _canUpdateNames; + private final User _user; + + public SampleNameChangeDataIteratorBuilder(@NotNull DataIteratorBuilder in, User user, boolean canUpdateNames) + { + _in = in; + _canUpdateNames = canUpdateNames; + _user = user; + } + + @Override + public DataIterator getDataIterator(DataIteratorContext context) + { + DataIterator di = _in.getDataIterator(context); + if (di == null) + return null; // can happen if context has errors + + return LoggingDataIterator.wrap(new SampleNameChangeDataIterator(di, context, _user, _canUpdateNames)); + } + } + + private static class SampleNameChangeDataIterator extends WrapperDataIterator + { + private final DataIteratorContext _context; + private final Integer _nameCol; + private final boolean _canUpdateNames; + private final User _user; + + protected SampleNameChangeDataIterator( + DataIterator di, + DataIteratorContext context, + User user, + boolean canUpdateNames + ) + { + super(di); + _context = context; + _nameCol = DataIteratorUtil.createColumnNameMap(di).get(Name.name()); + _canUpdateNames = canUpdateNames; + _user = user; + + if (!di.supportsGetExistingRecord()) + throw new IllegalArgumentException("DataIterator must support getExistingRecord()"); + } - return LoggingDataIterator.wrap(step8.getDataIterator(context)); + @Override + public boolean next() throws BatchValidationException + { + boolean hasNext = super.next(); + if (!hasNext) + return false; + + if (_nameCol == null || _context.getErrors().hasErrors()) + return true; + + var existingRecord = getExistingRecord(); + if (existingRecord == null) + return true; + + Object newNameObj = get(_nameCol); + String newName = newNameObj == null ? 
null : String.valueOf(newNameObj); + String oldName = (String) existingRecord.get(Name.name()); + boolean hasNameChange = !StringUtils.isEmpty(newName) && !newName.equals(oldName); + if (!hasNameChange) + return true; + + if (_canUpdateNames) + { + Long rowId = asLong(existingRecord.get(RowId.name())); + ExpMaterial sample = ExperimentService.get().getExpMaterial(rowId); + if (sample != null) + ExperimentService.get().addObjectLegacyName(sample.getObjectId(), ExperimentServiceImpl.getNamespacePrefix(ExpMaterial.class), oldName, _user); + } + else + _context.getErrors().addRowError(new ValidationException("User-specified sample name not allowed")); + + return true; } } @@ -2397,7 +2655,7 @@ record TypeData( List fieldIndexes, Map dependencyIndexes, List dataRows, - List dataIds, + List dataIds, String headerRow, Map folderFiles ) { } @@ -2416,17 +2674,15 @@ record TypeData( private final Map> _typeFolderDataMap = new TreeMap<>(); private final Map> _orderDependencies = new HashMap<>(); private final int _dataIdIndex; + private final FieldKey _dataKey; + private final boolean _dataKeyIsNumeric; private final Map> _idsPerType = new HashMap<>(); private final Map> _parentIdsPerType = new HashMap<>(); - private final Map _containerMap = new CaseInsensitiveHashMap<>(); - private final boolean _isCrossFolderUpdate; - private final TSVWriter _tsvWriter; - - public MultiDataTypeCrossProjectDataIterator(DataIterator di, DataIteratorContext context, Container container, User user, boolean isCrossType, boolean isCrossFolder, ExpObject dataType, boolean isSamples) + private MultiDataTypeCrossProjectDataIterator(DataIterator di, DataIteratorContext context, Container container, User user, boolean isCrossType, boolean isCrossFolder, ExpObject dataType, boolean isSamples) { super(di); _context = context; @@ -2438,7 +2694,42 @@ public MultiDataTypeCrossProjectDataIterator(DataIterator di, DataIteratorContex _isCrossFolder = isCrossFolder; Map map = 
DataIteratorUtil.createColumnNameMap(di); - _dataIdIndex = map.getOrDefault("Name", -1); + // Determine the dataId column + { + int index; + FieldKey dataKey; + boolean isNumeric; + + if (_isSamples) + { + var foundId = RowId.namesAndLabels().stream() + .filter(map::containsKey) + .findFirst(); + + if (foundId.isPresent()) + { + index = map.get(foundId.get()); + dataKey = RowId.fieldKey(); + isNumeric = true; + } + else + { + index = map.getOrDefault(Name.name(), -1); + dataKey = Name.fieldKey(); + isNumeric = false; + } + } + else + { + index = map.getOrDefault(ExpDataTable.Column.Name.name(), -1); + dataKey = ExpDataTable.Column.Name.fieldKey(); + isNumeric = false; + } + + _dataIdIndex = index; + _dataKey = dataKey; + _dataKeyIsNumeric = isNumeric; + } _tsvWriter = new TSVWriter() // Used to quote values with newline/tabs/quotes { @@ -2480,7 +2771,7 @@ protected int write() if (_folderColIndex != null || _isCrossFolderUpdate) { - ContainerFilter cf = ContainerFilter.current(container, user); + ContainerFilter cf; if (container.isProductFoldersEnabled()) { // Note that this is slightly different from our treatment of lookups: @@ -2491,11 +2782,14 @@ protected int write() else cf = new ContainerFilter.CurrentPlusProjectAndShared(container, user); } - Collection validContainerIds = cf.getIds(); + else + cf = ContainerFilter.current(container, user); + + Collection validContainerIds; if (cf instanceof ContainerFilter.ContainerFilterWithPermission cfp) - { validContainerIds = cfp.generateIds(container, context.getInsertOption().allowUpdate ? 
UpdatePermission.class : InsertPermission.class, null); - } + else + validContainerIds = cf.getIds(); if (validContainerIds != null) { @@ -2596,7 +2890,7 @@ public boolean next() throws BatchValidationException boolean hasCrossFolderImport = false; - // process the individual files + // process the individual files for (String key : importOrderKeys) { Map typeFolderData = _typeFolderDataMap.get(key); @@ -2959,9 +3253,25 @@ private void addDataRow(TypeData typeData) { if (index == _dataIdIndex) { - _idsPerType.computeIfAbsent(typeData.dataType.getName(), k -> new HashSet<>()).add(data.toString()); + String dataString = data.toString(); + _idsPerType.computeIfAbsent(typeData.dataType.getName(), k -> new HashSet<>()).add(dataString); if (_isCrossFolderUpdate) - typeData.dataIds.add(data.toString()); + { + if (_dataKeyIsNumeric) + { + try + { + typeData.dataIds.add(JdbcType.BIGINT.convert(data)); + } + catch (ConversionException e) + { + _context.getErrors().addRowError(new ValidationException(e.getMessage() + " on row " + get(0), _dataKey.getName())); + return; + } + } + else + typeData.dataIds.add(dataString); + } } // if the data represents a derivation dependency between types, and we're creating ids within the file, @@ -2979,7 +3289,7 @@ private void addDataRow(TypeData typeData) else if (index == _dataIdIndex && _isCrossFolderUpdate) { // Issue 52922: Samples with blank sample id in the file are getting ignored - throw new IllegalArgumentException("Name value not provided on row " + get(0)); + throw new IllegalArgumentException(_dataKey.getName() + " value not provided on row " + get(0)); } }); typeData.dataRows.add(StringUtils.join(dataRow, "\t")); @@ -2990,11 +3300,10 @@ private void writeRowsToFile(TypeData typeData) if (typeData.dataRows.isEmpty()) return; - // for cross folder import, write to further partitions + // for cross-folder import, write to further partitions if (_isCrossFolderUpdate) { ExpObject dataType = typeData.dataType; - Map> containerRows = 
new HashMap<>(); TableInfo tableInfo; @@ -3002,29 +3311,30 @@ private void writeRowsToFile(TypeData typeData) if (_isSamples) { - filter = new SimpleFilter(FieldKey.fromParts("MaterialSourceId"), dataType.getRowId()); - filter.addCondition(FieldKey.fromParts("Name"), typeData.dataIds, CompareType.IN); + filter = new SimpleFilter(MaterialSourceId.fieldKey(), dataType.getRowId()); + filter.addCondition(_dataKey, typeData.dataIds, CompareType.IN); tableInfo = ExperimentService.get().getTinfoMaterial(); } else { filter = new SimpleFilter(FieldKey.fromParts("ClassId"), dataType.getRowId()); - filter.addCondition(FieldKey.fromParts("Name"), typeData.dataIds, CompareType.IN); + filter.addCondition(_dataKey, typeData.dataIds, CompareType.IN); tableInfo = ExperimentService.get().getTinfoData(); } - Map[] rows = new TableSelector(tableInfo, Set.of("name", "container"), filter, null).getMapArray(); + Map[] rows = new TableSelector(tableInfo, Set.of(_dataKey.getName(), "container"), filter, null).getMapArray(); - Set notFoundIds = new HashSet<>(typeData.dataIds); + Set notFoundIds = new HashSet<>(typeData.dataIds); for (Map row : rows) { - String name = (String) row.get("name"); - notFoundIds.remove(name); + Object raw = row.get(_dataKey.getName()); + Object identifier = _dataKeyIsNumeric ? 
asLong(raw) : raw; + notFoundIds.remove(identifier); String dataContainer = (String) row.get("container"); // could be updating the same data multiple times in a single import, the import will later be rejected List dataRowIds = IntStream.range(0, typeData.dataIds.size()).boxed() - .filter(i -> typeData.dataIds.get(i).equals(name)) + .filter(i -> typeData.dataIds.get(i).equals(identifier)) .toList(); containerRows.computeIfAbsent(dataContainer, k -> new ArrayList<>()).addAll(dataRowIds); } @@ -3112,8 +3422,11 @@ public MultiDataTypeCrossProjectDataIteratorBuilder(@NotNull User user, @NotNull @Override public DataIterator getDataIterator(DataIteratorContext context) { - DataIterator pre = _in.getDataIterator(context); - return LoggingDataIterator.wrap(new MultiDataTypeCrossProjectDataIterator(pre, context, _container, _user, _isCrossType, _isCrossFolder, _dataType, _isSamples)); + DataIterator di = _in.getDataIterator(context); + if (di == null) + return null; // can happen if context has errors + + return LoggingDataIterator.wrap(new MultiDataTypeCrossProjectDataIterator(di, context, _container, _user, _isCrossType, _isCrossFolder, _dataType, _isSamples)); } } @@ -3140,11 +3453,14 @@ public SampleStatusCheckIteratorBuilder(@NotNull DataIteratorBuilder in, Contain public DataIterator getDataIterator(DataIteratorContext context) { DataIterator pre = _in.getDataIterator(context); + if (pre == null) + return null; // can happen if context has errors + return LoggingDataIterator.wrap(new SampleStatusCheckDataIterator(pre, context, _container)); } } - public static class SampleStatusCheckDataIterator extends WrapperDataIterator + private static class SampleStatusCheckDataIterator extends WrapperDataIterator { private final Set SAMPLE_IMPORT_BASE_FIELDS = new CaseInsensitiveHashSet( "LSID", diff --git a/experiment/src/org/labkey/experiment/ExperimentModule.java b/experiment/src/org/labkey/experiment/ExperimentModule.java index 0b4da589654..dd9c3c52d8e 100644 --- 
a/experiment/src/org/labkey/experiment/ExperimentModule.java +++ b/experiment/src/org/labkey/experiment/ExperimentModule.java @@ -129,6 +129,7 @@ import org.labkey.experiment.api.LogDataType; import org.labkey.experiment.api.Protocol; import org.labkey.experiment.api.SampleTypeServiceImpl; +import org.labkey.experiment.api.SampleTypeUpdateServiceDI; import org.labkey.experiment.api.UniqueValueCounterTestCase; import org.labkey.experiment.api.VocabularyDomainKind; import org.labkey.experiment.api.data.ChildOfCompareType; @@ -200,7 +201,7 @@ public String getName() @Override public Double getSchemaVersion() { - return 25.014; + return 25.015; } @Nullable @@ -272,6 +273,8 @@ protected void init() "If a column name contains a \"__\" suffix, this feature allows for testing it as a Quantity display column", false); OptionalFeatureService.get().addExperimentalFeatureFlag(ExperimentService.EXPERIMENTAL_FEATURE_FROM_EXPANCESTORS, "SQL syntax: 'FROM EXPANCESTORS()'", "Support for querying lineage of experiment objects", false); + OptionalFeatureService.get().addExperimentalFeatureFlag(SampleTypeUpdateServiceDI.EXPERIMENTAL_FEATURE_ALLOW_ROW_ID_SAMPLE_MERGE, "Allow RowId to be accepted when merging samples", + "If the incoming data includes a RowId column we will allow the column but ignore it's values.", false); RoleManager.registerPermission(new DesignVocabularyPermission(), true); RoleManager.registerRole(new SampleTypeDesignerRole()); diff --git a/experiment/src/org/labkey/experiment/ExperimentUpgradeCode.java b/experiment/src/org/labkey/experiment/ExperimentUpgradeCode.java index e1b97b83aca..0612cc27174 100644 --- a/experiment/src/org/labkey/experiment/ExperimentUpgradeCode.java +++ b/experiment/src/org/labkey/experiment/ExperimentUpgradeCode.java @@ -17,31 +17,46 @@ import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; import 
org.labkey.api.audit.AbstractAuditTypeProvider; import org.labkey.api.audit.AuditLogService; import org.labkey.api.audit.AuditTypeEvent; import org.labkey.api.audit.SampleTimelineAuditEvent; import org.labkey.api.audit.TransactionAuditProvider; +import org.labkey.api.collections.CaseInsensitiveHashSet; +import org.labkey.api.data.ColumnInfo; import org.labkey.api.data.Container; import org.labkey.api.data.ContainerManager; +import org.labkey.api.data.DbSchema; import org.labkey.api.data.DbScope; import org.labkey.api.data.JdbcType; import org.labkey.api.data.Parameter; import org.labkey.api.data.ParameterMapStatement; import org.labkey.api.data.PropertyManager; +import org.labkey.api.data.PropertyStorageSpec; import org.labkey.api.data.SQLFragment; +import org.labkey.api.data.SchemaTableInfo; import org.labkey.api.data.Selector; import org.labkey.api.data.SqlExecutor; import org.labkey.api.data.SqlSelector; import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; import org.labkey.api.data.UpgradeCode; +import org.labkey.api.exp.PropertyDescriptor; import org.labkey.api.exp.api.ExpSampleType; import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.api.SampleTypeDomainKind; import org.labkey.api.exp.api.SampleTypeService; +import org.labkey.api.exp.api.StorageProvisioner; +import org.labkey.api.exp.property.Domain; +import org.labkey.api.exp.property.DomainUtil; +import org.labkey.api.exp.property.PropertyService; import org.labkey.api.module.ModuleContext; import org.labkey.api.module.ModuleLoader; import org.labkey.api.ontology.Unit; import org.labkey.api.query.AbstractQueryUpdateService; +import org.labkey.api.query.FieldKey; import org.labkey.api.query.QueryService; import org.labkey.api.security.LimitedUser; import org.labkey.api.security.User; @@ -49,12 +64,19 @@ import org.labkey.api.settings.AppProps; import org.labkey.api.util.logging.LogHelper; import org.labkey.experiment.api.ClosureQueryHelper; +import 
org.labkey.experiment.api.ExpSampleTypeImpl; +import org.labkey.experiment.api.ExperimentServiceImpl; +import org.labkey.experiment.api.MaterialSource; +import org.labkey.experiment.api.property.DomainImpl; +import org.labkey.experiment.api.property.DomainPropertyImpl; +import org.labkey.experiment.api.property.StorageProvisionerImpl; import org.labkey.experiment.samples.SampleTimelineAuditProvider; import java.sql.Connection; import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -406,6 +428,139 @@ private static int convertAmountsToBaseUnits(Container container, User user) { throw new RuntimeException(e); } + } + + /** + * Called from exp-25.014-25.015.sql + */ + @SuppressWarnings("unused") + public static void dropProvisionedSampleTypeLsidColumn(ModuleContext context) + { + if (context.isNewInstall()) + return; + + try (DbScope.Transaction tx = ExperimentService.get().ensureTransaction()) + { + // Process all sample types across all containers + TableInfo sampleTypeTable = ExperimentServiceImpl.get().getTinfoSampleType(); + List sampleTypes = new TableSelector(sampleTypeTable, null, null) + .stream(MaterialSource.class) + .map(ExpSampleTypeImpl::new) + .toList(); + + LOG.info("Dropping the lsid column from {} sample types", sampleTypes.size()); + + int successCount = 0; + for (ExpSampleTypeImpl st : sampleTypes) + { + boolean success = dropSampleLsid(st); + if (success) + successCount++; + } + + LOG.info("Dropped lsid column from {} of {} sample types successfully.", successCount, sampleTypes.size()); + + tx.commit(); + } + } + + private static boolean dropSampleLsid(ExpSampleTypeImpl st) + { + ProvisionedSampleTypeContext context = getProvisionedSampleTypeContext(st); + if (context == null) + return false; + + Domain domain = context.domain; + TableInfo table = context.provisionedTable; + + String lsidColumnName = "lsid"; + ColumnInfo 
lsidColumn = table.getColumn(FieldKey.fromParts(lsidColumnName)); + if (lsidColumn == null) + { + LOG.info("No lsid column found on table '{}'. Skipping drop.", table.getName()); + return false; + } + + Set indicesToRemove = new HashSet<>(); + for (var index : table.getAllIndices()) + { + var indexColumns = index.columns(); + if (indexColumns.contains(lsidColumn)) + { + // We only expect to be dropping indices on the LSID column alone. However, if we encounter + // another index on the provisioned table, log information about the index and continue to remove it. + if (indexColumns.size() > 1) + LOG.info("Dropping index '{}' on table '{}' because it contains the lsid column.", index.name(), table.getName()); + + indicesToRemove.add(index.name()); + } + } + + if (!indicesToRemove.isEmpty()) + StorageProvisionerImpl.get().dropTableIndices(domain, indicesToRemove); + else + LOG.info("No indices found on table '{}' that contain the lsid column.", table.getName()); + + // Remanufacture a property descriptor that matches the original LSID property descriptor. 
+ var spec = new PropertyStorageSpec(lsidColumnName, JdbcType.VARCHAR, 300).setNullable(false); + PropertyDescriptor pd = new PropertyDescriptor(); + pd.setContainer(st.getContainer()); + pd.setDatabaseDefaultValue(spec.getDefaultValue()); + pd.setName(spec.getName()); + pd.setJdbcType(spec.getJdbcType(), spec.getSize()); + pd.setNullable(spec.isNullable()); + pd.setMvEnabled(spec.isMvEnabled()); + pd.setPropertyURI(DomainUtil.createUniquePropertyURI(domain.getTypeURI(), null, new CaseInsensitiveHashSet())); + pd.setDescription(spec.getDescription()); + pd.setImportAliases(spec.getImportAliases()); + pd.setScale(spec.getSize()); + DomainPropertyImpl dp = new DomainPropertyImpl((DomainImpl) domain, pd); + + LOG.debug("Dropping lsid column from table '{}' for sample type '{}' in folder {}.", table.getName(), st.getName(), st.getContainer().getPath()); + StorageProvisionerImpl.get().dropProperties(domain, Set.of(dp)); + + return true; + } + + private record ProvisionedSampleTypeContext(Domain domain, SchemaTableInfo provisionedTable) {} + + private static @Nullable ProvisionedSampleTypeContext getProvisionedSampleTypeContext(@NotNull ExpSampleTypeImpl st) + { + Domain domain = st.getDomain(); + SampleTypeDomainKind kind = null; + try + { + kind = (SampleTypeDomainKind) domain.getDomainKind(); + } + catch (IllegalArgumentException e) + { + // pass + } + + if (kind == null) + { + LOG.info("Sample type '" + st.getName() + "' (" + st.getRowId() + ") has no domain kind."); + return null; + } + else if (kind.getStorageSchemaName() == null) + { + // e.g., SpecimenSampleTypeDomainKind is not provisioned + LOG.info("Sample type '" + st.getName() + "' (" + st.getRowId() + ") has no provisioned storage schema."); + return null; + } + + DbSchema schema = kind.getSchema(); + StorageProvisioner.get().ensureStorageTable(domain, kind, schema.getScope()); + domain = PropertyService.get().getDomain(domain.getTypeId()); + assert (null != domain && null != domain.getStorageTableName()); 
+ + SchemaTableInfo provisionedTable = schema.getTable(domain.getStorageTableName()); + if (provisionedTable == null) + { + LOG.error("Sample type '" + st.getName() + "' (" + st.getRowId() + ") has no provisioned table."); + return null; + } + return new ProvisionedSampleTypeContext(domain, provisionedTable); } } diff --git a/experiment/src/org/labkey/experiment/api/AbstractRunItemImpl.java b/experiment/src/org/labkey/experiment/api/AbstractRunItemImpl.java index 6bb4fb43885..a9a94cb4034 100644 --- a/experiment/src/org/labkey/experiment/api/AbstractRunItemImpl.java +++ b/experiment/src/org/labkey/experiment/api/AbstractRunItemImpl.java @@ -59,6 +59,7 @@ import java.util.Set; import java.util.stream.Collectors; +import static java.util.Collections.emptyMap; import static org.labkey.api.util.IntegerUtils.isIntegral; /** @@ -313,16 +314,15 @@ protected List getTargetRuns(TableInfo inputTable, String rowIdColum return ExpRunImpl.fromRuns(new SqlSelector(ExperimentService.get().getSchema(), sql).getArrayList(ExperimentRun.class)); } - protected HashMap getObjectProperties(TableInfo ti) + protected Map getObjectProperties(TableInfo ti) { if (null == ti) - return new HashMap<>(); - var scope = OntologyManager.getExpSchema().getScope(); - return scope.executeWithRetryReadOnly(tx -> + return emptyMap(); + + return OntologyManager.getExpSchema().getScope().executeWithRetryReadOnly(tx -> { - var ret = new HashMap(); - var selector = new TableSelector(ti, TableSelector.ALL_COLUMNS, new SimpleFilter(FieldKey.fromParts("lsid"), getLSID()), null); - selector.forEach(rs -> + var ret = new HashMap(); + getObjectPropertiesSelector(ti).forEach(rs -> { for (ColumnInfo c : ti.getColumns()) { @@ -335,22 +335,31 @@ protected HashMap getObjectProperties(TableInfo ti) if (null != c.getMvColumnName()) { ColumnInfo mv = ti.getColumn(c.getMvColumnName()); - mvIndicator = null==mv ? null : (String)mv.getValue(rs); + mvIndicator = null == mv ? 
null : (String) mv.getValue(rs); } if (null == value && null == mvIndicator) continue; if (null != mvIndicator) value = null; - var prop = new ObjectProperty(getLSID(), getContainer(), c.getPropertyURI(), value, null==c.getPropertyType()? PropertyType.getFromJdbcType(c.getJdbcType()) : c.getPropertyType(), c.getName()); + + var propertyType = null == c.getPropertyType() ? PropertyType.getFromJdbcType(c.getJdbcType()) : c.getPropertyType(); + var prop = new ObjectProperty(getLSID(), getContainer(), c.getPropertyURI(), value, propertyType, c.getName()); if (null != mvIndicator) prop.setMvIndicator(mvIndicator); + ret.put(c.getPropertyURI(), prop); } }); + return ret; }); } + protected TableSelector getObjectPropertiesSelector(@NotNull TableInfo table) + { + return new TableSelector(table, new SimpleFilter(FieldKey.fromParts("lsid"), getLSID()), null); + } + protected void processIndexValues( Map props, @NotNull ExpRunItemTableImpl table, diff --git a/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java b/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java index 858833a24b9..cc96470d824 100644 --- a/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java +++ b/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java @@ -881,7 +881,7 @@ public CaseInsensitiveHashMap remapSchemaColumns() } @Override - public Set getAltMergeKeys(DataIteratorContext context) + public @Nullable Set getAltMergeKeys(DataIteratorContext context) { if (context.getInsertOption().updateOnly && context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate)) return getAltKeysForUpdate(); @@ -895,6 +895,38 @@ public Set getAltKeysForUpdate() return DATA_CLASS_ALT_UPDATE_KEYS; } + @Override + public @Nullable Set getExistingRecordKeyColumnNames(DataIteratorContext context, Map colNameMap) + { + Set keyColumnNames = new CaseInsensitiveHashSet(); + + if (context.getInsertOption().allowUpdate) + { + boolean 
isUpdateUsingLsid = context.getInsertOption().updateOnly && + colNameMap.containsKey(ExpDataTable.Column.LSID.name()) && + context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate); + + if (isUpdateUsingLsid) + keyColumnNames.add(Column.LSID.name()); + else + { + Set altMergeKeys = getAltMergeKeys(context); + if (altMergeKeys != null) + keyColumnNames.addAll(altMergeKeys); + } + } + else + keyColumnNames.add(Column.LSID.name()); + + return keyColumnNames.isEmpty() ? null : keyColumnNames; + } + + @Override + public @Nullable Set getExistingRecordSharedKeyColumnNames() + { + return CaseInsensitiveHashSet.of(Column.ClassId.name()); + } + @Override @NotNull public List> getAdditionalRequiredInsertColumns() @@ -918,7 +950,7 @@ public DataIteratorBuilder persistRows(DataIteratorBuilder data, DataIteratorCon TableInfo propertiesTable = _dataClassDataTableSupplier.get(); try { - PersistDataIteratorBuilder step0 = new ExpDataIterators.PersistDataIteratorBuilder(data, this, propertiesTable, _dataClass, getUserSchema().getContainer(), getUserSchema().getUser(), _dataClass.getImportAliases(), null); + PersistDataIteratorBuilder step0 = new ExpDataIterators.PersistDataIteratorBuilder(data, this, propertiesTable, _dataClass, getUserSchema().getContainer(), getUserSchema().getUser(), _dataClass.getImportAliases()); ExperimentServiceImpl experimentServiceImpl = ExperimentServiceImpl.get(); final var scope = propertiesTable.getSchema().getScope(); SearchService.TaskIndexingQueue queue = SearchService.get().defaultTask().getQueue(getContainer(), SearchService.PRIORITY.modified); diff --git a/experiment/src/org/labkey/experiment/api/ExpMaterialImpl.java b/experiment/src/org/labkey/experiment/api/ExpMaterialImpl.java index 8a59ed07001..7108fc43a05 100644 --- a/experiment/src/org/labkey/experiment/api/ExpMaterialImpl.java +++ b/experiment/src/org/labkey/experiment/api/ExpMaterialImpl.java @@ -47,7 +47,6 @@ import org.labkey.api.exp.api.SampleTypeService; 
import org.labkey.api.exp.property.Domain; import org.labkey.api.exp.property.DomainProperty; -import org.labkey.api.exp.query.ExpDataTable; import org.labkey.api.exp.query.ExpMaterialTable; import org.labkey.api.exp.query.ExpSchema; import org.labkey.api.exp.query.SamplesSchema; @@ -125,9 +124,9 @@ public ActionURL detailsURL(Container container, boolean checkForOverride) { ExpSampleType st = getSampleType(); if (st != null) - return new QueryRowReference(getContainer(), SamplesSchema.SCHEMA_SAMPLES, st.getName(), FieldKey.fromParts(ExpDataTable.Column.RowId), getRowId()); + return new QueryRowReference(getContainer(), SamplesSchema.SCHEMA_SAMPLES, st.getName(), ExpMaterialTable.Column.RowId.fieldKey(), getRowId()); else - return new QueryRowReference(getContainer(), ExpSchema.SCHEMA_EXP, ExpSchema.TableType.Materials.name(), FieldKey.fromParts(ExpDataTable.Column.RowId), getRowId()); + return new QueryRowReference(getContainer(), ExpSchema.SCHEMA_EXP, ExpSchema.TableType.Materials.name(), ExpMaterialTable.Column.RowId.fieldKey(), getRowId()); } @Override @@ -246,7 +245,10 @@ public Double getAliquotVolume() } @Override - public Double getAvailableAliquotVolume() { return _object.getAvailableAliquotVolume(); } + public Double getAvailableAliquotVolume() + { + return _object.getAvailableAliquotVolume(); + } @Override public String getAliquotUnit() @@ -319,7 +321,7 @@ public void save(User user, ExpSampleTypeImpl st) TableInfo ti = st.getTinfo(); if (null != ti) { - new SqlExecutor(ti.getSchema()).execute("INSERT INTO " + ti + " (rowId, lsid, name) SELECT ?, ?, ? WHERE NOT EXISTS (SELECT lsid FROM " + ti + " WHERE lsid = ?)", getRowId(), getLSID(), getName(), getLSID()); + new SqlExecutor(ti.getSchema()).execute("INSERT INTO " + ti + " (rowId, name) SELECT ?, ? 
WHERE NOT EXISTS (SELECT rowId FROM " + ti + " WHERE rowId = ?)", getRowId(), getName(), getRowId()); SampleTypeServiceImpl.get().refreshSampleTypeMaterializedView(st, SampleTypeServiceImpl.SampleChangeType.insert); } } @@ -335,12 +337,24 @@ protected void save(User user, TableInfo table, boolean ensureObject) if (getRowId() == 0) { isInsert = true; - long longId = DbSequenceManager.getPreallocatingSequence(ContainerManager.getRoot(), ExperimentService.get().getTinfoMaterial().getDbSequenceName("RowId")).next(); + long longId = DbSequenceManager.getPreallocatingSequence(ContainerManager.getRoot(), ExperimentService.get().getTinfoMaterial().getDbSequenceName(ExpMaterialTable.Column.RowId.name())).next(); if (longId > Integer.MAX_VALUE) throw new OutOfRangeException(longId, 0, Integer.MAX_VALUE); setRowId((int) longId); if (null == getRootMaterialRowId()) setRootMaterialRowId(getRowId()); + + // If a MaterialSourceId is not yet specified and the material is associated with a sample type, + // then set the MaterialSourceId to the sample type + if (null == _object.getMaterialSourceId()) + { + ExpSampleType st = getSampleType(); + if (st != null) + { + assert st.getLSID().equals(getCpasType()); + _object.setMaterialSourceId(st.getRowId()); + } + } } super.save(user, table, true, isInsert); } @@ -507,8 +521,7 @@ public Map getProperties(ExpSampleTypeImpl st) var ti = null == st ? 
null : st.getTinfo(); if (null != ti) { - var selector = new TableSelector(ti, TableSelector.ALL_COLUMNS, new SimpleFilter(FieldKey.fromParts("lsid"), getLSID()), null); - selector.forEach(rs -> + getObjectPropertiesSelector(ti).forEach(rs -> { for (ColumnInfo c : ti.getColumns()) { @@ -556,9 +569,9 @@ public Map getObjectProperties(ExpSampleTypeImpl st) } @Override - public Object getProperty(DomainProperty prop) + protected TableSelector getObjectPropertiesSelector(@NotNull TableInfo table) { - return super.getProperty(prop); + return new TableSelector(table, new SimpleFilter(ExpMaterialTable.Column.RowId.fieldKey(), getRowId()), null); } @Override @@ -629,8 +642,8 @@ else if (values.containsKey(dp.getPropertyURI())) values.remove(key); } TableInfo tableInfo = st.getTinfo(); - ColumnInfo lsidCol = tableInfo.getColumn(ExpMaterialTable.Column.LSID.name()); - Table.update(user, st.getTinfo(), converted, lsidCol, getLSID(), null, Level.WARN); + ColumnInfo rowIdCol = tableInfo.getColumn(ExpMaterialTable.Column.RowId.fieldKey()); + Table.update(user, tableInfo, converted, rowIdCol, getRowId(), null, Level.WARN); } for (var entry : values.entrySet()) { diff --git a/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java b/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java index 89f4954b056..7ff117ba8e9 100644 --- a/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java +++ b/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java @@ -39,8 +39,6 @@ import org.labkey.api.data.DataRegion; import org.labkey.api.data.DbSchema; import org.labkey.api.data.DbScope; -import org.labkey.api.data.DisplayColumn; -import org.labkey.api.data.DisplayColumnFactory; import org.labkey.api.data.ForeignKey; import org.labkey.api.data.ImportAliasable; import org.labkey.api.data.JdbcType; @@ -95,10 +93,10 @@ import org.labkey.api.query.QueryUpdateService; import org.labkey.api.query.QueryUrls; import org.labkey.api.query.RowIdForeignKey; 
-import org.labkey.api.query.SchemaKey; import org.labkey.api.query.UserSchema; import org.labkey.api.query.column.BuiltInColumnTypes; import org.labkey.api.search.SearchService; +import org.labkey.api.security.User; import org.labkey.api.security.UserPrincipal; import org.labkey.api.security.permissions.DeletePermission; import org.labkey.api.security.permissions.InsertPermission; @@ -140,24 +138,14 @@ import java.util.function.Supplier; import java.util.stream.Collectors; -import static java.util.Objects.requireNonNull; import static org.labkey.api.audit.AuditHandler.PROVIDED_DATA_PREFIX; import static org.labkey.api.data.ColumnRenderPropertiesImpl.NON_NEGATIVE_NUMBER_CONCEPT_URI; import static org.labkey.api.exp.api.SampleTypeDomainKind.ALIQUOT_COUNT_LABEL; import static org.labkey.api.exp.api.SampleTypeDomainKind.ALIQUOT_VOLUME_LABEL; import static org.labkey.api.exp.api.SampleTypeDomainKind.AVAILABLE_ALIQUOT_COUNT_LABEL; import static org.labkey.api.exp.api.SampleTypeDomainKind.AVAILABLE_ALIQUOT_VOLUME_LABEL; -import static org.labkey.api.exp.api.SampleTypeDomainKind.SAMPLETYPE_FILE_DIRECTORY; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.AliquotCount; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.AliquotUnit; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.AliquotVolume; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.AvailableAliquotCount; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.AvailableAliquotVolume; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.RawAliquotUnit; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.RawAliquotVolume; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.RawAvailableAliquotVolume; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.StoredAmount; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.Units; +import static 
org.labkey.api.exp.api.SampleTypeDomainKind.SAMPLE_TYPE_FILE_DIRECTORY_NAME; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.*; import static org.labkey.api.util.StringExpressionFactory.AbstractStringExpression.NullValueBehavior.NullResult; import static org.labkey.experiment.api.SampleTypeServiceImpl.SampleChangeType.schema; @@ -167,12 +155,10 @@ public class ExpMaterialTableImpl extends ExpRunItemTableImpl _uniqueIdFields; boolean _supportTableRules = true; - public static final Set MATERIAL_ALT_MERGE_KEYS; - public static final Set MATERIAL_ALT_UPDATE_KEYS; - public static final List AMOUNT_RANGE_VALIDATORS = new ArrayList<>(); + private static final Set MATERIAL_ALT_KEYS; + private static final List AMOUNT_RANGE_VALIDATORS = new ArrayList<>(); static { - MATERIAL_ALT_MERGE_KEYS = Set.of(Column.MaterialSourceId.name(), Column.Name.name()); - MATERIAL_ALT_UPDATE_KEYS = Set.of(Column.LSID.name()); + MATERIAL_ALT_KEYS = CaseInsensitiveHashSet.of(MaterialSourceId.name(), Name.name()); Lsid rangeValidatorLsid = DefaultPropertyValidator.createValidatorURI(PropertyValidatorType.Range); IPropertyValidator amountValidator = PropertyService.get().createValidator(rangeValidatorLsid.toString()); @@ -212,11 +198,11 @@ protected ColumnInfo resolveColumn(String name) if (result == null) { if ("CpasType".equalsIgnoreCase(name)) - result = createColumn(Column.SampleSet.name(), Column.SampleSet); - else if (Column.Property.name().equalsIgnoreCase(name)) - result = createPropertyColumn(Column.Property.name()); - else if (Column.QueryableInputs.name().equalsIgnoreCase(name)) - result = createColumn(Column.QueryableInputs.name(), Column.QueryableInputs); + result = createColumn(SampleSet.name(), SampleSet); + else if (Property.name().equalsIgnoreCase(name)) + result = createPropertyColumn(Property.name()); + else if (QueryableInputs.name().equalsIgnoreCase(name)) + result = createColumn(QueryableInputs.name(), QueryableInputs); } return result; } @@ -252,11 +238,18 
@@ public MutableColumnInfo createColumn(String alias, Column column) } case LSID -> { - return wrapColumn(alias, _rootTable.getColumn(Column.LSID.name())); + var columnInfo = wrapColumn(alias, _rootTable.getColumn(LSID.name())); + columnInfo.setHidden(true); + columnInfo.setReadOnly(true); + columnInfo.setUserEditable(false); + columnInfo.setShownInInsertView(false); + columnInfo.setShownInDetailsView(false); + columnInfo.setShownInUpdateView(false); + return columnInfo; } case MaterialSourceId -> { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.MaterialSourceId.name())); + var columnInfo = wrapColumn(alias, _rootTable.getColumn(MaterialSourceId.name())); columnInfo.setFk(new LookupForeignKey(getLookupContainerFilter(), null, null, null, null, "RowId", "Name") { @Override @@ -280,9 +273,14 @@ public StringExpression getURL(ColumnInfo parent) } case RootMaterialRowId -> { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.RootMaterialRowId.name())); - columnInfo.setFk(getExpSchema().getMaterialForeignKey(getLookupContainerFilter(), Column.RowId.name())); + var columnInfo = wrapColumn(alias, _rootTable.getColumn(RootMaterialRowId.name())); + columnInfo.setFk(getExpSchema().getMaterialForeignKey(getLookupContainerFilter(), RowId.name())); columnInfo.setLabel("Root Material"); + columnInfo.setHidden(true); + columnInfo.setReadOnly(true); + columnInfo.setShownInInsertView(false); + columnInfo.setShownInDetailsView(false); + columnInfo.setShownInUpdateView(false); columnInfo.setUserEditable(false); // NK: Here we mark the column as not required AND nullable which is the opposite of the database where @@ -295,17 +293,23 @@ public StringExpression getURL(ColumnInfo parent) } case AliquotedFromLSID -> { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.AliquotedFromLSID.name())); + var columnInfo = wrapColumn(alias, _rootTable.getColumn(AliquotedFromLSID.name())); columnInfo.setSqlTypeName("lsidtype"); - 
columnInfo.setFk(getExpSchema().getMaterialForeignKey(getLookupContainerFilter(), Column.LSID.name())); + columnInfo.setFk(getExpSchema().getMaterialForeignKey(getLookupContainerFilter(), LSID.name())); columnInfo.setLabel("Aliquoted From Parent"); + columnInfo.setHidden(true); + columnInfo.setReadOnly(true); + columnInfo.setUserEditable(false); + columnInfo.setShownInInsertView(false); + columnInfo.setShownInDetailsView(false); + columnInfo.setShownInUpdateView(false); return columnInfo; } case IsAliquot -> { - String rootMaterialRowIdField = ExprColumn.STR_TABLE_ALIAS + "." + Column.RootMaterialRowId.name(); - String rowIdField = ExprColumn.STR_TABLE_ALIAS + "." + Column.RowId.name(); - ExprColumn columnInfo = new ExprColumn(this, FieldKey.fromParts(Column.IsAliquot.name()), new SQLFragment( + String rootMaterialRowIdField = ExprColumn.STR_TABLE_ALIAS + "." + RootMaterialRowId.name(); + String rowIdField = ExprColumn.STR_TABLE_ALIAS + "." + RowId.name(); + ExprColumn columnInfo = new ExprColumn(this, IsAliquot.fieldKey(), new SQLFragment( "(CASE WHEN (" + rootMaterialRowIdField + " = " + rowIdField + ") THEN ").append(getSqlDialect().getBooleanFALSE()) .append(" WHEN ").append(rowIdField).append(" IS NOT NULL THEN ").append(getSqlDialect().getBooleanTRUE()) // Issue 52745 .append(" ELSE NULL END)"), JdbcType.BOOLEAN); @@ -329,12 +333,16 @@ public StringExpression getURL(ColumnInfo parent) } case RawAmount -> { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.StoredAmount.name())); + var columnInfo = wrapColumn(alias, _rootTable.getColumn(StoredAmount.name())); columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, null)); + columnInfo.setConceptURI(NON_NEGATIVE_NUMBER_CONCEPT_URI); columnInfo.setDescription("The amount of this sample, in the base unit for the sample type's display unit (if defined), currently on hand."); - columnInfo.setUserEditable(false); + columnInfo.setHidden(true); 
columnInfo.setReadOnly(true); - columnInfo.setConceptURI(NON_NEGATIVE_NUMBER_CONCEPT_URI); + columnInfo.setShownInDetailsView(false); + columnInfo.setShownInInsertView(false); + columnInfo.setShownInUpdateView(false); + columnInfo.setUserEditable(false); columnInfo.setValidators(AMOUNT_RANGE_VALIDATORS); return columnInfo; } @@ -358,7 +366,7 @@ public StringExpression getURL(ColumnInfo parent) } else { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.StoredAmount.name())); + var columnInfo = wrapColumn(alias, _rootTable.getColumn(StoredAmount.name())); columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, null)); columnInfo.setLabel(label); columnInfo.setImportAliasesSet(importAliases); @@ -370,10 +378,14 @@ public StringExpression getURL(ColumnInfo parent) } case RawUnits -> { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.Units.name())); + var columnInfo = wrapColumn(alias, _rootTable.getColumn(Units.name())); columnInfo.setDescription("The units associated with the Stored Amount for this sample."); - columnInfo.setUserEditable(false); + columnInfo.setHidden(true); columnInfo.setReadOnly(true); + columnInfo.setShownInDetailsView(false); + columnInfo.setShownInInsertView(false); + columnInfo.setShownInUpdateView(false); + columnInfo.setUserEditable(false); return columnInfo; } case Units -> @@ -390,7 +402,7 @@ public StringExpression getURL(ColumnInfo parent) Unit typeUnit = getSampleTypeUnit(); if (typeUnit != null) { - SampleTypeUnitDisplayColumn columnInfo = new SampleTypeUnitDisplayColumn(this, Column.Units.name(), typeUnit); + SampleTypeUnitDisplayColumn columnInfo = new SampleTypeUnitDisplayColumn(this, Units.name(), typeUnit); columnInfo.setFk(fk); columnInfo.setDescription("The sample type display units associated with the Amount for this sample."); columnInfo.setShownInUpdateView(true); @@ -401,7 +413,7 @@ public StringExpression getURL(ColumnInfo parent) } else { - var 
columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.Units.name())); + var columnInfo = wrapColumn(alias, _rootTable.getColumn(Units.name())); columnInfo.setFk(fk); columnInfo.setDescription("The units associated with the Stored Amount for this sample."); return columnInfo; @@ -409,28 +421,37 @@ public StringExpression getURL(ColumnInfo parent) } case Description -> { - return wrapColumn(alias, _rootTable.getColumn(Column.Description.name())); + return wrapColumn(alias, _rootTable.getColumn(Description.name())); } case SampleSet -> { var columnInfo = wrapColumn(alias, _rootTable.getColumn("CpasType")); - // NOTE: populateColumns() overwrites this with a QueryForeignKey. Can this be removed? - columnInfo.setFk(new LookupForeignKey(getContainerFilter(), null, null, null, null, "LSID", "Name") + columnInfo.setFk(new QueryForeignKey(_userSchema, getContainerFilter(), ExpSchema.SCHEMA_NAME, getContainer(), null, ExpSchema.TableType.SampleSets.name(), "lsid", null) { @Override - public TableInfo getLookupTableInfo() + protected ContainerFilter getLookupContainerFilter() { - ExpSampleTypeTable sampleTypeTable = ExperimentService.get().createSampleTypeTable(ExpSchema.TableType.SampleSets.toString(), _userSchema, getLookupContainerFilter()); - sampleTypeTable.populate(); - return sampleTypeTable; - } - - @Override - public StringExpression getURL(ColumnInfo parent) - { - return super.getURL(parent, true); + // Be sure that we can resolve the sample type if it's defined in a separate container. + // Same as CurrentPlusProjectAndShared but includes SampleSet's container as well. 
+ // Issue 37982: Sample Type: Link to precursor sample type does not resolve correctly if sample has + // parents in current sample type and a sample type in the parent container + Set containers = new HashSet<>(); + if (null != getSampleType()) + containers.add(getSampleType().getContainer()); + containers.add(getContainer()); + if (getContainer().getProject() != null) + containers.add(getContainer().getProject()); + containers.add(ContainerManager.getSharedContainer()); + ContainerFilter cf = new ContainerFilter.CurrentPlusExtras(_userSchema.getContainer(), _userSchema.getUser(), containers); + + if (null != _containerFilter && _containerFilter.getType() != ContainerFilter.Type.Current) + cf = new UnionContainerFilter(_containerFilter, cf); + return cf; } }); + columnInfo.setReadOnly(true); + columnInfo.setUserEditable(false); + columnInfo.setShownInInsertView(false); return columnInfo; } case SourceProtocolLSID -> @@ -460,7 +481,7 @@ public StringExpression getURL(ColumnInfo parent) } case SourceApplicationInput -> { - var col = createEdgeColumn(alias, Column.SourceProtocolApplication, ExpSchema.TableType.MaterialInputs); + var col = createEdgeColumn(alias, SourceProtocolApplication, ExpSchema.TableType.MaterialInputs); col.setDescription("Contains a reference to the MaterialInput row between this ExpMaterial and it's SourceProtocolApplication"); col.setHidden(true); return col; @@ -483,14 +504,17 @@ public StringExpression getURL(ColumnInfo parent) } case RunApplicationOutput -> { - var col = createEdgeColumn(alias, Column.RunApplication, ExpSchema.TableType.MaterialInputs); + var col = createEdgeColumn(alias, RunApplication, ExpSchema.TableType.MaterialInputs); col.setDescription("Contains a reference to the MaterialInput row between this ExpMaterial and it's RunOutputApplication"); return col; } case Run -> { var ret = wrapColumn(alias, _rootTable.getColumn("RunId")); + ret.setFk(getExpSchema().getRunIdForeignKey(getContainerFilter())); 
ret.setReadOnly(true); + ret.setShownInInsertView(false); + ret.setShownInUpdateView(false); return ret; } case RowId -> @@ -552,7 +576,7 @@ public StringExpression getURL(ColumnInfo parent) } case SampleState -> { - boolean statusEnabled = SampleStatusService.get().supportsSampleStatus() && !SampleStatusService.get().getAllProjectStates(getContainer()).isEmpty(); + boolean statusEnabled = isStatusEnabled(getContainer()); var ret = wrapColumn(alias, _rootTable.getColumn(column.name())); ret.setLabel("Status"); ret.setHidden(!statusEnabled); @@ -574,7 +598,10 @@ public StringExpression getURL(ColumnInfo parent) { var ret = wrapColumn(alias, _rootTable.getColumn(AliquotVolume.name())); ret.setLabel(RawAliquotVolume.label()); + ret.setHidden(true); ret.setShownInDetailsView(false); + ret.setShownInInsertView(false); + ret.setShownInUpdateView(false); return ret; } case AliquotVolume -> @@ -598,7 +625,10 @@ public StringExpression getURL(ColumnInfo parent) { var ret = wrapColumn(alias, _rootTable.getColumn(AvailableAliquotVolume.name())); ret.setLabel(RawAvailableAliquotVolume.label()); + ret.setHidden(true); ret.setShownInDetailsView(false); + ret.setShownInInsertView(false); + ret.setShownInUpdateView(false); return ret; } case AvailableAliquotVolume -> @@ -626,9 +656,12 @@ public StringExpression getURL(ColumnInfo parent) } case RawAliquotUnit -> { - var ret = wrapColumn(alias, _rootTable.getColumn("AliquotUnit")); - ret.setShownInDetailsView(false); + var ret = wrapColumn(alias, _rootTable.getColumn(AliquotUnit.name())); ret.setLabel(RawAliquotUnit.label()); + ret.setHidden(true); + ret.setShownInDetailsView(false); + ret.setShownInInsertView(false); + ret.setShownInUpdateView(false); return ret; } case AliquotUnit -> @@ -652,14 +685,14 @@ public StringExpression getURL(ColumnInfo parent) } else { - var ret = wrapColumn(alias, _rootTable.getColumn("AliquotUnit")); + var ret = wrapColumn(alias, _rootTable.getColumn(AliquotUnit.name())); 
ret.setShownInDetailsView(false); return ret; } } case MaterialExpDate -> { - var ret = wrapColumn(alias, _rootTable.getColumn("MaterialExpDate")); + var ret = wrapColumn(alias, _rootTable.getColumn(MaterialExpDate.name())); ret.setLabel("Expiration Date"); ret.setShownInDetailsView(true); ret.setShownInInsertView(true); @@ -673,34 +706,22 @@ public StringExpression getURL(ColumnInfo parent) @Override public MutableColumnInfo createPropertyColumn(String alias) { - var ret = super.createPropertyColumn(alias); - if (_ss != null) - { - final TableInfo t = _ss.getTinfo(); - if (t != null) - { - ret.setFk(new LookupForeignKey() - { - @Override - public TableInfo getLookupTableInfo() - { - return t; - } + var ret = wrapColumn(alias, _rootTable.getColumn(RowId.name())); + + if (_ss != null && _ss.getTinfo() != null) + ret.setFk(new QueryForeignKey.Builder(getUserSchema(), getLookupContainerFilter()).table(_ss.getTinfo()).key(RowId.name()).build()); - @Override - protected ColumnInfo getPkColumn(TableInfo table) - { - return t.getColumn("lsid"); - } - }); - } - } ret.setIsUnselectable(true); ret.setDescription("A holder for any custom fields associated with this sample"); ret.setHidden(true); return ret; } + private static boolean isStatusEnabled(Container c) + { + return SampleStatusService.get().supportsSampleStatus() && !SampleStatusService.get().getAllProjectStates(c).isEmpty(); + } + private Unit getSampleTypeUnit() { Unit typeUnit = null; @@ -748,15 +769,17 @@ public ExpSampleType getSampleType() protected void populateColumns() { var st = getSampleType(); - var rowIdCol = addColumn(Column.RowId); - addColumn(Column.MaterialSourceId); - addColumn(Column.SourceProtocolApplication); - addColumn(Column.SourceApplicationInput); - addColumn(Column.RunApplication); - addColumn(Column.RunApplicationOutput); - addColumn(Column.SourceProtocolLSID); - - var nameCol = addColumn(Column.Name); + var rowIdCol = addColumn(RowId); + addColumn(MaterialSourceId); + 
addColumn(SourceProtocolApplication); + addColumn(SourceApplicationInput); + addColumn(RunApplication); + addColumn(RunApplicationOutput); + addColumn(SourceProtocolLSID); + + List defaultCols = new ArrayList<>(); + var nameCol = addColumn(Name); + defaultCols.add(Name.fieldKey()); if (st != null && st.hasNameAsIdCol()) { // Show the Name field but don't mark is as required when using name expressions @@ -780,104 +803,35 @@ protected void populateColumns() nameCol.setShownInInsertView(false); } - addColumn(Column.Alias); - addColumn(Column.Description); - - var typeColumnInfo = addColumn(Column.SampleSet); - typeColumnInfo.setFk(new QueryForeignKey(_userSchema, getContainerFilter(), ExpSchema.SCHEMA_NAME, getContainer(), null, ExpSchema.TableType.SampleSets.name(), "lsid", null) - { - @Override - protected ContainerFilter getLookupContainerFilter() - { - // Be sure that we can resolve the sample type if it's defined in a separate container. - // Same as CurrentPlusProjectAndShared but includes SampleSet's container as well. 
- // Issue 37982: Sample Type: Link to precursor sample type does not resolve correctly if sample has - // parents in current sample type and a sample type in the parent container - Set containers = new HashSet<>(); - if (null != st) - containers.add(st.getContainer()); - containers.add(getContainer()); - if (getContainer().getProject() != null) - containers.add(getContainer().getProject()); - containers.add(ContainerManager.getSharedContainer()); - ContainerFilter cf = new ContainerFilter.CurrentPlusExtras(_userSchema.getContainer(), _userSchema.getUser(), containers); - - if (null != _containerFilter && _containerFilter.getType() != ContainerFilter.Type.Current) - cf = new UnionContainerFilter(_containerFilter, cf); - return cf; - } - }); - - typeColumnInfo.setReadOnly(true); - typeColumnInfo.setUserEditable(false); - typeColumnInfo.setShownInInsertView(false); - - addColumn(Column.MaterialExpDate); - addContainerColumn(Column.Folder, null); - var runCol = addColumn(Column.Run); - runCol.setFk(new ExpSchema(_userSchema.getUser(), getContainer()).getRunIdForeignKey(getContainerFilter())); - runCol.setShownInInsertView(false); - runCol.setShownInUpdateView(false); - - var colLSID = addColumn(Column.LSID); - colLSID.setHidden(true); - colLSID.setReadOnly(true); - colLSID.setUserEditable(false); - colLSID.setShownInInsertView(false); - colLSID.setShownInDetailsView(false); - colLSID.setShownInUpdateView(false); - - var rootRowId = addColumn(Column.RootMaterialRowId); - rootRowId.setHidden(true); - rootRowId.setReadOnly(true); - rootRowId.setUserEditable(false); - rootRowId.setShownInInsertView(false); - rootRowId.setShownInDetailsView(false); - rootRowId.setShownInUpdateView(false); - - var aliquotParentLSID = addColumn(Column.AliquotedFromLSID); - aliquotParentLSID.setHidden(true); - aliquotParentLSID.setReadOnly(true); - aliquotParentLSID.setUserEditable(false); - aliquotParentLSID.setShownInInsertView(false); - aliquotParentLSID.setShownInDetailsView(false); - 
aliquotParentLSID.setShownInUpdateView(false); - - addColumn(Column.IsAliquot); - addColumn(Column.Created); - addColumn(Column.CreatedBy); - addColumn(Column.Modified); - addColumn(Column.ModifiedBy); - - List defaultCols = new ArrayList<>(); - defaultCols.add(FieldKey.fromParts(Column.Name)); - defaultCols.add(FieldKey.fromParts(Column.MaterialExpDate)); - boolean hasProductFolders = getContainer().hasProductFolders(); - if (hasProductFolders) - defaultCols.add(FieldKey.fromParts(Column.Folder)); - defaultCols.add(FieldKey.fromParts(Column.Run)); - + addColumn(Alias); + addColumn(Description); + addColumn(SampleSet); + addColumn(MaterialExpDate); + defaultCols.add(MaterialExpDate.fieldKey()); + addContainerColumn(Folder, null); + if (getContainer().hasProductFolders()) + defaultCols.add(Folder.fieldKey()); + addColumn(Run); + defaultCols.add(Run.fieldKey()); + addColumn(LSID); + addColumn(RootMaterialRowId); + addColumn(AliquotedFromLSID); + addColumn(IsAliquot); + addColumn(Created); + addColumn(CreatedBy); + addColumn(Modified); + addColumn(ModifiedBy); if (st == null) - defaultCols.add(FieldKey.fromParts(Column.SampleSet)); - - addColumn(Column.Flag); - - var statusColInfo = addColumn(Column.SampleState); - boolean statusEnabled = SampleStatusService.get().supportsSampleStatus() && !SampleStatusService.get().getAllProjectStates(getContainer()).isEmpty(); - statusColInfo.setShownInDetailsView(statusEnabled); - statusColInfo.setShownInInsertView(statusEnabled); - statusColInfo.setShownInUpdateView(statusEnabled); - statusColInfo.setHidden(!statusEnabled); - statusColInfo.setRemapMissingBehavior(SimpleTranslator.RemapMissingBehavior.Error); - if (statusEnabled) - defaultCols.add(FieldKey.fromParts(Column.SampleState)); - statusColInfo.setFk(new QueryForeignKey.Builder(getUserSchema(), getSampleStatusLookupContainerFilter()) - .schema(getExpSchema()).table(ExpSchema.TableType.SampleStatus).display("Label")); + defaultCols.add(SampleSet.fieldKey()); + 
addColumn(Flag); + addColumn(SampleState); + if (isStatusEnabled(getContainer())) + defaultCols.add(SampleState.fieldKey()); // TODO is this a real Domain??? if (st != null && !"urn:lsid:labkey.com:SampleSource:Default".equals(st.getDomain().getTypeURI())) { - defaultCols.add(FieldKey.fromParts(Column.Flag)); + defaultCols.add(Flag.fieldKey()); addSampleTypeColumns(st, defaultCols); setName(_ss.getName()); @@ -890,132 +844,23 @@ protected ContainerFilter getLookupContainerFilter() List calculatedFieldKeys = DomainUtil.getCalculatedFieldsForDefaultView(this); defaultCols.addAll(calculatedFieldKeys); - addColumn(Column.AliquotCount); - addColumn(Column.AliquotVolume); + addColumn(AliquotCount); + addColumn(AliquotVolume); addColumn(AliquotUnit); - addColumn(Column.AvailableAliquotCount); - addColumn(Column.AvailableAliquotVolume); - - addColumn(Column.StoredAmount); - defaultCols.add(FieldKey.fromParts(Column.StoredAmount)); + addColumn(AvailableAliquotCount); + addColumn(AvailableAliquotVolume); - addColumn(Column.Units); - defaultCols.add(FieldKey.fromParts(Column.Units)); + addColumn(StoredAmount); + defaultCols.add(StoredAmount.fieldKey()); - var rawAmountColumn = addColumn(Column.RawAmount); - rawAmountColumn.setDisplayColumnFactory(new DisplayColumnFactory() - { - @Override - public DisplayColumn createRenderer(ColumnInfo colInfo) - { - return new DataColumn(colInfo) - { - @Override - public void addQueryFieldKeys(Set keys) - { - super.addQueryFieldKeys(keys); - keys.add(FieldKey.fromParts(Column.StoredAmount)); - - } - }; - } - }); - rawAmountColumn.setHidden(true); - rawAmountColumn.setShownInDetailsView(false); - rawAmountColumn.setShownInInsertView(false); - rawAmountColumn.setShownInUpdateView(false); + addColumn(Units); + defaultCols.add(Units.fieldKey()); - var rawUnitsColumn = addColumn(Column.RawUnits); - rawUnitsColumn.setDisplayColumnFactory(new DisplayColumnFactory() - { - @Override - public DisplayColumn createRenderer(ColumnInfo colInfo) - { - 
return new DataColumn(colInfo) - { - @Override - public void addQueryFieldKeys(Set keys) - { - super.addQueryFieldKeys(keys); - keys.add(FieldKey.fromParts(Column.Units)); - - } - }; - } - }); - rawUnitsColumn.setHidden(true); - rawUnitsColumn.setShownInDetailsView(false); - rawUnitsColumn.setShownInInsertView(false); - rawUnitsColumn.setShownInUpdateView(false); - - var rawAliquotVolumeColumn = addColumn(RawAliquotVolume); - rawAliquotVolumeColumn.setDisplayColumnFactory(new DisplayColumnFactory() - { - @Override - public DisplayColumn createRenderer(ColumnInfo colInfo) - { - return new DataColumn(colInfo) - { - @Override - public void addQueryFieldKeys(Set keys) - { - super.addQueryFieldKeys(keys); - keys.add(AliquotVolume.fieldKey()); - - } - }; - } - }); - rawAliquotVolumeColumn.setHidden(true); - rawAliquotVolumeColumn.setShownInDetailsView(false); - rawAliquotVolumeColumn.setShownInInsertView(false); - rawAliquotVolumeColumn.setShownInUpdateView(false); - - var rawAvailableAliquotVolumeColumn = addColumn(RawAvailableAliquotVolume); - rawAvailableAliquotVolumeColumn.setDisplayColumnFactory(new DisplayColumnFactory() - { - @Override - public DisplayColumn createRenderer(ColumnInfo colInfo) - { - return new DataColumn(colInfo) - { - @Override - public void addQueryFieldKeys(Set keys) - { - super.addQueryFieldKeys(keys); - keys.add(AvailableAliquotVolume.fieldKey()); - - } - }; - } - }); - rawAvailableAliquotVolumeColumn.setHidden(true); - rawAvailableAliquotVolumeColumn.setShownInDetailsView(false); - rawAvailableAliquotVolumeColumn.setShownInInsertView(false); - rawAvailableAliquotVolumeColumn.setShownInUpdateView(false); - - var rawAliquotUnitColumn = addColumn(RawAliquotUnit); - rawAliquotUnitColumn.setDisplayColumnFactory(new DisplayColumnFactory() - { - @Override - public DisplayColumn createRenderer(ColumnInfo colInfo) - { - return new DataColumn(colInfo) - { - @Override - public void addQueryFieldKeys(Set keys) - { - super.addQueryFieldKeys(keys); - 
keys.add(AliquotUnit.fieldKey()); - - } - }; - } - }); - rawAliquotUnitColumn.setHidden(true); - rawAliquotUnitColumn.setShownInDetailsView(false); - rawAliquotUnitColumn.setShownInInsertView(false); - rawAliquotUnitColumn.setShownInUpdateView(false); + addColumn(RawAmount).getRenderer().addQueryFieldKeys(new HashSet<>(Set.of(StoredAmount.fieldKey()))); + addColumn(RawUnits).getRenderer().addQueryFieldKeys(new HashSet<>(Set.of(Units.fieldKey()))); + addColumn(RawAliquotVolume).getRenderer().addQueryFieldKeys(new HashSet<>(Set.of(AliquotVolume.fieldKey()))); + addColumn(RawAvailableAliquotVolume).getRenderer().addQueryFieldKeys(new HashSet<>(Set.of(AvailableAliquotVolume.fieldKey()))); + addColumn(RawAliquotUnit).getRenderer().addQueryFieldKeys(new HashSet<>(Set.of(AliquotUnit.fieldKey()))); if (InventoryService.get() != null && (st == null || !st.isMedia())) defaultCols.addAll(InventoryService.get().addInventoryStatusColumns(st == null ? null : st.getMetricUnit(), this, getContainer(), _userSchema.getUser())); @@ -1032,7 +877,7 @@ public void addQueryFieldKeys(Set keys) if (plateUserSchema != null && plateUserSchema.getTable("Well") != null) { - String rowIdField = ExprColumn.STR_TABLE_ALIAS + "." + Column.RowId.name(); + String rowIdField = ExprColumn.STR_TABLE_ALIAS + "." 
+ RowId.name(); SQLFragment existsSubquery = new SQLFragment() .append("SELECT 1 FROM ") .append(plateUserSchema.getTable("Well"), "well") @@ -1049,7 +894,7 @@ public void addQueryFieldKeys(Set keys) { sql = new SQLFragment("(SELECT NULL)"); } - var col = new ExprColumn(this, Column.IsPlated.name(), sql, JdbcType.VARCHAR); + var col = new ExprColumn(this, IsPlated.name(), sql, JdbcType.VARCHAR); col.setDescription("Whether the sample that has been plated, if plating is supported."); col.setUserEditable(false); col.setReadOnly(true); @@ -1057,18 +902,18 @@ public void addQueryFieldKeys(Set keys) col.setShownInInsertView(false); col.setShownInUpdateView(false); if (plateUserSchema != null) - col.setURL(DetailsURL.fromString("plate-isPlated.api?sampleId=${" + Column.RowId.name() + "}")); + col.setURL(DetailsURL.fromString("plate-isPlated.api?sampleId=${" + RowId.name() + "}")); addColumn(col); addVocabularyDomains(); - addColumn(Column.Properties); + addColumn(Properties); - var colInputs = addColumn(Column.Inputs); - addMethod("Inputs", new LineageMethod(colInputs, true), Set.of(colInputs.getFieldKey())); + var colInputs = addColumn(Inputs); + addMethod(Inputs.name(), new LineageMethod(colInputs, true), Set.of(colInputs.getFieldKey())); - var colOutputs = addColumn(Column.Outputs); - addMethod("Outputs", new LineageMethod(colOutputs, false), Set.of(colOutputs.getFieldKey())); + var colOutputs = addColumn(Outputs); + addMethod(Outputs.name(), new LineageMethod(colOutputs, false), Set.of(colOutputs.getFieldKey())); addExpObjectMethod(); @@ -1093,11 +938,11 @@ public void addQueryFieldKeys(Set keys) setUpdateURL(LINK_DISABLER); } - setTitleColumn(Column.Name.toString()); + setTitleColumn(Name.toString()); setDefaultVisibleColumns(defaultCols); - MutableColumnInfo lineageLookup = ClosureQueryHelper.createAncestorLookupColumnInfo("Ancestors", this, _rootTable.getColumn("rowid"), _ss, true); + MutableColumnInfo lineageLookup = 
ClosureQueryHelper.createAncestorLookupColumnInfo("Ancestors", this, _rootTable.getColumn(RowId.name()), _ss, true); addColumn(lineageLookup); } @@ -1123,7 +968,6 @@ public Domain getDomain(boolean forUpdate) return _ss == null ? null : _ss.getDomain(forUpdate); } - public static String appendNameExpressionDescription(String currentDescription, String nameExpression, String nameExpressionPreview) { if (nameExpression == null) @@ -1158,11 +1002,10 @@ private void addSampleTypeColumns(ExpSampleType st, List visibleColumn UserSchema schema = getUserSchema(); Domain domain = st.getDomain(); - ColumnInfo rowIdColumn = getColumn(Column.RowId); - ColumnInfo lsidColumn = getColumn(Column.LSID); - ColumnInfo nameColumn = getColumn(Column.Name); + ColumnInfo rowIdColumn = getColumn(RowId); + ColumnInfo nameColumn = getColumn(Name); - visibleColumns.remove(FieldKey.fromParts(Column.Run.name())); + visibleColumns.remove(Run.fieldKey()); // When not using name expressions, mark the ID columns as required. // NOTE: If not explicitly set, the first domain property will be chosen as the ID column. 
@@ -1182,7 +1025,6 @@ private void addSampleTypeColumns(ExpSampleType st, List visibleColumn if ( rowIdColumn.getFieldKey().equals(dbColumn.getFieldKey()) || - lsidColumn.getFieldKey().equals(dbColumn.getFieldKey()) || nameColumn.getFieldKey().equals(dbColumn.getFieldKey()) ) { @@ -1205,12 +1047,19 @@ private void addSampleTypeColumns(ExpSampleType st, List visibleColumn if (null != dp) { PropertyColumn.copyAttributes(schema.getUser(), propColumn, dp.getPropertyDescriptor(), schema.getContainer(), - SchemaKey.fromParts("samples"), st.getName(), FieldKey.fromParts("RowId"), null, getLookupContainerFilter()); + SamplesSchema.SCHEMA_SAMPLES, st.getName(), RowId.fieldKey(), null, getLookupContainerFilter()); if (idCols.contains(dp)) { propColumn.setNullable(false); - propColumn.setDisplayColumnFactory(new IdColumnRendererFactory()); + propColumn.setDisplayColumnFactory(c -> new DataColumn(c) + { + @Override + protected boolean isDisabledInput(RenderContext ctx) + { + return !super.isDisabledInput() && ctx.getMode() != DataRegion.MODE_INSERT; + } + }); } // Issue 38341: domain designer advanced settings 'show in default view' setting is not respected @@ -1265,9 +1114,9 @@ private void addSampleTypeColumns(ExpSampleType st, List visibleColumn if (selectedColumns.contains(new FieldKey(null, StoredAmount))) selectedColumns.add(new FieldKey(null, Units)); if (selectedColumns.contains(new FieldKey(null, ExpMaterial.ALIQUOTED_FROM_INPUT))) - selectedColumns.add(new FieldKey(null, Column.AliquotedFromLSID.name())); - if (selectedColumns.contains(new FieldKey(null, Column.IsAliquot.name()))) - selectedColumns.add(new FieldKey(null, Column.RootMaterialRowId.name())); + selectedColumns.add(new FieldKey(null, AliquotedFromLSID.name())); + if (selectedColumns.contains(new FieldKey(null, IsAliquot.name()))) + selectedColumns.add(new FieldKey(null, RootMaterialRowId.name())); if (selectedColumns.contains(new FieldKey(null, AliquotVolume.name())) || selectedColumns.contains(new 
FieldKey(null, AvailableAliquotVolume.name()))) selectedColumns.add(new FieldKey(null, AliquotUnit.name())); selectedColumns.addAll(wrappedFieldKeys); @@ -1631,9 +1480,8 @@ private SQLFragment getJoinSQL(Set selectedColumns) { TableInfo provisioned = null == _ss ? null : _ss.getTinfo(); Set provisionedCols = new CaseInsensitiveHashSet(provisioned != null ? provisioned.getColumnNameSet() : Collections.emptySet()); - provisionedCols.remove(Column.RowId.name()); - provisionedCols.remove(Column.LSID.name()); - provisionedCols.remove(Column.Name.name()); + provisionedCols.remove(RowId.name()); + provisionedCols.remove(Name.name()); boolean hasProvisionedColumns = containsProvisionedColumns(selectedColumns, provisionedCols); boolean hasSampleColumns = false; @@ -1661,9 +1509,8 @@ private SQLFragment getJoinSQL(Set selectedColumns) { // don't select twice if ( - Column.RowId.name().equalsIgnoreCase(propertyColumn.getColumnName()) || - Column.LSID.name().equalsIgnoreCase(propertyColumn.getColumnName()) || - Column.Name.name().equalsIgnoreCase(propertyColumn.getColumnName()) + RowId.name().equalsIgnoreCase(propertyColumn.getColumnName()) || + Name.name().equalsIgnoreCase(propertyColumn.getColumnName()) ) { continue; @@ -1706,29 +1553,6 @@ else if (rootField) return sql; } - private class IdColumnRendererFactory implements DisplayColumnFactory - { - @Override - public DisplayColumn createRenderer(ColumnInfo colInfo) - { - return new IdColumnRenderer(colInfo); - } - } - - private static class IdColumnRenderer extends DataColumn - { - public IdColumnRenderer(ColumnInfo col) - { - super(col); - } - - @Override - protected boolean isDisabledInput(RenderContext ctx) - { - return !super.isDisabledInput() && ctx.getMode() != DataRegion.MODE_INSERT; - } - } - private static class SampleTypeAmountDisplayColumn extends ExprColumn { public SampleTypeAmountDisplayColumn(TableInfo parent, String amountFieldName, String unitFieldName, String label, Set importAliases, Unit typeUnit) @@ 
-1794,22 +1618,20 @@ public List getUniqueIndices() { // Rewrite the "idx_material_ak" unique index over "Folder", "SampleSet", "Name" to just "Name" // Issue 25397: Don't include the "idx_material_ak" index if the "Name" column hasn't been added to the table. - // Some FKs to ExpMaterialTable don't include the "Name" column (e.g. NabBaseTable.Specimen) + // Some FKs to ExpMaterialTable don't include the "Name" column (e.g., NabBaseTable.Specimen) String indexName = "idx_material_ak"; List ret = new ArrayList<>(super.getUniqueIndices()); if (getColumn("Name") != null) ret.add(new IndexDefinition(indexName, IndexType.Unique, Arrays.asList(getColumn("Name")), null)); else - ret.removeIf( def -> def.name().equals(indexName)); + ret.removeIf(def -> def.name().equals(indexName)); return Collections.unmodifiableList(ret); } - // // UpdatableTableInfo // - @Override public @Nullable Long getOwnerObjectId() { @@ -1839,17 +1661,50 @@ public CaseInsensitiveHashMap remapSchemaColumns() @Override public Set getAltMergeKeys(DataIteratorContext context) { - if (context.getInsertOption().updateOnly && context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate)) - return getAltKeysForUpdate(); + return MATERIAL_ALT_KEYS; + } - return MATERIAL_ALT_MERGE_KEYS; + @Override + public @NotNull Set getAltKeysForUpdate() + { + return MATERIAL_ALT_KEYS; + } + + @Override + public @Nullable Set getExistingRecordKeyColumnNames(DataIteratorContext context, Map colNameMap) + { + Set keyColumnNames = new CaseInsensitiveHashSet(); + + if (context.getInsertOption().allowUpdate) + { + if (context.getInsertOption().updateOnly) + { + if (colNameMap.containsKey(RowId.name())) + keyColumnNames.add(RowId.name()); + else + { + for (String altKey : getAltKeysForUpdate()) + { + if (colNameMap.containsKey(altKey)) + keyColumnNames.add(altKey); + } + } + } + else + { + Set altMergeKeys = getAltMergeKeys(context); + if (altMergeKeys != null) + keyColumnNames.addAll(altMergeKeys); + } + 
} + + return keyColumnNames.isEmpty() ? null : keyColumnNames; } - @NotNull @Override - public Set getAltKeysForUpdate() + public @Nullable Set getExistingRecordSharedKeyColumnNames() { - return MATERIAL_ALT_UPDATE_KEYS; + return CaseInsensitiveHashSet.of(MaterialSourceId.name()); } @Override @@ -1876,46 +1731,42 @@ public DataIteratorBuilder persistRows(DataIteratorBuilder data, DataIteratorCon // The specimens sample type doesn't have a properties table if (propertiesTable == null) - { return data; - } - - long sampleTypeObjectId = requireNonNull(getOwnerObjectId()); - // TODO: subclass PersistDataIteratorBuilder to index Materials! not DataClass! try { - var persist = new ExpDataIterators.PersistDataIteratorBuilder(data, this, propertiesTable, _ss, getUserSchema().getContainer(), getUserSchema().getUser(), _ss.getImportAliasesIncludingAliquot(), sampleTypeObjectId) - .setFileLinkDirectory(SAMPLETYPE_FILE_DIRECTORY); - ExperimentServiceImpl experimentServiceImpl = ExperimentServiceImpl.get(); - SearchService.TaskIndexingQueue queue = SearchService.get().defaultTask().getQueue(getContainer(), SearchService.PRIORITY.modified); - - persist.setIndexFunction(searchIndexDataKeys -> propertiesTable.getSchema().getScope().addCommitTask(() -> - { - List lsids = searchIndexDataKeys.lsids(); - List orderedRowIds = searchIndexDataKeys.orderedRowIds(); - - // Issue 51263: order by RowId to reduce deadlock - ListUtils.partition(orderedRowIds, 100).forEach(sublist -> - queue.addRunnable((q) -> - { - for (ExpMaterialImpl expMaterial : experimentServiceImpl.getExpMaterials(sublist)) - expMaterial.index(q, this); - }) - ); - - ListUtils.partition(lsids, 100).forEach(sublist -> - queue.addRunnable((q) -> - { - for (ExpMaterialImpl expMaterial : experimentServiceImpl.getExpMaterialsByLsid(sublist)) - expMaterial.index(q, this); - }) - ); - }, DbScope.CommitTaskOption.POSTCOMMIT) - ); - - DataIteratorBuilder builder = LoggingDataIterator.wrap(persist); - return 
LoggingDataIterator.wrap(new AliasDataIteratorBuilder(builder, getUserSchema().getContainer(), getUserSchema().getUser(), ExperimentService.get().getTinfoMaterialAliasMap(), _ss, true)); + final Container container = getContainer(); + final User user = getUserSchema().getUser(); + ExperimentServiceImpl expService = ExperimentServiceImpl.get(); + SearchService.TaskIndexingQueue queue = SearchService.get().defaultTask().getQueue(container, SearchService.PRIORITY.modified); + + DataIteratorBuilder dib = new ExpDataIterators.PersistDataIteratorBuilder(data, this, propertiesTable, _ss, container, user, _ss.getImportAliasesIncludingAliquot()) + .setFileLinkDirectory(SAMPLE_TYPE_FILE_DIRECTORY_NAME) + .setIndexFunction(searchIndexDataKeys -> propertiesTable.getSchema().getScope().addCommitTask(() -> { + List lsids = searchIndexDataKeys.lsids(); + List orderedRowIds = searchIndexDataKeys.orderedRowIds(); + + // Issue 51263: order by RowId to reduce deadlock + ListUtils.partition(orderedRowIds, 100).forEach(sublist -> + queue.addRunnable((q) -> + { + for (ExpMaterialImpl expMaterial : expService.getExpMaterials(sublist)) + expMaterial.index(q, this); + }) + ); + + ListUtils.partition(lsids, 100).forEach(sublist -> + queue.addRunnable((q) -> + { + for (ExpMaterialImpl expMaterial : expService.getExpMaterialsByLsid(sublist)) + expMaterial.index(q, this); + }) + ); + }, DbScope.CommitTaskOption.POSTCOMMIT) + ); + + dib = LoggingDataIterator.wrap(dib); + return LoggingDataIterator.wrap(new AliasDataIteratorBuilder(dib, container, user, expService.getTinfoMaterialAliasMap(), _ss, true)); } catch (IOException e) { @@ -1935,7 +1786,7 @@ public AuditBehaviorType getDefaultAuditBehavior() { var set = new CaseInsensitiveHashSet(); set.addAll(TableInfo.defaultExcludedDetailedUpdateAuditFields); - set.addAll(ExpDataIterators.NOT_FOR_UPDATE); + set.addAll(ExpDataIterators.MATERIAL_NOT_FOR_UPDATE); // We don't want the inventory columns to show up in the sample timeline audit record; // 
they are captured in their own audit record. set.addAll(InventoryService.InventoryStatusColumn.names()); @@ -1992,10 +1843,11 @@ public void overlayMetadata(String tableName, UserSchema schema, Collection <%@ page import="org.labkey.api.gwt.client.model.GWTPropertyDescriptor" %> <%@ page import="org.labkey.api.query.BatchValidationException" %> -<%@ page import="org.labkey.api.query.DefaultSchema" %> <%@ page import="org.labkey.api.query.FieldKey" %> -<%@ page import="org.labkey.api.query.QuerySchema" %> <%@ page import="org.labkey.api.query.QueryService" %> <%@ page import="org.labkey.api.query.QueryUpdateService" %> <%@ page import="static org.hamcrest.CoreMatchers.hasItems" %> <%@ page import="static org.junit.Assert.*" %> -<%@ page import="org.labkey.api.query.SchemaKey" %> <%@ page import="org.labkey.api.query.UserSchema" %> <%@ page import="org.labkey.api.reader.DataLoader" %> <%@ page import="org.labkey.api.reader.TabLoader" %> @@ -73,7 +70,6 @@ <%@ page import="java.io.StringBufferInputStream" %> <%@ page import="java.util.ArrayList" %> <%@ page import="java.util.Arrays" %> -<%@ page import="java.util.Collection" %> <%@ page import="static org.hamcrest.CoreMatchers.containsString" %> <%@ page import="java.util.Collections" %> <%@ page import="java.util.HashMap" %> @@ -308,7 +304,7 @@ public void idColsSet_nameExpressionNull_hasUnusedNameProperty() throws Exceptio props.add(new GWTPropertyDescriptor("name", "string")); props.add(new GWTPropertyDescriptor("age", "int")); - final ExpSampleType st = SampleTypeService.get().createSampleType(c, user, + SampleTypeService.get().createSampleType(c, user, "Samples", null, props, Collections.emptyList(), 1, -1, -1, -1, null, null); fail("Expected exception"); @@ -340,7 +336,7 @@ public void idColsSet_nameExpressionNull_hasNameProperty() throws Exception rows.add(CaseInsensitiveHashMap.of("name", "bob", "prop", "blue", "age", 10)); BatchValidationException errors = new BatchValidationException(); - QueryUpdateService 
svc = getSampleTypeUpdateService("Samples"); + QueryUpdateService svc = getSampleTypeUpdateService(st.getName()); svc.insertRows(user, c, rows, errors, null, null); if (errors.hasErrors()) throw errors; @@ -369,7 +365,7 @@ public void idColsSet_nameExpression_hasUnusedNameProperty() throws Exception props.add(new GWTPropertyDescriptor("name", "string")); props.add(new GWTPropertyDescriptor("age", "int")); - final ExpSampleType st = SampleTypeService.get().createSampleType(c, user, + SampleTypeService.get().createSampleType(c, user, "Samples", null, props, Collections.emptyList(), 1, -1, -1, -1, "S-${name}.${age}", null); fail("Expected exception"); @@ -507,27 +503,109 @@ public void testNameExpression() throws Exception @Test public void testAliases() throws Exception { - // setup List props = new ArrayList<>(); props.add(new GWTPropertyDescriptor("name", "string")); props.add(new GWTPropertyDescriptor("age", "int")); - final ExpSampleType st = createSampleType("Samples", props, null); - List> rows = new ArrayList<>(); - Map row = new CaseInsensitiveHashMap<>(); - row.put("name", "boo"); - row.put("age", 20); - row.put("alias", "a,b,c"); - rows.add(row); + ExpMaterial fooSample; + ExpMaterial booSample; + List> rows; - insertSampleRows("Samples", rows); + // Insert + { + rows = new ArrayList<>(); + rows.add(CaseInsensitiveHashMap.of("name", "foo", "age", 19, "alias", "youth, teen, minor", "flag", "new zealand")); + rows.add(CaseInsensitiveHashMap.of("name", "boo", "age", 21, "alias", "elder, adult, major?", "flag", "australia")); - ExpMaterial m = st.getSample(c, "boo"); - Collection aliases = m.getAliases(); - assertThat(aliases, hasItems("a", "b", "c")); -} + insertSampleRows(st.getName(), rows); + + fooSample = st.getSample(c, "foo"); + assertThat(fooSample.getAliases(), hasItems("youth", "teen", "minor")); + assertEquals("new zealand", fooSample.getComment()); + + booSample = st.getSample(c, "boo"); + assertThat(booSample.getAliases(), hasItems("elder", 
"adult", "major?")); + assertEquals("australia", booSample.getComment()); + } + + // Update, keyed by name + { + rows = new ArrayList<>(); + rows.add(CaseInsensitiveHashMap.of("name", fooSample.getName(), "alias", "gerald, r, ford", "flag", "kenya")); + rows.add(CaseInsensitiveHashMap.of("name", booSample.getName(), "alias", "dwight, d, eisenhower", "flag", "uganda")); + + updateSampleRows(st.getName(), rows); + + fooSample = st.getSample(c, fooSample.getName()); + assertThat(fooSample.getAliases(), hasItems("gerald", "r", "ford")); + assertEquals("kenya", fooSample.getComment()); + + booSample = st.getSample(c, "boo"); + assertThat(booSample.getAliases(), hasItems("dwight", "d", "eisenhower")); + assertEquals("uganda", booSample.getComment()); + } + + // Update, keyed by rowId + { + rows = new ArrayList<>(); + rows.add(CaseInsensitiveHashMap.of("Row Id", fooSample.getRowId(), "alias", "ken, griffey", "flag", "norway")); + rows.add(CaseInsensitiveHashMap.of("Row Id", booSample.getRowId(), "alias", "edgar, martinez", "flag", "sweden")); + + updateSampleRows(st.getName(), rows); + + fooSample = st.getSample(c, fooSample.getName()); + assertThat(fooSample.getAliases(), hasItems("ken", "griffey")); + assertEquals("norway", fooSample.getComment()); + + booSample = st.getSample(c, booSample.getName()); + assertThat(booSample.getAliases(), hasItems("edgar", "martinez")); + assertEquals("sweden", booSample.getComment()); + } + + // Update with different row shapes + { + rows = new ArrayList<>(); + rows.add(CaseInsensitiveHashMap.of("rowId", fooSample.getRowId(), "description", "east coast destination", "alias", "martha's, vineyard", "flag", "japan")); + rows.add(CaseInsensitiveHashMap.of("name", booSample.getName(), "age", 1_000_000, "alias", "cannon, beach", "flag", "fiji")); + + updateSampleRows(st.getName(), rows); + fooSample = st.getSample(c, fooSample.getName()); + assertThat(fooSample.getAliases(), hasItems("martha's", "vineyard")); + assertEquals("japan", 
fooSample.getComment()); + + booSample = st.getSample(c, booSample.getName()); + assertThat(booSample.getAliases(), hasItems("cannon", "beach")); + assertEquals("fiji", booSample.getComment()); + } + + // Merge + { + rows = new ArrayList<>(); + rows.add(CaseInsensitiveHashMap.of("name", fooSample.getName(), "age", 40, "alias", "son, family, father", "flag", "canada")); + rows.add(CaseInsensitiveHashMap.of("name", booSample.getName(), "age", 80, "alias", "grandma, family, mother", "flag", "america")); + rows.add(CaseInsensitiveHashMap.of("name", "moo", "age", 12, "alias", "cow, bovine", "flag", "mexico")); + + mergeSampleRows(st.getName(), rows); + + var expectedRowId = fooSample.getRowId(); + fooSample = st.getSample(c, fooSample.getName()); + assertEquals(expectedRowId, fooSample.getRowId()); + assertThat(fooSample.getAliases(), hasItems("son", "family", "father")); + assertEquals("canada", fooSample.getComment()); + + expectedRowId = booSample.getRowId(); + booSample = st.getSample(c, booSample.getName()); + assertEquals(expectedRowId, booSample.getRowId()); + assertThat(booSample.getAliases(), hasItems("grandma", "family", "mother")); + assertEquals("america", booSample.getComment()); + + ExpMaterial mooSample = st.getSample(c, "moo"); + assertThat(mooSample.getAliases(), hasItems("cow", "bovine")); + assertEquals("mexico", mooSample.getComment()); + } +} // Issue 33682: Calling insertRows on SampleType with empty values will not insert new samples @Test @@ -545,14 +623,8 @@ public void testBlankRows() throws Exception List allSamples = st.getSamples(c); assertTrue("Expected no samples", allSamples.isEmpty()); - // // insert via insertRows -- blank rows should be preserved - // - - UserSchema schema = QueryService.get().getUserSchema(user, c, SchemaKey.fromParts("Samples")); - TableInfo table = schema.getTable("Samples"); - QueryUpdateService svc = table.getUpdateService(); - assertNotNull(svc); + QueryUpdateService svc = 
getSampleTypeUpdateService(st.getName()); // insert 3 rows with no values List> rows = new ArrayList<>(); @@ -574,10 +646,7 @@ public void testBlankRows() throws Exception assertEquals("Expected 3 total samples", 3, allSamples.size()); assertEquals(0, allSamples.get(0).getAliquotCount()); - // // insert as if we pasted a tsv in the "upload samples" page -- blank rows should be skipped - // - // data has three lines, one blank. expect to insert only two samples String dataTxt = "age\n" + @@ -596,14 +665,16 @@ public void testBlankRows() throws Exception assertEquals(0, insertedRows.get(0).get("AliquotCount")); ExpMaterial material1 = ExperimentService.get().getExpMaterial(asLong(insertedRows.get(0).get("rowid"))); + assertNotNull(material1); Map map = material1.getPropertyValues(); assertEquals("Expected to only have 'age' property, got: " + map, 1, map.size()); - Integer age1 = (Integer)material1.getPropertyValues().values().iterator().next(); + Integer age1 = asInteger(material1.getPropertyValues().values().iterator().next()); assertNotNull(age1); assertEquals("Expected to insert age of 20, got: " + age1, 20, age1.intValue()); - ExpMaterial material2 = ExperimentService.get().getExpMaterial((Integer)insertedRows.get(1).get("rowid")); + ExpMaterial material2 = ExperimentService.get().getExpMaterial(asLong(insertedRows.get(1).get("rowid"))); + assertNotNull(material2); Integer age2 = asInteger(material2.getPropertyValues().values().iterator().next()); assertNotNull(age2); assertEquals("Expected to insert age of 30, got: " + age2, 30, age2.intValue()); @@ -618,7 +689,8 @@ public void testBlankRows() throws Exception updated.put("age", age1 + 1); svc.updateRows(user, c, Collections.singletonList(updated), null, errors, null, null); assertFalse(errors.hasErrors()); - var result = new TableSelector(table, TableSelector.ALL_COLUMNS, new SimpleFilter("lsid", material1.getLSID()), null).getMap(); + TableInfo table = getSampleTypeTable(st.getName()); + var result = new 
TableSelector(table, new SimpleFilter("lsid", material1.getLSID()), null).getMap(); assertEquals(21, asInteger(result.get("age")).intValue()); // and a delete @@ -641,39 +713,33 @@ public void testUpdateSomeParents() throws Exception final ExpSampleType parent1Type = createSampleType("Parent1Samples", props, null); final ExpSampleType parent2Type = createSampleType("Parent2Samples", props, null); - UserSchema schema = QueryService.get().getUserSchema(user, c, SchemaKey.fromParts("Samples")); - List> rows = new ArrayList<>(); - - // add first parents - TableInfo table = schema.getTable("Parent1Samples", null, true, false); - QueryUpdateService svcParent = table.getUpdateService(); + QueryUpdateService updateService = getSampleTypeUpdateService(parent1Type.getName()); BatchValidationException errors = new BatchValidationException(); + List> rows = new ArrayList<>(); rows.add(CaseInsensitiveHashMap.of("name", "P1-1")); rows.add(CaseInsensitiveHashMap.of("name", "P1-2")); rows.add(CaseInsensitiveHashMap.of("name", "P1-3")); rows.add(CaseInsensitiveHashMap.of("name", "P1-4,test")); - List> inserted = svcParent.insertRows(user, c, rows, errors, null, null); + List> inserted = updateService.insertRows(user, c, rows, errors, null, null); assertFalse(errors.hasErrors()); assertEquals("Number of parent1 samples inserted not as expected", 4, inserted.size()); // add second parents - table = schema.getTable("Parent2Samples", null, true, false); - QueryUpdateService svcParent2 = table.getUpdateService(); + updateService = getSampleTypeUpdateService(parent2Type.getName()); rows.clear(); rows.add(CaseInsensitiveHashMap.of("name", "P2-1")); rows.add(CaseInsensitiveHashMap.of("name", "P2-2")); rows.add(CaseInsensitiveHashMap.of("name", "P2-3")); - inserted = svcParent2.insertRows(user, c, rows, errors, null, null); + inserted = updateService.insertRows(user, c, rows, errors, null, null); assertFalse(errors.hasErrors()); assertEquals("Number of parent2 samples inserted not as 
expected", 3, inserted.size()); // add child samples - table = schema.getTable("ChildSamples", null, true, false); - QueryUpdateService svcChild = table.getUpdateService(); + updateService = getSampleTypeUpdateService(childType.getName()); rows.clear(); rows.add(CaseInsensitiveHashMap.of("name", "C1", "MaterialInputs/Parent1Samples", "P1-1,P1-2", "MaterialInputs/Parent2Samples", "P2-1")); @@ -682,7 +748,7 @@ public void testUpdateSomeParents() throws Exception rows.add(CaseInsensitiveHashMap.of("name", "C4", "MaterialInputs/Parent1Samples", "P1-2", "MaterialInputs/Parent2Samples", "P2-2")); rows.add(CaseInsensitiveHashMap.of("name", "C5", "MaterialInputs/Parent1Samples", "P1-1, \"P1-4,test\", P1-2")); - inserted = svcChild.insertRows(user, c, rows, errors, null, null); + inserted = updateService.insertRows(user, c, rows, errors, null, null); assertFalse(errors.hasErrors()); assertEquals("Number of child samples inserted not as expected", 5, inserted.size()); @@ -691,7 +757,7 @@ public void testUpdateSomeParents() throws Exception rows.add(CaseInsensitiveHashMap.of("rowId", parent2Type.getSample(c, "P2-1").getRowId(), "MaterialInputs/ChildSamples", "C1")); try { - svcParent2.updateRows(user, c, rows, null, errors, null, null); + getSampleTypeUpdateService(parent2Type.getName()).updateRows(user, c, rows, null, errors, null, null); fail("Expected to throw exception"); } catch (Exception e) @@ -700,14 +766,12 @@ public void testUpdateSomeParents() throws Exception } errors = new BatchValidationException(); - ExpMaterial P11 = parent1Type.getSample(c, "P1-1"); ExpMaterial P12 = parent1Type.getSample(c, "P1-2"); ExpMaterial P14 = parent1Type.getSample(c, "P1-4,test"); ExpMaterial P21 = parent2Type.getSample(c, "P2-1"); ExpMaterial P22 = parent2Type.getSample(c, "P2-2"); - ExpMaterial C1 = childType.getSample(c, "C1"); ExpMaterial C2 = childType.getSample(c, "C2"); ExpMaterial C4 = childType.getSample(c, "C4"); @@ -718,12 +782,62 @@ public void testUpdateSomeParents() 
throws Exception opts.setParents(true); opts.setDepth(2); + // Attempt to merge using rowIds + { + rows.clear(); + rows.add(CaseInsensitiveHashMap.of("rowId", C1.getRowId(), "name", "C1", "MaterialInputs/Parent1Samples", "P1-1")); + rows.add(CaseInsensitiveHashMap.of("rowId", C4.getRowId(), "name", "C5", "MaterialInputs/Parent1Samples", null)); // intentionally mix up name + + updateService.mergeRows(user, c, MapDataIterator.of(rows), errors, null, null); + assertThat(errors.getMessage(), containsString("RowId is not accepted when merging samples. Specify only the sample name instead.")); + errors = new BatchValidationException(); + } + + // Attempt to merge using "Row Id" label + { + rows.clear(); + rows.add(CaseInsensitiveHashMap.of("Row Id", C1.getRowId(), "name", "C1", "MaterialInputs/Parent1Samples", "P1-1")); + rows.add(CaseInsensitiveHashMap.of("Row Id", C4.getRowId(), "name", "C5", "MaterialInputs/Parent1Samples", null)); // intentionally mix up name + + updateService.mergeRows(user, c, MapDataIterator.of(rows), errors, null, null); + assertThat(errors.getMessage(), containsString("RowId is not accepted when merging samples. Specify only the sample name instead.")); + errors = new BatchValidationException(); + } + + // Attempt to update using outdated "LSID" and do not specify any other keys + // Note: using try/catch here as updateRows() executes with retry which throws + // if validation exceptions are encountered. 
+ try + { + rows.clear(); + rows.add(CaseInsensitiveHashMap.of("LSID", C1.getLSID(), "MaterialInputs/Parent1Samples", "P1-1")); + rows.add(CaseInsensitiveHashMap.of("LSID", C4.getLSID(), "MaterialInputs/Parent1Samples", null)); + + updateService.updateRows(user, c, rows, null, errors, null, null); + fail("Expected to throw exception"); + } + catch (Exception e) + { + assertThat(e.getMessage(), containsString("LSID is no longer accepted as a key for sample update")); + } + + // Attempt to merge using outdated "LSID" and do not specify any other keys + { + rows.clear(); + rows.add(CaseInsensitiveHashMap.of("LSID", C1.getLSID(), "MaterialInputs/Parent1Samples", "P1-1")); + rows.add(CaseInsensitiveHashMap.of("LSID", C4.getLSID(), "MaterialInputs/Parent1Samples", null)); + + updateService.mergeRows(user, c, MapDataIterator.of(rows), errors, null, null); + assertThat(errors.getMessage(), containsString("LSID is no longer accepted as a key for sample merge")); + errors = new BatchValidationException(); + } + // now update the children with various types of modifications to the parentage rows.clear(); rows.add(CaseInsensitiveHashMap.of("name", "C1", "MaterialInputs/Parent1Samples", "P1-1")); // change one parent but not the other rows.add(CaseInsensitiveHashMap.of("name", "C4", "MaterialInputs/Parent1Samples", null)); // remove one parent but not the other - svcChild.mergeRows(user, c, MapDataIterator.of(rows), errors, null, null); + updateService.mergeRows(user, c, MapDataIterator.of(rows), errors, null, null); assertFalse(errors.hasErrors()); ExpLineage lineage = ExpLineageService.get().getLineage(c, user, C1, opts); @@ -735,12 +849,11 @@ public void testUpdateSomeParents() throws Exception assertFalse("Expected 'C4' to not be derived from 'P1-2'", lineage.getMaterials().contains(P12)); assertTrue("Expected 'C4' to still be derived from 'P2-2'", lineage.getMaterials().contains(P22)); - rows.clear(); rows.add(CaseInsensitiveHashMap.of("name", "C4", 
"MaterialInputs/Parent1Samples", "P1-1", "MaterialInputs/Parent2Samples", "P2-1")); // change both parents rows.add(CaseInsensitiveHashMap.of("name", "C2", "MaterialInputs/Parent1Samples", "", "MaterialInputs/Parent2Samples", null)); // remove both parents - svcChild.mergeRows(user, c, MapDataIterator.of(rows), errors, null, null); + updateService.mergeRows(user, c, MapDataIterator.of(rows), errors, null, null); assertFalse(errors.hasErrors()); lineage = ExpLineageService.get().getLineage(c, user, C2, opts); @@ -774,9 +887,6 @@ public void testParentColAndDataInputDerivation() throws Exception final ExpSampleType st = createSampleType(sampleTypeName, props, null); // insert and derive with both 'parent' column and 'DataInputs/Samples' - UserSchema schema = QueryService.get().getUserSchema(user, c, SamplesSchema.SCHEMA_SAMPLES); - TableInfo table = schema.getTable(sampleTypeName); - QueryUpdateService svc = table.getUpdateService(); List> rows = new ArrayList<>(); rows.add(CaseInsensitiveHashMap.of("name", "A", "data", 10, "parent", null)); @@ -795,6 +905,7 @@ public void testParentColAndDataInputDerivation() throws Exception // F BatchValidationException errors = new BatchValidationException(); + QueryUpdateService svc = getSampleTypeUpdateService(st.getName()); List> inserted = svc.insertRows(user, c, rows, errors, null, null); assertFalse(errors.hasErrors()); assertEquals(6, inserted.size()); @@ -847,6 +958,7 @@ public void testParentColAndDataInputDerivation() throws Exception assertEquals("Expected 'E' and 'D' to be derived in the same run since they share 'B' and 'C' as parents", E.getRowId(), E.getRowId()); ExpRun derivationRun = E.getRun(); + assertNotNull(derivationRun); assertTrue(derivationRun.getMaterialInputs().containsKey(B)); assertTrue(derivationRun.getMaterialInputs().containsKey(C)); @@ -890,6 +1002,7 @@ public void testParentColAndDataInputDerivation() throws Exception assertFalse(derivationRun2.getMaterialOutputs().contains(E)); ExpRun 
oldDerivationRun = ExperimentService.get().getExpRun(derivationRun.getRowId()); + assertNotNull(oldDerivationRun); assertEquals(oldDerivationRun.getRowId(), derivationRun.getRowId()); assertTrue(oldDerivationRun.getMaterialInputs().containsKey(B)); @@ -906,7 +1019,7 @@ public void testParentColAndDataInputDerivation() throws Exception var multiValueColumn = "Outputs/Materials/" + sampleTypeName + "/Created"; var url = new ActionURL("query", "selectRows", c); - url.addParameter(QueryParam.schemaName, schema.getName()); + url.addParameter(QueryParam.schemaName, getSampleSchema().getName()); url.addParameter("query." + QueryParam.queryName, sampleTypeName); url.addParameter("query." + QueryParam.columns, "Name, " + multiValueColumn); url.addFilter("query", FieldKey.fromParts("Name"), CompareType.EQUAL, "A"); @@ -946,7 +1059,7 @@ public void testSampleTypeWithVocabularyProperties() throws Exception { User user = TestContext.get().getUser(); - String sampleName = "SamplesWithVocabularyProperties"; + String sampleTypeName = "SamplesWithVocabularyProperties"; String sampleType = "TypeA"; String updatedSampleType = "TypeB"; String sampleColor = "Blue"; @@ -956,30 +1069,32 @@ public void testSampleTypeWithVocabularyProperties() throws Exception Map vocabularyPropertyURIs = helper.getVocabularyPropertyURIS(mockDomain); // create a sample type - createSampleType(sampleName, List.of(new GWTPropertyDescriptor("name", "string")), null); - - UserSchema schema = QueryService.get().getUserSchema(user, c, SchemaKey.fromParts("Samples")); + createSampleType(sampleTypeName, List.of(new GWTPropertyDescriptor("name", "string")), null); // insert a sample + var sampleName = "TestSample"; ArrayListMap row = new ArrayListMap<>(); - row.put("name", "TestSample"); + row.put("name", sampleName); row.put(vocabularyPropertyURIs.get(helper.typePropertyName), sampleType); row.put(vocabularyPropertyURIs.get(helper.colorPropertyName), sampleColor); 
row.put(vocabularyPropertyURIs.get(helper.agePropertyName), null); // inserting a property with null value List> rows = helper.buildRows(row); - var insertedSample = helper.insertRows(c, rows ,sampleName, schema); + UserSchema schema = getSampleSchema(); + var insertedSample = helper.insertRows(c, rows, sampleTypeName, schema).get(0); + var sampleLsid = insertedSample.get("LSID").toString(); + var sampleRowId = insertedSample.get("RowId"); assertEquals("Custom Property is not inserted", sampleType, - OntologyManager.getPropertyObjects(c, insertedSample.get(0).get("LSID").toString()).get(vocabularyPropertyURIs.get(helper.typePropertyName)).getStringValue()); + OntologyManager.getPropertyObjects(c, sampleLsid).get(vocabularyPropertyURIs.get(helper.typePropertyName)).getStringValue()); - //Verifying property with null value is not inserted + // Verifying property with null value is not inserted assertEquals("Property with null value is present.", 0, OntologyManager.getPropertyObjects(c, vocabularyPropertyURIs.get(helper.agePropertyName)).size()); - //update inserted sample + // update inserted sample ArrayListMap rowToUpdate = new ArrayListMap<>(); - rowToUpdate.put("name", "TestSample"); - rowToUpdate.put("RowId", insertedSample.get(0).get("RowId")); + rowToUpdate.put("name", sampleName); + rowToUpdate.put("RowId", sampleRowId); rowToUpdate.put(vocabularyPropertyURIs.get(helper.typePropertyName), updatedSampleType); rowToUpdate.put(vocabularyPropertyURIs.get(helper.colorPropertyName), null); // nulling out existing property rowToUpdate.put(vocabularyPropertyURIs.get(helper.agePropertyName), sampleAge); //inserting a new property in update rows @@ -987,27 +1102,26 @@ public void testSampleTypeWithVocabularyProperties() throws Exception List> oldKeys = new ArrayList<>(); ArrayListMap oldKey = new ArrayListMap<>(); - oldKey.put("name", "TestSample"); - oldKey.put("RowId", insertedSample.get(0).get("RowId")); + oldKey.put("name", sampleName); + oldKey.put("RowId", 
sampleRowId); oldKeys.add(oldKey); - var updatedSample = helper.updateRows(c, rowsToUpdate, oldKeys, sampleName, schema); + helper.updateRows(c, rowsToUpdate, oldKeys, sampleTypeName, schema); assertEquals("Custom Property is not updated", updatedSampleType, - OntologyManager.getPropertyObjects(c, updatedSample.get(0).get("LSID").toString()).get(vocabularyPropertyURIs.get(helper.typePropertyName)).getStringValue()); + OntologyManager.getPropertyObjects(c, sampleLsid).get(vocabularyPropertyURIs.get(helper.typePropertyName)).getStringValue()); - //Verify property updated to a null value gets deleted + // Verify property updated to a null value gets deleted assertEquals("Property with null value is present.", 0, OntologyManager.getPropertyObjects(c, vocabularyPropertyURIs.get(helper.colorPropertyName)).size()); - //Verify property inserted during update rows in inserted + // Verify property inserted during update rows in inserted assertEquals("New Property is not inserted with update rows", sampleAge, - OntologyManager.getPropertyObjects(c, updatedSample.get(0).get("LSID").toString()).get(vocabularyPropertyURIs.get(helper.agePropertyName)).getFloatValue().intValue()); + OntologyManager.getPropertyObjects(c, sampleLsid).get(vocabularyPropertyURIs.get(helper.agePropertyName)).getFloatValue().intValue()); } @Test public void testDetailedAuditLog() throws Exception { User user = TestContext.get().getUser(); - UserSchema auditSchema = AuditLogService.get().createSchema(user, c); TableInfo auditTable = auditSchema.getTable(SampleTimelineAuditEvent.EVENT_TYPE); Integer RowId = new SqlSelector(auditSchema.getDbSchema(), new SQLFragment("select max(rowid) FROM ").append(auditTable.getFromSQL("_"))) @@ -1020,27 +1134,17 @@ public void testDetailedAuditLog() throws Exception props.add(new GWTPropertyDescriptor("Value", "float")); final ExpSampleType st = createSampleType("SamplesDAL", props, null); - QuerySchema samplesSchema = DefaultSchema.get(user, c, "samples"); - 
assertNotNull(samplesSchema); - TableInfo samplesTable = samplesSchema.getTable("SamplesDAL"); - assertNotNull(samplesTable); - QueryUpdateService qus = samplesTable.getUpdateService(); - assertNotNull(qus); BatchValidationException errors = new BatchValidationException(); - Map config = Map.of(DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior, AuditBehaviorType.DETAILED); // insert a sample - List> rows = new ArrayList<>(); - rows.add(PageFlowUtil.mapInsensitive("Name", "A1", "Measure", "Initial", "Value", 1.0)); - List> ret = qus.insertRows(user, c, rows, errors, config, null); - String msg = !errors.getRowErrors().isEmpty() ? errors.getRowErrors().get(0).toString() : "no message"; - assertFalse(msg, errors.hasErrors()); - assertEquals(1,ret.size()); + List> ret = insertSampleRows(st.getName(), List.of(CaseInsensitiveHashMap.of("Name", "A1", "Measure", "Initial", "Value", 1.0))); + assertEquals(1, ret.size()); assertNotNull(ret.get(0).get("rowid")); int rowid = (int) JdbcType.INTEGER.convert(ret.get(0).get("rowid")); + // check audit log - SimpleFilter f = new SimpleFilter(new FieldKey(null,"RowId"),auditMaxRowid, CompareType.GT); - List events = AuditLogService.get().getAuditEvents(c,user,SampleTimelineAuditEvent.EVENT_TYPE,f,new Sort("-RowId")); + SimpleFilter f = new SimpleFilter(new FieldKey(null, "RowId"), auditMaxRowid, CompareType.GT); + List events = AuditLogService.get().getAuditEvents(c, user, SampleTimelineAuditEvent.EVENT_TYPE, f, new Sort("-RowId")); assertFalse(events.isEmpty()); assertNull(events.get(0).getOldRecordMap()); assertNotNull(events.get(0).getNewRecordMap()); @@ -1054,12 +1158,10 @@ public void testDetailedAuditLog() throws Exception assertNull(newRecordMap.get("AliquotUnit")); // UPDATE - rows.clear(); errors.clear(); - rows.add(PageFlowUtil.mapInsensitive("RowId", rowid, "Measure", "Updated", "Value", 2.0)); - qus.updateRows(user, c, rows, null, errors, config, null); - assertFalse(errors.hasErrors()); + 
updateSampleRows(st.getName(), List.of(CaseInsensitiveHashMap.of("RowId", rowid, "Measure", "Updated", "Value", 2.0))); + // check audit log - events = AuditLogService.get().getAuditEvents(c,user,SampleTimelineAuditEvent.EVENT_TYPE,f,new Sort("-RowId")); + events = AuditLogService.get().getAuditEvents(c, user, SampleTimelineAuditEvent.EVENT_TYPE, f, new Sort("-RowId")); assertFalse(events.isEmpty()); assertNotNull(events.get(0).getOldRecordMap()); Map oldRecordMap = new CaseInsensitiveHashMap<>(PageFlowUtil.mapFromQueryString(events.get(0).getOldRecordMap())); @@ -1076,12 +1178,11 @@ public void testDetailedAuditLog() throws Exception // MERGE // and since merge is a different code path... - rows.clear(); errors.clear(); - rows.add(PageFlowUtil.mapInsensitive("Name", "A1", "Measure", "Merged", "Value", 3.0)); - int count = qus.mergeRows(user, c, MapDataIterator.of(rows), errors, config, null); + int count = mergeSampleRows(st.getName(), List.of(CaseInsensitiveHashMap.of("Name", "A1", "Measure", "Merged", "Value", 3.0))); assertEquals(1, count); + // check audit log - events = AuditLogService.get().getAuditEvents(c,user,SampleTimelineAuditEvent.EVENT_TYPE,f,new Sort("-RowId")); + events = AuditLogService.get().getAuditEvents(c, user, SampleTimelineAuditEvent.EVENT_TYPE, f, new Sort("-RowId")); assertFalse(events.isEmpty()); assertNotNull(events.get(0).getOldRecordMap()); oldRecordMap = new CaseInsensitiveHashMap<>(PageFlowUtil.mapFromQueryString(events.get(0).getOldRecordMap())); @@ -1105,26 +1206,19 @@ public void testDetailedAuditLog() throws Exception @Test public void testExpMaterialPermissions() throws Exception { - User user = TestContext.get().getUser(); - var schema = QueryService.get().getUserSchema(user, c, ExpSchema.SCHEMA_EXP); - // create a sample type ExpSampleType st = createSampleType("MySamples", List.of(new GWTPropertyDescriptor("name", "string")), null); // insert a sample - var errors = new BatchValidationException(); - List> ret = 
QueryService.get().getUserSchema(user, c, SamplesSchema.SCHEMA_SAMPLES) - .getTable("MySamples") - .getUpdateService() - .insertRows(user, c, List.of(CaseInsensitiveHashMap.of("name", "SampleInSampleType")), errors, null, null); - String msg = !errors.getRowErrors().isEmpty() ? errors.getRowErrors().get(0).toString() : "no message"; - assertFalse(msg, errors.hasErrors()); - assertEquals(1,ret.size()); + List> ret = insertSampleRows(st.getName(), List.of(CaseInsensitiveHashMap.of("name", "SampleInSampleType"))); + assertEquals(1, ret.size()); assertNotNull(ret.get(0).get("rowid")); long stSampleId = (long) JdbcType.BIGINT.convert(ret.get(0).get("rowid")); // verify insert, update aren't allowed, but read and delete are allowed - var materialsTable = schema.getTable(ExpSchema.TableType.Materials.name()); + User user = TestContext.get().getUser(); + var schema = QueryService.get().getUserSchema(user, c, ExpSchema.SCHEMA_EXP); + var materialsTable = schema.getTableOrThrow(ExpSchema.TableType.Materials.name()); var qus = materialsTable.getUpdateService(); assertNotNull(qus); assertTrue(qus.hasPermission(user, ReadPermission.class)); @@ -1132,7 +1226,7 @@ public void testExpMaterialPermissions() throws Exception assertFalse(qus.hasPermission(user, InsertPermission.class)); assertFalse(qus.hasPermission(user, UpdatePermission.class)); - // create a sample outside of a SampleType + // create a sample outside a SampleType var lsid = ExperimentService.get().generateLSID(c, ExpMaterial.class, "SampleNotInSampleType"); ExpMaterial m = ExperimentService.get().createExpMaterial(c, Lsid.parse(lsid)); m.save(user); @@ -1191,10 +1285,8 @@ public void testInsertOptionUpdate() throws Exception final String sampleTypeName = "TestSamplesWithRequired"; ExpSampleType sampleType = createSampleType(sampleTypeName, props, null); - TableInfo table = getSampleTypeTable(sampleTypeName); - QueryUpdateService qus = table.getUpdateService(); - assertNotNull(qus); + QueryUpdateService qus = 
getSampleTypeUpdateService(sampleTypeName); String longFieldAlias = table.getColumn(longFieldName).getAlias().getId(); assertFalse("Unexpected long field alias", longFieldName.equalsIgnoreCase(longFieldAlias)); @@ -1212,7 +1304,7 @@ public void testInsertOptionUpdate() throws Exception assertFalse(context.getErrors().hasErrors()); assertEquals("Unexpected count from IMPORT on loadRows()", 3, count); - // Issue 53168: aliquot cannot be parent to its aliquot parent + // Issue 53168: aliquot cannot be a parent to its aliquot parent BatchValidationException errors = new BatchValidationException(); List> rows = new ArrayList<>(); rows.add(CaseInsensitiveHashMap.of("rowId", sampleType.getSample(c, "S-1").getRowId(), "MaterialInputs/" + sampleTypeName, "S-1-1")); @@ -1334,10 +1426,16 @@ private ExpSampleType createSampleType(String sampleTypeName, List> insertSampleRows(String sampleType, List> rows) throws Exception +{ + BatchValidationException errors = new BatchValidationException(); + QueryUpdateService svc = getSampleTypeUpdateService(sampleType); + Map config = Map.of(DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior, AuditBehaviorType.DETAILED); + int count = svc.mergeRows(TestContext.get().getUser(), c, MapDataIterator.of(rows), errors, config, null); + if (errors.hasErrors()) + throw errors; + return count; +} + +private List> updateSampleRows(String sampleType, List> rows) throws Exception +{ + BatchValidationException errors = new BatchValidationException(); + QueryUpdateService svc = getSampleTypeUpdateService(sampleType); + Map config = Map.of(DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior, AuditBehaviorType.DETAILED); + List> ret = svc.updateRows(TestContext.get().getUser(), c, rows, null, errors, config, null); + if (errors.hasErrors()) + throw errors; + return ret; +} %> diff --git a/experiment/src/org/labkey/experiment/api/ExperimentServiceImpl.java b/experiment/src/org/labkey/experiment/api/ExperimentServiceImpl.java index 
9baee1183c3..ac942ee485e 100644 --- a/experiment/src/org/labkey/experiment/api/ExperimentServiceImpl.java +++ b/experiment/src/org/labkey/experiment/api/ExperimentServiceImpl.java @@ -4976,9 +4976,8 @@ public int deleteMaterialBySqlFilter( // NOTE: study specimens don't have a domain for their samples, so no table if (null != dbTinfo) { - SQLFragment sampleTypeSQL = new SQLFragment("DELETE FROM " + dbTinfo + " WHERE lsid IN (SELECT lsid FROM exp.Material WHERE "); - sampleTypeSQL.append(materialFilterSQL); - sampleTypeSQL.append(")"); + SQLFragment sampleTypeSQL = new SQLFragment("DELETE FROM ").append(dbTinfo) + .append(" WHERE RowId IN ").append(materialIdSql); executor.execute(sampleTypeSQL); } } @@ -9939,10 +9938,11 @@ public int moveAuditEvents(Container targetContainer, List runLsids) continue; } query.append(unionAll); - query.append("SELECT LSID, ") - .append("CAST (").appendIdentifier(col.getSelectIdentifier()).append(" AS VARCHAR)") + query.append("SELECT M.LSID, ") + .append("CAST (ST.").appendIdentifier(col.getSelectIdentifier()).append(" AS VARCHAR)") .append(" AS ").append(UNIQUE_ID_COL_NAME); - query.append(" FROM expsampleset.").append(dialect.quoteIdentifier(provisioned.getName())); + query.append(" FROM ").append(provisioned, "ST"); + query.append(" INNER JOIN ").append(ExperimentService.get().getTinfoMaterial(), "M").append(" ON M.RowId = ST.RowId"); query.append(" WHERE ").appendIdentifier(col.getSelectIdentifier()).appendInClause(isIntegerField ? 
intIds : uniqueIds, dialect); unionAll = "\n UNION ALL\n"; } diff --git a/experiment/src/org/labkey/experiment/api/SampleTypeServiceImpl.java b/experiment/src/org/labkey/experiment/api/SampleTypeServiceImpl.java index 17227bfb785..882e1ec3559 100644 --- a/experiment/src/org/labkey/experiment/api/SampleTypeServiceImpl.java +++ b/experiment/src/org/labkey/experiment/api/SampleTypeServiceImpl.java @@ -698,11 +698,8 @@ public void deleteSampleType(long rowId, Container c, User user, @Nullable Strin // Delete sequences (genId and the unique counters) DbSequenceManager.deleteLike(c, ExpSampleType.SEQUENCE_PREFIX, (int)source.getRowId(), getExpSchema().getSqlDialect()); - SchemaKey samplesSchema = SchemaKey.fromParts(SamplesSchema.SCHEMA_NAME); - QueryService.get().fireQueryDeleted(user, c, null, samplesSchema, singleton(source.getName())); - - SchemaKey expMaterialsSchema = SchemaKey.fromParts(ExpSchema.SCHEMA_NAME, materials.toString()); - QueryService.get().fireQueryDeleted(user, c, null, expMaterialsSchema, singleton(source.getName())); + QueryService.get().fireQueryDeleted(user, c, null, SamplesSchema.SCHEMA_SAMPLES, singleton(source.getName())); + QueryService.get().fireQueryDeleted(user, c, null, ExpSchema.SCHEMA_EXP_MATERIALS, singleton(source.getName())); // Remove SampleType from search index try (Timing ignored = MiniProfiler.step("search docs")) diff --git a/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java b/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java index d9e5a25995c..1db58f8c260 100644 --- a/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java +++ b/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java @@ -16,22 +16,16 @@ package org.labkey.experiment.api; import org.apache.commons.beanutils.ConversionException; -import org.apache.commons.beanutils.converters.IntegerConverter; -import org.apache.commons.collections4.ListUtils; import 
org.apache.commons.collections4.MapUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Strings; -import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Logger; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; -import org.labkey.api.assay.AssayFileWriter; import org.labkey.api.audit.AuditLogService; import org.labkey.api.audit.TransactionAuditProvider; import org.labkey.api.collections.CaseInsensitiveHashMap; import org.labkey.api.collections.CaseInsensitiveHashSet; -import org.labkey.api.collections.CaseInsensitiveMapWrapper; -import org.labkey.api.collections.IntHashMap; import org.labkey.api.collections.LongHashSet; import org.labkey.api.collections.Sets; import org.labkey.api.data.BaseColumnInfo; @@ -53,7 +47,6 @@ import org.labkey.api.data.RemapCache; import org.labkey.api.data.RuntimeSQLException; import org.labkey.api.data.SimpleFilter; -import org.labkey.api.data.Table; import org.labkey.api.data.TableInfo; import org.labkey.api.data.TableSelector; import org.labkey.api.data.UpdateableTableInfo; @@ -103,12 +96,13 @@ import org.labkey.api.query.ValidationException; import org.labkey.api.reader.ColumnDescriptor; import org.labkey.api.reader.DataLoader; -import org.labkey.api.search.SearchService; import org.labkey.api.security.User; import org.labkey.api.security.permissions.MoveEntitiesPermission; import org.labkey.api.security.permissions.ReadPermission; +import org.labkey.api.settings.OptionalFeatureService; import org.labkey.api.study.publish.StudyPublishService; import org.labkey.api.usageMetrics.SimpleMetricsService; +import org.labkey.api.util.GUID; import org.labkey.api.util.JobRunner; import org.labkey.api.util.Pair; import org.labkey.api.util.StringUtilsLabKey; @@ -118,7 +112,6 @@ import org.labkey.experiment.SampleTypeAuditProvider; import java.io.IOException; -import java.nio.file.Path; import java.sql.SQLException; import java.util.ArrayList; import 
java.util.Collection; @@ -133,12 +126,12 @@ import java.util.Set; import java.util.function.Supplier; import java.util.stream.Collectors; -import java.util.stream.Stream; import static java.util.Collections.emptyMap; import static org.labkey.api.audit.AuditHandler.DELTA_PROVIDED_DATA_PREFIX; import static org.labkey.api.audit.AuditHandler.PROVIDED_DATA_PREFIX; import static org.labkey.api.data.TableSelector.ALL_COLUMNS; +import static org.labkey.api.dataiterator.DataIteratorUtil.DUPLICATE_COLUMN_IN_DATA_ERROR; import static org.labkey.api.dataiterator.DetailedAuditLogDataIterator.AuditConfigs; import static org.labkey.api.dataiterator.SampleUpdateAddColumnsDataIterator.CURRENT_SAMPLE_STATUS_COLUMN_NAME; import static org.labkey.api.exp.api.ExpRunItem.PARENT_IMPORT_ALIAS_MAP_PROP; @@ -149,21 +142,7 @@ import static org.labkey.api.exp.api.SampleTypeService.MISSING_AMOUNT_ERROR_MESSAGE; import static org.labkey.api.exp.api.SampleTypeService.MISSING_UNITS_ERROR_MESSAGE; import static org.labkey.api.exp.api.SampleTypeService.UNPROVIDED_VALUE_ERROR_MESSAGE_PATTERN; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.AliquotCount; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.AliquotVolume; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.AliquotedFromLSID; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.AvailableAliquotCount; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.AvailableAliquotVolume; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.LSID; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.Name; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.RawAmount; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.RawUnits; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.RootMaterialRowId; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.RowId; -import static 
org.labkey.api.exp.query.ExpMaterialTable.Column.SampleState; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.StoredAmount; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.Units; -import static org.labkey.api.exp.query.SamplesSchema.SCHEMA_SAMPLES; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.*; import static org.labkey.api.util.IntegerUtils.asLong; import static org.labkey.experiment.ExpDataIterators.incrementCounts; import static org.labkey.experiment.api.SampleTypeServiceImpl.SampleChangeType.insert; @@ -171,13 +150,7 @@ import static org.labkey.experiment.api.SampleTypeServiceImpl.SampleChangeType.update; /** - * - * This replaces the old row at a time UploadSamplesHelper.uploadMaterials() implementations. - * - * originally copied from ExpDataClassDataTableImpl.DataClassDataUpdateService, - * - * TODO find remaining shared code and refactor - * + * QueryUpdateService implementation for samples in sample types. */ public class SampleTypeUpdateServiceDI extends DefaultQueryUpdateService { @@ -188,6 +161,8 @@ public class SampleTypeUpdateServiceDI extends DefaultQueryUpdateService public static final String ROOT_RECOMPUTE_ROWID_SET = "RootIdToRecomputeSet"; public static final String PARENT_RECOMPUTE_NAME_SET = "ParentNameToRecomputeSet"; + public static final String EXPERIMENTAL_FEATURE_ALLOW_ROW_ID_SAMPLE_MERGE = "org.labkey.experiment.api.SampleTypeUpdateServiceDI#ALLOW_ROW_ID_SAMPLE_MERGE"; + public static final Map SAMPLE_ALT_IMPORT_NAME_COLS; private static final Map ALIQUOT_ROLLUP_FIELDS = Map.of( @@ -258,7 +233,7 @@ public void configureDataIteratorContext(DataIteratorContext context) protected DataIteratorBuilder preTriggerDataIterator(DataIteratorBuilder in, DataIteratorContext context) { assert _sampleType != null : "SampleType required for insert/update, but not required for read/delete"; - return new PrepareDataIteratorBuilder(_sampleType, (ExpMaterialTableImpl) getQueryTable(), in, getContainer(), 
getUser()); + return new PreTriggerDataIteratorBuilder(_sampleType, (ExpMaterialTableImpl) getQueryTable(), in, getContainer(), getUser()); } @Override @@ -312,12 +287,13 @@ private Pair, Set> getSampleParentsForRecalc(List> rows, DataIteratorContext context) { DataIterator it = etl.getDataIterator(context); + if (it == null || context.getErrors().hasErrors()) + return 0; try { if (null != rows) { - MapDataIterator maps = DataIteratorUtil.wrapMap(it, false); Map columnMap = DataIteratorUtil.createColumnNameMap(it); Integer parenRowIdToRecomputeCol = columnMap.get(ROOT_RECOMPUTE_ROWID_COL); @@ -427,7 +403,6 @@ public DataIteratorBuilder createImportDIB(User user, Container container, DataI return new ExpDataIterators.MultiDataTypeCrossProjectDataIteratorBuilder(user, container, data, context.isCrossTypeImport(), context.isCrossFolderImport(), _sampleType, true); DataIteratorBuilder dib = new ExpDataIterators.ExpMaterialDataIteratorBuilder(getQueryTable(), data, container, user); - dib = ((UpdateableTableInfo) getQueryTable()).persistRows(dib, context); dib = AttachmentDataIterator.getAttachmentDataIteratorBuilder(getQueryTable(), dib, user, context.getInsertOption().batch ? getAttachmentDirectory() : null, container, getAttachmentParentFactory()); dib = DetailedAuditLogDataIterator.getDataIteratorBuilder(getQueryTable(), dib, context.getInsertOption(), user, container, this::extractProvidedAmountsAndUnits); @@ -522,9 +497,8 @@ public List> insertRows(User user, Container container, List * presence or absence of columns in the incoming data. If both columns are present, no exception is thrown. 
* * @param columns The set of columns in the input - * @param allowsUpdate Whether the type of import supports updates or not */ - public static void confirmAmountAndUnitsColumns(Collection columns, boolean allowsUpdate) + public static void confirmAmountAndUnitsColumns(Collection columns) { boolean hasUnits = columns.stream().anyMatch(column -> column.equalsIgnoreCase(Units.name())); boolean hasAmount = columns.stream().anyMatch(column -> StoredAmount.namesAndLabels().contains(column)); @@ -538,75 +512,92 @@ public static void confirmAmountAndUnitsColumns(Collection columns, bool } @Override - public List> updateRows(User user, Container container, List> rows, List> oldKeys, BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException + public List> updateRows( + User user, + Container container, + List> rows, + List> oldKeys, + BatchValidationException errors, + @Nullable Map configParameters, + Map extraScriptContext + ) throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException { assert _sampleType != null : "SampleType required for insert/update, but not required for read/delete"; - - if (rows != null && !rows.isEmpty()) - confirmAmountAndUnitsColumns(rows.get(0).keySet(), true); - - boolean useDib = false; - if (rows != null && !rows.isEmpty() && oldKeys == null) - useDib = rows.get(0).containsKey("lsid"); - - useDib = useDib && hasUniformKeys(rows); + if (rows == null || rows.isEmpty()) + return Collections.emptyList(); List> results; - DbScope scope = getSchema().getDbSchema().getScope(); - if (useDib) - { - Map finalConfigParameters = configParameters == null ? new HashMap<>() : configParameters; - finalConfigParameters.put(ExperimentService.QueryOptions.UseLsidForUpdate, true); - recordDataIteratorUsed(configParameters); + Map finalConfigParameters = configParameters == null ? 
new HashMap<>() : configParameters; + recordDataIteratorUsed(configParameters); - try + try + { + results = getSchema().getDbSchema().getScope().executeWithRetry(tx -> { - results = scope.executeWithRetry(transaction -> + int index = 0; + int numPartitions = 0; + List> ret = new ArrayList<>(); + + Set observedRowIds = new HashSet<>(); + Set observedNames = new CaseInsensitiveHashSet(); + + while (index < rows.size()) { - var context = getDataIteratorContext(errors, InsertOption.UPDATE, finalConfigParameters); - var ret = super._updateRowsUsingDIB(user, container, rows, context, extraScriptContext); + CaseInsensitiveHashSet rowKeys = new CaseInsensitiveHashSet(rows.get(index).keySet()); + confirmAmountAndUnitsColumns(rowKeys); + + int nextIndex = index + 1; + while (nextIndex < rows.size() && rowKeys.equals(new CaseInsensitiveHashSet(rows.get(nextIndex).keySet()))) + nextIndex++; + + List> rowsToProcess = rows.subList(index, nextIndex); + index = nextIndex; + numPartitions++; + + DataIteratorContext context = getDataIteratorContext(errors, InsertOption.UPDATE, finalConfigParameters); + + // skip audit summary for the partitions, we will perform it once at the end + context.putConfigParameter(ConfigParameters.SkipAuditSummary, true); + + List> subRet = super._updateRowsUsingDIB(user, container, rowsToProcess, context, extraScriptContext); + // we need to throw if we don't want executeWithRetry() attempt commit() if (context.getErrors().hasErrors()) throw new DbScope.RetryPassthroughException(context.getErrors()); - return ret; - }); - } - catch (DbScope.RetryPassthroughException retryException) - { - retryException.rethrow(BatchValidationException.class); - throw retryException.throwRuntimeException(); - } - } - else - { - results = super.updateRows(user, container, rows, oldKeys, errors, configParameters, extraScriptContext); - SearchService.TaskIndexingQueue queue = SearchService.get().defaultTask().getQueue(container, SearchService.PRIORITY.modified); - 
scope.addCommitTask(() -> - { - List orderedRowIds = new ArrayList<>(); - for (Map result : results) + if (subRet != null) + { + ret.addAll(subRet); + + // Check if duplicate rows have been processed across the partitions + // Only start checking for duplicates after the first partition has been processed. + if (numPartitions > 1) + { + // If we are on the second partition, then lazily check all previous rows, otherwise check only the current partition + checkPartitionForDuplicates(numPartitions == 2 ? ret : subRet, observedRowIds, observedNames, errors); + } + + if (errors.hasErrors()) + throw new DbScope.RetryPassthroughException(errors); + } + } + + if (numPartitions > 1) { - Long rowId = MapUtils.getLong(result, RowId.name()); - if (rowId != null) - orderedRowIds.add(rowId); + var auditEvent = tx.getAuditEvent(); + if (auditEvent != null) + auditEvent.addDetail(TransactionAuditProvider.TransactionDetail.DataIteratorPartitions, numPartitions); } - // Issue 51263: order by RowId to reduce deadlock - Collections.sort(orderedRowIds); - ExpMaterialTableImpl tableInfo = (ExpMaterialTableImpl) QueryService.get().getUserSchema(User.getSearchUser(), container, SCHEMA_SAMPLES).getTable(_sampleType.getName()); - ListUtils.partition(orderedRowIds, 100).forEach(sublist -> - queue.addRunnable((q) -> - { - for (ExpMaterialImpl expMaterial : ExperimentServiceImpl.get().getExpMaterials(sublist)) - expMaterial.index(q, tableInfo); - }) - ); - }, DbScope.CommitTaskOption.POSTCOMMIT); + _addSummaryAuditEvent(container, user, getDataIteratorContext(errors, InsertOption.UPDATE, finalConfigParameters), ret.size()); - /* setup mini dataiterator pipeline to process lineage */ - DataIterator di = _toDataIteratorBuilder("updateRows.lineage", results).getDataIterator(new DataIteratorContext()); - ExpDataIterators.derive(user, container, di, true, _sampleType, true); + return ret; + }); + } + catch (DbScope.RetryPassthroughException retryException) + { + 
retryException.rethrow(BatchValidationException.class); + throw retryException.throwRuntimeException(); } if (results != null && !results.isEmpty() && !errors.hasErrors()) @@ -618,6 +609,26 @@ public List> updateRows(User user, Container container, List return results; } + private void checkPartitionForDuplicates(List> partitionRows, Set globalRowIds, Set globalNames, BatchValidationException errors) + { + for (Map row : partitionRows) + { + Long rowId = MapUtils.getLong(row, RowId.name()); + if (rowId != null && !globalRowIds.add(rowId)) + { + errors.addRowError(new ValidationException("Duplicate key provided: " + rowId)); + return; + } + + Object nameObj = row.get(Name.name()); + if (nameObj != null && !globalNames.add(nameObj.toString())) + { + errors.addRowError(new ValidationException("Duplicate key provided: " + nameObj)); + return; + } + } + } + /** * Attempt to make the passed in types match the expected types so the script doesn't have to do the conversion */ @@ -631,12 +642,12 @@ protected Map coerceTypes(Map row, Map moveRows(User user, Container container, Container ta private Map> getMaterialsForMoveRows(Container container, Container targetContainer, List> rows, BatchValidationException errors) { - Set sampleIds = rows.stream().map(row -> MapUtils.getLong(row,RowId.name())).collect(Collectors.toSet()); + Set sampleIds = rows.stream().map(row -> MapUtils.getLong(row, RowId.name())).collect(Collectors.toSet()); if (sampleIds.isEmpty()) { errors.addRowError(new ValidationException("Sample IDs must be specified for the move operation.")); @@ -779,23 +790,12 @@ protected Map _select(Container container, Object[] keys) throws throw new IllegalStateException("Overridden .getRow()/.getRows() calls .getMaterialMap()"); } - public Set getAliquotSpecificFields() - { - Domain domain = getDomain(); - Set fields = domain.getProperties().stream() - .filter(dp -> ExpSchema.DerivationDataScopeType.ChildOnly.name().equalsIgnoreCase(dp.getDerivationDataScope())) - 
.map(ImportAliasable::getName) - .collect(Collectors.toSet()); - - return new CaseInsensitiveHashSet(fields); - } - public Set getSampleMetaFields() { Domain domain = getDomain(); Set fields = domain.getProperties().stream() .filter(dp -> !LSID.name().equalsIgnoreCase(dp.getName()) - && !ExpMaterialTable.Column.Name.name().equalsIgnoreCase(dp.getName()) + && !Name.name().equalsIgnoreCase(dp.getName()) && (StringUtils.isEmpty(dp.getDerivationDataScope()) || ExpSchema.DerivationDataScopeType.ParentOnly.name().equalsIgnoreCase(dp.getDerivationDataScope()))) .map(ImportAliasable::getName) @@ -833,33 +833,13 @@ public static boolean isAliquotStatusChangeNeedRecalc(Collection available return false; } - // Customize negative amount error message when the provided unit doesn't match sample type unit. - // For example, provided value of "-1 kg" would have been converted to "-1000 mg" by now. - // This updateRow (going to be deprecated) inconsistent with the data iterator code path, which use provided value "-1" in error message. 
- // TODO: remove this override when consolidating sample update method to remove row by row update - @Override - protected void validateUpdateRow(Map row) throws ValidationException - { - for (ColumnInfo col : getQueryTable().getColumns()) - { - if (row.containsKey(col.getColumnName())) - { - // if provided value is present, validate provided - Object value = row.get(col.getColumnName()); - Object providedValue = null; - if (_sampleType != null && _sampleType.getMetricUnit() != null && value != null && (StoredAmount.name().equalsIgnoreCase(col.getColumnName()) || "Amount".equalsIgnoreCase(col.getColumnName()))) - { - providedValue = value + " (" + _sampleType.getMetricUnit() + ")"; - } - validateValue(col, value, providedValue); - } - } - } - @Override protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, boolean allowOwner, boolean retainCreation) throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException { + if (row.containsKey(LSID.name()) && !(row.containsKey(RowId.name()) || row.containsKey(Name.name()))) + throw new ValidationException("Either RowId or Name is required to update a sample."); + Map result = super.updateRow(user, container, row, oldRow, allowOwner, retainCreation); // add MaterialInput/DataInputs field from parent alias @@ -878,177 +858,12 @@ protected Map updateRow(User user, Container container, Map _update(User user, Container c, Map row, Map oldRow, Object[] keys) throws SQLException, ValidationException + protected Map _update(User user, Container c, Map row, Map oldRow, Object[] keys) { - assert _sampleType != null : "SampleType required for insert/update, but not required for read/delete"; - // LSID was stripped by super.updateRows() and is needed to insert into the dataclass provisioned table - String lsid = (String) oldRow.get("lsid"); - if (lsid == null) - throw new ValidationException("lsid required to update row"); - - String newName = (String) row.get(Name.name()); - 
if (row.containsKey(Name.name()) && StringUtils.isEmpty(newName)) - throw new ValidationException("Sample name cannot be blank"); - - String oldName = (String) oldRow.get(Name.name()); - boolean hasNameChange = !StringUtils.isEmpty(newName) && !newName.equals(oldName); - if (hasNameChange && !NameExpressionOptionService.get().getAllowUserSpecificNamesValue(c)) - throw new ValidationException("User-specified sample name not allowed"); - - String oldAliquotedFromLSID = (String) oldRow.get(AliquotedFromLSID.name()); - boolean isAliquot = !StringUtils.isEmpty(oldAliquotedFromLSID); - - Integer aliquotRollupRoot = null; - SampleTypeService stService = SampleTypeService.get(); - if (!_sampleType.isMedia() && isAliquot) - { - Integer aliquotRoot = (Integer) oldRow.get(RootMaterialRowId.name()); - - if (row.containsKey(StoredAmount.name()) || row.containsKey(Units.name())) - { - Unit oldRowUnits = stService.getValidatedUnit(oldRow.get(Units.name()), _sampleType.getBaseUnit(), _sampleType.getName()); - Unit rowUnits = stService.getValidatedUnit(row.get(Units.name()), _sampleType.getBaseUnit(), _sampleType.getName()); - Quantity oldQuantity = null; - Quantity newQuantity = null; - if (oldRowUnits != null && oldRow.get(StoredAmount.name()) != null) - oldQuantity = Quantity.of((Number) oldRow.get(StoredAmount.name()), oldRowUnits); - if (rowUnits != null && row.get(StoredAmount.name()) != null) - newQuantity = Quantity.of((Number) row.get(StoredAmount.name()), rowUnits); - - if (newQuantity != null && (oldQuantity == null || !oldQuantity.equals(newQuantity))) - { - if (aliquotRoot != null) - aliquotRollupRoot = aliquotRoot; - } - } - - if (aliquotRollupRoot == null && row.containsKey(SampleState.name())) - { - List availableSampleStatuses = new ArrayList<>(); - if (SampleStatusService.get().supportsSampleStatus()) - { - for (DataState state: SampleStatusService.get().getAllProjectStates(c)) - { - if (ExpSchema.SampleStateType.Available.name().equals(state.getStateType())) - 
availableSampleStatuses.add(state.getRowId()); - } - } - - if (!availableSampleStatuses.isEmpty()) - { - Long oldState = asLong(oldRow.get(SampleState.name())); - Long newState = asLong(row.get(SampleState.name())); - if (isAliquotStatusChangeNeedRecalc(availableSampleStatuses, oldState, newState)) - aliquotRollupRoot = aliquotRoot; - } - } - } - - Set aliquotFields = getAliquotSpecificFields(); - Set sampleMetaFields = getSampleMetaFields(); - - // Replace attachment columns with filename and keep AttachmentFiles - Map rowCopy = new CaseInsensitiveHashMap<>(); - - // remove aliquotedFrom from row, or error out - rowCopy.putAll(row); - String newAliquotedFromLSID = (String) rowCopy.get(AliquotedFromLSID.name()); - if (!StringUtils.isEmpty(newAliquotedFromLSID) && !newAliquotedFromLSID.equals(oldAliquotedFromLSID)) - throw new ValidationException("Updating aliquotedFrom is not supported"); - rowCopy.remove(AliquotedFromLSID.name()); - rowCopy.remove(RootMaterialRowId.name()); - rowCopy.remove(ExpMaterial.ALIQUOTED_FROM_INPUT); - - // We need to allow updating from one locked status to another locked status, but without other changes - // and updating from either locked or unlocked to something else while also updating other metadata - DataState oldStatus = SampleStatusService.get().getStateForRowId(getContainer(), MapUtils.getLong(oldRow,SampleState.name())); - boolean oldAllowsOp = SampleStatusService.get().isOperationPermitted(oldStatus, SampleTypeService.SampleOperations.EditMetadata); - DataState newStatus = SampleStatusService.get().getStateForRowId(getContainer(), MapUtils.getLong(rowCopy,SampleState.name())); - boolean newAllowsOp = SampleStatusService.get().isOperationPermitted(newStatus, SampleTypeService.SampleOperations.EditMetadata); - - Map ret = new CaseInsensitiveHashMap<>(super._update(user, c, rowCopy, oldRow, keys)); - - if (aliquotRollupRoot != null) - ret.put(ROOT_RECOMPUTE_ROWID_COL, aliquotRollupRoot); - - Map validRowCopy = new 
CaseInsensitiveHashMap<>(); - boolean hasNonStatusChange = false; - boolean hasStatusCol = false; - for (String updateField : rowCopy.keySet()) - { - Object updateValue = rowCopy.get(updateField); - boolean isAliquotField = aliquotFields.contains(updateField); - boolean isSampleMetaField = sampleMetaFields.contains(updateField); - - if (isAliquot && isSampleMetaField) - { - Object oldMetaValue = oldRow.get(updateField); - if (!Objects.equals(oldMetaValue, updateValue)) - LOG.warn("Sample metadata update has been skipped for an aliquot"); - } - else if (!isAliquot && isAliquotField) - { - LOG.warn("Aliquot-specific field update has been skipped for a sample."); - } - else - { - hasNonStatusChange = hasNonStatusChange || !SampleTypeServiceImpl.statusUpdateColumns.contains(updateField.toLowerCase()); - validRowCopy.put(updateField, updateValue); - } - - if (ExpMaterialTable.Column.SampleState.name().equalsIgnoreCase(updateField)) - hasStatusCol = true; - } - // had a locked status before and either not updating the status or updating to a new locked status - if (hasNonStatusChange && !oldAllowsOp && (!hasStatusCol || !newAllowsOp)) - { - throw new ValidationException(String.format("Updating sample data when status is %s is not allowed.", oldStatus.getLabel())); - } - - keys = new Object[]{lsid}; - TableInfo t = _sampleType.getTinfo(); - // Sample type uses FILE_LINK not FILE_ATTACHMENT, use convertTypes() to handle posted files - Path path = AssayFileWriter.getUploadDirectoryPath(c, "sampletype").toNioPathForWrite(); - convertTypes(user, c, validRowCopy, t, path); - if (t.getColumnNameSet().stream().anyMatch(validRowCopy::containsKey)) - { - ret.putAll(Table.update(user, t, validRowCopy, t.getColumn("lsid"), keys, null, Level.DEBUG)); - } - - ExpMaterialImpl sample = null; - if (hasNameChange) - { - sample = ExperimentServiceImpl.get().getExpMaterial(lsid); - if (sample != null) - ExperimentService.get().addObjectLegacyName(sample.getObjectId(), 
ExperimentServiceImpl.getNamespacePrefix(ExpMaterial.class), oldName, user); - } - - // update comment - if (row.containsKey("flag") || row.containsKey("comment")) - { - Object o = row.containsKey("flag") ? row.get("flag") : row.get("comment"); - String flag = Objects.toString(o, null); - - if (sample == null) - sample = ExperimentServiceImpl.get().getExpMaterial(lsid); - if (sample != null) - sample.setComment(user, flag); - } - - // update aliases - if (row.containsKey("Alias")) - AliasInsertHelper.handleInsertUpdate(getContainer(), user, lsid, ExperimentService.get().getTinfoMaterialAliasMap(), row.get("Alias")); - - // search done in postcommit - - ret.put("lsid", lsid); - ret.put(AliquotedFromLSID.name(), oldRow.get(AliquotedFromLSID.name())); - ret.put(RowId.name(), oldRow.get(RowId.name())); // add RowId for SearchService - return ret; + throw new UnsupportedOperationException("_update() is no longer supported for samples"); } @Override @@ -1099,19 +914,17 @@ public List> deleteRows(User user, Container container, List for (Map k : keys) { - Long rowId = getMaterialRowId(k); // Issue 40621 // adding input fields is expensive, skip input fields for delete since deleted samples are not surfaced on Timeline UI - Map map = getMaterialMap(rowId, getMaterialLsid(k), user, container, false); + Map map = getMaterialMap(k); if (map == null) throw new QueryUpdateServiceException("No Sample Type Material found for RowID or LSID"); - if (rowId == null) - rowId = getMaterialRowId(map); + Long rowId = getMaterialRowId(map); if (rowId == null) throw new QueryUpdateServiceException("RowID is required to delete a Sample Type Material"); - Long sampleStateId = MapUtils.getLong(map,SampleState.name()); + Long sampleStateId = MapUtils.getLong(map, SampleState.name()); if (!SampleStatusService.get().isOperationPermitted(getContainer(), sampleStateId, SampleTypeService.SampleOperations.Delete)) { DataState dataState = SampleStatusService.get().getStateForRowId(container, 
sampleStateId); @@ -1153,59 +966,67 @@ public List> deleteRows(User user, Container container, List return getMaterialStringValue(row, Name.name()); } - IntegerConverter _converter = new IntegerConverter(); - - private @Nullable Integer getMaterialIntegerValue(Map row, String columnName) + private @Nullable Long getMaterialSourceId(Map row) { - if (row != null) - { - Object o = row.get(columnName); - if (o != null) - return _converter.convert(Integer.class, o); - } - - return null; - } - - private @Nullable Integer getMaterialSourceId(Map row) - { - return getMaterialIntegerValue(row, ExpMaterialTable.Column.MaterialSourceId.name()); + return MapUtils.getLong(row, MaterialSourceId.name()); } private @Nullable Long getMaterialRowId(Map row) { - return MapUtils.getLong(row, ExpMaterialTable.Column.RowId.name()); + return MapUtils.getLong(row, RowId.name()); } - private Map getMaterialMap(Long rowId, String lsid, User user, Container container, boolean addInputs) - throws QueryUpdateServiceException + private @Nullable Filter getMaterialFilter(Map keys, boolean useSampleType) { - Filter filter; + Long rowId = getMaterialRowId(keys); if (rowId != null) - filter = new SimpleFilter(ExpMaterialTable.Column.RowId.fieldKey(), rowId); - else if (lsid != null) - filter = new SimpleFilter(LSID.fieldKey(), lsid); - else - throw new QueryUpdateServiceException("Either RowId or LSID is required to get Sample Type Material."); + return new SimpleFilter(RowId.fieldKey(), rowId); - Map sampleRow = new TableSelector(getQueryTable(), filter, null).getMap(); - if (null == sampleRow || !addInputs) - return sampleRow; + String lsid = getMaterialLsid(keys); + if (lsid != null) + return new SimpleFilter(LSID.fieldKey(), lsid); - ExperimentService experimentService = ExperimentService.get(); - ExpMaterial seed = rowId != null ? 
experimentService.getExpMaterial(rowId) : experimentService.getExpMaterial(lsid); - if (null == seed) - return sampleRow; + String name = getMaterialName(keys); + if (name != null) + { + Long materialSourceId = null; + if (useSampleType) + { + if (_sampleType != null) + materialSourceId = _sampleType.getRowId(); + } + else + materialSourceId = getMaterialSourceId(keys); - ExperimentServiceImpl.get().addParentsFields(seed, sampleRow, user, container); + if (materialSourceId != null) + { + SimpleFilter filter = new SimpleFilter(Name.fieldKey(), name); + filter.addCondition(MaterialSourceId.fieldKey(), materialSourceId); + return filter; + } + } - return sampleRow; + return null; + } + + private Map getMaterialMap(Map keys) throws QueryUpdateServiceException + { + return getMaterialMap(keys, false); + } + + private Map getMaterialMap(Map keys, boolean useSampleType) throws QueryUpdateServiceException + { + Filter filter = getMaterialFilter(keys, useSampleType); + if (filter == null) + throw new QueryUpdateServiceException("Either RowId, LSID, or Name and MaterialSourceId is required to get sample."); + + return new TableSelector(getQueryTable(), filter, null).getMap(); } @Override public boolean hasExistingRowsInOtherContainers(Container container, Map> keys) { - Integer sampleTypeId = null; + Long sampleTypeId = null; Set sampleNames = new HashSet<>(); for (Map.Entry> keyMap : keys.entrySet()) { @@ -1218,26 +1039,38 @@ public boolean hasExistingRowsInOtherContainers(Container container, Map columns, boolean includeParent) {} + private record ExistingRowSelect(Set columns, boolean includeParent) {} private @NotNull ExistingRowSelect getExistingRowSelect(@Nullable Set dataColumns) { if (!(getQueryTable() instanceof UpdateableTableInfo updatable) || dataColumns == null) - return new ExistingRowSelect(getQueryTable(), ALL_COLUMNS, true); + return new ExistingRowSelect(ALL_COLUMNS, true); CaseInsensitiveHashMap remap = updatable.remapSchemaColumns(); if (null == remap) 
remap = CaseInsensitiveHashMap.of(); // AliquotRollupDataIterator needs "samplestate", "storedamount", "rootmaterialrowId", "units" for MERGE option - Set includedColumns = new CaseInsensitiveHashSet(Name.name(), LSID.name(), RowId.name(), SampleState.name(), StoredAmount.name(), RootMaterialRowId.name(), Units.name()); + // "RawAmount" and "RawUnits" are needed to replace converted amount and unit values with raw values so the + // audit difference is accurate. + Set includedColumns = new CaseInsensitiveHashSet( + LSID.name(), + Name.name(), + RawAmount.name(), + RawUnits.name(), + RootMaterialRowId.name(), + RowId.name(), + SampleState.name(), + StoredAmount.name(), + Units.name() + ); for (ColumnInfo column : getQueryTable().getColumns()) { if (dataColumns.contains(column.getColumnName())) @@ -1246,27 +1079,6 @@ else if (dataColumns.contains(remap.get(column.getColumnName()))) includedColumns.add(remap.get(column.getColumnName())); } - boolean isAllFromMaterialTable = new CaseInsensitiveHashSet(Stream.of(ExpMaterialTable.Column.values()) - .map(Enum::name) - .collect(Collectors.toSet())) - .containsAll(includedColumns); - - // only include RawAmount and Raw units if no isAllFromMaterialTable, - // needed to replace converted amount and unit values with raw values so audit difference is accurate - if (!isAllFromMaterialTable) - { - includedColumns.add(RawAmount.name()); - includedColumns.add(RawUnits.name()); - } - - TableInfo selectTable = isAllFromMaterialTable ? 
ExperimentService.get().getTinfoMaterial() : getQueryTable(); - if (isAllFromMaterialTable) - { - // ExperimentService.get().getTinfoMaterial() uses Container column, not Folder - includedColumns.remove(ExpMaterialTable.Column.Folder.name()); - includedColumns.add("container"); - } - boolean hasParentInput = false; if (_sampleType != null) { @@ -1275,7 +1087,7 @@ else if (dataColumns.contains(remap.get(column.getColumnName()))) Map importAliases = _sampleType.getImportAliases(); for (String col : dataColumns) { - if (ExperimentService.isInputOutputColumn(col) || Strings.CI.equals("parent",col) || importAliases.containsKey(col)) + if (ExperimentService.isInputOutputColumn(col) || Strings.CI.equals("parent", col) || importAliases.containsKey(col)) { hasParentInput = true; break; @@ -1287,128 +1099,127 @@ else if (dataColumns.contains(remap.get(column.getColumnName()))) } } - return new ExistingRowSelect(selectTable, includedColumns, hasParentInput); + return new ExistingRowSelect(includedColumns, hasParentInput); } @Override - public Map> getExistingRows(User user, Container container, Map> keys, boolean verifyNoCrossFolderData, boolean verifyExisting, @Nullable Set columns) - throws InvalidKeyException, QueryUpdateServiceException + public Map> getExistingRows( + User user, + Container container, + Map> keys, + boolean verifyNoCrossFolderData, + boolean verifyExisting, + @Nullable Set columns + ) throws InvalidKeyException, QueryUpdateServiceException { ExistingRowSelect existingRowSelect = getExistingRowSelect(columns); - TableInfo queryTableInfo = existingRowSelect.tableInfo; Set selectColumns = existingRowSelect.columns; Map> sampleRows = new LinkedHashMap<>(); - Map rowNumLsid = new IntHashMap<>(); - Map rowIdRowNumMap = new LinkedHashMap<>(); - Map lsidRowNumMap = new CaseInsensitiveMapWrapper<>(new LinkedHashMap<>()); Map nameRowNumMap = new LinkedHashMap<>(); - Integer sampleTypeId = null; + Long sampleTypeId = null; for (Map.Entry> keyMap : keys.entrySet()) 
{ Integer rowNum = keyMap.getKey(); - Long rowId = getMaterialRowId(keyMap.getValue()); - String lsid = getMaterialLsid(keyMap.getValue()); - String name = getMaterialName(keyMap.getValue()); - Integer materialSourceId = getMaterialSourceId(keyMap.getValue()); - if (rowId != null) - rowIdRowNumMap.put(rowId, rowNum); - else if (lsid != null) { - lsidRowNumMap.put(lsid, rowNum); - rowNumLsid.put(rowNum, lsid); + rowIdRowNumMap.put(rowId, rowNum); + continue; } - else if (name != null && materialSourceId != null) + + String name = getMaterialName(keyMap.getValue()); + Long materialSourceId = getMaterialSourceId(keyMap.getValue()); + if (name != null && materialSourceId != null) { sampleTypeId = materialSourceId; nameRowNumMap.put(name, rowNum); + continue; } - else - throw new QueryUpdateServiceException("Either RowId or LSID is required to get Sample Type Material."); + + throw new QueryUpdateServiceException("Either RowId or Name is required to get Sample Type Material."); } - if (!rowIdRowNumMap.isEmpty()) + Set missingRowIds; + if (rowIdRowNumMap.isEmpty()) + missingRowIds = Collections.emptySet(); + else { - SimpleFilter filter = new SimpleFilter(ExpMaterialTable.Column.RowId.fieldKey(), rowIdRowNumMap.keySet(), CompareType.IN); + missingRowIds = new HashSet<>(rowIdRowNumMap.keySet()); + SimpleFilter filter = new SimpleFilter(RowId.fieldKey(), rowIdRowNumMap.keySet(), CompareType.IN); filter.addCondition(FieldKey.fromParts("Container"), container); - Map[] rows = new TableSelector(queryTableInfo, selectColumns, filter, null).getMapArray(); + Map[] rows = new TableSelector(getQueryTable(), selectColumns, filter, null).getMapArray(); for (Map row : rows) { - Long rowId = asLong(row.get("rowid")); + Long rowId = asLong(row.get(RowId.name())); Integer rowNum = rowIdRowNumMap.get(rowId); - String sampleLsid = (String) row.get("lsid"); - - rowNumLsid.put(rowNum, sampleLsid); + missingRowIds.remove(rowId); sampleRows.put(rowNum, row); } } - Set allKeys = new 
HashSet<>(); - boolean useLsid = false; - - if (!lsidRowNumMap.isEmpty()) + Set missingNames; + if (nameRowNumMap.isEmpty()) + missingNames = Collections.emptySet(); + else { - useLsid = true; - allKeys.addAll(lsidRowNumMap.keySet()); - - SimpleFilter filter = new SimpleFilter(FieldKey.fromParts(LSID), lsidRowNumMap.keySet(), CompareType.IN); + missingNames = new HashSet<>(nameRowNumMap.keySet()); + SimpleFilter filter = new SimpleFilter(MaterialSourceId.fieldKey(), sampleTypeId); + filter.addCondition(Name.fieldKey(), nameRowNumMap.keySet(), CompareType.IN); filter.addCondition(FieldKey.fromParts("Container"), container); - Map[] rows = new TableSelector(queryTableInfo, selectColumns, filter, null).getMapArray(); + Map[] rows = new TableSelector(getQueryTable(), selectColumns, filter, null).getMapArray(); for (Map row : rows) { - String sampleLsid = (String) row.get("lsid"); - Integer rowNum = lsidRowNumMap.get(sampleLsid); + String name = (String) row.get(Name.name()); + Integer rowNum = nameRowNumMap.get(name); sampleRows.put(rowNum, row); - - allKeys.remove(sampleLsid); + missingNames.remove(name); } } - if (!nameRowNumMap.isEmpty()) + if (verifyNoCrossFolderData && (!missingNames.isEmpty() || !missingRowIds.isEmpty())) { - allKeys.addAll(nameRowNumMap.keySet()); - SimpleFilter filter = new SimpleFilter(ExpMaterialTable.Column.MaterialSourceId.fieldKey(), sampleTypeId); - filter.addCondition(ExpMaterialTable.Column.Name.fieldKey(), nameRowNumMap.keySet(), CompareType.IN); - filter.addCondition(FieldKey.fromParts("Container"), container); - Map[] rows = new TableSelector(queryTableInfo, selectColumns, filter, null).getMapArray(); - for (Map row : rows) + // Issue 52922: cross-folder merge without Product Folders enabled silently ignores the cross-folder + // row update. Use a relaxed container filter to find existing data from cross-containers. 
+ ContainerFilter cf = new ContainerFilter.AllInProjectPlusShared(container, user); + Set containerIds = new HashSet<>(Objects.requireNonNull(cf.getIds())); + containerIds.remove(container.getEntityId()); + + if (!containerIds.isEmpty()) { - String name = (String) row.get("name"); - Integer rowNum = nameRowNumMap.get(name); - String sampleLsid = (String) row.get("lsid"); - sampleRows.put(rowNum, row); - rowNumLsid.put(rowNum, sampleLsid); + if (!missingRowIds.isEmpty()) + { + SimpleFilter filter = new SimpleFilter(RowId.fieldKey(), missingRowIds, CompareType.IN); + filter.addCondition(FieldKey.fromParts("Container"), containerIds, CompareType.IN); + var row = new TableSelector(ExperimentService.get().getTinfoMaterial(), CaseInsensitiveHashSet.of(RowId.name(), Name.name()), filter, null).setMaxRows(1).getMap(); + if (row != null) + throw new InvalidKeyException("Sample does not belong to " + container.getName() + " container: " + row.get(Name.name()) + " (" + row.get(RowId.name()) + ")."); + } + + if (!missingNames.isEmpty()) + { + SimpleFilter filter = new SimpleFilter(MaterialSourceId.fieldKey(), sampleTypeId); + filter.addCondition(FieldKey.fromParts("Container"), containerIds, CompareType.IN); + filter.addCondition(Name.fieldKey(), missingNames, CompareType.IN); - allKeys.remove(name); + var row = new TableSelector(ExperimentService.get().getTinfoMaterial(), CaseInsensitiveHashSet.of(Name.name()), filter, null).setMaxRows(1).getMap(); + if (row != null) + throw new InvalidKeyException("Sample does not belong to " + container.getName() + " container: " + row.get(Name.name()) + "."); + } } } - if (verifyNoCrossFolderData && !allKeys.isEmpty()) + if (verifyExisting) { - // Issue 52922: cross folder merge without Product Folders enabled silently ignores the cross folder row update - ContainerFilter allCf = new ContainerFilter.AllInProjectPlusShared(container, user); // use a relaxed CF to find existing data from cross containers - - SimpleFilter existingDataFilter = 
new SimpleFilter(ExpMaterialTable.Column.MaterialSourceId.fieldKey(), sampleTypeId); - existingDataFilter.addCondition(FieldKey.fromParts("Container"), allCf.getIds(), CompareType.IN); - - existingDataFilter.addCondition(FieldKey.fromParts(useLsid ? "LSID" : "Name"), allKeys, CompareType.IN); - Map[] cfRows = new TableSelector(ExperimentService.get().getTinfoMaterial(), existingDataFilter, null).getMapArray(); - for (Map row : cfRows) - { - String dataContainer = (String) row.get("container"); - if (!dataContainer.equals(container.getId())) - throw new InvalidKeyException("Sample does not belong to " + container.getName() + " container: " + row.get("name") + "."); - } + if (!missingRowIds.isEmpty()) + throw new InvalidKeyException("Sample does not exist: (RowId) " + missingRowIds.iterator().next() + "."); + if (!missingNames.isEmpty()) + throw new InvalidKeyException("Sample does not exist: " + missingNames.iterator().next() + "."); } - if (verifyExisting && !allKeys.isEmpty()) - throw new InvalidKeyException("Sample does not exist: " + allKeys.iterator().next() + "."); - // if contains domain fields, check for aliquot specific fields - if (!queryTableInfo.getName().equalsIgnoreCase("material")) + if (!getQueryTable().getName().equalsIgnoreCase("material")) { Set parentOnlyFields = getSampleMetaFields(); for (Map.Entry> rowNumSampleRow : sampleRows.entrySet()) @@ -1428,7 +1239,7 @@ else if (name != null && materialSourceId != null) Set lsids = new HashSet<>(); for (Map sampleRow : sampleRows.values()) - lsids.add((String) sampleRow.get("lsid")); + lsids.add(getMaterialLsid(sampleRow)); List seeds = ExperimentServiceImpl.get().getExpMaterialsByLsid(lsids); ExperimentServiceImpl.get().addRowsParentsFields(new HashSet<>(seeds), sampleRows, user, container); @@ -1443,20 +1254,93 @@ public List> getRows(User user, Container container, List> result = new ArrayList<>(keys.size()); for (Map k : keys) { - Map materialMap = getMaterialMap(getMaterialRowId(k), 
getMaterialLsid(k), user, container, false); + Map materialMap = getMaterialMap(k); if (materialMap != null) result.add(materialMap); } return result; } + private @Nullable SimpleFilter getRowsFilter(List> keys) throws QueryUpdateServiceException + { + List rowIds = new ArrayList<>(); + List lsids = new ArrayList<>(); + Map> namesBySourceId = new HashMap<>(); + int nameCount = 0; + + // Each row could be keyed differently + for (Map row : keys) + { + Long rowId = getMaterialRowId(row); + if (rowId != null) + { + rowIds.add(rowId); + continue; + } + + String lsid = getMaterialLsid(row); + if (lsid != null) + { + lsids.add(lsid); + continue; + } + + String name = getMaterialName(row); + Long materialSourceId = getMaterialSourceId(row); + if (name != null && materialSourceId != null) + { + namesBySourceId.computeIfAbsent(materialSourceId, k -> new ArrayList<>()).add(name); + nameCount++; + continue; + } + + throw new QueryUpdateServiceException("Either RowId, LSID, or Name and MaterialSourceId is required to get Sample Type Material."); + } + + // But we can optimize if they all share the same filter + SimpleFilter filter = null; + if (rowIds.size() == keys.size()) + filter = new SimpleFilter(RowId.fieldKey(), rowIds, CompareType.IN); + else if (lsids.size() == keys.size()) + filter = new SimpleFilter(LSID.fieldKey(), lsids, CompareType.IN); + else if (nameCount == keys.size() && namesBySourceId.size() == 1) + { + // If all rows are being queried by name and share the same material source id, use a single filter + Map.Entry> entry = namesBySourceId.entrySet().iterator().next(); + filter = new SimpleFilter(MaterialSourceId.fieldKey(), entry.getKey()); + filter.addCondition(Name.fieldKey(), entry.getValue(), CompareType.IN); + } + + return filter; + } + @Override protected Map getRow(User user, Container container, Map keys) throws QueryUpdateServiceException { - return getMaterialMap(getMaterialRowId(keys), getMaterialLsid(keys), user, container, true); + Map sampleRow 
= getMaterialMap(keys, true); + if (sampleRow == null) + return null; + + Long sampleRowId = asLong(sampleRow.get(RowId.name())); + if (sampleRowId == null) + throw new QueryUpdateServiceException("Failed to resolve sample rowId."); + + ExpMaterial seed = ExperimentService.get().getExpMaterial(sampleRowId); + if (null == seed) + return sampleRow; + + ExperimentServiceImpl.get().addParentsFields(seed, sampleRow, user, container); + + return sampleRow; } private void onSamplesChanged(List> results, Map params, Container container, SampleTypeServiceImpl.SampleChangeType reason) @@ -1547,7 +1431,7 @@ void audit(QueryService.AuditAction auditAction) } // TODO: validate/compare functionality of CoerceDataIterator and loadRows() - private static class PrepareDataIteratorBuilder implements DataIteratorBuilder + private static class PreTriggerDataIteratorBuilder implements DataIteratorBuilder { private static final int BATCH_SIZE = 100; @@ -1557,7 +1441,7 @@ private static class PrepareDataIteratorBuilder implements DataIteratorBuilder final Container container; final User user; - public PrepareDataIteratorBuilder(@NotNull ExpSampleTypeImpl sampleType, ExpMaterialTableImpl materialTable, DataIteratorBuilder in, Container container, User user) + public PreTriggerDataIteratorBuilder(@NotNull ExpSampleTypeImpl sampleType, ExpMaterialTableImpl materialTable, DataIteratorBuilder in, Container container, User user) { this.sampleType = sampleType; this.builder = in; @@ -1569,15 +1453,21 @@ public PrepareDataIteratorBuilder(@NotNull ExpSampleTypeImpl sampleType, ExpMate @Override public DataIterator getDataIterator(DataIteratorContext context) { - DataIterator source = LoggingDataIterator.wrap(builder.getDataIterator(context)); + DataIterator di = builder.getDataIterator(context); + if (di == null) + return null; // can happen if context has errors + + boolean isMerge = context.getInsertOption() == InsertOption.MERGE; + boolean isUpdate = context.getInsertOption() == 
InsertOption.UPDATE; // drop columns - ColumnInfo containerColumn = this.materialTable.getColumn(this.materialTable.getContainerFieldKey()); + ColumnInfo containerColumn = materialTable.getColumn(materialTable.getContainerFieldKey()); String containerFieldLabel = containerColumn.getLabel(); var drop = new CaseInsensitiveHashSet(); - for (int i = 1; i <= source.getColumnCount(); i++) + var keysCheck = new CaseInsensitiveHashSet(); + for (int i = 1; i <= di.getColumnCount(); i++) { - String name = source.getColumnInfo(i).getName(); + String name = di.getColumnInfo(i).getName(); boolean isContainerField = name.equalsIgnoreCase(containerFieldLabel); if (!isContainerField) isContainerField = name.equalsIgnoreCase("Container") || name.equalsIgnoreCase("Folder"); @@ -1588,58 +1478,91 @@ public DataIterator getDataIterator(DataIteratorContext context) if (isCommentHeader(name)) continue; if (isNameHeader(name)) + { + keysCheck.add(Name.name()); continue; + } if (isDescriptionHeader(name)) continue; if (ExperimentService.isInputOutputColumn(name)) continue; if (isAliasHeader(name)) continue; - if (isSampleStateHeader(name)) + if (isExpMaterialColumn(SampleState, name)) continue; - if (isMaterialExpDateHeader(name)) + if (isExpMaterialColumn(MaterialExpDate, name)) continue; - if (isStoredAmountHeader(name)) + if (isExpMaterialColumn(StoredAmount, name)) continue; - if (isUnitsHeader(name)) + if (isExpMaterialColumn(Units, name)) continue; if (isContainerField && context.isCrossFolderImport() && !context.getInsertOption().updateOnly) continue; + if (isExpMaterialColumn(RowId, name)) + { + keysCheck.add(RowId.name()); + if (isUpdate) + continue; + + // While accepting RowId during merge is not our preferred behavior, we want to give users a way + // to opt-in to the old behavior where RowId is accepted and ignored. 
+ if (isMerge && !OptionalFeatureService.get().isFeatureEnabled(EXPERIMENTAL_FEATURE_ALLOW_ROW_ID_SAMPLE_MERGE)) + { + context.getErrors().addRowError(new ValidationException("RowId is not accepted when merging samples. Specify only the sample name instead.", RowId.name())); + return null; + } + } + if (isExpMaterialColumn(LSID, name)) + keysCheck.add(LSID.name()); drop.add(name); } } + if ((isMerge || isUpdate) && keysCheck.size() == 1 && keysCheck.contains(LSID.name())) + { + String message = String.format("LSID is no longer accepted as a key for sample %s. Specify a RowId or Name instead.", isMerge ? "merge" : "update"); + context.getErrors().addRowError(new ValidationException(message, LSID.name())); + return null; + } + if (context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate)) - drop.remove("lsid"); + drop.remove(LSID.name()); if (!drop.isEmpty()) - source = new DropColumnsDataIterator(source, drop); + di = new DropColumnsDataIterator(di, drop); - Map columnNameMap = DataIteratorUtil.createColumnNameMap(source); - if (context.getInsertOption() == InsertOption.UPDATE) + Map columnNameMap = DataIteratorUtil.createColumnNameMap(di); + if (isUpdate) { - SimpleTranslator addAliquotedFrom = new SimpleTranslator(source, context); + SimpleTranslator addAliquotedFrom = new SimpleTranslator(di, context); if (!columnNameMap.containsKey(AliquotedFromLSID.name())) addAliquotedFrom.addNullColumn(AliquotedFromLSID.name(), JdbcType.VARCHAR); if (!columnNameMap.containsKey(RootMaterialRowId.name())) addAliquotedFrom.addNullColumn(RootMaterialRowId.name(), JdbcType.INTEGER); addAliquotedFrom.addNullColumn(CURRENT_SAMPLE_STATUS_COLUMN_NAME, JdbcType.INTEGER); - addAliquotedFrom.addColumn(new BaseColumnInfo("cpasType", JdbcType.VARCHAR), new SimpleTranslator.ConstantColumn(sampleType.getLSID())); - addAliquotedFrom.addColumn(new BaseColumnInfo("materialSourceId", JdbcType.INTEGER), new SimpleTranslator.ConstantColumn(sampleType.getRowId())); + 
addAliquotedFrom.addColumn(new BaseColumnInfo(CpasType.fieldKey(), JdbcType.VARCHAR), new SimpleTranslator.ConstantColumn(sampleType.getLSID())); + addAliquotedFrom.addColumn(new BaseColumnInfo(MaterialSourceId.fieldKey(), JdbcType.INTEGER), new SimpleTranslator.ConstantColumn(sampleType.getRowId())); addAliquotedFrom.addNullColumn(ROOT_RECOMPUTE_ROWID_COL, JdbcType.INTEGER); addAliquotedFrom.addNullColumn(PARENT_RECOMPUTE_NAME_COL, JdbcType.VARCHAR); addAliquotedFrom.selectAll(); - var addRequiredColsDI = new SampleUpdateAddColumnsDataIterator(new CachingDataIterator(addAliquotedFrom), materialTable, sampleType.getRowId(), columnNameMap.containsKey("lsid")); + String keyColumnAlias = getKeyColumnAlias(materialTable, columnNameMap); + if (keyColumnAlias == null) + { + context.getErrors().addRowError(new ValidationException(String.format(DUPLICATE_COLUMN_IN_DATA_ERROR, RowId.name()))); + return null; + } + di = new SampleUpdateAddColumnsDataIterator(new CachingDataIterator(addAliquotedFrom), materialTable, sampleType.getRowId(), keyColumnAlias); - SimpleTranslator c = new _SamplesCoerceDataIterator(addRequiredColsDI, context, sampleType, materialTable); + di = new _SamplesCoerceDataIterator(di, context, sampleType, materialTable); context.setWithLookupRemapping(false); - return LoggingDataIterator.wrap(c); + + return LoggingDataIterator.wrap(di); } // CoerceDataIterator to handle the lookup/alternatekeys functionality of loadRows(), // TODO: check if this covers all the functionality, in particular how is alternateKeyCandidates used? 
- DataIterator c = LoggingDataIterator.wrap(new _SamplesCoerceDataIterator(source, context, sampleType, materialTable)); + DataIterator c = LoggingDataIterator.wrap(new _SamplesCoerceDataIterator(di, context, sampleType, materialTable)); context.setWithLookupRemapping(false); SimpleTranslator addColumns = new SimpleTranslator(c, context); addColumns.setDebugName("add genId and other required columns"); @@ -1659,26 +1582,43 @@ public DataIterator getDataIterator(DataIteratorContext context) addColumns.addNullColumn(ROOT_RECOMPUTE_ROWID_COL, JdbcType.INTEGER); addColumns.addNullColumn(PARENT_RECOMPUTE_NAME_COL, JdbcType.VARCHAR); } - DataIterator dataIterator = LoggingDataIterator.wrap(addColumns); + + di = LoggingDataIterator.wrap(addColumns); // Table Counters - DataIteratorBuilder dib = ExpDataIterators.CounterDataIteratorBuilder.create(dataIterator, sampleType.getContainer(), materialTable, ExpSampleType.SEQUENCE_PREFIX, sampleType.getRowId()); - dataIterator = dib.getDataIterator(context); + di = ExpDataIterators.CounterDataIteratorBuilder + .create(di, sampleType.getContainer(), materialTable, ExpSampleType.SEQUENCE_PREFIX, sampleType.getRowId()) + .getDataIterator(context); // sampleset.createSampleNames() + generate lsid // TODO: does not handle insertIgnore - DataIterator names = new _GenerateNamesDataIterator(sampleType, container, user, DataIteratorUtil.wrapMap(dataIterator, false), context, batchSize); - + DataIterator names = new _GenerateNamesDataIterator(sampleType, container, user, DataIteratorUtil.wrapMap(di, false), context, batchSize); return LoggingDataIterator.wrap(names); } + private static @Nullable String getKeyColumnAlias(TableInfo materialTable, @NotNull Map columnNameMap) + { + // Currently, SampleUpdateAddColumnsDataIterator is being called before a translator is invoked to + // remap column labels to columns (e.g., "Row Id" -> "RowId"). Due to this, we need to search the + // map of columns for the key column. 
+ var rowIdAliases = ImportAliasable.Helper.createImportSet(materialTable.getColumn(RowId.fieldKey())); + rowIdAliases.retainAll(columnNameMap.keySet()); + + if (rowIdAliases.size() == 1) + return rowIdAliases.iterator().next(); + if (rowIdAliases.isEmpty()) + return Name.name(); + + return null; + } + private static boolean isReservedHeader(String name) { if (isNameHeader(name) || isDescriptionHeader(name) || isCommentHeader(name) || "CpasType".equalsIgnoreCase(name) || isAliasHeader(name)) return true; if (ExperimentService.isInputOutputColumn(name)) return true; - for (ExpMaterialTable.Column column : ExpMaterialTable.Column.values()) + for (ExpMaterialTable.Column column : values()) { if (isExpMaterialColumn(column, name)) return true; @@ -1693,42 +1633,22 @@ private static boolean isExpMaterialColumn(ExpMaterialTable.Column column, Strin private static boolean isNameHeader(String name) { - return isExpMaterialColumn(ExpMaterialTable.Column.Name, name); + return isExpMaterialColumn(Name, name); } private static boolean isDescriptionHeader(String name) { - return isExpMaterialColumn(ExpMaterialTable.Column.Description, name); - } - - private static boolean isSampleStateHeader(String name) - { - return isExpMaterialColumn(SampleState, name); + return isExpMaterialColumn(Description, name); } private static boolean isCommentHeader(String name) { - return isExpMaterialColumn(ExpMaterialTable.Column.Flag, name) || "Comment".equalsIgnoreCase(name); + return isExpMaterialColumn(Flag, name) || "Comment".equalsIgnoreCase(name); } private static boolean isAliasHeader(String name) { - return isExpMaterialColumn(ExpMaterialTable.Column.Alias, name); - } - - private static boolean isMaterialExpDateHeader(String name) - { - return isExpMaterialColumn(ExpMaterialTable.Column.MaterialExpDate, name); - } - - private static boolean isStoredAmountHeader(String name) - { - return isExpMaterialColumn(ExpMaterialTable.Column.StoredAmount, name) || "Amount".equalsIgnoreCase(name); - 
} - - public static boolean isUnitsHeader(String name) - { - return isExpMaterialColumn(ExpMaterialTable.Column.Units, name); + return isExpMaterialColumn(Alias, name); } private static boolean isAliquotRollupHeader(String name) @@ -1743,13 +1663,11 @@ private static boolean isAliquotRollupHeader(String name) static class _GenerateNamesDataIterator extends SimpleTranslator { final boolean _allowUserSpecifiedNames; // whether manual names specification is allowed or only name expression generation - final int _batchSize; final RemapCache _cache; final Container _container; final List>> _extraPropsFns; final SampleNameGeneratorState _nameState; final Lsid.LsidBuilder lsidBuilder; - final DbSequence _lsidDbSeq; final ExpSampleTypeImpl _sampleType; final User _user; @@ -1792,7 +1710,6 @@ static class _GenerateNamesDataIterator extends SimpleTranslator boolean skipDuplicateCheck = context.getConfigParameterBoolean(SkipMaxSampleCounterFunction); _nameState = sampleType.getNameGenState(skipDuplicateCheck, true, _container, user); lsidBuilder = sampleType.generateSampleLSID(); - _batchSize = batchSize; boolean useLsidForUpdate = context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate); if (useLsidForUpdate) @@ -1800,14 +1717,14 @@ static class _GenerateNamesDataIterator extends SimpleTranslator else selectAll(CaseInsensitiveHashSet.of(Name.name(), LSID.name(), RootMaterialRowId.name())); - _lsidDbSeq = sampleType.getSampleLsidDbSeq(_batchSize, sampleType.getContainer()); - - addColumn(new BaseColumnInfo("name", JdbcType.VARCHAR), (Supplier)() -> generatedName); + addColumn(new BaseColumnInfo(Name.fieldKey(), JdbcType.VARCHAR), (Supplier)() -> generatedName); if (!useLsidForUpdate) - addColumn(new BaseColumnInfo("lsid", JdbcType.VARCHAR), (Supplier)() -> lsidBuilder.setObjectId(String.valueOf(_lsidDbSeq.next())).toString()); - // Ensure we have a cpasType column and it is of the right value - addColumn(new BaseColumnInfo("cpasType", 
JdbcType.VARCHAR), new SimpleTranslator.ConstantColumn(sampleType.getLSID())); - addColumn(new BaseColumnInfo("materialSourceId", JdbcType.INTEGER), new SimpleTranslator.ConstantColumn(sampleType.getRowId())); + { + DbSequence lsidDbSeq = sampleType.getSampleLsidDbSeq(batchSize, sampleType.getContainer()); + addColumn(new BaseColumnInfo(LSID.name(), JdbcType.VARCHAR), (Supplier) () -> lsidBuilder.setObjectId(String.valueOf(lsidDbSeq.next())).toString()); + } + addColumn(new BaseColumnInfo(CpasType.fieldKey(), JdbcType.VARCHAR), new SimpleTranslator.ConstantColumn(sampleType.getLSID())); + addColumn(new BaseColumnInfo(MaterialSourceId.fieldKey(), JdbcType.INTEGER), new SimpleTranslator.ConstantColumn(sampleType.getRowId())); } @Override @@ -2011,7 +1928,7 @@ else if (StoredAmount.name().equalsIgnoreCase(name)) { ExpMaterialTable.Column field = entry.getKey(); JdbcType jdbcType = entry.getValue(); - var col = new BaseColumnInfo(field.name(), jdbcType); + var col = new BaseColumnInfo(field.fieldKey(), jdbcType); addColumn(col, new AliquotRollupConvertColumn(field, jdbcType, aliquotedFromDataColInd)); } diff --git a/experiment/src/org/labkey/experiment/api/property/StorageProvisionerImpl.java b/experiment/src/org/labkey/experiment/api/property/StorageProvisionerImpl.java index 6ebc330eea3..727423a1a70 100644 --- a/experiment/src/org/labkey/experiment/api/property/StorageProvisionerImpl.java +++ b/experiment/src/org/labkey/experiment/api/property/StorageProvisionerImpl.java @@ -892,7 +892,7 @@ private DbScope validateDomain(Domain domain) return scope; } - private void dropTableIndices(Domain domain, Set indexNames) + public void dropTableIndices(Domain domain, Set indexNames) { DbScope scope = validateDomain(domain); diff --git a/pipeline/package-lock.json b/pipeline/package-lock.json index 6a6710ddab5..4b1397a8fbc 100644 --- a/pipeline/package-lock.json +++ b/pipeline/package-lock.json @@ -8,7 +8,7 @@ "name": "pipeline", "version": "0.0.0", "dependencies": { - 
"@labkey/components": "6.72.0" + "@labkey/components": "7.3.1" }, "devDependencies": { "@labkey/build": "8.7.0", @@ -2759,9 +2759,9 @@ } }, "node_modules/@labkey/components": { - "version": "6.72.0", - "resolved": "https://labkey.jfrog.io/artifactory/api/npm/libs-client/@labkey/components/-/@labkey/components-6.72.0.tgz", - "integrity": "sha512-ouYWpvQBF0GZ/j/ErGRcAOHTAwkGP/fSA4hDKaql59U1kMGI7gZdoHZNb5aX0YWX+FLor8FDqLXz9WWmkykEWw==", + "version": "7.3.1", + "resolved": "https://labkey.jfrog.io/artifactory/api/npm/libs-client/@labkey/components/-/@labkey/components-7.3.1.tgz", + "integrity": "sha512-rGH7uNiU3yWMhIUbcQZtctom8dbfrFFzhrHhcOcqzSNEQ6mH1/XXOPGmNdSsgAwKidqdgDfkTd5y5QC+I7PBmA==", "license": "SEE LICENSE IN LICENSE.txt", "dependencies": { "@hello-pangea/dnd": "18.0.1", diff --git a/pipeline/package.json b/pipeline/package.json index fa8766f4f9f..a62353c2f69 100644 --- a/pipeline/package.json +++ b/pipeline/package.json @@ -14,7 +14,7 @@ "build-prod": "npm run clean && cross-env NODE_ENV=production PROD_SOURCE_MAP=source-map webpack --config node_modules/@labkey/build/webpack/prod.config.js --color --progress --profile" }, "dependencies": { - "@labkey/components": "6.72.0" + "@labkey/components": "7.3.1" }, "devDependencies": { "@labkey/build": "8.7.0",