Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@
public class SecretSessionBaseService {

static final String EMPTY_STRING_VALUE = "";
static final BigInteger BULK_DATASET_VOLUME = BigInteger.TWO.pow(53).subtract(BigInteger.ONE);
static final BigInteger BULK_DATASET_VOLUME = BigInteger.TWO.pow(53).subtract(BigInteger.TWO);
static final String IEXEC_DATASET_PREFIX = "IEXEC_DATASET_";
static final String IEXEC_DATASET_URL_SUFFIX = "_URL";
static final String IEXEC_DATASET_CHECKSUM_SUFFIX = "_CHECKSUM";
Expand Down Expand Up @@ -213,9 +213,14 @@ private Map<String, String> getBulkDatasetTokens(final int index,

boolean isBulkDatasetOrderCompatibleWithDeal(final DatasetOrder datasetOrder, final TaskDescription taskDescription) {
try {
log.debug("Check dataset order against deal [chainTaskId:{}, deal:{}, dataset:{}",
log.debug("Check dataset order against deal [chainTaskId:{}, deal:{}, dataset:{}]",
taskDescription.getChainTaskId(), taskDescription.getChainDealId(), datasetOrder.getDataset());
iexecHubService.assertDatasetDealCompatibility(datasetOrder, taskDescription.getChainDealId());
// if BULK_DATASET_VOLUME is greater than dataset order volume, it is an error
if (BULK_DATASET_VOLUME.compareTo(datasetOrder.getVolume()) > 0) {
throw new IllegalStateException(
String.format("Dataset order volume %s is invalid for bulk processing", datasetOrder.getVolume()));
}
return true;
} catch (Exception e) {
log.error("Failed to perform all checks on dataset [chainTaskId:{}, dataset:{}, error:{}]",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -245,11 +245,11 @@ void shouldNotGetSecretsTokensSinceDealParamsAreMissing() {
// endregion

// region getPreComputeTokens
private DatasetOrder createDatasetOrderForBulk(final EIP712Domain pocoDomain,
final Credentials credentials,
final BigInteger datasetPrice,
private DatasetOrder createDatasetOrderForBulk(final BigInteger datasetPrice,
final BigInteger volume,
final OrderTag orderTag) {
final OrderTag orderTag) throws Exception {
final EIP712Domain pocoDomain = new EIP712Domain(65535, "");
final Credentials credentials = Credentials.create(Keys.createEcKeyPair());
final SignerService signerService = new SignerService(null, 65535, credentials);
final String datasetAddress = createEthereumAddress();
final DatasetOrder datasetOrder = DatasetOrder.builder()
Expand All @@ -265,18 +265,31 @@ private DatasetOrder createDatasetOrderForBulk(final EIP712Domain pocoDomain,
return (DatasetOrder) signerService.signOrderForDomain(datasetOrder, pocoDomain);
}

/**
 * Builds the expected pre-compute environment entries for the first bulk dataset slice.
 *
 * @param datasetAddress on-chain address of the dataset, used as the dataset filename token
 * @param empty          when {@code true} the dataset secrets (URL, checksum, key) are blank,
 *                       modeling a rejected order; the filename token is always populated
 * @return the map of environment variable names to their expected values
 */
private Map<String, String> expectedTokens(final String datasetAddress, final boolean empty) {
    final String expectedUrl = empty ? "" : DATASET_URL;
    final String expectedChecksum = empty ? "" : DATASET_CHECKSUM;
    final String expectedKey = empty ? "" : DATASET_KEY;
    return Map.of(
            "IEXEC_BULK_SLICE_SIZE", "1",
            "IEXEC_DATASET_1_URL", expectedUrl,
            "IEXEC_DATASET_1_CHECKSUM", expectedChecksum,
            "IEXEC_DATASET_1_KEY", expectedKey,
            "IEXEC_DATASET_1_FILENAME", datasetAddress
    );
}

@Test
void shouldGetPreComputeBulkProcessingTokensForInvalidOrder() throws Exception {
// Pins BULK_DATASET_VOLUME to its expected literal value, 2^53 - 2 = 9007199254740990,
// so an accidental change to the constant's definition is caught by this test.
void validateBulkDatasetVolume() {
assertThat(BULK_DATASET_VOLUME.longValue()).isEqualTo(9007199254740990L);
}

@Test
void shouldGetPreComputeBulkProcessingTokensForPoCoRejectedOrder() throws Exception {
final DealParams dealParams = DealParams.builder().bulkCid("bulkCid").build();
final TaskDescription taskDescription = createTaskDescription(dealParams, enclaveConfig).build();
final TeeSessionRequest request = createSessionRequest(taskDescription);
final TeeChallenge challenge = TeeChallenge.builder()
.credentials(EthereumCredentials.generate())
.build();
final EIP712Domain pocoDomain = new EIP712Domain(65535, "");
final Credentials credentials = Credentials.create(Keys.createEcKeyPair());
final DatasetOrder signedDatasetOrder = createDatasetOrderForBulk(
pocoDomain, credentials, BigInteger.ONE, BigInteger.ONE, OrderTag.STANDARD);
BigInteger.ONE, BigInteger.ONE, OrderTag.STANDARD);
final String datasetAddress = signedDatasetOrder.getDataset();

when(ipfsClient.readBulkCid("bulkCid")).thenReturn(List.of("ordersCid"));
Expand All @@ -291,13 +304,36 @@ void shouldGetPreComputeBulkProcessingTokensForInvalidOrder() throws Exception {

assertThat(enclaveBase.getName()).isEqualTo("pre-compute");
assertThat(enclaveBase.getMrenclave()).isEqualTo(PRE_COMPUTE_FINGERPRINT);
assertThat(enclaveBase.getEnvironment()).containsAllEntriesOf(Map.ofEntries(
Map.entry("IEXEC_BULK_SLICE_SIZE", "1"),
Map.entry("IEXEC_DATASET_1_URL", ""),
Map.entry("IEXEC_DATASET_1_CHECKSUM", ""),
Map.entry("IEXEC_DATASET_1_KEY", ""),
Map.entry("IEXEC_DATASET_1_FILENAME", datasetAddress)
));
assertThat(enclaveBase.getEnvironment()).containsAllEntriesOf(
expectedTokens(datasetAddress, true));
}

@Test
// Verifies that when PoCo-side deal compatibility passes but the order's volume (ONE)
// is below BULK_DATASET_VOLUME — which the service treats as an error — the dataset
// secrets are left empty in the pre-compute environment.
void shouldGetPreComputeBulkProcessingTokensForSmsRejectedOrder() throws Exception {
// Deal params carry a bulk CID that resolves, via the mocked IPFS client, to one signed order.
final DealParams dealParams = DealParams.builder().bulkCid("bulkCid").build();
final TaskDescription taskDescription = createTaskDescription(dealParams, enclaveConfig).build();
final TeeSessionRequest request = createSessionRequest(taskDescription);
final TeeChallenge challenge = TeeChallenge.builder()
.credentials(EthereumCredentials.generate())
.build();
// Volume ONE is smaller than BULK_DATASET_VOLUME, so the service-side volume check
// is expected to reject this order even though the PoCo check below is stubbed to pass.
final DatasetOrder signedDatasetOrder = createDatasetOrderForBulk(
BigInteger.ONE, BigInteger.ONE, OrderTag.STANDARD);
final String datasetAddress = signedDatasetOrder.getDataset();

when(ipfsClient.readBulkCid("bulkCid")).thenReturn(List.of("ordersCid"));
when(ipfsClient.readOrders("ordersCid")).thenReturn(List.of(signedDatasetOrder));
when(iexecHubService.getChainDataset(datasetAddress)).thenReturn(Optional.of(ChainDataset.builder().chainDatasetId(datasetAddress).build()));
// PoCo compatibility deliberately passes (doNothing): the rejection must come from elsewhere.
doNothing().when(iexecHubService).assertDatasetDealCompatibility(signedDatasetOrder, DEAL_ID);

final SecretEnclaveBase enclaveBase = teeSecretsService.getPreComputeTokens(
request,
getSignTokens(challenge.getCredentials().getPrivateKey())
);

assertThat(enclaveBase.getName()).isEqualTo("pre-compute");
assertThat(enclaveBase.getMrenclave()).isEqualTo(PRE_COMPUTE_FINGERPRINT);
// Rejected order: URL/checksum/key tokens are empty, only the filename token is populated.
assertThat(enclaveBase.getEnvironment()).containsAllEntriesOf(
expectedTokens(datasetAddress, true));
}

@Test
Expand All @@ -308,10 +344,8 @@ void shouldGetPreComputeBulkProcessingTokensForValidOrder() throws Exception {
final TeeChallenge challenge = TeeChallenge.builder()
.credentials(EthereumCredentials.generate())
.build();
final EIP712Domain pocoDomain = new EIP712Domain(65535, "");
final Credentials credentials = Credentials.create(Keys.createEcKeyPair());
final DatasetOrder signedDatasetOrder = createDatasetOrderForBulk(
pocoDomain, credentials, BigInteger.ZERO, BULK_DATASET_VOLUME, OrderTag.TEE_SCONE);
BigInteger.ZERO, BULK_DATASET_VOLUME, OrderTag.TEE_SCONE);
final String datasetAddress = signedDatasetOrder.getDataset();
final ChainDataset chainDataset = ChainDataset.builder()
.chainDatasetId(datasetAddress)
Expand All @@ -332,13 +366,8 @@ void shouldGetPreComputeBulkProcessingTokensForValidOrder() throws Exception {

assertThat(enclaveBase.getName()).isEqualTo("pre-compute");
assertThat(enclaveBase.getMrenclave()).isEqualTo(PRE_COMPUTE_FINGERPRINT);
assertThat(enclaveBase.getEnvironment()).containsAllEntriesOf(Map.ofEntries(
Map.entry("IEXEC_BULK_SLICE_SIZE", "1"),
Map.entry("IEXEC_DATASET_1_URL", DATASET_URL),
Map.entry("IEXEC_DATASET_1_CHECKSUM", DATASET_CHECKSUM),
Map.entry("IEXEC_DATASET_1_KEY", DATASET_KEY),
Map.entry("IEXEC_DATASET_1_FILENAME", datasetAddress)
));
assertThat(enclaveBase.getEnvironment()).containsAllEntriesOf(
expectedTokens(datasetAddress, false));
}

@Test
Expand Down