diff --git a/yarn-project/end-to-end/src/e2e_multiple_blobs.test.ts b/yarn-project/end-to-end/src/e2e_multiple_blobs.test.ts index 9bd91976c1d4..5954ef8983d3 100644 --- a/yarn-project/end-to-end/src/e2e_multiple_blobs.test.ts +++ b/yarn-project/end-to-end/src/e2e_multiple_blobs.test.ts @@ -58,7 +58,7 @@ describe('e2e_multiple_blobs', () => { await aztecNodeAdmin.setConfig({ minTxsPerBlock: TX_COUNT }); const provenTxs = [ - // 1 contract deployment tx. + // 1 contract deployment tx (publishes public_dispatch bytecode: ~1,931 fields). await publishContractClass(wallet, AvmTestContract.artifact), // 2 private function broadcast txs. We pick [2] because it has large bytecode (~1,807 fields), // which combined with the contract class publication exceeds FIELDS_PER_BLOB (4,096). diff --git a/yarn-project/pxe/src/contract_function_simulator/contract_function_simulator.ts b/yarn-project/pxe/src/contract_function_simulator/contract_function_simulator.ts index 7520a21f6a88..f3e0b0fa43f2 100644 --- a/yarn-project/pxe/src/contract_function_simulator/contract_function_simulator.ts +++ b/yarn-project/pxe/src/contract_function_simulator/contract_function_simulator.ts @@ -123,6 +123,7 @@ export class ContractFunctionSimulator { * @param senderForTags - The address that is used as a tagging sender when emitting private logs. Returned from * the `privateGetSenderForTags` oracle. * @param scopes - The accounts whose notes we can access in this call. Currently optional and will default to all. + * @param jobId - The job ID for staged writes. * @returns The result of the execution. */ public async run( @@ -131,13 +132,14 @@ export class ContractFunctionSimulator { selector: FunctionSelector, msgSender = AztecAddress.fromField(Fr.MAX_FIELD_VALUE), anchorBlockHeader: BlockHeader, - senderForTags?: AztecAddress, - scopes?: AztecAddress[], + senderForTags: AztecAddress | undefined, + scopes: AztecAddress[] | undefined, + jobId: string, ): Promise<PrivateExecutionResult> { const simulatorSetupTimer = new Timer(); await this.contractStore.syncPrivateState(contractAddress, selector, privateSyncCall => - this.runUtility(privateSyncCall, [], anchorBlockHeader, scopes), + this.runUtility(privateSyncCall, [], anchorBlockHeader, scopes, jobId), ); await verifyCurrentClassId(contractAddress, this.aztecNode, this.contractStore, anchorBlockHeader); @@ -174,7 +176,7 @@ export class ContractFunctionSimulator { callContext, anchorBlockHeader, async call => { - await this.runUtility(call, [], anchorBlockHeader, scopes); + await this.runUtility(call, [], anchorBlockHeader, scopes, jobId); }, request.authWitnesses, request.capsules, @@ -192,6 +194,7 @@ export class ContractFunctionSimulator { this.senderAddressBookStore, this.capsuleStore, this.privateEventStore, + jobId, 0, // totalPublicArgsCount startSideEffectCounter, undefined, // log @@ -255,13 +258,15 @@ export class ContractFunctionSimulator { * @param anchorBlockHeader - The block header to use as base state for this run. * @param scopes - Optional array of account addresses whose notes can be accessed in this call. Defaults to all * accounts if not specified. + * @param jobId - The job ID for staged writes. 
* @returns A return value of the utility function in a form as returned by the simulator (Noir fields) */ public async runUtility( call: FunctionCall, authwits: AuthWitness[], anchorBlockHeader: BlockHeader, - scopes?: AztecAddress[], + scopes: AztecAddress[] | undefined, + jobId: string, ): Promise<Fr[]> { await verifyCurrentClassId(call.to, this.aztecNode, this.contractStore, anchorBlockHeader); @@ -286,6 +291,7 @@ export class ContractFunctionSimulator { this.senderAddressBookStore, this.capsuleStore, this.privateEventStore, + jobId, undefined, scopes, ); diff --git a/yarn-project/pxe/src/contract_function_simulator/oracle/oracle_version_is_checked.test.ts b/yarn-project/pxe/src/contract_function_simulator/oracle/oracle_version_is_checked.test.ts index e7a8404efb9d..22e28b8be010 100644 --- a/yarn-project/pxe/src/contract_function_simulator/oracle/oracle_version_is_checked.test.ts +++ b/yarn-project/pxe/src/contract_function_simulator/oracle/oracle_version_is_checked.test.ts @@ -140,7 +140,16 @@ describe('Oracle Version Check test suite', () => { // Call the private function with arbitrary message sender and sender for tags const msgSender = await AztecAddress.random(); const senderForTags = await AztecAddress.random(); - await acirSimulator.run(txRequest, contractAddress, selector, msgSender, anchorBlockHeader, senderForTags); + await acirSimulator.run( + txRequest, + contractAddress, + selector, + msgSender, + anchorBlockHeader, + senderForTags, + undefined, + 'test', + ); expect(utilityAssertCompatibleOracleVersionSpy).toHaveBeenCalledTimes(1); }, 30_000); @@ -169,7 +178,7 @@ describe('Oracle Version Check test suite', () => { }; // Call the utility function - await acirSimulator.runUtility(execRequest, [], anchorBlockHeader, []); + await acirSimulator.runUtility(execRequest, [], anchorBlockHeader, [], 'test'); expect(utilityAssertCompatibleOracleVersionSpy).toHaveBeenCalledTimes(1); }, 30_000); diff --git a/yarn-project/pxe/src/contract_function_simulator/oracle/private_execution.test.ts b/yarn-project/pxe/src/contract_function_simulator/oracle/private_execution.test.ts index 846afe6c7b5c..01aae6d59af5 100644 --- a/yarn-project/pxe/src/contract_function_simulator/oracle/private_execution.test.ts +++ b/yarn-project/pxe/src/contract_function_simulator/oracle/private_execution.test.ts @@ -211,7 +211,16 @@ describe('Private Execution test suite', () => { salt: Fr.random(), }); - return acirSimulator.run(txRequest, contractAddress, selector, msgSender, anchorBlockHeader, senderForTags); + return acirSimulator.run( + txRequest, + contractAddress, + selector, + msgSender, + anchorBlockHeader, + senderForTags, + undefined, + 'test', + ); }; const insertLeaves = async (leaves: Fr[], name = 'noteHash') => { diff --git a/yarn-project/pxe/src/contract_function_simulator/oracle/private_execution_oracle.ts b/yarn-project/pxe/src/contract_function_simulator/oracle/private_execution_oracle.ts index 9b9438049700..9013b95fc3b4 100644 --- a/yarn-project/pxe/src/contract_function_simulator/oracle/private_execution_oracle.ts +++ b/yarn-project/pxe/src/contract_function_simulator/oracle/private_execution_oracle.ts @@ -104,6 +104,7 @@ export class PrivateExecutionOracle extends UtilityExecutionOracle implements IP senderAddressBookStore: SenderAddressBookStore, capsuleStore: CapsuleStore, privateEventStore: PrivateEventStore, + jobId: string, private totalPublicCalldataCount: number = 0, protected sideEffectCounter: number = 0, log = createLogger('simulator:client_execution_context'), @@ -126,6 +127,7 @@ export 
class PrivateExecutionOracle extends UtilityExecutionOracle implements IP senderAddressBookStore, capsuleStore, privateEventStore, + jobId, log, scopes, ); @@ -281,9 +283,9 @@ export class PrivateExecutionOracle extends UtilityExecutionOracle implements IP // This is a tagging secret we've not yet used in this tx, so first sync our store to make sure its indices // are up to date. We do this here because this store is not synced as part of the global sync because // that'd be wasteful as most tagging secrets are not used in each tx. - await syncSenderTaggingIndexes(secret, this.contractAddress, this.aztecNode, this.senderTaggingStore); + await syncSenderTaggingIndexes(secret, this.contractAddress, this.aztecNode, this.senderTaggingStore, this.jobId); - const lastUsedIndex = await this.senderTaggingStore.getLastUsedIndex(secret); + const lastUsedIndex = await this.senderTaggingStore.getLastUsedIndex(secret, this.jobId); // If lastUsedIndex is undefined, we've never used this secret, so start from 0 // Otherwise, the next index to use is one past the last used index return lastUsedIndex === undefined ? 0 : lastUsedIndex + 1; @@ -372,7 +374,7 @@ export class PrivateExecutionOracle extends UtilityExecutionOracle implements IP const pendingNullifiers = this.noteCache.getNullifiers(this.callContext.contractAddress); - const noteService = new NoteService(this.noteStore, this.aztecNode, this.anchorBlockStore); + const noteService = new NoteService(this.noteStore, this.aztecNode, this.anchorBlockStore, this.jobId); const dbNotes = await noteService.getNotes( this.callContext.contractAddress, owner, @@ -586,6 +588,7 @@ export class PrivateExecutionOracle extends UtilityExecutionOracle implements IP this.senderAddressBookStore, this.capsuleStore, this.privateEventStore, + this.jobId, this.totalPublicCalldataCount, sideEffectCounter, this.log, diff --git a/yarn-project/pxe/src/contract_function_simulator/oracle/utility_execution.test.ts b/yarn-project/pxe/src/contract_function_simulator/oracle/utility_execution.test.ts index 3a3714156158..98be9ac239da 100644 --- a/yarn-project/pxe/src/contract_function_simulator/oracle/utility_execution.test.ts +++ b/yarn-project/pxe/src/contract_function_simulator/oracle/utility_execution.test.ts @@ -190,7 +190,7 @@ describe('Utility Execution test suite', () => { returnTypes: artifact.returnTypes, }; - const result = await acirSimulator.runUtility(execRequest, [], anchorBlockHeader, []); + const result = await acirSimulator.runUtility(execRequest, [], anchorBlockHeader, [], 'test'); expect(result).toEqual([new Fr(9)]); }, 30_000); @@ -222,6 +222,7 @@ describe('Utility Execution test suite', () => { senderAddressBookStore, capsuleStore, privateEventStore, + 'test', ); }); diff --git a/yarn-project/pxe/src/contract_function_simulator/oracle/utility_execution_oracle.ts b/yarn-project/pxe/src/contract_function_simulator/oracle/utility_execution_oracle.ts index 35da37b003bd..4f906bd0dea3 100644 --- a/yarn-project/pxe/src/contract_function_simulator/oracle/utility_execution_oracle.ts +++ b/yarn-project/pxe/src/contract_function_simulator/oracle/utility_execution_oracle.ts @@ -64,6 +64,7 @@ export class UtilityExecutionOracle implements IMiscOracle, IUtilityExecutionOra protected readonly senderAddressBookStore: SenderAddressBookStore, protected readonly capsuleStore: CapsuleStore, protected readonly privateEventStore: PrivateEventStore, + protected readonly jobId: string, protected log = createLogger('simulator:client_view_context'), protected readonly scopes?: 
AztecAddress[], ) {} @@ -256,7 +257,7 @@ export class UtilityExecutionOracle implements IMiscOracle, IUtilityExecutionOra offset: number, status: NoteStatus, ): Promise { - const noteService = new NoteService(this.noteStore, this.aztecNode, this.anchorBlockStore); + const noteService = new NoteService(this.noteStore, this.aztecNode, this.anchorBlockStore, this.jobId); const dbNotes = await noteService.getNotes(this.contractAddress, owner, storageSlot, status, this.scopes); return pickNotes(dbNotes, { @@ -353,11 +354,12 @@ export class UtilityExecutionOracle implements IMiscOracle, IUtilityExecutionOra this.recipientTaggingStore, this.senderAddressBookStore, this.addressStore, + this.jobId, ); await logService.syncTaggedLogs(this.contractAddress, pendingTaggedLogArrayBaseSlot, this.scopes); - const noteService = new NoteService(this.noteStore, this.aztecNode, this.anchorBlockStore); + const noteService = new NoteService(this.noteStore, this.aztecNode, this.anchorBlockStore, this.jobId); await noteService.syncNoteNullifiers(this.contractAddress); } @@ -384,14 +386,14 @@ export class UtilityExecutionOracle implements IMiscOracle, IUtilityExecutionOra // We read all note and event validation requests and process them all concurrently. This makes the process much // faster as we don't need to wait for the network round-trip. const noteValidationRequests = ( - await this.capsuleStore.readCapsuleArray(contractAddress, noteValidationRequestsArrayBaseSlot) + await this.capsuleStore.readCapsuleArray(contractAddress, noteValidationRequestsArrayBaseSlot, this.jobId) ).map(NoteValidationRequest.fromFields); const eventValidationRequests = ( - await this.capsuleStore.readCapsuleArray(contractAddress, eventValidationRequestsArrayBaseSlot) + await this.capsuleStore.readCapsuleArray(contractAddress, eventValidationRequestsArrayBaseSlot, this.jobId) ).map(EventValidationRequest.fromFields); - const noteService = new NoteService(this.noteStore, this.aztecNode, this.anchorBlockStore); + const noteService = new NoteService(this.noteStore, this.aztecNode, this.anchorBlockStore, this.jobId); const noteDeliveries = noteValidationRequests.map(request => noteService.deliverNote( request.contractAddress, @@ -407,7 +409,7 @@ export class UtilityExecutionOracle implements IMiscOracle, IUtilityExecutionOra ), ); - const eventService = new EventService(this.anchorBlockStore, this.aztecNode, this.privateEventStore); + const eventService = new EventService(this.anchorBlockStore, this.aztecNode, this.privateEventStore, this.jobId); const eventDeliveries = eventValidationRequests.map(request => eventService.deliverEvent( request.contractAddress, @@ -422,8 +424,8 @@ export class UtilityExecutionOracle implements IMiscOracle, IUtilityExecutionOra await Promise.all([...noteDeliveries, ...eventDeliveries]); // Requests are cleared once we're done. - await this.capsuleStore.setCapsuleArray(contractAddress, noteValidationRequestsArrayBaseSlot, []); - await this.capsuleStore.setCapsuleArray(contractAddress, eventValidationRequestsArrayBaseSlot, []); + await this.capsuleStore.setCapsuleArray(contractAddress, noteValidationRequestsArrayBaseSlot, [], this.jobId); + await this.capsuleStore.setCapsuleArray(contractAddress, eventValidationRequestsArrayBaseSlot, [], this.jobId); } public async utilityBulkRetrieveLogs( @@ -439,7 +441,7 @@ export class UtilityExecutionOracle implements IMiscOracle, IUtilityExecutionOra // We read all log retrieval requests and process them all concurrently. 
This makes the process much faster as we // don't need to wait for the network round-trip. const logRetrievalRequests = ( - await this.capsuleStore.readCapsuleArray(contractAddress, logRetrievalRequestsArrayBaseSlot) + await this.capsuleStore.readCapsuleArray(contractAddress, logRetrievalRequestsArrayBaseSlot, this.jobId) ).map(LogRetrievalRequest.fromFields); const logService = new LogService( @@ -450,18 +452,20 @@ export class UtilityExecutionOracle implements IMiscOracle, IUtilityExecutionOra this.recipientTaggingStore, this.senderAddressBookStore, this.addressStore, + this.jobId, ); const maybeLogRetrievalResponses = await logService.bulkRetrieveLogs(logRetrievalRequests); // Requests are cleared once we're done. - await this.capsuleStore.setCapsuleArray(contractAddress, logRetrievalRequestsArrayBaseSlot, []); + await this.capsuleStore.setCapsuleArray(contractAddress, logRetrievalRequestsArrayBaseSlot, [], this.jobId); // The responses are stored as Option in a second CapsuleArray. await this.capsuleStore.setCapsuleArray( contractAddress, logRetrievalResponsesArrayBaseSlot, maybeLogRetrievalResponses.map(LogRetrievalResponse.toSerializedOption), + this.jobId, ); } @@ -470,7 +474,8 @@ export class UtilityExecutionOracle implements IMiscOracle, IUtilityExecutionOra // TODO(#10727): instead of this check that this.contractAddress is allowed to access the external DB throw new Error(`Contract ${contractAddress} is not allowed to access ${this.contractAddress}'s PXE DB`); } - return this.capsuleStore.storeCapsule(this.contractAddress, slot, capsule); + this.capsuleStore.storeCapsule(this.contractAddress, slot, capsule, this.jobId); + return Promise.resolve(); } public async utilityLoadCapsule(contractAddress: AztecAddress, slot: Fr): Promise<Fr[] | null> { @@ -481,7 +486,7 @@ export class UtilityExecutionOracle implements IMiscOracle, IUtilityExecutionOra return ( // TODO(#12425): On the following line, the pertinent capsule gets overshadowed by the transient one. Tackle this. this.capsules.find(c => c.contractAddress.equals(contractAddress) && c.storageSlot.equals(slot))?.data ?? - (await this.capsuleStore.loadCapsule(this.contractAddress, slot)) + (await this.capsuleStore.loadCapsule(this.contractAddress, slot, this.jobId)) ); } @@ -490,7 +495,8 @@ export class UtilityExecutionOracle implements IMiscOracle, IUtilityExecutionOra // TODO(#10727): instead of this check that this.contractAddress is allowed to access the external DB throw new Error(`Contract ${contractAddress} is not allowed to access ${this.contractAddress}'s PXE DB`); } - return this.capsuleStore.deleteCapsule(this.contractAddress, slot); + this.capsuleStore.deleteCapsule(this.contractAddress, slot, this.jobId); + return Promise.resolve(); } public utilityCopyCapsule( @@ -503,7 +509,7 @@ export class UtilityExecutionOracle implements IMiscOracle, IUtilityExecutionOra // TODO(#10727): instead of this check that this.contractAddress is allowed to access the external DB throw new Error(`Contract ${contractAddress} is not allowed to access ${this.contractAddress}'s PXE DB`); } - return this.capsuleStore.copyCapsule(this.contractAddress, srcSlot, dstSlot, numEntries); + return this.capsuleStore.copyCapsule(this.contractAddress, srcSlot, dstSlot, numEntries, this.jobId); } // TODO(#11849): consider replacing this oracle with a pure Noir implementation of aes decryption. 
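The common thread in the hunks above is that every store call now carries a jobId, so writes land in a per-job staging area instead of committed storage. A minimal sketch of that read-through staging pattern follows; the class and method names here are illustrative only, not the PXE API, which layers the same idea on top of the LMDB-backed kv-store.

// Illustrative only: a staged key-value overlay in the spirit of the jobId threading above.
class StagedKV<T> {
  #committed = new Map<string, T>();
  #staged = new Map<string, Map<string, T>>(); // jobId -> overlay of pending writes

  #overlay(jobId: string): Map<string, T> {
    let overlay = this.#staged.get(jobId);
    if (!overlay) {
      overlay = new Map();
      this.#staged.set(jobId, overlay);
    }
    return overlay;
  }

  // Writes made with a jobId stay invisible to other readers until commit.
  set(key: string, value: T, jobId?: string): void {
    if (jobId) {
      this.#overlay(jobId).set(key, value);
    } else {
      this.#committed.set(key, value);
    }
  }

  // Reads see the job's own staged writes first, then fall back to committed state.
  get(key: string, jobId?: string): T | undefined {
    const staged = jobId ? this.#staged.get(jobId) : undefined;
    if (staged?.has(key)) {
      return staged.get(key);
    }
    return this.#committed.get(key);
  }

  // Promote the job's overlay into committed state (what JobCoordinator.commitJob triggers).
  commit(jobId: string): void {
    for (const [key, value] of this.#overlay(jobId)) {
      this.#committed.set(key, value);
    }
    this.#staged.delete(jobId);
  }

  // Drop the overlay without touching committed state (what JobCoordinator.abortJob triggers).
  discardStaged(jobId: string): void {
    this.#staged.delete(jobId);
  }
}

Note the real stores additionally need deletion semantics (a staged delete must shadow a committed value until commit), which the capsule-store tests later in this diff exercise explicitly.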
diff --git a/yarn-project/pxe/src/events/event_service.test.ts b/yarn-project/pxe/src/events/event_service.test.ts index e3d6608984cc..b7dd88ec6d0a 100644 --- a/yarn-project/pxe/src/events/event_service.test.ts +++ b/yarn-project/pxe/src/events/event_service.test.ts @@ -30,6 +30,7 @@ describe('deliverEvent', () => { let aztecNode: ReturnType<typeof mock<AztecNode>>; let eventService: EventService; + const TEST_JOB_ID = 'test-job-id'; const setSyncedBlockNumber = (blockNumber: BlockNumber) => { return anchorBlockStore.setHeader( @@ -89,7 +90,7 @@ describe('deliverEvent', () => { ]), ); - eventService = new EventService(anchorBlockStore, aztecNode, privateEventStore); + eventService = new EventService(anchorBlockStore, aztecNode, privateEventStore, TEST_JOB_ID); }); function runDeliverEvent( @@ -138,12 +139,16 @@ describe('deliverEvent', () => { await runDeliverEvent(); // I should be able to retrieve the private event I just saved using getPrivateEvents - const result = await privateEventStore.getPrivateEvents(eventSelector, { - contractAddress, - fromBlock: blockNumber, - toBlock: blockNumber + 1, - scopes: [recipient], - }); + const result = await privateEventStore.getPrivateEvents( + eventSelector, + { + contractAddress, + fromBlock: blockNumber, + toBlock: blockNumber + 1, + scopes: [recipient], + }, + TEST_JOB_ID, + ); expect(result.length).toEqual(1); expect(result[0].packedEvent).toEqual(eventContent); diff --git a/yarn-project/pxe/src/events/event_service.ts b/yarn-project/pxe/src/events/event_service.ts index 74b37ba27503..6391d2544c17 100644 --- a/yarn-project/pxe/src/events/event_service.ts +++ b/yarn-project/pxe/src/events/event_service.ts @@ -14,6 +14,7 @@ export class EventService { private readonly anchorBlockStore: AnchorBlockStore, private readonly aztecNode: AztecNode, private readonly privateEventStore: PrivateEventStore, + private readonly jobId: string, ) {} public async deliverEvent( @@ -72,6 +73,7 @@ export class EventService { l2BlockNumber: nullifierIndex.l2BlockNumber, // Block number in which the event was emitted l2BlockHash: nullifierIndex.l2BlockHash, // Block hash in which the event was emitted }, + this.jobId, ); } } diff --git a/yarn-project/pxe/src/job_coordinator/index.ts b/yarn-project/pxe/src/job_coordinator/index.ts new file mode 100644 index 000000000000..8e2d88220221 --- /dev/null +++ b/yarn-project/pxe/src/job_coordinator/index.ts @@ -0,0 +1 @@ +export { JobCoordinator, type StagedStore } from './job_coordinator.js'; diff --git a/yarn-project/pxe/src/job_coordinator/job_coordinator.test.ts b/yarn-project/pxe/src/job_coordinator/job_coordinator.test.ts new file mode 100644 index 000000000000..aa52728ee98a --- /dev/null +++ b/yarn-project/pxe/src/job_coordinator/job_coordinator.test.ts @@ -0,0 +1,113 @@ +import type { AztecAsyncKVStore } from '@aztec/kv-store'; +import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; + +import { jest } from '@jest/globals'; + +import { JobCoordinator, type StagedStore } from './job_coordinator.js'; + +describe('JobCoordinator', () => { + let store: AztecAsyncKVStore; + let coordinator: JobCoordinator; + + beforeEach(async () => { + store = await openTmpStore('job_coordinator_test'); + coordinator = new JobCoordinator(store); + }); + + describe('beginJob', () => { + it('creates a new job id', () => { + const jobId = coordinator.beginJob(); + + expect(typeof jobId).toBe('string'); + expect(jobId.length).toBeGreaterThan(0); + }); + + // Note: we could eventually relax this if we want more concurrency, + // but it's good to start with this 
guardrail + it('throws if job already in progress', () => { + coordinator.beginJob(); + expect(() => coordinator.beginJob()).toThrow(/already in progress/); + }); + + it('tracks job in progress', () => { + coordinator.beginJob(); + expect(coordinator.hasJobInProgress()).toBe(true); + }); + }); + + describe('commitJob', () => { + it('clears job marker on commit', async () => { + const jobId = coordinator.beginJob(); + await coordinator.commitJob(jobId); + expect(coordinator.hasJobInProgress()).toBe(false); + }); + + it('throws if no matching job in progress', async () => { + const jobId = coordinator.beginJob(); + await coordinator.commitJob(jobId); + await expect(coordinator.commitJob(jobId)).rejects.toThrow(/no matching job/); + }); + + it('calls commit on registered stores', async () => { + const commitMock = jest.fn<() => Promise<void>>().mockResolvedValue(undefined); + const discardStagedMock = jest.fn<() => Promise<void>>().mockResolvedValue(undefined); + const mockStore: StagedStore = { + storeName: 'mock_store', + commit: commitMock, + discardStaged: discardStagedMock, + }; + + coordinator.registerStore(mockStore); + + const jobId = coordinator.beginJob(); + + await coordinator.commitJob(jobId); + + expect(commitMock).toHaveBeenCalledWith(jobId); + }); + }); + + describe('abortJob', () => { + it('clears job marker on abort', async () => { + const jobId = coordinator.beginJob(); + + await coordinator.abortJob(jobId); + + expect(coordinator.hasJobInProgress()).toBe(false); + }); + + it('calls discardStaged on all registered stores', async () => { + const commitMock = jest.fn<() => Promise<void>>().mockResolvedValue(undefined); + const discardStagedMock = jest.fn<() => Promise<void>>().mockResolvedValue(undefined); + const mockStore: StagedStore = { + storeName: 'mock_store', + commit: commitMock, + discardStaged: discardStagedMock, + }; + + coordinator.registerStore(mockStore); + + const jobId = coordinator.beginJob(); + + await coordinator.abortJob(jobId); + + expect(discardStagedMock).toHaveBeenCalledWith(jobId); + }); + }); + + describe('registerStore', () => { + it('throws on duplicate registration', () => { + const commitMock = jest.fn<() => Promise<void>>().mockResolvedValue(undefined); + const discardStagedMock = jest.fn<() => Promise<void>>().mockResolvedValue(undefined); + const mockStore: StagedStore = { + storeName: 'mock_store', + commit: commitMock, + discardStaged: discardStagedMock, + }; + + coordinator.registerStore(mockStore); + + expect(() => coordinator.registerStore(mockStore)).toThrow(/already registered/); + }); + }); +}); diff --git a/yarn-project/pxe/src/job_coordinator/job_coordinator.ts b/yarn-project/pxe/src/job_coordinator/job_coordinator.ts new file mode 100644 index 000000000000..c12739e3c310 --- /dev/null +++ b/yarn-project/pxe/src/job_coordinator/job_coordinator.ts @@ -0,0 +1,149 @@ +import { randomBytes } from '@aztec/foundation/crypto/random'; +import { createLogger } from '@aztec/foundation/log'; +import type { AztecAsyncKVStore } from '@aztec/kv-store'; + +/** + * Interface that stores must implement to support staged writes. + */ +export interface StagedStore { + /** Unique name identifying this store (used for tracking staged stores from JobCoordinator) */ + readonly storeName: string; + + /** + * Commits staged data to main storage. + * Should be called within a transaction for atomicity. + * + * @param jobId - The job identifier + */ + commit(jobId: string): Promise<void>; + + /** + * Discards staged data without committing. + * Called on abort. 
+ * + * @param jobId - The job identifier + */ + discardStaged(jobId: string): Promise<void>; +} + +/** + * JobCoordinator manages job lifecycle and provides crash resilience for PXE operations. + * + * It uses a staged writes pattern: + * 1. When a job begins, a unique job ID is created + * 2. During the job, all writes go to staging (keyed by job ID) + * 3. On commit, staging is promoted to main storage + * 4. On abort, staged data is discarded + * + * Note: PXE should only rely on a single JobCoordinator instance, so it can eventually + * orchestrate concurrent jobs. Right now it doesn't make a difference because we're + * using a job queue with concurrency=1. + */ +export class JobCoordinator { + private readonly log = createLogger('pxe:job_coordinator'); + + /** The underlying KV store */ + kvStore: AztecAsyncKVStore; + + #currentJobId: string | undefined; + #stores: Map<string, StagedStore> = new Map(); + + constructor(kvStore: AztecAsyncKVStore) { + this.kvStore = kvStore; + } + + /** + * Registers a staged store. + * Must be called during initialization for all stores that need staging support. + */ + registerStore(store: StagedStore): void { + if (this.#stores.has(store.storeName)) { + throw new Error(`Store "${store.storeName}" is already registered`); + } + this.#stores.set(store.storeName, store); + this.log.debug(`Registered staged store: ${store.storeName}`); + } + + /** + * Registers multiple staged stores. + */ + registerStores(stores: StagedStore[]): void { + for (const store of stores) { + this.registerStore(store); + } + } + + /** + * Begins a new job and returns a job ID for staged writes. + * + * @returns Job ID to pass to store operations + */ + beginJob(): string { + if (this.#currentJobId) { + throw new Error( + `Cannot begin job: job ${this.#currentJobId} is already in progress. ` + + `This should not happen - ensure jobs are properly committed or aborted.`, + ); + } + + const jobId = randomBytes(8).toString('hex'); + this.#currentJobId = jobId; + + this.log.debug(`Started job ${jobId}`); + return jobId; + } + + /** + * Commits a job by promoting all staged data to main storage. + * + * @param jobId - The job ID returned from beginJob + */ + async commitJob(jobId: string): Promise<void> { + if (!this.#currentJobId || this.#currentJobId !== jobId) { + throw new Error( + `Cannot commit job ${jobId}: no matching job in progress. ` + `Current job: ${this.#currentJobId ?? 'none'}`, + ); + } + + this.log.debug(`Committing job ${jobId}`); + + // Commit all stores atomically in a single transaction. + // Each store's commit is a no-op if it has no staged data (but that's up to each store to handle). + await this.kvStore.transactionAsync(async () => { + for (const store of this.#stores.values()) { + await store.commit(jobId); + } + }); + + this.#currentJobId = undefined; + this.log.debug(`Job ${jobId} committed successfully`); + } + + /** + * Aborts a job by discarding all staged data. + * + * @param jobId - The job ID returned from beginJob + */ + async abortJob(jobId: string): Promise<void> { + if (!this.#currentJobId || this.#currentJobId !== jobId) { + // Job may have already been aborted or never started properly + this.log.warn(`Abort called for job ${jobId} but current job is ${this.#currentJobId ?? 'none'}`); + } + + this.log.debug(`Aborting job ${jobId}`); + + for (const store of this.#stores.values()) { + await store.discardStaged(jobId); + } + + this.#currentJobId = undefined; + this.log.debug(`Job ${jobId} aborted`); + } + + /** + * Checks if there's a job currently in progress. 
+ */ + hasJobInProgress(): boolean { + return this.#currentJobId !== undefined; + } +} diff --git a/yarn-project/pxe/src/logs/log_service.test.ts b/yarn-project/pxe/src/logs/log_service.test.ts index 25e2c3494b8e..69fd3f297ae2 100644 --- a/yarn-project/pxe/src/logs/log_service.test.ts +++ b/yarn-project/pxe/src/logs/log_service.test.ts @@ -50,6 +50,7 @@ describe('LogService', () => { recipientTaggingStore, senderAddressBookStore, addressStore, + 'test', ); aztecNode.getPrivateLogsByTags.mockReset(); diff --git a/yarn-project/pxe/src/logs/log_service.ts b/yarn-project/pxe/src/logs/log_service.ts index 4959d4a089e4..c1382093ed0e 100644 --- a/yarn-project/pxe/src/logs/log_service.ts +++ b/yarn-project/pxe/src/logs/log_service.ts @@ -26,6 +26,7 @@ export class LogService { private readonly recipientTaggingStore: RecipientTaggingStore, private readonly senderAddressBookStore: SenderAddressBookStore, private readonly addressStore: AddressStore, + private readonly jobId: string, ) {} public async bulkRetrieveLogs(logRetrievalRequests: LogRetrievalRequest[]): Promise<(LogRetrievalResponse | null)[]> { @@ -122,6 +123,7 @@ export class LogService { this.aztecNode, this.recipientTaggingStore, anchorBlockNumber, + this.jobId, ), ), ); @@ -186,7 +188,7 @@ export class LogService { }); // TODO: This looks like it could belong more at the oracle interface level - return this.capsuleStore.appendToCapsuleArray(contractAddress, capsuleArrayBaseSlot, pendingTaggedLogs); + return this.capsuleStore.appendToCapsuleArray(contractAddress, capsuleArrayBaseSlot, pendingTaggedLogs, this.jobId); } async #getCompleteAddress(account: AztecAddress): Promise<CompleteAddress> { diff --git a/yarn-project/pxe/src/notes/note_service.test.ts b/yarn-project/pxe/src/notes/note_service.test.ts index 0a53958ca841..c676a2d270ad 100644 --- a/yarn-project/pxe/src/notes/note_service.test.ts +++ b/yarn-project/pxe/src/notes/note_service.test.ts @@ -28,12 +28,12 @@ describe('NoteService', () => { let contractAddress: AztecAddress; let noteService: NoteService; + const TEST_JOB_ID = 'test-job-id'; beforeEach(async () => { const store = await openTmpStore('test'); keyStore = new KeyStore(store); noteStore = await NoteStore.create(store); - noteStore = await NoteStore.create(store); aztecNode = mock<AztecNode>(); anchorBlockStore = new AnchorBlockStore(store); await anchorBlockStore.setHeader( @@ -54,11 +54,10 @@ describe('NoteService', () => { recipient = await keyStore.addAccount(new Fr(69), Fr.random()); - noteService = new NoteService(noteStore, aztecNode, anchorBlockStore); + noteService = new NoteService(noteStore, aztecNode, anchorBlockStore, TEST_JOB_ID); }); it('should remove notes that have been nullified', async () => { - // Set up initial state with a note const noteDao = await NoteDao.random({ contractAddress }); // Spy on the noteStore.applyNullifiers to later on have additional guarantee that we really removed jest.spyOn(noteStore, 'applyNullifiers'); // Add the note to storage - await noteStore.addNotes([noteDao], recipient.address); + await noteStore.addNotes([noteDao], recipient.address, TEST_JOB_ID); - // Set up the nullifier in the merkle tree const nullifierIndex = randomDataInBlock(123n); aztecNode.findLeavesIndexes.mockResolvedValue([nullifierIndex]); - // Call the function under test await noteService.syncNoteNullifiers(contractAddress); // Verify the note was removed by checking storage - const remainingNotes = await noteStore.getNotes({ - contractAddress, - status: NoteStatus.ACTIVE, - 
scopes: [recipient.address], - }); + const remainingNotes = await noteStore.getNotes( + { + contractAddress, + status: NoteStatus.ACTIVE, + scopes: [recipient.address], + }, + TEST_JOB_ID, + ); expect(remainingNotes).toHaveLength(0); // Verify the note was removed by checking the spy @@ -92,7 +92,7 @@ describe('NoteService', () => { const noteDao = await NoteDao.random({ contractAddress }); // Add the note to storage - await noteStore.addNotes([noteDao], recipient.address); + await noteStore.addNotes([noteDao], recipient.address, TEST_JOB_ID); // No nullifier found in merkle tree aztecNode.findLeavesIndexes.mockResolvedValue([undefined]); @@ -101,11 +101,14 @@ describe('NoteService', () => { await noteService.syncNoteNullifiers(contractAddress); // Verify note still exists - const remainingNotes = await noteStore.getNotes({ - contractAddress, - status: NoteStatus.ACTIVE, - scopes: [recipient.address], - }); + const remainingNotes = await noteStore.getNotes( + { + contractAddress, + status: NoteStatus.ACTIVE, + scopes: [recipient.address], + }, + TEST_JOB_ID, + ); expect(remainingNotes).toHaveLength(1); expect(remainingNotes[0]).toEqual(noteDao); }); @@ -118,7 +121,7 @@ describe('NoteService', () => { const noteDao = await NoteDao.random({ contractAddress }); // Add the note to storage - await noteStore.addNotes([noteDao], recipient.address); + await noteStore.addNotes([noteDao], recipient.address, TEST_JOB_ID); // Mock nullifier to only exist after synced block aztecNode.findLeavesIndexes.mockImplementation(blockNum => { @@ -132,11 +135,14 @@ describe('NoteService', () => { await noteService.syncNoteNullifiers(contractAddress); // Verify note still exists - const remainingNotes = await noteStore.getNotes({ - contractAddress, - status: NoteStatus.ACTIVE, - scopes: [recipient.address], - }); + const remainingNotes = await noteStore.getNotes( + { + contractAddress, + status: NoteStatus.ACTIVE, + scopes: [recipient.address], + }, + TEST_JOB_ID, + ); expect(remainingNotes).toHaveLength(1); expect(remainingNotes[0]).toEqual(noteDao); }); @@ -157,8 +163,8 @@ describe('NoteService', () => { // Verify applyNullifiers was called once for all accounts expect(getNotesSpy).toHaveBeenCalledTimes(1); - // Verify getNotes was called with the correct contract address - expect(getNotesSpy).toHaveBeenCalledWith(expect.objectContaining({ contractAddress })); + // Verify getNotes was called with the correct contract address and the jobId + expect(getNotesSpy).toHaveBeenCalledWith(expect.objectContaining({ contractAddress }), TEST_JOB_ID); }); describe('deliverNote', () => { @@ -277,7 +283,7 @@ describe('NoteService', () => { ); // Verify note was stored - const notes = await noteStore.getNotes({ contractAddress, scopes: [recipient.address] }); + const notes = await noteStore.getNotes({ contractAddress, scopes: [recipient.address] }, TEST_JOB_ID); expect(notes).toHaveLength(1); expect(notes[0].noteHash.equals(noteHash)).toBe(true); @@ -353,7 +359,7 @@ describe('NoteService', () => { ); // Verify note was removed - const notes = await noteStore.getNotes({ contractAddress, scopes: [recipient.address] }); + const notes = await noteStore.getNotes({ contractAddress, scopes: [recipient.address] }, TEST_JOB_ID); expect(notes).toHaveLength(0); }); }); diff --git a/yarn-project/pxe/src/notes/note_service.ts b/yarn-project/pxe/src/notes/note_service.ts index 4596a6bd0407..8ad37313230b 100644 --- a/yarn-project/pxe/src/notes/note_service.ts +++ b/yarn-project/pxe/src/notes/note_service.ts @@ -15,6 +15,7 @@ 
export class NoteService { private readonly noteStore: NoteStore, private readonly aztecNode: AztecNode, private readonly anchorBlockStore: AnchorBlockStore, + private readonly jobId: string, ) {} /** @@ -33,13 +34,16 @@ export class NoteService { status: NoteStatus, scopes?: AztecAddress[], ) { - const noteDaos = await this.noteStore.getNotes({ - contractAddress, - owner, - storageSlot, - status, - scopes, - }); + const noteDaos = await this.noteStore.getNotes( + { + contractAddress, + owner, + storageSlot, + status, + scopes, + }, + this.jobId, + ); return noteDaos.map( ({ contractAddress, owner, storageSlot, randomness, noteNonce, note, noteHash, siloedNullifier, index }) => ({ contractAddress, @@ -71,7 +75,7 @@ export class NoteService { public async syncNoteNullifiers(contractAddress: AztecAddress): Promise<void> { const syncedBlockNumber = (await this.anchorBlockStore.getBlockHeader()).getBlockNumber(); - const contractNotes = await this.noteStore.getNotes({ contractAddress }); + const contractNotes = await this.noteStore.getNotes({ contractAddress }, this.jobId); if (contractNotes.length === 0) { return; @@ -105,7 +109,7 @@ export class NoteService { }) .filter(nullifier => nullifier !== undefined) as DataInBlock[]; - await this.noteStore.applyNullifiers(foundNullifiers); + await this.noteStore.applyNullifiers(foundNullifiers, this.jobId); } public async deliverNote( @@ -190,11 +194,11 @@ export class NoteService { ); // The note was found by `recipient`, so we use that as the scope when storing the note. - await this.noteStore.addNotes([noteDao], recipient); + await this.noteStore.addNotes([noteDao], recipient, this.jobId); if (nullifierIndex !== undefined) { const { data: _, ...blockHashAndNum } = nullifierIndex; - await this.noteStore.applyNullifiers([{ data: siloedNullifier, ...blockHashAndNum }]); + await this.noteStore.applyNullifiers([{ data: siloedNullifier, ...blockHashAndNum }], this.jobId); } } } diff --git a/yarn-project/pxe/src/pxe.test.ts b/yarn-project/pxe/src/pxe.test.ts index 086b76338744..60bb6dac88e2 100644 --- a/yarn-project/pxe/src/pxe.test.ts +++ b/yarn-project/pxe/src/pxe.test.ts @@ -210,6 +210,7 @@ describe('PXE', () => { }); async function storeEvent(blockNumber?: number): Promise { + const testJobId = 'test-job'; const event = { packedEvent: [Fr.random(), Fr.random()], l2BlockNumber: BlockNumber(blockNumber ?? 
lastKnownBlockNumber), @@ -218,13 +219,20 @@ describe('PXE', () => { eventSelector, }; - await privateEventStore.storePrivateEventLog(eventSelector, event.packedEvent, eventIndex++, { - contractAddress, - scope, - txHash: event.txHash, - l2BlockNumber: event.l2BlockNumber, - l2BlockHash: event.l2BlockHash, - }); + await privateEventStore.storePrivateEventLog( + eventSelector, + event.packedEvent, + eventIndex++, + { + contractAddress, + scope, + txHash: event.txHash, + l2BlockNumber: event.l2BlockNumber, + l2BlockHash: event.l2BlockHash, + }, + testJobId, + ); + await privateEventStore.commit(testJobId); return event; } diff --git a/yarn-project/pxe/src/pxe.ts b/yarn-project/pxe/src/pxe.ts index 858f19370d5e..07c8ecb40d07 100644 --- a/yarn-project/pxe/src/pxe.ts +++ b/yarn-project/pxe/src/pxe.ts @@ -63,6 +63,7 @@ import { ProxiedContractStoreFactory } from './contract_function_simulator/proxi import { PXEDebugUtils } from './debug/pxe_debug_utils.js'; import { enrichPublicSimulationError, enrichSimulationError } from './error_enriching.js'; import { PrivateEventFilterValidator } from './events/private_event_filter_validator.js'; +import { JobCoordinator } from './job_coordinator/index.js'; import { PrivateKernelExecutionProver, type PrivateKernelExecutionProverConfig, @@ -107,6 +108,7 @@ export class PXE { private protocolContractsProvider: ProtocolContractsProvider, private log: Logger, private jobQueue: SerialQueue, + private jobCoordinator: JobCoordinator, public debug: PXEDebugUtils, ) {} @@ -143,6 +145,17 @@ export class PXE { const capsuleStore = new CapsuleStore(store); const keyStore = new KeyStore(store); const tipsStore = new L2TipsKVStore(store, 'pxe'); + + const jobCoordinator = new JobCoordinator(store); + jobCoordinator.registerStores([ + anchorBlockStore, + noteStore, + senderTaggingStore, + recipientTaggingStore, + privateEventStore, + capsuleStore, + ]); + const synchronizer = new BlockSynchronizer(node, anchorBlockStore, noteStore, tipsStore, config, loggerOrSuffix); const debugUtils = new PXEDebugUtils(contractStore, noteStore); @@ -168,6 +181,7 @@ export class PXE { protocolContractsProvider, log, jobQueue, + jobCoordinator, debugUtils, ); @@ -221,8 +235,10 @@ export class PXE { * complete. * * Useful for tasks that cannot run concurrently, such as contract function simulation. + * + * @param fn - The function to execute. Receives a jobId for staged writes. */ - #putInJobQueue(fn: () => Promise): Promise { + #putInJobQueue(fn: (jobId: string) => Promise): Promise { // TODO(#12636): relax the conditions under which we forbid concurrency. 
if (this.jobQueue.length() != 0) { this.log.warn( ); } - return this.jobQueue.put(fn); + return this.jobQueue.put(async () => { + const jobId = this.jobCoordinator.beginJob(); + try { + const result = await fn(jobId); + await this.jobCoordinator.commitJob(jobId); + return result; + } catch (err) { + await this.jobCoordinator.abortJob(jobId); + throw err; + } + }); } async #registerProtocolContracts() { @@ -263,7 +289,8 @@ async #executePrivate( contractFunctionSimulator: ContractFunctionSimulator, txRequest: TxExecutionRequest, - scopes?: AztecAddress[], + scopes: AztecAddress[] | undefined, + jobId: string, ): Promise<PrivateExecutionResult> { const { origin: contractAddress, functionSelector } = txRequest; @@ -280,6 +307,7 @@ // contract entrypoint undefined, // senderForTags scopes, + jobId, ); this.log.debug(`Private simulation completed for ${contractAddress.toString()}:${functionSelector}`); return result; @@ -298,17 +326,19 @@ * @param authWitnesses - Authentication witnesses required for the function call. * @param scopes - Optional array of account addresses whose notes can be accessed in this call. Defaults to all * accounts if not specified. + * @param jobId - The job ID for staged writes. * @returns The simulation result containing the outputs of the utility function. */ async #simulateUtility( contractFunctionSimulator: ContractFunctionSimulator, call: FunctionCall, - authWitnesses?: AuthWitness[], - scopes?: AztecAddress[], + authWitnesses: AuthWitness[], + scopes: AztecAddress[] | undefined, + jobId: string, ) { try { const anchorBlockHeader = await this.anchorBlockStore.getBlockHeader(); - return contractFunctionSimulator.runUtility(call, authWitnesses ?? [], anchorBlockHeader, scopes); + return contractFunctionSimulator.runUtility(call, authWitnesses, anchorBlockHeader, scopes, jobId); } catch (err) { if (err instanceof SimulationError) { await enrichSimulationError(err, this.contractStore, this.log); @@ -602,7 +632,7 @@ public updateContract(contractAddress: AztecAddress, artifact: ContractArtifact): Promise<void> { // We disable concurrently updating contracts to avoid concurrently syncing with the node, or changing a contract's // class while we're simulating it. - return this.#putInJobQueue(async () => { + return this.#putInJobQueue(async _jobId => { const currentInstance = await this.contractStore.getContractInstance(contractAddress); if (!currentInstance) { throw new Error(`Instance not found when updating a contract. Contract address: ${contractAddress}.`); @@ -657,14 +687,14 @@ let privateExecutionResult: PrivateExecutionResult; // We disable proving concurrently mostly out of caution, since it accesses some of our stores. Proving is so // computationally demanding that it'd be rare for someone to try to do it concurrently regardless. - return this.#putInJobQueue(async () => { + return this.#putInJobQueue(async jobId => { const totalTimer = new Timer(); try { const syncTimer = new Timer(); await this.blockStateSynchronizer.sync(); const syncTime = syncTimer.ms(); const contractFunctionSimulator = this.#getSimulatorForTx(); - privateExecutionResult = await this.#executePrivate(contractFunctionSimulator, txRequest); + privateExecutionResult = await this.#executePrivate(contractFunctionSimulator, txRequest, undefined, jobId); const { publicInputs, @@ -711,7 +741,7 @@ // TODO(benesjan): The following is an expensive operation. 
Figure out a way to avoid it. const txHash = (await txProvingResult.toTx()).txHash; - await this.senderTaggingStore.storePendingIndexes(preTagsUsedInTheTx, txHash); + await this.senderTaggingStore.storePendingIndexes(preTagsUsedInTheTx, txHash, jobId); this.log.debug(`Stored used pre-tags as sender for the tx`, { preTagsUsedInTheTx, }); @@ -741,7 +771,7 @@ export class PXE { skipProofGeneration: boolean = true, ): Promise { // We disable concurrent profiles for consistency with simulateTx. - return this.#putInJobQueue(async () => { + return this.#putInJobQueue(async jobId => { const totalTimer = new Timer(); try { const txInfo = { @@ -761,7 +791,12 @@ export class PXE { const syncTime = syncTimer.ms(); const contractFunctionSimulator = this.#getSimulatorForTx(); - const privateExecutionResult = await this.#executePrivate(contractFunctionSimulator, txRequest); + const privateExecutionResult = await this.#executePrivate( + contractFunctionSimulator, + txRequest, + undefined, + jobId, + ); const { executionSteps, timings: { proving } = {} } = await this.#prove( txRequest, @@ -841,7 +876,7 @@ export class PXE { // We disable concurrent simulations since those might execute oracles which read and write to the PXE stores (e.g. // to the capsules), and we need to prevent concurrent runs from interfering with one another (e.g. attempting to // delete the same read value, or reading values that another simulation is currently modifying). - return this.#putInJobQueue(async () => { + return this.#putInJobQueue(async jobId => { try { const totalTimer = new Timer(); const txInfo = { @@ -867,7 +902,7 @@ export class PXE { const skipKernels = overrides?.contracts !== undefined && Object.keys(overrides.contracts ?? {}).length > 0; // Execution of private functions only; no proving, and no kernel logic. - const privateExecutionResult = await this.#executePrivate(contractFunctionSimulator, txRequest, scopes); + const privateExecutionResult = await this.#executePrivate(contractFunctionSimulator, txRequest, scopes, jobId); let publicInputs: PrivateKernelTailCircuitPublicInputs | undefined; let executionSteps: PrivateExecutionStep[] = []; @@ -982,7 +1017,7 @@ export class PXE { // We disable concurrent simulations since those might execute oracles which read and write to the PXE stores (e.g. // to the capsules), and we need to prevent concurrent runs from interfering with one another (e.g. attempting to // delete the same read value, or reading values that another simulation is currently modifying). - return this.#putInJobQueue(async () => { + return this.#putInJobQueue(async jobId => { try { const totalTimer = new Timer(); const syncTimer = new Timer(); @@ -992,10 +1027,16 @@ export class PXE { const contractFunctionSimulator = this.#getSimulatorForTx(); await this.contractStore.syncPrivateState(call.to, call.selector, privateSyncCall => - this.#simulateUtility(contractFunctionSimulator, privateSyncCall), + this.#simulateUtility(contractFunctionSimulator, privateSyncCall, [], undefined, jobId), ); - const executionResult = await this.#simulateUtility(contractFunctionSimulator, call, authwits ?? [], scopes); + const executionResult = await this.#simulateUtility( + contractFunctionSimulator, + call, + authwits ?? [], + scopes, + jobId, + ); const functionTime = functionTimer.ms(); const totalTime = totalTimer.ms(); @@ -1037,14 +1078,15 @@ export class PXE { * @returns - The packed events with block and tx metadata. 
*/ public getPrivateEvents(eventSelector: EventSelector, filter: PrivateEventFilter): Promise { - return this.#putInJobQueue(async () => { + return this.#putInJobQueue(async jobId => { await this.blockStateSynchronizer.sync(); const contractFunctionSimulator = this.#getSimulatorForTx(); await this.contractStore.syncPrivateState( filter.contractAddress, null, - async privateSyncCall => await this.#simulateUtility(contractFunctionSimulator, privateSyncCall), + async privateSyncCall => + await this.#simulateUtility(contractFunctionSimulator, privateSyncCall, [], undefined, jobId), ); const sanitizedFilter = await new PrivateEventFilterValidator(this.anchorBlockStore).validate(filter); diff --git a/yarn-project/pxe/src/storage/anchor_block_store/anchor_block_store.test.ts b/yarn-project/pxe/src/storage/anchor_block_store/anchor_block_store.test.ts index 07d36e3d2bd1..6b47176322d6 100644 --- a/yarn-project/pxe/src/storage/anchor_block_store/anchor_block_store.test.ts +++ b/yarn-project/pxe/src/storage/anchor_block_store/anchor_block_store.test.ts @@ -1,16 +1,18 @@ import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants'; import { BlockNumber } from '@aztec/foundation/branded-types'; import { randomInt } from '@aztec/foundation/crypto/random'; +import type { AztecAsyncKVStore } from '@aztec/kv-store'; import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; import { makeBlockHeader } from '@aztec/stdlib/testing'; import { AnchorBlockStore } from './anchor_block_store.js'; describe('block header', () => { + let store: AztecAsyncKVStore; let anchorBlockStore: AnchorBlockStore; beforeEach(async () => { - const store = await openTmpStore('sync_store_test'); + store = await openTmpStore('sync_store_test'); anchorBlockStore = new AnchorBlockStore(store); }); @@ -24,4 +26,49 @@ describe('block header', () => { it('rejects getting header if no block set', async () => { await expect(() => anchorBlockStore.getBlockHeader()).rejects.toThrow(); }); + + describe('staging', () => { + it('writes to staging when jobId provided', async () => { + const committedHeader = makeBlockHeader(randomInt(1000), { blockNumber: BlockNumber(1) }); + const stagedHeader = makeBlockHeader(randomInt(1000), { blockNumber: BlockNumber(2) }); + const jobId: string = 'test123'; + + await anchorBlockStore.setHeader(committedHeader); + await anchorBlockStore.setHeader(stagedHeader, jobId); + + // Without jobId, should get committed header + await expect(anchorBlockStore.getBlockHeader()).resolves.toEqual(committedHeader); + + // With jobId, should get staged header + await expect(anchorBlockStore.getBlockHeader(jobId)).resolves.toEqual(stagedHeader); + }); + + it('commit promotes staged data to main', async () => { + const committedHeader = makeBlockHeader(randomInt(1000), { blockNumber: BlockNumber(1) }); + const stagedHeader = makeBlockHeader(randomInt(1000), { blockNumber: BlockNumber(2) }); + const jobId: string = 'test123'; + + await anchorBlockStore.setHeader(committedHeader); + await anchorBlockStore.setHeader(stagedHeader, jobId); + await anchorBlockStore.commit(jobId); + + await expect(anchorBlockStore.getBlockHeader()).resolves.toEqual(stagedHeader); + }); + + it('discardStaged removes staged data without affecting main', async () => { + const committedHeader = makeBlockHeader(randomInt(1000), { blockNumber: BlockNumber(1) }); + const stagedHeader = makeBlockHeader(randomInt(1000), { blockNumber: BlockNumber(2) }); + const jobId: string = 'test123'; + + await anchorBlockStore.setHeader(committedHeader); + await 
anchorBlockStore.setHeader(stagedHeader, jobId); + await anchorBlockStore.discardStaged(jobId); + + // Should still get committed header + await expect(anchorBlockStore.getBlockHeader()).resolves.toEqual(committedHeader); + + // With jobId should fall back to committed since staging was discarded + await expect(anchorBlockStore.getBlockHeader(jobId)).resolves.toEqual(committedHeader); + }); + }); }); diff --git a/yarn-project/pxe/src/storage/anchor_block_store/anchor_block_store.ts b/yarn-project/pxe/src/storage/anchor_block_store/anchor_block_store.ts index 7112b1fd6785..c9b0e33838e2 100644 --- a/yarn-project/pxe/src/storage/anchor_block_store/anchor_block_store.ts +++ b/yarn-project/pxe/src/storage/anchor_block_store/anchor_block_store.ts @@ -1,20 +1,60 @@ import type { AztecAsyncKVStore, AztecAsyncSingleton } from '@aztec/kv-store'; import { BlockHeader } from '@aztec/stdlib/tx'; -export class AnchorBlockStore { +import type { StagedStore } from '../../job_coordinator/index.js'; + +const HEADER_KEY = 'header'; + +/** + * AnchorBlockStore manages the synchronized block header for PXE. + * + * The anchor block is the latest block PXE has synced to. All simulations + * use this block as their reference point. + */ +export class AnchorBlockStore implements StagedStore { + readonly storeName = 'anchor_block'; + #store: AztecAsyncKVStore; #synchronizedHeader: AztecAsyncSingleton<Buffer>; + /** In-memory stage: jobId -> header buffer */ + #stagedHeader: Map<string, Buffer>; constructor(store: AztecAsyncKVStore) { this.#store = store; - this.#synchronizedHeader = this.#store.openSingleton<Buffer>('header'); + this.#synchronizedHeader = this.#store.openSingleton<Buffer>(HEADER_KEY); + this.#stagedHeader = new Map(); } - async setHeader(header: BlockHeader): Promise<void> { - await this.#synchronizedHeader.set(header.toBuffer()); + /** + * Sets the synchronized block header. + * + * @param header - The block header to set + * @param jobId - Optional job ID for staged writes + */ + async setHeader(header: BlockHeader, jobId?: string): Promise<void> { + if (jobId) { + this.#stagedHeader.set(jobId, header.toBuffer()); + } else { + await this.#synchronizedHeader.set(header.toBuffer()); + } } - async getBlockHeader(): Promise<BlockHeader> { + /** + * Gets the synchronized block header. + * + * @param jobId - Optional job ID to check staged version first + * @returns The block header + * @throws If no header has been set + */ + async getBlockHeader(jobId?: string): Promise<BlockHeader> { + if (jobId) { + const stagedBuffer = this.#stagedHeader.get(jobId); + if (stagedBuffer) { + return BlockHeader.fromBuffer(stagedBuffer); + } + } + + // Fall back to committed data const headerBuffer = await this.#synchronizedHeader.getAsync(); if (!headerBuffer) { throw new Error(`Trying to get block header with a not-yet-synchronized PXE - this should never happen`); @@ -22,4 +62,27 @@ export class AnchorBlockStore { return BlockHeader.fromBuffer(headerBuffer); } + + /** + * Commits staged data to main storage. + * Called by JobCoordinator when a job completes successfully. + * Must be called within a transaction by the JobCoordinator. + */ + async commit(jobId: string): Promise<void> { + const stagedBuffer = this.#stagedHeader.get(jobId); + + if (stagedBuffer) { + await this.#synchronizedHeader.set(stagedBuffer); + this.#stagedHeader.delete(jobId); + } + } + + /** + * Discards staged data without committing. + * Called by JobCoordinator on abort or during recovery. 
+ */ + discardStaged(jobId: string): Promise<void> { + this.#stagedHeader.delete(jobId); + return Promise.resolve(); + } } diff --git a/yarn-project/pxe/src/storage/capsule_store/capsule_store.test.ts b/yarn-project/pxe/src/storage/capsule_store/capsule_store.test.ts index 848c27d25e37..b1236be4c608 100644 --- a/yarn-project/pxe/src/storage/capsule_store/capsule_store.test.ts +++ b/yarn-project/pxe/src/storage/capsule_store/capsule_store.test.ts @@ -1,21 +1,31 @@ import { range } from '@aztec/foundation/array'; import { times } from '@aztec/foundation/collection'; import { Fr } from '@aztec/foundation/curves/bn254'; -import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; +import { type AztecLMDBStoreV2, openTmpStore } from '@aztec/kv-store/lmdb-v2'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { CapsuleStore } from './capsule_store.js'; +const TEST_JOB_ID = 'test-job'; + describe('capsule data provider', () => { let contract: AztecAddress; + let kvStore: AztecLMDBStoreV2; let capsuleStore: CapsuleStore; + // Helper to commit within a transaction (simulating JobCoordinator behavior) + const commitInTransaction = async () => { + await kvStore.transactionAsync(async () => { + await capsuleStore.commit(TEST_JOB_ID); + }); + }; + beforeEach(async () => { // Setup mock contract address contract = await AztecAddress.random(); // Setup data provider - const store = await openTmpStore('capsule_store_test'); - capsuleStore = new CapsuleStore(store); + kvStore = await openTmpStore('capsule_store_test'); + capsuleStore = new CapsuleStore(kvStore); }); describe('store and load', () => { @@ -23,8 +33,20 @@ describe('capsule data provider', () => { const slot = new Fr(1); const values = [new Fr(42)]; - await capsuleStore.storeCapsule(contract, slot, values); - const result = await capsuleStore.loadCapsule(contract, slot); + capsuleStore.storeCapsule(contract, slot, values, TEST_JOB_ID); + + // Capsule visible within TEST_JOB_ID + let result = await capsuleStore.loadCapsule(contract, slot, TEST_JOB_ID); + expect(result).toEqual(values); + + // Capsule invisible outside TEST_JOB_ID until committed + result = await capsuleStore.loadCapsule(contract, slot); + expect(result).toBeNull(); + + await capsuleStore.commit(TEST_JOB_ID); + + // Capsule visible outside the job once committed + result = await capsuleStore.loadCapsule(contract, slot); expect(result).toEqual(values); }); @@ -32,7 +54,8 @@ describe('capsule data provider', () => { const slot = new Fr(1); const values = [new Fr(42), new Fr(43), new Fr(44)]; - await capsuleStore.storeCapsule(contract, slot, values); + capsuleStore.storeCapsule(contract, slot, values, TEST_JOB_ID); + await capsuleStore.commit(TEST_JOB_ID); const result = await capsuleStore.loadCapsule(contract, slot); expect(result).toEqual(values); }); @@ -42,10 +65,13 @@ describe('capsule data provider', () => { const slot = new Fr(1); const initialValues = [new Fr(42)]; const newValues = [new Fr(100)]; - await capsuleStore.storeCapsule(contract, slot, initialValues); - await capsuleStore.storeCapsule(contract, slot, newValues); + capsuleStore.storeCapsule(contract, slot, initialValues, TEST_JOB_ID); + capsuleStore.storeCapsule(contract, slot, newValues, TEST_JOB_ID); + let result = await capsuleStore.loadCapsule(contract, slot, TEST_JOB_ID); + expect(result).toEqual(newValues); - const result = await capsuleStore.loadCapsule(contract, slot); + await capsuleStore.commit(TEST_JOB_ID); + result = await capsuleStore.loadCapsule(contract, slot, TEST_JOB_ID); expect(result).toEqual(newValues); }); 
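The hunks above and below only exercise the commit path of the capsule store. A companion case implied by the same staging machinery, though not part of this diff, is the abort path: JobCoordinator.abortJob calls discardStaged on every registered store, which should make uncommitted capsules vanish. A hypothetical test in the style of this suite (assuming CapsuleStore.discardStaged follows its StagedStore contract):

it('discardStaged drops uncommitted capsules', async () => {
  const slot = new Fr(1);
  capsuleStore.storeCapsule(contract, slot, [new Fr(42)], TEST_JOB_ID);

  // Abort path: what JobCoordinator.abortJob would trigger for this store.
  await capsuleStore.discardStaged(TEST_JOB_ID);

  // The staged write is gone both inside and outside the job.
  expect(await capsuleStore.loadCapsule(contract, slot, TEST_JOB_ID)).toBeNull();
  expect(await capsuleStore.loadCapsule(contract, slot)).toBeNull();
});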
@@ -55,8 +81,9 @@ describe('capsule data provider', () => { const values1 = [new Fr(42)]; const values2 = [new Fr(100)]; - await capsuleStore.storeCapsule(contract, slot, values1); - await capsuleStore.storeCapsule(anotherContract, slot, values2); + capsuleStore.storeCapsule(contract, slot, values1, TEST_JOB_ID); + capsuleStore.storeCapsule(anotherContract, slot, values2, TEST_JOB_ID); + await capsuleStore.commit(TEST_JOB_ID); const result1 = await capsuleStore.loadCapsule(contract, slot); const result2 = await capsuleStore.loadCapsule(anotherContract, slot); @@ -77,15 +104,31 @@ describe('capsule data provider', () => { const slot = new Fr(1); const values = [new Fr(42)]; - await capsuleStore.storeCapsule(contract, slot, values); - await capsuleStore.deleteCapsule(contract, slot); + capsuleStore.storeCapsule(contract, slot, values, TEST_JOB_ID); + await capsuleStore.commit(TEST_JOB_ID); + + capsuleStore.deleteCapsule(contract, slot, TEST_JOB_ID); + await capsuleStore.commit(TEST_JOB_ID); + + expect(await capsuleStore.loadCapsule(contract, slot)).toBeNull(); + }); + + it('deletes before commit', async () => { + const slot = new Fr(1); + const values = [new Fr(42)]; + + capsuleStore.storeCapsule(contract, slot, values, TEST_JOB_ID); + capsuleStore.deleteCapsule(contract, slot, TEST_JOB_ID); + expect(await capsuleStore.loadCapsule(contract, slot, TEST_JOB_ID)).toBeNull(); + await capsuleStore.commit(TEST_JOB_ID); expect(await capsuleStore.loadCapsule(contract, slot)).toBeNull(); }); it('deletes an empty slot', async () => { const slot = new Fr(1); - await capsuleStore.deleteCapsule(contract, slot); + capsuleStore.deleteCapsule(contract, slot, TEST_JOB_ID); + await capsuleStore.commit(TEST_JOB_ID); expect(await capsuleStore.loadCapsule(contract, slot)).toBeNull(); }); @@ -96,11 +139,14 @@ describe('capsule data provider', () => { const slot = new Fr(1); const values = [new Fr(42)]; - await capsuleStore.storeCapsule(contract, slot, values); + capsuleStore.storeCapsule(contract, slot, values, TEST_JOB_ID); const dstSlot = new Fr(5); - await capsuleStore.copyCapsule(contract, slot, dstSlot, 1); + await capsuleStore.copyCapsule(contract, slot, dstSlot, 1, TEST_JOB_ID); + + expect(await capsuleStore.loadCapsule(contract, dstSlot, TEST_JOB_ID)).toEqual(values); + await capsuleStore.commit(TEST_JOB_ID); expect(await capsuleStore.loadCapsule(contract, dstSlot)).toEqual(values); }); @@ -108,70 +154,82 @@ describe('capsule data provider', () => { const src = new Fr(1); const valuesArray = [[new Fr(42)], [new Fr(1337)], [new Fr(13)]]; - await capsuleStore.storeCapsule(contract, src, valuesArray[0]); - await capsuleStore.storeCapsule(contract, src.add(new Fr(1)), valuesArray[1]); - await capsuleStore.storeCapsule(contract, src.add(new Fr(2)), valuesArray[2]); + capsuleStore.storeCapsule(contract, src, valuesArray[0], TEST_JOB_ID); + capsuleStore.storeCapsule(contract, src.add(new Fr(1)), valuesArray[1], TEST_JOB_ID); + capsuleStore.storeCapsule(contract, src.add(new Fr(2)), valuesArray[2], TEST_JOB_ID); const dst = new Fr(5); - await capsuleStore.copyCapsule(contract, src, dst, 3); + await capsuleStore.copyCapsule(contract, src, dst, 3, TEST_JOB_ID); - expect(await capsuleStore.loadCapsule(contract, dst)).toEqual(valuesArray[0]); - expect(await capsuleStore.loadCapsule(contract, dst.add(new Fr(1)))).toEqual(valuesArray[1]); - expect(await capsuleStore.loadCapsule(contract, dst.add(new Fr(2)))).toEqual(valuesArray[2]); + expect(await capsuleStore.loadCapsule(contract, dst, 
TEST_JOB_ID)).toEqual(valuesArray[0]); + expect(await capsuleStore.loadCapsule(contract, dst.add(new Fr(1)), TEST_JOB_ID)).toEqual(valuesArray[1]); + expect(await capsuleStore.loadCapsule(contract, dst.add(new Fr(2)), TEST_JOB_ID)).toEqual(valuesArray[2]); + + await capsuleStore.commit(TEST_JOB_ID); }); it('copies overlapping values with src ahead', async () => { const src = new Fr(1); const valuesArray = [[new Fr(42)], [new Fr(1337)], [new Fr(13)]]; - await capsuleStore.storeCapsule(contract, src, valuesArray[0]); - await capsuleStore.storeCapsule(contract, src.add(new Fr(1)), valuesArray[1]); - await capsuleStore.storeCapsule(contract, src.add(new Fr(2)), valuesArray[2]); + capsuleStore.storeCapsule(contract, src, valuesArray[0], TEST_JOB_ID); + capsuleStore.storeCapsule(contract, src.add(new Fr(1)), valuesArray[1], TEST_JOB_ID); + capsuleStore.storeCapsule(contract, src.add(new Fr(2)), valuesArray[2], TEST_JOB_ID); const dst = new Fr(2); - await capsuleStore.copyCapsule(contract, src, dst, 3); + await capsuleStore.copyCapsule(contract, src, dst, 3, TEST_JOB_ID); - expect(await capsuleStore.loadCapsule(contract, dst)).toEqual(valuesArray[0]); - expect(await capsuleStore.loadCapsule(contract, dst.add(new Fr(1)))).toEqual(valuesArray[1]); - expect(await capsuleStore.loadCapsule(contract, dst.add(new Fr(2)))).toEqual(valuesArray[2]); + expect(await capsuleStore.loadCapsule(contract, dst, TEST_JOB_ID)).toEqual(valuesArray[0]); + expect(await capsuleStore.loadCapsule(contract, dst.add(new Fr(1)), TEST_JOB_ID)).toEqual(valuesArray[1]); + expect(await capsuleStore.loadCapsule(contract, dst.add(new Fr(2)), TEST_JOB_ID)).toEqual(valuesArray[2]); // Slots 2 and 3 (src[1] and src[2]) should have been overwritten since they are also dst[0] and dst[1] - expect(await capsuleStore.loadCapsule(contract, src)).toEqual(valuesArray[0]); // src[0] (unchanged) - expect(await capsuleStore.loadCapsule(contract, src.add(new Fr(1)))).toEqual(valuesArray[0]); // dst[0] - expect(await capsuleStore.loadCapsule(contract, src.add(new Fr(2)))).toEqual(valuesArray[1]); // dst[1] + expect(await capsuleStore.loadCapsule(contract, src, TEST_JOB_ID)).toEqual(valuesArray[0]); // src[0] (unchanged) + expect(await capsuleStore.loadCapsule(contract, src.add(new Fr(1)), TEST_JOB_ID)).toEqual(valuesArray[0]); // dst[0] + expect(await capsuleStore.loadCapsule(contract, src.add(new Fr(2)), TEST_JOB_ID)).toEqual(valuesArray[1]); // dst[1] + + await capsuleStore.commit(TEST_JOB_ID); }); it('copies overlapping values with dst ahead', async () => { const src = new Fr(5); const valuesArray = [[new Fr(42)], [new Fr(1337)], [new Fr(13)]]; - await capsuleStore.storeCapsule(contract, src, valuesArray[0]); - await capsuleStore.storeCapsule(contract, src.add(new Fr(1)), valuesArray[1]); - await capsuleStore.storeCapsule(contract, src.add(new Fr(2)), valuesArray[2]); + capsuleStore.storeCapsule(contract, src, valuesArray[0], TEST_JOB_ID); + capsuleStore.storeCapsule(contract, src.add(new Fr(1)), valuesArray[1], TEST_JOB_ID); + capsuleStore.storeCapsule(contract, src.add(new Fr(2)), valuesArray[2], TEST_JOB_ID); const dst = new Fr(4); - await capsuleStore.copyCapsule(contract, src, dst, 3); + await capsuleStore.copyCapsule(contract, src, dst, 3, TEST_JOB_ID); - expect(await capsuleStore.loadCapsule(contract, dst)).toEqual(valuesArray[0]); - expect(await capsuleStore.loadCapsule(contract, dst.add(new Fr(1)))).toEqual(valuesArray[1]); - expect(await capsuleStore.loadCapsule(contract, dst.add(new Fr(2)))).toEqual(valuesArray[2]); + expect(await 
capsuleStore.loadCapsule(contract, dst, TEST_JOB_ID)).toEqual(valuesArray[0]); + expect(await capsuleStore.loadCapsule(contract, dst.add(new Fr(1)), TEST_JOB_ID)).toEqual(valuesArray[1]); + expect(await capsuleStore.loadCapsule(contract, dst.add(new Fr(2)), TEST_JOB_ID)).toEqual(valuesArray[2]); // Slots 5 and 6 (src[0] and src[1]) should have been overwritten since they are also dst[1] and dst[2] - expect(await capsuleStore.loadCapsule(contract, src)).toEqual(valuesArray[1]); // dst[1] - expect(await capsuleStore.loadCapsule(contract, src.add(new Fr(1)))).toEqual(valuesArray[2]); // dst[2] - expect(await capsuleStore.loadCapsule(contract, src.add(new Fr(2)))).toEqual(valuesArray[2]); // src[2] (unchanged) + expect(await capsuleStore.loadCapsule(contract, src, TEST_JOB_ID)).toEqual(valuesArray[1]); // dst[1] + expect(await capsuleStore.loadCapsule(contract, src.add(new Fr(1)), TEST_JOB_ID)).toEqual(valuesArray[2]); // dst[2] + expect(await capsuleStore.loadCapsule(contract, src.add(new Fr(2)), TEST_JOB_ID)).toEqual(valuesArray[2]); // src[2] (unchanged) + + await capsuleStore.commit(TEST_JOB_ID); }); it('copying fails if any value is empty', async () => { const src = new Fr(1); const valuesArray = [[new Fr(42)], [new Fr(1337)], [new Fr(13)]]; - await capsuleStore.storeCapsule(contract, src, valuesArray[0]); + capsuleStore.storeCapsule(contract, src, valuesArray[0], TEST_JOB_ID); // We skip src[1] - await capsuleStore.storeCapsule(contract, src.add(new Fr(2)), valuesArray[2]); + capsuleStore.storeCapsule(contract, src.add(new Fr(2)), valuesArray[2], TEST_JOB_ID); const dst = new Fr(5); - await expect(capsuleStore.copyCapsule(contract, src, dst, 3)).rejects.toThrow('Attempted to copy empty slot'); + await expect(capsuleStore.copyCapsule(contract, src, dst, 3, TEST_JOB_ID)).rejects.toThrow( + 'Attempted to copy empty slot', + ); + + // We could also commit here, but since this is an error scenario we + // demonstrate discarding the staged writes instead + await capsuleStore.discardStaged(TEST_JOB_ID); }); }); @@ -181,7 +239,14 @@ describe('capsule data provider', () => { const baseSlot = new Fr(3); const array = range(4).map(x => [new Fr(x)]); - await capsuleStore.appendToCapsuleArray(contract, baseSlot, array); + await capsuleStore.appendToCapsuleArray(contract, baseSlot, array, TEST_JOB_ID); + + expect(await capsuleStore.loadCapsule(contract, baseSlot, TEST_JOB_ID)).toEqual([new Fr(array.length)]); + for (const i of range(array.length)) { + expect(await capsuleStore.loadCapsule(contract, baseSlot.add(new Fr(1 + i)), TEST_JOB_ID)).toEqual(array[i]); + } + + await capsuleStore.commit(TEST_JOB_ID); expect(await capsuleStore.loadCapsule(contract, baseSlot)).toEqual([new Fr(array.length)]); for (const i of range(array.length)) { @@ -193,13 +258,22 @@ describe('capsule data provider', () => { const baseSlot = new Fr(3); const originalArray = range(4).map(x => [new Fr(x)]); - await capsuleStore.appendToCapsuleArray(contract, baseSlot, originalArray); + await capsuleStore.appendToCapsuleArray(contract, baseSlot, originalArray, TEST_JOB_ID); const newElements = [[new Fr(13)], [new Fr(42)]]; - await capsuleStore.appendToCapsuleArray(contract, baseSlot, newElements); + await capsuleStore.appendToCapsuleArray(contract, baseSlot, newElements, TEST_JOB_ID); const expectedLength = originalArray.length + newElements.length; + expect(await capsuleStore.loadCapsule(contract, baseSlot, TEST_JOB_ID)).toEqual([new Fr(expectedLength)]); + for (const i of range(expectedLength)) { + expect(await 
capsuleStore.loadCapsule(contract, baseSlot.add(new Fr(1 + i)), TEST_JOB_ID)).toEqual( + [...originalArray, ...newElements][i], + ); + } + + await capsuleStore.commit(TEST_JOB_ID); + expect(await capsuleStore.loadCapsule(contract, baseSlot)).toEqual([new Fr(expectedLength)]); for (const i of range(expectedLength)) { expect(await capsuleStore.loadCapsule(contract, baseSlot.add(new Fr(1 + i)))).toEqual( @@ -220,9 +294,14 @@ describe('capsule data provider', () => { const baseSlot = new Fr(3); const storedArray = range(4).map(x => [new Fr(x)]); - await capsuleStore.appendToCapsuleArray(contract, baseSlot, storedArray); + await capsuleStore.appendToCapsuleArray(contract, baseSlot, storedArray, TEST_JOB_ID); - const retrievedArray = await capsuleStore.readCapsuleArray(contract, baseSlot); + let retrievedArray = await capsuleStore.readCapsuleArray(contract, baseSlot, TEST_JOB_ID); + expect(retrievedArray).toEqual(storedArray); + + await capsuleStore.commit(TEST_JOB_ID); + + retrievedArray = await capsuleStore.readCapsuleArray(contract, baseSlot); expect(retrievedArray).toEqual(storedArray); }); @@ -230,9 +309,16 @@ describe('capsule data provider', () => { const baseSlot = new Fr(3); // Store in the base slot a non-zero value, indicating a non-zero array length - await capsuleStore.storeCapsule(contract, baseSlot, [new Fr(1)]); + capsuleStore.storeCapsule(contract, baseSlot, [new Fr(1)], TEST_JOB_ID); // Reading should now fail as some of the capsules in the array are empty + await expect(capsuleStore.readCapsuleArray(contract, baseSlot, TEST_JOB_ID)).rejects.toThrow( + 'Expected non-empty value', + ); + + await capsuleStore.commit(TEST_JOB_ID); + + // Reading after committing it should also blow up await expect(capsuleStore.readCapsuleArray(contract, baseSlot)).rejects.toThrow('Expected non-empty value'); }); }); @@ -242,9 +328,14 @@ describe('capsule data provider', () => { const baseSlot = new Fr(3); const newArray = range(4).map(x => [new Fr(x)]); - await capsuleStore.setCapsuleArray(contract, baseSlot, newArray); + await capsuleStore.setCapsuleArray(contract, baseSlot, newArray, TEST_JOB_ID); - const retrievedArray = await capsuleStore.readCapsuleArray(contract, baseSlot); + let retrievedArray = await capsuleStore.readCapsuleArray(contract, baseSlot, TEST_JOB_ID); + expect(retrievedArray).toEqual(newArray); + + await capsuleStore.commit(TEST_JOB_ID); + + retrievedArray = await capsuleStore.readCapsuleArray(contract, baseSlot); expect(retrievedArray).toEqual(newArray); }); @@ -252,12 +343,17 @@ describe('capsule data provider', () => { const baseSlot = new Fr(3); const originalArray = range(4, 0).map(x => [new Fr(x)]); - await capsuleStore.setCapsuleArray(contract, baseSlot, originalArray); + await capsuleStore.setCapsuleArray(contract, baseSlot, originalArray, TEST_JOB_ID); const newArray = range(10, 10).map(x => [new Fr(x)]); - await capsuleStore.setCapsuleArray(contract, baseSlot, newArray); + await capsuleStore.setCapsuleArray(contract, baseSlot, newArray, TEST_JOB_ID); - const retrievedArray = await capsuleStore.readCapsuleArray(contract, baseSlot); + let retrievedArray = await capsuleStore.readCapsuleArray(contract, baseSlot, TEST_JOB_ID); + expect(retrievedArray).toEqual(newArray); + + await capsuleStore.commit(TEST_JOB_ID); + + retrievedArray = await capsuleStore.readCapsuleArray(contract, baseSlot); expect(retrievedArray).toEqual(newArray); }); @@ -265,15 +361,27 @@ describe('capsule data provider', () => { const baseSlot = new Fr(3); const originalArray = range(10, 0).map(x => [new 
Fr(x)]); - await capsuleStore.setCapsuleArray(contract, baseSlot, originalArray); + await capsuleStore.setCapsuleArray(contract, baseSlot, originalArray, TEST_JOB_ID); const newArray = range(4, 10).map(x => [new Fr(x)]); - await capsuleStore.setCapsuleArray(contract, baseSlot, newArray); + await capsuleStore.setCapsuleArray(contract, baseSlot, newArray, TEST_JOB_ID); - const retrievedArray = await capsuleStore.readCapsuleArray(contract, baseSlot); + let retrievedArray = await capsuleStore.readCapsuleArray(contract, baseSlot, TEST_JOB_ID); expect(retrievedArray).toEqual(newArray); // Not only do we read the expected array, but also all capsules past the new array length have been cleared + for (const i of range(originalArray.length - newArray.length)) { + expect( + await capsuleStore.loadCapsule(contract, baseSlot.add(new Fr(1 + newArray.length + i)), TEST_JOB_ID), + ).toBeNull(); + } + + await capsuleStore.commit(TEST_JOB_ID); + + // Check everything is consistent after committing as well + retrievedArray = await capsuleStore.readCapsuleArray(contract, baseSlot); + expect(retrievedArray).toEqual(newArray); + for (const i of range(originalArray.length - newArray.length)) { expect(await capsuleStore.loadCapsule(contract, baseSlot.add(new Fr(1 + newArray.length + i)))).toBeNull(); } @@ -283,11 +391,21 @@ describe('capsule data provider', () => { const baseSlot = new Fr(3); const originalArray = range(10, 0).map(x => [new Fr(x)]); - await capsuleStore.setCapsuleArray(contract, baseSlot, originalArray); + await capsuleStore.setCapsuleArray(contract, baseSlot, originalArray, TEST_JOB_ID); - await capsuleStore.setCapsuleArray(contract, baseSlot, []); + await capsuleStore.setCapsuleArray(contract, baseSlot, [], TEST_JOB_ID); - const retrievedArray = await capsuleStore.readCapsuleArray(contract, baseSlot); + let retrievedArray = await capsuleStore.readCapsuleArray(contract, baseSlot, TEST_JOB_ID); + expect(retrievedArray).toEqual([]); + + // All capsules from the original array have been cleared + for (const i of range(originalArray.length)) { + expect(await capsuleStore.loadCapsule(contract, baseSlot.add(new Fr(1 + i)), TEST_JOB_ID)).toBeNull(); + } + + await capsuleStore.commit(TEST_JOB_ID); + + retrievedArray = await capsuleStore.readCapsuleArray(contract, baseSlot, TEST_JOB_ID); expect(retrievedArray).toEqual([]); // All capsules from the original array have been cleared @@ -319,7 +437,10 @@ describe('capsule data provider', () => { contract, new Fr(0), times(NUMBER_OF_ITEMS, () => range(ARRAY_LENGTH).map(x => new Fr(x))), + TEST_JOB_ID, ); + + await commitInTransaction(); }, TEST_TIMEOUT_MS, ); @@ -331,7 +452,10 @@ describe('capsule data provider', () => { contract, new Fr(0), times(NUMBER_OF_ITEMS, () => range(ARRAY_LENGTH).map(x => new Fr(x))), + TEST_JOB_ID, ); + + await commitInTransaction(); }, TEST_TIMEOUT_MS, ); @@ -343,10 +467,18 @@ describe('capsule data provider', () => { contract, new Fr(0), times(NUMBER_OF_ITEMS, () => range(ARRAY_LENGTH).map(x => new Fr(x))), + TEST_JOB_ID, ); // Append a single element - await capsuleStore.appendToCapsuleArray(contract, new Fr(0), [range(ARRAY_LENGTH).map(x => new Fr(x))]); + await capsuleStore.appendToCapsuleArray( + contract, + new Fr(0), + [range(ARRAY_LENGTH).map(x => new Fr(x))], + TEST_JOB_ID, + ); + + await commitInTransaction(); }, TEST_TIMEOUT_MS, ); @@ -358,10 +490,13 @@ describe('capsule data provider', () => { contract, new Fr(0), times(NUMBER_OF_ITEMS, () => range(ARRAY_LENGTH).map(x => new Fr(x))), + TEST_JOB_ID, ); // We just move 
the entire thing one slot. - await capsuleStore.copyCapsule(contract, new Fr(0), new Fr(1), NUMBER_OF_ITEMS); + await capsuleStore.copyCapsule(contract, new Fr(0), new Fr(1), NUMBER_OF_ITEMS, TEST_JOB_ID); + + await commitInTransaction(); }, TEST_TIMEOUT_MS, ); @@ -373,9 +508,12 @@ describe('capsule data provider', () => { contract, new Fr(0), times(NUMBER_OF_ITEMS, () => range(ARRAY_LENGTH).map(x => new Fr(x))), + TEST_JOB_ID, ); - await capsuleStore.readCapsuleArray(contract, new Fr(0)); + await capsuleStore.readCapsuleArray(contract, new Fr(0), TEST_JOB_ID); + + await commitInTransaction(); }, TEST_TIMEOUT_MS, ); @@ -387,11 +525,127 @@ describe('capsule data provider', () => { contract, new Fr(0), times(NUMBER_OF_ITEMS, () => range(ARRAY_LENGTH).map(x => new Fr(x))), + TEST_JOB_ID, ); - await capsuleStore.setCapsuleArray(contract, new Fr(0), []); + await capsuleStore.setCapsuleArray(contract, new Fr(0), [], TEST_JOB_ID); + + await commitInTransaction(); }, TEST_TIMEOUT_MS, ); }); + + describe('staging', () => { + it('writes to staging when jobId provided', async () => { + const slot = Fr.random(); + const committedValues = [Fr.random()]; + const stagedValues = [Fr.random()]; + const commitJobId: string = 'commit-job'; + const stagingJobId: string = 'staging-job'; + + // First set a committed capsule (using a different job that we commit) + capsuleStore.storeCapsule(contract, slot, committedValues, commitJobId); + await capsuleStore.commit(commitJobId); + + // Then set a staged capsule (not committed) + capsuleStore.storeCapsule(contract, slot, stagedValues, stagingJobId); + + // Without jobId, should get committed capsule + expect(await capsuleStore.loadCapsule(contract, slot)).toEqual(committedValues); + + // With jobId, should get staged capsule + expect(await capsuleStore.loadCapsule(contract, slot, stagingJobId)).toEqual(stagedValues); + }); + + it('staged capsules are visible when reading with jobId', async () => { + const slot = Fr.random(); + const stagedValues = [Fr.random()]; + const jobId: string = 'test123'; + + // Store only in staging (not committed) + capsuleStore.storeCapsule(contract, slot, stagedValues, jobId); + + // Without jobId, should not see the staged capsule + expect(await capsuleStore.loadCapsule(contract, slot)).toBeNull(); + + // With jobId, should see the staged capsule + expect(await capsuleStore.loadCapsule(contract, slot, jobId)).toEqual(stagedValues); + }); + + it('staged deletions hide committed data', async () => { + const slot = Fr.random(); + const committedValues = [Fr.random()]; + const commitJobId: string = 'commit-job'; + const stagingJobId: string = 'staging-job'; + + // First set a committed capsule + capsuleStore.storeCapsule(contract, slot, committedValues, commitJobId); + await capsuleStore.commit(commitJobId); + + // Delete in staging (not committed) + capsuleStore.deleteCapsule(contract, slot, stagingJobId); + + // Without jobId, should still see committed capsule + expect(await capsuleStore.loadCapsule(contract, slot)).toEqual(committedValues); + + // With stagingJobId, should see null (deleted in staging) + expect(await capsuleStore.loadCapsule(contract, slot, stagingJobId)).toBeNull(); + }); + + it('commit promotes staged data to main', async () => { + const slot = Fr.random(); + const committedValues = [Fr.random()]; + const stagedValues = [Fr.random()]; + const commitJobId: string = 'commit-job'; + const stagingJobId: string = 'staging-job'; + + capsuleStore.storeCapsule(contract, slot, committedValues, commitJobId); + await 
capsuleStore.commit(commitJobId); + + capsuleStore.storeCapsule(contract, slot, stagedValues, stagingJobId); + + await capsuleStore.commit(stagingJobId); + + // Now without jobId should get the previously staged capsule + expect(await capsuleStore.loadCapsule(contract, slot)).toEqual(stagedValues); + }); + + it('commit applies staged deletions', async () => { + const slot = Fr.random(); + const committedValues = [Fr.random()]; + const commitJobId: string = 'commit-job'; + const deleteJobId: string = 'delete-job'; + + capsuleStore.storeCapsule(contract, slot, committedValues, commitJobId); + await capsuleStore.commit(commitJobId); + capsuleStore.deleteCapsule(contract, slot, deleteJobId); + + await capsuleStore.commit(deleteJobId); + + // Now without jobId should see null (deleted) + expect(await capsuleStore.loadCapsule(contract, slot)).toBeNull(); + }); + + it('discardStaged removes staged data without affecting main', async () => { + const slot = Fr.random(); + const committedValues = [Fr.random()]; + const stagedValues = [Fr.random()]; + const commitJobId: string = 'commit-job'; + const stagingJobId: string = 'staging-job'; + + capsuleStore.storeCapsule(contract, slot, committedValues, commitJobId); + await capsuleStore.commit(commitJobId); + capsuleStore.storeCapsule(contract, slot, stagedValues, stagingJobId); + + // Discard the staging + await capsuleStore.discardStaged(stagingJobId); + + // Should still get committed capsule + expect(await capsuleStore.loadCapsule(contract, slot)).toEqual(committedValues); + + // With jobId should fall back to committed since staging was discarded + expect(await capsuleStore.loadCapsule(contract, slot, stagingJobId)).toEqual(committedValues); + }); + }); }); diff --git a/yarn-project/pxe/src/storage/capsule_store/capsule_store.ts b/yarn-project/pxe/src/storage/capsule_store/capsule_store.ts index ac39dc88dafb..564dfc485175 100644 --- a/yarn-project/pxe/src/storage/capsule_store/capsule_store.ts +++ b/yarn-project/pxe/src/storage/capsule_store/capsule_store.ts @@ -3,18 +3,26 @@ import { type Logger, createLogger } from '@aztec/foundation/log'; import type { AztecAsyncKVStore, AztecAsyncMap } from '@aztec/kv-store'; import type { AztecAddress } from '@aztec/stdlib/aztec-address'; -export class CapsuleStore { +import type { StagedStore } from '../../job_coordinator/job_coordinator.js'; + +export class CapsuleStore implements StagedStore { + readonly storeName = 'capsules'; + #store: AztecAsyncKVStore; // Arbitrary data stored by contracts. Key is computed as `${contractAddress}:${key}` #capsules: AztecAsyncMap<string, Buffer>; + /** In-memory stage: jobId -> key -> (Buffer or null for deletion) */ + #stagedCapsules: Map<string, Map<string, Buffer | null>>; + logger: Logger; constructor(store: AztecAsyncKVStore) { this.#store = store; this.#capsules = this.#store.openMap('capsules'); + this.#stagedCapsules = new Map(); this.logger = createLogger('pxe:capsule-data-provider'); } @@ -25,26 +33,56 @@ export class CapsuleStore { * @param contractAddress - The contract address to scope the data under. * @param slot - The slot in the database in which to store the value. Slots need not be contiguous. * @param capsule - An array of field elements representing the capsule. + * @param jobId - The job ID for staged writes. * @remarks A capsule is a "blob" of data that is passed to the contract through an oracle. It works similarly * to public contract storage in that it's indexed by the contract address and storage slot but instead of the global * network state it's backed by local PXE db.
*/ - async storeCapsule(contractAddress: AztecAddress, slot: Fr, capsule: Fr[]): Promise<void> { - await this.#capsules.set(dbSlotToKey(contractAddress, slot), Buffer.concat(capsule.map(value => value.toBuffer()))); + storeCapsule(contractAddress: AztecAddress, slot: Fr, capsule: Fr[], jobId: string) { + const key = dbSlotToKey(contractAddress, slot); + const buffer = Buffer.concat(capsule.map(value => value.toBuffer())); + + let jobStaging = this.#stagedCapsules.get(jobId); + if (!jobStaging) { + jobStaging = new Map(); + this.#stagedCapsules.set(jobId, jobStaging); + } + jobStaging.set(key, buffer); } /** * Returns data previously stored via `storeCapsule` in the per-contract non-volatile database. * @param contractAddress - The contract address under which the data is scoped. * @param slot - The slot in the database to read. + * @param jobId - Optional jobId to check staging first. * @returns The stored data or `null` if no data is stored under the slot. */ - async loadCapsule(contractAddress: AztecAddress, slot: Fr): Promise<Fr[] | null> { - const dataBuffer = await this.#capsules.getAsync(dbSlotToKey(contractAddress, slot)); + async loadCapsule(contractAddress: AztecAddress, slot: Fr, jobId?: string): Promise<Fr[] | null> { + const key = dbSlotToKey(contractAddress, slot); + + // Check staging first if jobId provided + if (jobId) { + const jobStaging = this.#stagedCapsules.get(jobId); + if (jobStaging?.has(key)) { + const stagedValue = jobStaging.get(key); + // null means staged deletion, undefined means not in staging (shouldn't happen after has() check) + if (stagedValue === null || stagedValue === undefined) { + return null; + } + return this.#bufferToCapsule(stagedValue); + } + } + + // Fall back to committed data + const dataBuffer = await this.#capsules.getAsync(key); if (!dataBuffer) { this.logger.trace(`Data not found for contract ${contractAddress.toString()} and slot ${slot.toString()}`); return null; } + return this.#bufferToCapsule(dataBuffer); + } + + #bufferToCapsule(dataBuffer: Buffer): Fr[] { const capsule: Fr[] = []; for (let i = 0; i < dataBuffer.length; i += Fr.SIZE_IN_BYTES) { capsule.push(Fr.fromBuffer(dataBuffer.subarray(i, i + Fr.SIZE_IN_BYTES))); @@ -56,9 +94,18 @@ export class CapsuleStore { * Deletes data in the per-contract non-volatile database. Does nothing if no data was present. * @param contractAddress - The contract address under which the data is scoped. * @param slot - The slot in the database to delete. + * @param jobId - The job ID for staged writes. */ - async deleteCapsule(contractAddress: AztecAddress, slot: Fr): Promise<void> { - await this.#capsules.delete(dbSlotToKey(contractAddress, slot)); + deleteCapsule(contractAddress: AztecAddress, slot: Fr, jobId: string) { + const key = dbSlotToKey(contractAddress, slot); + + let jobStaging = this.#stagedCapsules.get(jobId); + if (!jobStaging) { + jobStaging = new Map(); + this.#stagedCapsules.set(jobId, jobStaging); + } + // null marks deletion in staging + jobStaging.set(key, null); } /** @@ -71,102 +118,149 @@ export class CapsuleStore { * @param srcSlot - The first slot to copy from. * @param dstSlot - The first slot to copy to. * @param numEntries - The number of entries to copy. + * @param jobId - The job ID for staged writes. */ - copyCapsule(contractAddress: AztecAddress, srcSlot: Fr, dstSlot: Fr, numEntries: number): Promise<void> { - return this.#store.transactionAsync(async () => { - // In order to support overlapping source and destination regions, we need to check the relative positions of source - // and destination.
If destination is ahead of source, then by the time we overwrite source elements using forward - // indexes we'll have already read those. On the contrary, if source is ahead of destination we need to use backward - // indexes to avoid reading elements that've been overwritten. - - const indexes = Array.from(Array(numEntries).keys()); - if (srcSlot.lt(dstSlot)) { - indexes.reverse(); - } + async copyCapsule( + contractAddress: AztecAddress, + srcSlot: Fr, + dstSlot: Fr, + numEntries: number, + jobId: string, + ): Promise<void> { + // In order to support overlapping source and destination regions, we need to check the relative positions of source + // and destination. If destination is ahead of source, then by the time we overwrite source elements using forward + // indexes we'll have already read those. On the contrary, if source is ahead of destination we need to use backward + // indexes to avoid reading elements that've been overwritten. - for (const i of indexes) { - const currentSrcSlot = dbSlotToKey(contractAddress, srcSlot.add(new Fr(i))); - const currentDstSlot = dbSlotToKey(contractAddress, dstSlot.add(new Fr(i))); + const indexes = Array.from(Array(numEntries).keys()); + if (srcSlot.lt(dstSlot)) { + indexes.reverse(); + } - const toCopy = await this.#capsules.getAsync(currentSrcSlot); - if (!toCopy) { - throw new Error(`Attempted to copy empty slot ${currentSrcSlot} for contract ${contractAddress.toString()}`); - } + for (const i of indexes) { + const currentSrcSlot = srcSlot.add(new Fr(i)); + const currentDstSlot = dstSlot.add(new Fr(i)); - await this.#capsules.set(currentDstSlot, toCopy); + const toCopy = await this.loadCapsule(contractAddress, currentSrcSlot, jobId); + if (!toCopy) { + throw new Error( + `Attempted to copy empty slot ${dbSlotToKey(contractAddress, currentSrcSlot)} for contract ${contractAddress.toString()}`, + ); } - }); + + this.storeCapsule(contractAddress, currentDstSlot, toCopy, jobId); + } }
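
The index-reversal above is the classic overlapping-copy (memmove) rule. A worked example on a plain array, shifting three elements one slot forward (illustrative only, not the store's code):

// dst (1) is ahead of src (0), so iterate backwards: each source element is
// read before the copy overwrites it.
const slots = ['a', 'b', 'c', '-'];
for (const i of [2, 1, 0]) {
  slots[1 + i] = slots[i];
}
// slots is now ['a', 'a', 'b', 'c'] — the shifted copy survives intact.
// Forward order would instead yield ['a', 'a', 'a', 'a']: slot 1 is
// clobbered with 'a' before it is read as the source for slot 2.
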
/** * Appends multiple capsules to a capsule array stored at the base slot. * The array length is stored at the base slot, and elements are stored in consecutive slots after it. - * All operations are performed in a single transaction. * @param contractAddress - The contract address that owns the capsule array * @param baseSlot - The slot where the array length is stored * @param content - Array of capsule data to append + * @param jobId - The job ID for staged writes. */ - appendToCapsuleArray(contractAddress: AztecAddress, baseSlot: Fr, content: Fr[][]): Promise<void> { - return this.#store.transactionAsync(async () => { - // Load current length, defaulting to 0 if not found - const lengthData = await this.loadCapsule(contractAddress, baseSlot); - const currentLength = lengthData ? lengthData[0].toNumber() : 0; - - // Store each capsule at consecutive slots after baseSlot + 1 + currentLength + async appendToCapsuleArray(contractAddress: AztecAddress, baseSlot: Fr, content: Fr[][], jobId: string) { + // Load current length, defaulting to 0 if not found + const lengthData = await this.loadCapsule(contractAddress, baseSlot, jobId); + const currentLength = lengthData ? lengthData[0].toNumber() : 0; + + // Store each capsule at consecutive slots starting at baseSlot + 1 + currentLength + for (let i = 0; i < content.length; i++) { + const nextSlot = arraySlot(baseSlot, currentLength + i); + this.storeCapsule(contractAddress, nextSlot, content[i], jobId); + } - // Update length to include all new capsules - const newLength = currentLength + content.length; - await this.storeCapsule(contractAddress, baseSlot, [new Fr(newLength)]); - }); + // Update length to include all new capsules + const newLength = currentLength + content.length; + this.storeCapsule(contractAddress, baseSlot, [new Fr(newLength)], jobId); } - readCapsuleArray(contractAddress: AztecAddress, baseSlot: Fr): Promise<Fr[][]> { - return this.#store.transactionAsync(async () => { - // Load length, defaulting to 0 if not found - const maybeLength = await this.loadCapsule(contractAddress, baseSlot); - const length = maybeLength ? maybeLength[0].toBigInt() : 0n; - - const values: Fr[][] = []; - - // Read each capsule at consecutive slots after baseSlot - for (let i = 0; i < length; i++) { - const currentValue = await this.loadCapsule(contractAddress, arraySlot(baseSlot, i)); - if (currentValue == undefined) { - throw new Error( - `Expected non-empty value at capsule array in base slot ${baseSlot} at index ${i} for contract ${contractAddress}`, - ); - } + /** + * Reads a capsule array from the per-contract non-volatile database. + * @param contractAddress - The contract address that owns the capsule array + * @param baseSlot - The slot where the array length is stored + * @param jobId - Optional jobId to check staging first. + * @returns The array of capsules + */ + async readCapsuleArray(contractAddress: AztecAddress, baseSlot: Fr, jobId?: string): Promise<Fr[][]> { + // Load length, defaulting to 0 if not found + const maybeLength = await this.loadCapsule(contractAddress, baseSlot, jobId); + const length = maybeLength ? maybeLength[0].toBigInt() : 0n; + + const values: Fr[][] = []; - values.push(currentValue); + // Read each capsule at consecutive slots after baseSlot + for (let i = 0; i < length; i++) { + const currentValue = await this.loadCapsule(contractAddress, arraySlot(baseSlot, i), jobId); + if (currentValue == undefined) { + throw new Error( + `Expected non-empty value at capsule array in base slot ${baseSlot} at index ${i} for contract ${contractAddress}`, + ); } - return values; - }); + values.push(currentValue); + } + + return values; }
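
For reference, the layout these array helpers assume: the length lives at baseSlot and element i lives at baseSlot + 1 + i. arraySlot is defined elsewhere in this file; the sketch below assumes it resolves to that offset, which matches the tests above reading elements at baseSlot.add(new Fr(1 + i)).

// Hypothetical sketch of the slot arithmetic; the real arraySlot operates on Fr.
const arraySlotSketch = (baseSlot: number, index: number): number => baseSlot + 1 + index;
// A 3-element capsule array at baseSlot = 3 therefore occupies:
//   slot 3 -> [new Fr(3)]  (length)
//   slot 4 -> element 0
//   slot 5 -> element 1
//   slot 6 -> element 2
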
- setCapsuleArray(contractAddress: AztecAddress, baseSlot: Fr, content: Fr[][]) { - return this.#store.transactionAsync(async () => { - // Load current length, defaulting to 0 if not found - const maybeLength = await this.loadCapsule(contractAddress, baseSlot); + /** + * Sets a capsule array in the per-contract non-volatile database. + * @param contractAddress - The contract address that owns the capsule array + * @param baseSlot - The slot where the array length is stored + * @param content - The array of capsules to set + * @param jobId - The job ID for staged writes. + */ + async setCapsuleArray(contractAddress: AztecAddress, baseSlot: Fr, content: Fr[][], jobId: string): Promise<void> { + // Load current length, defaulting to 0 if not found + const maybeLength = await this.loadCapsule(contractAddress, baseSlot, jobId); + const originalLength = maybeLength ? maybeLength[0].toNumber() : 0; + + // Set the new length + this.storeCapsule(contractAddress, baseSlot, [new Fr(content.length)], jobId); - - // Set the new length - await this.storeCapsule(contractAddress, baseSlot, [new Fr(content.length)]); + // Store the new content, possibly overwriting existing values + for (let i = 0; i < content.length; i++) { + this.storeCapsule(contractAddress, arraySlot(baseSlot, i), content[i], jobId); + } - // Store the new content, possibly overwriting existing values - for (let i = 0; i < content.length; i++) { - await this.storeCapsule(contractAddress, arraySlot(baseSlot, i), content[i]); - } + // Clear any stragglers + for (let i = content.length; i < originalLength; i++) { + this.deleteCapsule(contractAddress, arraySlot(baseSlot, i), jobId); + } + } + + /** + * Commits staged data to main storage. + * Called by JobCoordinator when a job completes successfully. + * Note: JobCoordinator wraps all commits in a single transaction, so we don't + * need our own transactionAsync here (and using one would deadlock on IndexedDB). + * @param jobId - The jobId identifying which staged data to commit + */ + async commit(jobId: string): Promise<void> { + const jobStaging = this.#stagedCapsules.get(jobId); + if (!jobStaging) { + return; + } - // Clear any stragglers - for (let i = content.length; i < originalLength; i++) { - await this.deleteCapsule(contractAddress, arraySlot(baseSlot, i)); + for (const [key, value] of jobStaging) { + if (value === null) { + await this.#capsules.delete(key); + } else { + await this.#capsules.set(key, value); } - }); + } + + this.#stagedCapsules.delete(jobId); + } + + /** + * Discards staged data without committing. + * @param jobId - The jobId identifying which staged data to discard + */ + discardStaged(jobId: string): Promise<void> { + this.#stagedCapsules.delete(jobId); + return Promise.resolve(); } } diff --git a/yarn-project/pxe/src/storage/note_store/note_store.test.ts b/yarn-project/pxe/src/storage/note_store/note_store.test.ts index 112e2bcb76e9..64673e037440 100644 --- a/yarn-project/pxe/src/storage/note_store/note_store.test.ts +++ b/yarn-project/pxe/src/storage/note_store/note_store.test.ts @@ -7,6 +7,8 @@ import { NoteDao, NoteStatus } from '@aztec/stdlib/note'; import { NoteStore } from './note_store.js'; +const TEST_JOB_ID = 'test-job'; + // ----------------------------------------------------------------------------- // Shared constants for deterministic fixtures // ----------------------------------------------------------------------------- @@ -75,8 +77,9 @@ describe('NoteStore', () => { index: 3n, }); - await provider.addNotes([note1, note2], SCOPE_1); - await provider.addNotes([note3], SCOPE_2); + await provider.addNotes([note1, note2], SCOPE_1, TEST_JOB_ID); + await provider.addNotes([note3], SCOPE_2, TEST_JOB_ID); + await provider.commit(TEST_JOB_ID); return { store, provider, note1, note2, note3 }; } @@ -119,7 +122,8 @@ describe('NoteStore', () => { const noteA = await mkNote({ contractAddress: CONTRACT_A, index: 1n }); const noteB = await mkNote({ contractAddress: CONTRACT_B, index: 2n }); - await provider1.addNotes([noteA, noteB], FAKE_ADDRESS); + await provider1.addNotes([noteA, noteB], FAKE_ADDRESS, TEST_JOB_ID); + await provider1.commit(TEST_JOB_ID); const provider2 = await NoteStore.create(store); @@ -171,7 +175,8 @@ describe('NoteStore', () => { storageSlot: SLOT_X, index: 4n, }); - await provider.addNotes([note4], SCOPE_2); + await provider.addNotes([note4], SCOPE_2, TEST_JOB_ID); + await provider.commit(TEST_JOB_ID); const res = await provider.getNotes({ contractAddress: CONTRACT_A, @@ 
-183,7 +188,8 @@ describe('NoteStore', () => { it('deduplicates notes that appear in multiple scopes', async () => { // note 1 has been added to scope 1 in setup so we add it to scope 2 to then be able to test deduplication - await provider.addNotes([note1], SCOPE_2); + await provider.addNotes([note1], SCOPE_2, TEST_JOB_ID); + await provider.commit(TEST_JOB_ID); const res = await provider.getNotes({ contractAddress: CONTRACT_A, @@ -197,7 +203,8 @@ describe('NoteStore', () => { it('filters notes by status, returning ACTIVE by default and both ACTIVE and NULLIFIED when requested', async () => { const nullifiers = [mkNullifier(note2)]; - await expect(provider.applyNullifiers(nullifiers)).resolves.toEqual([note2]); + await provider.applyNullifiers(nullifiers, TEST_JOB_ID); + await provider.commit(TEST_JOB_ID); const resActive = await provider.getNotes({ contractAddress: CONTRACT_A }); expect(new Set(getIndexes(resActive))).toEqual(new Set([1n])); @@ -221,7 +228,8 @@ describe('NoteStore', () => { it('applies scope filtering to nullified notes', async () => { const nullifiers = [mkNullifier(note3)]; - await expect(provider.applyNullifiers(nullifiers)).resolves.toEqual([note3]); + await provider.applyNullifiers(nullifiers, TEST_JOB_ID); + await provider.commit(TEST_JOB_ID); // Query for contractB, but with the wrong scope (scope1) const res = await provider.getNotes({ @@ -274,7 +282,7 @@ describe('NoteStore', () => { }); it('returns no notes when filtering by non-existing contractAddress', async () => { - const res = await provider.getNotes({ contractAddress: FAKE_ADDRESS }); + const res = await provider.getNotes({ contractAddress: FAKE_ADDRESS }, TEST_JOB_ID); expect(getIndexes(res)).toHaveLength(0); }); @@ -337,12 +345,13 @@ describe('NoteStore', () => { }); it('returns empty array when given empty nullifiers array', async () => { - const result = await provider.applyNullifiers([]); + const result = await provider.applyNullifiers([], TEST_JOB_ID); expect(result).toEqual([]); }); it('nullifies a single note and moves it from active to nullified', async () => { - const result = await provider.applyNullifiers([mkNullifier(note1)]); + const result = await provider.applyNullifiers([mkNullifier(note1)], TEST_JOB_ID); + await provider.commit(TEST_JOB_ID); expect(result).toEqual([note1]); const active = await provider.getNotes({ contractAddress: CONTRACT_A }); @@ -357,7 +366,8 @@ describe('NoteStore', () => { it('nullifies multiple notes and returns them', async () => { const nullifiers = [mkNullifier(note1), mkNullifier(note3)]; - const result = await provider.applyNullifiers(nullifiers); + const result = await provider.applyNullifiers(nullifiers, TEST_JOB_ID); + await provider.commit(TEST_JOB_ID); const activeA = await provider.getNotes({ contractAddress: CONTRACT_A }); const activeB = await provider.getNotes({ contractAddress: CONTRACT_B }); @@ -368,7 +378,8 @@ describe('NoteStore', () => { }); it('retrieves a nullified note by its siloedNullifier when status is ACTIVE_OR_NULLIFIED', async () => { - await provider.applyNullifiers([mkNullifier(note2)]); + await provider.applyNullifiers([mkNullifier(note2)], TEST_JOB_ID); + await provider.commit(TEST_JOB_ID); const filter = { contractAddress: CONTRACT_A, @@ -402,12 +413,15 @@ describe('NoteStore', () => { l2BlockHash: L2BlockHash.random(), }; - await expect(provider.applyNullifiers([fakeNullifier])).rejects.toThrow('Nullifier not found in applyNullifiers'); + await expect(provider.applyNullifiers([fakeNullifier], TEST_JOB_ID)).rejects.toThrow( + 
'Nullifier not found in applyNullifiers', + ); }); it('preserves scope information when nullifying notes', async () => { const nullifiers = [mkNullifier(note1)]; - await provider.applyNullifiers(nullifiers); + await provider.applyNullifiers(nullifiers, TEST_JOB_ID); + await provider.commit(TEST_JOB_ID); // Verify nullified note remains visible only within its original scope const wrongScopeNotes = await provider.getNotes({ @@ -436,7 +450,7 @@ describe('NoteStore', () => { }, ]; - await expect(provider.applyNullifiers(nullifiers)).rejects.toThrow(); + await expect(provider.applyNullifiers(nullifiers, TEST_JOB_ID)).rejects.toThrow(); // Verify note1 is still active (transaction rolled back) const activeNotes = await provider.getNotes({ contractAddress: CONTRACT_A }); @@ -445,7 +459,8 @@ describe('NoteStore', () => { it('updates all relevant indexes when nullifying notes', async () => { const nullifiers = [mkNullifier(note1)]; - await provider.applyNullifiers(nullifiers); + await provider.applyNullifiers(nullifiers, TEST_JOB_ID); + await provider.commit(TEST_JOB_ID); // Test various filter combinations still work const byContract = await provider.getNotes({ @@ -470,19 +485,20 @@ describe('NoteStore', () => { }); it('attempts to nullify the same note twice in succession results in error', async () => { - await provider.applyNullifiers([mkNullifier(note1)]); // First application should succeed + await provider.applyNullifiers([mkNullifier(note1)], TEST_JOB_ID); // First application should succeed + await provider.commit(TEST_JOB_ID); const activeNotes = await provider.getNotes({ contractAddress: CONTRACT_A }); expect(new Set(getIndexes(activeNotes))).toEqual(new Set([2n])); // should throw on second attempt as note1 is already nullified - await expect(provider.applyNullifiers([mkNullifier(note1)])).rejects.toThrow( + await expect(provider.applyNullifiers([mkNullifier(note1)], 'second-job')).rejects.toThrow( 'Nullifier already applied in applyNullifiers', ); }); it('attempts to nullify the same note twice in same call results in error', async () => { const nullifiers = [mkNullifier(note1), mkNullifier(note1)]; - await expect(provider.applyNullifiers(nullifiers)).rejects.toThrow( + await expect(provider.applyNullifiers(nullifiers, TEST_JOB_ID)).rejects.toThrow( 'Nullifier already applied in applyNullifiers', ); }); @@ -510,7 +526,8 @@ describe('NoteStore', () => { const noteBlock3 = await mkNote({ index: 3n, l2BlockNumber: BlockNumber(3) }); // Nullified at block 4 const noteBlock5 = await mkNote({ index: 5n, l2BlockNumber: BlockNumber(5) }); // Created after rollback block 3 - await provider.addNotes([noteBlock1, noteBlock2, noteBlock3, noteBlock5], SCOPE_1); + await provider.addNotes([noteBlock1, noteBlock2, noteBlock3, noteBlock5], SCOPE_1, TEST_JOB_ID); + await provider.commit(TEST_JOB_ID); const nullifiers = [ mkNullifier(noteBlock1, BlockNumber(2)), @@ -520,7 +537,8 @@ describe('NoteStore', () => { // Apply nullifiers and rollback to block 3 // - should restore noteBlock3 (nullified at block 4) and preserve noteBlock1 (nullified at block 2) - await provider.applyNullifiers(nullifiers); + await provider.applyNullifiers(nullifiers, 'nullify-job'); + await provider.commit('nullify-job'); await provider.rollbackNotesAndNullifiers(3, 6); } @@ -571,7 +589,8 @@ describe('NoteStore', () => { describe('rewind nullifications edge cases', () => { it('handles rollback when blockNumber equals synchedBlockNumber', async () => { const note = await mkNote({ index: 10n, l2BlockNumber: BlockNumber(5) }); - 
await provider.addNotes([note], SCOPE_1); + await provider.addNotes([note], SCOPE_1, TEST_JOB_ID); + await provider.commit(TEST_JOB_ID); const nullifiers = [ { @@ -580,7 +599,8 @@ describe('NoteStore', () => { l2BlockHash: L2BlockHash.fromString(note.l2BlockHash), }, ]; - await provider.applyNullifiers(nullifiers); + await provider.applyNullifiers(nullifiers, 'nullify-job'); + await provider.commit('nullify-job'); // Since nullification happened at block 5 (not after), it should stay nullified // The rewind loop processes blocks (blockNumber+1) to synchedBlockNumber = 6 to 5 = no iterations @@ -598,7 +618,8 @@ describe('NoteStore', () => { it('handles rollback when synchedBlockNumber < blockNumber', async () => { const note = await mkNote({ index: 20n, l2BlockNumber: BlockNumber(3) }); - await provider.addNotes([note], SCOPE_1); + await provider.addNotes([note], SCOPE_1, TEST_JOB_ID); + await provider.commit(TEST_JOB_ID); const nullifiers = [ { @@ -607,7 +628,8 @@ describe('NoteStore', () => { l2BlockHash: L2BlockHash.fromString(note.l2BlockHash), }, ]; - await provider.applyNullifiers(nullifiers); + await provider.applyNullifiers(nullifiers, 'nullify-job'); + await provider.commit('nullify-job'); // blockNumber=6, synchedBlockNumber=4 therefore no nullifications to rewind await provider.rollbackNotesAndNullifiers(6, 4); @@ -625,7 +647,8 @@ describe('NoteStore', () => { it('handles rollback with a large block gap', async () => { const note1 = await mkNote({ index: 30n, l2BlockNumber: BlockNumber(5) }); const note2 = await mkNote({ index: 31n, l2BlockNumber: BlockNumber(10) }); - await provider.addNotes([note1, note2], SCOPE_1); + await provider.addNotes([note1, note2], SCOPE_1, TEST_JOB_ID); + await provider.commit(TEST_JOB_ID); const nullifiers = [ { @@ -634,7 +657,8 @@ describe('NoteStore', () => { l2BlockHash: L2BlockHash.fromString(note1.l2BlockHash), }, ]; - await provider.applyNullifiers(nullifiers); + await provider.applyNullifiers(nullifiers, 'nullify-job'); + await provider.commit('nullify-job'); await provider.rollbackNotesAndNullifiers(5, 100); // note1 should be restored (nullified at block 7 > rollback block 5) @@ -650,4 +674,306 @@ describe('NoteStore', () => { }); }); }); + + describe('NoteStore staging', () => { + let kvStore: AztecLMDBStoreV2; + let noteStore: NoteStore; + + beforeEach(async () => { + kvStore = await openTmpStore('note_data_provider_staging_test'); + noteStore = await NoteStore.create(kvStore); + await noteStore.addScope(SCOPE_1); + }); + + afterEach(async () => { + await kvStore.close(); + }); + + it('stages notes without affecting committed storage', async () => { + const committedNote = await NoteDao.random({ + contractAddress: CONTRACT_A, + storageSlot: SLOT_X, + index: 1n, + l2BlockNumber: BlockNumber(1), + }); + const stagedNote = await NoteDao.random({ + contractAddress: CONTRACT_A, + storageSlot: SLOT_X, + index: 2n, + l2BlockNumber: BlockNumber(2), + }); + + const commitJobId: string = 'commit-job'; + const stagingJobId: string = 'staging-job'; + + // Add committed note + await noteStore.addNotes([committedNote], SCOPE_1, commitJobId); + await noteStore.commit(commitJobId); + + // Add staged note with different jobId (not committed) + await noteStore.addNotes([stagedNote], SCOPE_1, stagingJobId); + + // Without jobId, should only see committed note + const notesWithoutJobId = await noteStore.getNotes({ contractAddress: CONTRACT_A }); + expect(notesWithoutJobId).toHaveLength(1); + expect(notesWithoutJobId[0].index).toBe(1n); + + // With 
stagingJobId, should see both committed and staged notes + const notesWithJobId = await noteStore.getNotes({ contractAddress: CONTRACT_A }, stagingJobId); + expect(notesWithJobId).toHaveLength(2); + expect(new Set(notesWithJobId.map(n => n.index))).toEqual(new Set([1n, 2n])); + }); + + it('commit promotes staged notes to main storage', async () => { + const stagedNote = await NoteDao.random({ + contractAddress: CONTRACT_A, + storageSlot: SLOT_X, + index: 1n, + l2BlockNumber: BlockNumber(1), + }); + + const jobId: string = 'test123'; + + // Add staged note + await noteStore.addNotes([stagedNote], SCOPE_1, jobId); + + // Commit staging (promotes to main) + await noteStore.commit(jobId); + + // Now should see the note without jobId + const notes = await noteStore.getNotes({ contractAddress: CONTRACT_A }); + expect(notes).toHaveLength(1); + expect(notes[0].index).toBe(1n); + }); + + it('discardStaged removes staged notes without affecting main', async () => { + const committedNote = await NoteDao.random({ + contractAddress: CONTRACT_A, + storageSlot: SLOT_X, + index: 1n, + l2BlockNumber: BlockNumber(1), + }); + const stagedNote = await NoteDao.random({ + contractAddress: CONTRACT_A, + storageSlot: SLOT_X, + index: 2n, + l2BlockNumber: BlockNumber(2), + }); + + const commitJobId: string = 'commit-job'; + const stagingJobId: string = 'staging-job'; + + // Add committed note + await noteStore.addNotes([committedNote], SCOPE_1, commitJobId); + await noteStore.commit(commitJobId); + + // Add staged note (not committed) + await noteStore.addNotes([stagedNote], SCOPE_1, stagingJobId); + + // Discard staging + await noteStore.discardStaged(stagingJobId); + + // Should only see committed note + const notes = await noteStore.getNotes({ contractAddress: CONTRACT_A }); + expect(notes).toHaveLength(1); + expect(notes[0].index).toBe(1n); + }); + + it('stages nullification operations correctly', async () => { + const note = await NoteDao.random({ + contractAddress: CONTRACT_A, + storageSlot: SLOT_X, + index: 1n, + l2BlockNumber: BlockNumber(1), + siloedNullifier: new Fr(123n), + }); + + const commitJobId: string = 'commit-job'; + const nullifyJobId: string = 'nullify-job'; + + // Add committed note + await noteStore.addNotes([note], SCOPE_1, commitJobId); + await noteStore.commit(commitJobId); + + // Stage nullification (not committed) + const nullifier = { + data: note.siloedNullifier, + l2BlockNumber: BlockNumber(2), + l2BlockHash: L2BlockHash.random(), + }; + await noteStore.applyNullifiers([nullifier], nullifyJobId); + + // Without jobId, note should still be active + const activeNotes = await noteStore.getNotes({ contractAddress: CONTRACT_A }); + expect(activeNotes).toHaveLength(1); + + // Commit staging + await noteStore.commit(nullifyJobId); + + // Now note should be nullified + const activeNotesAfterCommit = await noteStore.getNotes({ contractAddress: CONTRACT_A }); + expect(activeNotesAfterCommit).toHaveLength(0); + + const allNotes = await noteStore.getNotes({ + contractAddress: CONTRACT_A, + status: NoteStatus.ACTIVE_OR_NULLIFIED, + }); + expect(allNotes).toHaveLength(1); + }); + + it('can stage both add and nullify in same job', async () => { + const note = await NoteDao.random({ + contractAddress: CONTRACT_A, + storageSlot: SLOT_X, + index: 1n, + l2BlockNumber: BlockNumber(1), + siloedNullifier: new Fr(456n), + }); + + const jobId: string = 'test123'; + + // Stage add note + await noteStore.addNotes([note], SCOPE_1, jobId); + + // Stage nullify the same note (within same job) + const nullifier = 
{ + data: note.siloedNullifier, + l2BlockNumber: BlockNumber(2), + l2BlockHash: L2BlockHash.random(), + }; + await noteStore.applyNullifiers([nullifier], jobId); + + // Commit both operations + await noteStore.commit(jobId); + + // Note should exist but be nullified + const activeNotes = await noteStore.getNotes({ contractAddress: CONTRACT_A }); + expect(activeNotes).toHaveLength(0); + + const allNotes = await noteStore.getNotes({ + contractAddress: CONTRACT_A, + status: NoteStatus.ACTIVE_OR_NULLIFIED, + }); + expect(allNotes).toHaveLength(1); + }); + + it('getNotes with jobId excludes committed notes staged for nullification', async () => { + const note = await NoteDao.random({ + contractAddress: CONTRACT_A, + storageSlot: SLOT_X, + index: 1n, + l2BlockNumber: BlockNumber(1), + siloedNullifier: new Fr(789n), + }); + + // Add note to committed storage + const commitJobId: string = 'commit-job'; + await noteStore.addNotes([note], SCOPE_1, commitJobId); + await noteStore.commit(commitJobId); + + const nullifyJobId: string = 'nullify-job'; + + // Stage nullification (not committed) + const nullifier = { + data: note.siloedNullifier, + l2BlockNumber: BlockNumber(2), + l2BlockHash: L2BlockHash.random(), + }; + await noteStore.applyNullifiers([nullifier], nullifyJobId); + + // Without jobId, note should still be visible (it's committed and active) + const notesWithoutJobId = await noteStore.getNotes({ contractAddress: CONTRACT_A }); + expect(notesWithoutJobId).toHaveLength(1); + + // With nullifyJobId, note should be excluded (staged for nullification) + const notesWithJobId = await noteStore.getNotes({ contractAddress: CONTRACT_A }, nullifyJobId); + expect(notesWithJobId).toHaveLength(0); + }); + + it('getNotes with jobId filters staged notes by scope', async () => { + await noteStore.addScope(SCOPE_2); + + const note = await NoteDao.random({ + contractAddress: CONTRACT_A, + storageSlot: SLOT_X, + index: 1n, + l2BlockNumber: BlockNumber(1), + }); + + const stagingJobId: string = 'staging-job'; + + // Stage note under SCOPE_1 (not committed) + await noteStore.addNotes([note], SCOPE_1, stagingJobId); + + // Query with SCOPE_1 - should see the staged note + const notesScope1 = await noteStore.getNotes({ contractAddress: CONTRACT_A, scopes: [SCOPE_1] }, stagingJobId); + expect(notesScope1).toHaveLength(1); + + // Query with SCOPE_2 - should not see the staged note + const notesScope2 = await noteStore.getNotes({ contractAddress: CONTRACT_A, scopes: [SCOPE_2] }, stagingJobId); + expect(notesScope2).toHaveLength(0); + }); + + it('getNotes with jobId filters staged notes by storageSlot', async () => { + const noteSlotX = await NoteDao.random({ + contractAddress: CONTRACT_A, + storageSlot: SLOT_X, + index: 1n, + l2BlockNumber: BlockNumber(1), + }); + const noteSlotY = await NoteDao.random({ + contractAddress: CONTRACT_A, + storageSlot: SLOT_Y, + index: 2n, + l2BlockNumber: BlockNumber(1), + }); + + const stagingJobId: string = 'staging-job'; + + // Stage both notes (not committed) + await noteStore.addNotes([noteSlotX, noteSlotY], SCOPE_1, stagingJobId); + + // Query with SLOT_X filter - should only see noteSlotX + const notesSlotX = await noteStore.getNotes({ contractAddress: CONTRACT_A, storageSlot: SLOT_X }, stagingJobId); + expect(notesSlotX).toHaveLength(1); + expect(notesSlotX[0].index).toBe(1n); + + // Query with SLOT_Y filter - should only see noteSlotY + const notesSlotY = await noteStore.getNotes({ contractAddress: CONTRACT_A, storageSlot: SLOT_Y }, stagingJobId); + 
expect(notesSlotY).toHaveLength(1); + expect(notesSlotY[0].index).toBe(2n); + }); + + it('getNotes with jobId filters staged notes by contract', async () => { + const noteContractA = await NoteDao.random({ + contractAddress: CONTRACT_A, + storageSlot: SLOT_X, + index: 1n, + l2BlockNumber: BlockNumber(1), + }); + const noteContractB = await NoteDao.random({ + contractAddress: CONTRACT_B, + storageSlot: SLOT_X, + index: 2n, + l2BlockNumber: BlockNumber(1), + }); + + await noteStore.addScope(SCOPE_2); + const stagingJobId: string = 'staging-job'; + + // Stage notes for different contracts (not committed) + await noteStore.addNotes([noteContractA], SCOPE_1, stagingJobId); + await noteStore.addNotes([noteContractB], SCOPE_2, stagingJobId); + + // Query for CONTRACT_A - should only see noteContractA + const notesContractA = await noteStore.getNotes({ contractAddress: CONTRACT_A }, stagingJobId); + expect(notesContractA).toHaveLength(1); + expect(notesContractA[0].index).toBe(1n); + + // Query for CONTRACT_B - should only see noteContractB + const notesContractB = await noteStore.getNotes({ contractAddress: CONTRACT_B }, stagingJobId); + expect(notesContractB).toHaveLength(1); + expect(notesContractB[0].index).toBe(2n); + }); + }); }); diff --git a/yarn-project/pxe/src/storage/note_store/note_store.ts b/yarn-project/pxe/src/storage/note_store/note_store.ts index 11825b20cbb1..20ca756db1b2 100644 --- a/yarn-project/pxe/src/storage/note_store/note_store.ts +++ b/yarn-project/pxe/src/storage/note_store/note_store.ts @@ -7,13 +7,39 @@ import type { DataInBlock } from '@aztec/stdlib/block'; import { NoteStatus, type NotesFilter } from '@aztec/stdlib/note'; import { NoteDao } from '@aztec/stdlib/note'; +import type { StagedStore } from '../../job_coordinator/job_coordinator.js'; + +type StagedAddNote = { + noteBuffer: Buffer; + scope: string; + contractAddress: string; + storageSlot: string; + siloedNullifier: string; +}; + +type StagedNullifyNote = { + noteBuffer: Buffer; + nullifier: string; + blockNumber: number; + contractAddress: string; + storageSlot: string; +}; + +/** In-memory staged data structure for a single job */ +type StagedNoteData = { + addedNotes: Map<string, StagedAddNote>; // noteIndex -> data + nullifiedNotes: Map<string, StagedNullifyNote>; // noteIndex -> data +}; +
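
The shape above is easiest to see with a concrete job. A simplified instance (string placeholders stand in for the real StagedAddNote / StagedNullifyNote records and serialized buffers):

// One job that both adds a note and later nullifies it, as in the
// 'can stage both add and nullify in same job' test above.
const noteIndex = '0a'.padStart(64, '0'); // toBufferBE(dao.index, 32).toString('hex')
const exampleJobStaging = {
  addedNotes: new Map<string, string>([[noteIndex, '<serialized NoteDao>']]),
  nullifiedNotes: new Map<string, string>([[noteIndex, '<nullifier + block info>']]),
};
// Per that test, committing this job leaves the note present but nullified;
// discarding it leaves committed storage untouched.
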
**/ -export class NoteStore { +export class NoteStore implements StagedStore { + readonly storeName = 'notes'; + #store: AztecAsyncKVStore; #notes: AztecAsyncMap; #nullifiedNotes: AztecAsyncMap; @@ -30,6 +56,9 @@ export class NoteStore { #notesByContractAndScope: Map>; #notesByStorageSlotAndScope: Map>; + /** In-memory staging: jobId -> { addedNotes, nullifiedNotes } */ + #stagedData: Map; + private constructor(store: AztecAsyncKVStore) { this.#store = store; this.#notes = store.openMap('notes'); @@ -46,6 +75,8 @@ export class NoteStore { this.#notesToScope = store.openMultiMap('notes_to_scope'); this.#notesByContractAndScope = new Map>(); this.#notesByStorageSlotAndScope = new Map>(); + + this.#stagedData = new Map(); } /** @@ -98,23 +129,26 @@ export class NoteStore { * * @param notes - Notes to store * @param scope - The scope (user/account) under which to store the notes + * @param jobId - The job ID for staging writes */ - addNotes(notes: NoteDao[], scope: AztecAddress): Promise { - return this.#store.transactionAsync(async () => { - if (!(await this.#scopes.hasAsync(scope.toString()))) { - await this.addScope(scope); - } - - for (const dao of notes) { - const noteIndex = toBufferBE(dao.index, 32).toString('hex'); - await this.#notes.set(noteIndex, dao.toBuffer()); - await this.#notesToScope.set(noteIndex, scope.toString()); - await this.#nullifierToNoteId.set(dao.siloedNullifier.toString(), noteIndex); + addNotes(notes: NoteDao[], scope: AztecAddress, jobId: string): Promise { + let jobStaging = this.#stagedData.get(jobId); + if (!jobStaging) { + jobStaging = { addedNotes: new Map(), nullifiedNotes: new Map() }; + this.#stagedData.set(jobId, jobStaging); + } - await this.#notesByContractAndScope.get(scope.toString())!.set(dao.contractAddress.toString(), noteIndex); - await this.#notesByStorageSlotAndScope.get(scope.toString())!.set(dao.storageSlot.toString(), noteIndex); - } - }); + for (const dao of notes) { + const noteIndex = toBufferBE(dao.index, 32).toString('hex'); + jobStaging.addedNotes.set(noteIndex, { + noteBuffer: dao.toBuffer(), + scope: scope.toString(), + contractAddress: dao.contractAddress.toString(), + storageSlot: dao.storageSlot.toString(), + siloedNullifier: dao.siloedNullifier.toString(), + }); + } + return Promise.resolve(); } /** @@ -225,114 +259,233 @@ export class NoteStore { * * @param filter - Filter criteria including contractAddress (required), and optional * owner, storageSlot, status, scopes, and siloedNullifier. + * @param jobId - Optional jobId for reading staged data * @returns Filtered and deduplicated notes (a note might be present in multiple scopes - we ensure it is only * returned once if this is the case) * @throws If filtering by an empty scopes array. Scopes have to be set to undefined or to a non-empty array. */ - async getNotes(filter: NotesFilter): Promise { - filter.status = filter.status ?? NoteStatus.ACTIVE; + async getNotes(filter: NotesFilter, jobId?: string): Promise { + filter.status ??= NoteStatus.ACTIVE; - // throw early if scopes is an empty array if (filter.scopes !== undefined && filter.scopes.length === 0) { throw new Error( 'Trying to get notes with an empty scopes array. Scopes have to be set to undefined if intending on not filtering by scopes.', ); } - const candidateNoteSources = []; + // Get staged data for this job + const staged = jobId ? this.#stagedData.get(jobId) : undefined; + const stagedScopes = this.#extractStagedScopes(staged); + const stagedNullifiedIds = new Set(staged?.nullifiedNotes.keys() ?? 
[]); - filter.scopes ??= (await toArray(this.#scopes.keysAsync())).map(addressString => - AztecAddress.fromString(addressString), - ); + // Resolve scopes (defaults to all known scopes if not specified) + const scopes = await this.#resolveScopes(filter.scopes, stagedScopes); - const activeNoteIdsPerScope: string[][] = []; + const result = new Map(); - for (const scope of new Set(filter.scopes)) { - const formattedScopeString = scope.toString(); - if (!(await this.#scopes.hasAsync(formattedScopeString))) { - throw new Error('Trying to get incoming notes of a scope that is not in the PXE database'); - } + // 1. Load committed active notes + const committedNoteIds = await this.#getCommittedNoteIds(filter, scopes); + await this.#loadCommittedActiveNotes(filter, committedNoteIds, stagedNullifiedIds, result); - activeNoteIdsPerScope.push( - filter.storageSlot - ? await toArray( - this.#notesByStorageSlotAndScope.get(formattedScopeString)!.getValuesAsync(filter.storageSlot.toString()), - ) - : await toArray( - this.#notesByContractAndScope - .get(formattedScopeString)! - .getValuesAsync(filter.contractAddress.toString()), - ), - ); + // 2. Load committed nullified notes (if requested) + if (filter.status === NoteStatus.ACTIVE_OR_NULLIFIED) { + await this.#loadCommittedNullifiedNotes(filter, scopes, result); } - candidateNoteSources.push({ - ids: new Set(activeNoteIdsPerScope.flat()), - notes: this.#notes, - }); + // 3. Load staged notes + if (staged) { + this.#loadStagedNotes(filter, staged, scopes, stagedNullifiedIds, result); + } - // If status is ACTIVE_OR_NULLIFIED we add nullified notes as candidates on top of the default active ones. - if (filter.status === NoteStatus.ACTIVE_OR_NULLIFIED) { - const nullifiedIds = filter.storageSlot - ? await toArray(this.#nullifiedNotesByStorageSlot.getValuesAsync(filter.storageSlot.toString())) - : await toArray(this.#nullifiedNotesByContract.getValuesAsync(filter.contractAddress.toString())); + return Array.from(result.values()); + } - const setOfScopes = new Set(filter.scopes.map(s => s.toString() as string)); - const filteredNullifiedIds = new Set(); + /** + * Checks if a note matches the given filter criteria. + */ + #matchesFilter(note: NoteDao, filter: NotesFilter): boolean { + if (!note.contractAddress.equals(filter.contractAddress)) { + return false; + } + if (filter.owner && !note.owner.equals(filter.owner)) { + return false; + } + if (filter.storageSlot && !note.storageSlot.equals(filter.storageSlot)) { + return false; + } + if (filter.siloedNullifier && !note.siloedNullifier.equals(filter.siloedNullifier)) { + return false; + } + return true; + } - for (const noteId of nullifiedIds) { - const scopeList = await toArray(this.#nullifiedNotesToScope.getValuesAsync(noteId)); - if (scopeList.some(scope => setOfScopes.has(scope))) { - filteredNullifiedIds.add(noteId); - } + /** + * Extracts unique scopes from staged notes. + */ + #extractStagedScopes(staged: StagedNoteData | undefined): Set { + const scopes = new Set(); + if (staged) { + for (const note of staged.addedNotes.values()) { + scopes.add(note.scope); } + } + return scopes; + } - if (filteredNullifiedIds.size > 0) { - candidateNoteSources.push({ - ids: filteredNullifiedIds, - notes: this.#nullifiedNotes, - }); + /** + * Resolves filter scopes, defaulting to all known scopes if not specified. 
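+ * A worked example of the rule (addresses illustrative): with committed scopes [A]
+ * and a job that staged notes under scope B,
+ *
+ *   await this.#resolveScopes(undefined, new Set([B.toString()]));  // -> [A, B]
+ *   await this.#resolveScopes([B], new Set([B.toString()]));        // -> [B] (B is staged)
+ *   await this.#resolveScopes([C], new Set());                      // throws: unknown scope
+ *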
+ * @returns The resolved scopes as AztecAddress array + * @throws If any scope doesn't exist in committed storage or staged data + */ + async #resolveScopes(filterScopes: AztecAddress[] | undefined, stagedScopes: Set): Promise { + // Default to including all known scopes if filter.scopes is not specified + if (filterScopes === undefined) { + const committedScopes = await toArray(this.#scopes.keysAsync()); + return [ + ...committedScopes.map(s => AztecAddress.fromString(s)), + ...Array.from(stagedScopes).map(s => AztecAddress.fromString(s)), + ]; + } + + // Validate that all requested scopes exist (either committed or staged) + for (const scope of filterScopes) { + const scopeString = scope.toString(); + if (!stagedScopes.has(scopeString) && !(await this.#scopes.hasAsync(scopeString))) { + throw new Error('Trying to get incoming notes of a scope that is not in the PXE database'); } } - const result: NoteDao[] = []; - for (const { ids, notes } of candidateNoteSources) { - for (const id of ids) { - const serializedNote = await notes.getAsync(id); - if (!serializedNote) { - continue; - } + return filterScopes; + } - const note = NoteDao.fromBuffer(serializedNote); - if (!note.contractAddress.equals(filter.contractAddress)) { - continue; - } + /** + * Gets committed note IDs for the given filter and scopes. + * Only queries scopes that exist in committed storage. + */ + async #getCommittedNoteIds(filter: NotesFilter, scopes: AztecAddress[]): Promise> { + const noteIds = new Set(); - if (filter.owner && !note.owner.equals(filter.owner)) { - continue; - } + for (const scope of new Set(scopes)) { + const scopeString = scope.toString(); - if (filter.storageSlot && !note.storageSlot.equals(filter.storageSlot!)) { - continue; - } + // Skip scopes that only exist in staging (no committed index for them) + if (!(await this.#scopes.hasAsync(scopeString))) { + continue; + } - if (filter.siloedNullifier && !note.siloedNullifier.equals(filter.siloedNullifier)) { - continue; - } + const ids = filter.storageSlot + ? await toArray( + this.#notesByStorageSlotAndScope.get(scopeString)!.getValuesAsync(filter.storageSlot.toString()), + ) + : await toArray( + this.#notesByContractAndScope.get(scopeString)!.getValuesAsync(filter.contractAddress.toString()), + ); - result.push(note); + for (const id of ids) { + noteIds.add(id); } } - // A note might be present in multiple scopes - we ensure it is only returned once - const deduplicated: NoteDao[] = []; - for (const note of result) { - if (!deduplicated.some(existing => existing.equals(note))) { - deduplicated.push(note); + return noteIds; + } + + /** + * Loads committed active notes into the result map. + * Skips notes that are staged for nullification (when status is ACTIVE). + */ + async #loadCommittedActiveNotes( + filter: NotesFilter, + noteIds: Set, + stagedNullifiedIds: Set, + result: Map, + ): Promise { + for (const id of noteIds) { + // Skip committed notes that are staged for nullification (for ACTIVE status) + if (filter.status === NoteStatus.ACTIVE && stagedNullifiedIds.has(id)) { + continue; + } + + const serializedNote = await this.#notes.getAsync(id); + if (!serializedNote) { + continue; + } + + const note = NoteDao.fromBuffer(serializedNote); + if (this.#matchesFilter(note, filter)) { + result.set(id, note); } } + } + + /** + * Loads committed nullified notes into the result map. + * Only called when filter.status includes nullified notes. 
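+ * Deduplication across scopes falls out of keying the result map by note ID, which
+ * replaces the previous equals()-based scan over an accumulating array.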
+ */ + async #loadCommittedNullifiedNotes( + filter: NotesFilter, + scopes: AztecAddress[], + result: Map, + ): Promise { + const nullifiedIds = filter.storageSlot + ? await toArray(this.#nullifiedNotesByStorageSlot.getValuesAsync(filter.storageSlot.toString())) + : await toArray(this.#nullifiedNotesByContract.getValuesAsync(filter.contractAddress.toString())); + + const scopeSet = new Set(scopes.map(s => s.toString() as string)); + + for (const noteId of nullifiedIds) { + // Already in result (shouldn't happen, but defensive) + if (result.has(noteId)) { + continue; + } + + // Check if note belongs to any of the requested scopes + const scopeList = await toArray(this.#nullifiedNotesToScope.getValuesAsync(noteId)); + if (!scopeList.some(scope => scopeSet.has(scope))) { + continue; + } - return deduplicated; + const serializedNote = await this.#nullifiedNotes.getAsync(noteId); + if (!serializedNote) { + continue; + } + + const note = NoteDao.fromBuffer(serializedNote); + if (this.#matchesFilter(note, filter)) { + result.set(noteId, note); + } + } + } + + /** + * Loads staged notes into the result map. + * Skips notes that are staged for nullification (when status is ACTIVE). + */ + #loadStagedNotes( + filter: NotesFilter, + staged: StagedNoteData, + scopes: AztecAddress[], + stagedNullifiedIds: Set, + result: Map, + ): void { + const scopeSet = new Set(scopes.map(s => s.toString() as string)); + + for (const [noteIndex, data] of staged.addedNotes) { + // Skip if staged for nullification and status is ACTIVE + if (filter.status === NoteStatus.ACTIVE && stagedNullifiedIds.has(noteIndex)) { + continue; + } + + // Filter by scope + if (!scopeSet.has(data.scope)) { + continue; + } + + const note = NoteDao.fromBuffer(data.noteBuffer); + if (this.#matchesFilter(note, filter)) { + // Map handles deduplication automatically (overwrites if same noteIndex) + result.set(noteIndex, note); + } + } } /** @@ -343,70 +496,197 @@ export class NoteStore { * the entire operation fails and no notes are modified. * * @param nullifiers - Array of nullifiers with their block numbers to process + * @param jobId - The job ID for staging writes * @returns Promise resolving to array of nullified NoteDao objects * @throws Error if any nullifier is not found in the active notes */ - applyNullifiers(nullifiers: DataInBlock[]): Promise { + async applyNullifiers(nullifiers: DataInBlock[], jobId: string): Promise { if (nullifiers.length === 0) { - return Promise.resolve([]); + return []; } - return this.#store.transactionAsync(async () => { - const nullifiedNotes: NoteDao[] = []; - - for (const blockScopedNullifier of nullifiers) { - const { data: nullifier, l2BlockNumber: blockNumber } = blockScopedNullifier; - const nullifierKey = nullifier.toString(); - - const noteIndex = await this.#nullifierToNoteId.getAsync(nullifierKey); - if (!noteIndex) { - // Check if already nullified? - const alreadyNullified = await this.#nullifiedNotesByNullifier.getAsync(nullifierKey); - if (alreadyNullified) { - throw new Error(`Nullifier already applied in applyNullifiers`); - } - throw new Error('Nullifier not found in applyNullifiers'); - } + const nullifiedNotes: NoteDao[] = []; - const noteBuffer = noteIndex ? 
await this.#notes.getAsync(noteIndex) : undefined; + let jobStaging = this.#stagedData.get(jobId); + if (!jobStaging) { + jobStaging = { addedNotes: new Map(), nullifiedNotes: new Map() }; + this.#stagedData.set(jobId, jobStaging); + } - if (!noteBuffer) { - throw new Error('Note not found in applyNullifiers'); - } + for (const blockScopedNullifier of nullifiers) { + const { data: nullifier, l2BlockNumber: blockNumber } = blockScopedNullifier; + const nullifierKey = nullifier.toString(); - const noteScopes = await toArray(this.#notesToScope.getValuesAsync(noteIndex)); - if (noteScopes.length === 0) { - // We should never run into this error because notes always have a scope assigned to them - either on initial - // insertion via `addNotes` or when removing their nullifiers. - throw new Error('Note scopes are missing in applyNullifiers'); + const noteIndex = await this.#getNoteIndexForNullifier(nullifierKey, jobId); + if (!noteIndex) { + const alreadyNullified = await this.#nullifiedNotesByNullifier.getAsync(nullifierKey); + if (alreadyNullified) { + throw new Error('Nullifier already applied in applyNullifiers'); } + throw new Error('Nullifier not found in applyNullifiers'); + } - const note = NoteDao.fromBuffer(noteBuffer); + if (jobStaging.nullifiedNotes.has(noteIndex)) { + throw new Error('Nullifier already applied in applyNullifiers'); + } - nullifiedNotes.push(note); + const noteBuffer = await this.#getNoteBuffer(noteIndex, jobId); + if (!noteBuffer) { + throw new Error('Note not found in applyNullifiers'); + } - await this.#notes.delete(noteIndex); - await this.#notesToScope.delete(noteIndex); + const note = NoteDao.fromBuffer(noteBuffer); + nullifiedNotes.push(note); + + // Stage the nullification + jobStaging.nullifiedNotes.set(noteIndex, { + noteBuffer, + nullifier: nullifierKey, + blockNumber, + contractAddress: note.contractAddress.toString(), + storageSlot: note.storageSlot.toString(), + }); + } - const scopes = await toArray(this.#scopes.keysAsync()); + return nullifiedNotes; + } - for (const scope of scopes) { - await this.#notesByContractAndScope.get(scope)!.deleteValue(note.contractAddress.toString(), noteIndex); - await this.#notesByStorageSlotAndScope.get(scope)!.deleteValue(note.storageSlot.toString(), noteIndex); + async #getNoteIndexForNullifier(nullifierKey: string, jobId?: string): Promise { + if (jobId) { + const jobStaging = this.#stagedData.get(jobId); + if (jobStaging) { + for (const [noteIndex, data] of jobStaging.addedNotes) { + if (data.siloedNullifier === nullifierKey) { + return noteIndex; + } } + } + } + return await this.#nullifierToNoteId.getAsync(nullifierKey); + } - for (const scope of noteScopes) { - await this.#nullifiedNotesToScope.set(noteIndex, scope); - } - await this.#nullifiedNotes.set(noteIndex, note.toBuffer()); - await this.#nullifiersByBlockNumber.set(blockNumber, nullifier.toString()); - await this.#nullifiedNotesByContract.set(note.contractAddress.toString(), noteIndex); - await this.#nullifiedNotesByStorageSlot.set(note.storageSlot.toString(), noteIndex); - await this.#nullifiedNotesByNullifier.set(nullifier.toString(), noteIndex); + async #getNoteBuffer(noteIndex: string, jobId?: string): Promise { + if (jobId) { + const jobStaging = this.#stagedData.get(jobId); + if (jobStaging?.addedNotes.has(noteIndex)) { + return jobStaging.addedNotes.get(noteIndex)!.noteBuffer; + } + } + return await this.#notes.getAsync(noteIndex); + } - await this.#nullifierToNoteId.delete(nullifier.toString()); + /** + * Commits staged data to main storage. 
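+ * Notes that the same job both added and nullified never touch the active indexes;
+ * they are written straight to the nullified ones, so after commit they only surface
+ * under NoteStatus.ACTIVE_OR_NULLIFIED queries.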
+ * Must be called within a transaction by the JobCoordinator. + * @param jobId - The job ID identifying the staged data to commit + */ + async commit(jobId: string): Promise<void> { + const jobStaging = this.#stagedData.get(jobId); + if (!jobStaging) { + return; + } + + // Find notes that are both added and nullified in this job (skip active storage for these) + const addedThenNullified = new Set<string>(); + for (const noteIndex of jobStaging.nullifiedNotes.keys()) { + if (jobStaging.addedNotes.has(noteIndex)) { + addedThenNullified.add(noteIndex); } - return nullifiedNotes; - }); + } + + // Ensure all required scopes exist + const scopesToAdd = new Set<string>(); + for (const data of jobStaging.addedNotes.values()) { + scopesToAdd.add(data.scope); + } + for (const scope of scopesToAdd) { + if (!(await this.#scopes.hasAsync(scope))) { + await this.addScope(AztecAddress.fromString(scope)); + } + } + + // Add notes that aren't immediately nullified + for (const [noteIndex, data] of jobStaging.addedNotes) { + if (addedThenNullified.has(noteIndex)) { + continue; + } + await this.#addToActiveDbIndexes(noteIndex, data); + } + + // Nullify notes + for (const [noteIndex, data] of jobStaging.nullifiedNotes) { + let noteScopes: string[]; + if (addedThenNullified.has(noteIndex)) { + // Here we're nullifying a note that was added during this job, + // so we just read its scope and skip to indexing it as nullified + noteScopes = [jobStaging.addedNotes.get(noteIndex)!.scope]; + } else { + // Here we're nullifying a note that was in the DB before this job, + // so we need to remove it from the indexes that track it as active + noteScopes = await toArray(this.#notesToScope.getValuesAsync(noteIndex)); + await this.#removeFromActiveNoteDbIndexes(noteIndex, data.contractAddress, data.storageSlot, data.nullifier); + } + + await this.#addToNullifiedDbIndexes(noteIndex, data, noteScopes); + } + + this.#stagedData.delete(jobId); + } + + /** + * Adds a note to active storage and scope indexes. + */ + async #addToActiveDbIndexes(noteIndex: string, data: StagedAddNote): Promise<void> { + await Promise.all([ + this.#notes.set(noteIndex, data.noteBuffer), + this.#notesToScope.set(noteIndex, data.scope), + this.#nullifierToNoteId.set(data.siloedNullifier, noteIndex), + this.#notesByContractAndScope.get(data.scope)!.set(data.contractAddress, noteIndex), + this.#notesByStorageSlotAndScope.get(data.scope)!.set(data.storageSlot, noteIndex), + ]); + } + + /** + * Removes a note from active storage and all scope indexes. + */ + async #removeFromActiveNoteDbIndexes( + noteIndex: string, + contractAddress: string, + storageSlot: string, + nullifier: string, + ): Promise<void> { + const scopes = await toArray(this.#scopes.keysAsync()); + + await Promise.all([ + this.#notes.delete(noteIndex), + this.#notesToScope.delete(noteIndex), + this.#nullifierToNoteId.delete(nullifier), + ...scopes.flatMap(scope => [ + this.#notesByContractAndScope.get(scope)?.deleteValue(contractAddress, noteIndex), + this.#notesByStorageSlotAndScope.get(scope)?.deleteValue(storageSlot, noteIndex), + ]), + ]); + } + + /** + * Adds a note to all nullified indexes. 
+ */ + async #addToNullifiedDbIndexes(noteIndex: string, data: StagedNullifyNote, scopes: string[]): Promise { + await Promise.all([ + ...scopes.map(scope => this.#nullifiedNotesToScope.set(noteIndex, scope)), + this.#nullifiedNotes.set(noteIndex, data.noteBuffer), + this.#nullifiersByBlockNumber.set(data.blockNumber, data.nullifier), + this.#nullifiedNotesByContract.set(data.contractAddress, noteIndex), + this.#nullifiedNotesByStorageSlot.set(data.storageSlot, noteIndex), + this.#nullifiedNotesByNullifier.set(data.nullifier, noteIndex), + ]); + } + + /** + * Discards staged data without committing. + */ + discardStaged(jobId: string): Promise { + this.#stagedData.delete(jobId); + return Promise.resolve(); } } diff --git a/yarn-project/pxe/src/storage/private_event_store/private_event_store.test.ts b/yarn-project/pxe/src/storage/private_event_store/private_event_store.test.ts index 2b3e75b3fcf1..e91ed21aec02 100644 --- a/yarn-project/pxe/src/storage/private_event_store/private_event_store.test.ts +++ b/yarn-project/pxe/src/storage/private_event_store/private_event_store.test.ts @@ -10,6 +10,8 @@ import { TxHash } from '@aztec/stdlib/tx'; import type { PackedPrivateEvent } from '../../pxe.js'; import { PrivateEventStore } from './private_event_store.js'; +const TEST_JOB_ID = 'test-job'; + const getRandomMsgContent = () => { return [Fr.random(), Fr.random(), Fr.random()]; }; @@ -48,13 +50,14 @@ describe('PrivateEventStore', () => { }); it('stores and retrieves private events', async () => { - await privateEventStore.storePrivateEventLog(eventSelector, msgContent, eventCommitmentIndex, { - contractAddress, - scope, - txHash, - l2BlockNumber, - l2BlockHash, - }); + await privateEventStore.storePrivateEventLog( + eventSelector, + msgContent, + eventCommitmentIndex, + { contractAddress, scope, txHash, l2BlockNumber, l2BlockHash }, + TEST_JOB_ID, + ); + await privateEventStore.commit(TEST_JOB_ID); const events = await privateEventStore.getPrivateEvents(eventSelector, { contractAddress, fromBlock: l2BlockNumber, @@ -65,13 +68,14 @@ describe('PrivateEventStore', () => { }); it('ignores duplicate events with same eventCommitmentIndex', async () => { - await privateEventStore.storePrivateEventLog(eventSelector, msgContent, eventCommitmentIndex, { - contractAddress, - scope, - txHash, - l2BlockNumber, - l2BlockHash, - }); + await privateEventStore.storePrivateEventLog( + eventSelector, + msgContent, + eventCommitmentIndex, + { contractAddress, scope, txHash, l2BlockNumber, l2BlockHash }, + TEST_JOB_ID, + ); + await privateEventStore.commit(TEST_JOB_ID); const events = await privateEventStore.getPrivateEvents(eventSelector, { contractAddress, @@ -86,20 +90,21 @@ describe('PrivateEventStore', () => { it('allows multiple events with same content but different eventCommitmentIndex', async () => { const otherEventCommitmentIndex = eventCommitmentIndex + 1; - await privateEventStore.storePrivateEventLog(eventSelector, msgContent, eventCommitmentIndex, { - contractAddress, - scope, - txHash, - l2BlockNumber, - l2BlockHash, - }); - await privateEventStore.storePrivateEventLog(eventSelector, msgContent, otherEventCommitmentIndex, { - contractAddress, - scope, - txHash, - l2BlockNumber, - l2BlockHash, - }); + await privateEventStore.storePrivateEventLog( + eventSelector, + msgContent, + eventCommitmentIndex, + { contractAddress, scope, txHash, l2BlockNumber, l2BlockHash }, + TEST_JOB_ID, + ); + await privateEventStore.storePrivateEventLog( + eventSelector, + msgContent, + otherEventCommitmentIndex, + { 
contractAddress, scope, txHash, l2BlockNumber, l2BlockHash }, + TEST_JOB_ID, + ); + await privateEventStore.commit(TEST_JOB_ID); const events = await privateEventStore.getPrivateEvents(eventSelector, { contractAddress, @@ -118,27 +123,34 @@ describe('PrivateEventStore', () => { l2BlockNumber: BlockNumber(200), }; - await privateEventStore.storePrivateEventLog(eventSelector, getRandomMsgContent(), 0, { - contractAddress, - scope, - txHash: TxHash.random(), - l2BlockNumber: BlockNumber(100), - l2BlockHash, - }); - await privateEventStore.storePrivateEventLog(eventSelector, msgContent, 1, { - contractAddress, - scope, - txHash: expectedEvent.txHash, - l2BlockNumber: expectedEvent.l2BlockNumber, - l2BlockHash: expectedEvent.l2BlockHash, - }); - await privateEventStore.storePrivateEventLog(eventSelector, getRandomMsgContent(), 2, { - contractAddress, - scope, - txHash: TxHash.random(), - l2BlockNumber: BlockNumber(300), - l2BlockHash, - }); + await privateEventStore.storePrivateEventLog( + eventSelector, + getRandomMsgContent(), + 0, + { contractAddress, scope, txHash: TxHash.random(), l2BlockNumber: BlockNumber(100), l2BlockHash }, + TEST_JOB_ID, + ); + await privateEventStore.storePrivateEventLog( + eventSelector, + msgContent, + 1, + { + contractAddress, + scope, + txHash: expectedEvent.txHash, + l2BlockNumber: expectedEvent.l2BlockNumber, + l2BlockHash: expectedEvent.l2BlockHash, + }, + TEST_JOB_ID, + ); + await privateEventStore.storePrivateEventLog( + eventSelector, + getRandomMsgContent(), + 2, + { contractAddress, scope, txHash: TxHash.random(), l2BlockNumber: BlockNumber(300), l2BlockHash }, + TEST_JOB_ID, + ); + await privateEventStore.commit(TEST_JOB_ID); const events = await privateEventStore.getPrivateEvents(eventSelector, { contractAddress, @@ -153,20 +165,21 @@ describe('PrivateEventStore', () => { it('filters events by recipient', async () => { const otherScope = await AztecAddress.random(); - await privateEventStore.storePrivateEventLog(eventSelector, msgContent, eventCommitmentIndex, { - contractAddress, - scope, - txHash, - l2BlockNumber, - l2BlockHash, - }); - await privateEventStore.storePrivateEventLog(eventSelector, msgContent, eventCommitmentIndex + 1, { - contractAddress, - scope: otherScope, - txHash: TxHash.random(), - l2BlockNumber, - l2BlockHash, - }); + await privateEventStore.storePrivateEventLog( + eventSelector, + msgContent, + eventCommitmentIndex, + { contractAddress, scope, txHash, l2BlockNumber, l2BlockHash }, + TEST_JOB_ID, + ); + await privateEventStore.storePrivateEventLog( + eventSelector, + msgContent, + eventCommitmentIndex + 1, + { contractAddress, scope: otherScope, txHash: TxHash.random(), l2BlockNumber, l2BlockHash }, + TEST_JOB_ID, + ); + await privateEventStore.commit(TEST_JOB_ID); const events = await privateEventStore.getPrivateEvents(eventSelector, { contractAddress, @@ -201,38 +214,175 @@ describe('PrivateEventStore', () => { }); it('returns events in order by eventCommitmentIndex', async () => { - await privateEventStore.storePrivateEventLog(eventSelector, msgContent2, 1, { + await privateEventStore.storePrivateEventLog( + eventSelector, + msgContent2, + 1, + { contractAddress, scope, txHash: TxHash.random(), l2BlockNumber: BlockNumber(200), l2BlockHash }, + TEST_JOB_ID, + ); + + await privateEventStore.storePrivateEventLog( + eventSelector, + msgContent1, + 0, + { contractAddress, scope, txHash: TxHash.random(), l2BlockNumber: BlockNumber(100), l2BlockHash }, + TEST_JOB_ID, + ); + + await privateEventStore.storePrivateEventLog( + 
eventSelector, + msgContent3, + 2, + { contractAddress, scope, txHash: TxHash.random(), l2BlockNumber: BlockNumber(300), l2BlockHash }, + TEST_JOB_ID, + ); + await privateEventStore.commit(TEST_JOB_ID); + + const events = await privateEventStore.getPrivateEvents(eventSelector, { contractAddress, - scope, - txHash: TxHash.random(), - l2BlockNumber: BlockNumber(200), - l2BlockHash, + fromBlock: 0, + toBlock: 0 + 1000, + scopes: [scope], }); - await privateEventStore.storePrivateEventLog(eventSelector, msgContent1, 0, { + expect(events.map(e => e.packedEvent)).toEqual([msgContent1, msgContent2, msgContent3]); + }); + }); + + describe('staging', () => { + it('stages events without affecting committed storage', async () => { + const commitJobId: string = 'commit-job'; + const stagingJobId: string = 'staging-job'; + + // Store committed event + await privateEventStore.storePrivateEventLog( + eventSelector, + msgContent, + eventCommitmentIndex, + { contractAddress, scope, txHash, l2BlockNumber, l2BlockHash }, + commitJobId, + ); + await privateEventStore.commit(commitJobId); + + // Store staged event (not committed) + const stagedMsgContent = getRandomMsgContent(); + await privateEventStore.storePrivateEventLog( + eventSelector, + stagedMsgContent, + eventCommitmentIndex + 1, + { contractAddress, scope, txHash: TxHash.random(), l2BlockNumber, l2BlockHash }, + stagingJobId, + ); + + // Without jobId, should only see committed event + const events = await privateEventStore.getPrivateEvents(eventSelector, { contractAddress, - scope, - txHash: TxHash.random(), - l2BlockNumber: BlockNumber(100), - l2BlockHash, + fromBlock: l2BlockNumber, + toBlock: l2BlockNumber + 1, + scopes: [scope], }); + expect(events).toHaveLength(1); + expect(events[0].packedEvent).toEqual(msgContent); + }); + + it('staged events are visible when reading with jobId', async () => { + const stagingJobId: string = 'staging-job'; - await privateEventStore.storePrivateEventLog(eventSelector, msgContent3, 2, { + const stagedMsgContent = getRandomMsgContent(); + await privateEventStore.storePrivateEventLog( + eventSelector, + stagedMsgContent, + eventCommitmentIndex, + { contractAddress, scope, txHash, l2BlockNumber, l2BlockHash }, + stagingJobId, + ); + + // Without jobId, should not see the staged event + const eventsWithoutJobId = await privateEventStore.getPrivateEvents(eventSelector, { contractAddress, - scope, - txHash: TxHash.random(), - l2BlockNumber: BlockNumber(300), - l2BlockHash, + fromBlock: l2BlockNumber, + toBlock: l2BlockNumber + 1, + scopes: [scope], }); + expect(eventsWithoutJobId).toHaveLength(0); + // With jobId, should see the staged event + const eventsWithJobId = await privateEventStore.getPrivateEvents( + eventSelector, + { + contractAddress, + fromBlock: l2BlockNumber, + toBlock: l2BlockNumber + 1, + scopes: [scope], + }, + stagingJobId, + ); + expect(eventsWithJobId).toHaveLength(1); + expect(eventsWithJobId[0].packedEvent).toEqual(stagedMsgContent); + }); + + it('commit promotes staged events to main storage', async () => { + const stagingJobId: string = 'staging-job'; + + const stagedMsgContent = getRandomMsgContent(); + await privateEventStore.storePrivateEventLog( + eventSelector, + stagedMsgContent, + eventCommitmentIndex, + { contractAddress, scope, txHash, l2BlockNumber, l2BlockHash }, + stagingJobId, + ); + + await privateEventStore.commit(stagingJobId); + + // Now should see the event without jobId const events = await privateEventStore.getPrivateEvents(eventSelector, { contractAddress, - fromBlock: 
0, - toBlock: 0 + 1000, + fromBlock: l2BlockNumber, + toBlock: l2BlockNumber + 1, scopes: [scope], }); + expect(events).toHaveLength(1); + expect(events[0].packedEvent).toEqual(stagedMsgContent); + }); - expect(events.map(e => e.packedEvent)).toEqual([msgContent1, msgContent2, msgContent3]); + it('discardStaged removes staged events without affecting main', async () => { + const commitJobId: string = 'commit-job'; + const stagingJobId: string = 'staging-job'; + + // Store committed event + await privateEventStore.storePrivateEventLog( + eventSelector, + msgContent, + eventCommitmentIndex, + { contractAddress, scope, txHash, l2BlockNumber, l2BlockHash }, + commitJobId, + ); + await privateEventStore.commit(commitJobId); + + // Store staged event (not committed) + const stagedMsgContent = getRandomMsgContent(); + await privateEventStore.storePrivateEventLog( + eventSelector, + stagedMsgContent, + eventCommitmentIndex + 1, + { contractAddress, scope, txHash: TxHash.random(), l2BlockNumber, l2BlockHash }, + stagingJobId, + ); + + // Discard staging + await privateEventStore.discardStaged(stagingJobId); + + // Should only see committed event + const events = await privateEventStore.getPrivateEvents(eventSelector, { + contractAddress, + fromBlock: l2BlockNumber, + toBlock: l2BlockNumber + 1, + scopes: [scope], + }); + expect(events).toHaveLength(1); + expect(events[0].packedEvent).toEqual(msgContent); }); }); }); diff --git a/yarn-project/pxe/src/storage/private_event_store/private_event_store.ts b/yarn-project/pxe/src/storage/private_event_store/private_event_store.ts index 1207f8389203..d9d9a76e0852 100644 --- a/yarn-project/pxe/src/storage/private_event_store/private_event_store.ts +++ b/yarn-project/pxe/src/storage/private_event_store/private_event_store.ts @@ -8,6 +8,7 @@ import type { AztecAddress } from '@aztec/stdlib/aztec-address'; import { L2BlockHash } from '@aztec/stdlib/block'; import { type InTx, TxHash } from '@aztec/stdlib/tx'; +import type { StagedStore } from '../../job_coordinator/job_coordinator.js'; import type { PackedPrivateEvent } from '../../pxe.js'; export type PrivateEventStoreFilter = { @@ -34,7 +35,9 @@ type PrivateEventMetadata = InTx & { /** * Stores decrypted private event logs. */ -export class PrivateEventStore { +export class PrivateEventStore implements StagedStore { + readonly storeName = 'private_events'; + #store: AztecAsyncKVStore; /** Array storing the actual private event log entries containing the log content and block number */ #eventLogs: AztecAsyncArray; @@ -42,6 +45,8 @@ export class PrivateEventStore { #eventLogIndex: AztecAsyncMap; /** Map from eventCommitmentIndex to boolean indicating if log has been seen. 
*/ #seenLogs: AztecAsyncMap<number, boolean>; + /** In-memory staging: jobId -> eventCommitmentIndex -> staged data */ + #stagedEvents: Map<string, Map<number, { entry: PrivateEventEntry; key: string }>>; logger = createLogger('private_event_store'); @@ -50,12 +55,44 @@ export class PrivateEventStore { this.#eventLogs = this.#store.openArray('private_event_logs'); this.#eventLogIndex = this.#store.openMap('private_event_log_index'); this.#seenLogs = this.#store.openMap('seen_logs'); + this.#stagedEvents = new Map(); } #keyFor(contractAddress: AztecAddress, scope: AztecAddress, eventSelector: EventSelector): string { return `${contractAddress.toString()}_${scope.toString()}_${eventSelector.toString()}`; } + #entryToEvent(entry: PrivateEventEntry, eventSelector: EventSelector): PackedPrivateEvent { + const reader = BufferReader.asReader(entry.msgContent); + const numFields = entry.msgContent.length / Fr.SIZE_IN_BYTES; + return { + packedEvent: reader.readArray(numFields, Fr), + l2BlockNumber: BlockNumber(entry.l2BlockNumber), + txHash: TxHash.fromBuffer(entry.txHash), + l2BlockHash: L2BlockHash.fromBuffer(entry.l2BlockHash), + eventSelector, + }; + } + + #entryMatchesFilter(entry: PrivateEventEntry, filter: PrivateEventStoreFilter): boolean { + return ( + entry.l2BlockNumber >= filter.fromBlock && + entry.l2BlockNumber < filter.toBlock && + (!filter.txHash || TxHash.fromBuffer(entry.txHash).equals(filter.txHash)) + ); + } + + /** Checks if an event has been seen (committed or staged) */ + async #hasBeenSeen(eventCommitmentIndex: number, jobId?: string): Promise<boolean> { + if (jobId) { + const jobStaging = this.#stagedEvents.get(jobId); + if (jobStaging?.has(eventCommitmentIndex)) { + return true; + } + } + return !!(await this.#seenLogs.getAsync(eventCommitmentIndex)); + } + /** * Store a private event log. * @param eventSelector - The event selector of the event. @@ -66,97 +103,128 @@ * scope - The address to which the event is scoped. * txHash - The transaction hash of the event log. * blockNumber - The block number in which the event was emitted. 
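+ *
+ * Duplicates are detected against both committed and staged state, keyed by
+ * eventCommitmentIndex. For instance (values illustrative):
+ *
+ *   await store.storePrivateEventLog(selector, fields, 7, meta, 'job-1'); // staged
+ *   await store.storePrivateEventLog(selector, fields, 7, meta, 'job-1'); // ignored: duplicate
+ *   await store.commit('job-1'); // index 7 is now committed and stays deduplicated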
+ * @param jobId - The job ID for staging writes */ - storePrivateEventLog( + async storePrivateEventLog( eventSelector: EventSelector, msgContent: Fr[], eventCommitmentIndex: number, metadata: PrivateEventMetadata, + jobId: string, ): Promise<void> { const { contractAddress, scope, txHash, l2BlockNumber, l2BlockHash } = metadata; + const key = this.#keyFor(contractAddress, scope, eventSelector); - return this.#store.transactionAsync(async () => { - const key = this.#keyFor(contractAddress, scope, eventSelector); - - // Check if this exact log has already been stored using eventCommitmentIndex as unique identifier - const hasBeenSeen = await this.#seenLogs.getAsync(eventCommitmentIndex); - if (hasBeenSeen) { - this.logger.verbose('Ignoring duplicate event log', { txHash: txHash.toString(), eventCommitmentIndex }); - return; - } - - this.logger.verbose('storing private event log', { contractAddress, scope, msgContent, l2BlockNumber }); - - const index = await this.#eventLogs.lengthAsync(); - await this.#eventLogs.push({ - msgContent: serializeToBuffer(msgContent), - l2BlockNumber, - l2BlockHash: l2BlockHash.toBuffer(), - eventCommitmentIndex, - txHash: txHash.toBuffer(), - }); - - const existingIndices = (await this.#eventLogIndex.getAsync(key)) || []; - await this.#eventLogIndex.set(key, [...existingIndices, index]); + // Check for duplicates (both committed and staged) + if (await this.#hasBeenSeen(eventCommitmentIndex, jobId)) { + this.logger.verbose('Ignoring duplicate event log', { txHash: txHash.toString(), eventCommitmentIndex }); + return; + } - // Mark this log as seen using eventCommitmentIndex - await this.#seenLogs.set(eventCommitmentIndex, true); - }); + const entry: PrivateEventEntry = { + msgContent: serializeToBuffer(msgContent), + l2BlockNumber, + l2BlockHash: l2BlockHash.toBuffer(), + eventCommitmentIndex, + txHash: txHash.toBuffer(), + }; + + this.logger.verbose('staging private event log', { contractAddress, scope, msgContent, l2BlockNumber }); + let jobStaging = this.#stagedEvents.get(jobId); + if (!jobStaging) { + jobStaging = new Map(); + this.#stagedEvents.set(jobId, jobStaging); + } + jobStaging.set(eventCommitmentIndex, { entry, key }); } /** * Returns the private events given search parameters. + * * @param eventSelector - The event selector to filter by. * @param filter - Filtering criteria: * contractAddress: The address of the contract to get events from. * fromBlock: The block number to search from (inclusive). * toBlock: The block number to search up to (exclusive). * scopes: The addresses that decrypted the logs. + * @param jobId - Optional job ID to include staged events * @returns - The event log contents, augmented with metadata about * the transaction and block in which the event was included. 
*/ public async getPrivateEvents( eventSelector: EventSelector, filter: PrivateEventStoreFilter, + jobId?: string, ): Promise<PackedPrivateEvent[]> { - const events: Array<{ eventCommitmentIndex: number; event: PackedPrivateEvent }> = []; + const eventsMap = new Map<number, PackedPrivateEvent>(); + // Build set of valid keys for this query + const validKeys = new Set<string>(); for (const scope of filter.scopes) { - const key = this.#keyFor(filter.contractAddress, scope, eventSelector); + validKeys.add(this.#keyFor(filter.contractAddress, scope, eventSelector)); + } + + // Get committed events + for (const key of validKeys) { const indices = (await this.#eventLogIndex.getAsync(key)) || []; for (const index of indices) { const entry = await this.#eventLogs.atAsync(index); - if (!entry || entry.l2BlockNumber < filter.fromBlock || entry.l2BlockNumber >= filter.toBlock) { + if (!entry || !this.#entryMatchesFilter(entry, filter)) { continue; } + eventsMap.set(entry.eventCommitmentIndex, this.#entryToEvent(entry, eventSelector)); + } + } - // Convert buffer back to Fr array - const reader = BufferReader.asReader(entry.msgContent); - const numFields = entry.msgContent.length / Fr.SIZE_IN_BYTES; - const msgContent = reader.readArray(numFields, Fr); - const txHash = TxHash.fromBuffer(entry.txHash); - const l2BlockHash = L2BlockHash.fromBuffer(entry.l2BlockHash); - - if (filter.txHash && !txHash.equals(filter.txHash)) { - continue; + // Get staged events if a job ID is provided + if (jobId) { + const jobStaging = this.#stagedEvents.get(jobId); + if (jobStaging) { + for (const [eventCommitmentIndex, { entry, key }] of jobStaging) { + if (!validKeys.has(key) || !this.#entryMatchesFilter(entry, filter)) { + continue; + } + eventsMap.set(eventCommitmentIndex, this.#entryToEvent(entry, eventSelector)); + } } - - events.push({ - eventCommitmentIndex: entry.eventCommitmentIndex, - event: { - packedEvent: msgContent, - l2BlockNumber: BlockNumber(entry.l2BlockNumber), - txHash, - l2BlockHash, - eventSelector, - }, - }); } } - // Sort by eventCommitmentIndex only - events.sort((a, b) => a.eventCommitmentIndex - b.eventCommitmentIndex); - return events.map(ev => ev.event); + // Sort by eventCommitmentIndex and return + const sortedEntries = Array.from(eventsMap.entries()).sort((a, b) => a[0] - b[0]); + return sortedEntries.map(([_, event]) => event); + } + + /** + * Commits staged data to main storage. + * Must be called within a transaction by the JobCoordinator. + * @param jobId - The job ID identifying which staged data to commit + */ + async commit(jobId: string): Promise<void> { + const jobStaging = this.#stagedEvents.get(jobId); + if (!jobStaging) { + return; + } + + for (const [eventCommitmentIndex, { entry, key }] of jobStaging) { + const index = await this.#eventLogs.lengthAsync(); + await this.#eventLogs.push(entry); + + const existingIndices = (await this.#eventLogIndex.getAsync(key)) || []; + await this.#eventLogIndex.set(key, [...existingIndices, index]); + + await this.#seenLogs.set(eventCommitmentIndex, true); + } + + this.#stagedEvents.delete(jobId); + } + + /** + * Discards staged data without committing. 
+ * @param jobId - The job ID whose staged data to discard + */ + discardStaged(jobId: string): Promise<void> { + this.#stagedEvents.delete(jobId); + return Promise.resolve(); + } } diff --git a/yarn-project/pxe/src/storage/tagging_store/recipient_tagging_store.ts b/yarn-project/pxe/src/storage/tagging_store/recipient_tagging_store.ts index 3c9c4d77a883..47bc699d75bb 100644 --- a/yarn-project/pxe/src/storage/tagging_store/recipient_tagging_store.ts +++ b/yarn-project/pxe/src/storage/tagging_store/recipient_tagging_store.ts @@ -1,6 +1,10 @@ import type { AztecAsyncKVStore, AztecAsyncMap } from '@aztec/kv-store'; import type { DirectionalAppTaggingSecret } from '@aztec/stdlib/logs'; +import type { StagedStore } from '../../job_coordinator/job_coordinator.js'; + +type StagedIndexes = { highestAgedIndex?: number; highestFinalizedIndex?: number }; + /** * Data provider of tagging data used when syncing the logs as a recipient. The sender counterpart of this class * is called SenderTaggingStore. We have the providers separate for the sender and recipient because * * TODO(benesjan): Relocate to yarn-project/pxe/src/storage/tagging_store */ -export class RecipientTaggingStore { +export class RecipientTaggingStore implements StagedStore { + readonly storeName = 'recipient_tagging'; + #store: AztecAsyncKVStore; #highestAgedIndex: AztecAsyncMap<string, number>; #highestFinalizedIndex: AztecAsyncMap<string, number>; + /** In-memory staging: jobId -> secret -> { highestAgedIndex?, highestFinalizedIndex? } */ + #stagedIndexes: Map<string, Map<string, StagedIndexes>>; + constructor(store: AztecAsyncKVStore) { this.#store = store; this.#highestAgedIndex = this.#store.openMap('highest_aged_index'); this.#highestFinalizedIndex = this.#store.openMap('highest_finalized_index'); + this.#stagedIndexes = new Map(); } - getHighestAgedIndex(secret: DirectionalAppTaggingSecret): Promise<number | undefined> { + getHighestAgedIndex(secret: DirectionalAppTaggingSecret, jobId?: string): Promise<number | undefined> { + if (jobId) { + const jobStaging = this.#stagedIndexes.get(jobId); + const staged = jobStaging?.get(secret.toString()); + if (staged?.highestAgedIndex !== undefined) { + return Promise.resolve(staged.highestAgedIndex); + } + } return this.#highestAgedIndex.getAsync(secret.toString()); } - async updateHighestAgedIndex(secret: DirectionalAppTaggingSecret, index: number): Promise<void> { - const currentIndex = await this.#highestAgedIndex.getAsync(secret.toString()); + async updateHighestAgedIndex(secret: DirectionalAppTaggingSecret, index: number, jobId: string): Promise<void> { + const currentIndex = await this.getHighestAgedIndex(secret, jobId); if (currentIndex !== undefined && index <= currentIndex) { // Log sync should never set a lower highest aged index. 
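+      // Note that because the read above goes through getHighestAgedIndex(secret, jobId),
+      // an index staged by this job is enforced here even before it is committed.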
throw new Error(`New highest aged index (${index}) must be higher than the current one (${currentIndex})`); } - await this.#highestAgedIndex.set(secret.toString(), index); + + // Stage the update + let jobStaging = this.#stagedIndexes.get(jobId); + if (!jobStaging) { + jobStaging = new Map(); + this.#stagedIndexes.set(jobId, jobStaging); + } + const existing = jobStaging.get(secret.toString()) || {}; + jobStaging.set(secret.toString(), { ...existing, highestAgedIndex: index }); } - getHighestFinalizedIndex(secret: DirectionalAppTaggingSecret): Promise { + getHighestFinalizedIndex(secret: DirectionalAppTaggingSecret, jobId?: string): Promise { + if (jobId) { + const jobStaging = this.#stagedIndexes.get(jobId); + const staged = jobStaging?.get(secret.toString()); + if (staged?.highestFinalizedIndex !== undefined) { + return Promise.resolve(staged.highestFinalizedIndex); + } + } return this.#highestFinalizedIndex.getAsync(secret.toString()); } - async updateHighestFinalizedIndex(secret: DirectionalAppTaggingSecret, index: number): Promise { - const currentIndex = await this.#highestFinalizedIndex.getAsync(secret.toString()); + async updateHighestFinalizedIndex(secret: DirectionalAppTaggingSecret, index: number, jobId: string): Promise { + const currentIndex = await this.getHighestFinalizedIndex(secret, jobId); if (currentIndex !== undefined && index < currentIndex) { // Log sync should never set a lower highest finalized index but it can happen that it would try to set the same // one because we are loading logs from highest aged index + 1 and not from the highest finalized index. throw new Error(`New highest finalized index (${index}) must be higher than the current one (${currentIndex})`); } - await this.#highestFinalizedIndex.set(secret.toString(), index); + + let jobStaging = this.#stagedIndexes.get(jobId); + if (!jobStaging) { + jobStaging = new Map(); + this.#stagedIndexes.set(jobId, jobStaging); + } + const existing = jobStaging.get(secret.toString()) || {}; + jobStaging.set(secret.toString(), { ...existing, highestFinalizedIndex: index }); + } + + async commit(jobId: string): Promise { + const jobStaging = this.#stagedIndexes.get(jobId); + if (!jobStaging) { + return; + } + + for (const [secretStr, staged] of jobStaging) { + if (staged.highestAgedIndex !== undefined) { + await this.#highestAgedIndex.set(secretStr, staged.highestAgedIndex); + } + if (staged.highestFinalizedIndex !== undefined) { + await this.#highestFinalizedIndex.set(secretStr, staged.highestFinalizedIndex); + } + } + + this.#stagedIndexes.delete(jobId); + } + + discardStaged(jobId: string): Promise { + this.#stagedIndexes.delete(jobId); + return Promise.resolve(); } } diff --git a/yarn-project/pxe/src/storage/tagging_store/sender_tagging_store.test.ts b/yarn-project/pxe/src/storage/tagging_store/sender_tagging_store.test.ts index d6336e68cfd0..c00a847a3b87 100644 --- a/yarn-project/pxe/src/storage/tagging_store/sender_tagging_store.test.ts +++ b/yarn-project/pxe/src/storage/tagging_store/sender_tagging_store.test.ts @@ -6,6 +6,8 @@ import { TxHash } from '@aztec/stdlib/tx'; import { UNFINALIZED_TAGGING_INDEXES_WINDOW_LEN } from '../../tagging/index.js'; import { SenderTaggingStore } from './sender_tagging_store.js'; +const TEST_JOB_ID = 'test-job'; + describe('SenderTaggingStore', () => { let taggingStore: SenderTaggingStore; let secret1: DirectionalAppTaggingSecret; @@ -20,9 +22,11 @@ describe('SenderTaggingStore', () => { describe('storePendingIndexes', () => { it('stores a single pending index', async () => { 
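+
+      // Mutations below are staged under TEST_JOB_ID and only become visible to
+      // readers such as getTxHashesOfPendingIndexes after commit(TEST_JOB_ID).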
const txHash = TxHash.random(); + const preTag: PreTag = { secret: secret1, index: 5 }; - await taggingStore.storePendingIndexes([preTag], txHash); + await taggingStore.storePendingIndexes([preTag], txHash, TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); const txHashes = await taggingStore.getTxHashesOfPendingIndexes(secret1, 0, 10); expect(txHashes).toHaveLength(1); @@ -36,7 +40,8 @@ describe('SenderTaggingStore', () => { { secret: secret2, index: 7 }, ]; - await taggingStore.storePendingIndexes(preTags, txHash); + await taggingStore.storePendingIndexes(preTags, txHash, TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); const txHashes1 = await taggingStore.getTxHashesOfPendingIndexes(secret1, 0, 10); expect(txHashes1).toHaveLength(1); @@ -51,8 +56,9 @@ describe('SenderTaggingStore', () => { const txHash1 = TxHash.random(); const txHash2 = TxHash.random(); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash2); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, TEST_JOB_ID); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash2, TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); const txHashes = await taggingStore.getTxHashesOfPendingIndexes(secret1, 0, 10); expect(txHashes).toHaveLength(2); @@ -64,8 +70,9 @@ describe('SenderTaggingStore', () => { const txHash = TxHash.random(); const preTag: PreTag = { secret: secret1, index: 5 }; - await taggingStore.storePendingIndexes([preTag], txHash); - await taggingStore.storePendingIndexes([preTag], txHash); + await taggingStore.storePendingIndexes([preTag], txHash, TEST_JOB_ID); + await taggingStore.storePendingIndexes([preTag], txHash, TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); const txHashes = await taggingStore.getTxHashesOfPendingIndexes(secret1, 0, 10); expect(txHashes).toHaveLength(1); @@ -79,7 +86,7 @@ describe('SenderTaggingStore', () => { { secret: secret1, index: 7 }, ]; - await expect(taggingStore.storePendingIndexes(preTags, txHash)).rejects.toThrow( + await expect(taggingStore.storePendingIndexes(preTags, txHash, TEST_JOB_ID)).rejects.toThrow( 'Duplicate secrets found when storing pending indexes', ); }); @@ -88,12 +95,13 @@ describe('SenderTaggingStore', () => { const txHash = TxHash.random(); // First store an index - await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash, TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); // Try to store a different index for the same secret + txHash pair - await expect(taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash)).rejects.toThrow( - /Cannot store index 7.*a different index 5 already exists/, - ); + await expect( + taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash, TEST_JOB_ID), + ).rejects.toThrow(/Cannot store index 7.*a different index 5 already exists/); }); it('throws when storing a pending index lower than the last finalized index', async () => { @@ -101,13 +109,14 @@ describe('SenderTaggingStore', () => { const txHash2 = TxHash.random(); // First store and finalize an index - await taggingStore.storePendingIndexes([{ secret: secret1, index: 10 }], txHash1); - await taggingStore.finalizePendingIndexes([txHash1]); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 10 }], txHash1, TEST_JOB_ID); + await 
taggingStore.finalizePendingIndexes([txHash1], TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); // Try to store a pending index lower than the finalized index - await expect(taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash2)).rejects.toThrow( - /Cannot store pending index 5.*lower than or equal to the last finalized index 10/, - ); + await expect( + taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash2, TEST_JOB_ID), + ).rejects.toThrow(/Cannot store pending index 5.*lower than or equal to the last finalized index 10/); }); it('throws when storing a pending index equal to the last finalized index', async () => { @@ -115,13 +124,14 @@ describe('SenderTaggingStore', () => { const txHash2 = TxHash.random(); // First store and finalize an index - await taggingStore.storePendingIndexes([{ secret: secret1, index: 10 }], txHash1); - await taggingStore.finalizePendingIndexes([txHash1]); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 10 }], txHash1, TEST_JOB_ID); + await taggingStore.finalizePendingIndexes([txHash1], TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); // Try to store a pending index equal to the finalized index - await expect(taggingStore.storePendingIndexes([{ secret: secret1, index: 10 }], txHash2)).rejects.toThrow( - /Cannot store pending index 10.*lower than or equal to the last finalized index 10/, - ); + await expect( + taggingStore.storePendingIndexes([{ secret: secret1, index: 10 }], txHash2, TEST_JOB_ID), + ).rejects.toThrow(/Cannot store pending index 10.*lower than or equal to the last finalized index 10/); }); it('allows storing a pending index higher than the last finalized index', async () => { @@ -129,11 +139,13 @@ describe('SenderTaggingStore', () => { const txHash2 = TxHash.random(); // First store and finalize an index - await taggingStore.storePendingIndexes([{ secret: secret1, index: 10 }], txHash1); - await taggingStore.finalizePendingIndexes([txHash1]); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 10 }], txHash1, TEST_JOB_ID); + await taggingStore.finalizePendingIndexes([txHash1], TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); // Store a pending index higher than the finalized index - should succeed - await expect(taggingStore.storePendingIndexes([{ secret: secret1, index: 15 }], txHash2)).resolves.not.toThrow(); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 15 }], txHash2, TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); const txHashes = await taggingStore.getTxHashesOfPendingIndexes(secret1, 0, 20); expect(txHashes).toHaveLength(1); @@ -148,12 +160,13 @@ describe('SenderTaggingStore', () => { const indexBeyondWindow = finalizedIndex + UNFINALIZED_TAGGING_INDEXES_WINDOW_LEN + 1; // First store and finalize an index - await taggingStore.storePendingIndexes([{ secret: secret1, index: finalizedIndex }], txHash1); - await taggingStore.finalizePendingIndexes([txHash1]); + await taggingStore.storePendingIndexes([{ secret: secret1, index: finalizedIndex }], txHash1, TEST_JOB_ID); + await taggingStore.finalizePendingIndexes([txHash1], TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); // Try to store an index beyond the window await expect( - taggingStore.storePendingIndexes([{ secret: secret1, index: indexBeyondWindow }], txHash2), + taggingStore.storePendingIndexes([{ secret: secret1, index: indexBeyondWindow }], txHash2, TEST_JOB_ID), ).rejects.toThrow( `Highest used index ${indexBeyondWindow} is further than window length 
from the highest finalized index ${finalizedIndex}`, ); @@ -166,13 +179,13 @@ describe('SenderTaggingStore', () => { const indexAtBoundary = finalizedIndex + UNFINALIZED_TAGGING_INDEXES_WINDOW_LEN; // First store and finalize an index - await taggingStore.storePendingIndexes([{ secret: secret1, index: finalizedIndex }], txHash1); - await taggingStore.finalizePendingIndexes([txHash1]); + await taggingStore.storePendingIndexes([{ secret: secret1, index: finalizedIndex }], txHash1, TEST_JOB_ID); + await taggingStore.finalizePendingIndexes([txHash1], TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); // Store an index at the boundary, but check is >, so it should succeed - await expect( - taggingStore.storePendingIndexes([{ secret: secret1, index: indexAtBoundary }], txHash2), - ).resolves.not.toThrow(); + await taggingStore.storePendingIndexes([{ secret: secret1, index: indexAtBoundary }], txHash2, TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); const txHashes = await taggingStore.getTxHashesOfPendingIndexes(secret1, 0, indexAtBoundary + 5); expect(txHashes).toHaveLength(1); @@ -192,9 +205,10 @@ describe('SenderTaggingStore', () => { const txHash2 = TxHash.random(); const txHash3 = TxHash.random(); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash2); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 8 }], txHash3); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, TEST_JOB_ID); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash2, TEST_JOB_ID); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 8 }], txHash3, TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); const txHashes = await taggingStore.getTxHashesOfPendingIndexes(secret1, 4, 9); expect(txHashes).toHaveLength(2); @@ -207,8 +221,9 @@ describe('SenderTaggingStore', () => { const txHash1 = TxHash.random(); const txHash2 = TxHash.random(); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash1); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 10 }], txHash2); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash1, TEST_JOB_ID); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 10 }], txHash2, TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); const txHashes = await taggingStore.getTxHashesOfPendingIndexes(secret1, 5, 10); expect(txHashes).toHaveLength(1); @@ -221,13 +236,14 @@ describe('SenderTaggingStore', () => { const txHash3 = TxHash.random(); const txHash4 = TxHash.random(); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash2); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, TEST_JOB_ID); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash2, TEST_JOB_ID); // We store different secret with txHash1 to check we correctly don't return it in the result - await taggingStore.storePendingIndexes([{ secret: secret2, index: 7 }], txHash1); + await taggingStore.storePendingIndexes([{ secret: secret2, index: 7 }], txHash1, TEST_JOB_ID); // Store "parallel" index for secret1 with a different tx (can happen when sending logs from multiple PXEs) - await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash3); - await 
taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash4); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash3, TEST_JOB_ID); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash4, TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); const txHashes = await taggingStore.getTxHashesOfPendingIndexes(secret1, 0, 10); // Should have 3 unique tx hashes for secret1 @@ -243,8 +259,9 @@ describe('SenderTaggingStore', () => { it('returns the last finalized index after finalizePendingIndexes', async () => { const txHash = TxHash.random(); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash); - await taggingStore.finalizePendingIndexes([txHash]); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash, TEST_JOB_ID); + await taggingStore.finalizePendingIndexes([txHash], TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); const lastFinalized = await taggingStore.getLastFinalizedIndex(secret1); expect(lastFinalized).toBe(5); @@ -259,8 +276,9 @@ describe('SenderTaggingStore', () => { it('returns the last finalized index when no pending indexes exist', async () => { const txHash = TxHash.random(); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash); - await taggingStore.finalizePendingIndexes([txHash]); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash, TEST_JOB_ID); + await taggingStore.finalizePendingIndexes([txHash], TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); const lastUsed = await taggingStore.getLastUsedIndex(secret1); expect(lastUsed).toBe(5); @@ -271,11 +289,13 @@ describe('SenderTaggingStore', () => { const txHash2 = TxHash.random(); // First, finalize an index - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1); - await taggingStore.finalizePendingIndexes([txHash1]); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, TEST_JOB_ID); + await taggingStore.finalizePendingIndexes([txHash1], TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); // Then add a higher pending index - await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash2); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash2, TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); const lastUsed = await taggingStore.getLastUsedIndex(secret1); expect(lastUsed).toBe(7); @@ -286,9 +306,10 @@ describe('SenderTaggingStore', () => { const txHash2 = TxHash.random(); const txHash3 = TxHash.random(); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash2); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash3); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, TEST_JOB_ID); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash2, TEST_JOB_ID); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash3, TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); const lastUsed = await taggingStore.getLastUsedIndex(secret1); expect(lastUsed).toBe(7); @@ -300,11 +321,11 @@ describe('SenderTaggingStore', () => { const txHash1 = TxHash.random(); const txHash2 = TxHash.random(); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1); - await taggingStore.storePendingIndexes([{ secret: 
secret2, index: 5 }], txHash1); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash2); - - await taggingStore.dropPendingIndexes([txHash1]); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, TEST_JOB_ID); + await taggingStore.storePendingIndexes([{ secret: secret2, index: 5 }], txHash1, TEST_JOB_ID); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash2, TEST_JOB_ID); + await taggingStore.dropPendingIndexes([txHash1], TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); // txHash1 should be removed const txHashes1 = await taggingStore.getTxHashesOfPendingIndexes(secret1, 0, 10); @@ -320,9 +341,9 @@ describe('SenderTaggingStore', () => { describe('finalizePendingIndexes', () => { it('moves pending index to finalized for a given tx hash', async () => { const txHash = TxHash.random(); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash); - - await taggingStore.finalizePendingIndexes([txHash]); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash, TEST_JOB_ID); + await taggingStore.finalizePendingIndexes([txHash], TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); const lastFinalized = await taggingStore.getLastFinalizedIndex(secret1); expect(lastFinalized).toBe(5); @@ -336,11 +357,13 @@ describe('SenderTaggingStore', () => { const txHash1 = TxHash.random(); const txHash2 = TxHash.random(); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1); - await taggingStore.finalizePendingIndexes([txHash1]); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, TEST_JOB_ID); + await taggingStore.finalizePendingIndexes([txHash1], TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash2); - await taggingStore.finalizePendingIndexes([txHash2]); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash2, TEST_JOB_ID); + await taggingStore.finalizePendingIndexes([txHash2], TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); const lastFinalized = await taggingStore.getLastFinalizedIndex(secret1); expect(lastFinalized).toBe(7); @@ -351,14 +374,15 @@ describe('SenderTaggingStore', () => { const txHash2 = TxHash.random(); // Store both pending indexes first - await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash1); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash2); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash1, TEST_JOB_ID); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash2, TEST_JOB_ID); // Finalize the higher index first - await taggingStore.finalizePendingIndexes([txHash1]); + await taggingStore.finalizePendingIndexes([txHash1], TEST_JOB_ID); // Then try to finalize the lower index - await taggingStore.finalizePendingIndexes([txHash2]); + await taggingStore.finalizePendingIndexes([txHash2], TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); const lastFinalized = await taggingStore.getLastFinalizedIndex(secret1); expect(lastFinalized).toBe(7); // Should remain at 7 @@ -369,12 +393,13 @@ describe('SenderTaggingStore', () => { const txHash2 = TxHash.random(); const txHash3 = TxHash.random(); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash2); - await 
taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash3); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, TEST_JOB_ID); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash2, TEST_JOB_ID); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash3, TEST_JOB_ID); // Finalize txHash2 (index 5) - await taggingStore.finalizePendingIndexes([txHash2]); + await taggingStore.finalizePendingIndexes([txHash2], TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); // txHash1 (index 3) should be pruned as it's lower than finalized // txHash3 (index 7) should remain @@ -391,9 +416,10 @@ describe('SenderTaggingStore', () => { { secret: secret2, index: 7 }, ], txHash, + TEST_JOB_ID, ); - - await taggingStore.finalizePendingIndexes([txHash]); + await taggingStore.finalizePendingIndexes([txHash], TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); const lastFinalized1 = await taggingStore.getLastFinalizedIndex(secret1); const lastFinalized2 = await taggingStore.getLastFinalizedIndex(secret2); @@ -404,9 +430,9 @@ describe('SenderTaggingStore', () => { it('does nothing when tx hash does not exist', async () => { const txHash = TxHash.random(); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash); - - await taggingStore.finalizePendingIndexes([TxHash.random()]); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash, TEST_JOB_ID); + await taggingStore.finalizePendingIndexes([TxHash.random()], TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); // Original pending index should still be there const txHashes = await taggingStore.getTxHashesOfPendingIndexes(secret1, 0, 10); @@ -424,22 +450,26 @@ describe('SenderTaggingStore', () => { const txHash2 = TxHash.random(); // Step 1: Add pending index - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); expect(await taggingStore.getLastUsedIndex(secret1)).toBe(3); expect(await taggingStore.getLastFinalizedIndex(secret1)).toBeUndefined(); // Step 2: Finalize the index - await taggingStore.finalizePendingIndexes([txHash1]); + await taggingStore.finalizePendingIndexes([txHash1], TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); expect(await taggingStore.getLastUsedIndex(secret1)).toBe(3); expect(await taggingStore.getLastFinalizedIndex(secret1)).toBe(3); // Step 3: Add a new higher pending index - await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash2); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash2, TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); expect(await taggingStore.getLastUsedIndex(secret1)).toBe(7); expect(await taggingStore.getLastFinalizedIndex(secret1)).toBe(3); // Step 4: Finalize the new index - await taggingStore.finalizePendingIndexes([txHash2]); + await taggingStore.finalizePendingIndexes([txHash2], TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); expect(await taggingStore.getLastUsedIndex(secret1)).toBe(7); expect(await taggingStore.getLastFinalizedIndex(secret1)).toBe(7); }); @@ -448,13 +478,15 @@ describe('SenderTaggingStore', () => { const txHash1 = TxHash.random(); const txHash2 = TxHash.random(); - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1); - await taggingStore.storePendingIndexes([{ secret: secret1, 
index: 5 }], txHash2); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, TEST_JOB_ID); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash2, TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); expect(await taggingStore.getLastUsedIndex(secret1)).toBe(5); // Drop txHash2 - await taggingStore.dropPendingIndexes([txHash2]); + await taggingStore.dropPendingIndexes([txHash2], TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); expect(await taggingStore.getLastUsedIndex(secret1)).toBe(3); }); @@ -465,14 +497,15 @@ describe('SenderTaggingStore', () => { const txHash3 = TxHash.random(); // Secret1: pending -> finalized - await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1); - await taggingStore.finalizePendingIndexes([txHash1]); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, TEST_JOB_ID); + await taggingStore.finalizePendingIndexes([txHash1], TEST_JOB_ID); // Secret2: pending (not finalized) - await taggingStore.storePendingIndexes([{ secret: secret2, index: 5 }], txHash2); + await taggingStore.storePendingIndexes([{ secret: secret2, index: 5 }], txHash2, TEST_JOB_ID); // Secret1: new pending - await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash3); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash3, TEST_JOB_ID); + await taggingStore.commit(TEST_JOB_ID); expect(await taggingStore.getLastFinalizedIndex(secret1)).toBe(3); expect(await taggingStore.getLastUsedIndex(secret1)).toBe(7); @@ -480,4 +513,146 @@ describe('SenderTaggingStore', () => { expect(await taggingStore.getLastUsedIndex(secret2)).toBe(5); }); }); + + describe('staging', () => { + it('writes to staging when jobId provided', async () => { + const committedTxHash = TxHash.random(); + const stagedTxHash = TxHash.random(); + const commitJobId: string = 'commit-job'; + const stagingJobId: string = 'staging-job'; + + // First set committed data + await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], committedTxHash, commitJobId); + await taggingStore.commit(commitJobId); + + // Then set staged data (not committed) + await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], stagedTxHash, stagingJobId); + + // Without jobId, should only get committed data + const txHashesWithoutJobId = await taggingStore.getTxHashesOfPendingIndexes(secret1, 0, 10); + expect(txHashesWithoutJobId).toHaveLength(1); + expect(txHashesWithoutJobId[0]).toEqual(committedTxHash); + + // With stagingJobId, should get both committed and staged data + const txHashesWithJobId = await taggingStore.getTxHashesOfPendingIndexes(secret1, 0, 10, stagingJobId); + expect(txHashesWithJobId).toHaveLength(2); + expect(txHashesWithJobId).toContainEqual(committedTxHash); + expect(txHashesWithJobId).toContainEqual(stagedTxHash); + }); + + it('stages finalized indexes separately', async () => { + const txHash1 = TxHash.random(); + const txHash2 = TxHash.random(); + const commitJobId: string = 'commit-job'; + const stagingJobId: string = 'staging-job'; + + // First commit some data + await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, commitJobId); + await taggingStore.finalizePendingIndexes([txHash1], commitJobId); + await taggingStore.commit(commitJobId); + + // Stage a higher finalized index (not committed) + await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash2, stagingJobId); + await 
taggingStore.finalizePendingIndexes([txHash2], stagingJobId); + + // Without jobId, should get the committed finalized index + expect(await taggingStore.getLastFinalizedIndex(secret1)).toBe(3); + + // With stagingJobId, should get the staged finalized index + expect(await taggingStore.getLastFinalizedIndex(secret1, stagingJobId)).toBe(7); + }); + + it('commit promotes staged data to main', async () => { + const txHash1 = TxHash.random(); + const txHash2 = TxHash.random(); + const commitJobId: string = 'commit-job'; + const stagingJobId: string = 'staging-job'; + + await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, commitJobId); + await taggingStore.finalizePendingIndexes([txHash1], commitJobId); + await taggingStore.commit(commitJobId); + + await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash2, stagingJobId); + await taggingStore.finalizePendingIndexes([txHash2], stagingJobId); + + // Commit the staging + await taggingStore.commit(stagingJobId); + + // Now without jobId should get the previously staged data + expect(await taggingStore.getLastFinalizedIndex(secret1)).toBe(7); + }); + + it('discardStaged removes staged data without affecting main', async () => { + const txHash1 = TxHash.random(); + const txHash2 = TxHash.random(); + const commitJobId: string = 'commit-job'; + const stagingJobId: string = 'staging-job'; + + await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, commitJobId); + await taggingStore.finalizePendingIndexes([txHash1], commitJobId); + await taggingStore.commit(commitJobId); + + await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash2, stagingJobId); + await taggingStore.finalizePendingIndexes([txHash2], stagingJobId); + + // Discard the staging + await taggingStore.discardStaged(stagingJobId); + + // Should still get the committed finalized index + expect(await taggingStore.getLastFinalizedIndex(secret1)).toBe(3); + + // With stagingJobId should fall back to committed since staging was discarded + expect(await taggingStore.getLastFinalizedIndex(secret1, stagingJobId)).toBe(3); + }); + + it('stages pending and finalized index operations independently', async () => { + const txHash1 = TxHash.random(); + const txHash2 = TxHash.random(); + const txHash3 = TxHash.random(); + const commitJobId: string = 'commit-job'; + const stagingJobId: string = 'staging-job'; + + // Committed: index 3 pending + await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, commitJobId); + await taggingStore.commit(commitJobId); + + // Staged: index 5 pending, then finalize it + await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash2, stagingJobId); + await taggingStore.finalizePendingIndexes([txHash2], stagingJobId); + + // Staged: add another pending index + await taggingStore.storePendingIndexes([{ secret: secret1, index: 7 }], txHash3, stagingJobId); + + // Without jobId: + // - Should see pending: txHash1 (index 3) + // - No finalized index + expect(await taggingStore.getLastFinalizedIndex(secret1)).toBeUndefined(); + expect(await taggingStore.getLastUsedIndex(secret1)).toBe(3); + + // With stagingJobId: + // - Should see finalized: 5 + // - Should see pending: txHash1 (index 3), txHash3 (index 7) + // - Last used should be max(finalized=5, pending={3,7}) = 7 + expect(await taggingStore.getLastFinalizedIndex(secret1, stagingJobId)).toBe(5); + expect(await taggingStore.getLastUsedIndex(secret1, stagingJobId)).toBe(7); + }); + + it('drops 
pending indexes in staging correctly', async () => { + const txHash1 = TxHash.random(); + const txHash2 = TxHash.random(); + const stagingJobId: string = 'staging-job'; + + // Store both pending indexes with staging + await taggingStore.storePendingIndexes([{ secret: secret1, index: 3 }], txHash1, stagingJobId); + await taggingStore.storePendingIndexes([{ secret: secret1, index: 5 }], txHash2, stagingJobId); + + // Drop one in staging + await taggingStore.dropPendingIndexes([txHash1], stagingJobId); + + // With stagingJobId, should only see txHash2 + const txHashes = await taggingStore.getTxHashesOfPendingIndexes(secret1, 0, 10, stagingJobId); + expect(txHashes).toHaveLength(1); + expect(txHashes[0]).toEqual(txHash2); + }); + }); }); diff --git a/yarn-project/pxe/src/storage/tagging_store/sender_tagging_store.ts b/yarn-project/pxe/src/storage/tagging_store/sender_tagging_store.ts index 58b6c7b939c5..1137db016224 100644 --- a/yarn-project/pxe/src/storage/tagging_store/sender_tagging_store.ts +++ b/yarn-project/pxe/src/storage/tagging_store/sender_tagging_store.ts @@ -3,14 +3,26 @@ import type { AztecAsyncKVStore, AztecAsyncMap } from '@aztec/kv-store'; import type { DirectionalAppTaggingSecret, PreTag } from '@aztec/stdlib/logs'; import { TxHash } from '@aztec/stdlib/tx'; +import type { StagedStore } from '../../job_coordinator/job_coordinator.js'; import { UNFINALIZED_TAGGING_INDEXES_WINDOW_LEN } from '../../tagging/index.js'; +/** Staged pending indexes: array of entries or null (deletion sentinel) */ +type StagedPendingData = { index: number; txHash: string }[] | null; + +/** In-memory staged data structure for a single job */ +type StagedSenderData = { + pendingIndexes: Map<string, StagedPendingData>; + finalizedIndexes: Map<string, number>; +}; + /** * Data provider of tagging data used when syncing the sender tagging indexes. The recipient counterpart of this class * is called RecipientTaggingStore. We have the providers separate for the sender and recipient because * the algorithms are completely disjoint and there is not data reuse between the two. */ -export class SenderTaggingStore { +export class SenderTaggingStore implements StagedStore { + readonly storeName = 'sender_tagging'; + #store: AztecAsyncKVStore; // Stores the pending indexes for each directional app tagging secret. Pending here means that the tx that contained @@ -28,11 +40,15 @@ export class SenderTaggingStore { // we don't need to store the history. #lastFinalizedIndexes: AztecAsyncMap<string, number>; + /** In-memory staging: jobId -> { pendingIndexes, finalizedIndexes } */ + #stagedData: Map<string, StagedSenderData>; + constructor(store: AztecAsyncKVStore) { this.#store = store; this.#pendingIndexes = this.#store.openMap('pending_indexes'); this.#lastFinalizedIndexes = this.#store.openMap('last_finalized_indexes'); + this.#stagedData = new Map(); } /** @@ -43,6 +59,7 @@ export class SenderTaggingStore { * @param preTags - The pre-tags containing the directional app tagging secrets and the indexes that are to be * stored in the db. * @param txHash - The tx in which the pretags were used in private logs. + * @param jobId - The job ID for staged writes. * @throws If any two pre-tags contain the same directional app tagging secret. This is enforced because we care * only about the highest index for a given secret that was used in the tx. Hence this check is a good way to catch * bugs. @@ -56,7 +73,7 @@ export class SenderTaggingStore { * This is enforced because this should never happen if the syncing is done correctly as we look for logs from higher * indexes than finalized ones.
*/ - async storePendingIndexes(preTags: PreTag[], txHash: TxHash) { + async storePendingIndexes(preTags: PreTag[], txHash: TxHash, jobId: string) { // The secrets in pre-tags should be unique because we always store just the highest index per given secret-txHash // pair. Below we check that this is the case. const secretsSet = new Set(preTags.map(preTag => preTag.secret.toString())); @@ -67,7 +84,7 @@ export class SenderTaggingStore { for (const { secret, index } of preTags) { // First we check that for any secret the highest used index in tx is not further than window length from // the highest finalized index. - const finalizedIndex = (await this.getLastFinalizedIndex(secret)) ?? 0; + const finalizedIndex = (await this.getLastFinalizedIndex(secret, jobId)) ?? 0; if (index > finalizedIndex + UNFINALIZED_TAGGING_INDEXES_WINDOW_LEN) { throw new Error( `Highest used index ${index} is further than window length from the highest finalized index ${finalizedIndex}. @@ -78,7 +95,7 @@ export class SenderTaggingStore { // Throw if the new pending index is lower than or equal to the last finalized index const secretStr = secret.toString(); - const lastFinalizedIndex = await this.#lastFinalizedIndexes.getAsync(secretStr); + const lastFinalizedIndex = await this.#getLastFinalizedIndexInternal(secretStr, jobId); if (lastFinalizedIndex !== undefined && index <= lastFinalizedIndex) { throw new Error( `Cannot store pending index ${index} for secret ${secretStr}: ` + @@ -88,7 +105,7 @@ export class SenderTaggingStore { // Check if this secret + txHash combination already exists const txHashStr = txHash.toString(); - const existingForSecret = (await this.#pendingIndexes.getAsync(secretStr)) ?? []; + const existingForSecret = await this.#getPendingIndexesInternal(secretStr, jobId); const existingForSecretAndTx = existingForSecret.find(entry => entry.txHash === txHashStr); if (existingForSecretAndTx) { @@ -102,7 +119,8 @@ export class SenderTaggingStore { // If it exists with the same index, ignore the update (no-op) } else { // If it doesn't exist, add it - await this.#pendingIndexes.set(secretStr, [...existingForSecret, { index, txHash: txHashStr }]); + const newValue = [...existingForSecret, { index, txHash: txHashStr }]; + await this.#setPendingIndexesInternal(secretStr, newValue, jobId); } } } @@ -113,6 +131,7 @@ export class SenderTaggingStore { * @param secret - The directional app tagging secret to query pending indexes for. * @param startIndex - The lower bound of the index range (inclusive). * @param endIndex - The upper bound of the index range (exclusive). + * @param jobId - Optional job ID to check staging first. * @returns An array of unique transaction hashes for pending transactions that contain indexes in the range * [startIndex, endIndex). Returns an empty array if no pending indexes exist in the range. */ @@ -120,8 +139,9 @@ export class SenderTaggingStore { secret: DirectionalAppTaggingSecret, startIndex: number, endIndex: number, + jobId?: string, ): Promise<TxHash[]> { - const existing = (await this.#pendingIndexes.getAsync(secret.toString())) ?? []; + const existing = await this.#getPendingIndexesInternal(secret.toString(), jobId); const txHashes = existing .filter(entry => entry.index >= startIndex && entry.index < endIndex) .map(entry => entry.txHash); @@ -131,25 +151,27 @@ export class SenderTaggingStore { /** * Returns the last (highest) finalized index for a given secret. * @param secret - The secret to get the last finalized index for.
+ * @param jobId - Optional job ID to check staging first. * @returns The last (highest) finalized index for the given secret. */ - getLastFinalizedIndex(secret: DirectionalAppTaggingSecret): Promise<number | undefined> { - return this.#lastFinalizedIndexes.getAsync(secret.toString()); + getLastFinalizedIndex(secret: DirectionalAppTaggingSecret, jobId?: string): Promise<number | undefined> { + return this.#getLastFinalizedIndexInternal(secret.toString(), jobId); } /** * Returns the last used index for a given directional app tagging secret, considering both finalized and pending * indexes. * @param secret - The directional app tagging secret to query the last used index for. + * @param jobId - Optional job ID to check staging first. * @returns The last used index. */ - async getLastUsedIndex(secret: DirectionalAppTaggingSecret): Promise<number | undefined> { + async getLastUsedIndex(secret: DirectionalAppTaggingSecret, jobId?: string): Promise<number | undefined> { const secretStr = secret.toString(); - const pendingTxScopedIndexes = (await this.#pendingIndexes.getAsync(secretStr)) ?? []; + const pendingTxScopedIndexes = await this.#getPendingIndexesInternal(secretStr, jobId); const pendingIndexes = pendingTxScopedIndexes.map(entry => entry.index); if (pendingTxScopedIndexes.length === 0) { - return this.#lastFinalizedIndexes.getAsync(secretStr); + return this.#getLastFinalizedIndexInternal(secretStr, jobId); } // As the last used index we return the highest one from the pending indexes. Note that this value will be always @@ -159,24 +181,26 @@ export class SenderTaggingStore { /** * Drops all pending indexes corresponding to the given transaction hashes. + * @param txHashes - The transaction hashes to drop pending indexes for. + * @param jobId - The job ID for staged writes. */ - async dropPendingIndexes(txHashes: TxHash[]) { + async dropPendingIndexes(txHashes: TxHash[], jobId: string) { if (txHashes.length === 0) { return; } const txHashStrs = new Set(txHashes.map(txHash => txHash.toString())); - const allSecrets = await toArray(this.#pendingIndexes.keysAsync()); + const allSecrets = await this.#getAllPendingSecrets(jobId); for (const secret of allSecrets) { - const pendingData = await this.#pendingIndexes.getAsync(secret); - if (pendingData) { + const pendingData = await this.#getPendingIndexesInternal(secret, jobId); + if (pendingData.length > 0) { const filtered = pendingData.filter(item => !txHashStrs.has(item.txHash)); if (filtered.length === 0) { - await this.#pendingIndexes.delete(secret); + await this.#deletePendingIndexesInternal(secret, jobId); } else if (filtered.length !== pendingData.length) { // Some items were filtered out, so update the pending data - await this.#pendingIndexes.set(secret, filtered); + await this.#setPendingIndexesInternal(secret, filtered, jobId); } // else: No items were filtered out (txHashes not found for this secret) --> no-op } @@ -186,8 +210,10 @@ export class SenderTaggingStore { /** * Updates pending indexes corresponding to the given transaction hashes to be finalized and prunes any lower pending * indexes. + * @param txHashes - The transaction hashes to finalize pending indexes for. + * @param jobId - The job ID for staged writes.
*/ - async finalizePendingIndexes(txHashes: TxHash[]) { + async finalizePendingIndexes(txHashes: TxHash[], jobId: string) { if (txHashes.length === 0) { return; } @@ -195,11 +221,11 @@ for (const txHash of txHashes) { const txHashStr = txHash.toString(); - const allSecrets = await toArray(this.#pendingIndexes.keysAsync()); + const allSecrets = await this.#getAllPendingSecrets(jobId); for (const secret of allSecrets) { - const pendingData = await this.#pendingIndexes.getAsync(secret); - if (!pendingData) { + const pendingData = await this.#getPendingIndexesInternal(secret, jobId); + if (pendingData.length === 0) { continue; } @@ -214,7 +240,7 @@ throw new Error(`Multiple pending indexes found for tx hash ${txHashStr} and secret ${secret}`); } - let lastFinalized = await this.#lastFinalizedIndexes.getAsync(secret); + let lastFinalized = await this.#getLastFinalizedIndexInternal(secret, jobId); const newFinalized = matchingIndexes[0]; if (newFinalized < (lastFinalized ?? 0)) { @@ -225,7 +251,7 @@ ); } - await this.#lastFinalizedIndexes.set(secret, newFinalized); + await this.#setLastFinalizedIndexInternal(secret, newFinalized, jobId); lastFinalized = newFinalized; // When we add pending indexes, we ensure they are higher than the last finalized index. However, because we @@ -234,11 +260,136 @@ // outdated pending indexes. const remainingItemsOfHigherIndex = pendingData.filter(item => item.index > (lastFinalized ?? 0)); if (remainingItemsOfHigherIndex.length === 0) { - await this.#pendingIndexes.delete(secret); + await this.#deletePendingIndexesInternal(secret, jobId); } else { - await this.#pendingIndexes.set(secret, remainingItemsOfHigherIndex); + await this.#setPendingIndexesInternal(secret, remainingItemsOfHigherIndex, jobId); } } } } + + /** + * Commits staged data to main storage. + * Called by JobCoordinator when a job completes successfully. + * Must be called within a transaction by the JobCoordinator. + */ + async commit(jobId: string): Promise<void> { + const jobStaging = this.#stagedData.get(jobId); + if (!jobStaging) { + return; + } + + // Commit pending indexes + for (const [secret, data] of jobStaging.pendingIndexes) { + if (data === null) { + await this.#pendingIndexes.delete(secret); + } else { + await this.#pendingIndexes.set(secret, data); + } + } + + // Commit finalized indexes + for (const [secret, value] of jobStaging.finalizedIndexes) { + await this.#lastFinalizedIndexes.set(secret, value); + } + + this.#stagedData.delete(jobId); + } + + /** + * Discards staged data without committing. + * Called by JobCoordinator on abort or during recovery. + */ + discardStaged(jobId: string): Promise<void> { + this.#stagedData.delete(jobId); + return Promise.resolve(); + } + + // Internal helpers for staging-aware reads and writes + + #getPendingIndexesInternal(secret: string, jobId?: string): Promise<{ index: number; txHash: string }[]> { + if (jobId) { + const jobStaging = this.#stagedData.get(jobId); + if (jobStaging?.pendingIndexes.has(secret)) { + const staged = jobStaging.pendingIndexes.get(secret); + // null means deleted, return empty array; undefined shouldn't happen after has() check + return Promise.resolve(staged === null || staged === undefined ? [] : staged); + } + } + return this.#pendingIndexes.getAsync(secret).then(data => data ??
[]); + } + + #setPendingIndexesInternal( + secret: string, + value: { index: number; txHash: string }[], + jobId?: string, + ): Promise { + if (jobId) { + let jobStaging = this.#stagedData.get(jobId); + if (!jobStaging) { + jobStaging = { pendingIndexes: new Map(), finalizedIndexes: new Map() }; + this.#stagedData.set(jobId, jobStaging); + } + jobStaging.pendingIndexes.set(secret, value); + return Promise.resolve(); + } else { + return this.#pendingIndexes.set(secret, value); + } + } + + #deletePendingIndexesInternal(secret: string, jobId?: string): Promise { + if (jobId) { + // Store null to indicate deletion in staging + let jobStaging = this.#stagedData.get(jobId); + if (!jobStaging) { + jobStaging = { pendingIndexes: new Map(), finalizedIndexes: new Map() }; + this.#stagedData.set(jobId, jobStaging); + } + jobStaging.pendingIndexes.set(secret, null); + return Promise.resolve(); + } else { + return this.#pendingIndexes.delete(secret); + } + } + + #getLastFinalizedIndexInternal(secret: string, jobId?: string): Promise { + if (jobId) { + const jobStaging = this.#stagedData.get(jobId); + if (jobStaging?.finalizedIndexes.has(secret)) { + return Promise.resolve(jobStaging.finalizedIndexes.get(secret)); + } + } + return this.#lastFinalizedIndexes.getAsync(secret); + } + + #setLastFinalizedIndexInternal(secret: string, value: number, jobId?: string): Promise { + if (jobId) { + let jobStaging = this.#stagedData.get(jobId); + if (!jobStaging) { + jobStaging = { pendingIndexes: new Map(), finalizedIndexes: new Map() }; + this.#stagedData.set(jobId, jobStaging); + } + jobStaging.finalizedIndexes.set(secret, value); + return Promise.resolve(); + } else { + return this.#lastFinalizedIndexes.set(secret, value); + } + } + + async #getAllPendingSecrets(jobId?: string): Promise { + const mainSecrets = await toArray(this.#pendingIndexes.keysAsync()); + + if (!jobId) { + return mainSecrets; + } + + // Also include secrets that only exist in staging + const jobStaging = this.#stagedData.get(jobId); + if (!jobStaging) { + return mainSecrets; + } + + const stagingSecrets = Array.from(jobStaging.pendingIndexes.keys()); + return Array.from(new Set([...mainSecrets, ...stagingSecrets])); + } } diff --git a/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.test.ts b/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.test.ts index a5fe72cb9ab5..0023495fd2b3 100644 --- a/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.test.ts +++ b/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.test.ts @@ -16,6 +16,7 @@ import { loadPrivateLogsForSenderRecipientPair } from './load_private_logs_for_s // In this test suite we don't care about the anchor block behavior as that is sufficiently tested by // the loadLogsForRange test suite, so we use a high block number to ensure it occurs after all logs. 
const NON_INTERFERING_ANCHOR_BLOCK_NUMBER = BlockNumber(100); +const TEST_JOB_ID = 'test-job'; describe('loadPrivateLogsForSenderRecipientPair', () => { let secret: DirectionalAppTaggingSecret; @@ -66,11 +67,12 @@ describe('loadPrivateLogsForSenderRecipientPair', () => { aztecNode, taggingStore, NON_INTERFERING_ANCHOR_BLOCK_NUMBER, + TEST_JOB_ID, ); expect(logs).toHaveLength(0); - expect(await taggingStore.getHighestAgedIndex(secret)).toBeUndefined(); - expect(await taggingStore.getHighestFinalizedIndex(secret)).toBeUndefined(); + expect(await taggingStore.getHighestAgedIndex(secret, TEST_JOB_ID)).toBeUndefined(); + expect(await taggingStore.getHighestFinalizedIndex(secret, TEST_JOB_ID)).toBeUndefined(); }); it('loads log and updates highest finalized index but not highest aged index', async () => { @@ -101,11 +103,12 @@ describe('loadPrivateLogsForSenderRecipientPair', () => { aztecNode, taggingStore, NON_INTERFERING_ANCHOR_BLOCK_NUMBER, + TEST_JOB_ID, ); expect(logs).toHaveLength(1); - expect(await taggingStore.getHighestFinalizedIndex(secret)).toBe(logIndex); - expect(await taggingStore.getHighestAgedIndex(secret)).toBeUndefined(); + expect(await taggingStore.getHighestFinalizedIndex(secret, TEST_JOB_ID)).toBe(logIndex); + expect(await taggingStore.getHighestAgedIndex(secret, TEST_JOB_ID)).toBeUndefined(); }); it('loads log and updates both highest aged and highest finalized indexes', async () => { @@ -136,11 +139,12 @@ describe('loadPrivateLogsForSenderRecipientPair', () => { aztecNode, taggingStore, NON_INTERFERING_ANCHOR_BLOCK_NUMBER, + TEST_JOB_ID, ); expect(logs).toHaveLength(1); - expect(await taggingStore.getHighestAgedIndex(secret)).toBe(logIndex); - expect(await taggingStore.getHighestFinalizedIndex(secret)).toBe(logIndex); + expect(await taggingStore.getHighestAgedIndex(secret, TEST_JOB_ID)).toBe(logIndex); + expect(await taggingStore.getHighestFinalizedIndex(secret, TEST_JOB_ID)).toBe(logIndex); }); it('logs at boundaries are properly loaded, window and highest indexes advance as expected', async () => { @@ -156,8 +160,8 @@ describe('loadPrivateLogsForSenderRecipientPair', () => { const log2Tag = await computeSiloedTagForIndex(log2Index); // Set existing highest aged index and highest finalized index - await taggingStore.updateHighestAgedIndex(secret, highestAgedIndex); - await taggingStore.updateHighestFinalizedIndex(secret, highestFinalizedIndex); + await taggingStore.updateHighestAgedIndex(secret, highestAgedIndex, TEST_JOB_ID); + await taggingStore.updateHighestFinalizedIndex(secret, highestFinalizedIndex, TEST_JOB_ID); aztecNode.getL2Tips.mockResolvedValue({ finalized: { number: BlockNumber(finalizedBlockNumber) }, @@ -188,12 +192,13 @@ describe('loadPrivateLogsForSenderRecipientPair', () => { aztecNode, taggingStore, NON_INTERFERING_ANCHOR_BLOCK_NUMBER, + TEST_JOB_ID, ); // Verify that both logs at the boundaries of the range were found and processed expect(logs).toHaveLength(2); - expect(await taggingStore.getHighestFinalizedIndex(secret)).toBe(log2Index); - expect(await taggingStore.getHighestAgedIndex(secret)).toBe(log1Index); + expect(await taggingStore.getHighestFinalizedIndex(secret, TEST_JOB_ID)).toBe(log2Index); + expect(await taggingStore.getHighestAgedIndex(secret, TEST_JOB_ID)).toBe(log1Index); // Verify that the window was moved forward correctly // Total range queried: from (highestAgedIndex + 1) to (log2Index + WINDOW_LEN + 1) exclusive diff --git a/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.ts 
b/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.ts index 6b9fe982816e..041e5bb481cb 100644 --- a/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.ts +++ b/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.ts @@ -21,6 +21,7 @@ export async function loadPrivateLogsForSenderRecipientPair( aztecNode: AztecNode, taggingStore: RecipientTaggingStore, anchorBlockNumber: BlockNumber, + jobId: string, ): Promise { // # Explanation of how the algorithm works // When we perform the sync we will look at logs that correspond to the tagging index range @@ -73,8 +74,8 @@ export async function loadPrivateLogsForSenderRecipientPair( let start: number, end: number; { - const currentHighestAgedIndex = await taggingStore.getHighestAgedIndex(secret); - const currentHighestFinalizedIndex = await taggingStore.getHighestFinalizedIndex(secret); + const currentHighestAgedIndex = await taggingStore.getHighestAgedIndex(secret, jobId); + const currentHighestFinalizedIndex = await taggingStore.getHighestFinalizedIndex(secret, jobId); // We don't want to include the highest aged index so we start from `currentHighestAgedIndex + 1` (or 0 if not set) start = currentHighestAgedIndex === undefined ? 0 : currentHighestAgedIndex + 1; @@ -104,7 +105,7 @@ export async function loadPrivateLogsForSenderRecipientPair( // Store updates in data provider and update local variables if (highestAgedIndex !== undefined) { - await taggingStore.updateHighestAgedIndex(secret, highestAgedIndex); + await taggingStore.updateHighestAgedIndex(secret, highestAgedIndex, jobId); } if (highestFinalizedIndex === undefined) { @@ -117,7 +118,7 @@ export async function loadPrivateLogsForSenderRecipientPair( throw new Error('Highest aged index lower than highest finalized index invariant violated'); } - await taggingStore.updateHighestFinalizedIndex(secret, highestFinalizedIndex); + await taggingStore.updateHighestFinalizedIndex(secret, highestFinalizedIndex, jobId); // For the next iteration we want to look only at indexes for which we have not attempted to load logs yet while // ensuring that we do not look further than WINDOW_LEN ahead of the highest finalized index. diff --git a/yarn-project/pxe/src/tagging/sender_sync/sync_sender_tagging_indexes.test.ts b/yarn-project/pxe/src/tagging/sender_sync/sync_sender_tagging_indexes.test.ts index 43af9587edf5..f9c272b66d7c 100644 --- a/yarn-project/pxe/src/tagging/sender_sync/sync_sender_tagging_indexes.test.ts +++ b/yarn-project/pxe/src/tagging/sender_sync/sync_sender_tagging_indexes.test.ts @@ -11,6 +11,8 @@ import { SenderTaggingStore } from '../../storage/tagging_store/sender_tagging_s import { DirectionalAppTaggingSecret, SiloedTag, Tag, UNFINALIZED_TAGGING_INDEXES_WINDOW_LEN } from '../index.js'; import { syncSenderTaggingIndexes } from './sync_sender_tagging_indexes.js'; +const TEST_JOB_ID = 'test-job'; + describe('syncSenderTaggingIndexes', () => { // Contract address and secret to be used on the input of the syncSenderTaggingIndexes function. 
let secret: DirectionalAppTaggingSecret; @@ -44,11 +46,11 @@ describe('syncSenderTaggingIndexes', () => { return Promise.resolve(tags.map((_tag: SiloedTag) => [])); }); - await syncSenderTaggingIndexes(secret, contractAddress, aztecNode, taggingStore); + await syncSenderTaggingIndexes(secret, contractAddress, aztecNode, taggingStore, TEST_JOB_ID); // Highest used and finalized indexes should stay undefined - expect(await taggingStore.getLastUsedIndex(secret)).toBeUndefined(); - expect(await taggingStore.getLastFinalizedIndex(secret)).toBeUndefined(); + expect(await taggingStore.getLastUsedIndex(secret, TEST_JOB_ID)).toBeUndefined(); + expect(await taggingStore.getLastFinalizedIndex(secret, TEST_JOB_ID)).toBeUndefined(); }); // These tests need to be run together in sequence. @@ -86,13 +88,13 @@ describe('syncSenderTaggingIndexes', () => { finalized: { number: finalizedBlockNumberStep1 }, } as any); - await syncSenderTaggingIndexes(secret, contractAddress, aztecNode, taggingStore); + await syncSenderTaggingIndexes(secret, contractAddress, aztecNode, taggingStore, TEST_JOB_ID); // Verify the highest finalized index is updated to 3 - expect(await taggingStore.getLastFinalizedIndex(secret)).toBe(finalizedIndexStep1); + expect(await taggingStore.getLastFinalizedIndex(secret, TEST_JOB_ID)).toBe(finalizedIndexStep1); // Verify the highest used index also returns 3 (when there is no higher pending index the highest used index is // the highest finalized index). - expect(await taggingStore.getLastUsedIndex(secret)).toBe(finalizedIndexStep1); + expect(await taggingStore.getLastUsedIndex(secret, TEST_JOB_ID)).toBe(finalizedIndexStep1); }); it('step 2: pending log is synced', async () => { @@ -115,12 +117,12 @@ describe('syncSenderTaggingIndexes', () => { finalized: { number: finalizedBlockNumberStep1 }, } as any); - await syncSenderTaggingIndexes(secret, contractAddress, aztecNode, taggingStore); + await syncSenderTaggingIndexes(secret, contractAddress, aztecNode, taggingStore, TEST_JOB_ID); // Verify the highest finalized index was not updated - expect(await taggingStore.getLastFinalizedIndex(secret)).toBe(finalizedIndexStep1); + expect(await taggingStore.getLastFinalizedIndex(secret, TEST_JOB_ID)).toBe(finalizedIndexStep1); // Verify the highest used index was updated to the pending index - expect(await taggingStore.getLastUsedIndex(secret)).toBe(pendingIndexStep2); + expect(await taggingStore.getLastUsedIndex(secret, TEST_JOB_ID)).toBe(pendingIndexStep2); }); it('step 3: syncs logs across 2 windows', async () => { @@ -184,10 +186,10 @@ describe('syncSenderTaggingIndexes', () => { finalized: { number: newFinalizedBlockNumber }, } as any); - await syncSenderTaggingIndexes(secret, contractAddress, aztecNode, taggingStore); + await syncSenderTaggingIndexes(secret, contractAddress, aztecNode, taggingStore, TEST_JOB_ID); - expect(await taggingStore.getLastFinalizedIndex(secret)).toBe(newHighestFinalizedIndex); - expect(await taggingStore.getLastUsedIndex(secret)).toBe(newHighestUsedIndex); + expect(await taggingStore.getLastFinalizedIndex(secret, TEST_JOB_ID)).toBe(newHighestFinalizedIndex); + expect(await taggingStore.getLastUsedIndex(secret, TEST_JOB_ID)).toBe(newHighestUsedIndex); }); }); @@ -238,10 +240,10 @@ describe('syncSenderTaggingIndexes', () => { } as any); // Sync tagged logs - await syncSenderTaggingIndexes(secret, contractAddress, aztecNode, taggingStore); + await syncSenderTaggingIndexes(secret, contractAddress, aztecNode, taggingStore, TEST_JOB_ID); // Verify that both highest finalized 
and highest used were set to the pending and finalized index - expect(await taggingStore.getLastFinalizedIndex(secret)).toBe(pendingAndFinalizedIndex); - expect(await taggingStore.getLastUsedIndex(secret)).toBe(pendingAndFinalizedIndex); + expect(await taggingStore.getLastFinalizedIndex(secret, TEST_JOB_ID)).toBe(pendingAndFinalizedIndex); + expect(await taggingStore.getLastUsedIndex(secret, TEST_JOB_ID)).toBe(pendingAndFinalizedIndex); }); }); diff --git a/yarn-project/pxe/src/tagging/sender_sync/sync_sender_tagging_indexes.ts b/yarn-project/pxe/src/tagging/sender_sync/sync_sender_tagging_indexes.ts index 166ba9b6db60..fb6f5445d7aa 100644 --- a/yarn-project/pxe/src/tagging/sender_sync/sync_sender_tagging_indexes.ts +++ b/yarn-project/pxe/src/tagging/sender_sync/sync_sender_tagging_indexes.ts @@ -26,6 +26,7 @@ export async function syncSenderTaggingIndexes( app: AztecAddress, aztecNode: AztecNode, taggingStore: SenderTaggingStore, + jobId: string, ): Promise<void> { // # Explanation of how syncing works // @@ -45,7 +46,7 @@ export async function syncSenderTaggingIndexes( // Each window advance requires two queries (logs + tx status). For example, syncing indexes 0–500 with a window of // 100 takes at least 10 round trips (5 windows × 2 queries). - const finalizedIndex = await taggingStore.getLastFinalizedIndex(secret); + const finalizedIndex = await taggingStore.getLastFinalizedIndex(secret, jobId); let start = finalizedIndex === undefined ? 0 : finalizedIndex + 1; let end = start + UNFINALIZED_TAGGING_INDEXES_WINDOW_LEN; @@ -56,21 +57,21 @@ export async function syncSenderTaggingIndexes( while (true) { // Load and store indexes for the current window. These indexes may already exist in the database if txs using // them were previously sent from this PXE. Any duplicates are handled by the tagging data provider. - await loadAndStoreNewTaggingIndexes(secret, app, start, end, aztecNode, taggingStore); + await loadAndStoreNewTaggingIndexes(secret, app, start, end, aztecNode, taggingStore, jobId); // Retrieve all indexes within the current window from storage and update their status accordingly. - const pendingTxHashes = await taggingStore.getTxHashesOfPendingIndexes(secret, start, end); + const pendingTxHashes = await taggingStore.getTxHashesOfPendingIndexes(secret, start, end, jobId); if (pendingTxHashes.length === 0) { break; } const { txHashesToFinalize, txHashesToDrop } = await getStatusChangeOfPending(pendingTxHashes, aztecNode); - await taggingStore.dropPendingIndexes(txHashesToDrop); - await taggingStore.finalizePendingIndexes(txHashesToFinalize); + await taggingStore.dropPendingIndexes(txHashesToDrop, jobId); + await taggingStore.finalizePendingIndexes(txHashesToFinalize, jobId); // We check if the finalized index has been updated. - newFinalizedIndex = await taggingStore.getLastFinalizedIndex(secret); + newFinalizedIndex = await taggingStore.getLastFinalizedIndex(secret, jobId); if (previousFinalizedIndex !== newFinalizedIndex) { // A new finalized index was found, so we'll run the loop again.
For example: // - Previous finalized index: 10 diff --git a/yarn-project/pxe/src/tagging/sender_sync/utils/load_and_store_new_tagging_indexes.test.ts b/yarn-project/pxe/src/tagging/sender_sync/utils/load_and_store_new_tagging_indexes.test.ts index 8139e8ce721f..c384868c13d0 100644 --- a/yarn-project/pxe/src/tagging/sender_sync/utils/load_and_store_new_tagging_indexes.test.ts +++ b/yarn-project/pxe/src/tagging/sender_sync/utils/load_and_store_new_tagging_indexes.test.ts @@ -11,6 +11,8 @@ import { type MockProxy, mock } from 'jest-mock-extended'; import { SenderTaggingStore } from '../../../storage/tagging_store/sender_tagging_store.js'; import { loadAndStoreNewTaggingIndexes } from './load_and_store_new_tagging_indexes.js'; +const TEST_JOB_ID = 'test-job'; + describe('loadAndStoreNewTaggingIndexes', () => { // App contract address and secret to be used on the input of the loadAndStoreNewTaggingIndexes function. let secret: DirectionalAppTaggingSecret; @@ -46,14 +48,14 @@ describe('loadAndStoreNewTaggingIndexes', () => { return Promise.resolve(tags.map((_tag: SiloedTag) => [])); }); - await loadAndStoreNewTaggingIndexes(secret, app, 0, 10, aztecNode, taggingStore); + await loadAndStoreNewTaggingIndexes(secret, app, 0, 10, aztecNode, taggingStore, TEST_JOB_ID); // Verify that no pending indexes were stored - expect(await taggingStore.getLastUsedIndex(secret)).toBeUndefined(); - expect(await taggingStore.getLastFinalizedIndex(secret)).toBeUndefined(); + expect(await taggingStore.getLastUsedIndex(secret, TEST_JOB_ID)).toBeUndefined(); + expect(await taggingStore.getLastFinalizedIndex(secret, TEST_JOB_ID)).toBeUndefined(); // Verify the entire window has no pending tx hashes - const txHashesInWindow = await taggingStore.getTxHashesOfPendingIndexes(secret, 0, 10); + const txHashesInWindow = await taggingStore.getTxHashesOfPendingIndexes(secret, 0, 10, TEST_JOB_ID); expect(txHashesInWindow).toHaveLength(0); }); @@ -66,15 +68,15 @@ describe('loadAndStoreNewTaggingIndexes', () => { return Promise.resolve(tags.map((t: SiloedTag) => (t.equals(tag) ? 
[makeLog(txHash, tag.value)] : []))); }); - await loadAndStoreNewTaggingIndexes(secret, app, 0, 10, aztecNode, taggingStore); + await loadAndStoreNewTaggingIndexes(secret, app, 0, 10, aztecNode, taggingStore, TEST_JOB_ID); // Verify that the pending index was stored for this txHash - const txHashesInRange = await taggingStore.getTxHashesOfPendingIndexes(secret, index, index + 1); + const txHashesInRange = await taggingStore.getTxHashesOfPendingIndexes(secret, index, index + 1, TEST_JOB_ID); expect(txHashesInRange).toHaveLength(1); expect(txHashesInRange[0].equals(txHash)).toBe(true); // Verify the last used index is correct - expect(await taggingStore.getLastUsedIndex(secret)).toBe(index); + expect(await taggingStore.getLastUsedIndex(secret, TEST_JOB_ID)).toBe(index); }); it('for multiple logs with same txHash stores the highest index', async () => { @@ -97,19 +99,19 @@ describe('loadAndStoreNewTaggingIndexes', () => { ); }); - await loadAndStoreNewTaggingIndexes(secret, app, 0, 10, aztecNode, taggingStore); + await loadAndStoreNewTaggingIndexes(secret, app, 0, 10, aztecNode, taggingStore, TEST_JOB_ID); // Verify that only the highest index (7) was stored for this txHash and secret - const txHashesAtIndex2 = await taggingStore.getTxHashesOfPendingIndexes(secret, index2, index2 + 1); + const txHashesAtIndex2 = await taggingStore.getTxHashesOfPendingIndexes(secret, index2, index2 + 1, TEST_JOB_ID); expect(txHashesAtIndex2).toHaveLength(1); expect(txHashesAtIndex2[0].equals(txHash)).toBe(true); // Verify the lower index is not stored separately - const txHashesAtIndex1 = await taggingStore.getTxHashesOfPendingIndexes(secret, index1, index1 + 1); + const txHashesAtIndex1 = await taggingStore.getTxHashesOfPendingIndexes(secret, index1, index1 + 1, TEST_JOB_ID); expect(txHashesAtIndex1).toHaveLength(0); // Verify the last used index is the highest - expect(await taggingStore.getLastUsedIndex(secret)).toBe(index2); + expect(await taggingStore.getLastUsedIndex(secret, TEST_JOB_ID)).toBe(index2); }); it('multiple logs with different txHashes', async () => { @@ -133,19 +135,19 @@ describe('loadAndStoreNewTaggingIndexes', () => { ); }); - await loadAndStoreNewTaggingIndexes(secret, app, 0, 10, aztecNode, taggingStore); + await loadAndStoreNewTaggingIndexes(secret, app, 0, 10, aztecNode, taggingStore, TEST_JOB_ID); // Verify that both txHashes have their respective indexes stored - const txHashesAtIndex1 = await taggingStore.getTxHashesOfPendingIndexes(secret, index1, index1 + 1); + const txHashesAtIndex1 = await taggingStore.getTxHashesOfPendingIndexes(secret, index1, index1 + 1, TEST_JOB_ID); expect(txHashesAtIndex1).toHaveLength(1); expect(txHashesAtIndex1[0].equals(txHash1)).toBe(true); - const txHashesAtIndex2 = await taggingStore.getTxHashesOfPendingIndexes(secret, index2, index2 + 1); + const txHashesAtIndex2 = await taggingStore.getTxHashesOfPendingIndexes(secret, index2, index2 + 1, TEST_JOB_ID); expect(txHashesAtIndex2).toHaveLength(1); expect(txHashesAtIndex2[0].equals(txHash2)).toBe(true); // Verify the last used index is the highest - expect(await taggingStore.getLastUsedIndex(secret)).toBe(index2); + expect(await taggingStore.getLastUsedIndex(secret, TEST_JOB_ID)).toBe(index2); }); // Expected to happen if sending logs from multiple PXEs at a similar time. 
@@ -161,17 +163,17 @@ describe('loadAndStoreNewTaggingIndexes', () => { ); }); - await loadAndStoreNewTaggingIndexes(secret, app, 0, 10, aztecNode, taggingStore); + await loadAndStoreNewTaggingIndexes(secret, app, 0, 10, aztecNode, taggingStore, TEST_JOB_ID); // Verify that both txHashes have the same index stored - const txHashesAtIndex = await taggingStore.getTxHashesOfPendingIndexes(secret, index, index + 1); + const txHashesAtIndex = await taggingStore.getTxHashesOfPendingIndexes(secret, index, index + 1, TEST_JOB_ID); expect(txHashesAtIndex).toHaveLength(2); const txHashStrings = txHashesAtIndex.map(h => h.toString()); expect(txHashStrings).toContain(txHash1.toString()); expect(txHashStrings).toContain(txHash2.toString()); // Verify the last used index is correct - expect(await taggingStore.getLastUsedIndex(secret)).toBe(index); + expect(await taggingStore.getLastUsedIndex(secret, TEST_JOB_ID)).toBe(index); }); it('complex scenario: multiple txHashes with multiple indexes', async () => { @@ -207,29 +209,29 @@ describe('loadAndStoreNewTaggingIndexes', () => { ); }); - await loadAndStoreNewTaggingIndexes(secret, app, 0, 10, aztecNode, taggingStore); + await loadAndStoreNewTaggingIndexes(secret, app, 0, 10, aztecNode, taggingStore, TEST_JOB_ID); // Verify txHash1 has highest index 8 (should not be at index 1) - const txHashesAtIndex1 = await taggingStore.getTxHashesOfPendingIndexes(secret, 1, 2); + const txHashesAtIndex1 = await taggingStore.getTxHashesOfPendingIndexes(secret, 1, 2, TEST_JOB_ID); expect(txHashesAtIndex1).toHaveLength(0); - const txHashesAtIndex8 = await taggingStore.getTxHashesOfPendingIndexes(secret, 8, 9); + const txHashesAtIndex8 = await taggingStore.getTxHashesOfPendingIndexes(secret, 8, 9, TEST_JOB_ID); expect(txHashesAtIndex8).toHaveLength(1); expect(txHashesAtIndex8[0].equals(txHash1)).toBe(true); // Verify txHash2 has highest index 5 (should not be at index 3) - const txHashesAtIndex3 = await taggingStore.getTxHashesOfPendingIndexes(secret, 3, 4); + const txHashesAtIndex3 = await taggingStore.getTxHashesOfPendingIndexes(secret, 3, 4, TEST_JOB_ID); expect(txHashesAtIndex3).toHaveLength(0); - const txHashesAtIndex5 = await taggingStore.getTxHashesOfPendingIndexes(secret, 5, 6); + const txHashesAtIndex5 = await taggingStore.getTxHashesOfPendingIndexes(secret, 5, 6, TEST_JOB_ID); expect(txHashesAtIndex5).toHaveLength(1); expect(txHashesAtIndex5[0].equals(txHash2)).toBe(true); // Verify txHash3 has index 9 - const txHashesAtIndex9 = await taggingStore.getTxHashesOfPendingIndexes(secret, 9, 10); + const txHashesAtIndex9 = await taggingStore.getTxHashesOfPendingIndexes(secret, 9, 10, TEST_JOB_ID); expect(txHashesAtIndex9).toHaveLength(1); expect(txHashesAtIndex9[0].equals(txHash3)).toBe(true); // Verify the last used index is the highest - expect(await taggingStore.getLastUsedIndex(secret)).toBe(9); + expect(await taggingStore.getLastUsedIndex(secret, TEST_JOB_ID)).toBe(9); }); it('start is inclusive and end is exclusive', async () => { @@ -255,18 +257,18 @@ describe('loadAndStoreNewTaggingIndexes', () => { ); }); - await loadAndStoreNewTaggingIndexes(secret, app, start, end, aztecNode, taggingStore); + await loadAndStoreNewTaggingIndexes(secret, app, start, end, aztecNode, taggingStore, TEST_JOB_ID); // Verify that the log at start (inclusive) was processed - const txHashesAtStart = await taggingStore.getTxHashesOfPendingIndexes(secret, start, start + 1); + const txHashesAtStart = await taggingStore.getTxHashesOfPendingIndexes(secret, start, start + 1, TEST_JOB_ID); 
expect(txHashesAtStart).toHaveLength(1); expect(txHashesAtStart[0].equals(txHashAtStart)).toBe(true); // Verify that the log at end (exclusive) was NOT processed - const txHashesAtEnd = await taggingStore.getTxHashesOfPendingIndexes(secret, end, end + 1); + const txHashesAtEnd = await taggingStore.getTxHashesOfPendingIndexes(secret, end, end + 1, TEST_JOB_ID); expect(txHashesAtEnd).toHaveLength(0); // Verify the last used index is the start index (since end was not processed) - expect(await taggingStore.getLastUsedIndex(secret)).toBe(start); + expect(await taggingStore.getLastUsedIndex(secret, TEST_JOB_ID)).toBe(start); }); }); diff --git a/yarn-project/pxe/src/tagging/sender_sync/utils/load_and_store_new_tagging_indexes.ts b/yarn-project/pxe/src/tagging/sender_sync/utils/load_and_store_new_tagging_indexes.ts index 8b3a5eb7e6c8..7ce9091d7093 100644 --- a/yarn-project/pxe/src/tagging/sender_sync/utils/load_and_store_new_tagging_indexes.ts +++ b/yarn-project/pxe/src/tagging/sender_sync/utils/load_and_store_new_tagging_indexes.ts @@ -25,6 +25,7 @@ export async function loadAndStoreNewTaggingIndexes( end: number, aztecNode: AztecNode, taggingStore: SenderTaggingStore, + jobId: string, ) { // We compute the tags for the current window of indexes const preTagsForWindow: PreTag[] = Array(end - start) @@ -40,7 +41,7 @@ export async function loadAndStoreNewTaggingIndexes( // Now we iterate over the map, reconstruct the preTags and tx hash and store them in the db. for (const [txHashStr, highestIndex] of highestIndexMap.entries()) { const txHash = TxHash.fromString(txHashStr); - await taggingStore.storePendingIndexes([{ secret, index: highestIndex }], txHash); + await taggingStore.storePendingIndexes([{ secret, index: highestIndex }], txHash, jobId); } } diff --git a/yarn-project/txe/src/constants.ts b/yarn-project/txe/src/constants.ts index 24230b9217a4..906d44db1f6f 100644 --- a/yarn-project/txe/src/constants.ts +++ b/yarn-project/txe/src/constants.ts @@ -1,3 +1,6 @@ import { AztecAddress } from '@aztec/stdlib/aztec-address'; export const DEFAULT_ADDRESS = AztecAddress.fromNumber(42); + +/** Job ID for TXE operations. TXE runs tests in isolation, so a constant job ID is sufficient. 
*/ +export const TXE_JOB_ID = 'txe-job'; diff --git a/yarn-project/txe/src/oracle/txe_oracle_top_level_context.ts b/yarn-project/txe/src/oracle/txe_oracle_top_level_context.ts index 3d2cf25889b7..9793e262ecdd 100644 --- a/yarn-project/txe/src/oracle/txe_oracle_top_level_context.ts +++ b/yarn-project/txe/src/oracle/txe_oracle_top_level_context.ts @@ -80,7 +80,7 @@ import { import type { UInt64 } from '@aztec/stdlib/types'; import { ForkCheckpoint } from '@aztec/world-state'; -import { DEFAULT_ADDRESS } from '../constants.js'; +import { DEFAULT_ADDRESS, TXE_JOB_ID } from '../constants.js'; import type { TXEStateMachine } from '../state_machine/index.js'; import type { TXEAccountStore } from '../util/txe_account_store.js'; import type { TXEContractStore } from '../util/txe_contract_store.js'; @@ -344,6 +344,7 @@ export class TXEOracleTopLevelContext implements IMiscOracle, ITxeExecutionOracl this.senderAddressBookStore, this.capsuleStore, this.privateEventStore, + TXE_JOB_ID, 0, 1, undefined, // log @@ -681,6 +682,7 @@ export class TXEOracleTopLevelContext implements IMiscOracle, ITxeExecutionOracl this.senderAddressBookStore, this.capsuleStore, this.privateEventStore, + TXE_JOB_ID, ); const acirExecutionResult = await new WASMSimulator() .executeUserCircuit(toACVMWitness(0, call.args), entryPointArtifact, new Oracle(oracle).toACIRCallback()) diff --git a/yarn-project/txe/src/txe_session.ts b/yarn-project/txe/src/txe_session.ts index 2a4cae464ae5..02149f8c6f18 100644 --- a/yarn-project/txe/src/txe_session.ts +++ b/yarn-project/txe/src/txe_session.ts @@ -43,7 +43,7 @@ import { CallContext, GlobalVariables, TxContext } from '@aztec/stdlib/tx'; import { z } from 'zod'; -import { DEFAULT_ADDRESS } from './constants.js'; +import { DEFAULT_ADDRESS, TXE_JOB_ID } from './constants.js'; import type { IAvmExecutionOracle, ITxeExecutionOracle } from './oracle/interfaces.js'; import { TXEOraclePublicContext } from './oracle/txe_oracle_public_context.js'; import { TXEOracleTopLevelContext } from './oracle/txe_oracle_top_level_context.js'; @@ -294,6 +294,7 @@ export class TXESession implements TXESessionStateHandler { this.noteStore, this.stateMachine.node, this.stateMachine.anchorBlockStore, + TXE_JOB_ID, ).syncNoteNullifiers(contractAddress); // Private execution has two associated block numbers: the anchor block (i.e. the historical block that is used to @@ -337,6 +338,7 @@ export class TXESession implements TXESessionStateHandler { this.senderAddressBookStore, this.capsuleStore, this.privateEventStore, + TXE_JOB_ID, ); // We store the note and tagging index caches fed into the PrivateExecutionOracle (along with some other auxiliary @@ -386,6 +388,7 @@ export class TXESession implements TXESessionStateHandler { this.noteStore, this.stateMachine.node, this.stateMachine.anchorBlockStore, + TXE_JOB_ID, ).syncNoteNullifiers(contractAddress); const anchorBlockHeader = await this.stateMachine.anchorBlockStore.getBlockHeader(); @@ -405,6 +408,7 @@ export class TXESession implements TXESessionStateHandler { this.senderAddressBookStore, this.capsuleStore, this.privateEventStore, + TXE_JOB_ID, ); this.state = { name: 'UTILITY' }; @@ -499,6 +503,7 @@ export class TXESession implements TXESessionStateHandler { this.senderAddressBookStore, this.capsuleStore, this.privateEventStore, + TXE_JOB_ID, ); await new WASMSimulator() .executeUserCircuit(toACVMWitness(0, call.args), entryPointArtifact, new Oracle(oracle).toACIRCallback())
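A note for reviewers on the staged-write lifecycle that this diff threads through the tagging stores: the sketch below shows how the pieces are intended to fit together, using only the `SenderTaggingStore` methods visible above (`storePendingIndexes`, `finalizePendingIndexes`, `getLastFinalizedIndex`, `commit`, `discardStaged`). It is illustrative rather than part of the change: the `demoStagedLifecycle` helper, the `jobId` value, and the manual commit/discard calls are hypothetical; per the doc comments above, in production the `JobCoordinator` is the component that commits or discards a job's staged data.

```ts
import type { DirectionalAppTaggingSecret } from '@aztec/stdlib/logs';
import { TxHash } from '@aztec/stdlib/tx';

import type { SenderTaggingStore } from './storage/tagging_store/sender_tagging_store.js';

// Hypothetical walkthrough of the staging lifecycle (not part of this diff).
async function demoStagedLifecycle(store: SenderTaggingStore, secret: DirectionalAppTaggingSecret) {
  const jobId = 'sync-job-1'; // any string that uniquely identifies the unit of work
  const txHash = TxHash.random();

  // Writes made under a jobId land in the in-memory staging area, not in the KV store.
  await store.storePendingIndexes([{ secret, index: 1 }], txHash, jobId);
  await store.finalizePendingIndexes([txHash], jobId);

  // Reads without a jobId see only committed data...
  await store.getLastFinalizedIndex(secret); // undefined: nothing committed yet
  // ...while reads with the jobId overlay the job's staged data on top.
  await store.getLastFinalizedIndex(secret, jobId); // 1

  // On success the staged data is promoted to main storage; on failure it is dropped.
  const jobSucceeded = true; // placeholder for the real job outcome
  if (jobSucceeded) {
    await store.commit(jobId);
  } else {
    await store.discardStaged(jobId);
  }
}
```

This mirrors the `staging` test block added above: staged writes stay invisible to readers that pass no `jobId` until `commit` runs, and after `discardStaged` even `jobId`-scoped reads fall back to the committed state.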