From ceabd1c1f2a0d2c1bef75e6a1a300839ed34ce5d Mon Sep 17 00:00:00 2001 From: benesjan <13470840+benesjan@users.noreply.github.com> Date: Fri, 2 Jan 2026 18:46:06 +0000 Subject: [PATCH] refactor!: expanding return value of getLogsByTags Fixes F-231 Fixes #9789 In this PR I modify the return value of our "get logs by tags" endpoints such that we don't need to perform another request for tx hash in PXE's log service. Since I was already modifying it I also dropped unused values from the type and made it a bit less messy. I added here migration notes for all the changes I did to the endpoint not only in this PR but also in PRs down the stack. --- .../docs/resources/migration_notes.md | 40 ++++--- .../src/archiver/archiver_store_test_suite.ts | 40 ++----- .../archiver/kv_archiver_store/log_store.ts | 42 +++---- .../e2e_token_bridge_tutorial_test.test.ts | 2 +- yarn-project/pxe/src/logs/log_service.test.ts | 110 +---------------- yarn-project/pxe/src/logs/log_service.ts | 111 +++++------------- ...ate_logs_for_sender_recipient_pair.test.ts | 47 +++----- .../utils/find_highest_indexes.test.ts | 16 +-- .../utils/load_logs_for_range.test.ts | 97 ++++++--------- .../sync/sync_sender_tagging_indexes.test.ts | 6 +- ...load_and_store_new_tagging_indexes.test.ts | 7 +- .../stdlib/src/interfaces/archiver.test.ts | 9 +- .../stdlib/src/interfaces/aztec-node.test.ts | 9 +- .../src/interfaces/get_logs_response.test.ts | 5 +- yarn-project/stdlib/src/logs/index.ts | 1 - .../stdlib/src/logs/log_with_tx_data.ts | 46 -------- .../stdlib/src/logs/tx_scoped_l2_log.ts | 90 ++++++-------- yarn-project/stdlib/src/tests/factories.ts | 45 +++++++ yarn-project/stdlib/src/tx/tx_effect.ts | 4 +- 19 files changed, 248 insertions(+), 479 deletions(-) delete mode 100644 yarn-project/stdlib/src/logs/log_with_tx_data.ts diff --git a/docs/docs-developers/docs/resources/migration_notes.md b/docs/docs-developers/docs/resources/migration_notes.md index 68d285815afe..bb104d239e64 100644 --- 
a/docs/docs-developers/docs/resources/migration_notes.md +++ b/docs/docs-developers/docs/resources/migration_notes.md @@ -9,26 +9,34 @@ Aztec is in full-speed development. Literally every version breaks compatibility ## TBD +### [Aztec Node] changes to `getLogsByTags` endpoint + +`getLogsByTags` endpoint has been optimized for our new log sync algorithm and these are the changes: + +- The `logsPerTag` pagination argument has been removed. Pagination was unnecessary here, since multiple logs per tag typically only occur if several devices are sending logs from the same sender to a recipient, which is unlikely to generate enough logs to require pagination. +- The structure of `TxScopedL2Log` has been revised to meet the requirements of our new log sync algorithm. +- The endpoint has been separated into two versions: `getPrivateLogsByTags` and `getPublicLogsByTagsFromContract`. This change was made because it was never desirable in PXE to mix public and private logs. The public version requires both a `Tag` and a contract address as input. In contrast to the private version—which uses `SiloedTag` (a tag that hashes the raw tag with the emitting contract's address)—the public version uses the raw `Tag` type, since kernels do not hash the tag with the contract address for public logs. + ### [AVM] Gas cost multipliers for public execution to reach simulation/proving parity Gas costs for several AVM opcodes have been adjusted with multipliers to better align public simulation costs with actual proving costs. 
-| Opcode | Multiplier | Previous Cost | New Cost | -|--------|------------|---------------|----------| -| FDIV | 25x | 9 | 225 | -| SLOAD | 10x | 129 | 1,290 | -| SSTORE | 20x | 1,657 | 33,140 | -| NOTEHASHEXISTS | 4x | 126 | 504 | -| EMITNOTEHASH | 15x | 1,285 | 19,275 | -| NULLIFIEREXISTS | 7x | 132 | 924 | -| EMITNULLIFIER | 20x | 1,540 | 30,800 | -| L1TOL2MSGEXISTS | 5x | 108 | 540 | -| SENDL2TOL1MSG | 2x | 209 | 418 | -| CALL | 3x | 3,312 | 9,936 | -| STATICCALL | 3x | 3,312 | 9,936 | -| GETCONTRACTINSTANCE | 4x | 1,527 | 6,108 | -| POSEIDON2 | 15x | 24 | 360 | -| ECADD | 10x | 27 | 270 | +| Opcode | Multiplier | Previous Cost | New Cost | +| ------------------- | ---------- | ------------- | -------- | +| FDIV | 25x | 9 | 225 | +| SLOAD | 10x | 129 | 1,290 | +| SSTORE | 20x | 1,657 | 33,140 | +| NOTEHASHEXISTS | 4x | 126 | 504 | +| EMITNOTEHASH | 15x | 1,285 | 19,275 | +| NULLIFIEREXISTS | 7x | 132 | 924 | +| EMITNULLIFIER | 20x | 1,540 | 30,800 | +| L1TOL2MSGEXISTS | 5x | 108 | 540 | +| SENDL2TOL1MSG | 2x | 209 | 418 | +| CALL | 3x | 3,312 | 9,936 | +| STATICCALL | 3x | 3,312 | 9,936 | +| GETCONTRACTINSTANCE | 4x | 1,527 | 6,108 | +| POSEIDON2 | 15x | 24 | 360 | +| ECADD | 10x | 27 | 270 | **Impact**: Contracts with public bytecode performing any of these operations will see increased gas consumption. 
diff --git a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts index 0cd292f299ef..48732c8f07d4 100644 --- a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts +++ b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts @@ -2276,17 +2276,13 @@ export function describeArchiverDataStore( [ expect.objectContaining({ blockNumber: 2, - blockHash: L2BlockHash.fromField(await logsCheckpoints[2 - 1].checkpoint.blocks[0].header.hash()), - log: makePrivateLog(tags[0]), - isFromPublic: false, + logData: makePrivateLog(tags[0]).getEmittedFields(), }), ], [ expect.objectContaining({ blockNumber: 1, - blockHash: L2BlockHash.fromField(await logsCheckpoints[1 - 1].checkpoint.blocks[0].header.hash()), - log: makePrivateLog(tags[1]), - isFromPublic: false, + logData: makePrivateLog(tags[1]).getEmittedFields(), }), ], ]); @@ -2312,15 +2308,11 @@ export function describeArchiverDataStore( [ expect.objectContaining({ blockNumber: 1, - blockHash: L2BlockHash.fromField(await logsCheckpoints[1 - 1].checkpoint.blocks[0].header.hash()), - log: makePrivateLog(tags[0]), - isFromPublic: false, + logData: makePrivateLog(tags[0]).getEmittedFields(), }), expect.objectContaining({ blockNumber: newBlockNumber, - blockHash: L2BlockHash.fromField(await newCheckpoint.checkpoint.blocks[0].header.hash()), - log: newLog, - isFromPublic: false, + logData: newLog.getEmittedFields(), }), ], ]); @@ -2338,9 +2330,7 @@ export function describeArchiverDataStore( [ expect.objectContaining({ blockNumber: 1, - blockHash: L2BlockHash.fromField(await logsCheckpoints[1 - 1].checkpoint.blocks[0].header.hash()), - log: makePrivateLog(tags[1]), - isFromPublic: false, + logData: makePrivateLog(tags[1]).getEmittedFields(), }), ], ]); @@ -2425,17 +2415,13 @@ export function describeArchiverDataStore( [ expect.objectContaining({ blockNumber: 2, - blockHash: L2BlockHash.fromField(await logsCheckpoints[2 - 
1].checkpoint.blocks[0].header.hash()), - log: makePublicLog(tags[0]), - isFromPublic: true, + logData: makePublicLog(tags[0]).getEmittedFields(), }), ], [ expect.objectContaining({ blockNumber: 1, - blockHash: L2BlockHash.fromField(await logsCheckpoints[1 - 1].checkpoint.blocks[0].header.hash()), - log: makePublicLog(tags[1]), - isFromPublic: true, + logData: makePublicLog(tags[1]).getEmittedFields(), }), ], ]); @@ -2461,15 +2447,11 @@ export function describeArchiverDataStore( [ expect.objectContaining({ blockNumber: 1, - blockHash: L2BlockHash.fromField(await logsCheckpoints[1 - 1].checkpoint.blocks[0].header.hash()), - log: makePublicLog(tags[0]), - isFromPublic: true, + logData: makePublicLog(tags[0]).getEmittedFields(), }), expect.objectContaining({ blockNumber: newBlockNumber, - blockHash: L2BlockHash.fromField(await newCheckpoint.checkpoint.blocks[0].header.hash()), - log: newLog, - isFromPublic: true, + logData: newLog.getEmittedFields(), }), ], ]); @@ -2487,9 +2469,7 @@ export function describeArchiverDataStore( [ expect.objectContaining({ blockNumber: 1, - blockHash: L2BlockHash.fromField(await logsCheckpoints[1 - 1].checkpoint.blocks[0].header.hash()), - log: makePublicLog(tags[1]), - isFromPublic: true, + logData: makePublicLog(tags[1]).getEmittedFields(), }), ], ]); diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts index 7b1b8b306738..aec5c3362807 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts @@ -1,4 +1,4 @@ -import { INITIAL_L2_BLOCK_NUM, MAX_NOTE_HASHES_PER_TX } from '@aztec/constants'; +import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants'; import { BlockNumber } from '@aztec/foundation/branded-types'; import { Fr } from '@aztec/foundation/curves/bn254'; import { createLogger } from '@aztec/foundation/log'; @@ -57,21 +57,16 @@ export class LogStore 
{ * @param block - The L2 block to extract logs from. * @returns An object containing the private and public tagged logs for the block. */ - async #extractTaggedLogsFromBlock(block: L2BlockNew) { - const blockHash = L2BlockHash.fromField(await block.hash()); + #extractTaggedLogsFromBlock(block: L2BlockNew) { // SiloedTag (as string) -> array of log buffers. const privateTaggedLogs = new Map(); // "{contractAddress}_{tag}" (as string) -> array of log buffers. const publicTaggedLogs = new Map(); - const dataStartIndexForBlock = - block.header.state.partial.noteHashTree.nextAvailableLeafIndex - - block.body.txEffects.length * MAX_NOTE_HASHES_PER_TX; - block.body.txEffects.forEach((txEffect, txIndex) => { + block.body.txEffects.forEach(txEffect => { const txHash = txEffect.txHash; - const dataStartIndexForTx = dataStartIndexForBlock + txIndex * MAX_NOTE_HASHES_PER_TX; - txEffect.privateLogs.forEach((log, logIndex) => { + txEffect.privateLogs.forEach(log => { // Private logs use SiloedTag (already siloed by kernel) const tag = log.fields[0]; this.#log.debug(`Found private log with tag ${tag.toString()} in block ${block.number}`); @@ -80,18 +75,17 @@ export class LogStore { currentLogs.push( new TxScopedL2Log( txHash, - dataStartIndexForTx, - logIndex, block.number, - blockHash, block.timestamp, - log, + log.getEmittedFields(), + txEffect.noteHashes, + txEffect.nullifiers[0], ).toBuffer(), ); privateTaggedLogs.set(tag.toString(), currentLogs); }); - txEffect.publicLogs.forEach((log, logIndex) => { + txEffect.publicLogs.forEach(log => { // Public logs use Tag directly (not siloed) and are stored with contract address const tag = log.fields[0]; const contractAddress = log.contractAddress; @@ -104,12 +98,11 @@ export class LogStore { currentLogs.push( new TxScopedL2Log( txHash, - dataStartIndexForTx, - logIndex, block.number, - blockHash, block.timestamp, - log, + log.getEmittedFields(), + txEffect.noteHashes, + txEffect.nullifiers[0], ).toBuffer(), ); 
publicTaggedLogs.set(key, currentLogs); @@ -125,10 +118,11 @@ export class LogStore { * @returns A map from tag (as string) to an array of serialized private logs belonging to that tag, and a map from * "{contractAddress}_{tag}" (as string) to an array of serialized public logs belonging to that key. */ - async #extractTaggedLogs( - blocks: L2BlockNew[], - ): Promise<{ privateTaggedLogs: Map; publicTaggedLogs: Map }> { - const taggedLogsInBlocks = await Promise.all(blocks.map(block => this.#extractTaggedLogsFromBlock(block))); + #extractTaggedLogs(blocks: L2BlockNew[]): { + privateTaggedLogs: Map; + publicTaggedLogs: Map; + } { + const taggedLogsInBlocks = blocks.map(block => this.#extractTaggedLogsFromBlock(block)); // Now we merge the maps from each block into a single map. const privateTaggedLogs = taggedLogsInBlocks.reduce((acc, { privateTaggedLogs }) => { @@ -155,8 +149,8 @@ export class LogStore { * @param blocks - The blocks for which to add the logs. * @returns True if the operation is successful. 
*/ - async addLogs(blocks: L2BlockNew[]): Promise { - const { privateTaggedLogs, publicTaggedLogs } = await this.#extractTaggedLogs(blocks); + addLogs(blocks: L2BlockNew[]): Promise { + const { privateTaggedLogs, publicTaggedLogs } = this.#extractTaggedLogs(blocks); const keysOfPrivateLogsToUpdate = Array.from(privateTaggedLogs.keys()); const keysOfPublicLogsToUpdate = Array.from(publicTaggedLogs.keys()); diff --git a/yarn-project/end-to-end/src/composed/e2e_token_bridge_tutorial_test.test.ts b/yarn-project/end-to-end/src/composed/e2e_token_bridge_tutorial_test.test.ts index f905590e8166..587f6c0a7fff 100644 --- a/yarn-project/end-to-end/src/composed/e2e_token_bridge_tutorial_test.test.ts +++ b/yarn-project/end-to-end/src/composed/e2e_token_bridge_tutorial_test.test.ts @@ -229,5 +229,5 @@ describe('e2e_cross_chain_messaging token_bridge_tutorial_test', () => { logger.info(`New L1 balance of ${ownerEthAddress} is ${newL1Balance}`); // docs:end:l1-withdraw expect(newL1Balance).toBe(withdrawAmount); - }, 90000); + }, 110_000); }); diff --git a/yarn-project/pxe/src/logs/log_service.test.ts b/yarn-project/pxe/src/logs/log_service.test.ts index 2443333b0e0e..b9a80928ef3f 100644 --- a/yarn-project/pxe/src/logs/log_service.test.ts +++ b/yarn-project/pxe/src/logs/log_service.test.ts @@ -1,13 +1,10 @@ -import { BlockNumber } from '@aztec/foundation/branded-types'; -import { randomInt } from '@aztec/foundation/crypto/random'; import { Fr } from '@aztec/foundation/curves/bn254'; import { KeyStore } from '@aztec/key-store'; import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; -import { L2BlockHash } from '@aztec/stdlib/block'; import type { AztecNode } from '@aztec/stdlib/interfaces/server'; -import { PublicLog, Tag, TxScopedL2Log } from '@aztec/stdlib/logs'; -import { TxHash, randomIndexedTxEffect } from '@aztec/stdlib/tx'; +import { Tag } from '@aztec/stdlib/logs'; +import { randomTxScopedPrivateL2Log } from 
'@aztec/stdlib/testing'; import { type MockProxy, mock } from 'jest-mock-extended'; @@ -70,18 +67,12 @@ describe('LogService', () => { }); it('returns a public log if one is found', async () => { - const scopedLog = await TxScopedL2Log.random(true); - (scopedLog.log as PublicLog).contractAddress = contractAddress; + const scopedLog = randomTxScopedPrivateL2Log(); aztecNode.getPublicLogsByTagsFromContract.mockResolvedValue([[scopedLog]]); aztecNode.getPrivateLogsByTags.mockResolvedValue([[]]); - const indexedTxEffect = await randomIndexedTxEffect(); - aztecNode.getTxEffect.mockImplementation((txHash: TxHash) => - txHash.equals(scopedLog.txHash) ? Promise.resolve(indexedTxEffect) : Promise.resolve(undefined), - ); - - const request = new LogRetrievalRequest(contractAddress, new Tag(scopedLog.log.fields[0])); + const request = new LogRetrievalRequest(contractAddress, new Tag(scopedLog.logData[0])); const responses = await logService.bulkRetrieveLogs([request]); @@ -90,18 +81,12 @@ describe('LogService', () => { }); it('returns a private log if one is found', async () => { - const scopedLog = await TxScopedL2Log.random(false); + const scopedLog = randomTxScopedPrivateL2Log(); aztecNode.getPrivateLogsByTags.mockResolvedValue([[scopedLog]]); aztecNode.getPublicLogsByTagsFromContract.mockResolvedValue([[]]); - const indexedTxEffect = await randomIndexedTxEffect(); - aztecNode.getTxEffect.mockResolvedValue(indexedTxEffect); - aztecNode.getTxEffect.mockImplementation((txHash: TxHash) => - txHash.equals(scopedLog.txHash) ? 
Promise.resolve(indexedTxEffect) : Promise.resolve(undefined), - ); - - const request = new LogRetrievalRequest(contractAddress, new Tag(scopedLog.log.fields[0])); + const request = new LogRetrievalRequest(contractAddress, new Tag(scopedLog.logData[0])); const responses = await logService.bulkRetrieveLogs([request]); @@ -109,87 +94,4 @@ describe('LogService', () => { expect(responses[0]).not.toBeNull(); }); }); - - describe('getPublicLogByTag', () => { - const tag = new Tag(Fr.random()); - - beforeEach(() => { - aztecNode.getPublicLogsByTagsFromContract.mockReset(); - aztecNode.getTxEffect.mockReset(); - }); - - it('returns null if no logs found for tag', async () => { - aztecNode.getPublicLogsByTagsFromContract.mockResolvedValue([[]]); - - const result = await logService.getPublicLogByTag(tag, contractAddress); - expect(result).toBeNull(); - }); - - it('returns log data when single log found', async () => { - const scopedLog = await TxScopedL2Log.random(true); - const logContractAddress = (scopedLog.log as PublicLog).contractAddress; - - aztecNode.getPublicLogsByTagsFromContract.mockResolvedValue([[scopedLog]]); - const indexedTxEffect = await randomIndexedTxEffect(); - aztecNode.getTxEffect.mockImplementation((txHash: TxHash) => - txHash.equals(scopedLog.txHash) ? 
Promise.resolve(indexedTxEffect) : Promise.resolve(undefined), - ); - - const result = (await logService.getPublicLogByTag(tag, logContractAddress))!; - - expect(result.logPayload).toEqual(scopedLog.log.getEmittedFieldsWithoutTag()); - expect(result.uniqueNoteHashesInTx).toEqual(indexedTxEffect.data.noteHashes); - expect(result.txHash).toEqual(scopedLog.txHash); - expect(result.firstNullifierInTx).toEqual(indexedTxEffect.data.nullifiers[0]); - - expect(aztecNode.getPublicLogsByTagsFromContract).toHaveBeenCalledWith(logContractAddress, [tag]); - expect(aztecNode.getTxEffect).toHaveBeenCalledWith(scopedLog.txHash); - }); - - it('throws if multiple logs found for tag', async () => { - const scopedLog = await TxScopedL2Log.random(true); - aztecNode.getPublicLogsByTagsFromContract.mockResolvedValue([[scopedLog, scopedLog]]); - const logContractAddress = (scopedLog.log as PublicLog).contractAddress; - - await expect(logService.getPublicLogByTag(tag, logContractAddress)).rejects.toThrow(/Got 2 logs for tag/); - }); - - it('throws if tx effect not found', async () => { - const scopedLog = await TxScopedL2Log.random(true); - aztecNode.getPublicLogsByTagsFromContract.mockResolvedValue([[scopedLog]]); - aztecNode.getTxEffect.mockResolvedValue(undefined); - const logContractAddress = (scopedLog.log as PublicLog).contractAddress; - - await expect(logService.getPublicLogByTag(tag, logContractAddress)).rejects.toThrow( - /failed to retrieve tx effects/, - ); - }); - - it('returns log fields that are actually emitted', async () => { - const logContractAddress = await AztecAddress.random(); - const logPlaintext = [Fr.random()]; - const logContent = [tag.value, ...logPlaintext]; - - const log = PublicLog.from({ - contractAddress: logContractAddress, - fields: logContent, - }); - const scopedLogWithPadding = new TxScopedL2Log( - TxHash.random(), - randomInt(100), - randomInt(100), - BlockNumber(randomInt(100)), - L2BlockHash.random(), - 0n, - log, - ); - - 
aztecNode.getPublicLogsByTagsFromContract.mockResolvedValue([[scopedLogWithPadding]]); - aztecNode.getTxEffect.mockResolvedValue(await randomIndexedTxEffect()); - - const result = await logService.getPublicLogByTag(tag, logContractAddress); - - expect(result?.logPayload).toEqual(logPlaintext); - }); - }); }); diff --git a/yarn-project/pxe/src/logs/log_service.ts b/yarn-project/pxe/src/logs/log_service.ts index b23d610622d6..32683b227e15 100644 --- a/yarn-project/pxe/src/logs/log_service.ts +++ b/yarn-project/pxe/src/logs/log_service.ts @@ -4,15 +4,7 @@ import type { KeyStore } from '@aztec/key-store'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; import type { CompleteAddress } from '@aztec/stdlib/contract'; import type { AztecNode } from '@aztec/stdlib/interfaces/server'; -import { - DirectionalAppTaggingSecret, - PendingTaggedLog, - PrivateLogWithTxData, - PublicLogWithTxData, - SiloedTag, - Tag, - TxScopedL2Log, -} from '@aztec/stdlib/logs'; +import { DirectionalAppTaggingSecret, PendingTaggedLog, SiloedTag, Tag, TxScopedL2Log } from '@aztec/stdlib/logs'; import type { LogRetrievalRequest } from '../contract_function_simulator/noir-structs/log_retrieval_request.js'; import { LogRetrievalResponse } from '../contract_function_simulator/noir-structs/log_retrieval_response.js'; @@ -39,41 +31,23 @@ export class LogService { public async bulkRetrieveLogs(logRetrievalRequests: LogRetrievalRequest[]): Promise<(LogRetrievalResponse | null)[]> { return await Promise.all( logRetrievalRequests.map(async request => { - // TODO(F-231): remove these internal functions and have node endpoints that do this instead const [publicLog, privateLog] = await Promise.all([ - this.getPublicLogByTag(request.tag, request.contractAddress), - this.getPrivateLogByTag(await SiloedTag.compute(request.tag, request.contractAddress)), + this.#getPublicLogByTag(request.tag, request.contractAddress), + this.#getPrivateLogByTag(await SiloedTag.compute(request.tag, 
request.contractAddress)), ]); - if (publicLog !== null) { - if (privateLog !== null) { - throw new Error( - `Found both a public and private log when searching for tag ${request.tag} from contract ${request.contractAddress}`, - ); - } - - return new LogRetrievalResponse( - publicLog.logPayload, - publicLog.txHash, - publicLog.uniqueNoteHashesInTx, - publicLog.firstNullifierInTx, + if (publicLog !== null && privateLog !== null) { + throw new Error( + `Found both a public and private log when searching for tag ${request.tag} from contract ${request.contractAddress}`, ); - } else if (privateLog !== null) { - return new LogRetrievalResponse( - privateLog.logPayload, - privateLog.txHash, - privateLog.uniqueNoteHashesInTx, - privateLog.firstNullifierInTx, - ); - } else { - return null; } + + return publicLog ?? privateLog; }), ); } - // TODO(F-231): delete this function and implement this behavior in the node instead - public async getPublicLogByTag(tag: Tag, contractAddress: AztecAddress): Promise { + async #getPublicLogByTag(tag: Tag, contractAddress: AztecAddress): Promise { const logs = await this.aztecNode.getPublicLogsByTagsFromContract(contractAddress, [tag]); const logsForTag = logs[0]; @@ -88,24 +62,15 @@ export class LogService { const scopedLog = logsForTag[0]; - // getLogsByTag doesn't have all of the information that we need (notably note hashes and the first nullifier), so - // we need to make a second call to the node for `getTxEffect`. - // TODO(#9789): bundle this information in the `getLogsByTag` call. 
- const txEffect = await this.aztecNode.getTxEffect(scopedLog.txHash); - if (txEffect == undefined) { - throw new Error(`Unexpected: failed to retrieve tx effects for tx ${scopedLog.txHash} which is known to exist`); - } - - return new PublicLogWithTxData( - scopedLog.log.getEmittedFieldsWithoutTag(), + return new LogRetrievalResponse( + scopedLog.logData.slice(1), // Skip the tag scopedLog.txHash, - txEffect.data.noteHashes, - txEffect.data.nullifiers[0], + scopedLog.noteHashes, + scopedLog.firstNullifier, ); } - // TODO(F-231): delete this function and implement this behavior in the node instead - public async getPrivateLogByTag(siloedTag: SiloedTag): Promise { + async #getPrivateLogByTag(siloedTag: SiloedTag): Promise { const logs = await this.aztecNode.getPrivateLogsByTags([siloedTag]); const logsForTag = logs[0]; @@ -120,19 +85,11 @@ export class LogService { const scopedLog = logsForTag[0]; - // getLogsByTag doesn't have all of the information that we need (notably note hashes and the first nullifier), so - // we need to make a second call to the node for `getTxEffect`. - // TODO(#9789): bundle this information in the `getLogsByTag` call. 
- const txEffect = await this.aztecNode.getTxEffect(scopedLog.txHash); - if (txEffect == undefined) { - throw new Error(`Unexpected: failed to retrieve tx effects for tx ${scopedLog.txHash} which is known to exist`); - } - - return new PrivateLogWithTxData( - scopedLog.log.getEmittedFieldsWithoutTag(), + return new LogRetrievalResponse( + scopedLog.logData.slice(1), // Skip the tag scopedLog.txHash, - txEffect.data.noteHashes, - txEffect.data.nullifiers[0], + scopedLog.noteHashes, + scopedLog.firstNullifier, ); } @@ -209,32 +166,24 @@ export class LogService { ); } - async #storePendingTaggedLogs( + #storePendingTaggedLogs( contractAddress: AztecAddress, capsuleArrayBaseSlot: Fr, recipient: AztecAddress, privateLogs: TxScopedL2Log[], ) { - // Build all pending tagged logs upfront with their tx effects - const pendingTaggedLogs = await Promise.all( - privateLogs.map(async scopedLog => { - // TODO(#9789): get these effects along with the log - const txEffect = await this.aztecNode.getTxEffect(scopedLog.txHash); - if (!txEffect) { - throw new Error(`Could not find tx effect for tx hash ${scopedLog.txHash}`); - } - - const pendingTaggedLog = new PendingTaggedLog( - scopedLog.log.fields, - scopedLog.txHash, - txEffect.data.noteHashes, - txEffect.data.nullifiers[0], - recipient, - ); + // Build all pending tagged logs from the scoped logs + const pendingTaggedLogs = privateLogs.map(scopedLog => { + const pendingTaggedLog = new PendingTaggedLog( + scopedLog.logData, + scopedLog.txHash, + scopedLog.noteHashes, + scopedLog.firstNullifier, + recipient, + ); - return pendingTaggedLog.toFields(); - }), - ); + return pendingTaggedLog.toFields(); + }); // TODO: This looks like it could belong more at the oracle interface level return this.capsuleDataProvider.appendToCapsuleArray(contractAddress, capsuleArrayBaseSlot, pendingTaggedLogs); diff --git a/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.test.ts 
b/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.test.ts index dec3c567d3d3..6ca7fba0d38d 100644 --- a/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.test.ts +++ b/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.test.ts @@ -3,11 +3,9 @@ import { BlockNumber } from '@aztec/foundation/branded-types'; import { Fr } from '@aztec/foundation/curves/bn254'; import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; -import { L2BlockHash } from '@aztec/stdlib/block'; import type { AztecNode } from '@aztec/stdlib/interfaces/server'; -import { DirectionalAppTaggingSecret, PrivateLog, SiloedTag, Tag, TxScopedL2Log } from '@aztec/stdlib/logs'; -import { makeBlockHeader } from '@aztec/stdlib/testing'; -import { TxHash } from '@aztec/stdlib/tx'; +import { DirectionalAppTaggingSecret, SiloedTag, Tag } from '@aztec/stdlib/logs'; +import { makeBlockHeader, randomTxScopedPrivateL2Log } from '@aztec/stdlib/testing'; import { type MockProxy, mock } from 'jest-mock-extended'; @@ -33,17 +31,8 @@ describe('loadPrivateLogsForSenderRecipientPair', () => { return SiloedTag.compute(tag, app); } - // Move blockTimestamp before tag in args in makeLog - function makeLog(blockHash: Fr, blockNumber: number, blockTimestamp: bigint, tag: Fr) { - return new TxScopedL2Log( - TxHash.random(), - 0, - 0, - BlockNumber(blockNumber), - L2BlockHash.fromField(blockHash), - blockTimestamp, - PrivateLog.random(tag), - ); + function makeLog(blockNumber: number, blockTimestamp: bigint, tag: Fr) { + return randomTxScopedPrivateL2Log({ blockNumber, blockTimestamp, tag }); } beforeAll(async () => { @@ -90,7 +79,6 @@ describe('loadPrivateLogsForSenderRecipientPair', () => { const logBlockTimestamp = currentTimestamp - 5000n; // not aged const logIndex = 5; const logTag = await computeSiloedTagForIndex(logIndex); - const logBlockHeader = 
makeBlockHeader(0, { timestamp: logBlockTimestamp }); aztecNode.getL2Tips.mockResolvedValue({ finalized: { number: BlockNumber(finalizedBlockNumber) }, @@ -100,11 +88,9 @@ describe('loadPrivateLogsForSenderRecipientPair', () => { // The log is finalized aztecNode.getPrivateLogsByTags.mockImplementation((tags: SiloedTag[]) => { - return Promise.all( - tags.map(async (t: SiloedTag) => - t.equals(logTag) - ? [makeLog(await logBlockHeader.hash(), finalizedBlockNumber, logBlockTimestamp, logTag.value)] - : [], + return Promise.resolve( + tags.map((t: SiloedTag) => + t.equals(logTag) ? [makeLog(finalizedBlockNumber, logBlockTimestamp, logTag.value)] : [], ), ); }); @@ -128,7 +114,6 @@ describe('loadPrivateLogsForSenderRecipientPair', () => { const logBlockTimestamp = currentTimestamp - BigInt(MAX_INCLUDE_BY_TIMESTAMP_DURATION) - 1000n; // aged const logIndex = 7; const logTag = await computeSiloedTagForIndex(logIndex); - const logBlockHeader = makeBlockHeader(0, { timestamp: logBlockTimestamp }); aztecNode.getL2Tips.mockResolvedValue({ finalized: { number: BlockNumber(finalizedBlockNumber) }, @@ -138,11 +123,9 @@ describe('loadPrivateLogsForSenderRecipientPair', () => { // The log is finalized aztecNode.getPrivateLogsByTags.mockImplementation((tags: SiloedTag[]) => { - return Promise.all( - tags.map(async (t: SiloedTag) => - t.equals(logTag) - ? [makeLog(await logBlockHeader.hash(), finalizedBlockNumber, logBlockTimestamp, logTag.value)] - : [], + return Promise.resolve( + tags.map((t: SiloedTag) => + t.equals(logTag) ? 
[makeLog(finalizedBlockNumber, logBlockTimestamp, logTag.value)] : [], ), ); }); @@ -171,8 +154,6 @@ describe('loadPrivateLogsForSenderRecipientPair', () => { const log2Index = highestFinalizedIndex + UNFINALIZED_TAGGING_INDEXES_WINDOW_LEN; // At the window boundary const log1Tag = await computeSiloedTagForIndex(log1Index); const log2Tag = await computeSiloedTagForIndex(log2Index); - const log1BlockHeader = makeBlockHeader(0, { timestamp: log1BlockTimestamp }); - const log2BlockHeader = makeBlockHeader(1, { timestamp: log2BlockTimestamp }); // Set existing highest aged index and highest finalized index await taggingDataProvider.updateHighestAgedIndex(secret, highestAgedIndex); @@ -189,12 +170,12 @@ describe('loadPrivateLogsForSenderRecipientPair', () => { aztecNode.getPrivateLogsByTags.mockImplementation((tags: SiloedTag[]) => { numQueriedTags += tags.length; - return Promise.all( - tags.map(async (t: SiloedTag) => { + return Promise.resolve( + tags.map((t: SiloedTag) => { if (t.equals(log1Tag)) { - return [makeLog(await log1BlockHeader.hash(), finalizedBlockNumber, log1BlockTimestamp, log1Tag.value)]; + return [makeLog(finalizedBlockNumber, log1BlockTimestamp, log1Tag.value)]; } else if (t.equals(log2Tag)) { - return [makeLog(await log2BlockHeader.hash(), finalizedBlockNumber, log2BlockTimestamp, log2Tag.value)]; + return [makeLog(finalizedBlockNumber, log2BlockTimestamp, log2Tag.value)]; } return []; }), diff --git a/yarn-project/pxe/src/tagging/recipient_sync/utils/find_highest_indexes.test.ts b/yarn-project/pxe/src/tagging/recipient_sync/utils/find_highest_indexes.test.ts index 6a5239916cf8..73adcd51f51d 100644 --- a/yarn-project/pxe/src/tagging/recipient_sync/utils/find_highest_indexes.test.ts +++ b/yarn-project/pxe/src/tagging/recipient_sync/utils/find_highest_indexes.test.ts @@ -1,8 +1,6 @@ import { MAX_INCLUDE_BY_TIMESTAMP_DURATION } from '@aztec/constants'; -import { BlockNumber } from '@aztec/foundation/branded-types'; -import { L2BlockHash } from 
'@aztec/stdlib/block'; -import { PrivateLog, TxScopedL2Log } from '@aztec/stdlib/logs'; -import { TxHash } from '@aztec/stdlib/tx'; +import { TxScopedL2Log } from '@aztec/stdlib/logs'; +import { randomTxScopedPrivateL2Log } from '@aztec/stdlib/testing'; import { findHighestIndexes } from './find_highest_indexes.js'; @@ -10,15 +8,7 @@ describe('findHighestIndexes', () => { const currentTimestamp = BigInt(Math.floor(Date.now() / 1000)); function makeLog(blockNumber: number, blockTimestamp: bigint): TxScopedL2Log { - return new TxScopedL2Log( - TxHash.random(), - 0, - 0, - BlockNumber(blockNumber), - L2BlockHash.random(), - blockTimestamp, - PrivateLog.random(), - ); + return randomTxScopedPrivateL2Log({ blockNumber, blockTimestamp }); } it('returns undefined for highestAgedIndex when no logs are at least 24 hours old', () => { diff --git a/yarn-project/pxe/src/tagging/recipient_sync/utils/load_logs_for_range.test.ts b/yarn-project/pxe/src/tagging/recipient_sync/utils/load_logs_for_range.test.ts index 1165c2c82eea..42ef7f523f88 100644 --- a/yarn-project/pxe/src/tagging/recipient_sync/utils/load_logs_for_range.test.ts +++ b/yarn-project/pxe/src/tagging/recipient_sync/utils/load_logs_for_range.test.ts @@ -1,10 +1,9 @@ import { BlockNumber } from '@aztec/foundation/branded-types'; import { Fr } from '@aztec/foundation/curves/bn254'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; -import { L2BlockHash } from '@aztec/stdlib/block'; import type { AztecNode } from '@aztec/stdlib/interfaces/server'; -import { DirectionalAppTaggingSecret, PrivateLog, SiloedTag, Tag, TxScopedL2Log } from '@aztec/stdlib/logs'; -import { makeBlockHeader } from '@aztec/stdlib/testing'; +import { DirectionalAppTaggingSecret, SiloedTag, Tag } from '@aztec/stdlib/logs'; +import { randomTxScopedPrivateL2Log } from '@aztec/stdlib/testing'; import { TxHash } from '@aztec/stdlib/tx'; import { type MockProxy, mock } from 'jest-mock-extended'; @@ -27,16 +26,8 @@ describe('loadLogsForRange', () 
=> { return SiloedTag.compute(tag, app); } - function makeLog(txHash: TxHash, blockHash: Fr, blockNumber: number, blockTimestamp: bigint, tag: SiloedTag) { - return new TxScopedL2Log( - txHash, - 0, - 0, - BlockNumber(blockNumber), - L2BlockHash.fromField(blockHash), - blockTimestamp, - PrivateLog.random(tag.value), - ); + function makeLog(txHash: TxHash, blockNumber: number, blockTimestamp: bigint, tag: SiloedTag) { + return randomTxScopedPrivateL2Log({ txHash, blockNumber, blockTimestamp, tag: tag.value }); } beforeAll(async () => { @@ -69,16 +60,14 @@ describe('loadLogsForRange', () => { const timestamp2 = 2000n; const tag1 = await computeSiloedTagForIndex(index1); const tag2 = await computeSiloedTagForIndex(index2); - const blockHeader1 = makeBlockHeader(0, { timestamp: timestamp1 }); - const blockHeader2 = makeBlockHeader(1, { timestamp: timestamp2 }); aztecNode.getPrivateLogsByTags.mockImplementation((tags: SiloedTag[]) => { return Promise.all( - tags.map(async (t: SiloedTag) => { + tags.map((t: SiloedTag) => { if (t.equals(tag1)) { - return [makeLog(txHash1, await blockHeader1.hash(), blockNumber1, timestamp1, tag1)]; + return [makeLog(txHash1, blockNumber1, timestamp1, tag1)]; } else if (t.equals(tag2)) { - return [makeLog(txHash2, await blockHeader2.hash(), blockNumber2, timestamp2, tag2)]; + return [makeLog(txHash2, blockNumber2, timestamp2, tag2)]; } return []; }), @@ -106,17 +95,12 @@ describe('loadLogsForRange', () => { const timestamp1 = 1000n; const timestamp2 = 2000n; const tag = await computeSiloedTagForIndex(index); - const blockHeader1 = makeBlockHeader(0, { timestamp: timestamp1 }); - const blockHeader2 = makeBlockHeader(1, { timestamp: timestamp2 }); aztecNode.getPrivateLogsByTags.mockImplementation((tags: SiloedTag[]) => { - return Promise.all( - tags.map(async (t: SiloedTag) => + return Promise.resolve( + tags.map((t: SiloedTag) => t.equals(tag) - ? 
[ - makeLog(txHash1, await blockHeader1.hash(), blockNumber1, timestamp1, tag), - makeLog(txHash2, await blockHeader2.hash(), blockNumber2, timestamp2, tag), - ] + ? [makeLog(txHash1, blockNumber1, timestamp1, tag), makeLog(txHash2, blockNumber2, timestamp2, tag)] : [], ), ); @@ -141,18 +125,18 @@ describe('loadLogsForRange', () => { const timestamp = 1000n; const tag1 = await computeSiloedTagForIndex(index1); const tag2 = await computeSiloedTagForIndex(index2); - const blockHeader = makeBlockHeader(0, { timestamp }); - - aztecNode.getPrivateLogsByTags.mockImplementation(async (tags: SiloedTag[]) => { - const blockHash = await blockHeader.hash(); - return tags.map((t: SiloedTag) => { - if (t.equals(tag1)) { - return [makeLog(txHash1, blockHash, blockNumber, timestamp, tag1)]; - } else if (t.equals(tag2)) { - return [makeLog(txHash2, blockHash, blockNumber, timestamp, tag2)]; - } - return []; - }); + + aztecNode.getPrivateLogsByTags.mockImplementation((tags: SiloedTag[]) => { + return Promise.resolve( + tags.map((t: SiloedTag) => { + if (t.equals(tag1)) { + return [makeLog(txHash1, blockNumber, timestamp, tag1)]; + } else if (t.equals(tag2)) { + return [makeLog(txHash2, blockNumber, timestamp, tag2)]; + } + return []; + }), + ); }); const result = await loadLogsForRange(secret, app, aztecNode, 0, 10, NON_INTERFERING_ANCHOR_BLOCK_NUMBER); @@ -175,18 +159,18 @@ describe('loadLogsForRange', () => { const timestamp = 1000n; const tagAtStart = await computeSiloedTagForIndex(start); const tagAtEnd = await computeSiloedTagForIndex(end); - const blockHeader = makeBlockHeader(0, { timestamp }); - - aztecNode.getPrivateLogsByTags.mockImplementation(async (tags: SiloedTag[]) => { - const blockHash = await blockHeader.hash(); - return tags.map((t: SiloedTag) => { - if (t.equals(tagAtStart)) { - return [makeLog(txHashAtStart, blockHash, 5, timestamp, tagAtStart)]; - } else if (t.equals(tagAtEnd)) { - return [makeLog(txHashAtEnd, blockHash, 6, timestamp, tagAtEnd)]; - } - return 
[]; - }); + + aztecNode.getPrivateLogsByTags.mockImplementation((tags: SiloedTag[]) => { + return Promise.resolve( + tags.map((t: SiloedTag) => { + if (t.equals(tagAtStart)) { + return [makeLog(txHashAtStart, 5, timestamp, tagAtStart)]; + } else if (t.equals(tagAtEnd)) { + return [makeLog(txHashAtEnd, 6, timestamp, tagAtEnd)]; + } + return []; + }), + ); }); const result = await loadLogsForRange(secret, app, aztecNode, start, end, NON_INTERFERING_ANCHOR_BLOCK_NUMBER); @@ -203,18 +187,15 @@ describe('loadLogsForRange', () => { const index = 3; const timestamp = 1000n; const tag = await computeSiloedTagForIndex(index); - const blockHeaderBefore = makeBlockHeader(0, { timestamp }); - const blockHeaderAtAnchor = makeBlockHeader(1, { timestamp }); - const blockHeaderAfter = makeBlockHeader(2, { timestamp }); aztecNode.getPrivateLogsByTags.mockImplementation((tags: SiloedTag[]) => { - return Promise.all( - tags.map(async (t: SiloedTag) => + return Promise.resolve( + tags.map((t: SiloedTag) => t.equals(tag) ? 
[ - makeLog(TxHash.random(), await blockHeaderBefore.hash(), anchorBlockNumber - 1, timestamp, tag), - makeLog(TxHash.random(), await blockHeaderAtAnchor.hash(), anchorBlockNumber, timestamp, tag), - makeLog(TxHash.random(), await blockHeaderAfter.hash(), anchorBlockNumber + 1, timestamp, tag), + makeLog(TxHash.random(), anchorBlockNumber - 1, timestamp, tag), + makeLog(TxHash.random(), anchorBlockNumber, timestamp, tag), + makeLog(TxHash.random(), anchorBlockNumber + 1, timestamp, tag), ] : [], ), diff --git a/yarn-project/pxe/src/tagging/sync/sync_sender_tagging_indexes.test.ts b/yarn-project/pxe/src/tagging/sync/sync_sender_tagging_indexes.test.ts index 8f6b1fd39cd2..16c5361ca9fa 100644 --- a/yarn-project/pxe/src/tagging/sync/sync_sender_tagging_indexes.test.ts +++ b/yarn-project/pxe/src/tagging/sync/sync_sender_tagging_indexes.test.ts @@ -1,10 +1,8 @@ -import { BlockNumber } from '@aztec/foundation/branded-types'; import { Fr } from '@aztec/foundation/curves/bn254'; import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; -import { L2BlockHash } from '@aztec/stdlib/block'; import type { AztecNode } from '@aztec/stdlib/interfaces/client'; -import { PrivateLog, TxScopedL2Log } from '@aztec/stdlib/logs'; +import { randomTxScopedPrivateL2Log } from '@aztec/stdlib/testing'; import { TxHash, TxStatus } from '@aztec/stdlib/tx'; import { type MockProxy, mock } from 'jest-mock-extended'; @@ -27,7 +25,7 @@ describe('syncSenderTaggingIndexes', () => { } function makeLog(txHash: TxHash, tag: Fr) { - return new TxScopedL2Log(txHash, 0, 0, BlockNumber(0), L2BlockHash.random(), 0n, PrivateLog.random(tag)); + return randomTxScopedPrivateL2Log({ txHash, tag }); } async function setUp() { diff --git a/yarn-project/pxe/src/tagging/sync/utils/load_and_store_new_tagging_indexes.test.ts b/yarn-project/pxe/src/tagging/sync/utils/load_and_store_new_tagging_indexes.test.ts index 8ad3b8fc0b3f..c6c26c8fe78f 100644 --- 
a/yarn-project/pxe/src/tagging/sync/utils/load_and_store_new_tagging_indexes.test.ts +++ b/yarn-project/pxe/src/tagging/sync/utils/load_and_store_new_tagging_indexes.test.ts @@ -1,10 +1,9 @@ -import { BlockNumber } from '@aztec/foundation/branded-types'; import { Fr } from '@aztec/foundation/curves/bn254'; import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; -import { L2BlockHash } from '@aztec/stdlib/block'; import type { AztecNode } from '@aztec/stdlib/interfaces/server'; -import { DirectionalAppTaggingSecret, PrivateLog, SiloedTag, Tag, TxScopedL2Log } from '@aztec/stdlib/logs'; +import { DirectionalAppTaggingSecret, SiloedTag, Tag } from '@aztec/stdlib/logs'; +import { randomTxScopedPrivateL2Log } from '@aztec/stdlib/testing'; import { TxHash } from '@aztec/stdlib/tx'; import { type MockProxy, mock } from 'jest-mock-extended'; @@ -26,7 +25,7 @@ describe('loadAndStoreNewTaggingIndexes', () => { } function makeLog(txHash: TxHash, tag: Fr) { - return new TxScopedL2Log(txHash, 0, 0, BlockNumber(0), L2BlockHash.random(), 0n, PrivateLog.random(tag)); + return randomTxScopedPrivateL2Log({ txHash, tag }); } beforeAll(async () => { diff --git a/yarn-project/stdlib/src/interfaces/archiver.test.ts b/yarn-project/stdlib/src/interfaces/archiver.test.ts index baa18e195bc9..1033989195af 100644 --- a/yarn-project/stdlib/src/interfaces/archiver.test.ts +++ b/yarn-project/stdlib/src/interfaces/archiver.test.ts @@ -30,6 +30,7 @@ import type { LogFilter } from '../logs/log_filter.js'; import { SiloedTag } from '../logs/siloed_tag.js'; import { Tag } from '../logs/tag.js'; import { TxScopedL2Log } from '../logs/tx_scoped_l2_log.js'; +import { randomTxScopedPrivateL2Log } from '../tests/factories.js'; import { getTokenContractArtifact } from '../tests/fixtures.js'; import { BlockHeader } from '../tx/block_header.js'; import type { IndexedTxEffect } from '../tx/indexed_tx_effect.js'; @@ -457,18 +458,18 @@ class 
MockArchiver implements ArchiverApi { expect(blockNumber).toEqual(BlockNumber(1)); return Promise.resolve(`0x01`); } - async getPrivateLogsByTags(tags: SiloedTag[], _logsPerTag?: number): Promise { + getPrivateLogsByTags(tags: SiloedTag[], _logsPerTag?: number): Promise { expect(tags[0]).toBeInstanceOf(SiloedTag); - return [await Promise.all(tags.map(() => TxScopedL2Log.random(false)))]; + return Promise.resolve([tags.map(() => randomTxScopedPrivateL2Log())]); } - async getPublicLogsByTagsFromContract( + getPublicLogsByTagsFromContract( contractAddress: AztecAddress, tags: Tag[], _logsPerTag?: number, ): Promise { expect(contractAddress).toBeInstanceOf(AztecAddress); expect(tags[0]).toBeInstanceOf(Tag); - return [await Promise.all(tags.map(() => TxScopedL2Log.random(true)))]; + return Promise.resolve([tags.map(() => randomTxScopedPrivateL2Log())]); } async getPublicLogs(filter: LogFilter): Promise { expect(filter.txHash).toBeInstanceOf(TxHash); diff --git a/yarn-project/stdlib/src/interfaces/aztec-node.test.ts b/yarn-project/stdlib/src/interfaces/aztec-node.test.ts index 7f4da739c97e..a2c16db0e04c 100644 --- a/yarn-project/stdlib/src/interfaces/aztec-node.test.ts +++ b/yarn-project/stdlib/src/interfaces/aztec-node.test.ts @@ -43,6 +43,7 @@ import type { LogFilter } from '../logs/log_filter.js'; import { SiloedTag } from '../logs/siloed_tag.js'; import { Tag } from '../logs/tag.js'; import { TxScopedL2Log } from '../logs/tx_scoped_l2_log.js'; +import { randomTxScopedPrivateL2Log } from '../tests/factories.js'; import { getTokenContractArtifact } from '../tests/fixtures.js'; import { MerkleTreeId } from '../trees/merkle_tree_id.js'; import { NullifierMembershipWitness } from '../trees/nullifier_membership_witness.js'; @@ -721,12 +722,12 @@ class MockAztecNode implements AztecNode { expect(filter.contractAddress).toBeInstanceOf(AztecAddress); return Promise.resolve({ logs: [await ExtendedContractClassLog.random()], maxLogsHit: true }); } - async 
getPrivateLogsByTags(tags: SiloedTag[], _logsPerTag?: number): Promise { + getPrivateLogsByTags(tags: SiloedTag[], _logsPerTag?: number): Promise { expect(tags).toHaveLength(1); expect(tags[0]).toBeInstanceOf(SiloedTag); - return [[await TxScopedL2Log.random(false)]]; + return Promise.resolve([[randomTxScopedPrivateL2Log()]]); } - async getPublicLogsByTagsFromContract( + getPublicLogsByTagsFromContract( contractAddress: AztecAddress, tags: Tag[], _logsPerTag?: number, @@ -734,7 +735,7 @@ class MockAztecNode implements AztecNode { expect(contractAddress).toBeInstanceOf(AztecAddress); expect(tags).toHaveLength(1); expect(tags[0]).toBeInstanceOf(Tag); - return [[await TxScopedL2Log.random(true)]]; + return Promise.resolve([[randomTxScopedPrivateL2Log()]]); } sendTx(tx: Tx): Promise { expect(tx).toBeInstanceOf(Tx); diff --git a/yarn-project/stdlib/src/interfaces/get_logs_response.test.ts b/yarn-project/stdlib/src/interfaces/get_logs_response.test.ts index 065b16d75069..c2a32c8d5a02 100644 --- a/yarn-project/stdlib/src/interfaces/get_logs_response.test.ts +++ b/yarn-project/stdlib/src/interfaces/get_logs_response.test.ts @@ -1,10 +1,11 @@ import { jsonStringify } from '@aztec/foundation/json-rpc'; import { TxScopedL2Log } from '../logs/tx_scoped_l2_log.js'; +import { randomTxScopedPrivateL2Log } from '../tests/factories.js'; describe('TxScopedL2Log', () => { - it('serializes to JSON', async () => { - const log = await TxScopedL2Log.random(); + it('serializes to JSON', () => { + const log = randomTxScopedPrivateL2Log(); expect(TxScopedL2Log.schema.parse(JSON.parse(jsonStringify(log)))).toEqual(log); }); }); diff --git a/yarn-project/stdlib/src/logs/index.ts b/yarn-project/stdlib/src/logs/index.ts index 0c3f96c87f62..dafe33e376db 100644 --- a/yarn-project/stdlib/src/logs/index.ts +++ b/yarn-project/stdlib/src/logs/index.ts @@ -1,4 +1,3 @@ -export * from './log_with_tx_data.js'; export * from './directional_app_tagging_secret.js'; export * from './pre_tag.js'; export * 
from './contract_class_log.js'; diff --git a/yarn-project/stdlib/src/logs/log_with_tx_data.ts b/yarn-project/stdlib/src/logs/log_with_tx_data.ts deleted file mode 100644 index fae2fcdc0bfe..000000000000 --- a/yarn-project/stdlib/src/logs/log_with_tx_data.ts +++ /dev/null @@ -1,46 +0,0 @@ -import { MAX_NOTE_HASHES_PER_TX, PRIVATE_LOG_CIPHERTEXT_LEN } from '@aztec/constants'; -import { Fr } from '@aztec/foundation/curves/bn254'; -import { TxHash } from '@aztec/stdlib/tx'; - -// This is used as a response for PXE's custom getPublicLogByTag oracle. -export class PublicLogWithTxData { - constructor( - public logPayload: Fr[], - public txHash: TxHash, - public uniqueNoteHashesInTx: Fr[], - public firstNullifierInTx: Fr, - ) {} -} - -// This is used as a response for PXE's custom getPrivateLogByTag oracle. -export class PrivateLogWithTxData { - constructor( - public logPayload: Fr[], - public txHash: TxHash, - public uniqueNoteHashesInTx: Fr[], - public firstNullifierInTx: Fr, - ) {} - - toNoirSerialization(): (Fr | Fr[])[] { - return [ - ...toBoundedVecSerialization(this.logPayload, PRIVATE_LOG_CIPHERTEXT_LEN), - this.txHash.hash, - ...toBoundedVecSerialization(this.uniqueNoteHashesInTx, MAX_NOTE_HASHES_PER_TX), - this.firstNullifierInTx, - ]; - } - - static noirSerializationOfEmpty(): (Fr | Fr[])[] { - return new PrivateLogWithTxData([], TxHash.zero(), [], new Fr(0)).toNoirSerialization(); - } -} - -function toBoundedVecSerialization(array: Fr[], maxLength: number) { - if (array.length > maxLength) { - throw new Error( - `An array of length ${array.length} cannot be converted to a BoundedVec of max length ${maxLength}`, - ); - } - - return [array.concat(Array(maxLength - array.length).fill(new Fr(0))), new Fr(array.length)]; -} diff --git a/yarn-project/stdlib/src/logs/tx_scoped_l2_log.ts b/yarn-project/stdlib/src/logs/tx_scoped_l2_log.ts index bee03d8472d2..31481f4ab49d 100644 --- a/yarn-project/stdlib/src/logs/tx_scoped_l2_log.ts +++ 
b/yarn-project/stdlib/src/logs/tx_scoped_l2_log.ts @@ -1,114 +1,98 @@ import { BlockNumber, BlockNumberSchema } from '@aztec/foundation/branded-types'; -import { BufferReader, bigintToUInt64BE, boolToBuffer, numToUInt32BE } from '@aztec/foundation/serialize'; +import { Fr } from '@aztec/foundation/curves/bn254'; +import { schemas as foundationSchemas } from '@aztec/foundation/schemas'; +import { + BufferReader, + bigintToUInt64BE, + numToUInt32BE, + serializeArrayOfBufferableToVector, +} from '@aztec/foundation/serialize'; import { z } from 'zod'; -import { L2BlockHash } from '../block/block_hash.js'; import { schemas } from '../schemas/schemas.js'; import { TxHash } from '../tx/tx_hash.js'; import type { UInt64 } from '../types/shared.js'; -import { PrivateLog } from './private_log.js'; -import { PublicLog } from './public_log.js'; -// TODO(F-231): Drop this and return the PrivateLogWithTxData and PublicLogWithTxData from Aztec node instead. export class TxScopedL2Log { constructor( /* * Hash of the tx where the log is included */ public txHash: TxHash, - /* - * The next available leaf index for the note hash tree for this transaction. It is stored - * with the log so the noteHashIndex can be reconstructed after decryption. - */ - public dataStartIndexForTx: number, - /* - * The index of the log in the transaction. Note that public and private logs are in separate arrays in the tx - * effect and for this reason these indices are independent (a private and public log can have the same index). 
- */ - public logIndexInTx: number, /* * The block this log is included in */ public blockNumber: BlockNumber, - /* - * The block this log is included in - */ - public blockHash: L2BlockHash, /* * The timestamp of the block this log is included in */ public blockTimestamp: UInt64, /* - * The log data as either a PrivateLog or PublicLog + * The log data as an array of field elements */ - public log: PrivateLog | PublicLog, + public logData: Fr[], + /* + * The note hashes from the tx effect + */ + public noteHashes: Fr[], + /* + * The first nullifier from the tx effect. Used for nonce discovery when processing notes from logs. + * + * (Note nonces are computed as `hash(firstNullifier, noteIndexInTx)`.) + */ + public firstNullifier: Fr, ) {} - get isFromPublic() { - return this.log instanceof PublicLog; - } - static get schema() { return z .object({ txHash: TxHash.schema, - dataStartIndexForTx: z.number(), - logIndexInTx: z.number(), blockNumber: BlockNumberSchema, - blockHash: L2BlockHash.schema, blockTimestamp: schemas.UInt64, - log: z.union([PrivateLog.schema, PublicLog.schema]), + logData: z.array(foundationSchemas.Fr), + noteHashes: z.array(foundationSchemas.Fr), + firstNullifier: foundationSchemas.Fr, }) .transform( - ({ txHash, dataStartIndexForTx, logIndexInTx, blockNumber, blockHash, blockTimestamp, log }) => - new TxScopedL2Log(txHash, dataStartIndexForTx, logIndexInTx, blockNumber, blockHash, blockTimestamp, log), + ({ txHash, blockNumber, blockTimestamp, logData, noteHashes, firstNullifier }) => + new TxScopedL2Log(txHash, blockNumber, blockTimestamp, logData, noteHashes, firstNullifier), ); } toBuffer() { return Buffer.concat([ this.txHash.toBuffer(), - numToUInt32BE(this.dataStartIndexForTx), - numToUInt32BE(this.logIndexInTx), numToUInt32BE(this.blockNumber), - this.blockHash.toBuffer(), bigintToUInt64BE(this.blockTimestamp), - boolToBuffer(this.isFromPublic), - this.log.toBuffer(), + serializeArrayOfBufferableToVector(this.logData), + 
serializeArrayOfBufferableToVector(this.noteHashes), + this.firstNullifier.toBuffer(), ]); } static fromBuffer(buffer: Buffer) { const reader = BufferReader.asReader(buffer); const txHash = reader.readObject(TxHash); - const dataStartIndexForTx = reader.readNumber(); - const logIndexInTx = reader.readNumber(); const blockNumber = BlockNumber(reader.readNumber()); - const blockHash = reader.readObject(L2BlockHash); const blockTimestamp = reader.readUInt64(); - const isFromPublic = reader.readBoolean(); - const log = isFromPublic ? PublicLog.fromBuffer(reader) : PrivateLog.fromBuffer(reader); - - return new TxScopedL2Log(txHash, dataStartIndexForTx, logIndexInTx, blockNumber, blockHash, blockTimestamp, log); - } + const logData = reader.readVector(Fr); + const noteHashes = reader.readVector(Fr); + const firstNullifier = reader.readObject(Fr); - static async random(isFromPublic = Math.random() < 0.5) { - const log = isFromPublic ? await PublicLog.random() : PrivateLog.random(); - return new TxScopedL2Log(TxHash.random(), 1, 1, BlockNumber(1), L2BlockHash.random(), 1n, log); + return new TxScopedL2Log(txHash, blockNumber, blockTimestamp, logData, noteHashes, firstNullifier); } equals(other: TxScopedL2Log) { return ( this.txHash.equals(other.txHash) && - this.dataStartIndexForTx === other.dataStartIndexForTx && - this.logIndexInTx === other.logIndexInTx && this.blockNumber === other.blockNumber && - this.blockHash.equals(other.blockHash) && this.blockTimestamp === other.blockTimestamp && - ((this.log instanceof PublicLog && other.log instanceof PublicLog) || - (this.log instanceof PrivateLog && other.log instanceof PrivateLog)) && - this.log.equals(other.log as any) + this.logData.length === other.logData.length && + this.logData.every((f, i) => f.equals(other.logData[i])) && + this.noteHashes.length === other.noteHashes.length && + this.noteHashes.every((h, i) => h.equals(other.noteHashes[i])) && + this.firstNullifier.equals(other.firstNullifier) ); } } diff --git 
a/yarn-project/stdlib/src/tests/factories.ts b/yarn-project/stdlib/src/tests/factories.ts index 1d50888685ab..050bbcfb69a9 100644 --- a/yarn-project/stdlib/src/tests/factories.ts +++ b/yarn-project/stdlib/src/tests/factories.ts @@ -131,6 +131,7 @@ import { PublicKeys, computeAddress } from '../keys/index.js'; import { ContractClassLog, ContractClassLogFields } from '../logs/index.js'; import { PrivateLog } from '../logs/private_log.js'; import { FlatPublicLogs, PublicLog } from '../logs/public_log.js'; +import { TxScopedL2Log } from '../logs/tx_scoped_l2_log.js'; import { CountedL2ToL1Message, L2ToL1Message, ScopedL2ToL1Message } from '../messaging/l2_to_l1_message.js'; import { ParityBasePrivateInputs } from '../parity/parity_base_private_inputs.js'; import { ParityPublicInputs } from '../parity/parity_public_inputs.js'; @@ -173,6 +174,7 @@ import { StateReference } from '../tx/state_reference.js'; import { TreeSnapshots } from '../tx/tree_snapshots.js'; import { TxConstantData } from '../tx/tx_constant_data.js'; import { TxContext } from '../tx/tx_context.js'; +import { TxHash } from '../tx/tx_hash.js'; import { TxRequest } from '../tx/tx_request.js'; import { Vector } from '../types/index.js'; import { VkData } from '../vks/index.js'; @@ -1696,3 +1698,46 @@ export async function makeAvmCircuitInputs( export function fr(n: number): Fr { return new Fr(BigInt(n)); } + +/** + * Creates a random TxScopedL2Log with private log data. + */ +export function randomTxScopedPrivateL2Log(opts?: { + tag?: Fr; + txHash?: TxHash; + blockNumber?: number; + blockTimestamp?: bigint; + noteHashes?: Fr[]; + firstNullifier?: Fr; +}) { + const log = PrivateLog.random(opts?.tag); + return new TxScopedL2Log( + opts?.txHash ?? TxHash.random(), + BlockNumber(opts?.blockNumber ?? 1), + opts?.blockTimestamp ?? 1n, + log.getEmittedFields(), + opts?.noteHashes ?? [Fr.random(), Fr.random()], + opts?.firstNullifier ?? 
Fr.random(), + ); +} + +/** + * Creates a random TxScopedL2Log with public log data. + */ +export async function randomTxScopedPublicL2Log(opts?: { + txHash?: TxHash; + blockNumber?: number; + blockTimestamp?: bigint; + noteHashes?: Fr[]; + firstNullifier?: Fr; +}) { + const log = await PublicLog.random(); + return new TxScopedL2Log( + opts?.txHash ?? TxHash.random(), + BlockNumber(opts?.blockNumber ?? 1), + opts?.blockTimestamp ?? 1n, + log.getEmittedFields(), + opts?.noteHashes ?? [Fr.random(), Fr.random()], + opts?.firstNullifier ?? Fr.random(), + ); +} diff --git a/yarn-project/stdlib/src/tx/tx_effect.ts b/yarn-project/stdlib/src/tx/tx_effect.ts index c0bbef9ed2a4..ad861091fb9d 100644 --- a/yarn-project/stdlib/src/tx/tx_effect.ts +++ b/yarn-project/stdlib/src/tx/tx_effect.ts @@ -214,12 +214,14 @@ export class TxEffect { maxEffects?: number; } = {}): Promise { const count = (max: number, num?: number) => num ?? Math.min(maxEffects ?? randomInt(max), max); + // Every tx effect must have at least 1 nullifier (the first nullifier is used for log indexing) + const countNullifiers = (max: number, num?: number) => Math.max(1, count(max, num)); return new TxEffect( RevertCode.random(), TxHash.random(), new Fr(Math.floor(Math.random() * 100_000)), makeTuple(count(MAX_NOTE_HASHES_PER_TX, numNoteHashes), Fr.random), - makeTuple(count(MAX_NULLIFIERS_PER_TX, numNullifiers), Fr.random), + makeTuple(countNullifiers(MAX_NULLIFIERS_PER_TX, numNullifiers), Fr.random), makeTuple(count(MAX_L2_TO_L1_MSGS_PER_TX, numL2ToL1Msgs), Fr.random), makeTuple(count(MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, numPublicDataWrites), PublicDataWrite.random), makeTuple(count(MAX_PRIVATE_LOGS_PER_TX, numPrivateLogs), () => PrivateLog.random()),