From 3e97c2f0500b207caf940516520e5037279c1d0e Mon Sep 17 00:00:00 2001 From: benesjan <13470840+benesjan@users.noreply.github.com> Date: Mon, 22 Dec 2025 18:02:30 +0000 Subject: [PATCH] refactor!: optimizing `AztecNode::getLogsByTags` for new log sync algo Fixes https://linear.app/aztec-labs/issue/F-229/improve-log-sync-performance The return type of `getLogsByTags` did not contain block timestamp, which led to the new log sync algo being inefficient - we needed to perform a lot of calls to the `getBlockHeaderByHash` endpoint to get the timestamp. In this PR I include the timestamp in the return value. This is one of 3 PRs in which I clean up the `getLogsByTags` endpoint: 1. This is the first PR - including block timestamp, 2. in the follow-up PR I will type the tag arg of the function to be `SiloedTag`, 3. in the last PR I will drop pagination from this endpoint. Re point 3 - pagination here doesn't really make sense because we get more logs per tag only if there are multiple devices sending logs from a given sender to a recipient. 
--- .../archiver/kv_archiver_store/log_store.ts | 20 ++- .../foundation/src/schemas/schemas.ts | 8 ++ yarn-project/pxe/src/logs/log_service.test.ts | 5 + ...ate_logs_for_sender_recipient_pair.test.ts | 40 ++---- ..._private_logs_for_sender_recipient_pair.ts | 8 +- .../utils/find_highest_indexes.test.ts | 51 +++---- .../utils/find_highest_indexes.ts | 6 +- .../utils/load_logs_for_range.test.ts | 134 +++--------------- .../utils/load_logs_for_range.ts | 36 +---- .../sync/sync_sender_tagging_indexes.test.ts | 2 +- ...load_and_store_new_tagging_indexes.test.ts | 2 +- .../stdlib/src/logs/tx_scoped_l2_log.ts | 20 ++- yarn-project/stdlib/src/schemas/schemas.ts | 3 + 13 files changed, 113 insertions(+), 222 deletions(-) diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts index b6ff7cc9b79f..02b627486893 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts @@ -58,7 +58,15 @@ export class LogStore { const currentLogs = taggedLogs.get(tag.toString()) ?? []; currentLogs.push( - new TxScopedL2Log(txHash, dataStartIndexForTx, logIndex, block.number, blockHash, log).toBuffer(), + new TxScopedL2Log( + txHash, + dataStartIndexForTx, + logIndex, + block.number, + blockHash, + block.timestamp, + log, + ).toBuffer(), ); taggedLogs.set(tag.toString(), currentLogs); }); @@ -69,7 +77,15 @@ export class LogStore { const currentLogs = taggedLogs.get(tag.toString()) ?? 
[]; currentLogs.push( - new TxScopedL2Log(txHash, dataStartIndexForTx, logIndex, block.number, blockHash, log).toBuffer(), + new TxScopedL2Log( + txHash, + dataStartIndexForTx, + logIndex, + block.number, + blockHash, + block.timestamp, + log, + ).toBuffer(), ); taggedLogs.set(tag.toString(), currentLogs); }); diff --git a/yarn-project/foundation/src/schemas/schemas.ts b/yarn-project/foundation/src/schemas/schemas.ts index 1b84ade2be52..fb9a759cba0d 100644 --- a/yarn-project/foundation/src/schemas/schemas.ts +++ b/yarn-project/foundation/src/schemas/schemas.ts @@ -58,6 +58,14 @@ export const schemas = { .max(2 ** 32 - 1), ), + /** Coerces input to UInt64. */ + UInt64: z.union([z.bigint(), z.number(), z.string()]).pipe( + z.coerce + .bigint() + .min(0n) + .max(2n ** 64n - 1n), + ), + /** Accepts a hex string as a Buffer32 type. */ Buffer32: z.string().refine(isHex, 'Not a valid hex string').transform(Buffer32.fromString), diff --git a/yarn-project/pxe/src/logs/log_service.test.ts b/yarn-project/pxe/src/logs/log_service.test.ts index d0f4bff17f14..a418b7413c76 100644 --- a/yarn-project/pxe/src/logs/log_service.test.ts +++ b/yarn-project/pxe/src/logs/log_service.test.ts @@ -85,6 +85,7 @@ describe('LogService', () => { 0, MIN_BLOCK_NUMBER_OF_A_LOG, L2BlockHash.random(), + 0n, PrivateLog.random(tag.value), ); logs[tag.toString()] = [log]; @@ -101,6 +102,7 @@ describe('LogService', () => { 0, BlockNumber.ZERO, L2BlockHash.random(), + 0n, PrivateLog.random(tag.value), ); logs[tag.toString()].push(log); @@ -118,6 +120,7 @@ describe('LogService', () => { 0, blockNumber, L2BlockHash.random(), + 0n, PrivateLog.random(tag.value), ); logs[tag.toString()] = [log]; @@ -137,6 +140,7 @@ describe('LogService', () => { 0, MAX_BLOCK_NUMBER_OF_A_LOG, L2BlockHash.random(), + 0n, PrivateLog.random(tag.value), ); logs[tag.toString()] = [log]; @@ -553,6 +557,7 @@ describe('LogService', () => { randomInt(100), BlockNumber(randomInt(100)), L2BlockHash.random(), + 0n, log, ); diff --git 
a/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.test.ts b/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.test.ts index ede10640f48f..eb16f639fb18 100644 --- a/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.test.ts +++ b/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.test.ts @@ -35,13 +35,15 @@ describe('loadPrivateLogsForSenderRecipientPair', () => { return SiloedTag.compute(tag, app); } - function makeLog(blockHash: Fr, blockNumber: number, tag: Fr) { + // Move blockTimestamp before tag in args in makeLog + function makeLog(blockHash: Fr, blockNumber: number, blockTimestamp: bigint, tag: Fr) { return new TxScopedL2Log( TxHash.random(), 0, 0, BlockNumber(blockNumber), L2BlockHash.fromField(blockHash), + blockTimestamp, PrivateLog.random(tag), ); } @@ -54,7 +56,6 @@ describe('loadPrivateLogsForSenderRecipientPair', () => { beforeEach(async () => { aztecNode.getLogsByTags.mockReset(); - aztecNode.getBlockHeaderByHash.mockReset(); aztecNode.getL2Tips.mockReset(); aztecNode.getBlockHeader.mockReset(); taggingDataProvider = new NewRecipientTaggingDataProvider(await openTmpStore('test')); @@ -103,18 +104,13 @@ describe('loadPrivateLogsForSenderRecipientPair', () => { aztecNode.getLogsByTags.mockImplementation((tags: Fr[]) => { return Promise.all( tags.map(async (t: Fr) => - t.equals(logTag.value) ? [makeLog(await logBlockHeader.hash(), finalizedBlockNumber, logTag.value)] : [], + t.equals(logTag.value) + ? 
[makeLog(await logBlockHeader.hash(), finalizedBlockNumber, logBlockTimestamp, logTag.value)] + : [], ), ); }); - aztecNode.getBlockHeaderByHash.mockImplementation(async (hash: Fr) => { - if (hash.equals(await logBlockHeader.hash())) { - return logBlockHeader; - } - return undefined; - }); - const logs = await loadPrivateLogsForSenderRecipientPair( secret, app, @@ -146,18 +142,13 @@ describe('loadPrivateLogsForSenderRecipientPair', () => { aztecNode.getLogsByTags.mockImplementation((tags: Fr[]) => { return Promise.all( tags.map(async (t: Fr) => - t.equals(logTag.value) ? [makeLog(await logBlockHeader.hash(), finalizedBlockNumber, logTag.value)] : [], + t.equals(logTag.value) + ? [makeLog(await logBlockHeader.hash(), finalizedBlockNumber, logBlockTimestamp, logTag.value)] + : [], ), ); }); - aztecNode.getBlockHeaderByHash.mockImplementation(async (hash: Fr) => { - if (hash.equals(await logBlockHeader.hash())) { - return logBlockHeader; - } - return undefined; - }); - const logs = await loadPrivateLogsForSenderRecipientPair( secret, app, @@ -203,24 +194,15 @@ describe('loadPrivateLogsForSenderRecipientPair', () => { return Promise.all( tags.map(async (t: Fr) => { if (t.equals(log1Tag.value)) { - return [makeLog(await log1BlockHeader.hash(), finalizedBlockNumber, log1Tag.value)]; + return [makeLog(await log1BlockHeader.hash(), finalizedBlockNumber, log1BlockTimestamp, log1Tag.value)]; } else if (t.equals(log2Tag.value)) { - return [makeLog(await log2BlockHeader.hash(), finalizedBlockNumber, log2Tag.value)]; + return [makeLog(await log2BlockHeader.hash(), finalizedBlockNumber, log2BlockTimestamp, log2Tag.value)]; } return []; }), ); }); - aztecNode.getBlockHeaderByHash.mockImplementation(async (hash: Fr) => { - if (hash.equals(await log1BlockHeader.hash())) { - return log1BlockHeader; - } else if (hash.equals(await log2BlockHeader.hash())) { - return log2BlockHeader; - } - return undefined; - }); - const logs = await loadPrivateLogsForSenderRecipientPair( secret, app, 
diff --git a/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.ts b/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.ts index 0433cf083787..70ab480cd5d6 100644 --- a/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.ts +++ b/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.ts @@ -88,16 +88,16 @@ export async function loadPrivateLogsForSenderRecipientPair( while (true) { // Get private logs with their block timestamps and corresponding tagging indexes - const logsWithTimestampsAndIndexes = await loadLogsForRange(secret, app, aztecNode, start, end, anchorBlockNumber); + const privateLogsWithIndexes = await loadLogsForRange(secret, app, aztecNode, start, end, anchorBlockNumber); - if (logsWithTimestampsAndIndexes.length === 0) { + if (privateLogsWithIndexes.length === 0) { break; } - logs.push(...logsWithTimestampsAndIndexes.map(({ log }) => log)); + logs.push(...privateLogsWithIndexes.map(({ log }) => log)); const { highestAgedIndex, highestFinalizedIndex } = findHighestIndexes( - logsWithTimestampsAndIndexes, + privateLogsWithIndexes, currentTimestamp, finalizedBlockNumber, ); diff --git a/yarn-project/pxe/src/tagging/recipient_sync/utils/find_highest_indexes.test.ts b/yarn-project/pxe/src/tagging/recipient_sync/utils/find_highest_indexes.test.ts index 47761743522d..6a5239916cf8 100644 --- a/yarn-project/pxe/src/tagging/recipient_sync/utils/find_highest_indexes.test.ts +++ b/yarn-project/pxe/src/tagging/recipient_sync/utils/find_highest_indexes.test.ts @@ -9,13 +9,14 @@ import { findHighestIndexes } from './find_highest_indexes.js'; describe('findHighestIndexes', () => { const currentTimestamp = BigInt(Math.floor(Date.now() / 1000)); - function makeLog(blockNumber: number): TxScopedL2Log { + function makeLog(blockNumber: number, blockTimestamp: bigint): TxScopedL2Log { return new TxScopedL2Log( TxHash.random(), 0, 
0, BlockNumber(blockNumber), L2BlockHash.random(), + blockTimestamp, PrivateLog.random(), ); } @@ -23,13 +24,9 @@ describe('findHighestIndexes', () => { it('returns undefined for highestAgedIndex when no logs are at least 24 hours old', () => { const finalizedBlockNumber = 10; const blockTimestamp = currentTimestamp - 1n; // not aged - const log = makeLog(5); + const log = makeLog(5, blockTimestamp); - const result = findHighestIndexes( - [{ log, blockTimestamp, taggingIndex: 3 }], - currentTimestamp, - finalizedBlockNumber, - ); + const result = findHighestIndexes([{ log, taggingIndex: 3 }], currentTimestamp, finalizedBlockNumber); expect(result.highestAgedIndex).toBeUndefined(); expect(result.highestFinalizedIndex).toBe(3); @@ -38,13 +35,9 @@ describe('findHighestIndexes', () => { it('returns undefined for highestFinalizedIndex when no logs are in finalized blocks', () => { const finalizedBlockNumber = 5; const blockTimestamp = currentTimestamp - BigInt(MAX_INCLUDE_BY_TIMESTAMP_DURATION); - const log = makeLog(10); // block 10 > finalizedBlockNumber 5 + const log = makeLog(10, blockTimestamp); // block 10 > finalizedBlockNumber 5 - const result = findHighestIndexes( - [{ log, blockTimestamp, taggingIndex: 3 }], - currentTimestamp, - finalizedBlockNumber, - ); + const result = findHighestIndexes([{ log, taggingIndex: 3 }], currentTimestamp, finalizedBlockNumber); expect(result.highestAgedIndex).toBe(3); expect(result.highestFinalizedIndex).toBeUndefined(); @@ -54,14 +47,14 @@ describe('findHighestIndexes', () => { const finalizedBlockNumber = 10; const blockTimestamp1 = currentTimestamp - BigInt(MAX_INCLUDE_BY_TIMESTAMP_DURATION) - 1000n; // aged const blockTimestamp2 = currentTimestamp - BigInt(MAX_INCLUDE_BY_TIMESTAMP_DURATION) - 500n; // aged - const log1 = makeLog(5); - const log2 = makeLog(6); + const log1 = makeLog(5, blockTimestamp1); + const log2 = makeLog(6, blockTimestamp2); const result = findHighestIndexes( [ - { log: log1, blockTimestamp: 
blockTimestamp1, taggingIndex: 2 }, - { log: log2, blockTimestamp: blockTimestamp2, taggingIndex: 5 }, - { log: log1, blockTimestamp: blockTimestamp1, taggingIndex: 3 }, + { log: log1, taggingIndex: 2 }, + { log: log2, taggingIndex: 5 }, + { log: log1, taggingIndex: 3 }, ], currentTimestamp, finalizedBlockNumber, @@ -74,15 +67,15 @@ describe('findHighestIndexes', () => { it('selects the highest index from multiple finalized logs', () => { const finalizedBlockNumber = 10; const blockTimestamp = currentTimestamp - 500n; // 500 seconds ago - not aged - const log1 = makeLog(5); - const log2 = makeLog(8); - const log3 = makeLog(10); // At finalized block number + const log1 = makeLog(5, blockTimestamp); + const log2 = makeLog(8, blockTimestamp); + const log3 = makeLog(10, blockTimestamp); // At finalized block number const result = findHighestIndexes( [ - { log: log1, blockTimestamp, taggingIndex: 2 }, - { log: log2, blockTimestamp, taggingIndex: 7 }, - { log: log3, blockTimestamp, taggingIndex: 1 }, + { log: log1, taggingIndex: 2 }, + { log: log2, taggingIndex: 7 }, + { log: log3, taggingIndex: 1 }, ], currentTimestamp, finalizedBlockNumber, @@ -99,11 +92,11 @@ describe('findHighestIndexes', () => { const recentTimestamp = currentTimestamp - 5000n; // Not aged const logs = [ - { log: makeLog(5), blockTimestamp: veryOldTimestamp, taggingIndex: 1 }, // Aged, finalized - { log: makeLog(8), blockTimestamp: oldTimestamp, taggingIndex: 5 }, // Aged, finalized - { log: makeLog(10), blockTimestamp: recentTimestamp, taggingIndex: 8 }, // Not aged, finalized - { log: makeLog(15), blockTimestamp: oldTimestamp, taggingIndex: 12 }, // Aged, not finalized - { log: makeLog(20), blockTimestamp: recentTimestamp, taggingIndex: 15 }, // Not aged, not finalized + { log: makeLog(5, veryOldTimestamp), taggingIndex: 1 }, // Aged, finalized + { log: makeLog(8, oldTimestamp), taggingIndex: 5 }, // Aged, finalized + { log: makeLog(10, recentTimestamp), taggingIndex: 8 }, // Not aged, finalized 
+ { log: makeLog(15, oldTimestamp), taggingIndex: 12 }, // Aged, not finalized + { log: makeLog(20, recentTimestamp), taggingIndex: 15 }, // Not aged, not finalized ]; const result = findHighestIndexes(logs, currentTimestamp, finalizedBlockNumber); diff --git a/yarn-project/pxe/src/tagging/recipient_sync/utils/find_highest_indexes.ts b/yarn-project/pxe/src/tagging/recipient_sync/utils/find_highest_indexes.ts index 4bb8ee2f317c..7306931a3634 100644 --- a/yarn-project/pxe/src/tagging/recipient_sync/utils/find_highest_indexes.ts +++ b/yarn-project/pxe/src/tagging/recipient_sync/utils/find_highest_indexes.ts @@ -5,15 +5,15 @@ import type { TxScopedL2Log } from '@aztec/stdlib/logs'; * Finds the highest aged and the highest finalized tagging indexes. */ export function findHighestIndexes( - logsWithTimestampsAndIndexes: Array<{ log: TxScopedL2Log; blockTimestamp: bigint; taggingIndex: number }>, + privateLogsWithIndexes: Array<{ log: TxScopedL2Log; taggingIndex: number }>, currentTimestamp: bigint, finalizedBlockNumber: number, ): { highestAgedIndex: number | undefined; highestFinalizedIndex: number | undefined } { let highestAgedIndex = undefined; let highestFinalizedIndex = undefined; - for (const { log, blockTimestamp, taggingIndex } of logsWithTimestampsAndIndexes) { - const ageInSeconds = currentTimestamp - blockTimestamp; + for (const { log, taggingIndex } of privateLogsWithIndexes) { + const ageInSeconds = currentTimestamp - log.blockTimestamp; if ( ageInSeconds >= BigInt(MAX_INCLUDE_BY_TIMESTAMP_DURATION) && diff --git a/yarn-project/pxe/src/tagging/recipient_sync/utils/load_logs_for_range.test.ts b/yarn-project/pxe/src/tagging/recipient_sync/utils/load_logs_for_range.test.ts index 6881ea403f20..96c95d3355d3 100644 --- a/yarn-project/pxe/src/tagging/recipient_sync/utils/load_logs_for_range.test.ts +++ b/yarn-project/pxe/src/tagging/recipient_sync/utils/load_logs_for_range.test.ts @@ -29,13 +29,14 @@ describe('loadLogsForRange', () => { return 
SiloedTag.compute(tag, app); } - function makeLog(txHash: TxHash, blockHash: Fr, blockNumber: number, tag: SiloedTag) { + function makeLog(txHash: TxHash, blockHash: Fr, blockNumber: number, blockTimestamp: bigint, tag: SiloedTag) { return new TxScopedL2Log( txHash, 0, 0, BlockNumber(blockNumber), L2BlockHash.fromField(blockHash), + blockTimestamp, PrivateLog.random(tag.value), ); } @@ -48,7 +49,6 @@ describe('loadLogsForRange', () => { beforeEach(() => { aztecNode.getLogsByTags.mockReset(); - aztecNode.getBlockHeaderByHash.mockReset(); }); it('returns empty array when no logs found for the given window', async () => { @@ -70,25 +70,19 @@ describe('loadLogsForRange', () => { aztecNode.getLogsByTags.mockImplementation(async (tags: Fr[]) => { const blockHash = await blockHeader.hash(); - const privateLog = makeLog(txHash, blockHash, blockNumber, tag); + const privateLog = makeLog(txHash, blockHash, blockNumber, timestamp, tag); const publicLog = new TxScopedL2Log( TxHash.random(), 0, 0, BlockNumber(blockNumber), L2BlockHash.fromField(blockHash), + timestamp, await PublicLog.random(), ); return tags.map((t: Fr) => (t.equals(tag.value) ? 
[privateLog, publicLog] : [])); }); - aztecNode.getBlockHeaderByHash.mockImplementation(async (hash: Fr) => { - if (hash.equals(await blockHeader.hash())) { - return blockHeader; - } - return undefined; - }); - const result = await loadLogsForRange(secret, app, aztecNode, 0, 10, NON_INTERFERING_ANCHOR_BLOCK_NUMBER); expect(result).toHaveLength(1); @@ -114,33 +108,24 @@ describe('loadLogsForRange', () => { return Promise.all( tags.map(async (t: Fr) => { if (t.equals(tag1.value)) { - return [makeLog(txHash1, await blockHeader1.hash(), blockNumber1, tag1)]; + return [makeLog(txHash1, await blockHeader1.hash(), blockNumber1, timestamp1, tag1)]; } else if (t.equals(tag2.value)) { - return [makeLog(txHash2, await blockHeader2.hash(), blockNumber2, tag2)]; + return [makeLog(txHash2, await blockHeader2.hash(), blockNumber2, timestamp2, tag2)]; } return []; }), ); }); - aztecNode.getBlockHeaderByHash.mockImplementation(async (hash: Fr) => { - if (hash.equals(await blockHeader1.hash())) { - return blockHeader1; - } else if (hash.equals(await blockHeader2.hash())) { - return blockHeader2; - } - return undefined; - }); - const result = await loadLogsForRange(secret, app, aztecNode, 0, 10, NON_INTERFERING_ANCHOR_BLOCK_NUMBER); expect(result).toHaveLength(2); const resultByIndex = result.sort((a, b) => a.taggingIndex - b.taggingIndex); expect(resultByIndex[0].taggingIndex).toBe(index1); - expect(resultByIndex[0].blockTimestamp).toBe(timestamp1); + expect(resultByIndex[0].log.blockTimestamp).toBe(timestamp1); expect(resultByIndex[0].log.txHash.equals(txHash1)).toBe(true); expect(resultByIndex[1].taggingIndex).toBe(index2); - expect(resultByIndex[1].blockTimestamp).toBe(timestamp2); + expect(resultByIndex[1].log.blockTimestamp).toBe(timestamp2); expect(resultByIndex[1].log.txHash.equals(txHash2)).toBe(true); }); @@ -161,23 +146,14 @@ describe('loadLogsForRange', () => { tags.map(async (t: Fr) => t.equals(tag.value) ? 
[ - makeLog(txHash1, await blockHeader1.hash(), blockNumber1, tag), - makeLog(txHash2, await blockHeader2.hash(), blockNumber2, tag), + makeLog(txHash1, await blockHeader1.hash(), blockNumber1, timestamp1, tag), + makeLog(txHash2, await blockHeader2.hash(), blockNumber2, timestamp2, tag), ] : [], ), ); }); - aztecNode.getBlockHeaderByHash.mockImplementation(async (hash: Fr) => { - if (hash.equals(await blockHeader1.hash())) { - return blockHeader1; - } else if (hash.equals(await blockHeader2.hash())) { - return blockHeader2; - } - return undefined; - }); - const result = await loadLogsForRange(secret, app, aztecNode, 0, 10, NON_INTERFERING_ANCHOR_BLOCK_NUMBER); expect(result).toHaveLength(2); @@ -203,31 +179,23 @@ describe('loadLogsForRange', () => { const blockHash = await blockHeader.hash(); return tags.map((t: Fr) => { if (t.equals(tag1.value)) { - return [makeLog(txHash1, blockHash, blockNumber, tag1)]; + return [makeLog(txHash1, blockHash, blockNumber, timestamp, tag1)]; } else if (t.equals(tag2.value)) { - return [makeLog(txHash2, blockHash, blockNumber, tag2)]; + return [makeLog(txHash2, blockHash, blockNumber, timestamp, tag2)]; } return []; }); }); - aztecNode.getBlockHeaderByHash.mockImplementation(async (hash: Fr) => { - if (hash.equals(await blockHeader.hash())) { - return blockHeader; - } - return undefined; - }); - const result = await loadLogsForRange(secret, app, aztecNode, 0, 10, NON_INTERFERING_ANCHOR_BLOCK_NUMBER); expect(result).toHaveLength(2); - // Should only fetch block header once for the same block - expect(aztecNode.getBlockHeaderByHash).toHaveBeenCalledTimes(1); + const resultByIndex = result.sort((a, b) => a.taggingIndex - b.taggingIndex); expect(resultByIndex[0].taggingIndex).toBe(index1); - expect(resultByIndex[0].blockTimestamp).toBe(timestamp); + expect(resultByIndex[0].log.blockTimestamp).toBe(timestamp); expect(resultByIndex[1].taggingIndex).toBe(index2); - expect(resultByIndex[1].blockTimestamp).toBe(timestamp); + 
expect(resultByIndex[1].log.blockTimestamp).toBe(timestamp); }); it('respects start (inclusive) and end (exclusive) boundaries', async () => { @@ -245,21 +213,14 @@ describe('loadLogsForRange', () => { const blockHash = await blockHeader.hash(); return tags.map((t: Fr) => { if (t.equals(tagAtStart.value)) { - return [makeLog(txHashAtStart, blockHash, 5, tagAtStart)]; + return [makeLog(txHashAtStart, blockHash, 5, timestamp, tagAtStart)]; } else if (t.equals(tagAtEnd.value)) { - return [makeLog(txHashAtEnd, blockHash, 6, tagAtEnd)]; + return [makeLog(txHashAtEnd, blockHash, 6, timestamp, tagAtEnd)]; } return []; }); }); - aztecNode.getBlockHeaderByHash.mockImplementation(async (hash: Fr) => { - if (hash.equals(await blockHeader.hash())) { - return blockHeader; - } - return undefined; - }); - const result = await loadLogsForRange(secret, app, aztecNode, start, end, NON_INTERFERING_ANCHOR_BLOCK_NUMBER); // Should only include log at start (inclusive), not at end (exclusive) @@ -268,50 +229,6 @@ describe('loadLogsForRange', () => { expect(result[0].log.txHash.equals(txHashAtStart)).toBe(true); }); - it('ignores logs from reorged blocks', async () => { - const txHashReorged = TxHash.random(); - const txHashValid = TxHash.random(); - const blockHashReorged = L2BlockHash.random(); - const blockNumberReorged = 5; - const blockNumberValid = 6; - const index1 = 3; - const index2 = 4; - const timestamp = 2000n; - const tag1 = await computeSiloedTagForIndex(index1); - const tag2 = await computeSiloedTagForIndex(index2); - const blockHeaderValid = makeBlockHeader(1, { timestamp }); - - aztecNode.getLogsByTags.mockImplementation((tags: Fr[]) => { - return Promise.all( - tags.map(async (t: Fr) => { - if (t.equals(tag1.value)) { - return [makeLog(txHashReorged, Fr.fromBuffer(blockHashReorged.toBuffer()), blockNumberReorged, tag1)]; - } else if (t.equals(tag2.value)) { - return [makeLog(txHashValid, await blockHeaderValid.hash(), blockNumberValid, tag2)]; - } - return []; - }), - 
); - }); - - aztecNode.getBlockHeaderByHash.mockImplementation(async (hash: Fr) => { - // Block header for reorged block is not found (returns undefined) - if (hash.equals(Fr.fromBuffer(blockHashReorged.toBuffer()))) { - return undefined; - } else if (hash.equals(await blockHeaderValid.hash())) { - return blockHeaderValid; - } - return undefined; - }); - - const result = await loadLogsForRange(secret, app, aztecNode, 0, 10, NON_INTERFERING_ANCHOR_BLOCK_NUMBER); - - // Should only include the log from the valid block, ignoring the log from the reorged block - expect(result).toHaveLength(1); - expect(result[0].log.txHash.equals(txHashValid)).toBe(true); - expect(result[0].taggingIndex).toBe(index2); - }); - it('filters out logs from blocks after anchor block', async () => { const anchorBlockNumber = 10; @@ -327,26 +244,15 @@ describe('loadLogsForRange', () => { tags.map(async (t: Fr) => t.equals(tag.value) ? [ - makeLog(TxHash.random(), await blockHeaderBefore.hash(), anchorBlockNumber - 1, tag), - makeLog(TxHash.random(), await blockHeaderAtAnchor.hash(), anchorBlockNumber, tag), - makeLog(TxHash.random(), await blockHeaderAfter.hash(), anchorBlockNumber + 1, tag), + makeLog(TxHash.random(), await blockHeaderBefore.hash(), anchorBlockNumber - 1, timestamp, tag), + makeLog(TxHash.random(), await blockHeaderAtAnchor.hash(), anchorBlockNumber, timestamp, tag), + makeLog(TxHash.random(), await blockHeaderAfter.hash(), anchorBlockNumber + 1, timestamp, tag), ] : [], ), ); }); - aztecNode.getBlockHeaderByHash.mockImplementation(async (hash: Fr) => { - if (hash.equals(await blockHeaderBefore.hash())) { - return blockHeaderBefore; - } else if (hash.equals(await blockHeaderAtAnchor.hash())) { - return blockHeaderAtAnchor; - } else if (hash.equals(await blockHeaderAfter.hash())) { - return blockHeaderAfter; - } - return undefined; - }); - const result = await loadLogsForRange(secret, app, aztecNode, 0, 10, BlockNumber(anchorBlockNumber)); // Should only include logs from 
blocks at or before the anchor block number diff --git a/yarn-project/pxe/src/tagging/recipient_sync/utils/load_logs_for_range.ts b/yarn-project/pxe/src/tagging/recipient_sync/utils/load_logs_for_range.ts index 74c5d703e625..959f0a21c8b9 100644 --- a/yarn-project/pxe/src/tagging/recipient_sync/utils/load_logs_for_range.ts +++ b/yarn-project/pxe/src/tagging/recipient_sync/utils/load_logs_for_range.ts @@ -1,5 +1,4 @@ import type { BlockNumber } from '@aztec/foundation/branded-types'; -import { Fr } from '@aztec/foundation/curves/bn254'; import type { AztecAddress } from '@aztec/stdlib/aztec-address'; import type { AztecNode } from '@aztec/stdlib/interfaces/client'; import type { DirectionalAppTaggingSecret, PreTag, TxScopedL2Log } from '@aztec/stdlib/logs'; @@ -11,8 +10,6 @@ import { Tag } from '../../tag.js'; * Gets private logs with their corresponding block timestamps and tagging indexes for the given index range, `app` and * `secret`. At most load logs from blocks up to and including `anchorBlockNumber`. `start` is inclusive and `end` is * exclusive. - * - * TODO: Optimize Aztec Node API such that this function performs only a single call. 
*/ export async function loadLogsForRange( secret: DirectionalAppTaggingSecret, @@ -21,7 +18,7 @@ export async function loadLogsForRange( start: number, end: number, anchorBlockNumber: BlockNumber, -): Promise> { +): Promise> { // Derive tags for the window const preTags: PreTag[] = Array(end - start) .fill(0) @@ -46,34 +43,5 @@ export async function loadLogsForRange( } } - // If no private logs were obtained, return an empty array - if (privateLogsWithIndexes.length === 0) { - return []; - } - - // Get unique block hashes - const uniqueBlockHashes = Array.from(new Set(privateLogsWithIndexes.map(({ log }) => log.blockHash.toBigInt()))).map( - hash => new Fr(hash), - ); - - // Get block headers for all unique block hashes - const blockHeaders = await Promise.all(uniqueBlockHashes.map(blockHash => aztecNode.getBlockHeaderByHash(blockHash))); - - // Return logs with their corresponding block timestamps and tagging indexes - const result: Array<{ log: TxScopedL2Log; blockTimestamp: bigint; taggingIndex: number }> = []; - for (const { log, taggingIndex } of privateLogsWithIndexes) { - // TODO: Unify types of blockHash on log and on block header so we don't need to do this ugly conversion. - const logBlockHash = log.blockHash.toBigInt(); - const logBlockHeader = blockHeaders[uniqueBlockHashes.findIndex(hash => hash.toBigInt() === logBlockHash)]; - if (!logBlockHeader) { - // If the block header for a log cannot be found, it indicates a reorg occurred between `getLogsByTags` and - // `getBlockHeaderByHash`. It is correct and safe to ignore such logs because they have been pruned from - // the chain. PXE block synchronizer will reset any state following the reorg block. 
- continue; - } - - result.push({ log, blockTimestamp: logBlockHeader.globalVariables.timestamp, taggingIndex }); - } - - return result; + return privateLogsWithIndexes; } diff --git a/yarn-project/pxe/src/tagging/sync/sync_sender_tagging_indexes.test.ts b/yarn-project/pxe/src/tagging/sync/sync_sender_tagging_indexes.test.ts index 3a93bae46312..50c8678b53d8 100644 --- a/yarn-project/pxe/src/tagging/sync/sync_sender_tagging_indexes.test.ts +++ b/yarn-project/pxe/src/tagging/sync/sync_sender_tagging_indexes.test.ts @@ -27,7 +27,7 @@ describe('syncSenderTaggingIndexes', () => { } function makeLog(txHash: TxHash, tag: Fr) { - return new TxScopedL2Log(txHash, 0, 0, BlockNumber(0), L2BlockHash.random(), PrivateLog.random(tag)); + return new TxScopedL2Log(txHash, 0, 0, BlockNumber(0), L2BlockHash.random(), 0n, PrivateLog.random(tag)); } async function setUp() { diff --git a/yarn-project/pxe/src/tagging/sync/utils/load_and_store_new_tagging_indexes.test.ts b/yarn-project/pxe/src/tagging/sync/utils/load_and_store_new_tagging_indexes.test.ts index b740c752abba..f9f536358377 100644 --- a/yarn-project/pxe/src/tagging/sync/utils/load_and_store_new_tagging_indexes.test.ts +++ b/yarn-project/pxe/src/tagging/sync/utils/load_and_store_new_tagging_indexes.test.ts @@ -28,7 +28,7 @@ describe('loadAndStoreNewTaggingIndexes', () => { } function makeLog(txHash: TxHash, tag: Fr) { - return new TxScopedL2Log(txHash, 0, 0, BlockNumber(0), L2BlockHash.random(), PrivateLog.random(tag)); + return new TxScopedL2Log(txHash, 0, 0, BlockNumber(0), L2BlockHash.random(), 0n, PrivateLog.random(tag)); } beforeAll(async () => { diff --git a/yarn-project/stdlib/src/logs/tx_scoped_l2_log.ts b/yarn-project/stdlib/src/logs/tx_scoped_l2_log.ts index b9fb478a1824..2c9b8931c679 100644 --- a/yarn-project/stdlib/src/logs/tx_scoped_l2_log.ts +++ b/yarn-project/stdlib/src/logs/tx_scoped_l2_log.ts @@ -1,10 +1,12 @@ import { BlockNumber, BlockNumberSchema } from '@aztec/foundation/branded-types'; -import { 
BufferReader, boolToBuffer, numToUInt32BE } from '@aztec/foundation/serialize'; +import { BufferReader, bigintToUInt64BE, boolToBuffer, numToUInt32BE } from '@aztec/foundation/serialize'; import { z } from 'zod'; import { L2BlockHash } from '../block/block_hash.js'; +import { schemas } from '../schemas/schemas.js'; import { TxHash } from '../tx/tx_hash.js'; +import type { UInt64 } from '../types/shared.js'; import { PrivateLog } from './private_log.js'; import { PublicLog } from './public_log.js'; @@ -33,6 +35,10 @@ export class TxScopedL2Log { * The block this log is included in */ public blockHash: L2BlockHash, + /* + * The timestamp of the block this log is included in + */ + public blockTimestamp: UInt64, /* * The log data as either a PrivateLog or PublicLog */ @@ -51,11 +57,12 @@ export class TxScopedL2Log { logIndexInTx: z.number(), blockNumber: BlockNumberSchema, blockHash: L2BlockHash.schema, + blockTimestamp: schemas.UInt64, log: z.union([PrivateLog.schema, PublicLog.schema]), }) .transform( - ({ txHash, dataStartIndexForTx, logIndexInTx, blockNumber, blockHash, log }) => - new TxScopedL2Log(txHash, dataStartIndexForTx, logIndexInTx, blockNumber, blockHash, log), + ({ txHash, dataStartIndexForTx, logIndexInTx, blockNumber, blockHash, blockTimestamp, log }) => + new TxScopedL2Log(txHash, dataStartIndexForTx, logIndexInTx, blockNumber, blockHash, blockTimestamp, log), ); } @@ -66,6 +73,7 @@ export class TxScopedL2Log { numToUInt32BE(this.logIndexInTx), numToUInt32BE(this.blockNumber), this.blockHash.toBuffer(), + bigintToUInt64BE(this.blockTimestamp), boolToBuffer(this.isFromPublic), this.log.toBuffer(), ]); @@ -78,15 +86,16 @@ export class TxScopedL2Log { const logIndexInTx = reader.readNumber(); const blockNumber = BlockNumber(reader.readNumber()); const blockHash = reader.readObject(L2BlockHash); + const blockTimestamp = reader.readUInt64(); const isFromPublic = reader.readBoolean(); const log = isFromPublic ? 
PublicLog.fromBuffer(reader) : PrivateLog.fromBuffer(reader); - return new TxScopedL2Log(txHash, dataStartIndexForTx, logIndexInTx, blockNumber, blockHash, log); + return new TxScopedL2Log(txHash, dataStartIndexForTx, logIndexInTx, blockNumber, blockHash, blockTimestamp, log); } static async random(isFromPublic = Math.random() < 0.5) { const log = isFromPublic ? await PublicLog.random() : PrivateLog.random(); - return new TxScopedL2Log(TxHash.random(), 1, 1, BlockNumber(1), L2BlockHash.random(), log); + return new TxScopedL2Log(TxHash.random(), 1, 1, BlockNumber(1), L2BlockHash.random(), BigInt(1), log); } equals(other: TxScopedL2Log) { @@ -96,6 +105,7 @@ export class TxScopedL2Log { this.logIndexInTx === other.logIndexInTx && this.blockNumber === other.blockNumber && this.blockHash.equals(other.blockHash) && + this.blockTimestamp === other.blockTimestamp && ((this.log instanceof PublicLog && other.log instanceof PublicLog) || (this.log instanceof PrivateLog && other.log instanceof PrivateLog)) && this.log.equals(other.log as any) diff --git a/yarn-project/stdlib/src/schemas/schemas.ts b/yarn-project/stdlib/src/schemas/schemas.ts index bc8636e27539..f32e0b2f6a34 100644 --- a/yarn-project/stdlib/src/schemas/schemas.ts +++ b/yarn-project/stdlib/src/schemas/schemas.ts @@ -40,6 +40,9 @@ export const schemas = { /** Coerces input to UInt32. */ UInt32: foundationSchemas.UInt32, + /** Coerces input to UInt64. */ + UInt64: foundationSchemas.UInt64, + /** Accepts a hex string as a Buffer32 type. */ Buffer32: foundationSchemas.Buffer32 as ZodFor,