diff --git a/yarn-project/circuit-types/src/interfaces/aztec-node.ts b/yarn-project/circuit-types/src/interfaces/aztec-node.ts
index 427ff8d88ac..e1979488813 100644
--- a/yarn-project/circuit-types/src/interfaces/aztec-node.ts
+++ b/yarn-project/circuit-types/src/interfaces/aztec-node.ts
@@ -315,7 +315,8 @@ export interface AztecNode
   * Gets all logs that match any of the received tags (i.e. logs with their first field equal to a tag).
   * @param tags - The tags to filter the logs by.
   * @returns For each received tag, an array of matching logs and metadata (e.g. tx hash) is returned. An empty
-  array implies no logs match that tag.
+  * array implies no logs match that tag. There can be multiple logs for one tag because tag reuse can happen,
+  * e.g. when sending a note from multiple unsynced devices.
   */
  getLogsByTags(tags: Fr[]): Promise<TxScopedL2Log[][]>;

diff --git a/yarn-project/circuits.js/src/structs/indexed_tagging_secret.ts b/yarn-project/circuits.js/src/structs/indexed_tagging_secret.ts
index 4a884180eb8..37b98d62282 100644
--- a/yarn-project/circuits.js/src/structs/indexed_tagging_secret.ts
+++ b/yarn-project/circuits.js/src/structs/indexed_tagging_secret.ts
@@ -3,7 +3,11 @@ import { poseidon2Hash } from '@aztec/foundation/crypto';
 import { Fr } from '@aztec/foundation/fields';

 export class IndexedTaggingSecret {
-  constructor(public appTaggingSecret: Fr, public index: number) {}
+  constructor(public appTaggingSecret: Fr, public index: number) {
+    if (index < 0) {
+      throw new Error('IndexedTaggingSecret index out of bounds');
+    }
+  }

   toFields(): Fr[] {
     return [this.appTaggingSecret, new Fr(this.index)];
diff --git a/yarn-project/pxe/src/simulator_oracle/index.ts b/yarn-project/pxe/src/simulator_oracle/index.ts
index fdd3058ab1d..acd71477f52 100644
--- a/yarn-project/pxe/src/simulator_oracle/index.ts
+++ b/yarn-project/pxe/src/simulator_oracle/index.ts
@@ -38,7 +38,7 @@ import { type IncomingNoteDao } from '../database/incoming_note_dao.js';
 import { type PxeDatabase } from '../database/index.js';
 import { produceNoteDaos } from '../note_decryption_utils/produce_note_daos.js';
 import { getAcirSimulator } from '../simulator/index.js';
-import { getInitialIndexes, getLeftMostIndexedTaggingSecrets, getRightMostIndexes } from './tagging_utils.js';
+import { getIndexedTaggingSecretsForTheWindow, getInitialIndexesMap } from './tagging_utils.js';

 /**
  * A data oracle that provides information needed for simulating a transaction.
@@ -424,116 +424,137 @@ export class SimulatorOracle implements DBOracle
     // Half the size of the window we slide over the tagging secret indexes.
     const WINDOW_HALF_SIZE = 10;

+    // Ideally this algorithm would be implemented in noir, exposing its building blocks as oracles.
+    // However it is impossible at the moment due to the language not supporting nested slices.
+    // This nesting is necessary because for a given set of tags we don't
+    // know how many logs we will get back. Furthermore, these logs are of undetermined
+    // length, since we don't really know the note they correspond to until we decrypt them.
+
     const recipients = scopes ? scopes : await this.keyStore.getAccounts();
-    // A map of never-before-seen logs going from recipient address to logs
-    const newLogsMap = new Map<string, TxScopedL2Log[]>();
+    // A map of logs going from recipient address to logs. Note that the logs might have been processed before
+    // due to us having a sliding window that "looks back" for logs as well. (We look back as there is no guarantee
+    // that logs will be received ordered by a given tag index and that the tags won't be reused.)
+    const logsMap = new Map<string, TxScopedL2Log[]>();
     const contractName = await this.contractDataOracle.getDebugContractName(contractAddress);
     for (const recipient of recipients) {
-      const logs: TxScopedL2Log[] = [];
-      // Ideally this algorithm would be implemented in noir, exposing its building blocks as oracles.
-      // However it is impossible at the moment due to the language not supporting nested slices.
-      // This nesting is necessary because for a given set of tags we don't
-      // know how many logs we will get back. Furthermore, these logs are of undetermined
-      // length, since we don't really know the note they correspond to until we decrypt them.
-
-      // 1. Get all the secrets for the recipient and sender pairs (#9365)
-      const indexedTaggingSecrets = await this.#getIndexedTaggingSecretsForContacts(contractAddress, recipient);
-
-      // 1.1 Set up a sliding window with an offset. Chances are the sender might have messed up
-      // and inadvertently incremented their index without us getting any logs (for example, in case
-      // of a revert). If we stopped looking for logs the first time we don't receive any logs for a tag,
-      // we might never receive anything from that sender again.
-      // Also there's a possibility that we have advanced our index, but the sender has reused it,
-      // so we might have missed some logs. For these reasons, we have to look both back and ahead of
-      // the stored index.
-
-      // App tagging secrets along with an index in a window to check in the current iteration. Called current because
-      // this value will be updated as we iterate through the window.
-      let currentSecrets = getLeftMostIndexedTaggingSecrets(indexedTaggingSecrets, WINDOW_HALF_SIZE);
-      // Right-most indexes in a window to check stored in a key-value map where key is the app tagging secret
-      // and value is the index to check (the right-most index in the window).
-      const rightMostIndexesMap = getRightMostIndexes(indexedTaggingSecrets, WINDOW_HALF_SIZE);
+      const logsForRecipient: TxScopedL2Log[] = [];
+
+      // Get all the secrets for the recipient and sender pairs (#9365)
+      const secrets = await this.#getIndexedTaggingSecretsForContacts(contractAddress, recipient);
+
+      // We fetch logs for a window of indexes in a range:
+      //    <stored index - WINDOW_HALF_SIZE, stored index + WINDOW_HALF_SIZE>.
+      //
+      // We use this window approach because it could happen that a sender might have messed up and inadvertently
+      // incremented their index without us getting any logs (for example, in case of a revert). If we stopped looking
+      // for logs the first time we don't receive any logs for a tag, we might never receive anything from that sender again.
+      // Also there's a possibility that we have advanced our index, but the sender has reused it, so we might have missed
+      // some logs. For these reasons, we have to look both back and ahead of the stored index.
+      let secretsAndWindows = secrets.map(secret => {
+        return {
+          appTaggingSecret: secret.appTaggingSecret,
+          leftMostIndex: Math.max(0, secret.index - WINDOW_HALF_SIZE),
+          rightMostIndex: secret.index + WINDOW_HALF_SIZE,
+        };
+      });
+
+      // As we iterate we store the largest index we have seen for a given secret to later on store it in the db.
+      const newLargestIndexMapToStore: { [k: string]: number } = {};
+
       // The initial/unmodified indexes of the secrets stored in a key-value map where key is the app tagging secret.
-      const initialIndexesMap = getInitialIndexes(indexedTaggingSecrets);
-      // A map of indexes to increment for secrets for which we have found logs with an index higher than the one
-      // stored.
-      const indexesToIncrementMap: { [k: string]: number } = {};
-
-      while (currentSecrets.length > 0) {
-        // 2. Compute tags using the secrets, recipient and index. Obtain logs for each tag (#9380)
-        const currentTags = currentSecrets.map(secret =>
-          // We compute the siloed tags since we need the tags as they appear in the log.
+      const initialIndexesMap = getInitialIndexesMap(secrets);
+
+      while (secretsAndWindows.length > 0) {
+        const secretsForTheWholeWindow = getIndexedTaggingSecretsForTheWindow(secretsAndWindows);
+        const tagsForTheWholeWindow = secretsForTheWholeWindow.map(secret =>
           secret.computeSiloedTag(recipient, contractAddress),
         );
+        // We store the new largest indexes we find in the iteration in the following map to later on construct
+        // a new set of secrets and windows to fetch logs for.
+        const newLargestIndexMapForIteration: { [k: string]: number } = {};
+
         // Fetch the logs for the tags and iterate over them
-        const logsByTags = await this.aztecNode.getLogsByTags(currentTags);
-        const secretsWithNewIndex: IndexedTaggingSecret[] = [];
+        const logsByTags = await this.aztecNode.getLogsByTags(tagsForTheWholeWindow);
+
         logsByTags.forEach((logsByTag, logIndex) => {
-          const { appTaggingSecret: currentSecret, index: currentIndex } = currentSecrets[logIndex];
-          const currentSecretAsStr = currentSecret.toString();
-          this.log.debug(`Syncing logs for recipient ${recipient} at contract ${contractName}(${contractAddress})`, {
-            recipient,
-            secret: currentSecret,
-            index: currentIndex,
-            contractName,
-            contractAddress,
-          });
-          // 3.1. Append logs to the list and increment the index for the tags that have logs (#9380)
           if (logsByTag.length > 0) {
-            const newIndex = currentIndex + 1;
-            this.log.debug(
-              `Found ${logsByTag.length} logs as recipient ${recipient}. Incrementing index to ${newIndex} at contract ${contractName}(${contractAddress})`,
-              {
-                recipient,
-                secret: currentSecret,
-                newIndex,
-                contractName,
-                contractAddress,
-              },
-            );
-            logs.push(...logsByTag);
-
-            if (currentIndex >= initialIndexesMap[currentSecretAsStr]) {
-              // 3.2. We found an index higher than the stored/initial one so we update it in the db later on (#9380)
-              indexesToIncrementMap[currentSecretAsStr] = newIndex;
-              // 3.3. We found an index higher than the initial one so we slide the window.
-              rightMostIndexesMap[currentSecretAsStr] = currentIndex + WINDOW_HALF_SIZE;
-            }
+            // The logs for the given tag exist so we store them for later processing
+            logsForRecipient.push(...logsByTag);
+
+            // We retrieve the indexed tagging secret corresponding to the log as we need it to evaluate whether
+            // a new largest index has been found.
+            const secretCorrespondingToLog = secretsForTheWholeWindow[logIndex];
+            const initialIndex = initialIndexesMap[secretCorrespondingToLog.appTaggingSecret.toString()];
+
+            this.log.debug(`Found ${logsByTag.length} logs as recipient ${recipient}`, {
+              recipient,
+              secret: secretCorrespondingToLog.appTaggingSecret,
+              contractName,
+              contractAddress,
+            });
+
+            if (
+              secretCorrespondingToLog.index >= initialIndex &&
+              (newLargestIndexMapForIteration[secretCorrespondingToLog.appTaggingSecret.toString()] === undefined ||
+                secretCorrespondingToLog.index >=
+                  newLargestIndexMapForIteration[secretCorrespondingToLog.appTaggingSecret.toString()])
+            ) {
+              // We have found a new largest index so we store it for later processing (storing it in the db + fetching
+              // the difference of the window sets of the current and the next iteration)
+              newLargestIndexMapForIteration[secretCorrespondingToLog.appTaggingSecret.toString()] =
+                secretCorrespondingToLog.index + 1;
+
+              this.log.debug(
+                `Incrementing index to ${
+                  secretCorrespondingToLog.index + 1
+                } at contract ${contractName}(${contractAddress})`,
+              );
+            }
           }
-          // 3.4 Keep increasing the index (inside the window) temporarily for the tags that have no logs
-          // There's a chance the sender missed some and we want to catch up
-          if (currentIndex < rightMostIndexesMap[currentSecretAsStr]) {
-            const newTaggingSecret = new IndexedTaggingSecret(currentSecret, currentIndex + 1);
-            secretsWithNewIndex.push(newTaggingSecret);
-          }
         });

-        // We store the new indexes for the secrets that have logs with an index higher than the one stored.
-        await this.db.setTaggingSecretsIndexesAsRecipient(
-          Object.keys(indexesToIncrementMap).map(
-            secret => new IndexedTaggingSecret(Fr.fromHexString(secret), indexesToIncrementMap[secret]),
-          ),
-        );
+        // Now based on the new largest indexes we found, we will construct a new secrets and windows set to fetch logs
+        // for. Note that it's very unlikely that a new log from the current window would appear between the iterations,
+        // so we fetch the logs only for the difference of the window sets.
+        const newSecretsAndWindows = [];
+        for (const [appTaggingSecret, newIndex] of Object.entries(newLargestIndexMapForIteration)) {
+          const secret = secrets.find(secret => secret.appTaggingSecret.toString() === appTaggingSecret);
+          if (secret) {
+            newSecretsAndWindows.push({
+              appTaggingSecret: secret.appTaggingSecret,
+              // We set the leftmost index to the new index to avoid fetching the same logs again
+              leftMostIndex: newIndex,
+              rightMostIndex: newIndex + WINDOW_HALF_SIZE,
+            });
+
+            // We store the new largest index in the map to later store it in the db.
+            newLargestIndexMapToStore[appTaggingSecret] = newIndex;
+          } else {
+            throw new Error(
+              `Secret not found for appTaggingSecret ${appTaggingSecret}. This is a bug as it should never happen!`,
+            );
+          }
+        }

-        // We've processed all the current secret-index pairs so we proceed to the next iteration.
-        currentSecrets = secretsWithNewIndex;
+        // Now we set the new secrets and windows and proceed to the next iteration.
+        secretsAndWindows = newSecretsAndWindows;
       }

-      newLogsMap.set(
+      // We filter the logs by block number and store them in the map.
+      logsMap.set(
         recipient.toString(),
-        // Remove logs with a block number higher than the max block number
-        // Duplicates are likely to happen due to the sliding window, so we also filter them out
-        logs.filter(
-          (log, index, self) =>
-            // The following condition is true if the log has small enough block number and is unique
-            // --> the right side of the && is true if the index of the current log is the first occurrence
-            // of the log in the array --> that way we ensure uniqueness.
-            log.blockNumber <= maxBlockNumber && index === self.findIndex(otherLog => otherLog.equals(log)),
+        logsForRecipient.filter(log => log.blockNumber <= maxBlockNumber),
+      );
+
+      // At this point we have processed all the logs for the recipient so we store the new largest indexes in the db.
+      await this.db.setTaggingSecretsIndexesAsRecipient(
+        Object.entries(newLargestIndexMapToStore).map(
+          ([appTaggingSecret, index]) => new IndexedTaggingSecret(Fr.fromHexString(appTaggingSecret), index),
         ),
       );
     }
-    return newLogsMap;
+    return logsMap;
   }

   /**
diff --git a/yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts b/yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts
index b291362943c..722eaf8e944 100644
--- a/yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts
+++ b/yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts
@@ -235,7 +235,8 @@ describe('Simulator oracle', () => {
       const senderOffset = 0;
       generateMockLogs(senderOffset);
       const syncedLogs = await simulatorOracle.syncTaggedLogs(contractAddress, 3);
-      // We expect to have all logs intended for the recipient, one per sender + 1 with a duplicated tag for the first one + half of the logs for the second index
+      // We expect to have all logs intended for the recipient, one per sender + 1 with a duplicated tag for the first
+      // one + half of the logs for the second index
       expect(syncedLogs.get(recipient.address.toString())).toHaveLength(NUM_SENDERS + 1 + NUM_SENDERS / 2);

       // Recompute the secrets (as recipient) to ensure indexes are updated
@@ -254,9 +255,9 @@ describe('Simulator oracle', () => {
       expect(indexes).toHaveLength(NUM_SENDERS);
       expect(indexes).toEqual([1, 1, 1, 1, 1, 2, 2, 2, 2, 2]);

-      // We should have called the node 12 times:
-      // 2 times with logs (sliding the window) + 10 times with no results (window size)
-      expect(aztecNode.getLogsByTags.mock.calls.length).toBe(2 + SENDER_OFFSET_WINDOW_SIZE);
+      // We should have called the node 2 times:
+      // first time during the initial request, second time after pushing the edge of the window once
+      expect(aztecNode.getLogsByTags.mock.calls.length).toBe(2);
     });

     it('should sync tagged logs as senders', async () => {
@@ -334,9 +335,9 @@ describe('Simulator oracle', () => {
       expect(indexes).toHaveLength(NUM_SENDERS);
       expect(indexes).toEqual([6, 6, 6, 6, 6, 7, 7, 7, 7, 7]);

-      // We should have called the node 17 times:
-      // 5 times with no results (sender offset) + 2 times with logs (sliding the window) + 10 times with no results (window size)
-      expect(aztecNode.getLogsByTags.mock.calls.length).toBe(5 + 2 + SENDER_OFFSET_WINDOW_SIZE);
+      // We should have called the node 2 times:
+      // first time during the initial request, second time after pushing the edge of the window once
+      expect(aztecNode.getLogsByTags.mock.calls.length).toBe(2);
     });

     it("should sync tagged logs for which indexes are not updated if they're inside the window", async () => {
@@ -360,16 +361,16 @@
       expect(syncedLogs.get(recipient.address.toString())).toHaveLength(NUM_SENDERS + 1 + NUM_SENDERS / 2);

       // First sender should have 2 logs, but keep index 2 since they were built using the same tag
-      // Next 4 senders hould also have index 2 = offset + 1
+      // Next 4 senders should also have index 2 = offset + 1
       // Last 5 senders should have index 3 = offset + 2
       const indexes = await database.getTaggingSecretsIndexesAsRecipient(secrets);

       expect(indexes).toHaveLength(NUM_SENDERS);
       expect(indexes).toEqual([2, 2, 2, 2, 2, 3, 3, 3, 3, 3]);

-      // We should have called the node 13 times:
-      // 1 time without logs + 2 times with logs (sliding the window) + 10 times with no results (window size)
-      expect(aztecNode.getLogsByTags.mock.calls.length).toBe(3 + SENDER_OFFSET_WINDOW_SIZE);
+      // We should have called the node 2 times:
+      // first time during the initial request, second time after pushing the edge of the window once
+      expect(aztecNode.getLogsByTags.mock.calls.length).toBe(2);
     });

     it("should not sync tagged logs for which indexes are not updated if they're outside the window", async () => {
@@ -398,9 +399,8 @@ describe('Simulator oracle', () => {
       expect(indexes).toHaveLength(NUM_SENDERS);
       expect(indexes).toEqual([11, 11, 11, 11, 11, 11, 11, 11, 11, 11]);

-      // We should have called the node SENDER_OFFSET_WINDOW_SIZE + 1 (with logs) + SENDER_OFFSET_WINDOW_SIZE:
-      // Once for index 1 (NUM_SENDERS/2 logs) + 2 times the sliding window (no logs each time)
-      expect(aztecNode.getLogsByTags.mock.calls.length).toBe(1 + 2 * SENDER_OFFSET_WINDOW_SIZE);
+      // We should have called the node once, and only for the initial window
+      expect(aztecNode.getLogsByTags.mock.calls.length).toBe(1);
     });

     it('should sync tagged logs from scratch after a DB wipe', async () => {
@@ -422,8 +422,9 @@ describe('Simulator oracle', () => {
       // No logs should be synced since we start from index 2 = 12 - window_size
       expect(syncedLogs.get(recipient.address.toString())).toHaveLength(0);

-      // We should have called the node 21 times (window size + current_index + window size)
-      expect(aztecNode.getLogsByTags.mock.calls.length).toBe(2 * SENDER_OFFSET_WINDOW_SIZE + 1);
+      // Since no logs were synced, the window edge has not been pushed, so we should have called
+      // the node only once for the initial window
+      expect(aztecNode.getLogsByTags.mock.calls.length).toBe(1);

       aztecNode.getLogsByTags.mockClear();

@@ -433,16 +434,16 @@
       syncedLogs = await simulatorOracle.syncTaggedLogs(contractAddress, 3);

       // First sender should have 2 logs, but keep index 1 since they were built using the same tag
-      // Next 4 senders hould also have index 1 = offset + 1
+      // Next 4 senders should also have index 1 = offset + 1
       // Last 5 senders should have index 2 = offset + 2
       const indexes = await database.getTaggingSecretsIndexesAsRecipient(secrets);

       expect(indexes).toHaveLength(NUM_SENDERS);
       expect(indexes).toEqual([1, 1, 1, 1, 1, 2, 2, 2, 2, 2]);

-      // We should have called the node 12 times:
-      // 2 times with logs (sliding the window) + 10 times with no results (window size)
-      expect(aztecNode.getLogsByTags.mock.calls.length).toBe(2 + SENDER_OFFSET_WINDOW_SIZE);
+      // We should have called the node 2 times:
+      // first time during the initial request, second time after pushing the edge of the window once
+      expect(aztecNode.getLogsByTags.mock.calls.length).toBe(2);
     });

     it('should not sync tagged logs with a blockNumber > maxBlockNumber', async () => {
diff --git a/yarn-project/pxe/src/simulator_oracle/tagging_utils.ts b/yarn-project/pxe/src/simulator_oracle/tagging_utils.ts
index 52ed10a1cfc..82759eaf038 100644
--- a/yarn-project/pxe/src/simulator_oracle/tagging_utils.ts
+++ b/yarn-project/pxe/src/simulator_oracle/tagging_utils.ts
@@ -1,41 +1,15 @@
-import { IndexedTaggingSecret } from '@aztec/circuits.js';
+import { type Fr, IndexedTaggingSecret } from '@aztec/circuits.js';

-/**
- * Gets indexed tagging secrets with leftmost indexes.
- * @param indexedTaggingSecrets - The indexed tagging secrets to get the leftmost indexed tagging secrets from.
- * @param windowHalfSize- The half size of the window to slide over the tagging secret indexes.
- * @returns The leftmost indexed tagging secrets.
- */
-export function getLeftMostIndexedTaggingSecrets(
-  indexedTaggingSecrets: IndexedTaggingSecret[],
-  windowHalfSize: number,
+export function getIndexedTaggingSecretsForTheWindow(
+  secretsAndWindows: { appTaggingSecret: Fr; leftMostIndex: number; rightMostIndex: number }[],
 ): IndexedTaggingSecret[] {
-  return indexedTaggingSecrets.map(
-    indexedTaggingSecret =>
-      new IndexedTaggingSecret(
-        indexedTaggingSecret.appTaggingSecret,
-        Math.max(0, indexedTaggingSecret.index - windowHalfSize),
-      ),
-  );
-}
-
-/**
- * Creates a map from app tagging secret to rightmost index.
- * @param indexedTaggingSecrets - The indexed tagging secrets to get the rightmost indexes from.
- * @param windowHalfSize- The half size of the window to slide over the tagging secret indexes.
- * @returns The map from app tagging secret to rightmost index.
- */
-export function getRightMostIndexes(
-  indexedTaggingSecrets: IndexedTaggingSecret[],
-  windowHalfSize: number,
-): { [k: string]: number } {
-  const rightMostIndexes: { [k: string]: number } = {};
-
-  for (const indexedTaggingSecret of indexedTaggingSecrets) {
-    rightMostIndexes[indexedTaggingSecret.appTaggingSecret.toString()] = indexedTaggingSecret.index + windowHalfSize;
+  const secrets: IndexedTaggingSecret[] = [];
+  for (const secretAndWindow of secretsAndWindows) {
+    for (let i = secretAndWindow.leftMostIndex; i <= secretAndWindow.rightMostIndex; i++) {
+      secrets.push(new IndexedTaggingSecret(secretAndWindow.appTaggingSecret, i));
+    }
   }
-
-  return rightMostIndexes;
+  return secrets;
 }

 /**
@@ -43,7 +17,7 @@ export function getRightMostIndexes(
  * @param indexedTaggingSecrets - The indexed tagging secrets to get the initial indexes from.
  * @returns The map from app tagging secret to initial index.
 */
-export function getInitialIndexes(indexedTaggingSecrets: IndexedTaggingSecret[]): { [k: string]: number } {
+export function getInitialIndexesMap(indexedTaggingSecrets: IndexedTaggingSecret[]): { [k: string]: number } {
   const initialIndexes: { [k: string]: number } = {};

   for (const indexedTaggingSecret of indexedTaggingSecrets) {
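To illustrate the windowing introduced by this patch, the following is a minimal, self-contained TypeScript sketch of the window construction in syncTaggedLogs and of the expansion done by getIndexedTaggingSecretsForTheWindow. It is not part of the patch: Fr is replaced by a bigint stand-in and IndexedTaggingSecret is reduced to the two fields used here, so the snippet runs without the @aztec packages.

// Stand-in types (the real ones live in @aztec/circuits.js).
type Fr = bigint;

class IndexedTaggingSecret {
  constructor(public appTaggingSecret: Fr, public index: number) {
    if (index < 0) {
      throw new Error('IndexedTaggingSecret index out of bounds');
    }
  }
}

interface SecretAndWindow {
  appTaggingSecret: Fr;
  leftMostIndex: number;
  rightMostIndex: number;
}

const WINDOW_HALF_SIZE = 10;

// Build the initial window around the stored index of each secret, clamped at 0 on the left.
function buildWindows(secrets: IndexedTaggingSecret[]): SecretAndWindow[] {
  return secrets.map(secret => ({
    appTaggingSecret: secret.appTaggingSecret,
    leftMostIndex: Math.max(0, secret.index - WINDOW_HALF_SIZE),
    rightMostIndex: secret.index + WINDOW_HALF_SIZE,
  }));
}

// Expand every window into one IndexedTaggingSecret per index, i.e. the set of tags sent to the
// node in a single getLogsByTags round trip.
function expandWindows(secretsAndWindows: SecretAndWindow[]): IndexedTaggingSecret[] {
  const expanded: IndexedTaggingSecret[] = [];
  for (const { appTaggingSecret, leftMostIndex, rightMostIndex } of secretsAndWindows) {
    for (let i = leftMostIndex; i <= rightMostIndex; i++) {
      expanded.push(new IndexedTaggingSecret(appTaggingSecret, i));
    }
  }
  return expanded;
}

// A secret whose stored index is 3 yields indexes 0..13 (left edge clamped at 0), i.e. 14 tags in one batch.
const windows = buildWindows([new IndexedTaggingSecret(42n, 3)]);
console.log(expandWindows(windows).length); // 14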
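The reduced getLogsByTags call counts asserted in the updated tests follow from the loop structure: every iteration of the while loop issues exactly one batched getLogsByTags request covering all indexes of all active windows, and a follow-up window is only created for a secret when a log is found at or beyond its stored index. The sketch below is a simplified model of that behaviour, not the PXE implementation; syncRounds, fetchLogCounts and the Window type are illustrative names, and the node is replaced by a per-(secret, index) log counter.

const WINDOW_HALF_SIZE = 10;

type Window = { secret: string; left: number; right: number };

function syncRounds(
  storedIndexes: Map<string, number>,
  fetchLogCounts: (secret: string, index: number) => number,
): { rounds: number; newIndexes: Map<string, number> } {
  // Initial windows look both back and ahead of the stored index.
  let windows: Window[] = [...storedIndexes.entries()].map(([secret, index]) => ({
    secret,
    left: Math.max(0, index - WINDOW_HALF_SIZE),
    right: index + WINDOW_HALF_SIZE,
  }));
  const newIndexes = new Map(storedIndexes);
  let rounds = 0;

  while (windows.length > 0) {
    rounds++; // one batched getLogsByTags request per iteration
    const advanced: Window[] = [];
    for (const w of windows) {
      // Largest index in this window that has logs and is not behind the stored index.
      let largest: number | undefined;
      for (let i = w.left; i <= w.right; i++) {
        if (fetchLogCounts(w.secret, i) > 0 && i >= storedIndexes.get(w.secret)!) {
          largest = i;
        }
      }
      if (largest !== undefined) {
        const next = largest + 1;
        newIndexes.set(w.secret, next);
        // Only the not-yet-queried part of the pushed window is fetched in the next round.
        advanced.push({ secret: w.secret, left: next, right: next + WINDOW_HALF_SIZE });
      }
    }
    windows = advanced;
  }
  return { rounds, newIndexes };
}

// A sender used indexes 0 and 1 while we stored 0: round 1 finds them and pushes the window edge,
// round 2 finds nothing new, so the node is queried twice in total and the stored index becomes 2.
const logs = new Map([['secretA:0', 1], ['secretA:1', 1]]);
const result = syncRounds(new Map([['secretA', 0]]), (s, i) => logs.get(`${s}:${i}`) ?? 0);
console.log(result.rounds, result.newIndexes.get('secretA')); // 2 2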