diff --git a/yarn-project/accounts/package.json b/yarn-project/accounts/package.json index d4d3d8ab9168..0817971899bb 100644 --- a/yarn-project/accounts/package.json +++ b/yarn-project/accounts/package.json @@ -52,6 +52,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/archiver/package.json b/yarn-project/archiver/package.json index c1571c1429c4..c0ebc2d04e69 100644 --- a/yarn-project/archiver/package.json +++ b/yarn-project/archiver/package.json @@ -43,6 +43,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/aztec-faucet/package.json b/yarn-project/aztec-faucet/package.json index 54eb3313a3d9..6d8ccc7a0220 100644 --- a/yarn-project/aztec-faucet/package.json +++ b/yarn-project/aztec-faucet/package.json @@ -37,6 +37,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/aztec-node/package.json b/yarn-project/aztec-node/package.json index f2bbd2972dec..30d19cff4e9a 100644 --- a/yarn-project/aztec-node/package.json +++ b/yarn-project/aztec-node/package.json @@ -37,6 +37,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/aztec-node/src/aztec-node/server.test.ts b/yarn-project/aztec-node/src/aztec-node/server.test.ts index a0b23c4ce5d2..2ae5ce692532 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.test.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.test.ts @@ -140,12 +140,14 @@ describe('aztec node', () => { maxBlockNumber: new MaxBlockNumber(true, new Fr(1)), getSize: () => 1, toBuffer: () => Fr.ZERO.toBuffer(), + equals: () => true, }; validMaxBlockNumberMetadata.data.forRollup!.rollupValidationRequests = { maxBlockNumber: new MaxBlockNumber(true, new Fr(5)), getSize: () => 1, toBuffer: () => Fr.ZERO.toBuffer(), + equals: () => true, }; lastBlockNumber = 3; diff --git a/yarn-project/aztec.js/package.json b/yarn-project/aztec.js/package.json index 40ff9add3382..98f57facfd73 100644 --- a/yarn-project/aztec.js/package.json +++ b/yarn-project/aztec.js/package.json @@ -55,6 +55,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/aztec/package.json b/yarn-project/aztec/package.json index 1063b182abbb..573ef5698a2d 100644 --- a/yarn-project/aztec/package.json +++ b/yarn-project/aztec/package.json @@ -94,6 +94,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/bb-prover/package.json b/yarn-project/bb-prover/package.json index 3fc9ae17d27d..7bc4f99ecc02 100644 --- a/yarn-project/bb-prover/package.json +++ b/yarn-project/bb-prover/package.json @@ -41,6 +41,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/bb-prover/src/prover/bb_prover.ts b/yarn-project/bb-prover/src/prover/bb_prover.ts index 03a2bfa79293..175fa563fb73 100644 --- a/yarn-project/bb-prover/src/prover/bb_prover.ts +++ b/yarn-project/bb-prover/src/prover/bb_prover.ts @@ -5,6 +5,7 @@ import { type PublicKernelNonTailRequest, type PublicKernelTailRequest, type ServerCircuitProver, + TubeProofAndVK, makePublicInputsAndRecursiveProof, } from '@aztec/circuit-types'; import { 
type CircuitProvingStats, type CircuitWitnessGenerationStats } from '@aztec/circuit-types/stats';
@@ -662,9 +663,7 @@ export class BBNativeRollupProver implements ServerCircuitProver {
     return await this.runInDirectory(operation);
   }
 
-  public async getTubeProof(
-    input: TubeInputs,
-  ): Promise<{ tubeVK: VerificationKeyData; tubeProof: RecursiveProof<typeof TUBE_PROOF_LENGTH> }> {
+  public async getTubeProof(input: TubeInputs): Promise<TubeProofAndVK> {
     // this probably is gonna need to call client ivc
     const operation = async (bbWorkingDirectory: string) => {
       logger.debug(`createTubeProof: ${bbWorkingDirectory}`);
@@ -688,7 +687,7 @@ export class BBNativeRollupProver implements ServerCircuitProver {
         } fields`,
       );
-      return { tubeVK, tubeProof };
+      return { verificationKey: tubeVK, proof: tubeProof, type: 'tube-proof' };
     };
     return await this.runInDirectory(operation);
   }
diff --git a/yarn-project/bb-prover/src/test/test_circuit_prover.ts b/yarn-project/bb-prover/src/test/test_circuit_prover.ts
index 63a5d7cf1baa..1fd89431cc9e 100644
--- a/yarn-project/bb-prover/src/test/test_circuit_prover.ts
+++ b/yarn-project/bb-prover/src/test/test_circuit_prover.ts
@@ -4,6 +4,7 @@ import {
   type PublicKernelNonTailRequest,
   type PublicKernelTailRequest,
   type ServerCircuitProver,
+  TubeProofAndVK,
   makePublicInputsAndRecursiveProof,
 } from '@aztec/circuit-types';
 import {
@@ -261,13 +262,11 @@ export class TestCircuitProver implements ServerCircuitProver {
     );
   }
 
-  public async getTubeProof(
-    _tubeInput: TubeInputs,
-  ): Promise<{ tubeVK: VerificationKeyData; tubeProof: RecursiveProof<typeof TUBE_PROOF_LENGTH> }> {
+  public async getTubeProof(_tubeInput: TubeInputs): Promise<TubeProofAndVK> {
     await this.delay();
     return {
-      tubeVK: VerificationKeyData.makeFake(),
-      tubeProof: makeEmptyRecursiveProof(TUBE_PROOF_LENGTH),
+      verificationKey: VerificationKeyData.makeFake(),
+      proof: makeEmptyRecursiveProof(TUBE_PROOF_LENGTH),
     };
   }
 
diff --git a/yarn-project/bot/package.json b/yarn-project/bot/package.json
index 6e187ffd065b..ac515b882187 100644
--- a/yarn-project/bot/package.json
+++ b/yarn-project/bot/package.json
@@ -31,6 +31,9 @@
         "parser": {
           "syntax": "typescript",
           "decorators": true
+        },
+        "transform": {
+          "decoratorVersion": "2022-03"
+        }
         }
       }
diff --git a/yarn-project/builder/package.json b/yarn-project/builder/package.json
index acde8d5092b1..597b83807a11 100644
--- a/yarn-project/builder/package.json
+++ b/yarn-project/builder/package.json
@@ -47,6 +47,9 @@
         "parser": {
           "syntax": "typescript",
           "decorators": true
+        },
+        "transform": {
+          "decoratorVersion": "2022-03"
+        }
         }
       }
diff --git a/yarn-project/circuit-types/package.json b/yarn-project/circuit-types/package.json
index d72351c6ecbc..e1e5019f4b73 100644
--- a/yarn-project/circuit-types/package.json
+++ b/yarn-project/circuit-types/package.json
@@ -42,6 +42,9 @@
         "parser": {
           "syntax": "typescript",
           "decorators": true
+        },
+        "transform": {
+          "decoratorVersion": "2022-03"
+        }
         }
       }
diff --git a/yarn-project/circuit-types/src/interfaces/merkle_tree_operations.ts b/yarn-project/circuit-types/src/interfaces/merkle_tree_operations.ts
index 3a72f58d7310..b456e3b98897 100644
--- a/yarn-project/circuit-types/src/interfaces/merkle_tree_operations.ts
+++ b/yarn-project/circuit-types/src/interfaces/merkle_tree_operations.ts
@@ -3,7 +3,7 @@ import { createDebugLogger } from '@aztec/foundation/log';
 import { type IndexedTreeLeafPreimage } from '@aztec/foundation/trees';
 
 import { type L2Block } from '../l2_block.js';
-import { type MerkleTreeId } from '../merkle_tree_id.js';
+import { type MerkleTreeId, type TreeHeights } from '../merkle_tree_id.js';
 import { type SiblingPath } from '../sibling_path/sibling_path.js';
 
 /**
@@ -198,11 +198,11 @@
    * @param subtreeHeight - Height of the subtree.
    * @returns The witness data for the leaves to be updated when inserting the new ones.
    */
-  batchInsert(
+  batchInsert(
     treeId: ID,
     leaves: Buffer[],
     subtreeHeight: number,
-  ): Promise>;
+  ): Promise>;
 }
 
 /** Operations on merkle trees world state that can modify the underlying store. */
diff --git a/yarn-project/circuit-types/src/interfaces/proving-job.ts b/yarn-project/circuit-types/src/interfaces/proving-job.ts
index 33127e4c5351..3cc0455cb3e8 100644
--- a/yarn-project/circuit-types/src/interfaces/proving-job.ts
+++ b/yarn-project/circuit-types/src/interfaces/proving-job.ts
@@ -10,6 +10,7 @@ import {
   type KernelCircuitPublicInputs,
   type MergeRollupInputs,
   type NESTED_RECURSIVE_PROOF_LENGTH,
+  type ParityPublicInputs,
   type PrivateKernelEmptyInputData,
   type Proof,
   type PublicKernelCircuitPublicInputs,
@@ -26,21 +27,25 @@ import {
 
 import type { PublicKernelNonTailRequest, PublicKernelTailRequest } from '../tx/processed_tx.js';
 
+type PublicInputs<T> = { inputs: T };
+
 export type AvmProofAndVerificationKey = {
   proof: Proof;
   verificationKey: AvmVerificationKeyData;
 };
 
-export type PublicInputsAndRecursiveProof<T> = {
-  inputs: T;
+export type PublicInputsAndRecursiveProof<T> = PublicInputs<T> & NestedRecursiveProofAndVK;
+
+export type PublicInputsAndTubeProof<T> = PublicInputs<T> & TubeProofAndVK;
+
+export type NestedRecursiveProofAndVK = {
   proof: RecursiveProof<typeof NESTED_RECURSIVE_PROOF_LENGTH>;
   verificationKey: VerificationKeyData;
 };
 
-export type PublicInputsAndTubeProof<T> = {
-  inputs: T;
-  proof: RecursiveProof<typeof TUBE_PROOF_LENGTH>;
+export type TubeProofAndVK = {
   verificationKey: VerificationKeyData;
+  proof: RecursiveProof<typeof TUBE_PROOF_LENGTH>;
 };
 
 export function makePublicInputsAndRecursiveProof<T>(
@@ -76,7 +81,8 @@ export enum ProvingRequestType {
   BASE_PARITY,
   ROOT_PARITY,
 
-  // Recursive Client IVC verification to connect private -> public or rollup
+
+  /** Recursive Client IVC verification to connect private to public or rollup */
   TUBE_PROOF,
 }
 
@@ -132,6 +138,25 @@ export type ProvingRequest =
       inputs: TubeInputs;
     };
 
+export type SimulationRequestPublicInputs = {
+  [ProvingRequestType.PRIVATE_KERNEL_EMPTY]: KernelCircuitPublicInputs;
+  [ProvingRequestType.PUBLIC_VM]: null;
+
+  [ProvingRequestType.PUBLIC_KERNEL_NON_TAIL]: PublicKernelCircuitPublicInputs;
+  [ProvingRequestType.PUBLIC_KERNEL_TAIL]: KernelCircuitPublicInputs;
+
+  [ProvingRequestType.BASE_ROLLUP]: BaseOrMergeRollupPublicInputs;
+  [ProvingRequestType.MERGE_ROLLUP]: BaseOrMergeRollupPublicInputs;
+  [ProvingRequestType.BLOCK_ROOT_ROLLUP]: BlockRootOrBlockMergePublicInputs;
+  [ProvingRequestType.BLOCK_MERGE_ROLLUP]: BlockRootOrBlockMergePublicInputs;
+  [ProvingRequestType.ROOT_ROLLUP]: RootRollupPublicInputs;
+
+  [ProvingRequestType.BASE_PARITY]: ParityPublicInputs;
+  [ProvingRequestType.ROOT_PARITY]: ParityPublicInputs;
+
+  [ProvingRequestType.TUBE_PROOF]: null;
+};
+
 export type ProvingRequestPublicInputs = {
   [ProvingRequestType.PRIVATE_KERNEL_EMPTY]: PublicInputsAndRecursiveProof<KernelCircuitPublicInputs>;
   [ProvingRequestType.PUBLIC_VM]: AvmProofAndVerificationKey;
@@ -147,10 +172,12 @@ export type ProvingRequestPublicInputs = {
   [ProvingRequestType.BASE_PARITY]: RootParityInput<typeof RECURSIVE_PROOF_LENGTH>;
   [ProvingRequestType.ROOT_PARITY]: RootParityInput<typeof NESTED_RECURSIVE_PROOF_LENGTH>;
 
-  // TODO(#7369) properly structure tube proof flow
-  [ProvingRequestType.TUBE_PROOF]: { tubeVK: VerificationKeyData; tubeProof: RecursiveProof<typeof TUBE_PROOF_LENGTH> };
+
+  [ProvingRequestType.TUBE_PROOF]: TubeProofAndVK;
 };
 
+export type SimulationRequestResult<T extends ProvingRequestType> = SimulationRequestPublicInputs[T];
+
 export type ProvingRequestResult<T extends ProvingRequestType> = ProvingRequestPublicInputs[T];
 
 export interface ProvingJobSource {
diff --git a/yarn-project/circuit-types/src/interfaces/server_circuit_prover.ts b/yarn-project/circuit-types/src/interfaces/server_circuit_prover.ts
index 802cc5ea983d..f19cd16731b5 100644
--- a/yarn-project/circuit-types/src/interfaces/server_circuit_prover.ts
+++ b/yarn-project/circuit-types/src/interfaces/server_circuit_prover.ts
@@ -4,6 +4,7 @@ import {
   type PublicInputsAndTubeProof,
   type PublicKernelNonTailRequest,
   type PublicKernelTailRequest,
+  type TubeProofAndVK,
   type Tx,
 } from '@aztec/circuit-types';
 import {
@@ -20,13 +21,11 @@ import {
   type PrivateKernelEmptyInputData,
   type PublicKernelCircuitPublicInputs,
   type RECURSIVE_PROOF_LENGTH,
-  type RecursiveProof,
   type RootParityInput,
   type RootParityInputs,
   type RootRollupInputs,
   type RootRollupPublicInputs,
   type TubeInputs,
-  type VerificationKeyData,
 } from '@aztec/circuits.js';
 
 /**
@@ -67,11 +66,7 @@ export interface ServerCircuitProver {
    * Get a recursively verified client IVC proof (making it a compatible honk proof for the rest of the rollup).
    * @param input - Input to the circuit.
    */
-  getTubeProof(
-    tubeInput: TubeInputs,
-    signal?: AbortSignal,
-    epochNumber?: number,
-  ): Promise<{ tubeVK: VerificationKeyData; tubeProof: RecursiveProof }>;
+  getTubeProof(tubeInput: TubeInputs, signal?: AbortSignal, epochNumber?: number): Promise<TubeProofAndVK>;
 
   /**
    * Creates a proof for the given input.
diff --git a/yarn-project/circuit-types/src/merkle_tree_id.ts b/yarn-project/circuit-types/src/merkle_tree_id.ts
index 25ab0897284a..785d00ad2a13 100644
--- a/yarn-project/circuit-types/src/merkle_tree_id.ts
+++ b/yarn-project/circuit-types/src/merkle_tree_id.ts
@@ -1,8 +1,13 @@
 import {
+  ARCHIVE_HEIGHT,
   ARCHIVE_TREE_ID,
   L1_TO_L2_MESSAGE_TREE_ID,
+  L1_TO_L2_MSG_TREE_HEIGHT,
+  NOTE_HASH_TREE_HEIGHT,
   NOTE_HASH_TREE_ID,
+  NULLIFIER_TREE_HEIGHT,
   NULLIFIER_TREE_ID,
+  PUBLIC_DATA_TREE_HEIGHT,
   PUBLIC_DATA_TREE_ID,
 } from '@aztec/circuits.js';
 
@@ -21,3 +26,17 @@ export enum MerkleTreeId {
 export const merkleTreeIds = () => {
   return Object.values(MerkleTreeId).filter((v): v is MerkleTreeId => !isNaN(Number(v)));
 };
+
+const TREE_HEIGHTS = {
+  [MerkleTreeId.NOTE_HASH_TREE]: NOTE_HASH_TREE_HEIGHT,
+  [MerkleTreeId.ARCHIVE]: ARCHIVE_HEIGHT,
+  [MerkleTreeId.L1_TO_L2_MESSAGE_TREE]: L1_TO_L2_MSG_TREE_HEIGHT,
+  [MerkleTreeId.NULLIFIER_TREE]: NULLIFIER_TREE_HEIGHT,
+  [MerkleTreeId.PUBLIC_DATA_TREE]: PUBLIC_DATA_TREE_HEIGHT,
+} as const;
+
+export type TreeHeights = typeof TREE_HEIGHTS;
+
+export function getTreeHeight<TID extends MerkleTreeId>(treeId: TID): TreeHeights[TID] {
+  return TREE_HEIGHTS[treeId];
+}
diff --git a/yarn-project/circuit-types/src/sibling_path/sibling_path.ts b/yarn-project/circuit-types/src/sibling_path/sibling_path.ts
index df22de9a56cf..ec0eb192b297 100644
--- a/yarn-project/circuit-types/src/sibling_path/sibling_path.ts
+++ b/yarn-project/circuit-types/src/sibling_path/sibling_path.ts
@@ -68,7 +68,7 @@ export class SiblingPath<N extends number> {
    * Returns the path buffer underlying the sibling path.
    * @returns The Buffer array representation of this object.
*/ - public toBufferArray(): Buffer[] { + public toBufferArray(): Tuple { return this.data; } diff --git a/yarn-project/circuit-types/src/tx/processed_tx.ts b/yarn-project/circuit-types/src/tx/processed_tx.ts index 4e73bf0561ca..335ed08f46a6 100644 --- a/yarn-project/circuit-types/src/tx/processed_tx.ts +++ b/yarn-project/circuit-types/src/tx/processed_tx.ts @@ -65,7 +65,7 @@ export type AvmProvingRequest = { kernelRequest: PublicKernelNonTailRequest; }; -export type PublicProvingRequest = AvmProvingRequest | PublicKernelRequest; +export type PublicProvingRequest = AvmProvingRequest | PublicKernelTailRequest; /** * Represents a tx that has been processed by the sequencer public processor, diff --git a/yarn-project/circuits.js/package.json b/yarn-project/circuits.js/package.json index 524b4a622104..52b1803cff1c 100644 --- a/yarn-project/circuits.js/package.json +++ b/yarn-project/circuits.js/package.json @@ -78,6 +78,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/circuits.js/src/structs/content_commitment.ts b/yarn-project/circuits.js/src/structs/content_commitment.ts index 92e0145bb0e7..1b8d415a19fd 100644 --- a/yarn-project/circuits.js/src/structs/content_commitment.ts +++ b/yarn-project/circuits.js/src/structs/content_commitment.ts @@ -48,6 +48,15 @@ export class ContentCommitment { return serialized; } + equals(other: ContentCommitment): boolean { + return ( + this.numTxs.equals(other.numTxs) && + this.txsEffectsHash.equals(other.txsEffectsHash) && + this.inHash.equals(other.inHash) && + this.outHash.equals(other.outHash) + ); + } + static fromBuffer(buffer: Buffer | BufferReader): ContentCommitment { const reader = BufferReader.asReader(buffer); diff --git a/yarn-project/circuits.js/src/structs/global_variables.ts b/yarn-project/circuits.js/src/structs/global_variables.ts index 50e88a61592c..1e8000ebf07f 100644 --- a/yarn-project/circuits.js/src/structs/global_variables.ts +++ b/yarn-project/circuits.js/src/structs/global_variables.ts @@ -150,4 +150,17 @@ export class GlobalVariables { this.gasFees.isEmpty() ); } + + equals(other: GlobalVariables) { + return ( + this.chainId.equals(other.chainId) && + this.version.equals(other.version) && + this.blockNumber.equals(other.blockNumber) && + this.slotNumber.equals(other.slotNumber) && + this.timestamp.equals(other.timestamp) && + this.coinbase.equals(other.coinbase) && + this.feeRecipient.equals(other.feeRecipient) && + this.gasFees.equals(other.gasFees) + ); + } } diff --git a/yarn-project/circuits.js/src/structs/header.ts b/yarn-project/circuits.js/src/structs/header.ts index dbdb09881982..121175023d44 100644 --- a/yarn-project/circuits.js/src/structs/header.ts +++ b/yarn-project/circuits.js/src/structs/header.ts @@ -65,6 +65,16 @@ export class Header { return Header.fromBuffer(this.toBuffer()); } + equals(other: Header): boolean { + return ( + this.lastArchive.equals(other.lastArchive) && + this.contentCommitment.equals(other.contentCommitment) && + this.state.equals(other.state) && + this.globalVariables.equals(other.globalVariables) && + this.totalFees.equals(other.totalFees) + ); + } + static fromBuffer(buffer: Buffer | BufferReader): Header { const reader = BufferReader.asReader(buffer); diff --git a/yarn-project/circuits.js/src/structs/kernel/combined_accumulated_data.ts b/yarn-project/circuits.js/src/structs/kernel/combined_accumulated_data.ts index 88ba4af36d32..fb7d3a0b89b2 100644 --- 
a/yarn-project/circuits.js/src/structs/kernel/combined_accumulated_data.ts +++ b/yarn-project/circuits.js/src/structs/kernel/combined_accumulated_data.ts @@ -70,6 +70,22 @@ export class CombinedAccumulatedData { public gasUsed: Gas, ) {} + equals(other: CombinedAccumulatedData) { + return ( + this.noteHashes.every((x, i) => x.equals(other.noteHashes[i])) && + this.nullifiers.every((x, i) => x.equals(other.nullifiers[i])) && + this.l2ToL1Msgs.every((x, i) => x.equals(other.l2ToL1Msgs[i])) && + this.noteEncryptedLogsHashes.every((x, i) => x.equals(other.noteEncryptedLogsHashes[i])) && + this.encryptedLogsHashes.every((x, i) => x.equals(other.encryptedLogsHashes[i])) && + this.unencryptedLogsHashes.every((x, i) => x.equals(other.unencryptedLogsHashes[i])) && + this.noteEncryptedLogPreimagesLength.equals(other.noteEncryptedLogPreimagesLength) && + this.encryptedLogPreimagesLength.equals(other.encryptedLogPreimagesLength) && + this.unencryptedLogPreimagesLength.equals(other.unencryptedLogPreimagesLength) && + this.publicDataUpdateRequests.every((x, i) => x.equals(other.publicDataUpdateRequests[i])) && + this.gasUsed.equals(other.gasUsed) + ); + } + getSize() { return ( arraySerializedSizeOfNonEmpty(this.noteHashes) + diff --git a/yarn-project/circuits.js/src/structs/kernel/combined_constant_data.ts b/yarn-project/circuits.js/src/structs/kernel/combined_constant_data.ts index a1c58d786787..425c1068046f 100644 --- a/yarn-project/circuits.js/src/structs/kernel/combined_constant_data.ts +++ b/yarn-project/circuits.js/src/structs/kernel/combined_constant_data.ts @@ -31,6 +31,15 @@ export class CombinedConstantData { public globalVariables: GlobalVariables, ) {} + equals(other: CombinedConstantData) { + return ( + this.historicalHeader.equals(other.historicalHeader) && + this.txContext.equals(other.txContext) && + this.vkTreeRoot.equals(other.vkTreeRoot) && + this.globalVariables.equals(other.globalVariables) + ); + } + toBuffer() { return serializeToBuffer(this.historicalHeader, this.txContext, this.vkTreeRoot, this.globalVariables); } diff --git a/yarn-project/circuits.js/src/structs/kernel/kernel_circuit_public_inputs.ts b/yarn-project/circuits.js/src/structs/kernel/kernel_circuit_public_inputs.ts index 3c001d198601..5fc6e58226b0 100644 --- a/yarn-project/circuits.js/src/structs/kernel/kernel_circuit_public_inputs.ts +++ b/yarn-project/circuits.js/src/structs/kernel/kernel_circuit_public_inputs.ts @@ -38,6 +38,17 @@ export class KernelCircuitPublicInputs { public feePayer: AztecAddress, ) {} + equals(other: KernelCircuitPublicInputs): boolean { + return ( + this.rollupValidationRequests.equals(other.rollupValidationRequests) && + this.end.equals(other.end) && + this.constants.equals(other.constants) && + this.startState.equals(other.startState) && + this.revertCode === other.revertCode && + this.feePayer.equals(other.feePayer) + ); + } + getNonEmptyNullifiers() { return this.end.nullifiers.filter(n => !n.isZero()); } diff --git a/yarn-project/circuits.js/src/structs/kernel/kernel_data.ts b/yarn-project/circuits.js/src/structs/kernel/kernel_data.ts index 855ad7f8866a..8eaff1f6e672 100644 --- a/yarn-project/circuits.js/src/structs/kernel/kernel_data.ts +++ b/yarn-project/circuits.js/src/structs/kernel/kernel_data.ts @@ -3,6 +3,7 @@ import { Fr } from '@aztec/foundation/fields'; import { BufferReader, type Tuple, serializeToBuffer } from '@aztec/foundation/serialize'; import { RECURSIVE_PROOF_LENGTH, VK_TREE_HEIGHT } from '../../constants.gen.js'; +import { MembershipWitness } from 
'../membership_witness.js'; import { RecursiveProof, makeEmptyRecursiveProof } from '../recursive_proof.js'; import { type UInt32 } from '../shared.js'; import { VerificationKeyData } from '../verification_key.js'; @@ -54,6 +55,17 @@ export class KernelData { ); } + static withEmptyProof(publicInputs: KernelCircuitPublicInputs) { + const emptyWitness = MembershipWitness.empty(VK_TREE_HEIGHT); + return new KernelData( + publicInputs, + makeEmptyRecursiveProof(RECURSIVE_PROOF_LENGTH), + VerificationKeyData.makeEmpty(), + Number(emptyWitness.leafIndex), + emptyWitness.siblingPath, + ); + } + toBuffer() { return serializeToBuffer(this.publicInputs, this.proof, this.vk, this.vkIndex, this.vkPath); } diff --git a/yarn-project/circuits.js/src/structs/kernel/public_accumulated_data.ts b/yarn-project/circuits.js/src/structs/kernel/public_accumulated_data.ts index e81eb230b009..f7e86333427c 100644 --- a/yarn-project/circuits.js/src/structs/kernel/public_accumulated_data.ts +++ b/yarn-project/circuits.js/src/structs/kernel/public_accumulated_data.ts @@ -114,6 +114,20 @@ export class PublicAccumulatedData { ); } + equals(other: PublicAccumulatedData) { + return ( + this.noteHashes.every((x, i) => x.equals(other.noteHashes[i])) && + this.nullifiers.every((x, i) => x.equals(other.nullifiers[i])) && + this.l2ToL1Msgs.every((x, i) => x.equals(other.l2ToL1Msgs[i])) && + this.noteEncryptedLogsHashes.every((x, i) => x.equals(other.noteEncryptedLogsHashes[i])) && + this.encryptedLogsHashes.every((x, i) => x.equals(other.encryptedLogsHashes[i])) && + this.unencryptedLogsHashes.every((x, i) => x.equals(other.unencryptedLogsHashes[i])) && + this.publicDataUpdateRequests.every((x, i) => x.equals(other.publicDataUpdateRequests[i])) && + this.publicCallStack.every((x, i) => x.equals(other.publicCallStack[i])) && + this.gasUsed.equals(other.gasUsed) + ); + } + [inspect.custom]() { // print out the non-empty fields return `PublicAccumulatedData { diff --git a/yarn-project/circuits.js/src/structs/kernel/public_call_data.ts b/yarn-project/circuits.js/src/structs/kernel/public_call_data.ts index 3f15a2e59030..5a1114f978df 100644 --- a/yarn-project/circuits.js/src/structs/kernel/public_call_data.ts +++ b/yarn-project/circuits.js/src/structs/kernel/public_call_data.ts @@ -23,6 +23,10 @@ export class PublicCallData { public readonly bytecodeHash: Fr, ) {} + withProof(proof: Proof) { + return new PublicCallData(this.callStackItem, proof, this.bytecodeHash); + } + toBuffer() { return serializeToBuffer(this.callStackItem, this.proof, this.bytecodeHash); } diff --git a/yarn-project/circuits.js/src/structs/kernel/public_kernel_circuit_private_inputs.ts b/yarn-project/circuits.js/src/structs/kernel/public_kernel_circuit_private_inputs.ts index 190593832db6..6cc23ce80fd3 100644 --- a/yarn-project/circuits.js/src/structs/kernel/public_kernel_circuit_private_inputs.ts +++ b/yarn-project/circuits.js/src/structs/kernel/public_kernel_circuit_private_inputs.ts @@ -18,6 +18,14 @@ export class PublicKernelCircuitPrivateInputs { public readonly publicCall: PublicCallData, ) {} + /** Returns a copy of these private inputs with the given proof for the previous kernel and avm. */ + withNestedProofs(avm: PublicCallData['proof'], ...kernel: Parameters) { + return new PublicKernelCircuitPrivateInputs( + this.previousKernel.withProof(...kernel), + this.publicCall.withProof(avm), + ); + } + /** * Serializes the object to a buffer. * @returns - Buffer representation of the object. 
diff --git a/yarn-project/circuits.js/src/structs/kernel/public_kernel_circuit_public_inputs.ts b/yarn-project/circuits.js/src/structs/kernel/public_kernel_circuit_public_inputs.ts index ca542885d3e8..ba01681e9a98 100644 --- a/yarn-project/circuits.js/src/structs/kernel/public_kernel_circuit_public_inputs.ts +++ b/yarn-project/circuits.js/src/structs/kernel/public_kernel_circuit_public_inputs.ts @@ -84,6 +84,18 @@ export class PublicKernelCircuitPublicInputs { return !this.publicTeardownCallStack[0].isEmpty(); } + equals(other: PublicKernelCircuitPublicInputs) { + return ( + this.validationRequests.equals(other.validationRequests) && + this.endNonRevertibleData.equals(other.endNonRevertibleData) && + this.end.equals(other.end) && + this.constants.equals(other.constants) && + this.revertCode === other.revertCode && + this.publicTeardownCallStack.every((item, i) => item.equals(other.publicTeardownCallStack[i])) && + this.feePayer.equals(other.feePayer) + ); + } + /** * Deserializes from a buffer or reader, corresponding to a write in cpp. * @param buffer - Buffer or reader to read from. diff --git a/yarn-project/circuits.js/src/structs/kernel/public_kernel_data.ts b/yarn-project/circuits.js/src/structs/kernel/public_kernel_data.ts index 7a1608f9a8e2..e6c9763ad70b 100644 --- a/yarn-project/circuits.js/src/structs/kernel/public_kernel_data.ts +++ b/yarn-project/circuits.js/src/structs/kernel/public_kernel_data.ts @@ -1,4 +1,4 @@ -import { makeTuple } from '@aztec/foundation/array'; +import { FieldsOf, makeTuple } from '@aztec/foundation/array'; import { Fr } from '@aztec/foundation/fields'; import { BufferReader, type Tuple, serializeToBuffer } from '@aztec/foundation/serialize'; @@ -64,6 +64,21 @@ export class PublicKernelData { ); } + /** Returns a copy of this struct with the given proof set. */ + withProof( + fields: Pick, 'proof' | 'vk'> & + Partial, 'clientIvcProof' | 'vkIndex' | 'vkPath'>>, + ) { + return new PublicKernelData( + this.publicInputs, + fields.proof, + fields.vk, + fields.vkIndex ?? 0, + fields.vkPath ?? makeTuple(VK_TREE_HEIGHT, Fr.zero), + fields.clientIvcProof, + ); + } + /** * Serialize this as a buffer. * @returns The buffer. 
diff --git a/yarn-project/circuits.js/src/structs/kernel/public_kernel_tail_circuit_private_inputs.ts b/yarn-project/circuits.js/src/structs/kernel/public_kernel_tail_circuit_private_inputs.ts index c0bb9b4d82ff..cf34c01c6392 100644 --- a/yarn-project/circuits.js/src/structs/kernel/public_kernel_tail_circuit_private_inputs.ts +++ b/yarn-project/circuits.js/src/structs/kernel/public_kernel_tail_circuit_private_inputs.ts @@ -63,6 +63,18 @@ export class PublicKernelTailCircuitPrivateInputs { return this.toBuffer().toString('hex'); } + withNestedProof(...kernel: Parameters) { + return new PublicKernelTailCircuitPrivateInputs( + this.previousKernel.withProof(...kernel), + this.noteHashReadRequestHints, + this.nullifierReadRequestHints, + this.nullifierNonExistentReadRequestHints, + this.l1ToL2MsgReadRequestHints, + this.publicDataHints, + this.startState, + ); + } + static fromString(str: string) { return PublicKernelTailCircuitPrivateInputs.fromBuffer(Buffer.from(str, 'hex')); } diff --git a/yarn-project/circuits.js/src/structs/log_hash.ts b/yarn-project/circuits.js/src/structs/log_hash.ts index a51086e5d113..deaf2a37a135 100644 --- a/yarn-project/circuits.js/src/structs/log_hash.ts +++ b/yarn-project/circuits.js/src/structs/log_hash.ts @@ -19,6 +19,10 @@ export class LogHash implements Ordered { return new LogHash(reader.readField(), reader.readU32(), reader.readField()); } + equals(other: LogHash): boolean { + return this.value.equals(other.value) && this.counter === other.counter && this.length.equals(other.length); + } + isEmpty() { return this.value.isZero() && this.length.isZero() && !this.counter; } @@ -56,6 +60,10 @@ export class ScopedLogHash implements Ordered { return this.logHash.value; } + equals(other: ScopedLogHash): boolean { + return this.logHash.equals(other.logHash) && this.contractAddress.equals(other.contractAddress); + } + toFields(): Fr[] { return [...this.logHash.toFields(), this.contractAddress.toField()]; } diff --git a/yarn-project/circuits.js/src/structs/max_block_number.ts b/yarn-project/circuits.js/src/structs/max_block_number.ts index 998fdd4a1a41..53db66d24810 100644 --- a/yarn-project/circuits.js/src/structs/max_block_number.ts +++ b/yarn-project/circuits.js/src/structs/max_block_number.ts @@ -19,6 +19,10 @@ export class MaxBlockNumber { public value: Fr, ) {} + equals(other: MaxBlockNumber) { + return this.isSome === other.isSome && this.value.equals(other.value); + } + /** * Serialize as a buffer. * @returns The buffer. 
diff --git a/yarn-project/circuits.js/src/structs/membership_witness.ts b/yarn-project/circuits.js/src/structs/membership_witness.ts index fd623300be17..502ef07177dc 100644 --- a/yarn-project/circuits.js/src/structs/membership_witness.ts +++ b/yarn-project/circuits.js/src/structs/membership_witness.ts @@ -56,7 +56,10 @@ export class MembershipWitness { return new MembershipWitness(pathSize, leafIndex, arr); } - static fromBufferArray(leafIndex: bigint, siblingPath: Tuple): MembershipWitness { + static fromBufferArray( + leafIndex: bigint, + siblingPath: Tuple, + ): MembershipWitness { return new MembershipWitness( siblingPath.length as N, leafIndex, diff --git a/yarn-project/circuits.js/src/structs/note_hash.ts b/yarn-project/circuits.js/src/structs/note_hash.ts index 0ddd44a80217..b3f83959a136 100644 --- a/yarn-project/circuits.js/src/structs/note_hash.ts +++ b/yarn-project/circuits.js/src/structs/note_hash.ts @@ -7,6 +7,10 @@ import { type Ordered } from '../interfaces/index.js'; export class NoteHash { constructor(public value: Fr, public counter: number) {} + equals(other: NoteHash) { + return this.value.equals(other.value) && this.counter === other.counter; + } + toFields(): Fr[] { return [this.value, new Fr(this.counter)]; } @@ -53,6 +57,10 @@ export class ScopedNoteHash implements Ordered { return this.noteHash.value; } + equals(other: ScopedNoteHash) { + return this.noteHash.equals(other.noteHash) && this.contractAddress.equals(other.contractAddress); + } + toFields(): Fr[] { return [...this.noteHash.toFields(), this.contractAddress.toField()]; } diff --git a/yarn-project/circuits.js/src/structs/nullifier.ts b/yarn-project/circuits.js/src/structs/nullifier.ts index 7f1e73477e1d..61736125cffd 100644 --- a/yarn-project/circuits.js/src/structs/nullifier.ts +++ b/yarn-project/circuits.js/src/structs/nullifier.ts @@ -16,6 +16,10 @@ export class Nullifier implements Ordered { return new Nullifier(reader.readField(), reader.readU32(), reader.readField()); } + equals(other: Nullifier): boolean { + return this.value.equals(other.value) && this.counter === other.counter && this.noteHash.equals(other.noteHash); + } + isEmpty() { return this.value.isZero() && !this.counter && this.noteHash.isZero(); } diff --git a/yarn-project/circuits.js/src/structs/parity/base_parity_inputs.ts b/yarn-project/circuits.js/src/structs/parity/base_parity_inputs.ts index 292432538cc9..f6e2253aabd9 100644 --- a/yarn-project/circuits.js/src/structs/parity/base_parity_inputs.ts +++ b/yarn-project/circuits.js/src/structs/parity/base_parity_inputs.ts @@ -11,15 +11,23 @@ export class BaseParityInputs { public readonly vkTreeRoot: Fr, ) {} - public static fromSlice( + public static sliceMessages( array: Tuple, index: number, - vkTreeRoot: Fr, - ): BaseParityInputs { + ): Tuple { const start = index * NUM_MSGS_PER_BASE_PARITY; const end = start + NUM_MSGS_PER_BASE_PARITY; const msgs = array.slice(start, end); - return new BaseParityInputs(msgs as Tuple, vkTreeRoot); + return msgs as Tuple; + } + + public static fromSlice( + array: Tuple, + index: number, + vkTreeRoot: Fr, + ): BaseParityInputs { + const msgs = BaseParityInputs.sliceMessages(array, index); + return new BaseParityInputs(msgs, vkTreeRoot); } /** Serializes the inputs to a buffer. 
*/ diff --git a/yarn-project/circuits.js/src/structs/parity/parity_public_inputs.ts b/yarn-project/circuits.js/src/structs/parity/parity_public_inputs.ts index 2d909fe4a886..94c658821fcc 100644 --- a/yarn-project/circuits.js/src/structs/parity/parity_public_inputs.ts +++ b/yarn-project/circuits.js/src/structs/parity/parity_public_inputs.ts @@ -68,4 +68,12 @@ export class ParityPublicInputs { static fromString(str: string) { return ParityPublicInputs.fromBuffer(Buffer.from(str, 'hex')); } + + public equals(other: ParityPublicInputs) { + return ( + this.shaRoot.equals(other.shaRoot) && + this.convertedRoot.equals(other.convertedRoot) && + this.vkTreeRoot.equals(other.vkTreeRoot) + ); + } } diff --git a/yarn-project/circuits.js/src/structs/parity/root_parity_input.ts b/yarn-project/circuits.js/src/structs/parity/root_parity_input.ts index 87e1f7b36095..8f5244d8b6ca 100644 --- a/yarn-project/circuits.js/src/structs/parity/root_parity_input.ts +++ b/yarn-project/circuits.js/src/structs/parity/root_parity_input.ts @@ -1,10 +1,11 @@ +import { makeTuple } from '@aztec/foundation/array'; import { Fr } from '@aztec/foundation/fields'; import { BufferReader, type Tuple, serializeToBuffer } from '@aztec/foundation/serialize'; import { type FieldsOf } from '@aztec/foundation/types'; import { VK_TREE_HEIGHT } from '../../constants.gen.js'; import { RecursiveProof } from '../recursive_proof.js'; -import { VerificationKeyAsFields } from '../verification_key.js'; +import { VerificationKeyAsFields, VerificationKeyData } from '../verification_key.js'; import { ParityPublicInputs } from './parity_public_inputs.js'; export class RootParityInput { @@ -27,6 +28,10 @@ export class RootParityInput { return this.toBuffer().toString('hex'); } + hasEmptyProof() { + return this.proof.isEmpty(); + } + static from( fields: FieldsOf>, ): RootParityInput { @@ -56,4 +61,17 @@ export class RootParityInput { ): RootParityInput { return RootParityInput.fromBuffer(Buffer.from(str, 'hex'), expectedSize); } + + /** Creates a struct with empty proof and verification key. Used for simulation. 
*/ + static withEmptyProof( + publicInputs: ParityPublicInputs, + proofLength: PROOF_LENGTH, + ): RootParityInput { + return new RootParityInput( + RecursiveProof.empty(proofLength), + VerificationKeyData.makeEmpty().keyAsFields, + makeTuple(VK_TREE_HEIGHT, Fr.zero), + publicInputs, + ); + } } diff --git a/yarn-project/circuits.js/src/structs/partial_state_reference.ts b/yarn-project/circuits.js/src/structs/partial_state_reference.ts index af0722edd576..9197e6116a69 100644 --- a/yarn-project/circuits.js/src/structs/partial_state_reference.ts +++ b/yarn-project/circuits.js/src/structs/partial_state_reference.ts @@ -21,6 +21,14 @@ export class PartialStateReference { return this.noteHashTree.getSize() + this.nullifierTree.getSize() + this.publicDataTree.getSize(); } + equals(other: PartialStateReference) { + return ( + this.noteHashTree.equals(other.noteHashTree) && + this.nullifierTree.equals(other.nullifierTree) && + this.publicDataTree.equals(other.publicDataTree) + ); + } + static fromBuffer(buffer: Buffer | BufferReader): PartialStateReference { const reader = BufferReader.asReader(buffer); return new PartialStateReference( diff --git a/yarn-project/circuits.js/src/structs/public_call_request.ts b/yarn-project/circuits.js/src/structs/public_call_request.ts index 2d53efb37ead..2c3359aac91a 100644 --- a/yarn-project/circuits.js/src/structs/public_call_request.ts +++ b/yarn-project/circuits.js/src/structs/public_call_request.ts @@ -30,6 +30,10 @@ export class PublicCallRequest { return serializeToBuffer(this.item, this.counter); } + equals(other: PublicCallRequest): boolean { + return this.item.equals(other.item) && this.counter === other.counter; + } + /** * Deserialize this from a buffer. * @param buffer - The bufferable type from which to deserialize. diff --git a/yarn-project/circuits.js/src/structs/public_call_stack_item_compressed.ts b/yarn-project/circuits.js/src/structs/public_call_stack_item_compressed.ts index 8df09f6bf4a8..d6d4e167f592 100644 --- a/yarn-project/circuits.js/src/structs/public_call_stack_item_compressed.ts +++ b/yarn-project/circuits.js/src/structs/public_call_stack_item_compressed.ts @@ -50,6 +50,18 @@ export class PublicCallStackItemCompressed { return serializeToBuffer(...PublicCallStackItemCompressed.getFields(this)); } + equals(other: PublicCallStackItemCompressed): boolean { + return ( + this.contractAddress.equals(other.contractAddress) && + this.callContext.equals(other.callContext) && + this.argsHash.equals(other.argsHash) && + this.returnsHash.equals(other.returnsHash) && + this.revertCode.equals(other.revertCode) && + this.startGasLeft.equals(other.startGasLeft) && + this.endGasLeft.equals(other.endGasLeft) + ); + } + /** * Deserializes from a buffer or reader. * @param buffer - Buffer or reader to read from. 
diff --git a/yarn-project/circuits.js/src/structs/public_validation_requests.ts b/yarn-project/circuits.js/src/structs/public_validation_requests.ts index b9e7d179b5a4..f70917875836 100644 --- a/yarn-project/circuits.js/src/structs/public_validation_requests.ts +++ b/yarn-project/circuits.js/src/structs/public_validation_requests.ts @@ -72,6 +72,17 @@ export class PublicValidationRequests { return this.toBuffer().toString('hex'); } + equals(other: PublicValidationRequests) { + return ( + this.forRollup.equals(other.forRollup) && + this.noteHashReadRequests.every((r, i) => r.equals(other.noteHashReadRequests[i])) && + this.nullifierReadRequests.every((r, i) => r.equals(other.nullifierReadRequests[i])) && + this.nullifierNonExistentReadRequests.every((r, i) => r.equals(other.nullifierNonExistentReadRequests[i])) && + this.l1ToL2MsgReadRequests.every((r, i) => r.equals(other.l1ToL2MsgReadRequests[i])) && + this.publicDataReads.every((r, i) => r.equals(other.publicDataReads[i])) + ); + } + static fromFields(fields: Fr[] | FieldReader) { const reader = FieldReader.asReader(fields); return new PublicValidationRequests( diff --git a/yarn-project/circuits.js/src/structs/read_request.ts b/yarn-project/circuits.js/src/structs/read_request.ts index 60127af7c0d8..4907eb6106e2 100644 --- a/yarn-project/circuits.js/src/structs/read_request.ts +++ b/yarn-project/circuits.js/src/structs/read_request.ts @@ -14,6 +14,10 @@ export class ReadRequest { public counter: number, ) {} + equals(other: ReadRequest) { + return this.value.equals(other.value) && this.counter === other.counter; + } + /** * Serialize this as a buffer. * @returns The buffer. @@ -80,6 +84,10 @@ export class ScopedReadRequest { return this.readRequest.counter; } + equals(other: ScopedReadRequest) { + return this.readRequest.equals(other.readRequest) && this.contractAddress.equals(other.contractAddress); + } + /** * Serialize this as a buffer. * @returns The buffer. diff --git a/yarn-project/circuits.js/src/structs/recursive_proof.ts b/yarn-project/circuits.js/src/structs/recursive_proof.ts index 4f94adbe212b..eb074b407809 100644 --- a/yarn-project/circuits.js/src/structs/recursive_proof.ts +++ b/yarn-project/circuits.js/src/structs/recursive_proof.ts @@ -20,7 +20,6 @@ export class RecursiveProof { * Holds the serialized proof data in an array of fields, this is without the public inputs */ public proof: Tuple, - /** * Holds the serialized proof data in a binary buffer, this contains the public inputs */ @@ -79,6 +78,14 @@ export class RecursiveProof { ): RecursiveProof { return RecursiveProof.fromBuffer(Buffer.from(str, 'hex'), expectedSize); } + + static empty(size: N) { + return new RecursiveProof(makeTuple(size, Fr.zero), makeEmptyProof(), true); + } + + isEmpty() { + return this.proof.every(field => field.isZero()); + } } /** @@ -87,7 +94,7 @@ export class RecursiveProof { * @returns The empty "proof". 
*/ export function makeEmptyRecursiveProof(size: N) { - return new RecursiveProof(makeTuple(size, Fr.zero), makeEmptyProof(), true); + return RecursiveProof.empty(size); } export function makeRecursiveProof(size: PROOF_LENGTH, seed = 1) { diff --git a/yarn-project/circuits.js/src/structs/rollup/append_only_tree_snapshot.ts b/yarn-project/circuits.js/src/structs/rollup/append_only_tree_snapshot.ts index c831cbef31f4..f01704a1438a 100644 --- a/yarn-project/circuits.js/src/structs/rollup/append_only_tree_snapshot.ts +++ b/yarn-project/circuits.js/src/structs/rollup/append_only_tree_snapshot.ts @@ -42,6 +42,10 @@ export class AppendOnlyTreeSnapshot { return this.toBuffer().toString(STRING_ENCODING); } + public equals(other: AppendOnlyTreeSnapshot) { + return this.root.equals(other.root) && this.nextAvailableLeafIndex === other.nextAvailableLeafIndex; + } + static fromBuffer(buffer: Buffer | BufferReader): AppendOnlyTreeSnapshot { const reader = BufferReader.asReader(buffer); return new AppendOnlyTreeSnapshot(Fr.fromBuffer(reader), reader.readNumber()); diff --git a/yarn-project/circuits.js/src/structs/rollup/base_or_merge_rollup_public_inputs.ts b/yarn-project/circuits.js/src/structs/rollup/base_or_merge_rollup_public_inputs.ts index d814dd1861ce..f1fb49049309 100644 --- a/yarn-project/circuits.js/src/structs/rollup/base_or_merge_rollup_public_inputs.ts +++ b/yarn-project/circuits.js/src/structs/rollup/base_or_merge_rollup_public_inputs.ts @@ -96,6 +96,19 @@ export class BaseOrMergeRollupPublicInputs { return this.toBuffer().toString('hex'); } + public equals(other: BaseOrMergeRollupPublicInputs) { + return ( + this.rollupType === other.rollupType && + this.numTxs === other.numTxs && + this.constants.equals(other.constants) && + this.start.equals(other.start) && + this.end.equals(other.end) && + this.txsEffectsHash.equals(other.txsEffectsHash) && + this.outHash.equals(other.outHash) && + this.accumulatedFees.equals(other.accumulatedFees) + ); + } + /** * Deserializes from a hex string. * @param str - A hex string to deserialize from. 
diff --git a/yarn-project/circuits.js/src/structs/rollup/base_rollup.ts b/yarn-project/circuits.js/src/structs/rollup/base_rollup.ts index e7a9835974bd..2f17dca1f2fb 100644 --- a/yarn-project/circuits.js/src/structs/rollup/base_rollup.ts +++ b/yarn-project/circuits.js/src/structs/rollup/base_rollup.ts @@ -61,6 +61,14 @@ export class ConstantRollupData { toBuffer() { return serializeToBuffer(...ConstantRollupData.getFields(this)); } + + public equals(other: ConstantRollupData) { + return ( + this.lastArchive.equals(other.lastArchive) && + this.vkTreeRoot.equals(other.vkTreeRoot) && + this.globalVariables.equals(other.globalVariables) + ); + } } /** diff --git a/yarn-project/circuits.js/src/structs/rollup/block_root_or_block_merge_public_inputs.ts b/yarn-project/circuits.js/src/structs/rollup/block_root_or_block_merge_public_inputs.ts index 193acbff4852..6d9b0056d044 100644 --- a/yarn-project/circuits.js/src/structs/rollup/block_root_or_block_merge_public_inputs.ts +++ b/yarn-project/circuits.js/src/structs/rollup/block_root_or_block_merge_public_inputs.ts @@ -1,9 +1,9 @@ +import { EthAddress } from '@aztec/foundation/eth-address'; import { Fr } from '@aztec/foundation/fields'; import { BufferReader, type Tuple, serializeToBuffer, serializeToFields } from '@aztec/foundation/serialize'; import { type FieldsOf } from '@aztec/foundation/types'; import { GlobalVariables } from '../global_variables.js'; -import { EthAddress } from '../index.js'; import { AppendOnlyTreeSnapshot } from './append_only_tree_snapshot.js'; /** @@ -110,6 +110,21 @@ export class BlockRootOrBlockMergePublicInputs { static fromString(str: string) { return BlockRootOrBlockMergePublicInputs.fromBuffer(Buffer.from(str, 'hex')); } + + public equals(other: BlockRootOrBlockMergePublicInputs) { + return ( + this.previousArchive.equals(other.previousArchive) && + this.newArchive.equals(other.newArchive) && + this.previousBlockHash.equals(other.previousBlockHash) && + this.endBlockHash.equals(other.endBlockHash) && + this.startGlobalVariables.equals(other.startGlobalVariables) && + this.endGlobalVariables.equals(other.endGlobalVariables) && + this.outHash.equals(other.outHash) && + this.fees.every((fee, i) => fee.equals(other.fees[i])) && + this.vkTreeRoot.equals(other.vkTreeRoot) && + this.proverId.equals(other.proverId) + ); + } } export class FeeRecipient { @@ -131,4 +146,8 @@ export class FeeRecipient { toFields() { return serializeToFields(...FeeRecipient.getFields(this)); } + + public equals(other: FeeRecipient) { + return this.recipient.equals(other.recipient) && this.value.equals(other.value); + } } diff --git a/yarn-project/circuits.js/src/structs/rollup/previous_rollup_block_data.ts b/yarn-project/circuits.js/src/structs/rollup/previous_rollup_block_data.ts index 2d203aa8ffd8..ef49b6836763 100644 --- a/yarn-project/circuits.js/src/structs/rollup/previous_rollup_block_data.ts +++ b/yarn-project/circuits.js/src/structs/rollup/previous_rollup_block_data.ts @@ -3,7 +3,7 @@ import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; import { NESTED_RECURSIVE_PROOF_LENGTH, VK_TREE_HEIGHT } from '../../constants.gen.js'; import { MembershipWitness } from '../membership_witness.js'; import { RecursiveProof } from '../recursive_proof.js'; -import { VerificationKeyAsFields } from '../verification_key.js'; +import { VerificationKeyAsFields, VerificationKeyData } from '../verification_key.js'; import { BlockRootOrBlockMergePublicInputs } from './block_root_or_block_merge_public_inputs.js'; /** @@ -51,4 +51,16 @@ 
export class PreviousRollupBlockData { MembershipWitness.fromBuffer(reader, VK_TREE_HEIGHT), ); } + + /** Creates an instance with an empty proof and verification key. Useful for simulation. */ + public static withEmptyProof( + blockRootOrBlockMergePublicInputs: BlockRootOrBlockMergePublicInputs, + ): PreviousRollupBlockData { + return new PreviousRollupBlockData( + blockRootOrBlockMergePublicInputs, + RecursiveProof.empty(NESTED_RECURSIVE_PROOF_LENGTH), + VerificationKeyData.makeEmpty().keyAsFields, + MembershipWitness.empty(VK_TREE_HEIGHT), + ); + } } diff --git a/yarn-project/circuits.js/src/structs/rollup/previous_rollup_data.ts b/yarn-project/circuits.js/src/structs/rollup/previous_rollup_data.ts index 85d495186beb..2f02757c0bba 100644 --- a/yarn-project/circuits.js/src/structs/rollup/previous_rollup_data.ts +++ b/yarn-project/circuits.js/src/structs/rollup/previous_rollup_data.ts @@ -2,8 +2,8 @@ import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; import { NESTED_RECURSIVE_PROOF_LENGTH, VK_TREE_HEIGHT } from '../../constants.gen.js'; import { MembershipWitness } from '../membership_witness.js'; -import { RecursiveProof } from '../recursive_proof.js'; -import { VerificationKeyAsFields } from '../verification_key.js'; +import { RecursiveProof, makeEmptyRecursiveProof } from '../recursive_proof.js'; +import { VerificationKeyAsFields, VerificationKeyData } from '../verification_key.js'; import { BaseOrMergeRollupPublicInputs } from './base_or_merge_rollup_public_inputs.js'; /** @@ -51,4 +51,14 @@ export class PreviousRollupData { MembershipWitness.fromBuffer(reader, VK_TREE_HEIGHT), ); } + + /** Creates a struct with empty proof and verification key. Used for simulation. */ + public static withEmptyProof(baseOrMergeRollupPublicInputs: BaseOrMergeRollupPublicInputs): PreviousRollupData { + return new PreviousRollupData( + baseOrMergeRollupPublicInputs, + makeEmptyRecursiveProof(NESTED_RECURSIVE_PROOF_LENGTH), + VerificationKeyData.makeEmpty().keyAsFields, + MembershipWitness.empty(VK_TREE_HEIGHT), + ); + } } diff --git a/yarn-project/circuits.js/src/structs/rollup/root_rollup.ts b/yarn-project/circuits.js/src/structs/rollup/root_rollup.ts index f33487e23783..cc674286cc31 100644 --- a/yarn-project/circuits.js/src/structs/rollup/root_rollup.ts +++ b/yarn-project/circuits.js/src/structs/rollup/root_rollup.ts @@ -154,4 +154,19 @@ export class RootRollupPublicInputs { static fromString(str: string) { return RootRollupPublicInputs.fromBuffer(Buffer.from(str, 'hex')); } + + public equals(other: RootRollupPublicInputs): boolean { + return ( + this.previousArchive.equals(other.previousArchive) && + this.endArchive.equals(other.endArchive) && + this.previousBlockHash.equals(other.previousBlockHash) && + this.endBlockHash.equals(other.endBlockHash) && + this.endTimestamp.equals(other.endTimestamp) && + this.endBlockNumber.equals(other.endBlockNumber) && + this.outHash.equals(other.outHash) && + this.fees.every((fee, i) => fee.equals(other.fees[i])) && + this.vkTreeRoot.equals(other.vkTreeRoot) && + this.proverId.equals(other.proverId) + ); + } } diff --git a/yarn-project/circuits.js/src/structs/rollup_validation_requests.ts b/yarn-project/circuits.js/src/structs/rollup_validation_requests.ts index 4113649893df..acea321cde96 100644 --- a/yarn-project/circuits.js/src/structs/rollup_validation_requests.ts +++ b/yarn-project/circuits.js/src/structs/rollup_validation_requests.ts @@ -26,6 +26,10 @@ export class RollupValidationRequests { return this.toBuffer().toString('hex'); } 
+ equals(other: RollupValidationRequests) { + return this.maxBlockNumber.equals(other.maxBlockNumber); + } + static fromFields(fields: Fr[] | FieldReader) { const reader = FieldReader.asReader(fields); return new RollupValidationRequests(MaxBlockNumber.fromFields(reader)); diff --git a/yarn-project/circuits.js/src/structs/state_reference.ts b/yarn-project/circuits.js/src/structs/state_reference.ts index 5aa48b38c1c9..e84af202cbb4 100644 --- a/yarn-project/circuits.js/src/structs/state_reference.ts +++ b/yarn-project/circuits.js/src/structs/state_reference.ts @@ -35,6 +35,10 @@ export class StateReference { return fields; } + equals(other: StateReference): boolean { + return this.l1ToL2MessageTree.equals(other.l1ToL2MessageTree) && this.partial.equals(other.partial); + } + static fromBuffer(buffer: Buffer | BufferReader): StateReference { const reader = BufferReader.asReader(buffer); return new StateReference(reader.readObject(AppendOnlyTreeSnapshot), reader.readObject(PartialStateReference)); diff --git a/yarn-project/circuits.js/src/structs/tree_leaf_read_request.ts b/yarn-project/circuits.js/src/structs/tree_leaf_read_request.ts index e32a0068b404..3992df0264af 100644 --- a/yarn-project/circuits.js/src/structs/tree_leaf_read_request.ts +++ b/yarn-project/circuits.js/src/structs/tree_leaf_read_request.ts @@ -4,6 +4,10 @@ import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/ export class TreeLeafReadRequest { constructor(public value: Fr, public leafIndex: Fr) {} + equals(other: TreeLeafReadRequest) { + return this.value.equals(other.value) && this.leafIndex.equals(other.leafIndex); + } + toBuffer(): Buffer { return serializeToBuffer(this.value, this.leafIndex); } diff --git a/yarn-project/circuits.js/src/structs/tx_context.ts b/yarn-project/circuits.js/src/structs/tx_context.ts index a4b602b7f301..b505d85f7727 100644 --- a/yarn-project/circuits.js/src/structs/tx_context.ts +++ b/yarn-project/circuits.js/src/structs/tx_context.ts @@ -40,6 +40,14 @@ export class TxContext { return serializeToBuffer(...TxContext.getFields(this)); } + equals(other: TxContext): boolean { + return ( + this.chainId.equals(other.chainId) && + this.version.equals(other.version) && + this.gasSettings.equals(other.gasSettings) + ); + } + static fromFields(fields: Fr[] | FieldReader): TxContext { const reader = FieldReader.asReader(fields); return new TxContext(reader.readField(), reader.readField(), reader.readObject(GasSettings)); diff --git a/yarn-project/circuits.js/src/structs/verification_key.ts b/yarn-project/circuits.js/src/structs/verification_key.ts index 9b927bc834df..1c708f07c58f 100644 --- a/yarn-project/circuits.js/src/structs/verification_key.ts +++ b/yarn-project/circuits.js/src/structs/verification_key.ts @@ -296,6 +296,10 @@ export class VerificationKeyData { return new VerificationKeyData(VerificationKeyAsFields.makeFake(), VerificationKey.makeFake().toBuffer()); } + static makeEmpty(): VerificationKeyData { + return new VerificationKeyData(VerificationKeyAsFields.makeEmpty(), VerificationKeyAsFields.makeEmpty().toBuffer()); + } + /** * Serialize as a buffer. * @returns The buffer. @@ -344,6 +348,13 @@ export class AvmVerificationKeyData { return new AvmVerificationKeyData(AvmVerificationKeyAsFields.makeFake(), VerificationKey.makeFake().toBuffer()); } + static makeEmpty(): AvmVerificationKeyData { + return new AvmVerificationKeyData( + AvmVerificationKeyAsFields.makeEmpty(), + AvmVerificationKeyAsFields.makeEmpty().toBuffer(), + ); + } + /** * Serialize as a buffer. 
* @returns The buffer. diff --git a/yarn-project/cli-wallet/package.json b/yarn-project/cli-wallet/package.json index ea37249686db..e8fe9b83dfb5 100644 --- a/yarn-project/cli-wallet/package.json +++ b/yarn-project/cli-wallet/package.json @@ -48,6 +48,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/cli/package.json b/yarn-project/cli/package.json index c0f4af797327..b6a9891babd8 100644 --- a/yarn-project/cli/package.json +++ b/yarn-project/cli/package.json @@ -48,6 +48,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/end-to-end/package.json b/yarn-project/end-to-end/package.json index 98d1e156cd4c..bf630f4e0475 100644 --- a/yarn-project/end-to-end/package.json +++ b/yarn-project/end-to-end/package.json @@ -131,6 +131,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/entrypoints/package.json b/yarn-project/entrypoints/package.json index 48c6c5535a49..6a278c79364b 100644 --- a/yarn-project/entrypoints/package.json +++ b/yarn-project/entrypoints/package.json @@ -41,6 +41,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/ethereum/package.json b/yarn-project/ethereum/package.json index b7518c9547fe..2e49f7f23244 100644 --- a/yarn-project/ethereum/package.json +++ b/yarn-project/ethereum/package.json @@ -57,6 +57,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/foundation/package.json b/yarn-project/foundation/package.json index 2af937f8c183..b63a267cbfe3 100644 --- a/yarn-project/foundation/package.json +++ b/yarn-project/foundation/package.json @@ -15,6 +15,7 @@ "./collection": "./dest/collection/index.js", "./config": "./dest/config/index.js", "./crypto": "./dest/crypto/index.js", + "./decorators": "./dest/decorators/index.js", "./error": "./dest/error/index.js", "./eth-address": "./dest/eth-address/index.js", "./eth-signature": "./dest/eth-signature/index.js", @@ -68,6 +69,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } @@ -156,4 +160,4 @@ "engines": { "node": ">=18" } -} +} \ No newline at end of file diff --git a/yarn-project/foundation/src/collection/array.ts b/yarn-project/foundation/src/collection/array.ts index 6f2262a1af3c..4784dc807572 100644 --- a/yarn-project/foundation/src/collection/array.ts +++ b/yarn-project/foundation/src/collection/array.ts @@ -7,9 +7,14 @@ import { type Tuple } from '../serialize/types.js'; * @param length - Target length. * @returns A new padded array. */ -export function padArrayEnd(arr: T[], elem: T, length: N): Tuple { +export function padArrayEnd( + arr: T[], + elem: T, + length: N, + errMsg = `Array size exceeds target length`, +): Tuple { if (arr.length > length) { - throw new Error(`Array size exceeds target length`); + throw new Error(errMsg); } // Since typescript cannot always deduce that something is a tuple, we cast return [...arr, ...Array(length - arr.length).fill(elem)] as Tuple; @@ -28,9 +33,14 @@ export function removeArrayPaddingEnd(arr: T[], isEmpty: (item: T) => boolean * @param length - Target length. * @returns A new padded array. 
  */
-export function padArrayStart<T, N extends number>(arr: T[], elem: T, length: N): Tuple<T, N> {
+export function padArrayStart<T, N extends number>(
+  arr: T[],
+  elem: T,
+  length: N,
+  errMsg = `Array size exceeds target length`,
+): Tuple<T, N> {
   if (arr.length > length) {
-    throw new Error(`Array size exceeds target length`);
+    throw new Error(errMsg);
   }
   // Since typescript cannot always deduce that something is a tuple, we cast
   return [...Array(length - arr.length).fill(elem), ...arr] as Tuple<T, N>;
 }
diff --git a/yarn-project/foundation/src/decorators/index.ts b/yarn-project/foundation/src/decorators/index.ts
new file mode 100644
index 000000000000..4ed809e11dfc
--- /dev/null
+++ b/yarn-project/foundation/src/decorators/index.ts
@@ -0,0 +1 @@
+export * from './memoize.js';
diff --git a/yarn-project/foundation/src/decorators/memoize.test.ts b/yarn-project/foundation/src/decorators/memoize.test.ts
new file mode 100644
index 000000000000..edefd48e8b3c
--- /dev/null
+++ b/yarn-project/foundation/src/decorators/memoize.test.ts
@@ -0,0 +1,129 @@
+import { sleep } from '../sleep/index.js';
+import { memoize } from './memoize.js';
+
+function loggedMethod(originalMethod: any, context: ClassMethodDecoratorContext) {
+  console.log('Building logged method');
+  const methodName = String(context.name);
+  function replacementMethod(this: any, ...args: any[]) {
+    console.log(`LOG: Entering method '${methodName}'.`);
+    const result = originalMethod.call(this, ...args);
+    console.log(`LOG: Exiting method '${methodName}'.`);
+    return result;
+  }
+  return replacementMethod;
+}
+
+function loggedMethod2(headMessage = 'LOG:') {
+  return function actualDecorator(originalMethod: any, context: ClassMethodDecoratorContext) {
+    const methodName = String(context.name);
+    function replacementMethod(this: any, ...args: any[]) {
+      console.log(`${headMessage} Entering method '${methodName}'.`);
+      const result = originalMethod.call(this, ...args);
+      console.log(`${headMessage} Exiting method '${methodName}'.`);
+      return result;
+    }
+    return replacementMethod;
+  };
+}
+
+class Memoized {
+  public readonly counters: Record<string, number> = {};
+
+  @memoize
+  voidPublic() {
+    this.inc('voidPublic');
+  }
+
+  @memoize
+  synchPublic() {
+    this.inc('synchPublic');
+    return this.counters['synchPublic'] + 10;
+  }
+
+  callSyncPrivate() {
+    this.synchPrivate();
+    this.synchPrivate();
+  }
+
+  @memoize
+  private synchPrivate() {
+    this.inc('synchPrivate');
+    return this.counters['synchPrivate'] + 20;
+  }
+
+  @memoize
+  async asyncPublic() {
+    this.inc('asyncPublic');
+    await sleep(1);
+    return this.counters['asyncPublic'] + 30;
+  }
+
+  async callAsyncPrivate() {
+    await this.asyncPrivate();
+    await this.asyncPrivate();
+  }
+
+  @memoize
+  private async asyncPrivate() {
+    this.inc('asyncPrivate');
+    await sleep(1);
+    return this.counters['asyncPrivate'] + 40;
+  }
+
+  @memoize
+  async asyncVoid() {
+    this.inc('asyncVoid');
+    await sleep(1);
+  }
+
+  private inc(name: string) {
+    if (!(name in this.counters)) {
+      this.counters[name] = 0;
+    }
+    this.counters[name]++;
+  }
+}
+
+describe('memoize', () => {
+  let memoized: Memoized;
+
+  beforeEach(() => {
+    memoized = new Memoized();
+  });
+
+  it('memoizes public void', () => {
+    memoized.voidPublic();
+    memoized.voidPublic();
+    expect(memoized.counters['voidPublic']).toBe(1);
+  });
+
+  it('memoizes public synchronous', () => {
+    expect(memoized.synchPublic()).toBe(11);
+    expect(memoized.synchPublic()).toBe(11);
+    expect(memoized.counters['synchPublic']).toBe(1);
+  });
+
+  it('memoizes private synchronous', () => {
+    memoized.callSyncPrivate();
+    memoized.callSyncPrivate();
+    expect(memoized.counters['synchPrivate']).toBe(1);
+  });
+
+  it('memoizes public asynchronous', async () => {
+    expect(await memoized.asyncPublic()).toBe(31);
+    expect(await memoized.asyncPublic()).toBe(31);
+    expect(memoized.counters['asyncPublic']).toBe(1);
+  });
+
+  it('memoizes private asynchronous', async () => {
+    await memoized.callAsyncPrivate();
+    await memoized.callAsyncPrivate();
+    expect(memoized.counters['asyncPrivate']).toBe(1);
+  });
+
+  it('memoizes void asynchronous', async () => {
+    await memoized.asyncVoid();
+    await memoized.asyncVoid();
+    expect(memoized.counters['asyncVoid']).toBe(1);
+  });
+});
diff --git a/yarn-project/foundation/src/decorators/memoize.ts b/yarn-project/foundation/src/decorators/memoize.ts
new file mode 100644
index 000000000000..ad4ee8377cf4
--- /dev/null
+++ b/yarn-project/foundation/src/decorators/memoize.ts
@@ -0,0 +1,11 @@
+export function memoize<This, Result>(fn: () => Result, context: ClassMethodDecoratorContext) {
+  return function (this: This) {
+    const key = `__${String(context.name)}_value`;
+    const thisWithKey = this as { [key: string]: Result };
+    if (!(key in this)) {
+      const result = fn.call(this);
+      thisWithKey[key] = result;
+    }
+    return thisWithKey[key];
+  };
+}
diff --git a/yarn-project/foundation/src/serialize/types.ts b/yarn-project/foundation/src/serialize/types.ts
index 0f09f56103c1..a0f063501690 100644
--- a/yarn-project/foundation/src/serialize/types.ts
+++ b/yarn-project/foundation/src/serialize/types.ts
@@ -20,6 +20,7 @@ export function assertLength<T, N extends number>(array: T[], n: N): Tuple<T, N
   }
   return array as Tuple<T, N>;
 }
+
 /**
  * Annoying, mapping a tuple does not preserve length.
  * This is a helper to preserve length during a map operation.
@@ -38,3 +39,18 @@ type MapTuple<T extends readonly any[], F extends (item: any) => any> = {
 export function mapTuple<T extends readonly any[], F extends (item: any) => any>(tuple: T, fn: F): MapTuple<T, F> {
   return tuple.map(fn) as MapTuple<T, F>;
 }
+
+// TODO(palla): Fix mapTuple with the following signature. Commented out for now since it breaks several calls in noir-mappings.
+// export function mapTuple any>(tuple: T, fn: F): MapTuple {
+//   return tuple.map(fn) as MapTuple;
+// }
+// type MapTuple any> = {
+//   [K in keyof T]: F extends (item: T[K]) => infer V ? V : never;
+// };
+
+/** Equivalent of Promise.all for tuples. */
+export function awaitTuple>(
+  tuple: T,
+): Promise<{ -readonly [P in keyof T]: Awaited<T[P]> }> {
+  return Promise.all(tuple) as Promise<{ -readonly [P in keyof T]: Awaited<T[P]> }>;
+}
diff --git a/yarn-project/foundation/src/types/index.ts b/yarn-project/foundation/src/types/index.ts
index 71872c1d99f0..bb87b659f4fa 100644
--- a/yarn-project/foundation/src/types/index.ts
+++ b/yarn-project/foundation/src/types/index.ts
@@ -13,5 +13,8 @@ export type FunctionsOf<T> = {
 /** Marks a set of properties of a type as optional. */
 export type PartialBy<T, K extends keyof T> = Omit<T, K> & Partial<Pick<T, K>>;
 
+/** Marks a set of properties of a type as required. */
+export type RequiredBy<T, K extends keyof T> = Omit<T, K> & Required<Pick<T, K>>;
+
 /** Removes readonly modifiers for a type.
*/ export type Writeable = { -readonly [P in keyof T]: T[P] }; diff --git a/yarn-project/ivc-integration/package.json b/yarn-project/ivc-integration/package.json index 2976b6de2d8d..ab02f6ffaa7e 100644 --- a/yarn-project/ivc-integration/package.json +++ b/yarn-project/ivc-integration/package.json @@ -39,6 +39,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/key-store/package.json b/yarn-project/key-store/package.json index 0bf868d644d2..c66a9bd63a99 100644 --- a/yarn-project/key-store/package.json +++ b/yarn-project/key-store/package.json @@ -35,6 +35,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/kv-store/package.json b/yarn-project/kv-store/package.json index 2ca6477b1498..ae05345eeaad 100644 --- a/yarn-project/kv-store/package.json +++ b/yarn-project/kv-store/package.json @@ -34,6 +34,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/merkle-tree/package.json b/yarn-project/merkle-tree/package.json index 0446f9a9d02c..15447b9c794c 100644 --- a/yarn-project/merkle-tree/package.json +++ b/yarn-project/merkle-tree/package.json @@ -37,6 +37,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/noir-contracts.js/package.json b/yarn-project/noir-contracts.js/package.json index be2ad23370e2..eff346f3fe9c 100644 --- a/yarn-project/noir-contracts.js/package.json +++ b/yarn-project/noir-contracts.js/package.json @@ -35,6 +35,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/noir-protocol-circuits-types/package.json b/yarn-project/noir-protocol-circuits-types/package.json index c33e8ba46eb6..6523bb7c35bd 100644 --- a/yarn-project/noir-protocol-circuits-types/package.json +++ b/yarn-project/noir-protocol-circuits-types/package.json @@ -39,6 +39,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/noir-protocol-circuits-types/src/vks.ts b/yarn-project/noir-protocol-circuits-types/src/vks.ts index 563432e4c38b..ac1d18726f26 100644 --- a/yarn-project/noir-protocol-circuits-types/src/vks.ts +++ b/yarn-project/noir-protocol-circuits-types/src/vks.ts @@ -6,6 +6,7 @@ import { EMPTY_NESTED_INDEX, Fr, MERGE_ROLLUP_INDEX, + MembershipWitness, type MerkleTree, MerkleTreeCalculator, PRIVATE_KERNEL_EMPTY_INDEX, @@ -191,3 +192,9 @@ export function getVKSiblingPath(vkIndex: number) { VK_TREE_HEIGHT, ); } + +export function getVKMembershipWitness(vk: VerificationKeyData | VerificationKeyAsFields | Fr) { + const index = getVKIndex(vk); + const siblingPath = getVKSiblingPath(index); + return new MembershipWitness(VK_TREE_HEIGHT, BigInt(index), siblingPath); +} diff --git a/yarn-project/p2p-bootstrap/package.json b/yarn-project/p2p-bootstrap/package.json index 9e9d564c9aee..57225e75e381 100644 --- a/yarn-project/p2p-bootstrap/package.json +++ b/yarn-project/p2p-bootstrap/package.json @@ -61,6 +61,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/p2p/package.json b/yarn-project/p2p/package.json index 5d754eea5522..5c9d007e0cca 100644 --- 
a/yarn-project/p2p/package.json +++ b/yarn-project/p2p/package.json @@ -37,6 +37,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/package.common.json b/yarn-project/package.common.json index 7ef660b828e2..f3d2aca61c5b 100644 --- a/yarn-project/package.common.json +++ b/yarn-project/package.common.json @@ -28,6 +28,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/protocol-contracts/package.json b/yarn-project/protocol-contracts/package.json index c7bff393480f..0bae20accc83 100644 --- a/yarn-project/protocol-contracts/package.json +++ b/yarn-project/protocol-contracts/package.json @@ -46,6 +46,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/prover-client/package.json b/yarn-project/prover-client/package.json index 4171e53f7815..3bb3b2b8056e 100644 --- a/yarn-project/prover-client/package.json +++ b/yarn-project/prover-client/package.json @@ -41,6 +41,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/prover-client/src/mocks/fixtures.ts b/yarn-project/prover-client/src/mocks/fixtures.ts index eb8ab0fc407a..b56bb40819ae 100644 --- a/yarn-project/prover-client/src/mocks/fixtures.ts +++ b/yarn-project/prover-client/src/mocks/fixtures.ts @@ -141,8 +141,15 @@ export const makeEmptyProcessedTx = (builderDb: MerkleTreeOperations, chainId: F return makeEmptyProcessedTxFromHistoricalTreeRoots(header, chainId, version, getVKTreeRoot()); }; -// Updates the expectedDb trees based on the new note hashes, contracts, and nullifiers from these txs -export const updateExpectedTreesFromTxs = async (db: MerkleTreeOperations, txs: ProcessedTx[]) => { +/** + * Updates the expectedDb trees based on the new note hashes, contracts, and nullifiers from these txs + * @remarks Does not work if one of the tx has fee payment enabled + */ +export const updateExpectedTreesFromTxs = async ( + db: MerkleTreeOperations, + txs: ProcessedTx[], + l1ToL2Messages?: Fr[], +) => { await db.appendLeaves( MerkleTreeId.NOTE_HASH_TREE, txs.flatMap(tx => @@ -164,15 +171,26 @@ export const updateExpectedTreesFromTxs = async (db: MerkleTreeOperations, txs: ), NULLIFIER_TREE_HEIGHT, ); + for (const tx of txs) { await db.batchInsert( MerkleTreeId.PUBLIC_DATA_TREE, - tx.data.end.publicDataUpdateRequests.map(write => { - return new PublicDataTreeLeaf(write.leafSlot, write.newValue).toBuffer(); - }), + [ + ...tx.data.end.publicDataUpdateRequests.map(write => { + return new PublicDataTreeLeaf(write.leafSlot, write.newValue).toBuffer(); + }), + // Add an extra leaf at the end to account for the fee payer write, + // so the number of writes match the actual ones done by the sequencer; + // note that this will not work if the test tx has fee payment enabled + PublicDataTreeLeaf.empty().toBuffer(), + ], PUBLIC_DATA_SUBTREE_HEIGHT, ); } + + if (l1ToL2Messages) { + await db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, l1ToL2Messages); + } }; export const makeGlobals = (blockNumber: number) => { diff --git a/yarn-project/prover-client/src/orchestrator-2/adapters/prover.ts b/yarn-project/prover-client/src/orchestrator-2/adapters/prover.ts new file mode 100644 index 000000000000..c5818fc9620b --- /dev/null +++ 
b/yarn-project/prover-client/src/orchestrator-2/adapters/prover.ts @@ -0,0 +1,53 @@ +import { type ProvingRequest, type ProvingRequestResult, ProvingRequestType, type ServerCircuitProver } from '@aztec/circuit-types'; + +import { type Prover } from '../types.js'; + +export class ProverAdapter implements Prover { + constructor( + public readonly epochNumber: number, + private readonly prover: ServerCircuitProver, + private readonly signal?: AbortSignal, + ) {} + + prove(request: Request): Promise>; + prove(request: ProvingRequest): Promise> { + switch (request.type) { + case ProvingRequestType.BASE_PARITY: + return this.prover.getBaseParityProof(request.inputs, this.signal, this.epochNumber); + case ProvingRequestType.ROOT_PARITY: + return this.prover.getRootParityProof(request.inputs, this.signal, this.epochNumber); + case ProvingRequestType.BASE_ROLLUP: + return this.prover.getBaseRollupProof(request.inputs, this.signal, this.epochNumber); + case ProvingRequestType.MERGE_ROLLUP: + return this.prover.getMergeRollupProof(request.inputs, this.signal, this.epochNumber); + case ProvingRequestType.BLOCK_ROOT_ROLLUP: + return this.prover.getBlockRootRollupProof(request.inputs, this.signal, this.epochNumber); + case ProvingRequestType.BLOCK_MERGE_ROLLUP: + return this.prover.getBlockMergeRollupProof(request.inputs, this.signal, this.epochNumber); + case ProvingRequestType.ROOT_ROLLUP: + return this.prover.getRootRollupProof(request.inputs, this.signal, this.epochNumber); + case ProvingRequestType.TUBE_PROOF: + return this.prover.getTubeProof(request.inputs, this.signal, this.epochNumber); + case ProvingRequestType.PUBLIC_VM: + return this.prover.getAvmProof(request.inputs, this.signal, this.epochNumber); + case ProvingRequestType.PUBLIC_KERNEL_NON_TAIL: + return this.prover.getPublicKernelProof( + { type: request.kernelType, inputs: request.inputs }, + this.signal, + this.epochNumber, + ); + case ProvingRequestType.PUBLIC_KERNEL_TAIL: + return this.prover.getPublicTailProof( + { type: request.kernelType, inputs: request.inputs }, + this.signal, + this.epochNumber, + ); + case ProvingRequestType.PRIVATE_KERNEL_EMPTY: + return this.prover.getEmptyPrivateKernelProof(request.inputs, this.signal, this.epochNumber); + default: { + const _: never = request; + throw new Error('Unsupported proving request type'); + } + } + } +} diff --git a/yarn-project/prover-client/src/orchestrator-2/adapters/simulator.ts b/yarn-project/prover-client/src/orchestrator-2/adapters/simulator.ts new file mode 100644 index 000000000000..665d652ab400 --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator-2/adapters/simulator.ts @@ -0,0 +1,57 @@ +import { type ProvingRequest, ProvingRequestType, type ServerCircuitProver, type SimulationRequestResult } from '@aztec/circuit-types'; + +import { type Simulator } from '../types.js'; + +export class SimulatorAdapter implements Simulator { + constructor( + public readonly epochNumber: number, + private readonly simulator: ServerCircuitProver, + private readonly signal?: AbortSignal, + ) {} + + simulate(request: Request): Promise>; + async simulate(request: ProvingRequest): Promise> { + switch (request.type) { + case ProvingRequestType.BASE_PARITY: + return (await this.simulator.getBaseParityProof(request.inputs, this.signal, this.epochNumber)).publicInputs; + case ProvingRequestType.ROOT_PARITY: + return (await this.simulator.getRootParityProof(request.inputs, this.signal, this.epochNumber)).publicInputs; + case ProvingRequestType.BASE_ROLLUP: + return (await 
this.simulator.getBaseRollupProof(request.inputs, this.signal, this.epochNumber)).inputs; + case ProvingRequestType.MERGE_ROLLUP: + return (await this.simulator.getMergeRollupProof(request.inputs, this.signal, this.epochNumber)).inputs; + case ProvingRequestType.BLOCK_ROOT_ROLLUP: + return (await this.simulator.getBlockRootRollupProof(request.inputs, this.signal, this.epochNumber)).inputs; + case ProvingRequestType.BLOCK_MERGE_ROLLUP: + return (await this.simulator.getBlockMergeRollupProof(request.inputs, this.signal, this.epochNumber)).inputs; + case ProvingRequestType.ROOT_ROLLUP: + return (await this.simulator.getRootRollupProof(request.inputs, this.signal, this.epochNumber)).inputs; + case ProvingRequestType.TUBE_PROOF: + return null; + case ProvingRequestType.PUBLIC_VM: + return null; + case ProvingRequestType.PUBLIC_KERNEL_NON_TAIL: + return ( + await this.simulator.getPublicKernelProof( + { type: request.kernelType, inputs: request.inputs }, + this.signal, + this.epochNumber, + ) + ).inputs; + case ProvingRequestType.PUBLIC_KERNEL_TAIL: + return ( + await this.simulator.getPublicTailProof( + { type: request.kernelType, inputs: request.inputs }, + this.signal, + this.epochNumber, + ) + ).inputs; + case ProvingRequestType.PRIVATE_KERNEL_EMPTY: + return (await this.simulator.getEmptyPrivateKernelProof(request.inputs, this.signal, this.epochNumber)).inputs; + default: { + const _: never = request; + throw new Error('Unsupported proving request type'); + } + } + } +} diff --git a/yarn-project/prover-client/src/orchestrator-2/block-orchestrator.test.ts b/yarn-project/prover-client/src/orchestrator-2/block-orchestrator.test.ts new file mode 100644 index 000000000000..fc6f89e137a4 --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator-2/block-orchestrator.test.ts @@ -0,0 +1,61 @@ +import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/circuits.js'; +import { fr } from '@aztec/circuits.js/testing'; +import { range } from '@aztec/foundation/array'; +import { times } from '@aztec/foundation/collection'; +import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; + +import { makeBloatedProcessedTx, updateExpectedTreesFromTxs } from '../mocks/fixtures.js'; +import { TestContext } from './test/context.js'; + +describe('BlockOrchestrator', () => { + let context: TestContext; + let logger: DebugLogger; + + beforeAll(() => { + logger = createDebugLogger('aztec:prover-client:test:block-orchestrator'); + }); + + beforeEach(async () => { + context = await TestContext.new(logger); + }); + + const buildBlock = async (opts: { simulationOnly?: boolean } = {}) => { + const txs = times(4, i => makeBloatedProcessedTx(context.actualDb, i + 1)); + const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); + const blockNumber = 20; + const numTxs = txs.length; + + const orchestrator = context.buildBlockOrchestrator({ numTxs, l1ToL2Messages, blockNumber, ...opts }); + + await orchestrator.start(); + for (const tx of txs) { + await orchestrator.addTx(tx); + } + await orchestrator.updateState(); + + const block = await orchestrator.getBlock(); + expect(block.number).toEqual(blockNumber); + + const result = await orchestrator.simulate(); + expect(result.endGlobalVariables.blockNumber.toNumber()).toEqual(blockNumber); + + context.logger.info('Updating expected db with txs'); + await updateExpectedTreesFromTxs(context.expectsDb, txs, l1ToL2Messages); + await context.assertDbsMatch(); + + return orchestrator; + }; + + it('builds a block with 4 txs', async () => 
{ + await buildBlock({ simulationOnly: true }); + context.logger.info('Completed block'); + }); + + it('proves a block with 4 txs', async () => { + const orchestrator = await buildBlock(); + const proof = await orchestrator.prove(); + expect(proof.inputs).toEqual(await orchestrator.simulate()); + expect(proof.proof).toBeDefined(); + context.logger.info('Completed proving'); + }); +}); diff --git a/yarn-project/prover-client/src/orchestrator-2/block-orchestrator.ts b/yarn-project/prover-client/src/orchestrator-2/block-orchestrator.ts new file mode 100644 index 000000000000..5ece7465bd6e --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator-2/block-orchestrator.ts @@ -0,0 +1,367 @@ +import { + Body, + L2Block, + MerkleTreeId, + type ProcessedTx, + type PublicInputsAndRecursiveProof, + type TxEffect, +} from '@aztec/circuit-types'; +import { + type BlockRootOrBlockMergePublicInputs, + ContentCommitment, + Fr, + type GlobalVariables, + Header, + StateReference, +} from '@aztec/circuits.js'; +import { sha256Trunc } from '@aztec/foundation/crypto'; +import { memoize } from '@aztec/foundation/decorators'; +import { type LogData, createDebugLogger } from '@aztec/foundation/log'; +import { promiseWithResolvers } from '@aztec/foundation/promise'; + +import { getTreeSnapshot, validateBlockRootOutput } from '../orchestrator/block-building-helpers.js'; +import { BlockRootCircuit } from './circuits/block-root.js'; +import { MergeRollupCircuit } from './circuits/merge-rollup.js'; +import { ParityOrchestrator } from './parity-orchestrator.js'; +import { TxOrchestrator } from './tx-orchestrator.js'; +import { type OrchestratorContext } from './types.js'; +import { getMergeLocation } from './utils.js'; + +/** + * Orchestrates the simulation and proving of a single block. + */ +export class BlockOrchestrator { + private readonly txs: TxOrchestrator[] = []; + private readonly merges: MergeRollupCircuit[] = []; + + private parity?: ParityOrchestrator; + private body?: Body; + + private readonly simulationPromise = promiseWithResolvers(); + private readonly blockPromise = promiseWithResolvers(); + private readonly proofPromise = + promiseWithResolvers>(); + + private readonly logger = createDebugLogger('aztec:prover-client:block-orchestrator'); + private readonly logdata: LogData; + + constructor( + /** Index of the block within the epoch (zero-based) */ + public readonly index: number, + private readonly numTxs: number, + private readonly globalVariables: GlobalVariables, + private readonly l1ToL2Messages: Fr[], + private readonly context: OrchestratorContext, + ) { + this.handleError = this.handleError.bind(this); + this.logdata = { blockNumber: globalVariables.blockNumber.toNumber() }; + } + + /** Whether this orchestrator is configured to only simulate the block and not run any proving. */ + public get isSimulationOnly() { + return this.context.options.simulationOnly; + } + + /** + * Starts the current block. + * Updates world-state with new L1 to L2 messages. + * Asynchronously begins simulation and proving of all parity circuits. 
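+ *
+ * Illustrative call order (mirrors block-orchestrator.test.ts above; names come from that test and are not normative):
+ *   await orchestrator.start();
+ *   for (const tx of txs) { await orchestrator.addTx(tx); }
+ *   await orchestrator.updateState();
+ *   const block = await orchestrator.getBlock();
+ *   const proof = await orchestrator.prove(); // skipped when running simulation-only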
+ */ + public async start() { + this.logger.verbose(`Starting block with ${this.numTxs} expected txs`, this.logdata); + + // Take snapshot of archive tree as block starts + await this.getBlockRoot().takeArchiveSnapshot(); + // Initialize parity circuits + const parity = new ParityOrchestrator(this.l1ToL2Messages, this.context); + this.parity = parity; + // Block until world-state is updated + const state = await parity.updateState(); + this.logger.verbose(`Updated L1-to-L2 tree with ${this.l1ToL2Messages.length} messages`, this.logdata); + // Pass the root parity output to root circuit + this.getBlockRoot().setRootParityState(state); + // Start simulation and proving + this.startParity(parity); + } + + /** + * Adds a new transaction to the block. + * Updates world-state with the effects of the transaction. + * Asynchronously begins proving of all the transaction circuits (Tube, AVM, Public Kernel, Base Rollup). + * @param processedTx - The processed transaction to add. + */ + public async addTx(processedTx: ProcessedTx): Promise { + this.logger.verbose(`Adding tx to block`, { ...this.logdata, txHash: processedTx.hash.toString() }); + + const index = this.txs.length; + const txOrchestrator = new TxOrchestrator(processedTx, this.globalVariables, index, this.context); + this.txs.push(txOrchestrator); + + // Block until world-state is updated + await txOrchestrator.updateState(); + this.logger.verbose(`Updated trees with tx effects`, { ...this.logdata, txHash: processedTx.hash.toString() }); + + // Start simulation and proving + this.startWork(txOrchestrator); + } + + /** + * Flags the current block as ended and pads it with empty transactions if needed. + * We only need to pad the block if it has zero or one transactions. + * Blocks until padding txs are added, and starts proving them. + * @remarks Pending implementation + */ + @memoize + public endBlock(): Promise { + this.logger.verbose(`Marking block as completed`, this.logdata); + + // TODO: Create padding txs and await them + const nonEmptyTxEffects: TxEffect[] = this.txs.map(tx => tx.getTxEffect()).filter(txEffect => !txEffect.isEmpty()); + const body = new Body(nonEmptyTxEffects); + this.body = body; + + return Promise.resolve(); + } + + /** + * Computes the block and updates world-state archive with its header. + * Flags block as ended if has not been already. + * Requires all simulation tasks to have been completed, blocks until they are. + */ + @memoize + public async updateState(): Promise { + await this.endBlock(); + this.logger.debug(`Awaiting block to be built to update state`, this.logdata); + const block = await this.getBlock(); + await this.context.db.updateArchive(block.header); + this.logger.verbose(`Updated archive tree with block header`, { + ...this.logdata, + blockHash: block.header.hash().toString(), + }); + + const rootOutput = await this.getBlockRoot().simulate(); + await validateBlockRootOutput(rootOutput, block.header, this.context.db); + } + + /** + * Returns the full L2 block computed by this orchestrator. + * Requires all simulation tasks to have been completed. + * @returns The full L2 block. + */ + public getBlock(): Promise { + return this.blockPromise.promise; + } + + /** + * Returns the simulation output of the block root circuit. + */ + public simulate(): Promise { + return this.simulationPromise.promise; + } + + /** + * Returns the proof for the block root circuit. 
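+ * @throws If the orchestrator was created in simulation-only mode.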
+ */ + public prove(): Promise> { + if (this.isSimulationOnly) { + throw new Error(`Cannot prove block in simulation-only mode`); + } + return this.proofPromise.promise; + } + + /** Start simulation and proving of the parity orchestrator, and wire its outputs to the block root circuit. */ + private startParity(parity: ParityOrchestrator) { + void parity + .simulate() + .then(simulation => this.getBlockRoot().setRootParitySimulation(simulation)) + .catch(this.handleError); + + if (!this.isSimulationOnly) { + void parity + .prove() + .then(proof => this.getBlockRoot().setRootParityProof(proof)) + .catch(this.handleError); + } + } + + /** + * Starts simulation and proving of the given tx orchestrator or merge circuit, + * and wires its outputs to the next merge circuit (which may be the block root circuit). + */ + private startWork(source: TxOrchestrator | MergeRollupCircuit) { + const { index } = source; + + // Merge level, or maxlevels + 1 if it's a tx + const mergeLevelCount = Math.ceil(Math.log2(this.numTxs)) - 1; + const level = 'level' in source ? source.level : mergeLevelCount + 1; + + void source + .simulate() + .then(simulation => this.getParentMerge(level, index).setNestedSimulation(simulation, index % 2)) + .catch(this.handleError); + + if (!this.isSimulationOnly) { + void source + .prove() + .then(proof => this.getParentMerge(level, index).setNestedProof(proof, index % 2)) + .catch(this.handleError); + } + } + + /** + * Returns or creates the parent merge circuit for the given node in the proving tree. + * If the returned node is the root, returns the block root circuit. + */ + private getParentMerge(level: number, index: number): MergeRollupCircuit | BlockRootCircuit { + this.logger.debug(`Requesting parent merge`, { ...this.logdata, level, index }); + const location = getMergeLocation({ level, index, total: this.numTxs }); + if (location.level === 0) { + this.logger.debug(`Returning root`, { ...this.logdata, ...location }); + return this.getBlockRoot(); + } + + if (!this.merges[location.indexWithinTree]) { + this.logger.debug(`Creating new merge at `, { ...this.logdata, ...location }); + const merge = new MergeRollupCircuit(location.level, location.indexWithinLevel, this.context); + this.startWork(merge); + this.merges[location.indexWithinTree] = merge; + } else { + this.logger.debug(`Returning existing merge at `, { ...this.logdata, ...location }); + } + + return this.merges[location.indexWithinTree]; + } + + /** + * Initializes the block root circuit and starts its simulation and proving. + * Note that simulation and proving won't actually start until its nested circuits are ready. + * @returns The block root circuit for this block. 
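+ * Memoized, so repeated calls wire into the same block-root circuit instance.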
+ */ + @memoize + private getBlockRoot() { + this.logger.debug(`Creating new block-root circuit`, this.logdata); + const blockRoot = new BlockRootCircuit(this.globalVariables.blockNumber.toNumber(), this.context); + + void blockRoot + .simulate() + .then(output => { + this.simulationPromise.resolve(output); + return this.makeBlock(output); + }) + .then(block => this.blockPromise.resolve(block)) + .catch(this.handleError); + + if (!this.isSimulationOnly) { + void blockRoot + .prove() + .then(proof => { + this.proofPromise.resolve(proof); + }) + .catch(this.handleError); + } + + return blockRoot; + } + + private async makeBlock(blockRootOutputs: BlockRootOrBlockMergePublicInputs): Promise { + const archive = blockRootOutputs.newArchive; + + const header = await this.makeHeader(blockRootOutputs); + if (!header.hash().equals(blockRootOutputs.endBlockHash)) { + throw new Error( + `Block header hash mismatch: ${header.hash().toString()} !== ${blockRootOutputs.endBlockHash.toString()}`, + ); + } + + // Note we may want to save the tx effect separately, so we don't need to rehydrate all tx orchestrators if this block orch goes down + const nonEmptyTxEffects: TxEffect[] = this.txs.map(tx => tx.getTxEffect()).filter(txEffect => !txEffect.isEmpty()); + const body = new Body(nonEmptyTxEffects); + if (!body.getTxsEffectsHash().equals(header.contentCommitment.txsEffectsHash)) { + const bodyTxEffectsHex = body.getTxsEffectsHash().toString('hex'); + const headerTxEffectsHex = header.contentCommitment.txsEffectsHash.toString('hex'); + throw new Error(`Txs effects hash mismatch: ${bodyTxEffectsHex} != ${headerTxEffectsHex}`); + } + + this.logger.debug(`Assembled L2 block`, { ...this.logdata, blockHash: header.hash().toString() }); + return L2Block.fromFields({ archive, header, body }); + } + + private async makeHeader(blockRootOutputs: BlockRootOrBlockMergePublicInputs) { + this.logger.debug(`Building block header`, this.logdata); + const blockRoot = this.getBlockRoot(); + const [leftMerge, rightMerge, rootParity] = await blockRoot.getSimulationInputs(); + this.logger.debug(`Acquired block root inputs`, this.logdata); + + const contentCommitment = new ContentCommitment( + new Fr(leftMerge.numTxs + rightMerge.numTxs), + sha256Trunc(Buffer.concat([leftMerge.txsEffectsHash.toBuffer(), rightMerge.txsEffectsHash.toBuffer()])), + rootParity.shaRoot.toBuffer(), + sha256Trunc(Buffer.concat([leftMerge.outHash.toBuffer(), rightMerge.outHash.toBuffer()])), + ); + const state = new StateReference( + await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, this.context.db), + rightMerge.end, + ); + + const fees = leftMerge.accumulatedFees.add(rightMerge.accumulatedFees); + const header = new Header(blockRootOutputs.previousArchive, contentCommitment, state, this.globalVariables, fees); + this.logger.debug(`Completed block header`, { ...this.logdata, blockHash: header.hash().toString() }); + return header; + } + + private handleError(err: Error) { + this.logger.error(`Error in block orchestrator`, err, this.logdata); + throw new Error('Unimplemented'); + // cancel all outstanding work + // reject all outstanding promises + // log loudly + // this will be repeated across all three orchestrators, maybe refactor into helper? 
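+ // rough sketch only (assumes the promiseWithResolvers objects expose reject):
+ // this.simulationPromise.reject(err); this.blockPromise.reject(err); this.proofPromise.reject(err);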
+ } + + static load(_id: string, _context: OrchestratorContext) { + throw new Error('Unimplemented'); + // const metadata: BlockOrchestratorMetadata = await context.metadataStore.load(id); + // if (!metadata) { + // throw new Error('Block not found'); + // } + // const { index, numTxs, globalVariables, l1ToL2Messages } = metadata; + // const orchestrator = new BlockOrchestrator(index, numTxs, globalVariables, l1ToL2Messages, context); + // if (metadata.status === 'proven') { + // const proofKey = id + 'proof'; // need to devise a proper scheme for this + // const proof = (await context.payloadStore.load( + // proofKey, + // )) as unknown as PublicInputsAndRecursiveProof; // deserialize! + // if (!proof) { + // // fall back to no-proof + // } + // // orchestrator.handleBlockRootProof(proof); // should also set the block itself..? + // return orchestrator; // we're good! + // } + // if (metadata.status === 'updated-state' || metadata.status === 'processed-txs') { + // // set l2 block body if all txs have been processed + // orchestrator.body = metadata.body; + // } + // // now, we need to rehydrate the orchestrator with partial proving state + // // let's start with the merges + // // what are we loading here? just the ids? + // const merges = await context.metadataStore.list(id + 'merges'); // again, proper key scheme! + // // we actually only need the highest full level of merges, can forget about the rest + // // need to get that from the list of merges given their level and index, or return undefined if none + // const highestLevel = merges; // getHighestCompleteLevel(merges); + // if (highestLevel) { + // for (const merge of merges) { + // const mergeOrch = await MergeRollupProvingJob.load(merge, context); + // orchestrator.merges.push(mergeOrch); // set them to the proper index! + // // wire them up! + // } + // } + // // if we don't have a full level of merges, we need to rehydrate the txs + // const txs = await context.metadataStore.list(id + 'txs'); + // for (const tx of txs) { + // const txOrch = await TxOrchestrator.load(tx, context); + // orchestrator.txs.push(txOrch); // set them to the proper index! + // // wire them up! 
+ // } + // return orchestrator; + } +} diff --git a/yarn-project/prover-client/src/orchestrator-2/circuits/avm.ts b/yarn-project/prover-client/src/orchestrator-2/circuits/avm.ts new file mode 100644 index 000000000000..302eef435dc9 --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator-2/circuits/avm.ts @@ -0,0 +1,42 @@ +import { type AvmProofAndVerificationKey, type AvmProvingRequest, ProvingRequestType } from '@aztec/circuit-types'; +import { AvmCircuitInputs, AvmVerificationKeyData, makeEmptyProof } from '@aztec/circuits.js'; +import { memoize } from '@aztec/foundation/decorators'; +import { createDebugLogger } from '@aztec/foundation/log'; + +import { type Circuit, type OrchestratorContext } from '../types.js'; + +export class AvmCircuit implements Circuit { + private logger = createDebugLogger('aztec:prover-client:avm-circuit'); + + constructor(public readonly provingRequest: AvmProvingRequest, private context: OrchestratorContext) {} + + public simulate(): Promise { + return Promise.resolve(null); + } + + @memoize + public async prove(): Promise { + const inputs = this.getInputs(); + + try { + return await this.context.prover.prove({ type: ProvingRequestType.PUBLIC_VM, inputs }); + } catch (err) { + if (this.context.options.avmProvingStrict) { + throw err; + } else { + this.logger.warn(`Error thrown when proving AVM circuit, but strict proving is off. Error: ${err}.`); + return { proof: makeEmptyProof(), verificationKey: AvmVerificationKeyData.makeEmpty() }; + } + } + } + + private getInputs() { + const provingRequest = this.provingRequest; + if (provingRequest.type !== 'AVM') { + throw new Error(`Invalid proving request type for AVM: ${provingRequest.type}`); + } + + const publicInputs = provingRequest.kernelRequest.inputs.publicCall.callStackItem.publicInputs; + return AvmCircuitInputs.from({ ...provingRequest, publicInputs }); + } +} diff --git a/yarn-project/prover-client/src/orchestrator-2/circuits/base-parity.ts b/yarn-project/prover-client/src/orchestrator-2/circuits/base-parity.ts new file mode 100644 index 000000000000..696b23aa88e8 --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator-2/circuits/base-parity.ts @@ -0,0 +1,36 @@ +import { ProvingRequestType } from '@aztec/circuit-types'; +import { + BaseParityInputs, + type Fr, + type NUM_MSGS_PER_BASE_PARITY, + type ParityPublicInputs, + type RootParityInput, +} from '@aztec/circuits.js'; +import { memoize } from '@aztec/foundation/decorators'; +import { type Tuple } from '@aztec/foundation/serialize'; +import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types'; + +import { type OrchestratorContext , type Circuit } from '../types.js'; + +export class BaseParityCircuit implements Circuit { + constructor( + public readonly messages: Tuple, + public readonly index: number, + private readonly context: OrchestratorContext, + ) {} + + @memoize + public buildInputs() { + return new BaseParityInputs(this.messages, getVKTreeRoot()); + } + + @memoize + public simulate(): Promise { + return this.context.simulator.simulate({ type: ProvingRequestType.BASE_PARITY, inputs: this.buildInputs() }); + } + + @memoize + public prove(): Promise> { + return this.context.prover.prove({ type: ProvingRequestType.BASE_PARITY, inputs: this.buildInputs() }); + } +} diff --git a/yarn-project/prover-client/src/orchestrator-2/circuits/base-rollup.ts b/yarn-project/prover-client/src/orchestrator-2/circuits/base-rollup.ts new file mode 100644 index 000000000000..c737c7958bc9 --- /dev/null +++ 
b/yarn-project/prover-client/src/orchestrator-2/circuits/base-rollup.ts @@ -0,0 +1,252 @@ +import { + MerkleTreeId, + type ProcessedTx, + ProvingRequestType, + type PublicInputsAndRecursiveProof, + type TubeProofAndVK, +} from '@aztec/circuit-types'; +import { + type AppendOnlyTreeSnapshot, + type BaseOrMergeRollupPublicInputs, + BaseRollupInputs, + Fr, + type GlobalVariables, + type KernelCircuitPublicInputs, + KernelData, + MAX_NULLIFIERS_PER_TX, + MembershipWitness, + NOTE_HASH_SUBTREE_HEIGHT, + NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH, + NULLIFIER_SUBTREE_HEIGHT, + NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, + NULLIFIER_TREE_HEIGHT, + NullifierLeafPreimage, + PartialStateReference, + PublicDataHint, + type PublicKernelCircuitPublicInputs, + StateDiffHints, +} from '@aztec/circuits.js'; +import { makeTuple } from '@aztec/foundation/array'; +import { padArrayEnd } from '@aztec/foundation/collection'; +import { memoize } from '@aztec/foundation/decorators'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { promiseWithResolvers } from '@aztec/foundation/promise'; +import { type FieldsOf } from '@aztec/foundation/types'; +import { getVKIndex, getVKSiblingPath } from '@aztec/noir-protocol-circuits-types'; +import { HintsBuilder, computeFeePayerBalanceLeafSlot } from '@aztec/simulator'; + +import { + getConstantRollupData, + getMembershipWitnessFor, + getSubtreeSiblingPath, + getTreeSnapshot, + makeEmptyMembershipWitness, + processPublicDataUpdateRequests, + validatePartialState, +} from '../../orchestrator/block-building-helpers.js'; +import { type Circuit, type OrchestratorContext } from '../types.js'; + +type BaseRollupNestedProof = + | PublicInputsAndRecursiveProof + | PublicInputsAndRecursiveProof + | TubeProofAndVK; + +export class BaseRollupCircuit implements Circuit { + /** Resolved when updateState is called and inputs to the base rollup are computed. */ + private readonly commonInputs = promiseWithResolvers, 'kernelData'>>(); + + /** Proof from the last kernel in the tx. 
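+ * Either a public-kernel proof or a tube proof, per the BaseRollupNestedProof union above.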
*/ + private readonly nestedKernelProof = promiseWithResolvers(); + + /** Snapshots from trees after executing state updates */ + private treeSnapshots?: Record< + MerkleTreeId.NOTE_HASH_TREE | MerkleTreeId.NULLIFIER_TREE | MerkleTreeId.PUBLIC_DATA_TREE, + AppendOnlyTreeSnapshot + >; + + private readonly logger = createDebugLogger('aztec:prover-client:base-rollup'); + + constructor( + public readonly tx: ProcessedTx, + public readonly globalVariables: GlobalVariables, + public readonly index: number, + private readonly context: OrchestratorContext, + ) {} + + public setNestedKernelProof(proof: BaseRollupNestedProof) { + this.nestedKernelProof.resolve(proof); + } + + public async updateState() { + this.logger.debug('Updating state'); + const commonInputs = await this.buildCommonInputs(); + this.logger.debug('Built common inputs'); + this.treeSnapshots = await this.getTreeSnapshots(); + this.logger.debug('Got tree snapshots'); + this.commonInputs.resolve(commonInputs); + } + + private async getTreeSnapshots() { + return { + [MerkleTreeId.NOTE_HASH_TREE]: await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, this.context.db), + [MerkleTreeId.NULLIFIER_TREE]: await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, this.context.db), + [MerkleTreeId.PUBLIC_DATA_TREE]: await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, this.context.db), + }; + } + + private async buildCommonInputs(): Promise, 'kernelData'>> { + const { + tx, + globalVariables, + context: { db }, + } = this; + + // Get trees info before any changes hit + const constants = await getConstantRollupData(globalVariables, db); + const start = new PartialStateReference( + await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db), + await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db), + await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, db), + ); + + // Get the subtree sibling paths for the circuit + const noteHashSubtreeSiblingPath = padArrayEnd( + await getSubtreeSiblingPath(MerkleTreeId.NOTE_HASH_TREE, NOTE_HASH_SUBTREE_HEIGHT, db), + Fr.ZERO, + NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH, + ); + + // Create data hint for reading fee payer initial balance in Fee Juice + // If no fee payer is set, read hint should be empty + // If there is already a public data write for this slot, also skip the read hint + const hintsBuilder = new HintsBuilder(db); + const leafSlot = computeFeePayerBalanceLeafSlot(tx.data.feePayer); + const existingBalanceWrite = tx.data.end.publicDataUpdateRequests.find(write => write.leafSlot.equals(leafSlot)); + const feePayerFeeJuiceBalanceReadHint = + leafSlot.isZero() || existingBalanceWrite + ? PublicDataHint.empty() + : await hintsBuilder.getPublicDataHint(leafSlot.toBigInt()); + + // Update the note hash trees with the new items being inserted to get the new roots + // that will be used by the next iteration of the base rollup circuit, skipping the empty ones + const noteHashes = tx.data.end.noteHashes; + await db.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes); + + // The read witnesses for a given TX should be generated before the writes of the same TX are applied. + // All reads that refer to writes in the same tx are transient and can be simplified out. 
+ const txPublicDataUpdateRequestInfo = await processPublicDataUpdateRequests(tx, db); + + // Update the nullifier tree, capturing the low nullifier info for each individual operation + const { + lowLeavesWitnessData: nullifierWitnessLeaves, + newSubtreeSiblingPath: unpaddedNullifierSubtreeSiblingPath, + sortedNewLeaves: sortednullifiers, + sortedNewLeavesIndexes, + } = await db.batchInsert( + MerkleTreeId.NULLIFIER_TREE, + tx.data.end.nullifiers.map(n => n.toBuffer()), + NULLIFIER_SUBTREE_HEIGHT, + ); + + if (nullifierWitnessLeaves === undefined) { + throw new Error(`Could not craft nullifier batch insertion proofs`); + } + + // Extract witness objects from returned data + const nullifierPredecessorMembershipWitnesses = padArrayEnd( + nullifierWitnessLeaves.map(({ index, siblingPath }) => + MembershipWitness.fromBufferArray(index, siblingPath.toBufferArray()), + ), + makeEmptyMembershipWitness(NULLIFIER_TREE_HEIGHT), + MAX_NULLIFIERS_PER_TX, + ); + + const nullifierSubtreeSiblingPath = padArrayEnd( + unpaddedNullifierSubtreeSiblingPath.toFields(), + Fr.ZERO, + NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, + ); + + const nullifierPredecessorPreimages = padArrayEnd( + nullifierWitnessLeaves.map(l => l.leafPreimage as NullifierLeafPreimage), + NullifierLeafPreimage.empty(), + MAX_NULLIFIERS_PER_TX, + ); + + const publicDataSiblingPath = txPublicDataUpdateRequestInfo.newPublicDataSubtreeSiblingPath; + + const stateDiffHints = StateDiffHints.from({ + nullifierPredecessorPreimages, + nullifierPredecessorMembershipWitnesses, + sortedNullifiers: makeTuple(MAX_NULLIFIERS_PER_TX, i => Fr.fromBuffer(sortednullifiers[i])), + sortedNullifierIndexes: makeTuple(MAX_NULLIFIERS_PER_TX, i => sortedNewLeavesIndexes[i]), + noteHashSubtreeSiblingPath, + nullifierSubtreeSiblingPath, + publicDataSiblingPath, + }); + + const blockHash = tx.data.constants.historicalHeader.hash(); + const archiveRootMembershipWitness = await getMembershipWitnessFor(blockHash, MerkleTreeId.ARCHIVE, db); + + return { + start, + stateDiffHints, + feePayerFeeJuiceBalanceReadHint: feePayerFeeJuiceBalanceReadHint, + sortedPublicDataWrites: txPublicDataUpdateRequestInfo.sortedPublicDataWrites, + sortedPublicDataWritesIndexes: txPublicDataUpdateRequestInfo.sortedPublicDataWritesIndexes, + lowPublicDataWritesPreimages: txPublicDataUpdateRequestInfo.lowPublicDataWritesPreimages, + lowPublicDataWritesMembershipWitnesses: txPublicDataUpdateRequestInfo.lowPublicDataWritesMembershipWitnesses, + archiveRootMembershipWitness, + constants, + }; + } + + @memoize + private async getSimulationInputs() { + const commonInputs = await this.commonInputs.promise; + const kernelData = KernelData.withEmptyProof(this.tx.data); + return BaseRollupInputs.from({ ...commonInputs, kernelData }); + } + + @memoize + private async getProvingInputs() { + const commonInputs = await this.commonInputs.promise; + const kernelProof = await this.nestedKernelProof.promise; + + const { proof, verificationKey } = kernelProof; + const vkIndex = getVKIndex(verificationKey); + const vkPath = getVKSiblingPath(vkIndex); + const kernelData = new KernelData(this.tx.data, proof, verificationKey, vkIndex, vkPath); + + return BaseRollupInputs.from({ ...commonInputs, kernelData }); + } + + @memoize + public async simulate(): Promise { + const result = await this.context.simulator.simulate({ + type: ProvingRequestType.BASE_ROLLUP, + inputs: await this.getSimulationInputs(), + }); + + this.validateResult(result); + return result; + } + + @memoize + public async prove(): Promise> { + const result 
= await this.context.prover.prove({ + type: ProvingRequestType.BASE_ROLLUP, + inputs: await this.getProvingInputs(), + }); + if (this.context.options.checkSimulationMatchesProof && !result.inputs.equals(await this.simulate())) { + throw new Error(`Simulation output and proof public inputs do not match`); + } + + this.validateResult(result.inputs); + return result; + } + + private validateResult(result: BaseOrMergeRollupPublicInputs) { + validatePartialState(result.end, this.treeSnapshots!); + } +} diff --git a/yarn-project/prover-client/src/orchestrator-2/circuits/block-merge.ts b/yarn-project/prover-client/src/orchestrator-2/circuits/block-merge.ts new file mode 100644 index 000000000000..e0a02902a0aa --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator-2/circuits/block-merge.ts @@ -0,0 +1,77 @@ +import { ProvingRequestType, type PublicInputsAndRecursiveProof } from '@aztec/circuit-types'; +import { BlockMergeRollupInputs, type BlockRootOrBlockMergePublicInputs, PreviousRollupBlockData } from '@aztec/circuits.js'; +import { makeTuple } from '@aztec/foundation/array'; +import { memoize } from '@aztec/foundation/decorators'; +import { promiseWithResolvers } from '@aztec/foundation/promise'; +import { mapTuple } from '@aztec/foundation/serialize'; +import { getVKMembershipWitness } from '@aztec/noir-protocol-circuits-types'; + +import { type OrchestratorContext , type Circuit } from '../types.js'; + +export class BlockMergeCircuit implements Circuit { + private readonly simulationInputs = makeTuple(2, () => promiseWithResolvers()); + private readonly provingInputs = makeTuple(2, () => + promiseWithResolvers>(), + ); + + constructor( + public readonly level: number, + public readonly index: number, + private readonly context: OrchestratorContext, + ) {} + + public setNestedSimulation(simulation: BlockRootOrBlockMergePublicInputs, index: number) { + this.checkIndex(index); + this.simulationInputs[index].resolve(simulation); + } + + public setNestedProof(proof: PublicInputsAndRecursiveProof, index: number) { + this.checkIndex(index); + this.provingInputs[index].resolve(proof); + } + + private checkIndex(index: number) { + if (index > 1) { + throw new Error('Invalid child merge index.'); + } + } + + private async getSimulationInputs(): Promise { + const inputs = await Promise.all(mapTuple(this.simulationInputs, ({ promise }) => promise)); + return new BlockMergeRollupInputs(mapTuple(inputs, merge => PreviousRollupBlockData.withEmptyProof(merge))); + } + + private async getProvingInputs(): Promise { + const inputs = await Promise.all(mapTuple(this.provingInputs, ({ promise }) => promise)); + + return new BlockMergeRollupInputs( + mapTuple( + inputs, + ({ inputs, proof, verificationKey }) => + new PreviousRollupBlockData( + inputs, + proof, + verificationKey.keyAsFields, + getVKMembershipWitness(verificationKey), + ), + ), + ); + } + + @memoize + public async simulate(): Promise { + const inputs = await this.getSimulationInputs(); + return this.context.simulator.simulate({ type: ProvingRequestType.BLOCK_MERGE_ROLLUP, inputs }); + } + + @memoize + public async prove(): Promise> { + const inputs = await this.getProvingInputs(); + const result = await this.context.prover.prove({ type: ProvingRequestType.BLOCK_MERGE_ROLLUP, inputs }); + + if (this.context.options.checkSimulationMatchesProof && !result.inputs.equals(await this.simulate())) { + throw new Error(`Simulation output and proof public inputs do not match`); + } + return result; + } +} diff --git 
a/yarn-project/prover-client/src/orchestrator-2/circuits/block-root.ts b/yarn-project/prover-client/src/orchestrator-2/circuits/block-root.ts new file mode 100644 index 000000000000..c82743c4bffe --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator-2/circuits/block-root.ts @@ -0,0 +1,206 @@ +import { MerkleTreeId, ProvingRequestType, type PublicInputsAndRecursiveProof } from '@aztec/circuit-types'; +import { + type ARCHIVE_HEIGHT, + type AppendOnlyTreeSnapshot, + type BaseOrMergeRollupPublicInputs, + type BlockRootOrBlockMergePublicInputs, + BlockRootRollupInputs, + Fr, + NESTED_RECURSIVE_PROOF_LENGTH, + type ParityPublicInputs, + PreviousRollupData, + RootParityInput, +} from '@aztec/circuits.js'; +import { makeTuple } from '@aztec/foundation/array'; +import { memoize } from '@aztec/foundation/decorators'; +import { type LogData, createDebugLogger } from '@aztec/foundation/log'; +import { promiseWithResolvers } from '@aztec/foundation/promise'; +import { type Tuple, awaitTuple, mapTuple } from '@aztec/foundation/serialize'; +import { getVKMembershipWitness } from '@aztec/noir-protocol-circuits-types'; + +import { getRootTreeSiblingPath, getTreeSnapshot } from '../../orchestrator/block-building-helpers.js'; +import { type Circuit, type OrchestratorContext, type ParityState } from '../types.js'; + +/** + * Handles the block root circuit. This circuit is responsible for proving the root of the block. + * Requires inputs from root parity, two child merge circuits, and a snapshot of the archive tree. + */ +export class BlockRootCircuit implements Circuit { + private readonly simulationInputs = makeTuple(2, () => promiseWithResolvers()); + private readonly provingInputs = makeTuple(2, () => + promiseWithResolvers>(), + ); + + private readonly rootParitySimulationInput = promiseWithResolvers(); + private readonly rootParityProvingInput = + promiseWithResolvers>(); + private readonly rootParityStateInput = promiseWithResolvers(); + + private readonly archiveSnapshot = promiseWithResolvers<{ + startArchiveSnapshot: AppendOnlyTreeSnapshot; + newArchiveSiblingPath: Tuple; + }>(); + + private readonly logger = createDebugLogger('aztec:prover-client:block-root-circuit'); + private readonly logdata: LogData; + + constructor(public readonly blockNumber: number, private context: OrchestratorContext) { + this.logdata = { blockNumber }; + } + + public getSimulationInputs() { + return Promise.all([ + this.simulationInputs[0].promise, + this.simulationInputs[1].promise, + this.rootParitySimulationInput.promise, + ] as const); + } + + public setNestedSimulation(simulation: BaseOrMergeRollupPublicInputs, index: number) { + this.logger.debug(`Setting nested simulation for index ${index}`, this.logdata); + this.checkIndex(index); + this.simulationInputs[index].resolve(simulation); + } + + public setNestedProof(proof: PublicInputsAndRecursiveProof, index: number) { + this.logger.debug(`Setting nested proof for index ${index}`, this.logdata); + this.checkIndex(index); + this.provingInputs[index].resolve(proof); + } + + private checkIndex(index: number) { + if (index > 1) { + throw new Error('Invalid child merge index.'); + } + } + + public setRootParitySimulation(input: ParityPublicInputs) { + this.logger.debug(`Setting root parity simulation`, this.logdata); + this.rootParitySimulationInput.resolve(input); + } + + public setRootParityProof(input: RootParityInput) { + this.logger.debug(`Setting root parity proof`, this.logdata); + this.rootParityProvingInput.resolve(input); + } + + public 
setRootParityState(input: ParityState) { + this.logger.debug(`Setting root parity state`, this.logdata); + this.rootParityStateInput.resolve(input); + } + + public async takeArchiveSnapshot() { + this.logger.debug(`Acquiring archive snapshot`, this.logdata); + const db = this.context.db; + + const startArchiveSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db); + const newArchiveSiblingPath = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE, db); + + this.archiveSnapshot.resolve({ startArchiveSnapshot, newArchiveSiblingPath }); + } + + @memoize + private async buildSimulationInputs() { + this.logger.debug(`Awaiting root parity simulation inputs`, this.logdata); + const [rootParityInput, rootParityState] = await Promise.all([ + this.rootParitySimulationInput.promise, + this.rootParityStateInput.promise, + ] as const); + + const { + l1ToL2Messages: newL1ToL2Messages, + messageTreeSnapshot: startL1ToL2MessageTreeSnapshot, + newL1ToL2MessageTreeRootSiblingPath, + } = rootParityState; + + this.logger.debug(`Awaiting merge simulation inputs`, this.logdata); + const previousRollupData: BlockRootRollupInputs['previousRollupData'] = await awaitTuple( + mapTuple(this.simulationInputs, ({ promise }: { promise: Promise }) => + promise.then(input => PreviousRollupData.withEmptyProof(input)), + ), + ); + + this.logger.debug(`Awaiting archive snapshot`, this.logdata); + const { startArchiveSnapshot, newArchiveSiblingPath } = await this.archiveSnapshot.promise; + + const l1ToL2Roots = RootParityInput.withEmptyProof(rootParityInput, NESTED_RECURSIVE_PROOF_LENGTH); + + return BlockRootRollupInputs.from({ + previousRollupData, + l1ToL2Roots, + newL1ToL2Messages, + newL1ToL2MessageTreeRootSiblingPath, + startL1ToL2MessageTreeSnapshot, + startArchiveSnapshot, + newArchiveSiblingPath, + // TODO(#7346): Inject previous block hash (required when integrating batch rollup circuits) + previousBlockHash: Fr.ZERO, + proverId: this.context.proverId, + }); + } + + @memoize + private async buildProvingInputs() { + const [rootParityProof, rootParityState] = await Promise.all([ + this.rootParityProvingInput.promise, + this.rootParityStateInput.promise, + ] as const); + + const { + l1ToL2Messages: newL1ToL2Messages, + messageTreeSnapshot: startL1ToL2MessageTreeSnapshot, + newL1ToL2MessageTreeRootSiblingPath, + } = rootParityState; + + const rollupInputs = await Promise.all(mapTuple(this.provingInputs, ({ promise }) => promise)); + const previousRollupData: BlockRootRollupInputs['previousRollupData'] = mapTuple( + rollupInputs, + ({ inputs, proof, verificationKey }) => + new PreviousRollupData(inputs, proof, verificationKey.keyAsFields, getVKMembershipWitness(verificationKey)), + ); + + const l1ToL2Roots = new RootParityInput( + rootParityProof.proof, + rootParityProof.verificationKey, + rootParityProof.vkPath, + rootParityProof.publicInputs, + ); + + const { startArchiveSnapshot, newArchiveSiblingPath } = await this.archiveSnapshot.promise; + + return BlockRootRollupInputs.from({ + previousRollupData, + l1ToL2Roots, + newL1ToL2Messages, + newL1ToL2MessageTreeRootSiblingPath, + startL1ToL2MessageTreeSnapshot, + startArchiveSnapshot, + newArchiveSiblingPath, + // TODO(#7346): Inject previous block hash (required when integrating batch rollup circuits) + previousBlockHash: Fr.ZERO, + proverId: this.context.proverId, + }); + } + + @memoize + public async simulate(): Promise { + this.logger.debug(`Awaiting simulation inputs`, this.logdata); + const inputs = await this.buildSimulationInputs(); + 
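+ // the await above resolves only once both child rollup simulations, the root parity output and state, and the archive snapshot are in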
this.logger.debug(`Acquired simulation inputs`, this.logdata); + return this.context.simulator.simulate({ type: ProvingRequestType.BLOCK_ROOT_ROLLUP, inputs }); + } + + @memoize + public async prove(): Promise> { + this.logger.debug(`Awaiting proving inputs`, this.logdata); + const inputs = await this.buildProvingInputs(); + this.logger.debug(`Acquired proving inputs`, this.logdata); + const result = await this.context.prover.prove({ type: ProvingRequestType.BLOCK_ROOT_ROLLUP, inputs }); + + if (this.context.options.checkSimulationMatchesProof && !result.inputs.equals(await this.simulate())) { + throw new Error(`Simulation output and proof public inputs do not match`); + } + + return result; + } +} diff --git a/yarn-project/prover-client/src/orchestrator-2/circuits/merge-rollup.ts b/yarn-project/prover-client/src/orchestrator-2/circuits/merge-rollup.ts new file mode 100644 index 000000000000..7bfba2ca8c90 --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator-2/circuits/merge-rollup.ts @@ -0,0 +1,68 @@ +import { ProvingRequestType, type PublicInputsAndRecursiveProof } from '@aztec/circuit-types'; +import { type BaseOrMergeRollupPublicInputs, MergeRollupInputs, PreviousRollupData } from '@aztec/circuits.js'; +import { makeTuple } from '@aztec/foundation/array'; +import { memoize } from '@aztec/foundation/decorators'; +import { promiseWithResolvers } from '@aztec/foundation/promise'; +import { mapTuple } from '@aztec/foundation/serialize'; +import { getVKMembershipWitness } from '@aztec/noir-protocol-circuits-types'; + +import { type OrchestratorContext , type Circuit } from '../types.js'; + +export class MergeRollupCircuit implements Circuit { + private readonly simulationInputs = makeTuple(2, () => promiseWithResolvers()); + private readonly provingInputs = makeTuple(2, () => + promiseWithResolvers>(), + ); + + constructor(public readonly level: number, public readonly index: number, private context: OrchestratorContext) {} + + public setNestedSimulation(simulation: BaseOrMergeRollupPublicInputs, index: number) { + this.checkIndex(index); + this.simulationInputs[index].resolve(simulation); + } + + public setNestedProof(proof: PublicInputsAndRecursiveProof, index: number) { + this.checkIndex(index); + this.provingInputs[index].resolve(proof); + } + + private checkIndex(index: number) { + if (index > 1) { + throw new Error('Invalid child merge index.'); + } + } + + private async getSimulationInputs(): Promise { + const inputs = await Promise.all(mapTuple(this.simulationInputs, ({ promise }) => promise)); + return new MergeRollupInputs(mapTuple(inputs, input => PreviousRollupData.withEmptyProof(input))); + } + + private async getProvingInputs(): Promise { + const inputs = await Promise.all(mapTuple(this.provingInputs, ({ promise }) => promise)); + + return new MergeRollupInputs( + mapTuple( + inputs, + ({ inputs, proof, verificationKey }) => + new PreviousRollupData(inputs, proof, verificationKey.keyAsFields, getVKMembershipWitness(verificationKey)), + ), + ); + } + + @memoize + public async simulate(): Promise { + const inputs = await this.getSimulationInputs(); + return this.context.simulator.simulate({ type: ProvingRequestType.MERGE_ROLLUP, inputs }); + } + + @memoize + public async prove(): Promise> { + const inputs = await this.getProvingInputs(); + const result = await this.context.prover.prove({ type: ProvingRequestType.MERGE_ROLLUP, inputs }); + + if (this.context.options.checkSimulationMatchesProof && !result.inputs.equals(await this.simulate())) { + throw new 
Error(`Simulation output and proof public inputs do not match`); + } + return result; + } +} diff --git a/yarn-project/prover-client/src/orchestrator-2/circuits/public-kernel-non-tail.ts b/yarn-project/prover-client/src/orchestrator-2/circuits/public-kernel-non-tail.ts new file mode 100644 index 000000000000..38b7cf7c2c04 --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator-2/circuits/public-kernel-non-tail.ts @@ -0,0 +1,71 @@ +import { + type AvmProofAndVerificationKey, + type NestedRecursiveProofAndVK, + ProvingRequestType, + type PublicInputsAndRecursiveProof, + type PublicKernelNonTailRequest, + type TubeProofAndVK, +} from '@aztec/circuit-types'; +import { type PublicKernelCircuitPrivateInputs, type PublicKernelCircuitPublicInputs } from '@aztec/circuits.js'; +import { memoize } from '@aztec/foundation/decorators'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { promiseWithResolvers } from '@aztec/foundation/promise'; + +import { type Circuit, type OrchestratorContext } from '../types.js'; + +type PublicKernelPreviousProof = NestedRecursiveProofAndVK | TubeProofAndVK; + +export class PublicKernelNonTailCircuit implements Circuit { + private logger = createDebugLogger('aztec:prover-client:public-kernel-non-tail'); + + private previousKernelProof = promiseWithResolvers(); + private nestedAvmProof = promiseWithResolvers(); + + constructor(public readonly provingRequest: PublicKernelNonTailRequest, private context: OrchestratorContext) {} + + public setPreviousKernelProof(proof: PublicKernelPreviousProof) { + this.previousKernelProof.resolve(proof); + } + + public setNestedAvmProof(proof: AvmProofAndVerificationKey) { + this.nestedAvmProof.resolve(proof); + } + + @memoize + private async getProvingInputs(): Promise { + const previousKernelProof = await this.previousKernelProof.promise; + const avmProof = await this.nestedAvmProof.promise; + + // TODO: The kernel does not require the AVM VK, we're just getting the proof. Is that correct? + // TODO(#7369): How should we properly set up vkPath, vkIndex, and clientIvc for previous kernel? + // This is lifted from the TxProvingState, but ignores those properties on the previous kernel. + // How come it works? 
Maybe recursive proofs in public kernels are not being verified yet + return this.provingRequest.inputs.withNestedProofs(avmProof.proof, { + proof: previousKernelProof.proof, + vk: previousKernelProof.verificationKey, + }); + } + + @memoize + public simulate(): Promise { + const { inputs, type: kernelType } = this.provingRequest; + return this.context.simulator.simulate({ type: ProvingRequestType.PUBLIC_KERNEL_NON_TAIL, kernelType, inputs }); + } + + @memoize + public async prove(): Promise> { + const inputs = await this.getProvingInputs(); + + const result = await this.context.prover.prove({ + type: ProvingRequestType.PUBLIC_KERNEL_NON_TAIL, + kernelType: this.provingRequest.type, + inputs, + }); + + if (this.context.options.checkSimulationMatchesProof && !result.inputs.equals(await this.simulate())) { + throw new Error(`Simulation output and proof public inputs do not match`); + } + + return result; + } +} diff --git a/yarn-project/prover-client/src/orchestrator-2/circuits/public-kernel-tail.ts b/yarn-project/prover-client/src/orchestrator-2/circuits/public-kernel-tail.ts new file mode 100644 index 000000000000..6a4404e21c76 --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator-2/circuits/public-kernel-tail.ts @@ -0,0 +1,62 @@ +import { + type NestedRecursiveProofAndVK, + ProvingRequestType, + type PublicInputsAndRecursiveProof, + type PublicKernelTailRequest, + type TubeProofAndVK, +} from '@aztec/circuit-types'; +import { type KernelCircuitPublicInputs, type PublicKernelTailCircuitPrivateInputs } from '@aztec/circuits.js'; +import { memoize } from '@aztec/foundation/decorators'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { promiseWithResolvers } from '@aztec/foundation/promise'; + +import { type Circuit, type OrchestratorContext } from '../types.js'; + +type PublicKernelPreviousProof = NestedRecursiveProofAndVK | TubeProofAndVK; + +export class PublicKernelTailCircuit implements Circuit { + private logger = createDebugLogger('aztec:prover-client:public-kernel-tail'); + + private previousKernelProof = promiseWithResolvers(); + + constructor(public readonly provingRequest: PublicKernelTailRequest, private context: OrchestratorContext) {} + + public setPreviousKernelProof(proof: PublicKernelPreviousProof) { + this.previousKernelProof.resolve(proof); + } + + @memoize + private async getProvingInputs(): Promise { + const previousKernelProof = await this.previousKernelProof.promise; + + // TODO(#7369): How should we properly set up vkPath and vkIndex for previous kernel?
+ // See the public kernel non-tail as well + return this.provingRequest.inputs.withNestedProof({ + proof: previousKernelProof.proof, + vk: previousKernelProof.verificationKey, + }); + } + + @memoize + public simulate(): Promise { + const { inputs, type: kernelType } = this.provingRequest; + return this.context.simulator.simulate({ type: ProvingRequestType.PUBLIC_KERNEL_TAIL, kernelType, inputs }); + } + + @memoize + public async prove(): Promise> { + const inputs = await this.getProvingInputs(); + + const result = await this.context.prover.prove({ + type: ProvingRequestType.PUBLIC_KERNEL_TAIL, + kernelType: this.provingRequest.type, + inputs, + }); + + if (this.context.options.checkSimulationMatchesProof && !result.inputs.equals(await this.simulate())) { + throw new Error(`Simulation output and proof public inputs do not match`); + } + + return result; + } +} diff --git a/yarn-project/prover-client/src/orchestrator-2/circuits/root-parity.ts b/yarn-project/prover-client/src/orchestrator-2/circuits/root-parity.ts new file mode 100644 index 000000000000..935a9f05914b --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator-2/circuits/root-parity.ts @@ -0,0 +1,78 @@ +import { ProvingRequestType } from '@aztec/circuit-types'; +import { + NESTED_RECURSIVE_PROOF_LENGTH, + NUM_BASE_PARITY_PER_ROOT_PARITY, + type ParityPublicInputs, + type RECURSIVE_PROOF_LENGTH, + RootParityInput, + RootParityInputs, +} from '@aztec/circuits.js'; +import { makeTuple } from '@aztec/foundation/array'; +import { memoize } from '@aztec/foundation/decorators'; +import { promiseWithResolvers } from '@aztec/foundation/promise'; +import { awaitTuple, mapTuple } from '@aztec/foundation/serialize'; + +import { type OrchestratorContext , type Circuit } from '../types.js'; + +export class RootParityCircuit implements Circuit { + private readonly simulationInputs = makeTuple(NUM_BASE_PARITY_PER_ROOT_PARITY, () => + promiseWithResolvers(), + ); + private readonly provingInputs = makeTuple(NUM_BASE_PARITY_PER_ROOT_PARITY, () => + promiseWithResolvers>(), + ); + + constructor(private context: OrchestratorContext) {} + + public setNestedSimulation(input: ParityPublicInputs, index: number) { + this.checkIndex(index); + this.simulationInputs[index].resolve(input); + } + + public setNestedProof(input: RootParityInput, index: number) { + this.checkIndex(index); + this.provingInputs[index].resolve(input); + } + + private checkIndex(index: number) { + if (index >= NUM_BASE_PARITY_PER_ROOT_PARITY) { + throw new Error('Invalid child parity index.'); + } + } + + private async getSimulationInputs() { + const inputs = await awaitTuple( + mapTuple(this.simulationInputs, input => + input.promise.then(child => RootParityInput.withEmptyProof(child, NESTED_RECURSIVE_PROOF_LENGTH)), + ), + ); + return new RootParityInputs(inputs); + } + + private async getProvingInputs() { + const inputs = await Promise.all(mapTuple(this.provingInputs, input => input.promise)); + return new RootParityInputs(inputs); + } + + @memoize + public async simulate(): Promise { + return this.context.simulator.simulate({ + type: ProvingRequestType.ROOT_PARITY, + inputs: await this.getSimulationInputs(), + }); + } + + @memoize + public async prove(): Promise> { + const result = await this.context.prover.prove({ + type: ProvingRequestType.ROOT_PARITY, + inputs: await this.getProvingInputs(), + }); + + if (this.context.options.checkSimulationMatchesProof && !result.publicInputs.equals(await this.simulate())) { + throw new Error(`Simulation output and proof public 
inputs do not match`); + } + + return result; + } +} diff --git a/yarn-project/prover-client/src/orchestrator-2/circuits/root-rollup.ts b/yarn-project/prover-client/src/orchestrator-2/circuits/root-rollup.ts new file mode 100644 index 000000000000..8fbc1018e469 --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator-2/circuits/root-rollup.ts @@ -0,0 +1,82 @@ +import { ProvingRequestType, type PublicInputsAndRecursiveProof } from '@aztec/circuit-types'; +import { + type BlockRootOrBlockMergePublicInputs, + PreviousRollupBlockData, + RootRollupInputs, + type RootRollupPublicInputs, +} from '@aztec/circuits.js'; +import { makeTuple } from '@aztec/foundation/array'; +import { memoize } from '@aztec/foundation/decorators'; +import { promiseWithResolvers } from '@aztec/foundation/promise'; +import { mapTuple } from '@aztec/foundation/serialize'; +import { getVKMembershipWitness } from '@aztec/noir-protocol-circuits-types'; + +import { type OrchestratorContext , type Circuit } from '../types.js'; + +export class RootRollupCircuit implements Circuit { + private readonly simulationInputs = makeTuple(2, () => promiseWithResolvers()); + private readonly provingInputs = makeTuple(2, () => + promiseWithResolvers>(), + ); + + constructor(private context: OrchestratorContext) {} + + public setNestedSimulation(simulation: BlockRootOrBlockMergePublicInputs, index: number) { + this.checkIndex(index); + this.simulationInputs[index].resolve(simulation); + } + + public setNestedProof(proof: PublicInputsAndRecursiveProof, index: number) { + this.checkIndex(index); + this.provingInputs[index].resolve(proof); + } + + private checkIndex(index: number) { + if (index > 1) { + throw new Error('Invalid child merge index.'); + } + } + + private async getSimulationInputs(): Promise { + const inputs = await Promise.all(mapTuple(this.simulationInputs, ({ promise }) => promise)); + return new RootRollupInputs( + mapTuple(inputs, input => PreviousRollupBlockData.withEmptyProof(input)), + this.context.proverId, + ); + } + + private async getProvingInputs(): Promise { + const inputs = await Promise.all(mapTuple(this.provingInputs, ({ promise }) => promise)); + + return new RootRollupInputs( + mapTuple( + inputs, + ({ inputs, proof, verificationKey }) => + new PreviousRollupBlockData( + inputs, + proof, + verificationKey.keyAsFields, + getVKMembershipWitness(verificationKey), + ), + ), + this.context.proverId, + ); + } + + @memoize + public async simulate(): Promise { + const inputs = await this.getSimulationInputs(); + return this.context.simulator.simulate({ type: ProvingRequestType.ROOT_ROLLUP, inputs }); + } + + @memoize + public async prove(): Promise> { + const inputs = await this.getProvingInputs(); + const result = await this.context.prover.prove({ type: ProvingRequestType.ROOT_ROLLUP, inputs }); + + if (this.context.options.checkSimulationMatchesProof && !result.inputs.equals(await this.simulate())) { + throw new Error(`Simulation output and proof public inputs do not match`); + } + return result; + } +} diff --git a/yarn-project/prover-client/src/orchestrator-2/circuits/tube.ts b/yarn-project/prover-client/src/orchestrator-2/circuits/tube.ts new file mode 100644 index 000000000000..9e803a39c290 --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator-2/circuits/tube.ts @@ -0,0 +1,19 @@ +import { type ProcessedTx, ProvingRequestType, type TubeProofAndVK } from '@aztec/circuit-types'; +import { TubeInputs } from '@aztec/circuits.js'; +import { memoize } from '@aztec/foundation/decorators'; + +import { 
type Circuit, type OrchestratorContext } from '../types.js'; + +export class TubeCircuit implements Circuit { + constructor(public readonly tx: ProcessedTx, private context: OrchestratorContext) {} + + public simulate(): Promise { + return Promise.resolve(null); + } + + @memoize + public prove(): Promise { + const inputs = new TubeInputs(this.tx.clientIvcProof); + return this.context.prover.prove({ type: ProvingRequestType.TUBE_PROOF, inputs }); + } +} diff --git a/yarn-project/prover-client/src/orchestrator-2/epoch-orchestrator.test.ts b/yarn-project/prover-client/src/orchestrator-2/epoch-orchestrator.test.ts new file mode 100644 index 000000000000..0bd3fd3bbe8e --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator-2/epoch-orchestrator.test.ts @@ -0,0 +1,45 @@ +import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/circuits.js'; +import { fr } from '@aztec/circuits.js/testing'; +import { range } from '@aztec/foundation/array'; +import { times } from '@aztec/foundation/collection'; +import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; + +import { makeBloatedProcessedTx, updateExpectedTreesFromTxs } from '../mocks/fixtures.js'; +import { TestContext } from './test/context.js'; + +describe.skip('EpochOrchestrator', () => { + let context: TestContext; + let logger: DebugLogger; + + beforeAll(() => { + logger = createDebugLogger('aztec:prover-client:test:epoch-orchestrator'); + }); + + beforeEach(async () => { + context = await TestContext.new(logger); + }); + + it('proves an epoch with 4 blocks with 4 txs each', async () => { + const numBlocks = 4; + const orchestrator = context.buildEpochOrchestrator(numBlocks); + + for (let blockNum = 0; blockNum < numBlocks; blockNum++) { + const txs = times(4, j => makeBloatedProcessedTx(context.actualDb, blockNum * 0xffffff + j + 1)); + const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, blockNum * 0xffffff + 0xff).map(fr); + const globals = context.buildGlobals(blockNum + 1); + await updateExpectedTreesFromTxs(context.expectsDb, txs, l1ToL2Messages); + + await orchestrator.addBlock(txs.length, globals, l1ToL2Messages); + for (const tx of txs) { + await orchestrator.addTx(tx); + } + await orchestrator.endBlock(); + await context.assertDbsMatch(); + } + + const result = await orchestrator.simulate(); + const proof = await orchestrator.prove(); + expect(proof.inputs).toEqual(result); + expect(proof.proof).toBeDefined(); + }); +}); diff --git a/yarn-project/prover-client/src/orchestrator-2/epoch-orchestrator.ts b/yarn-project/prover-client/src/orchestrator-2/epoch-orchestrator.ts new file mode 100644 index 000000000000..c5704a17e08a --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator-2/epoch-orchestrator.ts @@ -0,0 +1,119 @@ +import { type ProcessedTx, type PublicInputsAndRecursiveProof } from '@aztec/circuit-types'; +import { type Fr, type GlobalVariables, type RootRollupPublicInputs } from '@aztec/circuits.js'; +import { memoize } from '@aztec/foundation/decorators'; +import { createDebugLogger } from '@aztec/foundation/log'; + +import { BlockOrchestrator } from './block-orchestrator.js'; +import { BlockMergeCircuit } from './circuits/block-merge.js'; +import { RootRollupCircuit } from './circuits/root-rollup.js'; +import { type OrchestratorContext } from './types.js'; +import { getMergeLocation } from './utils.js'; + +/** + * Orchestrates the proving of an epoch (ie many consecutive blocks). 
+ */ +export class EpochOrchestrator { + private readonly blocks: BlockOrchestrator[] = []; + private readonly merges: BlockMergeCircuit[] = []; + + private readonly logger = createDebugLogger('aztec:prover-client:epoch-orchestrator'); + + constructor(public readonly numBlocks: number, private readonly context: OrchestratorContext) { + if (this.context.options.simulationOnly) { + throw new Error('Simulation only mode not intended for epoch orchestrator'); + } + this.handleError = this.handleError.bind(this); + } + + /** + * Adds a new block to the epoch. Updates world-state with L1 to L2 messages and starts proving its parity circuits. + * @param numTxs - The number of transactions in the block. + * @param globalVariables - The global variables for the block. + * @param l1ToL2Messages - The L1 to L2 messages for the block. + */ + public async addBlock(numTxs: number, globalVariables: GlobalVariables, l1ToL2Messages: Fr[]) { + // Creates new block orchestrator + const index = this.blocks.length; + const blockOrchestrator = new BlockOrchestrator(index, numTxs, globalVariables, l1ToL2Messages, this.context); + this.blocks.push(blockOrchestrator); + // Updates world state and kicks off proving + await blockOrchestrator.start(); + // Wires output of this block orchestrator to the parent block merge circuit + this.wire(blockOrchestrator); + } + + /** + * Adds a new tx to the current block in the epoch. Updates world-state with the effects of the tx and starts proving it. + * @param processedTx - The processed tx to add. + */ + public async addTx(processedTx: ProcessedTx): Promise { + await this.currentBlock.addTx(processedTx); + } + + /** + * Marks the current block as ended. Adds padding txs if needed and updates the world-state archive tree with its new header. + */ + public async endBlock(): Promise { + await this.currentBlock.endBlock(); + await this.currentBlock.updateState(); + } + + private get currentBlock() { + return this.blocks[this.blocks.length - 1]; + } + + /** Starts simulation and proving for the given circuit, and wires its output to the parent merge or root circuit. */ + private wire(source: BlockOrchestrator | BlockMergeCircuit) { + const { index } = source; + + // Merge level, or maxlevels + 1 if it's a tx + const mergeLevelCount = Math.ceil(Math.log2(this.numBlocks)) - 1; + const level = 'level' in source ? source.level : mergeLevelCount + 1; + + void source + .simulate() + .then(simulation => this.getParentMerge(level, index).setNestedSimulation(simulation, index % 2)) + .catch(this.handleError); + + void source + .prove() + .then(proof => this.getParentMerge(level, index).setNestedProof(proof, index % 2)) + .catch(this.handleError); + } + + /** Returns or creates a parent block merge or root circuit for the given circuit in the proving tree. */ + private getParentMerge(level: number, index: number): BlockMergeCircuit | RootRollupCircuit { + const location = getMergeLocation({ level, index, total: this.numBlocks }); + if (location.level === 0) { + return this.getRootRollup(); + } + + if (!this.merges[location.indexWithinTree]) { + const merge = new BlockMergeCircuit(location.level, location.indexWithinLevel, this.context); + this.wire(merge); + this.merges[location.indexWithinTree] = merge; + } + + return this.merges[location.indexWithinTree]; + } + + /** Returns the simulation output of the root rollup for the epoch. */ + public simulate(): Promise { + return this.getRootRollup().simulate(); + } + + /** Returns the proof of the root rollup for the epoch. 
*/ + public prove(): Promise> { + return this.getRootRollup().prove(); + } + + @memoize + private getRootRollup() { + return new RootRollupCircuit(this.context); + } + + private handleError(err: Error) { + this.logger.error(`Error in epoch orchestrator`, err); + throw new Error('Unimplemented'); + } +} diff --git a/yarn-project/prover-client/src/orchestrator-2/index.ts b/yarn-project/prover-client/src/orchestrator-2/index.ts new file mode 100644 index 000000000000..eb8fb9ef4ac6 --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator-2/index.ts @@ -0,0 +1,2 @@ +export * from './block-orchestrator.js'; +export * from './epoch-orchestrator.js'; diff --git a/yarn-project/prover-client/src/orchestrator-2/parity-orchestrator.ts b/yarn-project/prover-client/src/orchestrator-2/parity-orchestrator.ts new file mode 100644 index 000000000000..dfa85ae1f5fb --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator-2/parity-orchestrator.ts @@ -0,0 +1,130 @@ +import { MerkleTreeId } from '@aztec/circuit-types'; +import { + BaseParityInputs, + Fr, + L1_TO_L2_MSG_SUBTREE_HEIGHT, + L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, + type NESTED_RECURSIVE_PROOF_LENGTH, + NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, + NUM_BASE_PARITY_PER_ROOT_PARITY, + type ParityPublicInputs, + type RootParityInput, +} from '@aztec/circuits.js'; +import { padArrayEnd, times } from '@aztec/foundation/collection'; +import { memoize } from '@aztec/foundation/decorators'; +import { promiseWithResolvers } from '@aztec/foundation/promise'; +import { type Tuple } from '@aztec/foundation/serialize'; + +import { getSubtreeSiblingPath, getTreeSnapshot } from '../orchestrator/block-building-helpers.js'; +import { BaseParityCircuit } from './circuits/base-parity.js'; +import { RootParityCircuit } from './circuits/root-parity.js'; +import { type OrchestratorContext } from './types.js'; + +export class ParityOrchestrator { + private baseParityJobs?: BaseParityCircuit[]; + private rootParityJob?: RootParityCircuit; + + private simulationPromise = promiseWithResolvers(); + private proofPromise = promiseWithResolvers>(); + + constructor(private readonly unpaddedl1ToL2Messages: Fr[], private readonly context: OrchestratorContext) { + this.handleError = this.handleError.bind(this); + } + + @memoize + public async updateState() { + const messageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, this.context.db); + + const newL1ToL2MessageTreeRootSiblingPath = padArrayEnd( + await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, this.context.db), + Fr.ZERO, + L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, + ); + + // Update the local trees to include the new l1 to l2 messages + await this.context.db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, this.l1ToL2Messages); + + return { + l1ToL2Messages: this.l1ToL2Messages, + messageTreeSnapshot, + newL1ToL2MessageTreeRootSiblingPath, + }; + } + + @memoize + public simulate() { + this.start(); + return this.simulationPromise.promise; + } + + @memoize + public prove() { + this.start(); + return this.proofPromise.promise; + } + + private get l1ToL2Messages() { + return padArrayEnd( + this.unpaddedl1ToL2Messages, + Fr.ZERO, + NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, + 'Too many L1 to L2 messages', + ); + } + + @memoize + private start() { + const rootParityJob = this.createRootParityProvingJob(); + this.rootParityJob = rootParityJob; + + const count = NUM_BASE_PARITY_PER_ROOT_PARITY; + const messageBatches = times(count, i => 
BaseParityInputs.sliceMessages(this.l1ToL2Messages, i)); + + this.baseParityJobs = messageBatches.map((messages, index) => { + return this.createBaseParityProvingJob(messages, index, rootParityJob); + }); + } + + private createBaseParityProvingJob( + messages: Tuple, + index: number, + rootParityJob: RootParityCircuit, + ) { + const job = new BaseParityCircuit(messages, index, this.context); + void job + .simulate() + .then(simulation => rootParityJob.setNestedSimulation(simulation, index)) + .catch(this.handleError); + + if (!this.context.options.simulationOnly) { + void job + .prove() + .then(proof => rootParityJob.setNestedProof(proof, index)) + .catch(this.handleError); + } + + return job; + } + + private createRootParityProvingJob() { + const rootParityJob = new RootParityCircuit(this.context); + + void rootParityJob + .simulate() + .then(simulation => this.simulationPromise.resolve(simulation)) + .catch(this.handleError); + + if (!this.context.options.simulationOnly) { + void rootParityJob + .prove() + .then(proof => this.proofPromise.resolve(proof)) + .catch(this.handleError); + } + + return rootParityJob; + } + + private handleError(err: Error) { + throw err; // TODO: proper error handling + } +} diff --git a/yarn-project/prover-client/src/orchestrator-2/test/context.ts b/yarn-project/prover-client/src/orchestrator-2/test/context.ts new file mode 100644 index 000000000000..2a0d311cae82 --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator-2/test/context.ts @@ -0,0 +1,270 @@ +import { type BBProverConfig } from '@aztec/bb-prover'; +import { + type BlockProver, + type MerkleTreeAdminOperations, + MerkleTreeId, + type ProcessedTx, + type PublicExecutionRequest, + type ServerCircuitProver, + type TreeInfo, + type Tx, + type TxValidator, + merkleTreeIds, +} from '@aztec/circuit-types'; +import { type Gas, GlobalVariables, Header, type Nullifier, type TxContext } from '@aztec/circuits.js'; +import { Fr } from '@aztec/foundation/fields'; +import { type DebugLogger } from '@aztec/foundation/log'; +import { openTmpStore } from '@aztec/kv-store/utils'; +import { + type ContractsDataSourcePublicDB, + type PublicExecutionResult, + PublicExecutionResultBuilder, + type PublicExecutor, + PublicProcessor, + RealPublicKernelCircuitSimulator, + type SimulationProvider, + WASMSimulator, + type WorldStatePublicDB, +} from '@aztec/simulator'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; +import { MerkleTrees } from '@aztec/world-state'; +import { NativeWorldStateService } from '@aztec/world-state/native'; + +import { expect } from '@jest/globals'; +import * as fs from 'fs/promises'; +import { type MockProxy, mock } from 'jest-mock-extended'; +import { tmpdir } from 'os'; +import { join } from 'path'; + +import { TestCircuitProver } from '../../../../bb-prover/src/test/test_circuit_prover.js'; +import { getEnvironmentConfig, getSimulationProvider, makeGlobals } from '../../mocks/fixtures.js'; +import { MemoryProvingQueue } from '../../prover-agent/memory-proving-queue.js'; +import { ProverAgent } from '../../prover-agent/prover-agent.js'; +import { ProverAdapter } from '../adapters/prover.js'; +import { SimulatorAdapter } from '../adapters/simulator.js'; +import { BlockOrchestrator } from '../block-orchestrator.js'; +import { EpochOrchestrator } from '../epoch-orchestrator.js'; +import { type OrchestratorContext } from '../types.js'; + +// Adapted from mocks/test_context +export class TestContext { + constructor( + public publicExecutor: MockProxy, + public 
publicContractsDB: MockProxy, + public publicWorldStateDB: MockProxy, + public publicProcessor: PublicProcessor, + public simulationProvider: SimulationProvider, + public actualDb: MerkleTreeAdminOperations, + public expectsDb: MerkleTreeAdminOperations, + public prover: ServerCircuitProver, + public proverAgent: ProverAgent, + public epochNumber: number, + public orchestratorContext: OrchestratorContext, + public directoriesToCleanup: string[], + public logger: DebugLogger, + ) {} + + static async new( + logger: DebugLogger, + worldState: 'native' | 'legacy' = 'legacy', + proverCount = 4, + createProver: (bbConfig: BBProverConfig) => Promise = _ => + Promise.resolve(new TestCircuitProver(new NoopTelemetryClient(), new WASMSimulator())), + epochNumber = 3, + ) { + const directoriesToCleanup: string[] = []; + + const publicExecutor = mock(); + const publicContractsDB = mock(); + const publicWorldStateDB = mock(); + const publicKernel = new RealPublicKernelCircuitSimulator(new WASMSimulator()); + const telemetry = new NoopTelemetryClient(); + + let actualDb: MerkleTreeAdminOperations; + + if (worldState === 'native') { + const dir = await fs.mkdtemp(join(tmpdir(), 'prover-client-world-state-')); + directoriesToCleanup.push(dir); + const ws = await NativeWorldStateService.create(dir); + actualDb = ws.asLatest(); + } else { + const ws = await MerkleTrees.new(openTmpStore(), telemetry); + actualDb = ws.asLatest(); + } + + const expectsDb = await MerkleTrees.new(openTmpStore(), new NoopTelemetryClient()).then(t => t.asLatest()); + + const processor = new PublicProcessor( + actualDb, + publicExecutor, + publicKernel, + GlobalVariables.empty(), + Header.empty(), + publicContractsDB, + publicWorldStateDB, + telemetry, + ); + + let localProver: ServerCircuitProver; + const config = await getEnvironmentConfig(logger); + const simulationProvider = await getSimulationProvider({ + acvmWorkingDirectory: config?.acvmWorkingDirectory, + acvmBinaryPath: config?.expectedAcvmPath, + }); + if (!config) { + localProver = new TestCircuitProver(new NoopTelemetryClient(), simulationProvider); + } else { + const bbConfig: BBProverConfig = { + acvmBinaryPath: config.expectedAcvmPath, + acvmWorkingDirectory: config.acvmWorkingDirectory, + bbBinaryPath: config.expectedBBPath, + bbWorkingDirectory: config.bbWorkingDirectory, + }; + localProver = await createProver(bbConfig); + } + + if (config?.directoryToCleanup) { + directoriesToCleanup.push(config.directoryToCleanup); + } + + const queue = new MemoryProvingQueue(telemetry); + const agent = new ProverAgent(localProver, proverCount); + + queue.start(); + agent.start(queue); + + const context: OrchestratorContext = { + db: actualDb, + simulator: new SimulatorAdapter( + epochNumber, + new TestCircuitProver(new NoopTelemetryClient(), new WASMSimulator()), + undefined, + ), + prover: new ProverAdapter(epochNumber, queue, undefined), + proverId: Fr.fromString('0x1234'), + telemetryClient: new NoopTelemetryClient(), + options: { + checkSimulationMatchesProof: true, + avmProvingStrict: false, + simulationOnly: false, + }, + }; + + return new this( + publicExecutor, + publicContractsDB, + publicWorldStateDB, + processor, + simulationProvider, + actualDb, + expectsDb, + localProver, + agent, + epochNumber, + context, + directoriesToCleanup, + logger, + ); + } + + buildGlobals(blockNumber: number) { + return makeGlobals(blockNumber); + } + + buildBlockOrchestrator(args: { + numTxs: number; + l1ToL2Messages?: Fr[]; + blockNumber?: number; + simulationOnly?: boolean; + }) { + 
const globals = this.buildGlobals(args.blockNumber ?? 10); + this.orchestratorContext.options.simulationOnly = args.simulationOnly ?? false; + return new BlockOrchestrator(0, args.numTxs, globals, args.l1ToL2Messages ?? [], this.orchestratorContext); + } + + buildEpochOrchestrator(numBlocks: number) { + return new EpochOrchestrator(numBlocks, this.orchestratorContext); + } + + async assertDbsMatch() { + const inspect = (tree: TreeInfo) => `size=${tree.size} root=${tree.root.toString('hex')} id=${tree.treeId}`; + for (const treeId of merkleTreeIds()) { + // It's not trivial to compute the block hash just from a batch of txs, so we skip this check + if (treeId === MerkleTreeId.ARCHIVE) { + continue; + } + const actualTree = await this.actualDb.getTreeInfo(treeId); + const expectedTree = await this.expectsDb.getTreeInfo(treeId); + expect(inspect(actualTree)).toEqual(inspect(expectedTree)); + } + } + + async cleanup() { + await this.proverAgent.stop(); + for (const dir of this.directoriesToCleanup.filter(x => x !== '')) { + await fs.rm(dir, { recursive: true, force: true }); + } + } + + public async processPublicFunctions( + txs: Tx[], + maxTransactions: number, + blockProver?: BlockProver, + txValidator?: TxValidator, + ) { + const defaultExecutorImplementation = ( + execution: PublicExecutionRequest, + _globalVariables: GlobalVariables, + availableGas: Gas, + _txContext: TxContext, + _pendingNullifiers: Nullifier[], + transactionFee?: Fr, + _sideEffectCounter?: number, + ) => { + for (const tx of txs) { + const allCalls = tx.publicTeardownFunctionCall.isEmpty() + ? tx.enqueuedPublicFunctionCalls + : [...tx.enqueuedPublicFunctionCalls, tx.publicTeardownFunctionCall]; + for (const request of allCalls) { + if (execution.contractAddress.equals(request.contractAddress)) { + const result = PublicExecutionResultBuilder.fromPublicExecutionRequest({ request }).build({ + startGasLeft: availableGas, + endGasLeft: availableGas, + transactionFee, + }); + return Promise.resolve(result); + } + } + } + throw new Error(`Unexpected execution request: ${execution}`); + }; + return await this.processPublicFunctionsWithMockExecutorImplementation( + txs, + maxTransactions, + blockProver, + txValidator, + defaultExecutorImplementation, + ); + } + + public async processPublicFunctionsWithMockExecutorImplementation( + txs: Tx[], + maxTransactions: number, + blockProver?: BlockProver, + txValidator?: TxValidator, + executorMock?: ( + execution: PublicExecutionRequest, + globalVariables: GlobalVariables, + availableGas: Gas, + txContext: TxContext, + pendingNullifiers: Nullifier[], + transactionFee?: Fr, + sideEffectCounter?: number, + ) => Promise, + ) { + if (executorMock) { + this.publicExecutor.simulate.mockImplementation(executorMock); + } + return await this.publicProcessor.process(txs, maxTransactions, blockProver, txValidator); + } +} diff --git a/yarn-project/prover-client/src/orchestrator-2/tx-orchestrator.ts b/yarn-project/prover-client/src/orchestrator-2/tx-orchestrator.ts new file mode 100644 index 000000000000..2182fa11b12e --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator-2/tx-orchestrator.ts @@ -0,0 +1,182 @@ +import { + type AvmProvingRequest, + EncryptedNoteTxL2Logs, + EncryptedTxL2Logs, + type ProcessedTx, + type PublicInputsAndRecursiveProof, + UnencryptedTxL2Logs, + toTxEffect, +} from '@aztec/circuit-types'; +import { type BaseOrMergeRollupPublicInputs, Fr, type GlobalVariables } from '@aztec/circuits.js'; +import { memoize } from '@aztec/foundation/decorators'; +import { 
createDebugLogger } from '@aztec/foundation/log'; +import { toFriendlyJSON } from '@aztec/foundation/serialize'; + +import { AvmCircuit } from './circuits/avm.js'; +import { BaseRollupCircuit } from './circuits/base-rollup.js'; +import { PublicKernelNonTailCircuit } from './circuits/public-kernel-non-tail.js'; +import { PublicKernelTailCircuit } from './circuits/public-kernel-tail.js'; +import { TubeCircuit } from './circuits/tube.js'; +import { type OrchestratorContext } from './types.js'; + +/** + * Orchestrates the proving of a single transaction. + */ +export class TxOrchestrator { + private readonly baseRollup: BaseRollupCircuit; + + private readonly logger = createDebugLogger('aztec:prover-client:tx-orchestrator'); + + constructor( + public readonly tx: ProcessedTx, + public readonly globalVariables: GlobalVariables, + public readonly index: number, + private readonly context: OrchestratorContext, + ) { + this.validateTx(); + this.baseRollup = this.createBaseRollup(); + this.handleError = this.handleError.bind(this); + } + + /** + * Updates world-state with the outputs of the transaction. + * Required in order for simulate and prove to complete. + */ + @memoize + public async updateState() { + await this.baseRollup.updateState(); + } + + /** + * Simulates the base rollup circuit for the transaction. + * Requires updateState to have been called, blocks otherwise. + */ + @memoize + public simulate(): Promise { + return this.baseRollup.simulate(); + } + + /** + * Proves all public proof requests in the transaction and returns the base rollup proof. + * Requires updateState to have been called, blocks otherwise. + */ + @memoize + public async prove(): Promise> { + void this.startProving(); + return await this.baseRollup.prove(); + } + + /** + * Returns the tx effect for the transaction. + */ + @memoize + public getTxEffect() { + return toTxEffect(this.tx, this.globalVariables.gasFees); + } + + private createBaseRollup() { + return new BaseRollupCircuit(this.tx, this.globalVariables, this.index, this.context); + } + + private startProving() { + const tube = new TubeCircuit(this.tx, this.context); + + // The first kernel will be fed the output of the tube proof + let previousKernel: TubeCircuit | PublicKernelNonTailCircuit | PublicKernelTailCircuit = tube; + + for (let i = 0; i < this.tx.publicProvingRequests.length; i++) { + const provingRequest = this.tx.publicProvingRequests[i]; + + // Public kernels are either tied to an AVM proof or a tail + const publicKernel = + provingRequest.type === 'AVM' + ? 
this.createPublicKernelForAvm(provingRequest) + : new PublicKernelTailCircuit(provingRequest, this.context); + + // Kick off proving for the previous kernel and wire its output onto the next one + void previousKernel + .prove() + .then(proof => publicKernel.setPreviousKernelProof(proof)) + .catch(this.handleError); + + previousKernel = publicKernel; + } + + // Prove the last kernel and wire it into the base rollup + previousKernel + .prove() + .then(proof => this.baseRollup.setNestedKernelProof(proof)) + .catch(this.handleError); + } + + private createPublicKernelForAvm(provingRequest: AvmProvingRequest) { + const avm = new AvmCircuit(provingRequest, this.context); + const publicKernelNonTail = new PublicKernelNonTailCircuit(provingRequest.kernelRequest, this.context); + + // Kick off AVM proving and wire it into its kernel + void avm + .prove() + .then(proof => publicKernelNonTail.setNestedAvmProof(proof)) + .catch(this.handleError); + + return publicKernelNonTail; + } + + private handleError(err: Error) { + this.logger.error(`Error in tx orchestrator`, err); + throw new Error('Unimplemented'); + } + + private validateTx() { + const tx = this.tx; + const kernelData = tx.data; + const txHeader = kernelData.constants.historicalHeader; + + if (txHeader.state.l1ToL2MessageTree.isZero()) { + throw new Error(`Empty L1 to L2 messages tree in tx: ${toFriendlyJSON(tx)}`); + } + if (txHeader.state.partial.noteHashTree.isZero()) { + throw new Error(`Empty note hash tree in tx: ${toFriendlyJSON(tx)}`); + } + if (txHeader.state.partial.nullifierTree.isZero()) { + throw new Error(`Empty nullifier tree in tx: ${toFriendlyJSON(tx)}`); + } + if (txHeader.state.partial.publicDataTree.isZero()) { + throw new Error(`Empty public data tree in tx: ${toFriendlyJSON(tx)}`); + } + + const txNoteEncryptedLogs = EncryptedNoteTxL2Logs.hashNoteLogs( + kernelData.end.noteEncryptedLogsHashes.filter(log => !log.isEmpty()).map(log => log.value.toBuffer()), + ); + + if (!txNoteEncryptedLogs.equals(tx.noteEncryptedLogs.hash())) { + throw new Error( + `Note encrypted logs hash mismatch: ${Fr.fromBuffer(txNoteEncryptedLogs)} !== ${Fr.fromBuffer( + tx.noteEncryptedLogs.hash(), + )}`, + ); + } + + const txEncryptedLogs = EncryptedTxL2Logs.hashSiloedLogs( + kernelData.end.encryptedLogsHashes.filter(log => !log.isEmpty()).map(log => log.getSiloedHash()), + ); + + if (!txEncryptedLogs.equals(tx.encryptedLogs.hash())) { + throw new Error( + `Encrypted logs hash mismatch: ${Fr.fromBuffer(txEncryptedLogs)} !== ${Fr.fromBuffer(tx.encryptedLogs.hash())}`, + ); + } + + const txUnencryptedLogs = UnencryptedTxL2Logs.hashSiloedLogs( + kernelData.end.unencryptedLogsHashes.filter(log => !log.isEmpty()).map(log => log.getSiloedHash()), + ); + + if (!txUnencryptedLogs.equals(tx.unencryptedLogs.hash())) { + throw new Error( + `Unencrypted logs hash mismatch: ${Fr.fromBuffer(txUnencryptedLogs)} !== ${Fr.fromBuffer( + tx.unencryptedLogs.hash(), + )}`, + ); + } + } +} diff --git a/yarn-project/prover-client/src/orchestrator-2/types.ts b/yarn-project/prover-client/src/orchestrator-2/types.ts new file mode 100644 index 000000000000..a30a75601e11 --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator-2/types.ts @@ -0,0 +1,60 @@ +import { + type MerkleTreeOperations, + type ProvingRequest, + type ProvingRequestResult, + type ProvingRequestType, + type SimulationRequestResult, +} from '@aztec/circuit-types'; +import { + type AppendOnlyTreeSnapshot, + type Fr, + type L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, + type 
NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, +} from '@aztec/circuits.js'; +import { type Tuple } from '@aztec/foundation/serialize'; +import { type TelemetryClient } from '@aztec/telemetry-client'; + +export type ParityState = { + l1ToL2Messages: Tuple; + newL1ToL2MessageTreeRootSiblingPath: Tuple; + messageTreeSnapshot: AppendOnlyTreeSnapshot; +}; + +type PayloadUrl = string; + +export interface Prover { + prove(request: Request): Promise>; +} + +export interface Simulator { + simulate(request: Request): Promise>; +} + +export interface PayloadStore { + save(id: string, payload: Buffer): Promise; + load(payloadUrl: PayloadUrl): Promise; +} + +export interface MetadataStore { + save(id: string, metadata: any): Promise; + load(id: string): Promise; + list(parentId: string): Promise; +} + +export interface Circuit { + simulate(): Promise>; + prove(): Promise>; +} + +export type OrchestratorContext = { + db: MerkleTreeOperations; + simulator: Simulator; + prover: Prover; + telemetryClient: TelemetryClient; + proverId: Fr; + options: { + checkSimulationMatchesProof: boolean; + avmProvingStrict: boolean; + simulationOnly: boolean; + }; +}; diff --git a/yarn-project/prover-client/src/orchestrator-2/utils.ts b/yarn-project/prover-client/src/orchestrator-2/utils.ts new file mode 100644 index 000000000000..6e10b47cb3fe --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator-2/utils.ts @@ -0,0 +1,51 @@ +type TreeNodeLocation = { + /** Level of the node in the tree. Leaves are considered level zero. */ + level: number; + /** Index of the node within its level. */ + indexWithinLevel: number; + /** Whether it's a left or a right node (0 for left, 1 for right). */ + indexLeftOrRight: number; + /** Index of the node within a flattened representation of the tree in level ordering. */ + indexWithinTree: number; +}; + +/** + * Calculates the index and level of the parent rollup circuit in the proving tree + * Based on tree implementation in UnbalancedTree.batchInsert + * @returns A TreeNodeLocation locating the parent node + */ +export function getMergeLocation(args: { level: number; index: number; total: number }): TreeNodeLocation { + const { level: currentLevel, index: currentIndex, total } = args; + const numMergeLevels = Math.ceil(Math.log2(total)) - 1; + + const moveUpMergeLevel = (levelSize: number, index: number, nodeToShift: boolean) => { + levelSize /= 2; + if (levelSize & 1) { + [levelSize, nodeToShift] = nodeToShift ? [levelSize + 1, false] : [levelSize - 1, true]; + } + index >>= 1; + return { thisLevelSize: levelSize, thisIndex: index, shiftUp: nodeToShift }; + }; + + let [thisLevelSize, shiftUp] = total & 1 ? 
[total - 1, true] : [total, false]; + const maxLevel = numMergeLevels + 1; + let placeholder = currentIndex; + for (let i = 0; i < maxLevel - currentLevel; i++) { + ({ thisLevelSize, thisIndex: placeholder, shiftUp } = moveUpMergeLevel(thisLevelSize, placeholder, shiftUp)); + } + let thisIndex = currentIndex; + let mergeLevel = currentLevel; + while (thisIndex >= thisLevelSize && mergeLevel != 0) { + mergeLevel -= 1; + ({ thisLevelSize, thisIndex, shiftUp } = moveUpMergeLevel(thisLevelSize, thisIndex, shiftUp)); + } + + const level = mergeLevel - 1; + const indexWithinLevel = thisIndex >> 1; + return { + level, + indexWithinLevel, + indexLeftOrRight: thisIndex & 1, + indexWithinTree: 2 ** level - 1 + indexWithinLevel, + }; +} diff --git a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts index 278e6f7a65e1..544512175ae5 100644 --- a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts +++ b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts @@ -1,6 +1,5 @@ -import { MerkleTreeId, type ProcessedTx } from '@aztec/circuit-types'; +import { MerkleTreeId, type ProcessedTx, type TreeHeights, getTreeHeight } from '@aztec/circuit-types'; import { - ARCHIVE_HEIGHT, AppendOnlyTreeSnapshot, type BaseOrMergeRollupPublicInputs, BaseRollupInputs, @@ -155,12 +154,7 @@ export async function buildBaseRollupInput( }); const blockHash = tx.data.constants.historicalHeader.hash(); - const archiveRootMembershipWitness = await getMembershipWitnessFor( - blockHash, - MerkleTreeId.ARCHIVE, - ARCHIVE_HEIGHT, - db, - ); + const archiveRootMembershipWitness = await getMembershipWitnessFor(blockHash, MerkleTreeId.ARCHIVE, db); return BaseRollupInputs.from({ kernelData: getKernelDataFor(tx, kernelVk, proof), @@ -258,21 +252,9 @@ export async function getBlockRootRollupInput( getPreviousRollupDataFromPublicInputs(rollupOutputRight, rollupProofRight, verificationKeyRight), ]; - const getRootTreeSiblingPath = async (treeId: MerkleTreeId) => { - const { size } = await db.getTreeInfo(treeId); - const path = await db.getSiblingPath(treeId, size); - return path.toFields(); - }; - // Get blocks tree const startArchiveSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db); - const newArchiveSiblingPathArray = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE); - - const newArchiveSiblingPath = makeTuple( - ARCHIVE_HEIGHT, - i => (i < newArchiveSiblingPathArray.length ? 
newArchiveSiblingPathArray[i] : Fr.ZERO), - 0, - ); + const newArchiveSiblingPath = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE, db); return BlockRootRollupInputs.from({ previousRollupData, @@ -288,6 +270,12 @@ export async function getBlockRootRollupInput( }); } +export async function getRootTreeSiblingPath(treeId: TID, db: MerkleTreeOperations) { + const { size } = await db.getTreeInfo(treeId); + const path = await db.getSiblingPath(treeId, size); + return padArrayEnd(path.toFields(), Fr.ZERO, getTreeHeight(treeId)); +} + // Builds the inputs for the final root rollup circuit, without making any changes to trees // TODO(#7346): Integrate batch rollup circuits and test below export function getRootRollupInput( @@ -459,18 +447,19 @@ export async function getSubtreeSiblingPath( } // Scan a tree searching for a specific value and return a membership witness proof for it -export async function getMembershipWitnessFor( +export async function getMembershipWitnessFor( value: Fr, - treeId: MerkleTreeId, - height: N, + treeId: TID, db: MerkleTreeOperations, -): Promise> { +): Promise> { + const height = getTreeHeight(treeId); + // If this is an empty tx, then just return zeroes if (value.isZero()) { return makeEmptyMembershipWitness(height); } - const index = await db.findLeafIndex(treeId, value.toBuffer()); + const index = await db.findLeafIndex(treeId as MerkleTreeId, value.toBuffer()); if (index === undefined) { throw new Error(`Leaf with value ${value} not found in tree ${MerkleTreeId[treeId]}`); } @@ -480,15 +469,19 @@ export async function getMembershipWitnessFor( export function validatePartialState( partialState: PartialStateReference, - treeSnapshots: Map, + treeSnapshots: + | Map + | Record< + MerkleTreeId.NOTE_HASH_TREE | MerkleTreeId.NULLIFIER_TREE | MerkleTreeId.PUBLIC_DATA_TREE, + AppendOnlyTreeSnapshot + >, ) { - validateSimulatedTree(treeSnapshots.get(MerkleTreeId.NOTE_HASH_TREE)!, partialState.noteHashTree, 'NoteHashTree'); - validateSimulatedTree(treeSnapshots.get(MerkleTreeId.NULLIFIER_TREE)!, partialState.nullifierTree, 'NullifierTree'); - validateSimulatedTree( - treeSnapshots.get(MerkleTreeId.PUBLIC_DATA_TREE)!, - partialState.publicDataTree, - 'PublicDataTree', - ); + const getSnapshot = ( + treeId: MerkleTreeId.NOTE_HASH_TREE | MerkleTreeId.NULLIFIER_TREE | MerkleTreeId.PUBLIC_DATA_TREE, + ) => ('get' in treeSnapshots ? 
treeSnapshots.get(treeId) : treeSnapshots[treeId])!; + validateSimulatedTree(getSnapshot(MerkleTreeId.NOTE_HASH_TREE), partialState.noteHashTree, 'NoteHashTree'); + validateSimulatedTree(getSnapshot(MerkleTreeId.NULLIFIER_TREE), partialState.nullifierTree, 'NullifierTree'); + validateSimulatedTree(getSnapshot(MerkleTreeId.PUBLIC_DATA_TREE), partialState.publicDataTree, 'PublicDataTree'); } // Helper for comparing two trees snapshots diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.ts index b0ea8d36e60d..2bdb7e1325d1 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -790,13 +790,13 @@ export class ProvingOrchestrator implements BlockProver { ), result => { logger.debug(`Completed tube proof for tx index: ${txIndex}`); - const nextKernelRequest = txProvingState.getNextPublicKernelFromTubeProof(result.tubeProof, result.tubeVK); + const nextKernelRequest = txProvingState.getNextPublicKernelFromTubeProof(result.proof, result.verificationKey); this.checkAndEnqueueNextTxCircuit( provingState, txIndex, -1, - result.tubeProof, - result.tubeVK, + result.proof, + result.verificationKey, nextKernelRequest, ); }, diff --git a/yarn-project/prover-client/src/prover-agent/memory-proving-queue.ts b/yarn-project/prover-client/src/prover-agent/memory-proving-queue.ts index 7dc8e10012ad..d19903f29a9e 100644 --- a/yarn-project/prover-client/src/prover-agent/memory-proving-queue.ts +++ b/yarn-project/prover-client/src/prover-agent/memory-proving-queue.ts @@ -9,6 +9,7 @@ import { type PublicKernelNonTailRequest, type PublicKernelTailRequest, type ServerCircuitProver, + type TubeProofAndVK, } from '@aztec/circuit-types'; import type { AvmCircuitInputs, @@ -24,13 +25,11 @@ import type { PrivateKernelEmptyInputData, PublicKernelCircuitPublicInputs, RECURSIVE_PROOF_LENGTH, - RecursiveProof, RootParityInput, RootParityInputs, RootRollupInputs, RootRollupPublicInputs, TubeInputs, - VerificationKeyData, } from '@aztec/circuits.js'; import { randomBytes } from '@aztec/foundation/crypto'; import { AbortError, TimeoutError } from '@aztec/foundation/error'; @@ -273,11 +272,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource return this.enqueue({ type: ProvingRequestType.PRIVATE_KERNEL_EMPTY, inputs }, signal, epochNumber); } - getTubeProof( - inputs: TubeInputs, - signal?: AbortSignal, - epochNumber?: number, - ): Promise<{ tubeVK: VerificationKeyData; tubeProof: RecursiveProof }> { + getTubeProof(inputs: TubeInputs, signal?: AbortSignal, epochNumber?: number): Promise { return this.enqueue({ type: ProvingRequestType.TUBE_PROOF, inputs }, signal, epochNumber); } diff --git a/yarn-project/prover-client/src/test/mock_prover.ts b/yarn-project/prover-client/src/test/mock_prover.ts index af54ea81fd08..b0bbaf5a4aa2 100644 --- a/yarn-project/prover-client/src/test/mock_prover.ts +++ b/yarn-project/prover-client/src/test/mock_prover.ts @@ -137,12 +137,12 @@ export class MockProver implements ServerCircuitProver { } getTubeProof(): Promise<{ - tubeVK: VerificationKeyData; - tubeProof: RecursiveProof; + verificationKey: VerificationKeyData; + proof: RecursiveProof; }> { return Promise.resolve({ - tubeVK: VerificationKeyData.makeFake(), - tubeProof: makeRecursiveProof(RECURSIVE_PROOF_LENGTH), + verificationKey: VerificationKeyData.makeFake(), + proof: makeRecursiveProof(RECURSIVE_PROOF_LENGTH), }); } } diff --git 
a/yarn-project/prover-node/package.json b/yarn-project/prover-node/package.json index 1ab15127b80d..6d141d3c3cd9 100644 --- a/yarn-project/prover-node/package.json +++ b/yarn-project/prover-node/package.json @@ -31,6 +31,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/pxe/package.json b/yarn-project/pxe/package.json index 1f0e9c1aaf45..b36c544458df 100644 --- a/yarn-project/pxe/package.json +++ b/yarn-project/pxe/package.json @@ -38,6 +38,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/scripts/package.json b/yarn-project/scripts/package.json index 0f5849a2ab2e..99dc133984f4 100644 --- a/yarn-project/scripts/package.json +++ b/yarn-project/scripts/package.json @@ -65,6 +65,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/sequencer-client/package.json b/yarn-project/sequencer-client/package.json index 835dd9bbea1e..9b7e9212da44 100644 --- a/yarn-project/sequencer-client/package.json +++ b/yarn-project/sequencer-client/package.json @@ -89,6 +89,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/simulator/package.json b/yarn-project/simulator/package.json index 8356b976f28b..33694b0aea72 100644 --- a/yarn-project/simulator/package.json +++ b/yarn-project/simulator/package.json @@ -38,6 +38,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/telemetry-client/package.json b/yarn-project/telemetry-client/package.json index 6647847ff448..ecbc5444c34d 100644 --- a/yarn-project/telemetry-client/package.json +++ b/yarn-project/telemetry-client/package.json @@ -56,6 +56,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/txe/package.json b/yarn-project/txe/package.json index ca291a0e2dbd..f9c6c329220e 100644 --- a/yarn-project/txe/package.json +++ b/yarn-project/txe/package.json @@ -39,6 +39,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/types/package.json b/yarn-project/types/package.json index 4fe9e6e145b8..ac680f836866 100644 --- a/yarn-project/types/package.json +++ b/yarn-project/types/package.json @@ -41,6 +41,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/validator-client/package.json b/yarn-project/validator-client/package.json index 23ca3b591068..a3cb3cf04faf 100644 --- a/yarn-project/validator-client/package.json +++ b/yarn-project/validator-client/package.json @@ -38,6 +38,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/world-state/package.json b/yarn-project/world-state/package.json index 8c7a1f793e7f..f1d6e33fd609 100644 --- a/yarn-project/world-state/package.json +++ b/yarn-project/world-state/package.json @@ -39,6 +39,9 @@ "parser": { "syntax": "typescript", "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" } } } diff --git a/yarn-project/world-state/src/world-state-db/merkle_tree_operations_facade.ts 
b/yarn-project/world-state/src/world-state-db/merkle_tree_operations_facade.ts index 39975c305513..004a6e03ea47 100644 --- a/yarn-project/world-state/src/world-state-db/merkle_tree_operations_facade.ts +++ b/yarn-project/world-state/src/world-state-db/merkle_tree_operations_facade.ts @@ -1,4 +1,10 @@ -import { type BatchInsertionResult, type L2Block, type MerkleTreeId, type SiblingPath } from '@aztec/circuit-types'; +import { + type BatchInsertionResult, + type L2Block, + type MerkleTreeId, + type SiblingPath, + TreeHeights, +} from '@aztec/circuit-types'; import { type HandleL2BlockAndMessagesResult, type IndexedTreeId, @@ -172,11 +178,11 @@ export class MerkleTreeOperationsFacade implements MerkleTreeOperations { * @param subtreeHeight - Height of the subtree. * @returns The data for the leaves to be updated when inserting the new ones. */ - public batchInsert( - treeId: IndexedTreeId, + public batchInsert( + treeId: TID, leaves: Buffer[], subtreeHeight: number, - ): Promise> { + ): Promise> { return this.trees.batchInsert(treeId, leaves, subtreeHeight); } }
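Note on the orchestrator-2 circuits added above: they all follow the same wiring pattern. Each circuit holds unresolved promises for its nested inputs, the parent orchestrator resolves those promises as child simulations and proofs complete, and the memoized simulate()/prove() methods block until everything they depend on has arrived. The sketch below is a minimal, self-contained illustration of that pattern only; the names Resolvable, ChildOutput, and ParentCircuit are invented for the example and are not part of the Aztec codebase, and the local resolvable() helper merely stands in for promiseWithResolvers from @aztec/foundation/promise.

```typescript
// Minimal sketch (illustrative names only) of the promise-wiring pattern used by the orchestrator-2 circuits.

type Resolvable<T> = { promise: Promise<T>; resolve: (value: T) => void };

/** Tiny stand-in for a promise-with-resolvers helper. */
function resolvable<T>(): Resolvable<T> {
  let resolve!: (value: T) => void;
  const promise = new Promise<T>(r => (resolve = r));
  return { promise, resolve };
}

type ChildOutput = { index: number; value: string };

class ParentCircuit {
  // One pending slot per nested child; the orchestrator resolves these as children finish.
  private readonly nested = [resolvable<ChildOutput>(), resolvable<ChildOutput>()];
  private proofPromise?: Promise<string>;

  /** Called by the orchestrator when a child proof completes. */
  setNestedProof(output: ChildOutput, index: number) {
    if (index > 1) {
      throw new Error('Invalid child index.');
    }
    this.nested[index].resolve(output);
  }

  /** Memoized by hand: blocks until both nested outputs have been wired in, then runs once. */
  prove(): Promise<string> {
    this.proofPromise ??= Promise.all(this.nested.map(n => n.promise)).then(
      children => `parent proof over [${children.map(c => c.value).join(', ')}]`,
    );
    return this.proofPromise;
  }
}

// Usage: kick off the parent first, then feed child outputs in any order.
const parent = new ParentCircuit();
const pending = parent.prove();
parent.setNestedProof({ index: 1, value: 'child-1' }, 1);
parent.setNestedProof({ index: 0, value: 'child-0' }, 0);
void pending.then(proof => console.log(proof));
```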